pull-requests: allow to show range diff in pr view
marcink
r3124:ddd1ae7b default
@@ -1,679 +1,680 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2016-2018 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import time
import logging
import operator

from pyramid.httpexceptions import HTTPFound, HTTPForbidden, HTTPBadRequest

from rhodecode.lib import helpers as h, diffs
from rhodecode.lib.utils2 import (
    StrictAttributeDict, safe_int, datetime_to_time, safe_unicode)
from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
from rhodecode.model import repo
from rhodecode.model import repo_group
from rhodecode.model import user_group
from rhodecode.model import user
from rhodecode.model.db import User
from rhodecode.model.scm import ScmModel
from rhodecode.model.settings import VcsSettingsModel

log = logging.getLogger(__name__)


ADMIN_PREFIX = '/_admin'
STATIC_FILE_PREFIX = '/_static'

URL_NAME_REQUIREMENTS = {
    # group name can have a slash in them, but they must not end with a slash
    'group_name': r'.*?[^/]',
    'repo_group_name': r'.*?[^/]',
    # repo names can have a slash in them, but they must not end with a slash
    'repo_name': r'.*?[^/]',
    # file path eats up everything at the end
    'f_path': r'.*',
    # reference types
    'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)',
    'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)',
}


def add_route_with_slash(config,name, pattern, **kw):
    config.add_route(name, pattern, **kw)
    if not pattern.endswith('/'):
        config.add_route(name + '_slash', pattern + '/', **kw)


def add_route_requirements(route_path, requirements=None):
    """
    Adds regex requirements to pyramid routes using a mapping dict
    e.g::
        add_route_requirements('{repo_name}/settings')
    """
    requirements = requirements or URL_NAME_REQUIREMENTS
    for key, regex in requirements.items():
        route_path = route_path.replace('{%s}' % key, '{%s:%s}' % (key, regex))
    return route_path


def get_format_ref_id(repo):
    """Returns a `repo` specific reference formatter function"""
    if h.is_svn(repo):
        return _format_ref_id_svn
    else:
        return _format_ref_id


def _format_ref_id(name, raw_id):
    """Default formatting of a given reference `name`"""
    return name


def _format_ref_id_svn(name, raw_id):
    """Special way of formatting a reference for Subversion including path"""
    return '%s@%s' % (name, raw_id)


class TemplateArgs(StrictAttributeDict):
    pass


class BaseAppView(object):

    def __init__(self, context, request):
        self.request = request
        self.context = context
        self.session = request.session
        if not hasattr(request, 'user'):
            # NOTE(marcink): edge case, we ended up in matched route
            # but probably of web-app context, e.g API CALL/VCS CALL
            if hasattr(request, 'vcs_call') or hasattr(request, 'rpc_method'):
                log.warning('Unable to process request `%s` in this scope', request)
                raise HTTPBadRequest()

        self._rhodecode_user = request.user  # auth user
        self._rhodecode_db_user = self._rhodecode_user.get_instance()
        self._maybe_needs_password_change(
            request.matched_route.name, self._rhodecode_db_user)

    def _maybe_needs_password_change(self, view_name, user_obj):
        log.debug('Checking if user %s needs password change on view %s',
                  user_obj, view_name)
        skip_user_views = [
            'logout', 'login',
            'my_account_password', 'my_account_password_update'
        ]

        if not user_obj:
            return

        if user_obj.username == User.DEFAULT_USER:
            return

        now = time.time()
        should_change = user_obj.user_data.get('force_password_change')
        change_after = safe_int(should_change) or 0
        if should_change and now > change_after:
            log.debug('User %s requires password change', user_obj)
            h.flash('You are required to change your password', 'warning',
                    ignore_duplicate=True)

            if view_name not in skip_user_views:
                raise HTTPFound(
                    self.request.route_path('my_account_password'))

    def _log_creation_exception(self, e, repo_name):
        _ = self.request.translate
        reason = None
        if len(e.args) == 2:
            reason = e.args[1]

        if reason == 'INVALID_CERTIFICATE':
            log.exception(
                'Exception creating a repository: invalid certificate')
            msg = (_('Error creating repository %s: invalid certificate')
                   % repo_name)
        else:
            log.exception("Exception creating a repository")
            msg = (_('Error creating repository %s')
                   % repo_name)
        return msg

    def _get_local_tmpl_context(self, include_app_defaults=True):
        c = TemplateArgs()
        c.auth_user = self.request.user
        # TODO(marcink): migrate the usage of c.rhodecode_user to c.auth_user
        c.rhodecode_user = self.request.user

        if include_app_defaults:
            from rhodecode.lib.base import attach_context_attributes
            attach_context_attributes(c, self.request, self.request.user.user_id)

        return c

    def _get_template_context(self, tmpl_args, **kwargs):

        local_tmpl_args = {
            'defaults': {},
            'errors': {},
            'c': tmpl_args
        }
        local_tmpl_args.update(kwargs)
        return local_tmpl_args

    def load_default_context(self):
        """
        example:

        def load_default_context(self):
            c = self._get_local_tmpl_context()
            c.custom_var = 'foobar'

            return c
        """
        raise NotImplementedError('Needs implementation in view class')


class RepoAppView(BaseAppView):

    def __init__(self, context, request):
        super(RepoAppView, self).__init__(context, request)
        self.db_repo = request.db_repo
        self.db_repo_name = self.db_repo.repo_name
        self.db_repo_pull_requests = ScmModel().get_pull_requests(self.db_repo)

    def _handle_missing_requirements(self, error):
        log.error(
            'Requirements are missing for repository %s: %s',
            self.db_repo_name, safe_unicode(error))

    def _get_local_tmpl_context(self, include_app_defaults=True):
        _ = self.request.translate
        c = super(RepoAppView, self)._get_local_tmpl_context(
            include_app_defaults=include_app_defaults)

        # register common vars for this type of view
        c.rhodecode_db_repo = self.db_repo
        c.repo_name = self.db_repo_name
        c.repository_pull_requests = self.db_repo_pull_requests
        self.path_filter = PathFilter(None)

        c.repository_requirements_missing = {}
        try:
            self.rhodecode_vcs_repo = self.db_repo.scm_instance()
            if self.rhodecode_vcs_repo:
                path_perms = self.rhodecode_vcs_repo.get_path_permissions(
                    c.auth_user.username)
                self.path_filter = PathFilter(path_perms)
        except RepositoryRequirementError as e:
            c.repository_requirements_missing = {'error': str(e)}
            self._handle_missing_requirements(e)
            self.rhodecode_vcs_repo = None

        c.path_filter = self.path_filter  # used by atom_feed_entry.mako

        if self.rhodecode_vcs_repo is None:
            # unable to fetch this repo as vcs instance, report back to user
            h.flash(_(
                "The repository `%(repo_name)s` cannot be loaded in filesystem. "
                "Please check if it exist, or is not damaged.") %
                {'repo_name': c.repo_name},
                category='error', ignore_duplicate=True)
            if c.repository_requirements_missing:
                route = self.request.matched_route.name
                if route.startswith(('edit_repo', 'repo_summary')):
                    # allow summary and edit repo on missing requirements
                    return c

                raise HTTPFound(
                    h.route_path('repo_summary', repo_name=self.db_repo_name))

            else:  # redirect if we don't show missing requirements
                raise HTTPFound(h.route_path('home'))

        return c

    def _get_f_path_unchecked(self, matchdict, default=None):
        """
        Should only be used by redirects, everything else should call _get_f_path
        """
        f_path = matchdict.get('f_path')
        if f_path:
            # fix for multiple initial slashes that causes errors for GIT
            return f_path.lstrip('/')

        return default

    def _get_f_path(self, matchdict, default=None):
        f_path_match = self._get_f_path_unchecked(matchdict, default)
        return self.path_filter.assert_path_permissions(f_path_match)

    def _get_general_setting(self, target_repo, settings_key, default=False):
        settings_model = VcsSettingsModel(repo=target_repo)
        settings = settings_model.get_general_settings()
        return settings.get(settings_key, default)


class PathFilter(object):

    # Expects and instance of BasePathPermissionChecker or None
    def __init__(self, permission_checker):
        self.permission_checker = permission_checker

    def assert_path_permissions(self, path):
        if path and self.permission_checker and not self.permission_checker.has_access(path):
            raise HTTPForbidden()
        return path

    def filter_patchset(self, patchset):
        if not self.permission_checker or not patchset:
            return patchset, False
        had_filtered = False
        filtered_patchset = []
        for patch in patchset:
            filename = patch.get('filename', None)
            if not filename or self.permission_checker.has_access(filename):
                filtered_patchset.append(patch)
            else:
                had_filtered = True
        if had_filtered:
            if isinstance(patchset, diffs.LimitedDiffContainer):
                filtered_patchset = diffs.LimitedDiffContainer(patchset.diff_limit, patchset.cur_diff_size, filtered_patchset)
            return filtered_patchset, True
        else:
            return patchset, False

    def render_patchset_filtered(self, diffset, patchset, source_ref=None, target_ref=None):
        filtered_patchset, has_hidden_changes = self.filter_patchset(patchset)
-        result = diffset.render_patchset(filtered_patchset, source_ref=source_ref, target_ref=target_ref)
+        result = diffset.render_patchset(
+            filtered_patchset, source_ref=source_ref, target_ref=target_ref)
        result.has_hidden_changes = has_hidden_changes
        return result

    def get_raw_patch(self, diff_processor):
        if self.permission_checker is None:
            return diff_processor.as_raw()
        elif self.permission_checker.has_full_access:
            return diff_processor.as_raw()
        else:
            return '# Repository has user-specific filters, raw patch generation is disabled.'

    @property
    def is_enabled(self):
        return self.permission_checker is not None


class RepoGroupAppView(BaseAppView):
    def __init__(self, context, request):
        super(RepoGroupAppView, self).__init__(context, request)
        self.db_repo_group = request.db_repo_group
        self.db_repo_group_name = self.db_repo_group.group_name

    def _revoke_perms_on_yourself(self, form_result):
        _updates = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
                          form_result['perm_updates'])
        _additions = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
                            form_result['perm_additions'])
        _deletions = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
                            form_result['perm_deletions'])
        admin_perm = 'group.admin'
        if _updates and _updates[0][1] != admin_perm or \
                _additions and _additions[0][1] != admin_perm or \
                _deletions and _deletions[0][1] != admin_perm:
            return True
        return False


class UserGroupAppView(BaseAppView):
    def __init__(self, context, request):
        super(UserGroupAppView, self).__init__(context, request)
        self.db_user_group = request.db_user_group
        self.db_user_group_name = self.db_user_group.users_group_name


class UserAppView(BaseAppView):
    def __init__(self, context, request):
        super(UserAppView, self).__init__(context, request)
        self.db_user = request.db_user
        self.db_user_id = self.db_user.user_id

        _ = self.request.translate
        if not request.db_user_supports_default:
            if self.db_user.username == User.DEFAULT_USER:
                h.flash(_("Editing user `{}` is disabled.".format(
                    User.DEFAULT_USER)), category='warning')
                raise HTTPFound(h.route_path('users'))


class DataGridAppView(object):
    """
    Common class to have re-usable grid rendering components
    """

    def _extract_ordering(self, request, column_map=None):
        column_map = column_map or {}
        column_index = safe_int(request.GET.get('order[0][column]'))
        order_dir = request.GET.get(
            'order[0][dir]', 'desc')
        order_by = request.GET.get(
            'columns[%s][data][sort]' % column_index, 'name_raw')

        # translate datatable to DB columns
        order_by = column_map.get(order_by) or order_by

        search_q = request.GET.get('search[value]')
        return search_q, order_by, order_dir

    def _extract_chunk(self, request):
        start = safe_int(request.GET.get('start'), 0)
        length = safe_int(request.GET.get('length'), 25)
        draw = safe_int(request.GET.get('draw'))
        return draw, start, length

    def _get_order_col(self, order_by, model):
        if isinstance(order_by, basestring):
            try:
                return operator.attrgetter(order_by)(model)
            except AttributeError:
                return None
        else:
            return order_by


class BaseReferencesView(RepoAppView):
    """
    Base for reference view for branches, tags and bookmarks.
    """
    def load_default_context(self):
        c = self._get_local_tmpl_context()


        return c

    def load_refs_context(self, ref_items, partials_template):
        _render = self.request.get_partial_renderer(partials_template)
        pre_load = ["author", "date", "message"]

        is_svn = h.is_svn(self.rhodecode_vcs_repo)
        is_hg = h.is_hg(self.rhodecode_vcs_repo)

        format_ref_id = get_format_ref_id(self.rhodecode_vcs_repo)

        closed_refs = {}
        if is_hg:
            closed_refs = self.rhodecode_vcs_repo.branches_closed

        data = []
        for ref_name, commit_id in ref_items:
            commit = self.rhodecode_vcs_repo.get_commit(
                commit_id=commit_id, pre_load=pre_load)
            closed = ref_name in closed_refs

            # TODO: johbo: Unify generation of reference links
            use_commit_id = '/' in ref_name or is_svn

            if use_commit_id:
                files_url = h.route_path(
                    'repo_files',
                    repo_name=self.db_repo_name,
                    f_path=ref_name if is_svn else '',
                    commit_id=commit_id)

            else:
                files_url = h.route_path(
                    'repo_files',
                    repo_name=self.db_repo_name,
                    f_path=ref_name if is_svn else '',
                    commit_id=ref_name,
                    _query=dict(at=ref_name))

            data.append({
                "name": _render('name', ref_name, files_url, closed),
                "name_raw": ref_name,
                "date": _render('date', commit.date),
                "date_raw": datetime_to_time(commit.date),
                "author": _render('author', commit.author),
                "commit": _render(
                    'commit', commit.message, commit.raw_id, commit.idx),
                "commit_raw": commit.idx,
                "compare": _render(
                    'compare', format_ref_id(ref_name, commit.raw_id)),
            })

        return data


class RepoRoutePredicate(object):
    def __init__(self, val, config):
        self.val = val

    def text(self):
        return 'repo_route = %s' % self.val

    phash = text

    def __call__(self, info, request):
        if hasattr(request, 'vcs_call'):
            # skip vcs calls
            return

        repo_name = info['match']['repo_name']
        repo_model = repo.RepoModel()

        by_name_match = repo_model.get_by_repo_name(repo_name, cache=False)

        def redirect_if_creating(route_info, db_repo):
            skip_views = ['edit_repo_advanced_delete']
            route = route_info['route']
            # we should skip delete view so we can actually "remove" repositories
            # if they get stuck in creating state.
            if route.name in skip_views:
                return

            if db_repo.repo_state in [repo.Repository.STATE_PENDING]:
                repo_creating_url = request.route_path(
                    'repo_creating', repo_name=db_repo.repo_name)
                raise HTTPFound(repo_creating_url)

        if by_name_match:
            # register this as request object we can re-use later
            request.db_repo = by_name_match
            redirect_if_creating(info, by_name_match)
            return True

        by_id_match = repo_model.get_repo_by_id(repo_name)
        if by_id_match:
            request.db_repo = by_id_match
            redirect_if_creating(info, by_id_match)
            return True

        return False


class RepoForbidArchivedRoutePredicate(object):
    def __init__(self, val, config):
        self.val = val

    def text(self):
        return 'repo_forbid_archived = %s' % self.val

    phash = text

    def __call__(self, info, request):
        _ = request.translate
        rhodecode_db_repo = request.db_repo

        log.debug(
            '%s checking if archived flag for repo for %s',
            self.__class__.__name__, rhodecode_db_repo.repo_name)

        if rhodecode_db_repo.archived:
            log.warning('Current view is not supported for archived repo:%s',
                        rhodecode_db_repo.repo_name)

            h.flash(
                h.literal(_('Action not supported for archived repository.')),
                category='warning')
            summary_url = request.route_path(
                'repo_summary', repo_name=rhodecode_db_repo.repo_name)
            raise HTTPFound(summary_url)
        return True


class RepoTypeRoutePredicate(object):
    def __init__(self, val, config):
        self.val = val or ['hg', 'git', 'svn']

    def text(self):
        return 'repo_accepted_type = %s' % self.val

    phash = text

    def __call__(self, info, request):
        if hasattr(request, 'vcs_call'):
            # skip vcs calls
            return

        rhodecode_db_repo = request.db_repo

        log.debug(
            '%s checking repo type for %s in %s',
            self.__class__.__name__, rhodecode_db_repo.repo_type, self.val)

        if rhodecode_db_repo.repo_type in self.val:
            return True
        else:
            log.warning('Current view is not supported for repo type:%s',
                        rhodecode_db_repo.repo_type)
            return False


class RepoGroupRoutePredicate(object):
    def __init__(self, val, config):
        self.val = val

    def text(self):
        return 'repo_group_route = %s' % self.val

    phash = text

    def __call__(self, info, request):
        if hasattr(request, 'vcs_call'):
            # skip vcs calls
            return

        repo_group_name = info['match']['repo_group_name']
        repo_group_model = repo_group.RepoGroupModel()
        by_name_match = repo_group_model.get_by_group_name(repo_group_name, cache=False)

        if by_name_match:
            # register this as request object we can re-use later
            request.db_repo_group = by_name_match
            return True

        return False


class UserGroupRoutePredicate(object):
    def __init__(self, val, config):
        self.val = val

    def text(self):
        return 'user_group_route = %s' % self.val

    phash = text

    def __call__(self, info, request):
        if hasattr(request, 'vcs_call'):
            # skip vcs calls
            return

        user_group_id = info['match']['user_group_id']
        user_group_model = user_group.UserGroup()
        by_id_match = user_group_model.get(user_group_id, cache=False)

        if by_id_match:
            # register this as request object we can re-use later
            request.db_user_group = by_id_match
            return True

        return False


class UserRoutePredicateBase(object):
    supports_default = None

    def __init__(self, val, config):
        self.val = val

    def text(self):
        raise NotImplementedError()

    def __call__(self, info, request):
        if hasattr(request, 'vcs_call'):
            # skip vcs calls
            return

        user_id = info['match']['user_id']
        user_model = user.User()
        by_id_match = user_model.get(user_id, cache=False)

        if by_id_match:
            # register this as request object we can re-use later
            request.db_user = by_id_match
            request.db_user_supports_default = self.supports_default
            return True

        return False


class UserRoutePredicate(UserRoutePredicateBase):
    supports_default = False

    def text(self):
        return 'user_route = %s' % self.val

    phash = text


class UserRouteWithDefaultPredicate(UserRoutePredicateBase):
    supports_default = True

    def text(self):
        return 'user_with_default_route = %s' % self.val

    phash = text


def includeme(config):
    config.add_route_predicate(
        'repo_route', RepoRoutePredicate)
    config.add_route_predicate(
        'repo_accepted_types', RepoTypeRoutePredicate)
    config.add_route_predicate(
        'repo_forbid_when_archived', RepoForbidArchivedRoutePredicate)
    config.add_route_predicate(
        'repo_group_route', RepoGroupRoutePredicate)
    config.add_route_predicate(
        'user_group_route', UserGroupRoutePredicate)
    config.add_route_predicate(
        'user_route_with_default', UserRouteWithDefaultPredicate)
    config.add_route_predicate(
        'user_route', UserRoutePredicate)
@@ -1,314 +1,320 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2018 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import pytest

from rhodecode.tests import TestController

from rhodecode.model.db import (
    ChangesetComment, Notification, UserNotification)
from rhodecode.model.meta import Session
from rhodecode.lib import helpers as h


def route_path(name, params=None, **kwargs):
    import urllib

    base_url = {
        'repo_commit': '/{repo_name}/changeset/{commit_id}',
        'repo_commit_comment_create': '/{repo_name}/changeset/{commit_id}/comment/create',
        'repo_commit_comment_preview': '/{repo_name}/changeset/{commit_id}/comment/preview',
        'repo_commit_comment_delete': '/{repo_name}/changeset/{commit_id}/comment/{comment_id}/delete',
    }[name].format(**kwargs)

    if params:
        base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
    return base_url


@pytest.mark.backends("git", "hg", "svn")
class TestRepoCommitCommentsView(TestController):

    @pytest.fixture(autouse=True)
    def prepare(self, request, baseapp):
        for x in ChangesetComment.query().all():
            Session().delete(x)
        Session().commit()

        for x in Notification.query().all():
            Session().delete(x)
        Session().commit()

        request.addfinalizer(self.cleanup)

    def cleanup(self):
        for x in ChangesetComment.query().all():
            Session().delete(x)
        Session().commit()

        for x in Notification.query().all():
            Session().delete(x)
        Session().commit()

    @pytest.mark.parametrize('comment_type', ChangesetComment.COMMENT_TYPES)
    def test_create(self, comment_type, backend):
        self.log_user()
        commit = backend.repo.get_commit('300')
        commit_id = commit.raw_id
        text = u'CommentOnCommit'

        params = {'text': text, 'csrf_token': self.csrf_token,
                  'comment_type': comment_type}
        self.app.post(
            route_path('repo_commit_comment_create',
                       repo_name=backend.repo_name, commit_id=commit_id),
            params=params)

        response = self.app.get(
            route_path('repo_commit',
                       repo_name=backend.repo_name, commit_id=commit_id))

        # test DB
        assert ChangesetComment.query().count() == 1
        assert_comment_links(response, ChangesetComment.query().count(), 0)

        assert Notification.query().count() == 1
        assert ChangesetComment.query().count() == 1

        notification = Notification.query().all()[0]

        comment_id = ChangesetComment.query().first().comment_id
        assert notification.type_ == Notification.TYPE_CHANGESET_COMMENT

        author = notification.created_by_user.username_and_name
        sbj = '{0} left a {1} on commit `{2}` in the {3} repository'.format(
            author, comment_type, h.show_id(commit), backend.repo_name)
        assert sbj == notification.subject

        lnk = (u'/{0}/changeset/{1}#comment-{2}'.format(
            backend.repo_name, commit_id, comment_id))
        assert lnk in notification.body

    @pytest.mark.parametrize('comment_type', ChangesetComment.COMMENT_TYPES)
    def test_create_inline(self, comment_type, backend):
        self.log_user()
        commit = backend.repo.get_commit('300')
        commit_id = commit.raw_id
        text = u'CommentOnCommit'
        f_path = 'vcs/web/simplevcs/views/repository.py'
        line = 'n1'

        params = {'text': text, 'f_path': f_path, 'line': line,
                  'comment_type': comment_type,
                  'csrf_token': self.csrf_token}

        self.app.post(
            route_path('repo_commit_comment_create',
                       repo_name=backend.repo_name, commit_id=commit_id),
            params=params)

        response = self.app.get(
            route_path('repo_commit',
                       repo_name=backend.repo_name, commit_id=commit_id))

        # test DB
        assert ChangesetComment.query().count() == 1
        assert_comment_links(response, 0, ChangesetComment.query().count())

        if backend.alias == 'svn':
            response.mustcontain(
                '''data-f-path="vcs/commands/summary.py" '''
-                '''id="a_c--ad05457a43f8"'''
+                '''id="a_c-300-ad05457a43f8"'''
            )
-        else:
+        if backend.alias == 'git':
            response.mustcontain(
                '''data-f-path="vcs/backends/hg.py" '''
-                '''id="a_c--9c390eb52cd6"'''
+                '''id="a_c-883e775e89ea-9c390eb52cd6"'''
            )
+
+        if backend.alias == 'hg':
+            response.mustcontain(
+                '''data-f-path="vcs/backends/hg.py" '''
+                '''id="a_c-e58d85a3973b-9c390eb52cd6"'''
+            )

        assert Notification.query().count() == 1
        assert ChangesetComment.query().count() == 1

        notification = Notification.query().all()[0]
        comment = ChangesetComment.query().first()
        assert notification.type_ == Notification.TYPE_CHANGESET_COMMENT

        assert comment.revision == commit_id

        author = notification.created_by_user.username_and_name
        sbj = '{0} left a {1} on file `{2}` in commit `{3}` in the {4} repository'.format(
            author, comment_type, f_path, h.show_id(commit), backend.repo_name)

        assert sbj == notification.subject

        lnk = (u'/{0}/changeset/{1}#comment-{2}'.format(
            backend.repo_name, commit_id, comment.comment_id))
        assert lnk in notification.body
        assert 'on line n1' in notification.body

    def test_create_with_mention(self, backend):
        self.log_user()

        commit_id = backend.repo.get_commit('300').raw_id
        text = u'@test_regular check CommentOnCommit'

        params = {'text': text, 'csrf_token': self.csrf_token}
        self.app.post(
            route_path('repo_commit_comment_create',
                       repo_name=backend.repo_name, commit_id=commit_id),
            params=params)

        response = self.app.get(
            route_path('repo_commit',
                       repo_name=backend.repo_name, commit_id=commit_id))
        # test DB
        assert ChangesetComment.query().count() == 1
        assert_comment_links(response, ChangesetComment.query().count(), 0)

        notification = Notification.query().one()

        assert len(notification.recipients) == 2
        users = [x.username for x in notification.recipients]

        # test_regular gets notification by @mention
        assert sorted(users) == [u'test_admin', u'test_regular']

    def test_create_with_status_change(self, backend):
        self.log_user()
        commit = backend.repo.get_commit('300')
        commit_id = commit.raw_id
        text = u'CommentOnCommit'
        f_path = 'vcs/web/simplevcs/views/repository.py'
        line = 'n1'

        params = {'text': text, 'changeset_status': 'approved',
                  'csrf_token': self.csrf_token}

        self.app.post(
            route_path(
                'repo_commit_comment_create',
                repo_name=backend.repo_name, commit_id=commit_id),
            params=params)

        response = self.app.get(
            route_path('repo_commit',
                       repo_name=backend.repo_name, commit_id=commit_id))

        # test DB
        assert ChangesetComment.query().count() == 1
        assert_comment_links(response, ChangesetComment.query().count(), 0)

        assert Notification.query().count() == 1
        assert ChangesetComment.query().count() == 1

        notification = Notification.query().all()[0]

        comment_id = ChangesetComment.query().first().comment_id
        assert notification.type_ == Notification.TYPE_CHANGESET_COMMENT

        author = notification.created_by_user.username_and_name
        sbj = '[status: Approved] {0} left a note on commit `{1}` in the {2} repository'.format(
            author, h.show_id(commit), backend.repo_name)
        assert sbj == notification.subject

        lnk = (u'/{0}/changeset/{1}#comment-{2}'.format(
            backend.repo_name, commit_id, comment_id))
        assert lnk in notification.body

    def test_delete(self, backend):
        self.log_user()
        commit_id = backend.repo.get_commit('300').raw_id
        text = u'CommentOnCommit'

        params = {'text': text, 'csrf_token': self.csrf_token}
        self.app.post(
            route_path(
                'repo_commit_comment_create',
                repo_name=backend.repo_name, commit_id=commit_id),
            params=params)

        comments = ChangesetComment.query().all()
        assert len(comments) == 1
        comment_id = comments[0].comment_id

        self.app.post(
            route_path('repo_commit_comment_delete',
                       repo_name=backend.repo_name,
                       commit_id=commit_id,
                       comment_id=comment_id),
            params={'csrf_token': self.csrf_token})

        comments = ChangesetComment.query().all()
        assert len(comments) == 0

        response = self.app.get(
            route_path('repo_commit',
263 repo_name=backend.repo_name, commit_id=commit_id))
269 repo_name=backend.repo_name, commit_id=commit_id))
264 assert_comment_links(response, 0, 0)
270 assert_comment_links(response, 0, 0)
265
271
266 @pytest.mark.parametrize('renderer, input, output', [
272 @pytest.mark.parametrize('renderer, input, output', [
267 ('rst', 'plain text', '<p>plain text</p>'),
273 ('rst', 'plain text', '<p>plain text</p>'),
268 ('rst', 'header\n======', '<h1 class="title">header</h1>'),
274 ('rst', 'header\n======', '<h1 class="title">header</h1>'),
269 ('rst', '*italics*', '<em>italics</em>'),
275 ('rst', '*italics*', '<em>italics</em>'),
270 ('rst', '**bold**', '<strong>bold</strong>'),
276 ('rst', '**bold**', '<strong>bold</strong>'),
271 ('markdown', 'plain text', '<p>plain text</p>'),
277 ('markdown', 'plain text', '<p>plain text</p>'),
272 ('markdown', '# header', '<h1>header</h1>'),
278 ('markdown', '# header', '<h1>header</h1>'),
273 ('markdown', '*italics*', '<em>italics</em>'),
279 ('markdown', '*italics*', '<em>italics</em>'),
274 ('markdown', '**bold**', '<strong>bold</strong>'),
280 ('markdown', '**bold**', '<strong>bold</strong>'),
275 ], ids=['rst-plain', 'rst-header', 'rst-italics', 'rst-bold', 'md-plain',
281 ], ids=['rst-plain', 'rst-header', 'rst-italics', 'rst-bold', 'md-plain',
276 'md-header', 'md-italics', 'md-bold', ])
282 'md-header', 'md-italics', 'md-bold', ])
277 def test_preview(self, renderer, input, output, backend, xhr_header):
283 def test_preview(self, renderer, input, output, backend, xhr_header):
278 self.log_user()
284 self.log_user()
279 params = {
285 params = {
280 'renderer': renderer,
286 'renderer': renderer,
281 'text': input,
287 'text': input,
282 'csrf_token': self.csrf_token
288 'csrf_token': self.csrf_token
283 }
289 }
284 commit_id = '0' * 16 # fake this for tests
290 commit_id = '0' * 16 # fake this for tests
285 response = self.app.post(
291 response = self.app.post(
286 route_path('repo_commit_comment_preview',
292 route_path('repo_commit_comment_preview',
287 repo_name=backend.repo_name, commit_id=commit_id,),
293 repo_name=backend.repo_name, commit_id=commit_id,),
288 params=params,
294 params=params,
289 extra_environ=xhr_header)
295 extra_environ=xhr_header)
290
296
291 response.mustcontain(output)
297 response.mustcontain(output)
292
298
293
299
294 def assert_comment_links(response, comments, inline_comments):
300 def assert_comment_links(response, comments, inline_comments):
295 if comments == 1:
301 if comments == 1:
296 comments_text = "%d Commit comment" % comments
302 comments_text = "%d Commit comment" % comments
297 else:
303 else:
298 comments_text = "%d Commit comments" % comments
304 comments_text = "%d Commit comments" % comments
299
305
300 if inline_comments == 1:
306 if inline_comments == 1:
301 inline_comments_text = "%d Inline Comment" % inline_comments
307 inline_comments_text = "%d Inline Comment" % inline_comments
302 else:
308 else:
303 inline_comments_text = "%d Inline Comments" % inline_comments
309 inline_comments_text = "%d Inline Comments" % inline_comments
304
310
305 if comments:
311 if comments:
306 response.mustcontain('<a href="#comments">%s</a>,' % comments_text)
312 response.mustcontain('<a href="#comments">%s</a>,' % comments_text)
307 else:
313 else:
308 response.mustcontain(comments_text)
314 response.mustcontain(comments_text)
309
315
310 if inline_comments:
316 if inline_comments:
311 response.mustcontain(
317 response.mustcontain(
312 'id="inline-comments-counter">%s</' % inline_comments_text)
318 'id="inline-comments-counter">%s</' % inline_comments_text)
313 else:
319 else:
314 response.mustcontain(inline_comments_text)
320 response.mustcontain(inline_comments_text)
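To make the helper's expectations concrete, a small worked example with made-up counts (this is an illustration, not part of the changeset):

    # With comments=1 and inline_comments=0 the helper builds:
    comments_text = "%d Commit comment" % 1          # -> '1 Commit comment'
    inline_comments_text = "%d Inline Comments" % 0  # -> '0 Inline Comments'
    # It then asserts the page links the first value
    # ('<a href="#comments">1 Commit comment</a>,') while the second appears
    # as plain text, since the inline-comment count is zero.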
@@ -1,697 +1,666 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23 import lxml.html
23 import lxml.html
24
24
25 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
25 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
26 from rhodecode.tests import assert_session_flash
26 from rhodecode.tests import assert_session_flash
27 from rhodecode.tests.utils import AssertResponse, commit_change
27 from rhodecode.tests.utils import AssertResponse, commit_change
28
28
29
29
30 def route_path(name, params=None, **kwargs):
30 def route_path(name, params=None, **kwargs):
31 import urllib
31 import urllib
32
32
33 base_url = {
33 base_url = {
34 'repo_compare_select': '/{repo_name}/compare',
34 'repo_compare_select': '/{repo_name}/compare',
35 'repo_compare': '/{repo_name}/compare/{source_ref_type}@{source_ref}...{target_ref_type}@{target_ref}',
35 'repo_compare': '/{repo_name}/compare/{source_ref_type}@{source_ref}...{target_ref_type}@{target_ref}',
36 }[name].format(**kwargs)
36 }[name].format(**kwargs)
37
37
38 if params:
38 if params:
39 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
39 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
40 return base_url
40 return base_url
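For orientation, a minimal sketch of what the route_path helper above produces for the compare view; the repository name and refs are made-up placeholders:

    url = route_path(
        'repo_compare',
        repo_name='my-repo',
        source_ref_type='branch', source_ref='default',
        target_ref_type='branch', target_ref='stable',
        params=dict(merge='1'))
    # url == '/my-repo/compare/branch@default...branch@stable?merge=1'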
41
41
42
42
43 @pytest.mark.usefixtures("autologin_user", "app")
43 @pytest.mark.usefixtures("autologin_user", "app")
44 class TestCompareView(object):
44 class TestCompareView(object):
45
45
46 def test_compare_index_is_reached_at_least_once(self, backend):
46 def test_compare_index_is_reached_at_least_once(self, backend):
47 repo = backend.repo
47 repo = backend.repo
48 self.app.get(
48 self.app.get(
49 route_path('repo_compare_select', repo_name=repo.repo_name))
49 route_path('repo_compare_select', repo_name=repo.repo_name))
50
50
51 @pytest.mark.xfail_backends("svn", reason="Requires pull")
51 @pytest.mark.xfail_backends("svn", reason="Requires pull")
52 def test_compare_remote_with_different_commit_indexes(self, backend):
52 def test_compare_remote_with_different_commit_indexes(self, backend):
53 # Preparing the following repository structure:
53 # Preparing the following repository structure:
54 #
54 #
55 # Origin repository has two commits:
55 # Origin repository has two commits:
56 #
56 #
57 # 0 1
57 # 0 1
58 # A -- D
58 # A -- D
59 #
59 #
60 # The fork of it has a few more commits and "D" has a commit index
60 # The fork of it has a few more commits and "D" has a commit index
61 # which does not exist in origin.
61 # which does not exist in origin.
62 #
62 #
63 # 0 1 2 3 4
63 # 0 1 2 3 4
64 # A -- -- -- D -- E
64 # A -- -- -- D -- E
65 # \- B -- C
65 # \- B -- C
66 #
66 #
67
67
68 fork = backend.create_repo()
68 fork = backend.create_repo()
69
69
70 # prepare fork
70 # prepare fork
71 commit0 = commit_change(
71 commit0 = commit_change(
72 fork.repo_name, filename='file1', content='A',
72 fork.repo_name, filename='file1', content='A',
73 message='A', vcs_type=backend.alias, parent=None, newfile=True)
73 message='A', vcs_type=backend.alias, parent=None, newfile=True)
74
74
75 commit1 = commit_change(
75 commit1 = commit_change(
76 fork.repo_name, filename='file1', content='B',
76 fork.repo_name, filename='file1', content='B',
77 message='B, child of A', vcs_type=backend.alias, parent=commit0)
77 message='B, child of A', vcs_type=backend.alias, parent=commit0)
78
78
79 commit_change( # commit 2
79 commit_change( # commit 2
80 fork.repo_name, filename='file1', content='C',
80 fork.repo_name, filename='file1', content='C',
81 message='C, child of B', vcs_type=backend.alias, parent=commit1)
81 message='C, child of B', vcs_type=backend.alias, parent=commit1)
82
82
83 commit3 = commit_change(
83 commit3 = commit_change(
84 fork.repo_name, filename='file1', content='D',
84 fork.repo_name, filename='file1', content='D',
85 message='D, child of A', vcs_type=backend.alias, parent=commit0)
85 message='D, child of A', vcs_type=backend.alias, parent=commit0)
86
86
87 commit4 = commit_change(
87 commit4 = commit_change(
88 fork.repo_name, filename='file1', content='E',
88 fork.repo_name, filename='file1', content='E',
89 message='E, child of D', vcs_type=backend.alias, parent=commit3)
89 message='E, child of D', vcs_type=backend.alias, parent=commit3)
90
90
91 # prepare origin repository, taking just the history up to D
91 # prepare origin repository, taking just the history up to D
92 origin = backend.create_repo()
92 origin = backend.create_repo()
93
93
94 origin_repo = origin.scm_instance(cache=False)
94 origin_repo = origin.scm_instance(cache=False)
95 origin_repo.config.clear_section('hooks')
95 origin_repo.config.clear_section('hooks')
96 origin_repo.pull(fork.repo_full_path, commit_ids=[commit3.raw_id])
96 origin_repo.pull(fork.repo_full_path, commit_ids=[commit3.raw_id])
97
97
98 # Verify test fixture setup
98 # Verify test fixture setup
99 # This does not work for git
99 # This does not work for git
100 if backend.alias != 'git':
100 if backend.alias != 'git':
101 assert 5 == len(fork.scm_instance().commit_ids)
101 assert 5 == len(fork.scm_instance().commit_ids)
102 assert 2 == len(origin_repo.commit_ids)
102 assert 2 == len(origin_repo.commit_ids)
103
103
104 # Comparing the revisions
104 # Comparing the revisions
105 response = self.app.get(
105 response = self.app.get(
106 route_path('repo_compare',
106 route_path('repo_compare',
107 repo_name=origin.repo_name,
107 repo_name=origin.repo_name,
108 source_ref_type="rev",
108 source_ref_type="rev", source_ref=commit3.raw_id,
109 source_ref=commit3.raw_id,
109 target_ref_type="rev", target_ref=commit4.raw_id,
110 target_ref_type="rev",
110 params=dict(merge='1', target_repo=fork.repo_name)
111 target_ref=commit4.raw_id,
111 ))
112 params=dict(merge='1', target_repo=fork.repo_name)
113 ))
114
112
115 compare_page = ComparePage(response)
113 compare_page = ComparePage(response)
116 compare_page.contains_commits([commit4])
114 compare_page.contains_commits([commit4])
117
115
118 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
116 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
119 def test_compare_forks_on_branch_extra_commits(self, backend):
117 def test_compare_forks_on_branch_extra_commits(self, backend):
120 repo1 = backend.create_repo()
118 repo1 = backend.create_repo()
121
119
122 # commit something !
120 # commit something !
123 commit0 = commit_change(
121 commit0 = commit_change(
124 repo1.repo_name, filename='file1', content='line1\n',
122 repo1.repo_name, filename='file1', content='line1\n',
125 message='commit1', vcs_type=backend.alias, parent=None,
123 message='commit1', vcs_type=backend.alias, parent=None,
126 newfile=True)
124 newfile=True)
127
125
128 # fork this repo
126 # fork this repo
129 repo2 = backend.create_fork()
127 repo2 = backend.create_fork()
130
128
131 # add two extra commits into the fork
129 # add two extra commits into the fork
132 commit1 = commit_change(
130 commit1 = commit_change(
133 repo2.repo_name, filename='file1', content='line1\nline2\n',
131 repo2.repo_name, filename='file1', content='line1\nline2\n',
134 message='commit2', vcs_type=backend.alias, parent=commit0)
132 message='commit2', vcs_type=backend.alias, parent=commit0)
135
133
136 commit2 = commit_change(
134 commit2 = commit_change(
137 repo2.repo_name, filename='file1', content='line1\nline2\nline3\n',
135 repo2.repo_name, filename='file1', content='line1\nline2\nline3\n',
138 message='commit3', vcs_type=backend.alias, parent=commit1)
136 message='commit3', vcs_type=backend.alias, parent=commit1)
139
137
140 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
138 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
141 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
139 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
142
140
143 response = self.app.get(
141 response = self.app.get(
144 route_path('repo_compare',
142 route_path('repo_compare',
145 repo_name=repo1.repo_name,
143 repo_name=repo1.repo_name,
146 source_ref_type="branch",
144 source_ref_type="branch", source_ref=commit_id2,
147 source_ref=commit_id2,
145 target_ref_type="branch", target_ref=commit_id1,
148 target_ref_type="branch",
146 params=dict(merge='1', target_repo=repo2.repo_name)
149 target_ref=commit_id1,
147 ))
150 params=dict(merge='1', target_repo=repo2.repo_name)
151 ))
152
148
153 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id2))
149 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id2))
154 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id1))
150 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id1))
155
151
156 compare_page = ComparePage(response)
152 compare_page = ComparePage(response)
157 compare_page.contains_change_summary(1, 2, 0)
153 compare_page.contains_change_summary(1, 2, 0)
158 compare_page.contains_commits([commit1, commit2])
154 compare_page.contains_commits([commit1, commit2])
159 compare_page.contains_file_links_and_anchors([
155
160 ('file1', 'a_c--826e8142e6ba'),
156 anchor = 'a_c-{}-826e8142e6ba'.format(commit0.short_id)
161 ])
157 compare_page.contains_file_links_and_anchors([('file1', anchor), ])
162
158
163 # Swap is removed when comparing branches since it's a PR feature and
159 # Swap is removed when comparing branches since it's a PR feature and
164 # it is then a preview mode
160 # it is then a preview mode
165 compare_page.swap_is_hidden()
161 compare_page.swap_is_hidden()
166 compare_page.target_source_are_disabled()
162 compare_page.target_source_are_disabled()
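The recurring change in these hunks is that file anchors now carry the commit's short id ('a_c-<short id>-...') instead of an empty slot ('a_c--...'). A rough sketch of how such an anchor could be derived — the helper name and the md5-based path hash are assumptions for illustration, not taken from this changeset:

    import hashlib

    def file_anchor(commit_short_id, path):
        # Assumed scheme: 'a_c-<commit short id>-<first 12 hex chars of md5(path)>'.
        path_hash = hashlib.md5(path.encode('utf-8')).hexdigest()[:12]
        return 'a_c-{}-{}'.format(commit_short_id, path_hash)

    # Under that assumption, file_anchor(commit0.short_id, 'file1') would match
    # the 'a_c-{}-826e8142e6ba'.format(commit0.short_id) value asserted above.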
167
163
168 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
164 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
169 def test_compare_forks_on_branch_extra_commits_origin_has_incomming(
165 def test_compare_forks_on_branch_extra_commits_origin_has_incomming(
170 self, backend):
166 self, backend):
171 repo1 = backend.create_repo()
167 repo1 = backend.create_repo()
172
168
173 # commit something !
169 # commit something !
174 commit0 = commit_change(
170 commit0 = commit_change(
175 repo1.repo_name, filename='file1', content='line1\n',
171 repo1.repo_name, filename='file1', content='line1\n',
176 message='commit1', vcs_type=backend.alias, parent=None,
172 message='commit1', vcs_type=backend.alias, parent=None,
177 newfile=True)
173 newfile=True)
178
174
179 # fork this repo
175 # fork this repo
180 repo2 = backend.create_fork()
176 repo2 = backend.create_fork()
181
177
182 # now commit something to origin repo
178 # now commit something to origin repo
183 commit_change(
179 commit_change(
184 repo1.repo_name, filename='file2', content='line1file2\n',
180 repo1.repo_name, filename='file2', content='line1file2\n',
185 message='commit2', vcs_type=backend.alias, parent=commit0,
181 message='commit2', vcs_type=backend.alias, parent=commit0,
186 newfile=True)
182 newfile=True)
187
183
188 # add two extra commits into the fork
184 # add two extra commits into the fork
189 commit1 = commit_change(
185 commit1 = commit_change(
190 repo2.repo_name, filename='file1', content='line1\nline2\n',
186 repo2.repo_name, filename='file1', content='line1\nline2\n',
191 message='commit2', vcs_type=backend.alias, parent=commit0)
187 message='commit2', vcs_type=backend.alias, parent=commit0)
192
188
193 commit2 = commit_change(
189 commit2 = commit_change(
194 repo2.repo_name, filename='file1', content='line1\nline2\nline3\n',
190 repo2.repo_name, filename='file1', content='line1\nline2\nline3\n',
195 message='commit3', vcs_type=backend.alias, parent=commit1)
191 message='commit3', vcs_type=backend.alias, parent=commit1)
196
192
197 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
193 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
198 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
194 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
199
195
200 response = self.app.get(
196 response = self.app.get(
201 route_path('repo_compare',
197 route_path('repo_compare',
202 repo_name=repo1.repo_name,
198 repo_name=repo1.repo_name,
203 source_ref_type="branch",
199 source_ref_type="branch", source_ref=commit_id2,
204 source_ref=commit_id2,
200 target_ref_type="branch", target_ref=commit_id1,
205 target_ref_type="branch",
201 params=dict(merge='1', target_repo=repo2.repo_name),
206 target_ref=commit_id1,
202 ))
207 params=dict(merge='1', target_repo=repo2.repo_name),
208 ))
209
203
210 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id2))
204 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id2))
211 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id1))
205 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id1))
212
206
213 compare_page = ComparePage(response)
207 compare_page = ComparePage(response)
214 compare_page.contains_change_summary(1, 2, 0)
208 compare_page.contains_change_summary(1, 2, 0)
215 compare_page.contains_commits([commit1, commit2])
209 compare_page.contains_commits([commit1, commit2])
216 compare_page.contains_file_links_and_anchors([
210 anchor = 'a_c-{}-826e8142e6ba'.format(commit0.short_id)
217 ('file1', 'a_c--826e8142e6ba'),
211 compare_page.contains_file_links_and_anchors([('file1', anchor), ])
218 ])
219
212
220 # Swap is removed when comparing branches since it's a PR feature and
213 # Swap is removed when comparing branches since it's a PR feature and
221 # it is then a preview mode
214 # it is then a preview mode
222 compare_page.swap_is_hidden()
215 compare_page.swap_is_hidden()
223 compare_page.target_source_are_disabled()
216 compare_page.target_source_are_disabled()
224
217
225 @pytest.mark.xfail_backends("svn")
218 @pytest.mark.xfail_backends("svn")
226 # TODO(marcink): no svn support for comparing two separate repos
219 # TODO(marcink): no svn support for comparing two separate repos
227 def test_compare_of_unrelated_forks(self, backend):
220 def test_compare_of_unrelated_forks(self, backend):
228 orig = backend.create_repo(number_of_commits=1)
221 orig = backend.create_repo(number_of_commits=1)
229 fork = backend.create_repo(number_of_commits=1)
222 fork = backend.create_repo(number_of_commits=1)
230
223
231 response = self.app.get(
224 response = self.app.get(
232 route_path('repo_compare',
225 route_path('repo_compare',
233 repo_name=orig.repo_name,
226 repo_name=orig.repo_name,
234 source_ref_type="rev",
227 source_ref_type="rev", source_ref="tip",
235 source_ref="tip",
228 target_ref_type="rev", target_ref="tip",
236 target_ref_type="rev",
229 params=dict(merge='1', target_repo=fork.repo_name),
237 target_ref="tip",
230 ),
238 params=dict(merge='1', target_repo=fork.repo_name),
239 ),
240 status=302)
231 status=302)
241 response = response.follow()
232 response = response.follow()
242 response.mustcontain("Repositories unrelated.")
233 response.mustcontain("Repositories unrelated.")
243
234
244 @pytest.mark.xfail_backends("svn")
235 @pytest.mark.xfail_backends("svn")
245 def test_compare_cherry_pick_commits_from_bottom(self, backend):
236 def test_compare_cherry_pick_commits_from_bottom(self, backend):
246
237
247 # repo1:
238 # repo1:
248 # commit0:
239 # commit0:
249 # commit1:
240 # commit1:
250 # repo1-fork - in which we will cherry-pick the bottom commits
241 # repo1-fork - in which we will cherry-pick the bottom commits
251 # commit0:
242 # commit0:
252 # commit1:
243 # commit1:
253 # commit2: x
244 # commit2: x
254 # commit3: x
245 # commit3: x
255 # commit4: x
246 # commit4: x
256 # commit5:
247 # commit5:
257 # make repo1, and commit1+commit2
248 # make repo1, and commit1+commit2
258
249
259 repo1 = backend.create_repo()
250 repo1 = backend.create_repo()
260
251
261 # commit something !
252 # commit something !
262 commit0 = commit_change(
253 commit0 = commit_change(
263 repo1.repo_name, filename='file1', content='line1\n',
254 repo1.repo_name, filename='file1', content='line1\n',
264 message='commit1', vcs_type=backend.alias, parent=None,
255 message='commit1', vcs_type=backend.alias, parent=None,
265 newfile=True)
256 newfile=True)
266 commit1 = commit_change(
257 commit1 = commit_change(
267 repo1.repo_name, filename='file1', content='line1\nline2\n',
258 repo1.repo_name, filename='file1', content='line1\nline2\n',
268 message='commit2', vcs_type=backend.alias, parent=commit0)
259 message='commit2', vcs_type=backend.alias, parent=commit0)
269
260
270 # fork this repo
261 # fork this repo
271 repo2 = backend.create_fork()
262 repo2 = backend.create_fork()
272
263
273 # now make commit3-6
264 # now make commit3-6
274 commit2 = commit_change(
265 commit2 = commit_change(
275 repo1.repo_name, filename='file1', content='line1\nline2\nline3\n',
266 repo1.repo_name, filename='file1', content='line1\nline2\nline3\n',
276 message='commit3', vcs_type=backend.alias, parent=commit1)
267 message='commit3', vcs_type=backend.alias, parent=commit1)
277 commit3 = commit_change(
268 commit3 = commit_change(
278 repo1.repo_name, filename='file1',
269 repo1.repo_name, filename='file1',
279 content='line1\nline2\nline3\nline4\n', message='commit4',
270 content='line1\nline2\nline3\nline4\n', message='commit4',
280 vcs_type=backend.alias, parent=commit2)
271 vcs_type=backend.alias, parent=commit2)
281 commit4 = commit_change(
272 commit4 = commit_change(
282 repo1.repo_name, filename='file1',
273 repo1.repo_name, filename='file1',
283 content='line1\nline2\nline3\nline4\nline5\n', message='commit5',
274 content='line1\nline2\nline3\nline4\nline5\n', message='commit5',
284 vcs_type=backend.alias, parent=commit3)
275 vcs_type=backend.alias, parent=commit3)
285 commit_change( # commit 5
276 commit_change( # commit 5
286 repo1.repo_name, filename='file1',
277 repo1.repo_name, filename='file1',
287 content='line1\nline2\nline3\nline4\nline5\nline6\n',
278 content='line1\nline2\nline3\nline4\nline5\nline6\n',
288 message='commit6', vcs_type=backend.alias, parent=commit4)
279 message='commit6', vcs_type=backend.alias, parent=commit4)
289
280
290 response = self.app.get(
281 response = self.app.get(
291 route_path('repo_compare',
282 route_path('repo_compare',
292 repo_name=repo2.repo_name,
283 repo_name=repo2.repo_name,
293 source_ref_type="rev",
284 # parent of commit2, in target repo2
294 # parent of commit2, in target repo2
285 source_ref_type="rev", source_ref=commit1.raw_id,
295 source_ref=commit1.raw_id,
286 target_ref_type="rev", target_ref=commit4.raw_id,
296 target_ref_type="rev",
287 params=dict(merge='1', target_repo=repo1.repo_name),
297 target_ref=commit4.raw_id,
288 ))
298 params=dict(merge='1', target_repo=repo1.repo_name),
299 ))
300 response.mustcontain('%s@%s' % (repo2.repo_name, commit1.short_id))
289 response.mustcontain('%s@%s' % (repo2.repo_name, commit1.short_id))
301 response.mustcontain('%s@%s' % (repo1.repo_name, commit4.short_id))
290 response.mustcontain('%s@%s' % (repo1.repo_name, commit4.short_id))
302
291
303 # files
292 # files
304 compare_page = ComparePage(response)
293 compare_page = ComparePage(response)
305 compare_page.contains_change_summary(1, 3, 0)
294 compare_page.contains_change_summary(1, 3, 0)
306 compare_page.contains_commits([commit2, commit3, commit4])
295 compare_page.contains_commits([commit2, commit3, commit4])
307 compare_page.contains_file_links_and_anchors([
296 anchor = 'a_c-{}-826e8142e6ba'.format(commit1.short_id)
308 ('file1', 'a_c--826e8142e6ba'),
297 compare_page.contains_file_links_and_anchors([('file1', anchor),])
309 ])
310
298
311 @pytest.mark.xfail_backends("svn")
299 @pytest.mark.xfail_backends("svn")
312 def test_compare_cherry_pick_commits_from_top(self, backend):
300 def test_compare_cherry_pick_commits_from_top(self, backend):
313 # repo1:
301 # repo1:
314 # commit0:
302 # commit0:
315 # commit1:
303 # commit1:
316 # repo1-fork - in which we will cherry-pick the top commits
304 # repo1-fork - in which we will cherry-pick the top commits
317 # commit0:
305 # commit0:
318 # commit1:
306 # commit1:
319 # commit2:
307 # commit2:
320 # commit3: x
308 # commit3: x
321 # commit4: x
309 # commit4: x
322 # commit5: x
310 # commit5: x
323
311
324 # make repo1, and commit1+commit2
312 # make repo1, and commit1+commit2
325 repo1 = backend.create_repo()
313 repo1 = backend.create_repo()
326
314
327 # commit something !
315 # commit something !
328 commit0 = commit_change(
316 commit0 = commit_change(
329 repo1.repo_name, filename='file1', content='line1\n',
317 repo1.repo_name, filename='file1', content='line1\n',
330 message='commit1', vcs_type=backend.alias, parent=None,
318 message='commit1', vcs_type=backend.alias, parent=None,
331 newfile=True)
319 newfile=True)
332 commit1 = commit_change(
320 commit1 = commit_change(
333 repo1.repo_name, filename='file1', content='line1\nline2\n',
321 repo1.repo_name, filename='file1', content='line1\nline2\n',
334 message='commit2', vcs_type=backend.alias, parent=commit0)
322 message='commit2', vcs_type=backend.alias, parent=commit0)
335
323
336 # fork this repo
324 # fork this repo
337 backend.create_fork()
325 backend.create_fork()
338
326
339 # now make commit3-6
327 # now make commit3-6
340 commit2 = commit_change(
328 commit2 = commit_change(
341 repo1.repo_name, filename='file1', content='line1\nline2\nline3\n',
329 repo1.repo_name, filename='file1', content='line1\nline2\nline3\n',
342 message='commit3', vcs_type=backend.alias, parent=commit1)
330 message='commit3', vcs_type=backend.alias, parent=commit1)
343 commit3 = commit_change(
331 commit3 = commit_change(
344 repo1.repo_name, filename='file1',
332 repo1.repo_name, filename='file1',
345 content='line1\nline2\nline3\nline4\n', message='commit4',
333 content='line1\nline2\nline3\nline4\n', message='commit4',
346 vcs_type=backend.alias, parent=commit2)
334 vcs_type=backend.alias, parent=commit2)
347 commit4 = commit_change(
335 commit4 = commit_change(
348 repo1.repo_name, filename='file1',
336 repo1.repo_name, filename='file1',
349 content='line1\nline2\nline3\nline4\nline5\n', message='commit5',
337 content='line1\nline2\nline3\nline4\nline5\n', message='commit5',
350 vcs_type=backend.alias, parent=commit3)
338 vcs_type=backend.alias, parent=commit3)
351 commit5 = commit_change(
339 commit5 = commit_change(
352 repo1.repo_name, filename='file1',
340 repo1.repo_name, filename='file1',
353 content='line1\nline2\nline3\nline4\nline5\nline6\n',
341 content='line1\nline2\nline3\nline4\nline5\nline6\n',
354 message='commit6', vcs_type=backend.alias, parent=commit4)
342 message='commit6', vcs_type=backend.alias, parent=commit4)
355
343
356 response = self.app.get(
344 response = self.app.get(
357 route_path('repo_compare',
345 route_path('repo_compare',
358 repo_name=repo1.repo_name,
346 repo_name=repo1.repo_name,
359 source_ref_type="rev",
347 # parent of commit3, not in source repo2
360 # parent of commit3, not in source repo2
348 source_ref_type="rev", source_ref=commit2.raw_id,
361 source_ref=commit2.raw_id,
349 target_ref_type="rev", target_ref=commit5.raw_id,
362 target_ref_type="rev",
350 params=dict(merge='1'),))
363 target_ref=commit5.raw_id,
364 params=dict(merge='1'),
365 ))
366
351
367 response.mustcontain('%s@%s' % (repo1.repo_name, commit2.short_id))
352 response.mustcontain('%s@%s' % (repo1.repo_name, commit2.short_id))
368 response.mustcontain('%s@%s' % (repo1.repo_name, commit5.short_id))
353 response.mustcontain('%s@%s' % (repo1.repo_name, commit5.short_id))
369
354
370 compare_page = ComparePage(response)
355 compare_page = ComparePage(response)
371 compare_page.contains_change_summary(1, 3, 0)
356 compare_page.contains_change_summary(1, 3, 0)
372 compare_page.contains_commits([commit3, commit4, commit5])
357 compare_page.contains_commits([commit3, commit4, commit5])
373
358
374 # files
359 # files
375 compare_page.contains_file_links_and_anchors([
360 anchor = 'a_c-{}-826e8142e6ba'.format(commit2.short_id)
376 ('file1', 'a_c--826e8142e6ba'),
361 compare_page.contains_file_links_and_anchors([('file1', anchor),])
377 ])
378
362
379 @pytest.mark.xfail_backends("svn")
363 @pytest.mark.xfail_backends("svn")
380 def test_compare_remote_branches(self, backend):
364 def test_compare_remote_branches(self, backend):
381 repo1 = backend.repo
365 repo1 = backend.repo
382 repo2 = backend.create_fork()
366 repo2 = backend.create_fork()
383
367
384 commit_id1 = repo1.get_commit(commit_idx=3).raw_id
368 commit_id1 = repo1.get_commit(commit_idx=3).raw_id
369 commit_id1_short = repo1.get_commit(commit_idx=3).short_id
385 commit_id2 = repo1.get_commit(commit_idx=6).raw_id
370 commit_id2 = repo1.get_commit(commit_idx=6).raw_id
371 commit_id2_short = repo1.get_commit(commit_idx=6).short_id
386
372
387 response = self.app.get(
373 response = self.app.get(
388 route_path('repo_compare',
374 route_path('repo_compare',
389 repo_name=repo1.repo_name,
375 repo_name=repo1.repo_name,
390 source_ref_type="rev",
376 source_ref_type="rev", source_ref=commit_id1,
391 source_ref=commit_id1,
377 target_ref_type="rev", target_ref=commit_id2,
392 target_ref_type="rev",
378 params=dict(merge='1', target_repo=repo2.repo_name),
393 target_ref=commit_id2,
379 ))
394 params=dict(merge='1', target_repo=repo2.repo_name),
395 ))
396
380
397 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id1))
381 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id1))
398 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id2))
382 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id2))
399
383
400 compare_page = ComparePage(response)
384 compare_page = ComparePage(response)
401
385
402 # outgoing commits between those commits
386 # outgoing commits between those commits
403 compare_page.contains_commits(
387 compare_page.contains_commits(
404 [repo2.get_commit(commit_idx=x) for x in [4, 5, 6]])
388 [repo2.get_commit(commit_idx=x) for x in [4, 5, 6]])
405
389
406 # files
390 # files
407 compare_page.contains_file_links_and_anchors([
391 compare_page.contains_file_links_and_anchors([
408 ('vcs/backends/hg.py', 'a_c--9c390eb52cd6'),
392 ('vcs/backends/hg.py', 'a_c-{}-9c390eb52cd6'.format(commit_id2_short)),
409 ('vcs/backends/__init__.py', 'a_c--41b41c1f2796'),
393 ('vcs/backends/__init__.py', 'a_c-{}-41b41c1f2796'.format(commit_id1_short)),
410 ('vcs/backends/base.py', 'a_c--2f574d260608'),
394 ('vcs/backends/base.py', 'a_c-{}-2f574d260608'.format(commit_id1_short)),
411 ])
395 ])
412
396
413 @pytest.mark.xfail_backends("svn")
397 @pytest.mark.xfail_backends("svn")
414 def test_source_repo_new_commits_after_forking_simple_diff(self, backend):
398 def test_source_repo_new_commits_after_forking_simple_diff(self, backend):
415 repo1 = backend.create_repo()
399 repo1 = backend.create_repo()
416 r1_name = repo1.repo_name
400 r1_name = repo1.repo_name
417
401
418 commit0 = commit_change(
402 commit0 = commit_change(
419 repo=r1_name, filename='file1',
403 repo=r1_name, filename='file1',
420 content='line1', message='commit1', vcs_type=backend.alias,
404 content='line1', message='commit1', vcs_type=backend.alias,
421 newfile=True)
405 newfile=True)
422 assert repo1.scm_instance().commit_ids == [commit0.raw_id]
406 assert repo1.scm_instance().commit_ids == [commit0.raw_id]
423
407
424 # fork the repo1
408 # fork the repo1
425 repo2 = backend.create_fork()
409 repo2 = backend.create_fork()
426 assert repo2.scm_instance().commit_ids == [commit0.raw_id]
410 assert repo2.scm_instance().commit_ids == [commit0.raw_id]
427
411
428 self.r2_id = repo2.repo_id
412 self.r2_id = repo2.repo_id
429 r2_name = repo2.repo_name
413 r2_name = repo2.repo_name
430
414
431 commit1 = commit_change(
415 commit1 = commit_change(
432 repo=r2_name, filename='file1-fork',
416 repo=r2_name, filename='file1-fork',
433 content='file1-line1-from-fork', message='commit1-fork',
417 content='file1-line1-from-fork', message='commit1-fork',
434 vcs_type=backend.alias, parent=repo2.scm_instance()[-1],
418 vcs_type=backend.alias, parent=repo2.scm_instance()[-1],
435 newfile=True)
419 newfile=True)
436
420
437 commit2 = commit_change(
421 commit2 = commit_change(
438 repo=r2_name, filename='file2-fork',
422 repo=r2_name, filename='file2-fork',
439 content='file2-line1-from-fork', message='commit2-fork',
423 content='file2-line1-from-fork', message='commit2-fork',
440 vcs_type=backend.alias, parent=commit1,
424 vcs_type=backend.alias, parent=commit1,
441 newfile=True)
425 newfile=True)
442
426
443 commit_change( # commit 3
427 commit_change( # commit 3
444 repo=r2_name, filename='file3-fork',
428 repo=r2_name, filename='file3-fork',
445 content='file3-line1-from-fork', message='commit3-fork',
429 content='file3-line1-from-fork', message='commit3-fork',
446 vcs_type=backend.alias, parent=commit2, newfile=True)
430 vcs_type=backend.alias, parent=commit2, newfile=True)
447
431
448 # compare !
432 # compare !
449 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
433 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
450 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
434 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
451
435
452 response = self.app.get(
436 response = self.app.get(
453 route_path('repo_compare',
437 route_path('repo_compare',
454 repo_name=r2_name,
438 repo_name=r2_name,
455 source_ref_type="branch",
439 source_ref_type="branch", source_ref=commit_id1,
456 source_ref=commit_id1,
440 target_ref_type="branch", target_ref=commit_id2,
457 target_ref_type="branch",
441 params=dict(merge='1', target_repo=r1_name),
458 target_ref=commit_id2,
442 ))
459 params=dict(merge='1', target_repo=r1_name),
460 ))
461
443
462 response.mustcontain('%s@%s' % (r2_name, commit_id1))
444 response.mustcontain('%s@%s' % (r2_name, commit_id1))
463 response.mustcontain('%s@%s' % (r1_name, commit_id2))
445 response.mustcontain('%s@%s' % (r1_name, commit_id2))
464 response.mustcontain('No files')
446 response.mustcontain('No files')
465 response.mustcontain('No commits in this compare')
447 response.mustcontain('No commits in this compare')
466
448
467 commit0 = commit_change(
449 commit0 = commit_change(
468 repo=r1_name, filename='file2',
450 repo=r1_name, filename='file2',
469 content='line1-added-after-fork', message='commit2-parent',
451 content='line1-added-after-fork', message='commit2-parent',
470 vcs_type=backend.alias, parent=None, newfile=True)
452 vcs_type=backend.alias, parent=None, newfile=True)
471
453
472 # compare !
454 # compare !
473 response = self.app.get(
455 response = self.app.get(
474 route_path('repo_compare',
456 route_path('repo_compare',
475 repo_name=r2_name,
457 repo_name=r2_name,
476 source_ref_type="branch",
458 source_ref_type="branch", source_ref=commit_id1,
477 source_ref=commit_id1,
459 target_ref_type="branch", target_ref=commit_id2,
478 target_ref_type="branch",
460 params=dict(merge='1', target_repo=r1_name),
479 target_ref=commit_id2,
461 ))
480 params=dict(merge='1', target_repo=r1_name),
481 ))
482
462
483 response.mustcontain('%s@%s' % (r2_name, commit_id1))
463 response.mustcontain('%s@%s' % (r2_name, commit_id1))
484 response.mustcontain('%s@%s' % (r1_name, commit_id2))
464 response.mustcontain('%s@%s' % (r1_name, commit_id2))
485
465
486 response.mustcontain("""commit2-parent""")
466 response.mustcontain("""commit2-parent""")
487 response.mustcontain("""line1-added-after-fork""")
467 response.mustcontain("""line1-added-after-fork""")
488 compare_page = ComparePage(response)
468 compare_page = ComparePage(response)
489 compare_page.contains_change_summary(1, 1, 0)
469 compare_page.contains_change_summary(1, 1, 0)
490
470
491 @pytest.mark.xfail_backends("svn")
471 @pytest.mark.xfail_backends("svn")
492 def test_compare_commits(self, backend, xhr_header):
472 def test_compare_commits(self, backend, xhr_header):
493 commit0 = backend.repo.get_commit(commit_idx=0)
473 commit0 = backend.repo.get_commit(commit_idx=0)
494 commit1 = backend.repo.get_commit(commit_idx=1)
474 commit1 = backend.repo.get_commit(commit_idx=1)
495
475
496 response = self.app.get(
476 response = self.app.get(
497 route_path('repo_compare',
477 route_path('repo_compare',
498 repo_name=backend.repo_name,
478 repo_name=backend.repo_name,
499 source_ref_type="rev",
479 source_ref_type="rev", source_ref=commit0.raw_id,
500 source_ref=commit0.raw_id,
480 target_ref_type="rev", target_ref=commit1.raw_id,
501 target_ref_type="rev",
481 params=dict(merge='1')
502 target_ref=commit1.raw_id,
482 ),
503 params=dict(merge='1')
483 extra_environ=xhr_header, )
504 ),
505 extra_environ=xhr_header,)
506
484
507 # outgoing commits between those commits
485 # outgoing commits between those commits
508 compare_page = ComparePage(response)
486 compare_page = ComparePage(response)
509 compare_page.contains_commits(commits=[commit1], ancestors=[commit0])
487 compare_page.contains_commits(commits=[commit1], ancestors=[commit0])
510
488
511 def test_errors_when_comparing_unknown_source_repo(self, backend):
489 def test_errors_when_comparing_unknown_source_repo(self, backend):
512 repo = backend.repo
490 repo = backend.repo
513 badrepo = 'badrepo'
491 badrepo = 'badrepo'
514
492
515 response = self.app.get(
493 response = self.app.get(
516 route_path('repo_compare',
494 route_path('repo_compare',
517 repo_name=badrepo,
495 repo_name=badrepo,
518 source_ref_type="rev",
496 source_ref_type="rev", source_ref='tip',
519 source_ref='tip',
497 target_ref_type="rev", target_ref='tip',
520 target_ref_type="rev",
498 params=dict(merge='1', target_repo=repo.repo_name)
521 target_ref='tip',
499 ),
522 params=dict(merge='1', target_repo=repo.repo_name)
523 ),
524 status=404)
500 status=404)
525
501
526 def test_errors_when_comparing_unknown_target_repo(self, backend):
502 def test_errors_when_comparing_unknown_target_repo(self, backend):
527 repo = backend.repo
503 repo = backend.repo
528 badrepo = 'badrepo'
504 badrepo = 'badrepo'
529
505
530 response = self.app.get(
506 response = self.app.get(
531 route_path('repo_compare',
507 route_path('repo_compare',
532 repo_name=repo.repo_name,
508 repo_name=repo.repo_name,
533 source_ref_type="rev",
509 source_ref_type="rev", source_ref='tip',
534 source_ref='tip',
510 target_ref_type="rev", target_ref='tip',
535 target_ref_type="rev",
511 params=dict(merge='1', target_repo=badrepo),
536 target_ref='tip',
512 ),
537 params=dict(merge='1', target_repo=badrepo),
538 ),
539 status=302)
513 status=302)
540 redirected = response.follow()
514 redirected = response.follow()
541 redirected.mustcontain(
515 redirected.mustcontain(
542 'Could not find the target repo: `{}`'.format(badrepo))
516 'Could not find the target repo: `{}`'.format(badrepo))
543
517
544 def test_compare_not_in_preview_mode(self, backend_stub):
518 def test_compare_not_in_preview_mode(self, backend_stub):
545 commit0 = backend_stub.repo.get_commit(commit_idx=0)
519 commit0 = backend_stub.repo.get_commit(commit_idx=0)
546 commit1 = backend_stub.repo.get_commit(commit_idx=1)
520 commit1 = backend_stub.repo.get_commit(commit_idx=1)
547
521
548 response = self.app.get(
522 response = self.app.get(
549 route_path('repo_compare',
523 route_path('repo_compare',
550 repo_name=backend_stub.repo_name,
524 repo_name=backend_stub.repo_name,
551 source_ref_type="rev",
525 source_ref_type="rev", source_ref=commit0.raw_id,
552 source_ref=commit0.raw_id,
526 target_ref_type="rev", target_ref=commit1.raw_id,
553 target_ref_type="rev",
527 ))
554 target_ref=commit1.raw_id,
555 ))
556
528
557 # outgoing commits between those commits
529 # outgoing commits between those commits
558 compare_page = ComparePage(response)
530 compare_page = ComparePage(response)
559 compare_page.swap_is_visible()
531 compare_page.swap_is_visible()
560 compare_page.target_source_are_enabled()
532 compare_page.target_source_are_enabled()
561
533
562 def test_compare_of_fork_with_largefiles(self, backend_hg, settings_util):
534 def test_compare_of_fork_with_largefiles(self, backend_hg, settings_util):
563 orig = backend_hg.create_repo(number_of_commits=1)
535 orig = backend_hg.create_repo(number_of_commits=1)
564 fork = backend_hg.create_fork()
536 fork = backend_hg.create_fork()
565
537
566 settings_util.create_repo_rhodecode_ui(
538 settings_util.create_repo_rhodecode_ui(
567 orig, 'extensions', value='', key='largefiles', active=False)
539 orig, 'extensions', value='', key='largefiles', active=False)
568 settings_util.create_repo_rhodecode_ui(
540 settings_util.create_repo_rhodecode_ui(
569 fork, 'extensions', value='', key='largefiles', active=True)
541 fork, 'extensions', value='', key='largefiles', active=True)
570
542
571 compare_module = ('rhodecode.lib.vcs.backends.hg.repository.'
543 compare_module = ('rhodecode.lib.vcs.backends.hg.repository.'
572 'MercurialRepository.compare')
544 'MercurialRepository.compare')
573 with mock.patch(compare_module) as compare_mock:
545 with mock.patch(compare_module) as compare_mock:
574 compare_mock.side_effect = RepositoryRequirementError()
546 compare_mock.side_effect = RepositoryRequirementError()
575
547
576 response = self.app.get(
548 response = self.app.get(
577 route_path('repo_compare',
549 route_path('repo_compare',
578 repo_name=orig.repo_name,
550 repo_name=orig.repo_name,
579 source_ref_type="rev",
551 source_ref_type="rev", source_ref="tip",
580 source_ref="tip",
552 target_ref_type="rev", target_ref="tip",
581 target_ref_type="rev",
553 params=dict(merge='1', target_repo=fork.repo_name),
582 target_ref="tip",
554 ),
583 params=dict(merge='1', target_repo=fork.repo_name),
584 ),
585 status=302)
555 status=302)
586
556
587 assert_session_flash(
557 assert_session_flash(
588 response,
558 response,
589 'Could not compare repos with different large file settings')
559 'Could not compare repos with different large file settings')
590
560
591
561
592 @pytest.mark.usefixtures("autologin_user")
562 @pytest.mark.usefixtures("autologin_user")
593 class TestCompareControllerSvn(object):
563 class TestCompareControllerSvn(object):
594
564
595 def test_supports_references_with_path(self, app, backend_svn):
565 def test_supports_references_with_path(self, app, backend_svn):
596 repo = backend_svn['svn-simple-layout']
566 repo = backend_svn['svn-simple-layout']
597 commit_id = repo.get_commit(commit_idx=-1).raw_id
567 commit_id = repo.get_commit(commit_idx=-1).raw_id
598 response = app.get(
568 response = app.get(
599 route_path('repo_compare',
569 route_path('repo_compare',
600 repo_name=repo.repo_name,
570 repo_name=repo.repo_name,
601 source_ref_type="tag",
571 source_ref_type="tag",
602 source_ref="%s@%s" % ('tags/v0.1', commit_id),
572 source_ref="%s@%s" % ('tags/v0.1', commit_id),
603 target_ref_type="tag",
573 target_ref_type="tag",
604 target_ref="%s@%s" % ('tags/v0.2', commit_id),
574 target_ref="%s@%s" % ('tags/v0.2', commit_id),
605 params=dict(merge='1'),
575 params=dict(merge='1'),
606 ),
576 ),
607 status=200)
577 status=200)
608
578
609 # Expecting no commits, since both paths are at the same revision
579 # Expecting no commits, since both paths are at the same revision
610 response.mustcontain('No commits in this compare')
580 response.mustcontain('No commits in this compare')
611
581
612 # Should find only one file changed when comparing those two tags
582 # Should find only one file changed when comparing those two tags
613 response.mustcontain('example.py')
583 response.mustcontain('example.py')
614 compare_page = ComparePage(response)
584 compare_page = ComparePage(response)
615 compare_page.contains_change_summary(1, 5, 1)
585 compare_page.contains_change_summary(1, 5, 1)
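A note on the SVN reference format exercised here: refs that carry a path are written as '<path>@<commit id>', so the compare URL built by route_path takes roughly this shape (the commit id is a placeholder, not a real value from the fixture):

    # /<repo_name>/compare/tag@tags/v0.1@<commit_id>...tag@tags/v0.2@<commit_id>?merge=1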
616
586
617 def test_shows_commits_if_different_ids(self, app, backend_svn):
587 def test_shows_commits_if_different_ids(self, app, backend_svn):
618 repo = backend_svn['svn-simple-layout']
588 repo = backend_svn['svn-simple-layout']
619 source_id = repo.get_commit(commit_idx=-6).raw_id
589 source_id = repo.get_commit(commit_idx=-6).raw_id
620 target_id = repo.get_commit(commit_idx=-1).raw_id
590 target_id = repo.get_commit(commit_idx=-1).raw_id
621 response = app.get(
591 response = app.get(
622 route_path('repo_compare',
592 route_path('repo_compare',
623 repo_name=repo.repo_name,
593 repo_name=repo.repo_name,
624 source_ref_type="tag",
594 source_ref_type="tag",
625 source_ref="%s@%s" % ('tags/v0.1', source_id),
595 source_ref="%s@%s" % ('tags/v0.1', source_id),
626 target_ref_type="tag",
596 target_ref_type="tag",
627 target_ref="%s@%s" % ('tags/v0.2', target_id),
597 target_ref="%s@%s" % ('tags/v0.2', target_id),
628 params=dict(merge='1')
598 params=dict(merge='1')
629 ),
599 ),
630 status=200)
600 status=200)
631
601
632 # It should show commits
602 # It should show commits
633 assert 'No commits in this compare' not in response.body
603 assert 'No commits in this compare' not in response.body
634
604
635 # Should find only one file changed when comparing those two tags
605 # Should find only one file changed when comparing those two tags
636 response.mustcontain('example.py')
606 response.mustcontain('example.py')
637 compare_page = ComparePage(response)
607 compare_page = ComparePage(response)
638 compare_page.contains_change_summary(1, 5, 1)
608 compare_page.contains_change_summary(1, 5, 1)
639
609
640
610
641 class ComparePage(AssertResponse):
611 class ComparePage(AssertResponse):
642 """
612 """
643 Abstracts the page template from the tests
613 Abstracts the page template from the tests
644 """
614 """
645
615
646 def contains_file_links_and_anchors(self, files):
616 def contains_file_links_and_anchors(self, files):
647 doc = lxml.html.fromstring(self.response.body)
617 doc = lxml.html.fromstring(self.response.body)
648 for filename, file_id in files:
618 for filename, file_id in files:
649 self.contains_one_anchor(file_id)
619 self.contains_one_anchor(file_id)
650 diffblock = doc.cssselect('[data-f-path="%s"]' % filename)
620 diffblock = doc.cssselect('[data-f-path="%s"]' % filename)
651 assert len(diffblock) == 1
621 assert len(diffblock) == 1
652 assert len(diffblock[0].cssselect('a[href="#%s"]' % file_id)) == 1
622 assert len(diffblock[0].cssselect('a[href="#%s"]' % file_id)) == 1
653
623
654 def contains_change_summary(self, files_changed, inserted, deleted):
624 def contains_change_summary(self, files_changed, inserted, deleted):
655 template = (
625 template = (
656 "{files_changed} file{plural} changed: "
626 "{files_changed} file{plural} changed: "
657 "{inserted} inserted, {deleted} deleted")
627 "{inserted} inserted, {deleted} deleted")
658 self.response.mustcontain(template.format(
628 self.response.mustcontain(template.format(
659 files_changed=files_changed,
629 files_changed=files_changed,
660 plural="s" if files_changed > 1 else "",
630 plural="s" if files_changed > 1 else "",
661 inserted=inserted,
631 inserted=inserted,
662 deleted=deleted))
632 deleted=deleted))
663
633
664 def contains_commits(self, commits, ancestors=None):
634 def contains_commits(self, commits, ancestors=None):
665 response = self.response
635 response = self.response
666
636
667 for commit in commits:
637 for commit in commits:
668 # Expecting to see the commit message in an element which
638 # Expecting to see the commit message in an element which
669 # has the ID "c-{commit.raw_id}"
639 # has the ID "c-{commit.raw_id}"
670 self.element_contains('#c-' + commit.raw_id, commit.message)
640 self.element_contains('#c-' + commit.raw_id, commit.message)
671 self.contains_one_link(
641 self.contains_one_link(
672 'r%s:%s' % (commit.idx, commit.short_id),
642 'r%s:%s' % (commit.idx, commit.short_id),
673 self._commit_url(commit))
643 self._commit_url(commit))
674 if ancestors:
644 if ancestors:
675 response.mustcontain('Ancestor')
645 response.mustcontain('Ancestor')
676 for ancestor in ancestors:
646 for ancestor in ancestors:
677 self.contains_one_link(
647 self.contains_one_link(
678 ancestor.short_id, self._commit_url(ancestor))
648 ancestor.short_id, self._commit_url(ancestor))
679
649
680 def _commit_url(self, commit):
650 def _commit_url(self, commit):
681 return '/%s/changeset/%s' % (commit.repository.name, commit.raw_id)
651 return '/%s/changeset/%s' % (commit.repository.name, commit.raw_id)
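To spell out what contains_commits() looks for, with made-up commit attributes:

    # For a commit with idx=5, short_id='abcdef123456' and raw_id='<full sha>'
    # in repository 'my-repo', the helper expects the commit message inside the
    # element '#c-<full sha>' and exactly one link whose text is
    # 'r5:abcdef123456' and whose target is '/my-repo/changeset/<full sha>'.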
682
652
683 def swap_is_hidden(self):
653 def swap_is_hidden(self):
684 assert '<a id="btn-swap"' not in self.response.text
654 assert '<a id="btn-swap"' not in self.response.text
685
655
686 def swap_is_visible(self):
656 def swap_is_visible(self):
687 assert '<a id="btn-swap"' in self.response.text
657 assert '<a id="btn-swap"' in self.response.text
688
658
689 def target_source_are_disabled(self):
659 def target_source_are_disabled(self):
690 response = self.response
660 response = self.response
691 response.mustcontain("var enable_fields = false;")
661 response.mustcontain("var enable_fields = false;")
692 response.mustcontain('.select2("enable", enable_fields)')
662 response.mustcontain('.select2("enable", enable_fields)')
693
663
694 def target_source_are_enabled(self):
664 def target_source_are_enabled(self):
695 response = self.response
665 response = self.response
696 response.mustcontain("var enable_fields = true;")
666 response.mustcontain("var enable_fields = true;")
697
@@ -1,163 +1,167 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import pytest
21 import pytest
22
22
23 from .test_repo_compare import ComparePage
23 from .test_repo_compare import ComparePage
24
24
25
25
26 def route_path(name, params=None, **kwargs):
26 def route_path(name, params=None, **kwargs):
27 import urllib
27 import urllib
28
28
29 base_url = {
29 base_url = {
30 'repo_compare_select': '/{repo_name}/compare',
30 'repo_compare_select': '/{repo_name}/compare',
31 'repo_compare': '/{repo_name}/compare/{source_ref_type}@{source_ref}...{target_ref_type}@{target_ref}',
31 'repo_compare': '/{repo_name}/compare/{source_ref_type}@{source_ref}...{target_ref_type}@{target_ref}',
32 }[name].format(**kwargs)
32 }[name].format(**kwargs)
33
33
34 if params:
34 if params:
35 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
35 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
36 return base_url
36 return base_url
37
37
38
38
39 @pytest.mark.usefixtures("autologin_user", "app")
39 @pytest.mark.usefixtures("autologin_user", "app")
40 class TestCompareView(object):
40 class TestCompareView(object):
41
41
42 @pytest.mark.xfail_backends("svn", msg="Depends on branch and tag support")
42 @pytest.mark.xfail_backends("svn", msg="Depends on branch and tag support")
43 def test_compare_tag(self, backend):
43 def test_compare_tag(self, backend):
44 tag1 = 'v0.1.2'
44 tag1 = 'v0.1.2'
45 tag2 = 'v0.1.3'
45 tag2 = 'v0.1.3'
46 response = self.app.get(
46 response = self.app.get(
47 route_path(
47 route_path(
48 'repo_compare',
48 'repo_compare',
49 repo_name=backend.repo_name,
49 repo_name=backend.repo_name,
50 source_ref_type="tag",
50 source_ref_type="tag", source_ref=tag1,
51 source_ref=tag1,
51 target_ref_type="tag", target_ref=tag2),
52 target_ref_type="tag",
53 target_ref=tag2),
54 status=200)
52 status=200)
55
53
56 response.mustcontain('%s@%s' % (backend.repo_name, tag1))
54 response.mustcontain('%s@%s' % (backend.repo_name, tag1))
57 response.mustcontain('%s@%s' % (backend.repo_name, tag2))
55 response.mustcontain('%s@%s' % (backend.repo_name, tag2))
58
56
59 # outgoing commits between tags
57 # outgoing commits between tags
60 commit_indexes = {
58 commit_indexes = {
61 'git': [113] + range(115, 121),
59 'git': [113] + range(115, 121),
62 'hg': [112] + range(115, 121),
60 'hg': [112] + range(115, 121),
63 }
61 }
64 repo = backend.repo
62 repo = backend.repo
65 commits = (repo.get_commit(commit_idx=idx)
63 commits = (repo.get_commit(commit_idx=idx)
66 for idx in commit_indexes[backend.alias])
64 for idx in commit_indexes[backend.alias])
67 compare_page = ComparePage(response)
65 compare_page = ComparePage(response)
68 compare_page.contains_change_summary(11, 94, 64)
66 compare_page.contains_change_summary(11, 94, 64)
69 compare_page.contains_commits(commits)
67 compare_page.contains_commits(commits)
70
68
71 # files diff
69 # files diff
70 short_id = short_id_new = ''
71 if backend.alias == 'git':
72 short_id = '5a3a8fb00555'
73 short_id_new = '0ba5f8a46600'
74 if backend.alias == 'hg':
75 short_id = '17544fbfcd33'
76 short_id_new = 'a7e60bff65d5'
77
72 compare_page.contains_file_links_and_anchors([
78 compare_page.contains_file_links_and_anchors([
73 ('docs/api/utils/index.rst', 'a_c--1c5cf9e91c12'),
79 # modified
74 ('test_and_report.sh', 'a_c--e3305437df55'),
80 ('docs/api/utils/index.rst', 'a_c-{}-1c5cf9e91c12'.format(short_id)),
75 ('.hgignore', 'a_c--c8e92ef85cd1'),
81 ('test_and_report.sh', 'a_c-{}-e3305437df55'.format(short_id)),
76 ('.hgtags', 'a_c--6e08b694d687'),
82 # added
77 ('docs/api/index.rst', 'a_c--2c14b00f3393'),
83 ('.hgignore', 'a_c-{}-c8e92ef85cd1'.format(short_id_new)),
78 ('vcs/__init__.py', 'a_c--430ccbc82bdf'),
84 ('.hgtags', 'a_c-{}-6e08b694d687'.format(short_id_new)),
79 ('vcs/backends/hg.py', 'a_c--9c390eb52cd6'),
85 ('docs/api/index.rst', 'a_c-{}-2c14b00f3393'.format(short_id_new)),
80 ('vcs/utils/__init__.py', 'a_c--ebb592c595c0'),
86 ('vcs/__init__.py', 'a_c-{}-430ccbc82bdf'.format(short_id_new)),
81 ('vcs/utils/annotate.py', 'a_c--7abc741b5052'),
87 ('vcs/backends/hg.py', 'a_c-{}-9c390eb52cd6'.format(short_id_new)),
82 ('vcs/utils/diffs.py', 'a_c--2ef0ef106c56'),
88 ('vcs/utils/__init__.py', 'a_c-{}-ebb592c595c0'.format(short_id_new)),
83 ('vcs/utils/lazy.py', 'a_c--3150cb87d4b7'),
89 ('vcs/utils/annotate.py', 'a_c-{}-7abc741b5052'.format(short_id_new)),
90 ('vcs/utils/diffs.py', 'a_c-{}-2ef0ef106c56'.format(short_id_new)),
91 ('vcs/utils/lazy.py', 'a_c-{}-3150cb87d4b7'.format(short_id_new)),
84 ])
92 ])
85
93
86 @pytest.mark.xfail_backends("svn", msg="Depends on branch and tag support")
94 @pytest.mark.xfail_backends("svn", msg="Depends on branch and tag support")
87 def test_compare_tag_branch(self, backend):
95 def test_compare_tag_branch(self, backend):
88 revisions = {
96 revisions = {
89 'hg': {
97 'hg': {
90 'tag': 'v0.2.0',
98 'tag': 'v0.2.0',
91 'branch': 'default',
99 'branch': 'default',
92 'response': (147, 5701, 10177)
100 'response': (147, 5701, 10177)
93 },
101 },
94 'git': {
102 'git': {
95 'tag': 'v0.2.2',
103 'tag': 'v0.2.2',
96 'branch': 'master',
104 'branch': 'master',
97 'response': (71, 2269, 3416)
105 'response': (71, 2269, 3416)
98 },
106 },
99 }
107 }
100
108
101 # Backend specific data, depends on the test repository for
109 # Backend specific data, depends on the test repository for
102 # functional tests.
110 # functional tests.
103 data = revisions[backend.alias]
111 data = revisions[backend.alias]
104
112
105 response = self.app.get(
113 response = self.app.get(
106 route_path(
114 route_path(
107 'repo_compare',
115 'repo_compare',
108 repo_name=backend.repo_name,
116 repo_name=backend.repo_name,
109 source_ref_type='branch',
117 source_ref_type='branch', source_ref=data['branch'],
110 source_ref=data['branch'],
118 target_ref_type="tag", target_ref=data['tag'],
111 target_ref_type="tag",
112 target_ref=data['tag'],
113 ))
119 ))
114
120
115 response.mustcontain('%s@%s' % (backend.repo_name, data['branch']))
121 response.mustcontain('%s@%s' % (backend.repo_name, data['branch']))
116 response.mustcontain('%s@%s' % (backend.repo_name, data['tag']))
122 response.mustcontain('%s@%s' % (backend.repo_name, data['tag']))
117 compare_page = ComparePage(response)
123 compare_page = ComparePage(response)
118 compare_page.contains_change_summary(*data['response'])
124 compare_page.contains_change_summary(*data['response'])
119
125
120 def test_index_branch(self, backend):
126 def test_index_branch(self, backend):
121 head_id = backend.default_head_id
127 head_id = backend.default_head_id
122 response = self.app.get(
128 response = self.app.get(
123 route_path(
129 route_path(
124 'repo_compare',
130 'repo_compare',
125 repo_name=backend.repo_name,
131 repo_name=backend.repo_name,
126 source_ref_type="branch",
132 source_ref_type="branch", source_ref=head_id,
127 source_ref=head_id,
133 target_ref_type="branch", target_ref=head_id,
128 target_ref_type="branch",
129 target_ref=head_id,
130 ))
134 ))
131
135
132 response.mustcontain('%s@%s' % (backend.repo_name, head_id))
136 response.mustcontain('%s@%s' % (backend.repo_name, head_id))
133
137
134 # branches are equal
138 # branches are equal
135 response.mustcontain('No files')
139 response.mustcontain('No files')
136 response.mustcontain('No commits in this compare')
140 response.mustcontain('No commits in this compare')
137
141
138 def test_compare_commits(self, backend):
142 def test_compare_commits(self, backend):
139 repo = backend.repo
143 repo = backend.repo
140 commit1 = repo.get_commit(commit_idx=0)
144 commit1 = repo.get_commit(commit_idx=0)
145 commit1_short_id = commit1.short_id
141 commit2 = repo.get_commit(commit_idx=1)
146 commit2 = repo.get_commit(commit_idx=1)
147 commit2_short_id = commit2.short_id
142
148
143 response = self.app.get(
149 response = self.app.get(
144 route_path(
150 route_path(
145 'repo_compare',
151 'repo_compare',
146 repo_name=backend.repo_name,
152 repo_name=backend.repo_name,
147 source_ref_type="rev",
153 source_ref_type="rev", source_ref=commit1.raw_id,
148 source_ref=commit1.raw_id,
154 target_ref_type="rev", target_ref=commit2.raw_id,
149 target_ref_type="rev",
150 target_ref=commit2.raw_id,
151 ))
155 ))
152 response.mustcontain('%s@%s' % (backend.repo_name, commit1.raw_id))
156 response.mustcontain('%s@%s' % (backend.repo_name, commit1.raw_id))
153 response.mustcontain('%s@%s' % (backend.repo_name, commit2.raw_id))
157 response.mustcontain('%s@%s' % (backend.repo_name, commit2.raw_id))
154 compare_page = ComparePage(response)
158 compare_page = ComparePage(response)
155
159
156 # files
160 # files
157 compare_page.contains_change_summary(1, 7, 0)
161 compare_page.contains_change_summary(1, 7, 0)
158
162
159 # outgoing commits between those commits
163 # outgoing commits between those commits
160 compare_page.contains_commits([commit2])
164 compare_page.contains_commits([commit2])
161 compare_page.contains_file_links_and_anchors([
165 anchor = 'a_c-{}-c8e92ef85cd1'.format(commit2_short_id)
162 ('.hgignore', 'a_c--c8e92ef85cd1'),
166 response.mustcontain(anchor)
163 ])
167 compare_page.contains_file_links_and_anchors([('.hgignore', anchor),])
@@ -1,1228 +1,1233 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import mock
20 import mock
21 import pytest
21 import pytest
22
22
23 import rhodecode
23 import rhodecode
24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
25 from rhodecode.lib.vcs.nodes import FileNode
25 from rhodecode.lib.vcs.nodes import FileNode
26 from rhodecode.lib import helpers as h
26 from rhodecode.lib import helpers as h
27 from rhodecode.model.changeset_status import ChangesetStatusModel
27 from rhodecode.model.changeset_status import ChangesetStatusModel
28 from rhodecode.model.db import (
28 from rhodecode.model.db import (
29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
30 from rhodecode.model.meta import Session
30 from rhodecode.model.meta import Session
31 from rhodecode.model.pull_request import PullRequestModel
31 from rhodecode.model.pull_request import PullRequestModel
32 from rhodecode.model.user import UserModel
32 from rhodecode.model.user import UserModel
33 from rhodecode.tests import (
33 from rhodecode.tests import (
34 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
34 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
35 from rhodecode.tests.utils import AssertResponse
35 from rhodecode.tests.utils import AssertResponse
36
36
37
37
38 def route_path(name, params=None, **kwargs):
38 def route_path(name, params=None, **kwargs):
39 import urllib
39 import urllib
40
40
41 base_url = {
41 base_url = {
42 'repo_changelog': '/{repo_name}/changelog',
42 'repo_changelog': '/{repo_name}/changelog',
43 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
43 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
44 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
44 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
45 'pullrequest_show_all': '/{repo_name}/pull-request',
45 'pullrequest_show_all': '/{repo_name}/pull-request',
46 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
46 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
47 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
47 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
48 'pullrequest_repo_destinations': '/{repo_name}/pull-request/repo-destinations',
48 'pullrequest_repo_destinations': '/{repo_name}/pull-request/repo-destinations',
49 'pullrequest_new': '/{repo_name}/pull-request/new',
49 'pullrequest_new': '/{repo_name}/pull-request/new',
50 'pullrequest_create': '/{repo_name}/pull-request/create',
50 'pullrequest_create': '/{repo_name}/pull-request/create',
51 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
51 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
52 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
52 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
53 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
53 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
54 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
54 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
55 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
55 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
56 }[name].format(**kwargs)
56 }[name].format(**kwargs)
57
57
58 if params:
58 if params:
59 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
59 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
60 return base_url
60 return base_url
61
61
62
62
63 @pytest.mark.usefixtures('app', 'autologin_user')
63 @pytest.mark.usefixtures('app', 'autologin_user')
64 @pytest.mark.backends("git", "hg")
64 @pytest.mark.backends("git", "hg")
65 class TestPullrequestsView(object):
65 class TestPullrequestsView(object):
66
66
67 def test_index(self, backend):
67 def test_index(self, backend):
68 self.app.get(route_path(
68 self.app.get(route_path(
69 'pullrequest_new',
69 'pullrequest_new',
70 repo_name=backend.repo_name))
70 repo_name=backend.repo_name))
71
71
72 def test_option_menu_create_pull_request_exists(self, backend):
72 def test_option_menu_create_pull_request_exists(self, backend):
73 repo_name = backend.repo_name
73 repo_name = backend.repo_name
74 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
74 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
75
75
76 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
76 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
77 'pullrequest_new', repo_name=repo_name)
77 'pullrequest_new', repo_name=repo_name)
78 response.mustcontain(create_pr_link)
78 response.mustcontain(create_pr_link)
79
79
80 def test_create_pr_form_with_raw_commit_id(self, backend):
80 def test_create_pr_form_with_raw_commit_id(self, backend):
81 repo = backend.repo
81 repo = backend.repo
82
82
83 self.app.get(
83 self.app.get(
84 route_path('pullrequest_new',
84 route_path('pullrequest_new', repo_name=repo.repo_name,
85 repo_name=repo.repo_name,
85 commit=repo.get_commit().raw_id),
86 commit=repo.get_commit().raw_id),
87 status=200)
86 status=200)
88
87
89 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
88 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
90 def test_show(self, pr_util, pr_merge_enabled):
89 @pytest.mark.parametrize('range_diff', ["0", "1"])
90 def test_show(self, pr_util, pr_merge_enabled, range_diff):
91 pull_request = pr_util.create_pull_request(
91 pull_request = pr_util.create_pull_request(
92 mergeable=pr_merge_enabled, enable_notifications=False)
92 mergeable=pr_merge_enabled, enable_notifications=False)
93
93
94 response = self.app.get(route_path(
94 response = self.app.get(route_path(
95 'pullrequest_show',
95 'pullrequest_show',
96 repo_name=pull_request.target_repo.scm_instance().name,
96 repo_name=pull_request.target_repo.scm_instance().name,
97 pull_request_id=pull_request.pull_request_id))
97 pull_request_id=pull_request.pull_request_id,
98 params={'range-diff': range_diff}))
98
99
99 for commit_id in pull_request.revisions:
100 for commit_id in pull_request.revisions:
100 response.mustcontain(commit_id)
101 response.mustcontain(commit_id)
101
102
102 assert pull_request.target_ref_parts.type in response
103 assert pull_request.target_ref_parts.type in response
103 assert pull_request.target_ref_parts.name in response
104 assert pull_request.target_ref_parts.name in response
104 target_clone_url = pull_request.target_repo.clone_url()
105 target_clone_url = pull_request.target_repo.clone_url()
105 assert target_clone_url in response
106 assert target_clone_url in response
106
107
107 assert 'class="pull-request-merge"' in response
108 assert 'class="pull-request-merge"' in response
108 assert (
109 if pr_merge_enabled:
109 'Server-side pull request merging is disabled.'
110 response.mustcontain('Pull request reviewer approval is pending')
110 in response) != pr_merge_enabled
111 else:
112 response.mustcontain('Server-side pull request merging is disabled.')
113
114 if range_diff == "1":
115 response.mustcontain('Turn off: Show the diff as commit range')
111
116
112 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
117 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
113 # Logout
118 # Logout
114 response = self.app.post(
119 response = self.app.post(
115 h.route_path('logout'),
120 h.route_path('logout'),
116 params={'csrf_token': csrf_token})
121 params={'csrf_token': csrf_token})
117 # Login as regular user
122 # Login as regular user
118 response = self.app.post(h.route_path('login'),
123 response = self.app.post(h.route_path('login'),
119 {'username': TEST_USER_REGULAR_LOGIN,
124 {'username': TEST_USER_REGULAR_LOGIN,
120 'password': 'test12'})
125 'password': 'test12'})
121
126
122 pull_request = pr_util.create_pull_request(
127 pull_request = pr_util.create_pull_request(
123 author=TEST_USER_REGULAR_LOGIN)
128 author=TEST_USER_REGULAR_LOGIN)
124
129
125 response = self.app.get(route_path(
130 response = self.app.get(route_path(
126 'pullrequest_show',
131 'pullrequest_show',
127 repo_name=pull_request.target_repo.scm_instance().name,
132 repo_name=pull_request.target_repo.scm_instance().name,
128 pull_request_id=pull_request.pull_request_id))
133 pull_request_id=pull_request.pull_request_id))
129
134
130 response.mustcontain('Server-side pull request merging is disabled.')
135 response.mustcontain('Server-side pull request merging is disabled.')
131
136
132 assert_response = response.assert_response()
137 assert_response = response.assert_response()
133 # for a regular user without merge permissions, we don't see it
138 # for a regular user without merge permissions, we don't see it
134 assert_response.no_element_exists('#close-pull-request-action')
139 assert_response.no_element_exists('#close-pull-request-action')
135
140
136 user_util.grant_user_permission_to_repo(
141 user_util.grant_user_permission_to_repo(
137 pull_request.target_repo,
142 pull_request.target_repo,
138 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
143 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
139 'repository.write')
144 'repository.write')
140 response = self.app.get(route_path(
145 response = self.app.get(route_path(
141 'pullrequest_show',
146 'pullrequest_show',
142 repo_name=pull_request.target_repo.scm_instance().name,
147 repo_name=pull_request.target_repo.scm_instance().name,
143 pull_request_id=pull_request.pull_request_id))
148 pull_request_id=pull_request.pull_request_id))
144
149
145 response.mustcontain('Server-side pull request merging is disabled.')
150 response.mustcontain('Server-side pull request merging is disabled.')
146
151
147 assert_response = response.assert_response()
152 assert_response = response.assert_response()
148 # now the regular user has merge permissions, we have the CLOSE button
153 # now the regular user has merge permissions, we have the CLOSE button
149 assert_response.one_element_exists('#close-pull-request-action')
154 assert_response.one_element_exists('#close-pull-request-action')
150
155
151 def test_show_invalid_commit_id(self, pr_util):
156 def test_show_invalid_commit_id(self, pr_util):
152 # Simulating invalid revisions which will cause a lookup error
157 # Simulating invalid revisions which will cause a lookup error
153 pull_request = pr_util.create_pull_request()
158 pull_request = pr_util.create_pull_request()
154 pull_request.revisions = ['invalid']
159 pull_request.revisions = ['invalid']
155 Session().add(pull_request)
160 Session().add(pull_request)
156 Session().commit()
161 Session().commit()
157
162
158 response = self.app.get(route_path(
163 response = self.app.get(route_path(
159 'pullrequest_show',
164 'pullrequest_show',
160 repo_name=pull_request.target_repo.scm_instance().name,
165 repo_name=pull_request.target_repo.scm_instance().name,
161 pull_request_id=pull_request.pull_request_id))
166 pull_request_id=pull_request.pull_request_id))
162
167
163 for commit_id in pull_request.revisions:
168 for commit_id in pull_request.revisions:
164 response.mustcontain(commit_id)
169 response.mustcontain(commit_id)
165
170
166 def test_show_invalid_source_reference(self, pr_util):
171 def test_show_invalid_source_reference(self, pr_util):
167 pull_request = pr_util.create_pull_request()
172 pull_request = pr_util.create_pull_request()
168 pull_request.source_ref = 'branch:b:invalid'
173 pull_request.source_ref = 'branch:b:invalid'
169 Session().add(pull_request)
174 Session().add(pull_request)
170 Session().commit()
175 Session().commit()
171
176
172 self.app.get(route_path(
177 self.app.get(route_path(
173 'pullrequest_show',
178 'pullrequest_show',
174 repo_name=pull_request.target_repo.scm_instance().name,
179 repo_name=pull_request.target_repo.scm_instance().name,
175 pull_request_id=pull_request.pull_request_id))
180 pull_request_id=pull_request.pull_request_id))
176
181
177 def test_edit_title_description(self, pr_util, csrf_token):
182 def test_edit_title_description(self, pr_util, csrf_token):
178 pull_request = pr_util.create_pull_request()
183 pull_request = pr_util.create_pull_request()
179 pull_request_id = pull_request.pull_request_id
184 pull_request_id = pull_request.pull_request_id
180
185
181 response = self.app.post(
186 response = self.app.post(
182 route_path('pullrequest_update',
187 route_path('pullrequest_update',
183 repo_name=pull_request.target_repo.repo_name,
188 repo_name=pull_request.target_repo.repo_name,
184 pull_request_id=pull_request_id),
189 pull_request_id=pull_request_id),
185 params={
190 params={
186 'edit_pull_request': 'true',
191 'edit_pull_request': 'true',
187 'title': 'New title',
192 'title': 'New title',
188 'description': 'New description',
193 'description': 'New description',
189 'csrf_token': csrf_token})
194 'csrf_token': csrf_token})
190
195
191 assert_session_flash(
196 assert_session_flash(
192 response, u'Pull request title & description updated.',
197 response, u'Pull request title & description updated.',
193 category='success')
198 category='success')
194
199
195 pull_request = PullRequest.get(pull_request_id)
200 pull_request = PullRequest.get(pull_request_id)
196 assert pull_request.title == 'New title'
201 assert pull_request.title == 'New title'
197 assert pull_request.description == 'New description'
202 assert pull_request.description == 'New description'
198
203
199 def test_edit_title_description_closed(self, pr_util, csrf_token):
204 def test_edit_title_description_closed(self, pr_util, csrf_token):
200 pull_request = pr_util.create_pull_request()
205 pull_request = pr_util.create_pull_request()
201 pull_request_id = pull_request.pull_request_id
206 pull_request_id = pull_request.pull_request_id
202 repo_name = pull_request.target_repo.repo_name
207 repo_name = pull_request.target_repo.repo_name
203 pr_util.close()
208 pr_util.close()
204
209
205 response = self.app.post(
210 response = self.app.post(
206 route_path('pullrequest_update',
211 route_path('pullrequest_update',
207 repo_name=repo_name, pull_request_id=pull_request_id),
212 repo_name=repo_name, pull_request_id=pull_request_id),
208 params={
213 params={
209 'edit_pull_request': 'true',
214 'edit_pull_request': 'true',
210 'title': 'New title',
215 'title': 'New title',
211 'description': 'New description',
216 'description': 'New description',
212 'csrf_token': csrf_token}, status=200)
217 'csrf_token': csrf_token}, status=200)
213 assert_session_flash(
218 assert_session_flash(
214 response, u'Cannot update closed pull requests.',
219 response, u'Cannot update closed pull requests.',
215 category='error')
220 category='error')
216
221
217 def test_update_invalid_source_reference(self, pr_util, csrf_token):
222 def test_update_invalid_source_reference(self, pr_util, csrf_token):
218 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
223 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
219
224
220 pull_request = pr_util.create_pull_request()
225 pull_request = pr_util.create_pull_request()
221 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
226 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
222 Session().add(pull_request)
227 Session().add(pull_request)
223 Session().commit()
228 Session().commit()
224
229
225 pull_request_id = pull_request.pull_request_id
230 pull_request_id = pull_request.pull_request_id
226
231
227 response = self.app.post(
232 response = self.app.post(
228 route_path('pullrequest_update',
233 route_path('pullrequest_update',
229 repo_name=pull_request.target_repo.repo_name,
234 repo_name=pull_request.target_repo.repo_name,
230 pull_request_id=pull_request_id),
235 pull_request_id=pull_request_id),
231 params={'update_commits': 'true',
236 params={'update_commits': 'true',
232 'csrf_token': csrf_token})
237 'csrf_token': csrf_token})
233
238
234 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
239 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
235 UpdateFailureReason.MISSING_SOURCE_REF])
240 UpdateFailureReason.MISSING_SOURCE_REF])
236 assert_session_flash(response, expected_msg, category='error')
241 assert_session_flash(response, expected_msg, category='error')
237
242
238 def test_missing_target_reference(self, pr_util, csrf_token):
243 def test_missing_target_reference(self, pr_util, csrf_token):
239 from rhodecode.lib.vcs.backends.base import MergeFailureReason
244 from rhodecode.lib.vcs.backends.base import MergeFailureReason
240 pull_request = pr_util.create_pull_request(
245 pull_request = pr_util.create_pull_request(
241 approved=True, mergeable=True)
246 approved=True, mergeable=True)
242 pull_request.target_ref = 'branch:invalid-branch:invalid-commit-id'
247 pull_request.target_ref = 'branch:invalid-branch:invalid-commit-id'
243 Session().add(pull_request)
248 Session().add(pull_request)
244 Session().commit()
249 Session().commit()
245
250
246 pull_request_id = pull_request.pull_request_id
251 pull_request_id = pull_request.pull_request_id
247 pull_request_url = route_path(
252 pull_request_url = route_path(
248 'pullrequest_show',
253 'pullrequest_show',
249 repo_name=pull_request.target_repo.repo_name,
254 repo_name=pull_request.target_repo.repo_name,
250 pull_request_id=pull_request_id)
255 pull_request_id=pull_request_id)
251
256
252 response = self.app.get(pull_request_url)
257 response = self.app.get(pull_request_url)
253
258
254 assertr = AssertResponse(response)
259 assertr = AssertResponse(response)
255 expected_msg = PullRequestModel.MERGE_STATUS_MESSAGES[
260 expected_msg = PullRequestModel.MERGE_STATUS_MESSAGES[
256 MergeFailureReason.MISSING_TARGET_REF]
261 MergeFailureReason.MISSING_TARGET_REF]
257 assertr.element_contains(
262 assertr.element_contains(
258 'span[data-role="merge-message"]', str(expected_msg))
263 'span[data-role="merge-message"]', str(expected_msg))
259
264
260 def test_comment_and_close_pull_request_custom_message_approved(
265 def test_comment_and_close_pull_request_custom_message_approved(
261 self, pr_util, csrf_token, xhr_header):
266 self, pr_util, csrf_token, xhr_header):
262
267
263 pull_request = pr_util.create_pull_request(approved=True)
268 pull_request = pr_util.create_pull_request(approved=True)
264 pull_request_id = pull_request.pull_request_id
269 pull_request_id = pull_request.pull_request_id
265 author = pull_request.user_id
270 author = pull_request.user_id
266 repo = pull_request.target_repo.repo_id
271 repo = pull_request.target_repo.repo_id
267
272
268 self.app.post(
273 self.app.post(
269 route_path('pullrequest_comment_create',
274 route_path('pullrequest_comment_create',
270 repo_name=pull_request.target_repo.scm_instance().name,
275 repo_name=pull_request.target_repo.scm_instance().name,
271 pull_request_id=pull_request_id),
276 pull_request_id=pull_request_id),
272 params={
277 params={
273 'close_pull_request': '1',
278 'close_pull_request': '1',
274 'text': 'Closing a PR',
279 'text': 'Closing a PR',
275 'csrf_token': csrf_token},
280 'csrf_token': csrf_token},
276 extra_environ=xhr_header,)
281 extra_environ=xhr_header,)
277
282
278 journal = UserLog.query()\
283 journal = UserLog.query()\
279 .filter(UserLog.user_id == author)\
284 .filter(UserLog.user_id == author)\
280 .filter(UserLog.repository_id == repo) \
285 .filter(UserLog.repository_id == repo) \
281 .order_by('user_log_id') \
286 .order_by('user_log_id') \
282 .all()
287 .all()
283 assert journal[-1].action == 'repo.pull_request.close'
288 assert journal[-1].action == 'repo.pull_request.close'
284
289
285 pull_request = PullRequest.get(pull_request_id)
290 pull_request = PullRequest.get(pull_request_id)
286 assert pull_request.is_closed()
291 assert pull_request.is_closed()
287
292
288 status = ChangesetStatusModel().get_status(
293 status = ChangesetStatusModel().get_status(
289 pull_request.source_repo, pull_request=pull_request)
294 pull_request.source_repo, pull_request=pull_request)
290 assert status == ChangesetStatus.STATUS_APPROVED
295 assert status == ChangesetStatus.STATUS_APPROVED
291 comments = ChangesetComment().query() \
296 comments = ChangesetComment().query() \
292 .filter(ChangesetComment.pull_request == pull_request) \
297 .filter(ChangesetComment.pull_request == pull_request) \
293 .order_by(ChangesetComment.comment_id.asc())\
298 .order_by(ChangesetComment.comment_id.asc())\
294 .all()
299 .all()
295 assert comments[-1].text == 'Closing a PR'
300 assert comments[-1].text == 'Closing a PR'
296
301
297 def test_comment_force_close_pull_request_rejected(
302 def test_comment_force_close_pull_request_rejected(
298 self, pr_util, csrf_token, xhr_header):
303 self, pr_util, csrf_token, xhr_header):
299 pull_request = pr_util.create_pull_request()
304 pull_request = pr_util.create_pull_request()
300 pull_request_id = pull_request.pull_request_id
305 pull_request_id = pull_request.pull_request_id
301 PullRequestModel().update_reviewers(
306 PullRequestModel().update_reviewers(
302 pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
307 pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
303 pull_request.author)
308 pull_request.author)
304 author = pull_request.user_id
309 author = pull_request.user_id
305 repo = pull_request.target_repo.repo_id
310 repo = pull_request.target_repo.repo_id
306
311
307 self.app.post(
312 self.app.post(
308 route_path('pullrequest_comment_create',
313 route_path('pullrequest_comment_create',
309 repo_name=pull_request.target_repo.scm_instance().name,
314 repo_name=pull_request.target_repo.scm_instance().name,
310 pull_request_id=pull_request_id),
315 pull_request_id=pull_request_id),
311 params={
316 params={
312 'close_pull_request': '1',
317 'close_pull_request': '1',
313 'csrf_token': csrf_token},
318 'csrf_token': csrf_token},
314 extra_environ=xhr_header)
319 extra_environ=xhr_header)
315
320
316 pull_request = PullRequest.get(pull_request_id)
321 pull_request = PullRequest.get(pull_request_id)
317
322
318 journal = UserLog.query()\
323 journal = UserLog.query()\
319 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
324 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
320 .order_by('user_log_id') \
325 .order_by('user_log_id') \
321 .all()
326 .all()
322 assert journal[-1].action == 'repo.pull_request.close'
327 assert journal[-1].action == 'repo.pull_request.close'
323
328
324 # check only the latest status, not the review status
329 # check only the latest status, not the review status
325 status = ChangesetStatusModel().get_status(
330 status = ChangesetStatusModel().get_status(
326 pull_request.source_repo, pull_request=pull_request)
331 pull_request.source_repo, pull_request=pull_request)
327 assert status == ChangesetStatus.STATUS_REJECTED
332 assert status == ChangesetStatus.STATUS_REJECTED
328
333
329 def test_comment_and_close_pull_request(
334 def test_comment_and_close_pull_request(
330 self, pr_util, csrf_token, xhr_header):
335 self, pr_util, csrf_token, xhr_header):
331 pull_request = pr_util.create_pull_request()
336 pull_request = pr_util.create_pull_request()
332 pull_request_id = pull_request.pull_request_id
337 pull_request_id = pull_request.pull_request_id
333
338
334 response = self.app.post(
339 response = self.app.post(
335 route_path('pullrequest_comment_create',
340 route_path('pullrequest_comment_create',
336 repo_name=pull_request.target_repo.scm_instance().name,
341 repo_name=pull_request.target_repo.scm_instance().name,
337 pull_request_id=pull_request.pull_request_id),
342 pull_request_id=pull_request.pull_request_id),
338 params={
343 params={
339 'close_pull_request': 'true',
344 'close_pull_request': 'true',
340 'csrf_token': csrf_token},
345 'csrf_token': csrf_token},
341 extra_environ=xhr_header)
346 extra_environ=xhr_header)
342
347
343 assert response.json
348 assert response.json
344
349
345 pull_request = PullRequest.get(pull_request_id)
350 pull_request = PullRequest.get(pull_request_id)
346 assert pull_request.is_closed()
351 assert pull_request.is_closed()
347
352
348 # check only the latest status, not the review status
353 # check only the latest status, not the review status
349 status = ChangesetStatusModel().get_status(
354 status = ChangesetStatusModel().get_status(
350 pull_request.source_repo, pull_request=pull_request)
355 pull_request.source_repo, pull_request=pull_request)
351 assert status == ChangesetStatus.STATUS_REJECTED
356 assert status == ChangesetStatus.STATUS_REJECTED
352
357
353 def test_create_pull_request(self, backend, csrf_token):
358 def test_create_pull_request(self, backend, csrf_token):
354 commits = [
359 commits = [
355 {'message': 'ancestor'},
360 {'message': 'ancestor'},
356 {'message': 'change'},
361 {'message': 'change'},
357 {'message': 'change2'},
362 {'message': 'change2'},
358 ]
363 ]
359 commit_ids = backend.create_master_repo(commits)
364 commit_ids = backend.create_master_repo(commits)
360 target = backend.create_repo(heads=['ancestor'])
365 target = backend.create_repo(heads=['ancestor'])
361 source = backend.create_repo(heads=['change2'])
366 source = backend.create_repo(heads=['change2'])
362
367
363 response = self.app.post(
368 response = self.app.post(
364 route_path('pullrequest_create', repo_name=source.repo_name),
369 route_path('pullrequest_create', repo_name=source.repo_name),
365 [
370 [
366 ('source_repo', source.repo_name),
371 ('source_repo', source.repo_name),
367 ('source_ref', 'branch:default:' + commit_ids['change2']),
372 ('source_ref', 'branch:default:' + commit_ids['change2']),
368 ('target_repo', target.repo_name),
373 ('target_repo', target.repo_name),
369 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
374 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
370 ('common_ancestor', commit_ids['ancestor']),
375 ('common_ancestor', commit_ids['ancestor']),
371 ('pullrequest_title', 'Title'),
376 ('pullrequest_title', 'Title'),
372 ('pullrequest_desc', 'Description'),
377 ('pullrequest_desc', 'Description'),
373 ('description_renderer', 'markdown'),
378 ('description_renderer', 'markdown'),
374 ('__start__', 'review_members:sequence'),
379 ('__start__', 'review_members:sequence'),
375 ('__start__', 'reviewer:mapping'),
380 ('__start__', 'reviewer:mapping'),
376 ('user_id', '1'),
381 ('user_id', '1'),
377 ('__start__', 'reasons:sequence'),
382 ('__start__', 'reasons:sequence'),
378 ('reason', 'Some reason'),
383 ('reason', 'Some reason'),
379 ('__end__', 'reasons:sequence'),
384 ('__end__', 'reasons:sequence'),
380 ('__start__', 'rules:sequence'),
385 ('__start__', 'rules:sequence'),
381 ('__end__', 'rules:sequence'),
386 ('__end__', 'rules:sequence'),
382 ('mandatory', 'False'),
387 ('mandatory', 'False'),
383 ('__end__', 'reviewer:mapping'),
388 ('__end__', 'reviewer:mapping'),
384 ('__end__', 'review_members:sequence'),
389 ('__end__', 'review_members:sequence'),
385 ('__start__', 'revisions:sequence'),
390 ('__start__', 'revisions:sequence'),
386 ('revisions', commit_ids['change']),
391 ('revisions', commit_ids['change']),
387 ('revisions', commit_ids['change2']),
392 ('revisions', commit_ids['change2']),
388 ('__end__', 'revisions:sequence'),
393 ('__end__', 'revisions:sequence'),
389 ('user', ''),
394 ('user', ''),
390 ('csrf_token', csrf_token),
395 ('csrf_token', csrf_token),
391 ],
396 ],
392 status=302)
397 status=302)
393
398
394 location = response.headers['Location']
399 location = response.headers['Location']
395 pull_request_id = location.rsplit('/', 1)[1]
400 pull_request_id = location.rsplit('/', 1)[1]
396 assert pull_request_id != 'new'
401 assert pull_request_id != 'new'
397 pull_request = PullRequest.get(int(pull_request_id))
402 pull_request = PullRequest.get(int(pull_request_id))
398
403
399 # check that we now have both revisions
404 # check that we now have both revisions
400 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
405 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
401 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
406 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
402 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
407 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
403 assert pull_request.target_ref == expected_target_ref
408 assert pull_request.target_ref == expected_target_ref
404
409
405 def test_reviewer_notifications(self, backend, csrf_token):
410 def test_reviewer_notifications(self, backend, csrf_token):
406 # We have to use the app.post for this test so it will create the
411 # We have to use the app.post for this test so it will create the
407 # notifications properly with the new PR
412 # notifications properly with the new PR
408 commits = [
413 commits = [
409 {'message': 'ancestor',
414 {'message': 'ancestor',
410 'added': [FileNode('file_A', content='content_of_ancestor')]},
415 'added': [FileNode('file_A', content='content_of_ancestor')]},
411 {'message': 'change',
416 {'message': 'change',
412 'added': [FileNode('file_a', content='content_of_change')]},
417 'added': [FileNode('file_a', content='content_of_change')]},
413 {'message': 'change-child'},
418 {'message': 'change-child'},
414 {'message': 'ancestor-child', 'parents': ['ancestor'],
419 {'message': 'ancestor-child', 'parents': ['ancestor'],
415 'added': [
420 'added': [
416 FileNode('file_B', content='content_of_ancestor_child')]},
421 FileNode('file_B', content='content_of_ancestor_child')]},
417 {'message': 'ancestor-child-2'},
422 {'message': 'ancestor-child-2'},
418 ]
423 ]
419 commit_ids = backend.create_master_repo(commits)
424 commit_ids = backend.create_master_repo(commits)
420 target = backend.create_repo(heads=['ancestor-child'])
425 target = backend.create_repo(heads=['ancestor-child'])
421 source = backend.create_repo(heads=['change'])
426 source = backend.create_repo(heads=['change'])
422
427
423 response = self.app.post(
428 response = self.app.post(
424 route_path('pullrequest_create', repo_name=source.repo_name),
429 route_path('pullrequest_create', repo_name=source.repo_name),
425 [
430 [
426 ('source_repo', source.repo_name),
431 ('source_repo', source.repo_name),
427 ('source_ref', 'branch:default:' + commit_ids['change']),
432 ('source_ref', 'branch:default:' + commit_ids['change']),
428 ('target_repo', target.repo_name),
433 ('target_repo', target.repo_name),
429 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
434 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
430 ('common_ancestor', commit_ids['ancestor']),
435 ('common_ancestor', commit_ids['ancestor']),
431 ('pullrequest_title', 'Title'),
436 ('pullrequest_title', 'Title'),
432 ('pullrequest_desc', 'Description'),
437 ('pullrequest_desc', 'Description'),
433 ('description_renderer', 'markdown'),
438 ('description_renderer', 'markdown'),
434 ('__start__', 'review_members:sequence'),
439 ('__start__', 'review_members:sequence'),
435 ('__start__', 'reviewer:mapping'),
440 ('__start__', 'reviewer:mapping'),
436 ('user_id', '2'),
441 ('user_id', '2'),
437 ('__start__', 'reasons:sequence'),
442 ('__start__', 'reasons:sequence'),
438 ('reason', 'Some reason'),
443 ('reason', 'Some reason'),
439 ('__end__', 'reasons:sequence'),
444 ('__end__', 'reasons:sequence'),
440 ('__start__', 'rules:sequence'),
445 ('__start__', 'rules:sequence'),
441 ('__end__', 'rules:sequence'),
446 ('__end__', 'rules:sequence'),
442 ('mandatory', 'False'),
447 ('mandatory', 'False'),
443 ('__end__', 'reviewer:mapping'),
448 ('__end__', 'reviewer:mapping'),
444 ('__end__', 'review_members:sequence'),
449 ('__end__', 'review_members:sequence'),
445 ('__start__', 'revisions:sequence'),
450 ('__start__', 'revisions:sequence'),
446 ('revisions', commit_ids['change']),
451 ('revisions', commit_ids['change']),
447 ('__end__', 'revisions:sequence'),
452 ('__end__', 'revisions:sequence'),
448 ('user', ''),
453 ('user', ''),
449 ('csrf_token', csrf_token),
454 ('csrf_token', csrf_token),
450 ],
455 ],
451 status=302)
456 status=302)
452
457
453 location = response.headers['Location']
458 location = response.headers['Location']
454
459
455 pull_request_id = location.rsplit('/', 1)[1]
460 pull_request_id = location.rsplit('/', 1)[1]
456 assert pull_request_id != 'new'
461 assert pull_request_id != 'new'
457 pull_request = PullRequest.get(int(pull_request_id))
462 pull_request = PullRequest.get(int(pull_request_id))
458
463
459 # Check that a notification was made
464 # Check that a notification was made
460 notifications = Notification.query()\
465 notifications = Notification.query()\
461 .filter(Notification.created_by == pull_request.author.user_id,
466 .filter(Notification.created_by == pull_request.author.user_id,
462 Notification.type_ == Notification.TYPE_PULL_REQUEST,
467 Notification.type_ == Notification.TYPE_PULL_REQUEST,
463 Notification.subject.contains(
468 Notification.subject.contains(
464 "wants you to review pull request #%s" % pull_request_id))
469 "wants you to review pull request #%s" % pull_request_id))
465 assert len(notifications.all()) == 1
470 assert len(notifications.all()) == 1
466
471
467 # Change reviewers and check that a notification was made
472 # Change reviewers and check that a notification was made
468 PullRequestModel().update_reviewers(
473 PullRequestModel().update_reviewers(
469 pull_request.pull_request_id, [(1, [], False, [])],
474 pull_request.pull_request_id, [(1, [], False, [])],
470 pull_request.author)
475 pull_request.author)
471 assert len(notifications.all()) == 2
476 assert len(notifications.all()) == 2
472
477
473 def test_create_pull_request_stores_ancestor_commit_id(self, backend,
478 def test_create_pull_request_stores_ancestor_commit_id(self, backend,
474 csrf_token):
479 csrf_token):
475 commits = [
480 commits = [
476 {'message': 'ancestor',
481 {'message': 'ancestor',
477 'added': [FileNode('file_A', content='content_of_ancestor')]},
482 'added': [FileNode('file_A', content='content_of_ancestor')]},
478 {'message': 'change',
483 {'message': 'change',
479 'added': [FileNode('file_a', content='content_of_change')]},
484 'added': [FileNode('file_a', content='content_of_change')]},
480 {'message': 'change-child'},
485 {'message': 'change-child'},
481 {'message': 'ancestor-child', 'parents': ['ancestor'],
486 {'message': 'ancestor-child', 'parents': ['ancestor'],
482 'added': [
487 'added': [
483 FileNode('file_B', content='content_of_ancestor_child')]},
488 FileNode('file_B', content='content_of_ancestor_child')]},
484 {'message': 'ancestor-child-2'},
489 {'message': 'ancestor-child-2'},
485 ]
490 ]
486 commit_ids = backend.create_master_repo(commits)
491 commit_ids = backend.create_master_repo(commits)
487 target = backend.create_repo(heads=['ancestor-child'])
492 target = backend.create_repo(heads=['ancestor-child'])
488 source = backend.create_repo(heads=['change'])
493 source = backend.create_repo(heads=['change'])
489
494
490 response = self.app.post(
495 response = self.app.post(
491 route_path('pullrequest_create', repo_name=source.repo_name),
496 route_path('pullrequest_create', repo_name=source.repo_name),
492 [
497 [
493 ('source_repo', source.repo_name),
498 ('source_repo', source.repo_name),
494 ('source_ref', 'branch:default:' + commit_ids['change']),
499 ('source_ref', 'branch:default:' + commit_ids['change']),
495 ('target_repo', target.repo_name),
500 ('target_repo', target.repo_name),
496 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
501 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
497 ('common_ancestor', commit_ids['ancestor']),
502 ('common_ancestor', commit_ids['ancestor']),
498 ('pullrequest_title', 'Title'),
503 ('pullrequest_title', 'Title'),
499 ('pullrequest_desc', 'Description'),
504 ('pullrequest_desc', 'Description'),
500 ('description_renderer', 'markdown'),
505 ('description_renderer', 'markdown'),
501 ('__start__', 'review_members:sequence'),
506 ('__start__', 'review_members:sequence'),
502 ('__start__', 'reviewer:mapping'),
507 ('__start__', 'reviewer:mapping'),
503 ('user_id', '1'),
508 ('user_id', '1'),
504 ('__start__', 'reasons:sequence'),
509 ('__start__', 'reasons:sequence'),
505 ('reason', 'Some reason'),
510 ('reason', 'Some reason'),
506 ('__end__', 'reasons:sequence'),
511 ('__end__', 'reasons:sequence'),
507 ('__start__', 'rules:sequence'),
512 ('__start__', 'rules:sequence'),
508 ('__end__', 'rules:sequence'),
513 ('__end__', 'rules:sequence'),
509 ('mandatory', 'False'),
514 ('mandatory', 'False'),
510 ('__end__', 'reviewer:mapping'),
515 ('__end__', 'reviewer:mapping'),
511 ('__end__', 'review_members:sequence'),
516 ('__end__', 'review_members:sequence'),
512 ('__start__', 'revisions:sequence'),
517 ('__start__', 'revisions:sequence'),
513 ('revisions', commit_ids['change']),
518 ('revisions', commit_ids['change']),
514 ('__end__', 'revisions:sequence'),
519 ('__end__', 'revisions:sequence'),
515 ('user', ''),
520 ('user', ''),
516 ('csrf_token', csrf_token),
521 ('csrf_token', csrf_token),
517 ],
522 ],
518 status=302)
523 status=302)
519
524
520 location = response.headers['Location']
525 location = response.headers['Location']
521
526
522 pull_request_id = location.rsplit('/', 1)[1]
527 pull_request_id = location.rsplit('/', 1)[1]
523 assert pull_request_id != 'new'
528 assert pull_request_id != 'new'
524 pull_request = PullRequest.get(int(pull_request_id))
529 pull_request = PullRequest.get(int(pull_request_id))
525
530
526 # target_ref has to point to the ancestor's commit_id in order to
531 # target_ref has to point to the ancestor's commit_id in order to
527 # show the correct diff
532 # show the correct diff
528 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
533 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
529 assert pull_request.target_ref == expected_target_ref
534 assert pull_request.target_ref == expected_target_ref
530
535
531 # Check generated diff contents
536 # Check generated diff contents
532 response = response.follow()
537 response = response.follow()
533 assert 'content_of_ancestor' not in response.body
538 assert 'content_of_ancestor' not in response.body
534 assert 'content_of_ancestor-child' not in response.body
539 assert 'content_of_ancestor-child' not in response.body
535 assert 'content_of_change' in response.body
540 assert 'content_of_change' in response.body
536
541
537 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
542 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
538 # Clear any previous calls to rcextensions
543 # Clear any previous calls to rcextensions
539 rhodecode.EXTENSIONS.calls.clear()
544 rhodecode.EXTENSIONS.calls.clear()
540
545
541 pull_request = pr_util.create_pull_request(
546 pull_request = pr_util.create_pull_request(
542 approved=True, mergeable=True)
547 approved=True, mergeable=True)
543 pull_request_id = pull_request.pull_request_id
548 pull_request_id = pull_request.pull_request_id
544 repo_name = pull_request.target_repo.scm_instance().name,
549 repo_name = pull_request.target_repo.scm_instance().name,
545
550
546 response = self.app.post(
551 response = self.app.post(
547 route_path('pullrequest_merge',
552 route_path('pullrequest_merge',
548 repo_name=str(repo_name[0]),
553 repo_name=str(repo_name[0]),
549 pull_request_id=pull_request_id),
554 pull_request_id=pull_request_id),
550 params={'csrf_token': csrf_token}).follow()
555 params={'csrf_token': csrf_token}).follow()
551
556
552 pull_request = PullRequest.get(pull_request_id)
557 pull_request = PullRequest.get(pull_request_id)
553
558
554 assert response.status_int == 200
559 assert response.status_int == 200
555 assert pull_request.is_closed()
560 assert pull_request.is_closed()
556 assert_pull_request_status(
561 assert_pull_request_status(
557 pull_request, ChangesetStatus.STATUS_APPROVED)
562 pull_request, ChangesetStatus.STATUS_APPROVED)
558
563
559 # Check the relevant log entries were added
564 # Check the relevant log entries were added
560 user_logs = UserLog.query().order_by('-user_log_id').limit(3)
565 user_logs = UserLog.query().order_by('-user_log_id').limit(3)
561 actions = [log.action for log in user_logs]
566 actions = [log.action for log in user_logs]
562 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
567 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
563 expected_actions = [
568 expected_actions = [
564 u'repo.pull_request.close',
569 u'repo.pull_request.close',
565 u'repo.pull_request.merge',
570 u'repo.pull_request.merge',
566 u'repo.pull_request.comment.create'
571 u'repo.pull_request.comment.create'
567 ]
572 ]
568 assert actions == expected_actions
573 assert actions == expected_actions
569
574
570 user_logs = UserLog.query().order_by('-user_log_id').limit(4)
575 user_logs = UserLog.query().order_by('-user_log_id').limit(4)
571 actions = [log for log in user_logs]
576 actions = [log for log in user_logs]
572 assert actions[-1].action == 'user.push'
577 assert actions[-1].action == 'user.push'
573 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
578 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
574
579
575 # Check post_push rcextension was really executed
580 # Check post_push rcextension was really executed
576 push_calls = rhodecode.EXTENSIONS.calls['post_push']
581 push_calls = rhodecode.EXTENSIONS.calls['post_push']
577 assert len(push_calls) == 1
582 assert len(push_calls) == 1
578 unused_last_call_args, last_call_kwargs = push_calls[0]
583 unused_last_call_args, last_call_kwargs = push_calls[0]
579 assert last_call_kwargs['action'] == 'push'
584 assert last_call_kwargs['action'] == 'push'
580 assert last_call_kwargs['pushed_revs'] == pr_commit_ids
585 assert last_call_kwargs['pushed_revs'] == pr_commit_ids
581
586
582 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
587 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
583 pull_request = pr_util.create_pull_request(mergeable=False)
588 pull_request = pr_util.create_pull_request(mergeable=False)
584 pull_request_id = pull_request.pull_request_id
589 pull_request_id = pull_request.pull_request_id
585 pull_request = PullRequest.get(pull_request_id)
590 pull_request = PullRequest.get(pull_request_id)
586
591
587 response = self.app.post(
592 response = self.app.post(
588 route_path('pullrequest_merge',
593 route_path('pullrequest_merge',
589 repo_name=pull_request.target_repo.scm_instance().name,
594 repo_name=pull_request.target_repo.scm_instance().name,
590 pull_request_id=pull_request.pull_request_id),
595 pull_request_id=pull_request.pull_request_id),
591 params={'csrf_token': csrf_token}).follow()
596 params={'csrf_token': csrf_token}).follow()
592
597
593 assert response.status_int == 200
598 assert response.status_int == 200
594 response.mustcontain(
599 response.mustcontain(
595 'Merge is not currently possible because of below failed checks.')
600 'Merge is not currently possible because of below failed checks.')
596 response.mustcontain('Server-side pull request merging is disabled.')
601 response.mustcontain('Server-side pull request merging is disabled.')
597
602
598 @pytest.mark.skip_backends('svn')
603 @pytest.mark.skip_backends('svn')
599 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
604 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
600 pull_request = pr_util.create_pull_request(mergeable=True)
605 pull_request = pr_util.create_pull_request(mergeable=True)
601 pull_request_id = pull_request.pull_request_id
606 pull_request_id = pull_request.pull_request_id
602 repo_name = pull_request.target_repo.scm_instance().name
607 repo_name = pull_request.target_repo.scm_instance().name
603
608
604 response = self.app.post(
609 response = self.app.post(
605 route_path('pullrequest_merge',
610 route_path('pullrequest_merge',
606 repo_name=repo_name,
611 repo_name=repo_name,
607 pull_request_id=pull_request_id),
612 pull_request_id=pull_request_id),
608 params={'csrf_token': csrf_token}).follow()
613 params={'csrf_token': csrf_token}).follow()
609
614
610 assert response.status_int == 200
615 assert response.status_int == 200
611
616
612 response.mustcontain(
617 response.mustcontain(
613 'Merge is not currently possible because of below failed checks.')
618 'Merge is not currently possible because of below failed checks.')
614 response.mustcontain('Pull request reviewer approval is pending.')
619 response.mustcontain('Pull request reviewer approval is pending.')
615
620
616 def test_merge_pull_request_renders_failure_reason(
621 def test_merge_pull_request_renders_failure_reason(
617 self, user_regular, csrf_token, pr_util):
622 self, user_regular, csrf_token, pr_util):
618 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
623 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
619 pull_request_id = pull_request.pull_request_id
624 pull_request_id = pull_request.pull_request_id
620 repo_name = pull_request.target_repo.scm_instance().name
625 repo_name = pull_request.target_repo.scm_instance().name
621
626
622 model_patcher = mock.patch.multiple(
627 model_patcher = mock.patch.multiple(
623 PullRequestModel,
628 PullRequestModel,
624 merge_repo=mock.Mock(return_value=MergeResponse(
629 merge_repo=mock.Mock(return_value=MergeResponse(
625 True, False, 'STUB_COMMIT_ID', MergeFailureReason.PUSH_FAILED)),
630 True, False, 'STUB_COMMIT_ID', MergeFailureReason.PUSH_FAILED)),
626 merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE')))
631 merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE')))
627
632
628 with model_patcher:
633 with model_patcher:
629 response = self.app.post(
634 response = self.app.post(
630 route_path('pullrequest_merge',
635 route_path('pullrequest_merge',
631 repo_name=repo_name,
636 repo_name=repo_name,
632 pull_request_id=pull_request_id),
637 pull_request_id=pull_request_id),
633 params={'csrf_token': csrf_token}, status=302)
638 params={'csrf_token': csrf_token}, status=302)
634
639
635 assert_session_flash(response, PullRequestModel.MERGE_STATUS_MESSAGES[
640 assert_session_flash(response, PullRequestModel.MERGE_STATUS_MESSAGES[
636 MergeFailureReason.PUSH_FAILED])
641 MergeFailureReason.PUSH_FAILED])
637
642
638 def test_update_source_revision(self, backend, csrf_token):
643 def test_update_source_revision(self, backend, csrf_token):
639 commits = [
644 commits = [
640 {'message': 'ancestor'},
645 {'message': 'ancestor'},
641 {'message': 'change'},
646 {'message': 'change'},
642 {'message': 'change-2'},
647 {'message': 'change-2'},
643 ]
648 ]
644 commit_ids = backend.create_master_repo(commits)
649 commit_ids = backend.create_master_repo(commits)
645 target = backend.create_repo(heads=['ancestor'])
650 target = backend.create_repo(heads=['ancestor'])
646 source = backend.create_repo(heads=['change'])
651 source = backend.create_repo(heads=['change'])
647
652
648 # create a pr from the 'change' commit in source to the 'ancestor' commit in target
653 # create a pr from the 'change' commit in source to the 'ancestor' commit in target
649 pull_request = PullRequest()
654 pull_request = PullRequest()
650 pull_request.source_repo = source
655 pull_request.source_repo = source
651 # TODO: johbo: Make sure that we write the source ref this way!
656 # TODO: johbo: Make sure that we write the source ref this way!
652 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
657 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
653 branch=backend.default_branch_name, commit_id=commit_ids['change'])
658 branch=backend.default_branch_name, commit_id=commit_ids['change'])
654 pull_request.target_repo = target
659 pull_request.target_repo = target
655
660
656 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
661 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
657 branch=backend.default_branch_name,
662 branch=backend.default_branch_name,
658 commit_id=commit_ids['ancestor'])
663 commit_id=commit_ids['ancestor'])
659 pull_request.revisions = [commit_ids['change']]
664 pull_request.revisions = [commit_ids['change']]
660 pull_request.title = u"Test"
665 pull_request.title = u"Test"
661 pull_request.description = u"Description"
666 pull_request.description = u"Description"
662 pull_request.author = UserModel().get_by_username(
667 pull_request.author = UserModel().get_by_username(
663 TEST_USER_ADMIN_LOGIN)
668 TEST_USER_ADMIN_LOGIN)
664 Session().add(pull_request)
669 Session().add(pull_request)
665 Session().commit()
670 Session().commit()
666 pull_request_id = pull_request.pull_request_id
671 pull_request_id = pull_request.pull_request_id
667
672
668 # source has ancestor - change - change-2
673 # source has ancestor - change - change-2
669 backend.pull_heads(source, heads=['change-2'])
674 backend.pull_heads(source, heads=['change-2'])
670
675
671 # update PR
676 # update PR
672 self.app.post(
677 self.app.post(
673 route_path('pullrequest_update',
678 route_path('pullrequest_update',
674 repo_name=target.repo_name,
679 repo_name=target.repo_name,
675 pull_request_id=pull_request_id),
680 pull_request_id=pull_request_id),
676 params={'update_commits': 'true',
681 params={'update_commits': 'true',
677 'csrf_token': csrf_token})
682 'csrf_token': csrf_token})
678
683
679 # check that we now have both revisions
684 # check that we now have both revisions
680 pull_request = PullRequest.get(pull_request_id)
685 pull_request = PullRequest.get(pull_request_id)
681 assert pull_request.revisions == [
686 assert pull_request.revisions == [
682 commit_ids['change-2'], commit_ids['change']]
687 commit_ids['change-2'], commit_ids['change']]
683
688
684 # TODO: johbo: this should be a test on its own
689 # TODO: johbo: this should be a test on its own
685 response = self.app.get(route_path(
690 response = self.app.get(route_path(
686 'pullrequest_new',
691 'pullrequest_new',
687 repo_name=target.repo_name))
692 repo_name=target.repo_name))
688 assert response.status_int == 200
693 assert response.status_int == 200
689 assert 'Pull request updated to' in response.body
694 assert 'Pull request updated to' in response.body
690 assert 'with 1 added, 0 removed commits.' in response.body
695 assert 'with 1 added, 0 removed commits.' in response.body
691
696
692 def test_update_target_revision(self, backend, csrf_token):
697 def test_update_target_revision(self, backend, csrf_token):
693 commits = [
698 commits = [
694 {'message': 'ancestor'},
699 {'message': 'ancestor'},
695 {'message': 'change'},
700 {'message': 'change'},
696 {'message': 'ancestor-new', 'parents': ['ancestor']},
701 {'message': 'ancestor-new', 'parents': ['ancestor']},
697 {'message': 'change-rebased'},
702 {'message': 'change-rebased'},
698 ]
703 ]
699 commit_ids = backend.create_master_repo(commits)
704 commit_ids = backend.create_master_repo(commits)
700 target = backend.create_repo(heads=['ancestor'])
705 target = backend.create_repo(heads=['ancestor'])
701 source = backend.create_repo(heads=['change'])
706 source = backend.create_repo(heads=['change'])
702
707
703 # create a pr from the 'change' commit in source to the 'ancestor' commit in target
708 # create a pr from the 'change' commit in source to the 'ancestor' commit in target
704 pull_request = PullRequest()
709 pull_request = PullRequest()
705 pull_request.source_repo = source
710 pull_request.source_repo = source
706 # TODO: johbo: Make sure that we write the source ref this way!
711 # TODO: johbo: Make sure that we write the source ref this way!
707 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
712 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
708 branch=backend.default_branch_name, commit_id=commit_ids['change'])
713 branch=backend.default_branch_name, commit_id=commit_ids['change'])
709 pull_request.target_repo = target
714 pull_request.target_repo = target
710 # TODO: johbo: Target ref should be branch based, since tip can jump
715 # TODO: johbo: Target ref should be branch based, since tip can jump
711 # from branch to branch
716 # from branch to branch
712 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
717 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
713 branch=backend.default_branch_name,
718 branch=backend.default_branch_name,
714 commit_id=commit_ids['ancestor'])
719 commit_id=commit_ids['ancestor'])
715 pull_request.revisions = [commit_ids['change']]
720 pull_request.revisions = [commit_ids['change']]
716 pull_request.title = u"Test"
721 pull_request.title = u"Test"
717 pull_request.description = u"Description"
722 pull_request.description = u"Description"
718 pull_request.author = UserModel().get_by_username(
723 pull_request.author = UserModel().get_by_username(
719 TEST_USER_ADMIN_LOGIN)
724 TEST_USER_ADMIN_LOGIN)
720 Session().add(pull_request)
725 Session().add(pull_request)
721 Session().commit()
726 Session().commit()
722 pull_request_id = pull_request.pull_request_id
727 pull_request_id = pull_request.pull_request_id
723
728
724 # target has ancestor - ancestor-new
729 # target has ancestor - ancestor-new
725 # source has ancestor - ancestor-new - change-rebased
730 # source has ancestor - ancestor-new - change-rebased
726 backend.pull_heads(target, heads=['ancestor-new'])
731 backend.pull_heads(target, heads=['ancestor-new'])
727 backend.pull_heads(source, heads=['change-rebased'])
732 backend.pull_heads(source, heads=['change-rebased'])
728
733
729 # update PR
734 # update PR
730 self.app.post(
735 self.app.post(
731 route_path('pullrequest_update',
736 route_path('pullrequest_update',
732 repo_name=target.repo_name,
737 repo_name=target.repo_name,
733 pull_request_id=pull_request_id),
738 pull_request_id=pull_request_id),
734 params={'update_commits': 'true',
739 params={'update_commits': 'true',
735 'csrf_token': csrf_token},
740 'csrf_token': csrf_token},
736 status=200)
741 status=200)
737
742
738 # check that only the rebased revision remains and the target ref was updated
743 # check that only the rebased revision remains and the target ref was updated
739 pull_request = PullRequest.get(pull_request_id)
744 pull_request = PullRequest.get(pull_request_id)
740 assert pull_request.revisions == [commit_ids['change-rebased']]
745 assert pull_request.revisions == [commit_ids['change-rebased']]
741 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
746 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
742 branch=backend.default_branch_name,
747 branch=backend.default_branch_name,
743 commit_id=commit_ids['ancestor-new'])
748 commit_id=commit_ids['ancestor-new'])
744
749
745 # TODO: johbo: This should be a test on its own
750 # TODO: johbo: This should be a test on its own
746 response = self.app.get(route_path(
751 response = self.app.get(route_path(
747 'pullrequest_new',
752 'pullrequest_new',
748 repo_name=target.repo_name))
753 repo_name=target.repo_name))
749 assert response.status_int == 200
754 assert response.status_int == 200
750 assert 'Pull request updated to' in response.body
755 assert 'Pull request updated to' in response.body
751 assert 'with 1 added, 1 removed commits.' in response.body
756 assert 'with 1 added, 1 removed commits.' in response.body
752
757
753 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
758 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
754 backend = backend_git
759 backend = backend_git
755 commits = [
760 commits = [
756 {'message': 'master-commit-1'},
761 {'message': 'master-commit-1'},
757 {'message': 'master-commit-2-change-1'},
762 {'message': 'master-commit-2-change-1'},
758 {'message': 'master-commit-3-change-2'},
763 {'message': 'master-commit-3-change-2'},
759
764
760 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
765 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
761 {'message': 'feat-commit-2'},
766 {'message': 'feat-commit-2'},
762 ]
767 ]
763 commit_ids = backend.create_master_repo(commits)
768 commit_ids = backend.create_master_repo(commits)
764 target = backend.create_repo(heads=['master-commit-3-change-2'])
769 target = backend.create_repo(heads=['master-commit-3-change-2'])
765 source = backend.create_repo(heads=['feat-commit-2'])
770 source = backend.create_repo(heads=['feat-commit-2'])
766
771
767 # create a pr from the source repo into the target repo
772 # create a pr from the source repo into the target repo
768 pull_request = PullRequest()
773 pull_request = PullRequest()
769 pull_request.source_repo = source
774 pull_request.source_repo = source
770 # TODO: johbo: Make sure that we write the source ref this way!
775 # TODO: johbo: Make sure that we write the source ref this way!
771 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
776 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
772 branch=backend.default_branch_name,
777 branch=backend.default_branch_name,
773 commit_id=commit_ids['master-commit-3-change-2'])
778 commit_id=commit_ids['master-commit-3-change-2'])
774
779
775 pull_request.target_repo = target
780 pull_request.target_repo = target
776 # TODO: johbo: Target ref should be branch based, since tip can jump
781 # TODO: johbo: Target ref should be branch based, since tip can jump
777 # from branch to branch
782 # from branch to branch
778 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
783 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
779 branch=backend.default_branch_name,
784 branch=backend.default_branch_name,
780 commit_id=commit_ids['feat-commit-2'])
785 commit_id=commit_ids['feat-commit-2'])
781
786
782 pull_request.revisions = [
787 pull_request.revisions = [
783 commit_ids['feat-commit-1'],
788 commit_ids['feat-commit-1'],
784 commit_ids['feat-commit-2']
789 commit_ids['feat-commit-2']
785 ]
790 ]
786 pull_request.title = u"Test"
791 pull_request.title = u"Test"
787 pull_request.description = u"Description"
792 pull_request.description = u"Description"
788 pull_request.author = UserModel().get_by_username(
793 pull_request.author = UserModel().get_by_username(
789 TEST_USER_ADMIN_LOGIN)
794 TEST_USER_ADMIN_LOGIN)
790 Session().add(pull_request)
795 Session().add(pull_request)
791 Session().commit()
796 Session().commit()
792 pull_request_id = pull_request.pull_request_id
797 pull_request_id = pull_request.pull_request_id
793
798
794 # PR is created, now we simulate a force-push into target,
799 # PR is created, now we simulate a force-push into target,
795 # that drops the last 2 commits
800 # that drops the last 2 commits
796 vcsrepo = target.scm_instance()
801 vcsrepo = target.scm_instance()
797 vcsrepo.config.clear_section('hooks')
802 vcsrepo.config.clear_section('hooks')
798 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
803 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
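# note: the 'hooks' config section is cleared above, presumably so this low-level git call does not trigger RhodeCode's hook callbacks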
799
804
800 # update PR
805 # update PR
801 self.app.post(
806 self.app.post(
802 route_path('pullrequest_update',
807 route_path('pullrequest_update',
803 repo_name=target.repo_name,
808 repo_name=target.repo_name,
804 pull_request_id=pull_request_id),
809 pull_request_id=pull_request_id),
805 params={'update_commits': 'true',
810 params={'update_commits': 'true',
806 'csrf_token': csrf_token},
811 'csrf_token': csrf_token},
807 status=200)
812 status=200)
808
813
809 response = self.app.get(route_path(
814 response = self.app.get(route_path(
810 'pullrequest_new',
815 'pullrequest_new',
811 repo_name=target.repo_name))
816 repo_name=target.repo_name))
812 assert response.status_int == 200
817 assert response.status_int == 200
813 response.mustcontain('Pull request updated to')
818 response.mustcontain('Pull request updated to')
814 response.mustcontain('with 0 added, 0 removed commits.')
819 response.mustcontain('with 0 added, 0 removed commits.')
815
820
816 def test_update_of_ancestor_reference(self, backend, csrf_token):
821 def test_update_of_ancestor_reference(self, backend, csrf_token):
817 commits = [
822 commits = [
818 {'message': 'ancestor'},
823 {'message': 'ancestor'},
819 {'message': 'change'},
824 {'message': 'change'},
820 {'message': 'change-2'},
825 {'message': 'change-2'},
821 {'message': 'ancestor-new', 'parents': ['ancestor']},
826 {'message': 'ancestor-new', 'parents': ['ancestor']},
822 {'message': 'change-rebased'},
827 {'message': 'change-rebased'},
823 ]
828 ]
824 commit_ids = backend.create_master_repo(commits)
829 commit_ids = backend.create_master_repo(commits)
825 target = backend.create_repo(heads=['ancestor'])
830 target = backend.create_repo(heads=['ancestor'])
826 source = backend.create_repo(heads=['change'])
831 source = backend.create_repo(heads=['change'])
827
832
828 # create a pr from the 'change' commit in source to the 'ancestor' commit in target
833 # create a pr from the 'change' commit in source to the 'ancestor' commit in target
829 pull_request = PullRequest()
834 pull_request = PullRequest()
830 pull_request.source_repo = source
835 pull_request.source_repo = source
831 # TODO: johbo: Make sure that we write the source ref this way!
836 # TODO: johbo: Make sure that we write the source ref this way!
832 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
837 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
833 branch=backend.default_branch_name,
838 branch=backend.default_branch_name,
834 commit_id=commit_ids['change'])
839 commit_id=commit_ids['change'])
835 pull_request.target_repo = target
840 pull_request.target_repo = target
836 # TODO: johbo: Target ref should be branch based, since tip can jump
841 # TODO: johbo: Target ref should be branch based, since tip can jump
837 # from branch to branch
842 # from branch to branch
838 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
843 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
839 branch=backend.default_branch_name,
844 branch=backend.default_branch_name,
840 commit_id=commit_ids['ancestor'])
845 commit_id=commit_ids['ancestor'])
841 pull_request.revisions = [commit_ids['change']]
846 pull_request.revisions = [commit_ids['change']]
842 pull_request.title = u"Test"
847 pull_request.title = u"Test"
843 pull_request.description = u"Description"
848 pull_request.description = u"Description"
844 pull_request.author = UserModel().get_by_username(
849 pull_request.author = UserModel().get_by_username(
845 TEST_USER_ADMIN_LOGIN)
850 TEST_USER_ADMIN_LOGIN)
846 Session().add(pull_request)
851 Session().add(pull_request)
847 Session().commit()
852 Session().commit()
848 pull_request_id = pull_request.pull_request_id
853 pull_request_id = pull_request.pull_request_id
849
854
850 # target has ancestor - ancestor-new
855 # target has ancestor - ancestor-new
851 # source has ancestor - ancestor-new - change-rebased
856 # source has ancestor - ancestor-new - change-rebased
852 backend.pull_heads(target, heads=['ancestor-new'])
857 backend.pull_heads(target, heads=['ancestor-new'])
853 backend.pull_heads(source, heads=['change-rebased'])
858 backend.pull_heads(source, heads=['change-rebased'])
854
859
855 # update PR
860 # update PR
856 self.app.post(
861 self.app.post(
857 route_path('pullrequest_update',
862 route_path('pullrequest_update',
858 repo_name=target.repo_name,
863 repo_name=target.repo_name,
859 pull_request_id=pull_request_id),
864 pull_request_id=pull_request_id),
860 params={'update_commits': 'true',
865 params={'update_commits': 'true',
861 'csrf_token': csrf_token},
866 'csrf_token': csrf_token},
862 status=200)
867 status=200)
863
868
864 # Expect the target reference to be updated correctly
869 # Expect the target reference to be updated correctly
865 pull_request = PullRequest.get(pull_request_id)
870 pull_request = PullRequest.get(pull_request_id)
866 assert pull_request.revisions == [commit_ids['change-rebased']]
871 assert pull_request.revisions == [commit_ids['change-rebased']]
867 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
872 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
868 branch=backend.default_branch_name,
873 branch=backend.default_branch_name,
869 commit_id=commit_ids['ancestor-new'])
874 commit_id=commit_ids['ancestor-new'])
870 assert pull_request.target_ref == expected_target_ref
875 assert pull_request.target_ref == expected_target_ref
871
876
872 def test_remove_pull_request_branch(self, backend_git, csrf_token):
877 def test_remove_pull_request_branch(self, backend_git, csrf_token):
873 branch_name = 'development'
878 branch_name = 'development'
874 commits = [
879 commits = [
875 {'message': 'initial-commit'},
880 {'message': 'initial-commit'},
876 {'message': 'old-feature'},
881 {'message': 'old-feature'},
877 {'message': 'new-feature', 'branch': branch_name},
882 {'message': 'new-feature', 'branch': branch_name},
878 ]
883 ]
879 repo = backend_git.create_repo(commits)
884 repo = backend_git.create_repo(commits)
880 commit_ids = backend_git.commit_ids
885 commit_ids = backend_git.commit_ids
881
886
882 pull_request = PullRequest()
887 pull_request = PullRequest()
883 pull_request.source_repo = repo
888 pull_request.source_repo = repo
884 pull_request.target_repo = repo
889 pull_request.target_repo = repo
885 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
890 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
886 branch=branch_name, commit_id=commit_ids['new-feature'])
891 branch=branch_name, commit_id=commit_ids['new-feature'])
887 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
892 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
888 branch=backend_git.default_branch_name,
893 branch=backend_git.default_branch_name,
889 commit_id=commit_ids['old-feature'])
894 commit_id=commit_ids['old-feature'])
890 pull_request.revisions = [commit_ids['new-feature']]
895 pull_request.revisions = [commit_ids['new-feature']]
891 pull_request.title = u"Test"
896 pull_request.title = u"Test"
892 pull_request.description = u"Description"
897 pull_request.description = u"Description"
893 pull_request.author = UserModel().get_by_username(
898 pull_request.author = UserModel().get_by_username(
894 TEST_USER_ADMIN_LOGIN)
899 TEST_USER_ADMIN_LOGIN)
895 Session().add(pull_request)
900 Session().add(pull_request)
896 Session().commit()
901 Session().commit()
897
902
898 vcs = repo.scm_instance()
903 vcs = repo.scm_instance()
899 vcs.remove_ref('refs/heads/{}'.format(branch_name))
904 vcs.remove_ref('refs/heads/{}'.format(branch_name))
900
905
901 response = self.app.get(route_path(
906 response = self.app.get(route_path(
902 'pullrequest_show',
907 'pullrequest_show',
903 repo_name=repo.repo_name,
908 repo_name=repo.repo_name,
904 pull_request_id=pull_request.pull_request_id))
909 pull_request_id=pull_request.pull_request_id))
905
910
906 assert response.status_int == 200
911 assert response.status_int == 200
907 assert_response = AssertResponse(response)
912 assert_response = AssertResponse(response)
908 assert_response.element_contains(
913 assert_response.element_contains(
909 '#changeset_compare_view_content .alert strong',
914 '#changeset_compare_view_content .alert strong',
910 'Missing commits')
915 'Missing commits')
911 assert_response.element_contains(
916 assert_response.element_contains(
912 '#changeset_compare_view_content .alert',
917 '#changeset_compare_view_content .alert',
913 'This pull request cannot be displayed, because one or more'
918 'This pull request cannot be displayed, because one or more'
914 ' commits no longer exist in the source repository.')
919 ' commits no longer exist in the source repository.')
915
920
916 def test_strip_commits_from_pull_request(
921 def test_strip_commits_from_pull_request(
917 self, backend, pr_util, csrf_token):
922 self, backend, pr_util, csrf_token):
918 commits = [
923 commits = [
919 {'message': 'initial-commit'},
924 {'message': 'initial-commit'},
920 {'message': 'old-feature'},
925 {'message': 'old-feature'},
921 {'message': 'new-feature', 'parents': ['initial-commit']},
926 {'message': 'new-feature', 'parents': ['initial-commit']},
922 ]
927 ]
923 pull_request = pr_util.create_pull_request(
928 pull_request = pr_util.create_pull_request(
924 commits, target_head='initial-commit', source_head='new-feature',
929 commits, target_head='initial-commit', source_head='new-feature',
925 revisions=['new-feature'])
930 revisions=['new-feature'])
926
931
927 vcs = pr_util.source_repository.scm_instance()
932 vcs = pr_util.source_repository.scm_instance()
928 if backend.alias == 'git':
933 if backend.alias == 'git':
929 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
934 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
930 else:
935 else:
931 vcs.strip(pr_util.commit_ids['new-feature'])
936 vcs.strip(pr_util.commit_ids['new-feature'])
932
937
933 response = self.app.get(route_path(
938 response = self.app.get(route_path(
934 'pullrequest_show',
939 'pullrequest_show',
935 repo_name=pr_util.target_repository.repo_name,
940 repo_name=pr_util.target_repository.repo_name,
936 pull_request_id=pull_request.pull_request_id))
941 pull_request_id=pull_request.pull_request_id))
937
942
938 assert response.status_int == 200
943 assert response.status_int == 200
939 assert_response = AssertResponse(response)
944 assert_response = AssertResponse(response)
940 assert_response.element_contains(
945 assert_response.element_contains(
941 '#changeset_compare_view_content .alert strong',
946 '#changeset_compare_view_content .alert strong',
942 'Missing commits')
947 'Missing commits')
943 assert_response.element_contains(
948 assert_response.element_contains(
944 '#changeset_compare_view_content .alert',
949 '#changeset_compare_view_content .alert',
945 'This pull request cannot be displayed, because one or more'
950 'This pull request cannot be displayed, because one or more'
946 ' commits no longer exist in the source repository.')
951 ' commits no longer exist in the source repository.')
947 assert_response.element_contains(
952 assert_response.element_contains(
948 '#update_commits',
953 '#update_commits',
949 'Update commits')
954 'Update commits')
950
955
951 def test_strip_commits_and_update(
956 def test_strip_commits_and_update(
952 self, backend, pr_util, csrf_token):
957 self, backend, pr_util, csrf_token):
953 commits = [
958 commits = [
954 {'message': 'initial-commit'},
959 {'message': 'initial-commit'},
955 {'message': 'old-feature'},
960 {'message': 'old-feature'},
956 {'message': 'new-feature', 'parents': ['old-feature']},
961 {'message': 'new-feature', 'parents': ['old-feature']},
957 ]
962 ]
958 pull_request = pr_util.create_pull_request(
963 pull_request = pr_util.create_pull_request(
959 commits, target_head='old-feature', source_head='new-feature',
964 commits, target_head='old-feature', source_head='new-feature',
960 revisions=['new-feature'], mergeable=True)
965 revisions=['new-feature'], mergeable=True)
961
966
962 vcs = pr_util.source_repository.scm_instance()
967 vcs = pr_util.source_repository.scm_instance()
963 if backend.alias == 'git':
968 if backend.alias == 'git':
964 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
969 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
965 else:
970 else:
966 vcs.strip(pr_util.commit_ids['new-feature'])
971 vcs.strip(pr_util.commit_ids['new-feature'])
967
972
968 response = self.app.post(
973 response = self.app.post(
969 route_path('pullrequest_update',
974 route_path('pullrequest_update',
970 repo_name=pull_request.target_repo.repo_name,
975 repo_name=pull_request.target_repo.repo_name,
971 pull_request_id=pull_request.pull_request_id),
976 pull_request_id=pull_request.pull_request_id),
972 params={'update_commits': 'true',
977 params={'update_commits': 'true',
973 'csrf_token': csrf_token})
978 'csrf_token': csrf_token})
974
979
975 assert response.status_int == 200
980 assert response.status_int == 200
976 assert response.body == 'true'
981 assert response.body == 'true'
977
982
978 # Make sure that after update, it won't raise 500 errors
983 # Make sure that after update, it won't raise 500 errors
979 response = self.app.get(route_path(
984 response = self.app.get(route_path(
980 'pullrequest_show',
985 'pullrequest_show',
981 repo_name=pr_util.target_repository.repo_name,
986 repo_name=pr_util.target_repository.repo_name,
982 pull_request_id=pull_request.pull_request_id))
987 pull_request_id=pull_request.pull_request_id))
983
988
984 assert response.status_int == 200
989 assert response.status_int == 200
985 assert_response = AssertResponse(response)
990 assert_response = AssertResponse(response)
986 assert_response.element_contains(
991 assert_response.element_contains(
987 '#changeset_compare_view_content .alert strong',
992 '#changeset_compare_view_content .alert strong',
988 'Missing commits')
993 'Missing commits')
989
994
990 def test_branch_is_a_link(self, pr_util):
995 def test_branch_is_a_link(self, pr_util):
991 pull_request = pr_util.create_pull_request()
996 pull_request = pr_util.create_pull_request()
992 pull_request.source_ref = 'branch:origin:1234567890abcdef'
997 pull_request.source_ref = 'branch:origin:1234567890abcdef'
993 pull_request.target_ref = 'branch:target:abcdef1234567890'
998 pull_request.target_ref = 'branch:target:abcdef1234567890'
994 Session().add(pull_request)
999 Session().add(pull_request)
995 Session().commit()
1000 Session().commit()
996
1001
997 response = self.app.get(route_path(
1002 response = self.app.get(route_path(
998 'pullrequest_show',
1003 'pullrequest_show',
999 repo_name=pull_request.target_repo.scm_instance().name,
1004 repo_name=pull_request.target_repo.scm_instance().name,
1000 pull_request_id=pull_request.pull_request_id))
1005 pull_request_id=pull_request.pull_request_id))
1001 assert response.status_int == 200
1006 assert response.status_int == 200
1002 assert_response = AssertResponse(response)
1007 assert_response = AssertResponse(response)
1003
1008
1004 origin = assert_response.get_element('.pr-origininfo .tag')
1009 origin = assert_response.get_element('.pr-origininfo .tag')
1005 origin_children = origin.getchildren()
1010 origin_children = origin.getchildren()
1006 assert len(origin_children) == 1
1011 assert len(origin_children) == 1
1007 target = assert_response.get_element('.pr-targetinfo .tag')
1012 target = assert_response.get_element('.pr-targetinfo .tag')
1008 target_children = target.getchildren()
1013 target_children = target.getchildren()
1009 assert len(target_children) == 1
1014 assert len(target_children) == 1
1010
1015
1011 expected_origin_link = route_path(
1016 expected_origin_link = route_path(
1012 'repo_changelog',
1017 'repo_changelog',
1013 repo_name=pull_request.source_repo.scm_instance().name,
1018 repo_name=pull_request.source_repo.scm_instance().name,
1014 params=dict(branch='origin'))
1019 params=dict(branch='origin'))
1015 expected_target_link = route_path(
1020 expected_target_link = route_path(
1016 'repo_changelog',
1021 'repo_changelog',
1017 repo_name=pull_request.target_repo.scm_instance().name,
1022 repo_name=pull_request.target_repo.scm_instance().name,
1018 params=dict(branch='target'))
1023 params=dict(branch='target'))
1019 assert origin_children[0].attrib['href'] == expected_origin_link
1024 assert origin_children[0].attrib['href'] == expected_origin_link
1020 assert origin_children[0].text == 'branch: origin'
1025 assert origin_children[0].text == 'branch: origin'
1021 assert target_children[0].attrib['href'] == expected_target_link
1026 assert target_children[0].attrib['href'] == expected_target_link
1022 assert target_children[0].text == 'branch: target'
1027 assert target_children[0].text == 'branch: target'
1023
1028
1024 def test_bookmark_is_not_a_link(self, pr_util):
1029 def test_bookmark_is_not_a_link(self, pr_util):
1025 pull_request = pr_util.create_pull_request()
1030 pull_request = pr_util.create_pull_request()
1026 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1031 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1027 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1032 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1028 Session().add(pull_request)
1033 Session().add(pull_request)
1029 Session().commit()
1034 Session().commit()
1030
1035
1031 response = self.app.get(route_path(
1036 response = self.app.get(route_path(
1032 'pullrequest_show',
1037 'pullrequest_show',
1033 repo_name=pull_request.target_repo.scm_instance().name,
1038 repo_name=pull_request.target_repo.scm_instance().name,
1034 pull_request_id=pull_request.pull_request_id))
1039 pull_request_id=pull_request.pull_request_id))
1035 assert response.status_int == 200
1040 assert response.status_int == 200
1036 assert_response = AssertResponse(response)
1041 assert_response = AssertResponse(response)
1037
1042
1038 origin = assert_response.get_element('.pr-origininfo .tag')
1043 origin = assert_response.get_element('.pr-origininfo .tag')
1039 assert origin.text.strip() == 'bookmark: origin'
1044 assert origin.text.strip() == 'bookmark: origin'
1040 assert origin.getchildren() == []
1045 assert origin.getchildren() == []
1041
1046
1042 target = assert_response.get_element('.pr-targetinfo .tag')
1047 target = assert_response.get_element('.pr-targetinfo .tag')
1043 assert target.text.strip() == 'bookmark: target'
1048 assert target.text.strip() == 'bookmark: target'
1044 assert target.getchildren() == []
1049 assert target.getchildren() == []
1045
1050
1046 def test_tag_is_not_a_link(self, pr_util):
1051 def test_tag_is_not_a_link(self, pr_util):
1047 pull_request = pr_util.create_pull_request()
1052 pull_request = pr_util.create_pull_request()
1048 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1053 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1049 pull_request.target_ref = 'tag:target:abcdef1234567890'
1054 pull_request.target_ref = 'tag:target:abcdef1234567890'
1050 Session().add(pull_request)
1055 Session().add(pull_request)
1051 Session().commit()
1056 Session().commit()
1052
1057
1053 response = self.app.get(route_path(
1058 response = self.app.get(route_path(
1054 'pullrequest_show',
1059 'pullrequest_show',
1055 repo_name=pull_request.target_repo.scm_instance().name,
1060 repo_name=pull_request.target_repo.scm_instance().name,
1056 pull_request_id=pull_request.pull_request_id))
1061 pull_request_id=pull_request.pull_request_id))
1057 assert response.status_int == 200
1062 assert response.status_int == 200
1058 assert_response = AssertResponse(response)
1063 assert_response = AssertResponse(response)
1059
1064
1060 origin = assert_response.get_element('.pr-origininfo .tag')
1065 origin = assert_response.get_element('.pr-origininfo .tag')
1061 assert origin.text.strip() == 'tag: origin'
1066 assert origin.text.strip() == 'tag: origin'
1062 assert origin.getchildren() == []
1067 assert origin.getchildren() == []
1063
1068
1064 target = assert_response.get_element('.pr-targetinfo .tag')
1069 target = assert_response.get_element('.pr-targetinfo .tag')
1065 assert target.text.strip() == 'tag: target'
1070 assert target.text.strip() == 'tag: target'
1066 assert target.getchildren() == []
1071 assert target.getchildren() == []
1067
1072
1068 @pytest.mark.parametrize('mergeable', [True, False])
1073 @pytest.mark.parametrize('mergeable', [True, False])
1069 def test_shadow_repository_link(
1074 def test_shadow_repository_link(
1070 self, mergeable, pr_util, http_host_only_stub):
1075 self, mergeable, pr_util, http_host_only_stub):
1071 """
1076 """
1072 Check that the pull request summary page displays a link to the shadow
1077 Check that the pull request summary page displays a link to the shadow
1073 repository if the pull request is mergeable. If it is not mergeable,
1078 repository if the pull request is mergeable. If it is not mergeable,
1074 the link should not be displayed.
1079 the link should not be displayed.
1075 """
1080 """
1076 pull_request = pr_util.create_pull_request(
1081 pull_request = pr_util.create_pull_request(
1077 mergeable=mergeable, enable_notifications=False)
1082 mergeable=mergeable, enable_notifications=False)
1078 target_repo = pull_request.target_repo.scm_instance()
1083 target_repo = pull_request.target_repo.scm_instance()
1079 pr_id = pull_request.pull_request_id
1084 pr_id = pull_request.pull_request_id
1080 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1085 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1081 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1086 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1082
1087
1083 response = self.app.get(route_path(
1088 response = self.app.get(route_path(
1084 'pullrequest_show',
1089 'pullrequest_show',
1085 repo_name=target_repo.name,
1090 repo_name=target_repo.name,
1086 pull_request_id=pr_id))
1091 pull_request_id=pr_id))
1087
1092
1088 assertr = AssertResponse(response)
1093 assertr = AssertResponse(response)
1089 if mergeable:
1094 if mergeable:
1090 assertr.element_value_contains('input.pr-mergeinfo', shadow_url)
1095 assertr.element_value_contains('input.pr-mergeinfo', shadow_url)
1091 assertr.element_value_contains('input.pr-mergeinfo ', 'pr-merge')
1096 assertr.element_value_contains('input.pr-mergeinfo ', 'pr-merge')
1092 else:
1097 else:
1093 assertr.no_element_exists('.pr-mergeinfo')
1098 assertr.no_element_exists('.pr-mergeinfo')
1094
1099
1095
1100
1096 @pytest.mark.usefixtures('app')
1101 @pytest.mark.usefixtures('app')
1097 @pytest.mark.backends("git", "hg")
1102 @pytest.mark.backends("git", "hg")
1098 class TestPullrequestsControllerDelete(object):
1103 class TestPullrequestsControllerDelete(object):
1099 def test_pull_request_delete_button_permissions_admin(
1104 def test_pull_request_delete_button_permissions_admin(
1100 self, autologin_user, user_admin, pr_util):
1105 self, autologin_user, user_admin, pr_util):
1101 pull_request = pr_util.create_pull_request(
1106 pull_request = pr_util.create_pull_request(
1102 author=user_admin.username, enable_notifications=False)
1107 author=user_admin.username, enable_notifications=False)
1103
1108
1104 response = self.app.get(route_path(
1109 response = self.app.get(route_path(
1105 'pullrequest_show',
1110 'pullrequest_show',
1106 repo_name=pull_request.target_repo.scm_instance().name,
1111 repo_name=pull_request.target_repo.scm_instance().name,
1107 pull_request_id=pull_request.pull_request_id))
1112 pull_request_id=pull_request.pull_request_id))
1108
1113
1109 response.mustcontain('id="delete_pullrequest"')
1114 response.mustcontain('id="delete_pullrequest"')
1110 response.mustcontain('Confirm to delete this pull request')
1115 response.mustcontain('Confirm to delete this pull request')
1111
1116
1112 def test_pull_request_delete_button_permissions_owner(
1117 def test_pull_request_delete_button_permissions_owner(
1113 self, autologin_regular_user, user_regular, pr_util):
1118 self, autologin_regular_user, user_regular, pr_util):
1114 pull_request = pr_util.create_pull_request(
1119 pull_request = pr_util.create_pull_request(
1115 author=user_regular.username, enable_notifications=False)
1120 author=user_regular.username, enable_notifications=False)
1116
1121
1117 response = self.app.get(route_path(
1122 response = self.app.get(route_path(
1118 'pullrequest_show',
1123 'pullrequest_show',
1119 repo_name=pull_request.target_repo.scm_instance().name,
1124 repo_name=pull_request.target_repo.scm_instance().name,
1120 pull_request_id=pull_request.pull_request_id))
1125 pull_request_id=pull_request.pull_request_id))
1121
1126
1122 response.mustcontain('id="delete_pullrequest"')
1127 response.mustcontain('id="delete_pullrequest"')
1123 response.mustcontain('Confirm to delete this pull request')
1128 response.mustcontain('Confirm to delete this pull request')
1124
1129
1125 def test_pull_request_delete_button_permissions_forbidden(
1130 def test_pull_request_delete_button_permissions_forbidden(
1126 self, autologin_regular_user, user_regular, user_admin, pr_util):
1131 self, autologin_regular_user, user_regular, user_admin, pr_util):
1127 pull_request = pr_util.create_pull_request(
1132 pull_request = pr_util.create_pull_request(
1128 author=user_admin.username, enable_notifications=False)
1133 author=user_admin.username, enable_notifications=False)
1129
1134
1130 response = self.app.get(route_path(
1135 response = self.app.get(route_path(
1131 'pullrequest_show',
1136 'pullrequest_show',
1132 repo_name=pull_request.target_repo.scm_instance().name,
1137 repo_name=pull_request.target_repo.scm_instance().name,
1133 pull_request_id=pull_request.pull_request_id))
1138 pull_request_id=pull_request.pull_request_id))
1134 response.mustcontain(no=['id="delete_pullrequest"'])
1139 response.mustcontain(no=['id="delete_pullrequest"'])
1135 response.mustcontain(no=['Confirm to delete this pull request'])
1140 response.mustcontain(no=['Confirm to delete this pull request'])
1136
1141
1137 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1142 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1138 self, autologin_regular_user, user_regular, user_admin, pr_util,
1143 self, autologin_regular_user, user_regular, user_admin, pr_util,
1139 user_util):
1144 user_util):
1140
1145
1141 pull_request = pr_util.create_pull_request(
1146 pull_request = pr_util.create_pull_request(
1142 author=user_admin.username, enable_notifications=False)
1147 author=user_admin.username, enable_notifications=False)
1143
1148
1144 user_util.grant_user_permission_to_repo(
1149 user_util.grant_user_permission_to_repo(
1145 pull_request.target_repo, user_regular,
1150 pull_request.target_repo, user_regular,
1146 'repository.write')
1151 'repository.write')
1147
1152
1148 response = self.app.get(route_path(
1153 response = self.app.get(route_path(
1149 'pullrequest_show',
1154 'pullrequest_show',
1150 repo_name=pull_request.target_repo.scm_instance().name,
1155 repo_name=pull_request.target_repo.scm_instance().name,
1151 pull_request_id=pull_request.pull_request_id))
1156 pull_request_id=pull_request.pull_request_id))
1152
1157
1153 response.mustcontain('id="open_edit_pullrequest"')
1158 response.mustcontain('id="open_edit_pullrequest"')
1154 response.mustcontain('id="delete_pullrequest"')
1159 response.mustcontain('id="delete_pullrequest"')
1155 response.mustcontain(no=['Confirm to delete this pull request'])
1160 response.mustcontain(no=['Confirm to delete this pull request'])
1156
1161
1157 def test_delete_comment_returns_404_if_comment_does_not_exist(
1162 def test_delete_comment_returns_404_if_comment_does_not_exist(
1158 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1163 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1159
1164
1160 pull_request = pr_util.create_pull_request(
1165 pull_request = pr_util.create_pull_request(
1161 author=user_admin.username, enable_notifications=False)
1166 author=user_admin.username, enable_notifications=False)
1162
1167
1163 self.app.post(
1168 self.app.post(
1164 route_path(
1169 route_path(
1165 'pullrequest_comment_delete',
1170 'pullrequest_comment_delete',
1166 repo_name=pull_request.target_repo.scm_instance().name,
1171 repo_name=pull_request.target_repo.scm_instance().name,
1167 pull_request_id=pull_request.pull_request_id,
1172 pull_request_id=pull_request.pull_request_id,
1168 comment_id=1024404),
1173 comment_id=1024404),
1169 extra_environ=xhr_header,
1174 extra_environ=xhr_header,
1170 params={'csrf_token': csrf_token},
1175 params={'csrf_token': csrf_token},
1171 status=404
1176 status=404
1172 )
1177 )
1173
1178
1174 def test_delete_comment(
1179 def test_delete_comment(
1175 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1180 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1176
1181
1177 pull_request = pr_util.create_pull_request(
1182 pull_request = pr_util.create_pull_request(
1178 author=user_admin.username, enable_notifications=False)
1183 author=user_admin.username, enable_notifications=False)
1179 comment = pr_util.create_comment()
1184 comment = pr_util.create_comment()
1180 comment_id = comment.comment_id
1185 comment_id = comment.comment_id
1181
1186
1182 response = self.app.post(
1187 response = self.app.post(
1183 route_path(
1188 route_path(
1184 'pullrequest_comment_delete',
1189 'pullrequest_comment_delete',
1185 repo_name=pull_request.target_repo.scm_instance().name,
1190 repo_name=pull_request.target_repo.scm_instance().name,
1186 pull_request_id=pull_request.pull_request_id,
1191 pull_request_id=pull_request.pull_request_id,
1187 comment_id=comment_id),
1192 comment_id=comment_id),
1188 extra_environ=xhr_header,
1193 extra_environ=xhr_header,
1189 params={'csrf_token': csrf_token},
1194 params={'csrf_token': csrf_token},
1190 status=200
1195 status=200
1191 )
1196 )
1192 assert response.body == 'true'
1197 assert response.body == 'true'
1193
1198
1194 @pytest.mark.parametrize('url_type', [
1199 @pytest.mark.parametrize('url_type', [
1195 'pullrequest_new',
1200 'pullrequest_new',
1196 'pullrequest_create',
1201 'pullrequest_create',
1197 'pullrequest_update',
1202 'pullrequest_update',
1198 'pullrequest_merge',
1203 'pullrequest_merge',
1199 ])
1204 ])
1200 def test_pull_request_is_forbidden_on_archived_repo(
1205 def test_pull_request_is_forbidden_on_archived_repo(
1201 self, autologin_user, backend, xhr_header, user_util, url_type):
1206 self, autologin_user, backend, xhr_header, user_util, url_type):
1202
1207
1203 # create a temporary repo
1208 # create a temporary repo
1204 source = user_util.create_repo(repo_type=backend.alias)
1209 source = user_util.create_repo(repo_type=backend.alias)
1205 repo_name = source.repo_name
1210 repo_name = source.repo_name
1206 repo = Repository.get_by_repo_name(repo_name)
1211 repo = Repository.get_by_repo_name(repo_name)
1207 repo.archived = True
1212 repo.archived = True
1208 Session().commit()
1213 Session().commit()
1209
1214
1210 response = self.app.get(
1215 response = self.app.get(
1211 route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)
1216 route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)
1212
1217
1213 msg = 'Action not supported for archived repository.'
1218 msg = 'Action not supported for archived repository.'
1214 assert_session_flash(response, msg)
1219 assert_session_flash(response, msg)
1215
1220
1216
1221
1217 def assert_pull_request_status(pull_request, expected_status):
1222 def assert_pull_request_status(pull_request, expected_status):
1218 status = ChangesetStatusModel().calculated_review_status(
1223 status = ChangesetStatusModel().calculated_review_status(
1219 pull_request=pull_request)
1224 pull_request=pull_request)
1220 assert status == expected_status
1225 assert status == expected_status
1221
1226
1222
1227
1223 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1228 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1224 @pytest.mark.usefixtures("autologin_user")
1229 @pytest.mark.usefixtures("autologin_user")
1225 def test_forbidden_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1230 def test_forbidden_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1226 response = app.get(
1231 response = app.get(
1227 route_path(route, repo_name=backend_svn.repo_name), status=404)
1232 route_path(route, repo_name=backend_svn.repo_name), status=404)
1228
1233
@@ -1,590 +1,590 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 import logging
22 import logging
23 import collections
23 import collections
24
24
25 from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound
25 from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound
26 from pyramid.view import view_config
26 from pyramid.view import view_config
27 from pyramid.renderers import render
27 from pyramid.renderers import render
28 from pyramid.response import Response
28 from pyramid.response import Response
29
29
30 from rhodecode.apps._base import RepoAppView
30 from rhodecode.apps._base import RepoAppView
31
31
32 from rhodecode.lib import diffs, codeblocks
32 from rhodecode.lib import diffs, codeblocks
33 from rhodecode.lib.auth import (
33 from rhodecode.lib.auth import (
34 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired)
34 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired)
35
35
36 from rhodecode.lib.compat import OrderedDict
36 from rhodecode.lib.compat import OrderedDict
37 from rhodecode.lib.diffs import cache_diff, load_cached_diff, diff_cache_exist
37 from rhodecode.lib.diffs import cache_diff, load_cached_diff, diff_cache_exist
38 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
38 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
39 import rhodecode.lib.helpers as h
39 import rhodecode.lib.helpers as h
40 from rhodecode.lib.utils2 import safe_unicode, str2bool
40 from rhodecode.lib.utils2 import safe_unicode, str2bool
41 from rhodecode.lib.vcs.backends.base import EmptyCommit
41 from rhodecode.lib.vcs.backends.base import EmptyCommit
42 from rhodecode.lib.vcs.exceptions import (
42 from rhodecode.lib.vcs.exceptions import (
43 RepositoryError, CommitDoesNotExistError)
43 RepositoryError, CommitDoesNotExistError)
44 from rhodecode.model.db import ChangesetComment, ChangesetStatus
44 from rhodecode.model.db import ChangesetComment, ChangesetStatus
45 from rhodecode.model.changeset_status import ChangesetStatusModel
45 from rhodecode.model.changeset_status import ChangesetStatusModel
46 from rhodecode.model.comment import CommentsModel
46 from rhodecode.model.comment import CommentsModel
47 from rhodecode.model.meta import Session
47 from rhodecode.model.meta import Session
48 from rhodecode.model.settings import VcsSettingsModel
48 from rhodecode.model.settings import VcsSettingsModel
49
49
50 log = logging.getLogger(__name__)
50 log = logging.getLogger(__name__)
51
51
52
52
53 def _update_with_GET(params, request):
53 def _update_with_GET(params, request):
54 for k in ['diff1', 'diff2', 'diff']:
54 for k in ['diff1', 'diff2', 'diff']:
55 params[k] += request.GET.getall(k)
55 params[k] += request.GET.getall(k)
56
56
57
57
58 def get_ignore_ws(fid, request):
58 def get_ignore_ws(fid, request):
59 ig_ws_global = request.GET.get('ignorews')
59 ig_ws_global = request.GET.get('ignorews')
60 ig_ws = filter(lambda k: k.startswith('WS'), request.GET.getall(fid))
60 ig_ws = filter(lambda k: k.startswith('WS'), request.GET.getall(fid))
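# per-file values look like 'WS:1' (matched by the 'WS' prefix); if the numeric suffix parses, it overrides the global 'ignorews' flag below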
61 if ig_ws:
61 if ig_ws:
62 try:
62 try:
63 return int(ig_ws[0].split(':')[-1])
63 return int(ig_ws[0].split(':')[-1])
64 except Exception:
64 except Exception:
65 pass
65 pass
66 return ig_ws_global
66 return ig_ws_global
67
67
68
68
69 def _ignorews_url(request, fileid=None):
69 def _ignorews_url(request, fileid=None):
70 _ = request.translate
70 _ = request.translate
71 fileid = str(fileid) if fileid else None
71 fileid = str(fileid) if fileid else None
72 params = collections.defaultdict(list)
72 params = collections.defaultdict(list)
73 _update_with_GET(params, request)
73 _update_with_GET(params, request)
74 label = _('Show whitespace')
74 label = _('Show whitespace')
75 tooltiplbl = _('Show whitespace for all diffs')
75 tooltiplbl = _('Show whitespace for all diffs')
76 ig_ws = get_ignore_ws(fileid, request)
76 ig_ws = get_ignore_ws(fileid, request)
77 ln_ctx = get_line_ctx(fileid, request)
77 ln_ctx = get_line_ctx(fileid, request)
78
78
79 if ig_ws is None:
79 if ig_ws is None:
80 params['ignorews'] += [1]
80 params['ignorews'] += [1]
81 label = _('Ignore whitespace')
81 label = _('Ignore whitespace')
82 tooltiplbl = _('Ignore whitespace for all diffs')
82 tooltiplbl = _('Ignore whitespace for all diffs')
83 ctx_key = 'context'
83 ctx_key = 'context'
84 ctx_val = ln_ctx
84 ctx_val = ln_ctx
85
85
86 # if ln_ctx was passed in, pass it along to our params
86 # if ln_ctx was passed in, pass it along to our params
87 if ln_ctx:
87 if ln_ctx:
88 params[ctx_key] += [ctx_val]
88 params[ctx_key] += [ctx_val]
89
89
90 if fileid:
90 if fileid:
91 params['anchor'] = 'a_' + fileid
91 params['anchor'] = 'a_' + fileid
92 return h.link_to(label, request.current_route_path(_query=params),
92 return h.link_to(label, request.current_route_path(_query=params),
93 title=tooltiplbl, class_='tooltip')
93 title=tooltiplbl, class_='tooltip')
94
94
95
95
96 def get_line_ctx(fid, request):
96 def get_line_ctx(fid, request):
97 ln_ctx_global = request.GET.get('context')
97 ln_ctx_global = request.GET.get('context')
98 if fid:
98 if fid:
99 ln_ctx = filter(lambda k: k.startswith('C'), request.GET.getall(fid))
99 ln_ctx = filter(lambda k: k.startswith('C'), request.GET.getall(fid))
100 else:
100 else:
101 _ln_ctx = filter(lambda k: k.startswith('C'), request.GET)
101 _ln_ctx = filter(lambda k: k.startswith('C'), request.GET)
102 ln_ctx = request.GET.get(_ln_ctx[0]) if _ln_ctx else ln_ctx_global
102 ln_ctx = request.GET.get(_ln_ctx[0]) if _ln_ctx else ln_ctx_global
103 if ln_ctx:
103 if ln_ctx:
104 ln_ctx = [ln_ctx]
104 ln_ctx = [ln_ctx]
105
105
106 if ln_ctx:
106 if ln_ctx:
107 retval = ln_ctx[0].split(':')[-1]
107 retval = ln_ctx[0].split(':')[-1]
108 else:
108 else:
109 retval = ln_ctx_global
109 retval = ln_ctx_global
110
110
111 try:
111 try:
112 return min(diffs.MAX_CONTEXT, int(retval))
112 return min(diffs.MAX_CONTEXT, int(retval))
113 except Exception:
113 except Exception:
114 return 3
114 return 3
115
115
116
116
117 def _context_url(request, fileid=None):
117 def _context_url(request, fileid=None):
118 """
118 """
119 Generates a URL for context lines.
119 Generates a URL for context lines.
120
120
121 :param fileid: optional file id; when given, the generated link anchors to that file's diff
121 :param fileid: optional file id; when given, the generated link anchors to that file's diff
122 """
122 """
123
123
124 _ = request.translate
124 _ = request.translate
125 fileid = str(fileid) if fileid else None
125 fileid = str(fileid) if fileid else None
126 ig_ws = get_ignore_ws(fileid, request)
126 ig_ws = get_ignore_ws(fileid, request)
127 ln_ctx = (get_line_ctx(fileid, request) or 3) * 2
127 ln_ctx = (get_line_ctx(fileid, request) or 3) * 2
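# each use of the 'Increase context' link doubles the current context, starting from the default of 3 lines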
128
128
129 params = collections.defaultdict(list)
129 params = collections.defaultdict(list)
130 _update_with_GET(params, request)
130 _update_with_GET(params, request)
131
131
132 if ln_ctx > 0:
132 if ln_ctx > 0:
133 params['context'] += [ln_ctx]
133 params['context'] += [ln_ctx]
134
134
135 if ig_ws:
135 if ig_ws:
136 ig_ws_key = 'ignorews'
136 ig_ws_key = 'ignorews'
137 ig_ws_val = 1
137 ig_ws_val = 1
138 params[ig_ws_key] += [ig_ws_val]
138 params[ig_ws_key] += [ig_ws_val]
139
139
140 lbl = _('Increase context')
140 lbl = _('Increase context')
141 tooltiplbl = _('Increase context for all diffs')
141 tooltiplbl = _('Increase context for all diffs')
142
142
143 if fileid:
143 if fileid:
144 params['anchor'] = 'a_' + fileid
144 params['anchor'] = 'a_' + fileid
145 return h.link_to(lbl, request.current_route_path(_query=params),
145 return h.link_to(lbl, request.current_route_path(_query=params),
146 title=tooltiplbl, class_='tooltip')
146 title=tooltiplbl, class_='tooltip')
147
147
148
148
149 class RepoCommitsView(RepoAppView):
149 class RepoCommitsView(RepoAppView):
150 def load_default_context(self):
150 def load_default_context(self):
151 c = self._get_local_tmpl_context(include_app_defaults=True)
151 c = self._get_local_tmpl_context(include_app_defaults=True)
152 c.rhodecode_repo = self.rhodecode_vcs_repo
152 c.rhodecode_repo = self.rhodecode_vcs_repo
153
153
154 return c
154 return c
155
155
156 def _is_diff_cache_enabled(self, target_repo):
156 def _is_diff_cache_enabled(self, target_repo):
157 caching_enabled = self._get_general_setting(
157 caching_enabled = self._get_general_setting(
158 target_repo, 'rhodecode_diff_cache')
158 target_repo, 'rhodecode_diff_cache')
159 log.debug('Diff caching enabled: %s', caching_enabled)
159 log.debug('Diff caching enabled: %s', caching_enabled)
160 return caching_enabled
160 return caching_enabled
161
161
162 def _commit(self, commit_id_range, method):
162 def _commit(self, commit_id_range, method):
163 _ = self.request.translate
163 _ = self.request.translate
164 c = self.load_default_context()
164 c = self.load_default_context()
165 c.ignorews_url = _ignorews_url
165 c.ignorews_url = _ignorews_url
166 c.context_url = _context_url
166 c.context_url = _context_url
167 c.fulldiff = self.request.GET.get('fulldiff')
167 c.fulldiff = self.request.GET.get('fulldiff')
168
168
169 # fetch the global flags for ignoring whitespace and for context lines
169 # fetch the global flags for ignoring whitespace and for context lines
170 context_lcl = get_line_ctx('', self.request)
170 context_lcl = get_line_ctx('', self.request)
171 ign_whitespace_lcl = get_ignore_ws('', self.request)
171 ign_whitespace_lcl = get_ignore_ws('', self.request)
172
172
173 # diff_limit will cut off the whole diff if the limit is applied
173 # diff_limit will cut off the whole diff if the limit is applied
174 # otherwise it will just hide the big files from the front-end
174 # otherwise it will just hide the big files from the front-end
175 diff_limit = c.visual.cut_off_limit_diff
175 diff_limit = c.visual.cut_off_limit_diff
176 file_limit = c.visual.cut_off_limit_file
176 file_limit = c.visual.cut_off_limit_file
177
177
178 # get ranges of commit ids if preset
178 # get ranges of commit ids if present
178 # get ranges of commit ids if present
179 commit_range = commit_id_range.split('...')[:2]
180
180
181 try:
181 try:
182 pre_load = ['affected_files', 'author', 'branch', 'date',
182 pre_load = ['affected_files', 'author', 'branch', 'date',
183 'message', 'parents']
183 'message', 'parents']
184
184
185 if len(commit_range) == 2:
185 if len(commit_range) == 2:
186 commits = self.rhodecode_vcs_repo.get_commits(
186 commits = self.rhodecode_vcs_repo.get_commits(
187 start_id=commit_range[0], end_id=commit_range[1],
187 start_id=commit_range[0], end_id=commit_range[1],
188 pre_load=pre_load)
188 pre_load=pre_load)
189 commits = list(commits)
189 commits = list(commits)
190 else:
190 else:
191 commits = [self.rhodecode_vcs_repo.get_commit(
191 commits = [self.rhodecode_vcs_repo.get_commit(
192 commit_id=commit_id_range, pre_load=pre_load)]
192 commit_id=commit_id_range, pre_load=pre_load)]
193
193
194 c.commit_ranges = commits
194 c.commit_ranges = commits
195 if not c.commit_ranges:
195 if not c.commit_ranges:
196 raise RepositoryError(
196 raise RepositoryError(
197 'The commit range returned an empty result')
197 'The commit range returned an empty result')
198 except CommitDoesNotExistError:
198 except CommitDoesNotExistError:
199 msg = _('No such commit exists for this repository')
199 msg = _('No such commit exists for this repository')
200 h.flash(msg, category='error')
200 h.flash(msg, category='error')
201 raise HTTPNotFound()
201 raise HTTPNotFound()
202 except Exception:
202 except Exception:
203 log.exception("General failure")
203 log.exception("General failure")
204 raise HTTPNotFound()
204 raise HTTPNotFound()
205
205
206 c.changes = OrderedDict()
206 c.changes = OrderedDict()
207 c.lines_added = 0
207 c.lines_added = 0
208 c.lines_deleted = 0
208 c.lines_deleted = 0
209
209
210 # auto collapse if we have more commits than the limit
210 # auto collapse if we have more commits than the limit
211 collapse_limit = diffs.DiffProcessor._collapse_commits_over
211 collapse_limit = diffs.DiffProcessor._collapse_commits_over
212 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
212 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
213
213
214 c.commit_statuses = ChangesetStatus.STATUSES
214 c.commit_statuses = ChangesetStatus.STATUSES
215 c.inline_comments = []
215 c.inline_comments = []
216 c.files = []
216 c.files = []
217
217
218 c.statuses = []
218 c.statuses = []
219 c.comments = []
219 c.comments = []
220 c.unresolved_comments = []
220 c.unresolved_comments = []
221 if len(c.commit_ranges) == 1:
221 if len(c.commit_ranges) == 1:
222 commit = c.commit_ranges[0]
222 commit = c.commit_ranges[0]
223 c.comments = CommentsModel().get_comments(
223 c.comments = CommentsModel().get_comments(
224 self.db_repo.repo_id,
224 self.db_repo.repo_id,
225 revision=commit.raw_id)
225 revision=commit.raw_id)
226 c.statuses.append(ChangesetStatusModel().get_status(
226 c.statuses.append(ChangesetStatusModel().get_status(
227 self.db_repo.repo_id, commit.raw_id))
227 self.db_repo.repo_id, commit.raw_id))
228 # comments from PR
228 # comments from PR
229 statuses = ChangesetStatusModel().get_statuses(
229 statuses = ChangesetStatusModel().get_statuses(
230 self.db_repo.repo_id, commit.raw_id,
230 self.db_repo.repo_id, commit.raw_id,
231 with_revisions=True)
231 with_revisions=True)
232 prs = set(st.pull_request for st in statuses
232 prs = set(st.pull_request for st in statuses
233 if st.pull_request is not None)
233 if st.pull_request is not None)
234 # from associated statuses, check the pull requests, and
234 # from associated statuses, check the pull requests, and
235 # show comments from them
235 # show comments from them
236 for pr in prs:
236 for pr in prs:
237 c.comments.extend(pr.comments)
237 c.comments.extend(pr.comments)
238
238
239 c.unresolved_comments = CommentsModel()\
239 c.unresolved_comments = CommentsModel()\
240 .get_commit_unresolved_todos(commit.raw_id)
240 .get_commit_unresolved_todos(commit.raw_id)
241
241
242 diff = None
242 diff = None
243 # Iterate over ranges (default commit view is always one commit)
243 # Iterate over ranges (default commit view is always one commit)
244 for commit in c.commit_ranges:
244 for commit in c.commit_ranges:
245 c.changes[commit.raw_id] = []
245 c.changes[commit.raw_id] = []
246
246
247 commit2 = commit
247 commit2 = commit
248 commit1 = commit.parents[0] if commit.parents else EmptyCommit()
248 commit1 = commit.first_parent
249
249
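# The hunk above replaces the explicit parent lookup with `first_parent`.
# Presumably the new property wraps the same fallback; a sketch of the assumed
# behaviour (the real property lives in the VCS commit base class):
class CommitSketch(object):
    def __init__(self, parents):
        self.parents = parents

    @property
    def first_parent(self):
        # same semantics as the replaced expression; None stands in for
        # EmptyCommit() in this illustration
        return self.parents[0] if self.parents else None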
250 if method == 'show':
250 if method == 'show':
251 inline_comments = CommentsModel().get_inline_comments(
251 inline_comments = CommentsModel().get_inline_comments(
252 self.db_repo.repo_id, revision=commit.raw_id)
252 self.db_repo.repo_id, revision=commit.raw_id)
253 c.inline_cnt = CommentsModel().get_inline_comments_count(
253 c.inline_cnt = CommentsModel().get_inline_comments_count(
254 inline_comments)
254 inline_comments)
255 c.inline_comments = inline_comments
255 c.inline_comments = inline_comments
256
256
257 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
257 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
258 self.db_repo)
258 self.db_repo)
259 cache_file_path = diff_cache_exist(
259 cache_file_path = diff_cache_exist(
260 cache_path, 'diff', commit.raw_id,
260 cache_path, 'diff', commit.raw_id,
261 ign_whitespace_lcl, context_lcl, c.fulldiff)
261 ign_whitespace_lcl, context_lcl, c.fulldiff)
262
262
263 caching_enabled = self._is_diff_cache_enabled(self.db_repo)
263 caching_enabled = self._is_diff_cache_enabled(self.db_repo)
264 force_recache = str2bool(self.request.GET.get('force_recache'))
264 force_recache = str2bool(self.request.GET.get('force_recache'))
265
265
266 cached_diff = None
266 cached_diff = None
267 if caching_enabled:
267 if caching_enabled:
268 cached_diff = load_cached_diff(cache_file_path)
268 cached_diff = load_cached_diff(cache_file_path)
269
269
270 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
270 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
271 if not force_recache and has_proper_diff_cache:
271 if not force_recache and has_proper_diff_cache:
272 diffset = cached_diff['diff']
272 diffset = cached_diff['diff']
273 else:
273 else:
274 vcs_diff = self.rhodecode_vcs_repo.get_diff(
274 vcs_diff = self.rhodecode_vcs_repo.get_diff(
275 commit1, commit2,
275 commit1, commit2,
276 ignore_whitespace=ign_whitespace_lcl,
276 ignore_whitespace=ign_whitespace_lcl,
277 context=context_lcl)
277 context=context_lcl)
278
278
279 diff_processor = diffs.DiffProcessor(
279 diff_processor = diffs.DiffProcessor(
280 vcs_diff, format='newdiff', diff_limit=diff_limit,
280 vcs_diff, format='newdiff', diff_limit=diff_limit,
281 file_limit=file_limit, show_full_diff=c.fulldiff)
281 file_limit=file_limit, show_full_diff=c.fulldiff)
282
282
283 _parsed = diff_processor.prepare()
283 _parsed = diff_processor.prepare()
284
284
285 diffset = codeblocks.DiffSet(
285 diffset = codeblocks.DiffSet(
286 repo_name=self.db_repo_name,
286 repo_name=self.db_repo_name,
287 source_node_getter=codeblocks.diffset_node_getter(commit1),
287 source_node_getter=codeblocks.diffset_node_getter(commit1),
288 target_node_getter=codeblocks.diffset_node_getter(commit2))
288 target_node_getter=codeblocks.diffset_node_getter(commit2))
289
289
290 diffset = self.path_filter.render_patchset_filtered(
290 diffset = self.path_filter.render_patchset_filtered(
291 diffset, _parsed, commit1.raw_id, commit2.raw_id)
291 diffset, _parsed, commit1.raw_id, commit2.raw_id)
292
292
293 # save cached diff
293 # save cached diff
294 if caching_enabled:
294 if caching_enabled:
295 cache_diff(cache_file_path, diffset, None)
295 cache_diff(cache_file_path, diffset, None)
296
296
297 c.limited_diff = diffset.limited_diff
297 c.limited_diff = diffset.limited_diff
298 c.changes[commit.raw_id] = diffset
298 c.changes[commit.raw_id] = diffset
299 else:
299 else:
300 # TODO(marcink): no cache usage here...
300 # TODO(marcink): no cache usage here...
301 _diff = self.rhodecode_vcs_repo.get_diff(
301 _diff = self.rhodecode_vcs_repo.get_diff(
302 commit1, commit2,
302 commit1, commit2,
303 ignore_whitespace=ign_whitespace_lcl, context=context_lcl)
303 ignore_whitespace=ign_whitespace_lcl, context=context_lcl)
304 diff_processor = diffs.DiffProcessor(
304 diff_processor = diffs.DiffProcessor(
305 _diff, format='newdiff', diff_limit=diff_limit,
305 _diff, format='newdiff', diff_limit=diff_limit,
306 file_limit=file_limit, show_full_diff=c.fulldiff)
306 file_limit=file_limit, show_full_diff=c.fulldiff)
307 # for downloads/raw we only need the RAW diff, nothing else
307 # for downloads/raw we only need the RAW diff, nothing else
308 diff = self.path_filter.get_raw_patch(diff_processor)
308 diff = self.path_filter.get_raw_patch(diff_processor)
309 c.changes[commit.raw_id] = [None, None, None, None, diff, None, None]
309 c.changes[commit.raw_id] = [None, None, None, None, diff, None, None]
310
310
311 # sort comments by how they were generated
311 # sort comments by how they were generated
312 c.comments = sorted(c.comments, key=lambda x: x.comment_id)
312 c.comments = sorted(c.comments, key=lambda x: x.comment_id)
313
313
314 if len(c.commit_ranges) == 1:
314 if len(c.commit_ranges) == 1:
315 c.commit = c.commit_ranges[0]
315 c.commit = c.commit_ranges[0]
316 c.parent_tmpl = ''.join(
316 c.parent_tmpl = ''.join(
317 '# Parent %s\n' % x.raw_id for x in c.commit.parents)
317 '# Parent %s\n' % x.raw_id for x in c.commit.parents)
318
318
319 if method == 'download':
319 if method == 'download':
320 response = Response(diff)
320 response = Response(diff)
321 response.content_type = 'text/plain'
321 response.content_type = 'text/plain'
322 response.content_disposition = (
322 response.content_disposition = (
323 'attachment; filename=%s.diff' % commit_id_range[:12])
323 'attachment; filename=%s.diff' % commit_id_range[:12])
324 return response
324 return response
325 elif method == 'patch':
325 elif method == 'patch':
326 c.diff = safe_unicode(diff)
326 c.diff = safe_unicode(diff)
327 patch = render(
327 patch = render(
328 'rhodecode:templates/changeset/patch_changeset.mako',
328 'rhodecode:templates/changeset/patch_changeset.mako',
329 self._get_template_context(c), self.request)
329 self._get_template_context(c), self.request)
330 response = Response(patch)
330 response = Response(patch)
331 response.content_type = 'text/plain'
331 response.content_type = 'text/plain'
332 return response
332 return response
333 elif method == 'raw':
333 elif method == 'raw':
334 response = Response(diff)
334 response = Response(diff)
335 response.content_type = 'text/plain'
335 response.content_type = 'text/plain'
336 return response
336 return response
337 elif method == 'show':
337 elif method == 'show':
338 if len(c.commit_ranges) == 1:
338 if len(c.commit_ranges) == 1:
339 html = render(
339 html = render(
340 'rhodecode:templates/changeset/changeset.mako',
340 'rhodecode:templates/changeset/changeset.mako',
341 self._get_template_context(c), self.request)
341 self._get_template_context(c), self.request)
342 return Response(html)
342 return Response(html)
343 else:
343 else:
344 c.ancestor = None
344 c.ancestor = None
345 c.target_repo = self.db_repo
345 c.target_repo = self.db_repo
346 html = render(
346 html = render(
347 'rhodecode:templates/changeset/changeset_range.mako',
347 'rhodecode:templates/changeset/changeset_range.mako',
348 self._get_template_context(c), self.request)
348 self._get_template_context(c), self.request)
349 return Response(html)
349 return Response(html)
350
350
351 raise HTTPBadRequest()
351 raise HTTPBadRequest()
352
352
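# A condensed, standalone sketch of the cache-or-compute pattern used in
# `_commit` above (the helper names here are placeholders, not the
# rhodecode.lib functions): try the cached diffset first, fall back to
# computing it, and store the fresh result when caching is enabled.
def get_diffset_cached(cache_path, compute_diffset, load_cached, save_cached,
                       caching_enabled=True, force_recache=False):
    cached = load_cached(cache_path) if caching_enabled else None
    if cached is not None and not force_recache:
        return cached
    diffset = compute_diffset()
    if caching_enabled:
        save_cached(cache_path, diffset)
    return diffset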
353 @LoginRequired()
353 @LoginRequired()
354 @HasRepoPermissionAnyDecorator(
354 @HasRepoPermissionAnyDecorator(
355 'repository.read', 'repository.write', 'repository.admin')
355 'repository.read', 'repository.write', 'repository.admin')
356 @view_config(
356 @view_config(
357 route_name='repo_commit', request_method='GET',
357 route_name='repo_commit', request_method='GET',
358 renderer=None)
358 renderer=None)
359 def repo_commit_show(self):
359 def repo_commit_show(self):
360 commit_id = self.request.matchdict['commit_id']
360 commit_id = self.request.matchdict['commit_id']
361 return self._commit(commit_id, method='show')
361 return self._commit(commit_id, method='show')
362
362
363 @LoginRequired()
363 @LoginRequired()
364 @HasRepoPermissionAnyDecorator(
364 @HasRepoPermissionAnyDecorator(
365 'repository.read', 'repository.write', 'repository.admin')
365 'repository.read', 'repository.write', 'repository.admin')
366 @view_config(
366 @view_config(
367 route_name='repo_commit_raw', request_method='GET',
367 route_name='repo_commit_raw', request_method='GET',
368 renderer=None)
368 renderer=None)
369 @view_config(
369 @view_config(
370 route_name='repo_commit_raw_deprecated', request_method='GET',
370 route_name='repo_commit_raw_deprecated', request_method='GET',
371 renderer=None)
371 renderer=None)
372 def repo_commit_raw(self):
372 def repo_commit_raw(self):
373 commit_id = self.request.matchdict['commit_id']
373 commit_id = self.request.matchdict['commit_id']
374 return self._commit(commit_id, method='raw')
374 return self._commit(commit_id, method='raw')
375
375
376 @LoginRequired()
376 @LoginRequired()
377 @HasRepoPermissionAnyDecorator(
377 @HasRepoPermissionAnyDecorator(
378 'repository.read', 'repository.write', 'repository.admin')
378 'repository.read', 'repository.write', 'repository.admin')
379 @view_config(
379 @view_config(
380 route_name='repo_commit_patch', request_method='GET',
380 route_name='repo_commit_patch', request_method='GET',
381 renderer=None)
381 renderer=None)
382 def repo_commit_patch(self):
382 def repo_commit_patch(self):
383 commit_id = self.request.matchdict['commit_id']
383 commit_id = self.request.matchdict['commit_id']
384 return self._commit(commit_id, method='patch')
384 return self._commit(commit_id, method='patch')
385
385
386 @LoginRequired()
386 @LoginRequired()
387 @HasRepoPermissionAnyDecorator(
387 @HasRepoPermissionAnyDecorator(
388 'repository.read', 'repository.write', 'repository.admin')
388 'repository.read', 'repository.write', 'repository.admin')
389 @view_config(
389 @view_config(
390 route_name='repo_commit_download', request_method='GET',
390 route_name='repo_commit_download', request_method='GET',
391 renderer=None)
391 renderer=None)
392 def repo_commit_download(self):
392 def repo_commit_download(self):
393 commit_id = self.request.matchdict['commit_id']
393 commit_id = self.request.matchdict['commit_id']
394 return self._commit(commit_id, method='download')
394 return self._commit(commit_id, method='download')
395
395
396 @LoginRequired()
396 @LoginRequired()
397 @NotAnonymous()
397 @NotAnonymous()
398 @HasRepoPermissionAnyDecorator(
398 @HasRepoPermissionAnyDecorator(
399 'repository.read', 'repository.write', 'repository.admin')
399 'repository.read', 'repository.write', 'repository.admin')
400 @CSRFRequired()
400 @CSRFRequired()
401 @view_config(
401 @view_config(
402 route_name='repo_commit_comment_create', request_method='POST',
402 route_name='repo_commit_comment_create', request_method='POST',
403 renderer='json_ext')
403 renderer='json_ext')
404 def repo_commit_comment_create(self):
404 def repo_commit_comment_create(self):
405 _ = self.request.translate
405 _ = self.request.translate
406 commit_id = self.request.matchdict['commit_id']
406 commit_id = self.request.matchdict['commit_id']
407
407
408 c = self.load_default_context()
408 c = self.load_default_context()
409 status = self.request.POST.get('changeset_status', None)
409 status = self.request.POST.get('changeset_status', None)
410 text = self.request.POST.get('text')
410 text = self.request.POST.get('text')
411 comment_type = self.request.POST.get('comment_type')
411 comment_type = self.request.POST.get('comment_type')
412 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
412 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
413
413
414 if status:
414 if status:
415 text = text or (_('Status change %(transition_icon)s %(status)s')
415 text = text or (_('Status change %(transition_icon)s %(status)s')
416 % {'transition_icon': '>',
416 % {'transition_icon': '>',
417 'status': ChangesetStatus.get_status_lbl(status)})
417 'status': ChangesetStatus.get_status_lbl(status)})
418
418
419 multi_commit_ids = []
419 multi_commit_ids = []
420 for _commit_id in self.request.POST.get('commit_ids', '').split(','):
420 for _commit_id in self.request.POST.get('commit_ids', '').split(','):
421 if _commit_id not in ['', None, EmptyCommit.raw_id]:
421 if _commit_id not in ['', None, EmptyCommit.raw_id]:
422 if _commit_id not in multi_commit_ids:
422 if _commit_id not in multi_commit_ids:
423 multi_commit_ids.append(_commit_id)
423 multi_commit_ids.append(_commit_id)
424
424
425 commit_ids = multi_commit_ids or [commit_id]
425 commit_ids = multi_commit_ids or [commit_id]
426
426
427 comment = None
427 comment = None
428 for current_id in filter(None, commit_ids):
428 for current_id in filter(None, commit_ids):
429 comment = CommentsModel().create(
429 comment = CommentsModel().create(
430 text=text,
430 text=text,
431 repo=self.db_repo.repo_id,
431 repo=self.db_repo.repo_id,
432 user=self._rhodecode_db_user.user_id,
432 user=self._rhodecode_db_user.user_id,
433 commit_id=current_id,
433 commit_id=current_id,
434 f_path=self.request.POST.get('f_path'),
434 f_path=self.request.POST.get('f_path'),
435 line_no=self.request.POST.get('line'),
435 line_no=self.request.POST.get('line'),
436 status_change=(ChangesetStatus.get_status_lbl(status)
436 status_change=(ChangesetStatus.get_status_lbl(status)
437 if status else None),
437 if status else None),
438 status_change_type=status,
438 status_change_type=status,
439 comment_type=comment_type,
439 comment_type=comment_type,
440 resolves_comment_id=resolves_comment_id,
440 resolves_comment_id=resolves_comment_id,
441 auth_user=self._rhodecode_user
441 auth_user=self._rhodecode_user
442 )
442 )
443
443
444 # get status if set !
444 # get status if set !
445 if status:
445 if status:
446 # if latest status was from pull request and it's closed
446 # if latest status was from pull request and it's closed
447 # disallow changing status !
447 # disallow changing status !
448 # dont_allow_on_closed_pull_request = True !
448 # dont_allow_on_closed_pull_request = True !
449
449
450 try:
450 try:
451 ChangesetStatusModel().set_status(
451 ChangesetStatusModel().set_status(
452 self.db_repo.repo_id,
452 self.db_repo.repo_id,
453 status,
453 status,
454 self._rhodecode_db_user.user_id,
454 self._rhodecode_db_user.user_id,
455 comment,
455 comment,
456 revision=current_id,
456 revision=current_id,
457 dont_allow_on_closed_pull_request=True
457 dont_allow_on_closed_pull_request=True
458 )
458 )
459 except StatusChangeOnClosedPullRequestError:
459 except StatusChangeOnClosedPullRequestError:
460 msg = _('Changing the status of a commit associated with '
460 msg = _('Changing the status of a commit associated with '
461 'a closed pull request is not allowed')
461 'a closed pull request is not allowed')
462 log.exception(msg)
462 log.exception(msg)
463 h.flash(msg, category='warning')
463 h.flash(msg, category='warning')
464 raise HTTPFound(h.route_path(
464 raise HTTPFound(h.route_path(
465 'repo_commit', repo_name=self.db_repo_name,
465 'repo_commit', repo_name=self.db_repo_name,
466 commit_id=current_id))
466 commit_id=current_id))
467
467
468 # finalize, commit and redirect
468 # finalize, commit and redirect
469 Session().commit()
469 Session().commit()
470
470
471 data = {
471 data = {
472 'target_id': h.safeid(h.safe_unicode(
472 'target_id': h.safeid(h.safe_unicode(
473 self.request.POST.get('f_path'))),
473 self.request.POST.get('f_path'))),
474 }
474 }
475 if comment:
475 if comment:
476 c.co = comment
476 c.co = comment
477 rendered_comment = render(
477 rendered_comment = render(
478 'rhodecode:templates/changeset/changeset_comment_block.mako',
478 'rhodecode:templates/changeset/changeset_comment_block.mako',
479 self._get_template_context(c), self.request)
479 self._get_template_context(c), self.request)
480
480
481 data.update(comment.get_dict())
481 data.update(comment.get_dict())
482 data.update({'rendered_text': rendered_comment})
482 data.update({'rendered_text': rendered_comment})
483
483
484 return data
484 return data
485
485
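# Standalone sketch of the commit-id handling in the comment-create view above:
# the POSTed comma-separated list is de-duplicated while preserving order,
# empty entries and the "empty commit" id are skipped, and the URL commit id is
# the fallback. EMPTY_ID is an assumption for this illustration only.
EMPTY_ID = '0' * 40

def parse_commit_ids(raw_value, fallback_commit_id):
    seen = []
    for commit_id in raw_value.split(','):
        if commit_id in ('', EMPTY_ID):
            continue
        if commit_id not in seen:
            seen.append(commit_id)
    return seen or [fallback_commit_id]

# parse_commit_ids('abc,abc,,def', 'abc') -> ['abc', 'def']
# parse_commit_ids('', 'abc')             -> ['abc']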
486 @LoginRequired()
486 @LoginRequired()
487 @NotAnonymous()
487 @NotAnonymous()
488 @HasRepoPermissionAnyDecorator(
488 @HasRepoPermissionAnyDecorator(
489 'repository.read', 'repository.write', 'repository.admin')
489 'repository.read', 'repository.write', 'repository.admin')
490 @CSRFRequired()
490 @CSRFRequired()
491 @view_config(
491 @view_config(
492 route_name='repo_commit_comment_preview', request_method='POST',
492 route_name='repo_commit_comment_preview', request_method='POST',
493 renderer='string', xhr=True)
493 renderer='string', xhr=True)
494 def repo_commit_comment_preview(self):
494 def repo_commit_comment_preview(self):
495 # Technically a CSRF token is not needed as no state changes with this
495 # Technically a CSRF token is not needed as no state changes with this
496 # call. However, as this is a POST, it is better to have one, so automated
496 # call. However, as this is a POST, it is better to have one, so automated
497 # tools don't flag it as potential CSRF.
497 # tools don't flag it as potential CSRF.
498 # POST is required because the payload could be bigger than the maximum
498 # POST is required because the payload could be bigger than the maximum
499 # allowed by GET.
499 # allowed by GET.
500
500
501 text = self.request.POST.get('text')
501 text = self.request.POST.get('text')
502 renderer = self.request.POST.get('renderer') or 'rst'
502 renderer = self.request.POST.get('renderer') or 'rst'
503 if text:
503 if text:
504 return h.render(text, renderer=renderer, mentions=True)
504 return h.render(text, renderer=renderer, mentions=True)
505 return ''
505 return ''
506
506
507 @LoginRequired()
507 @LoginRequired()
508 @NotAnonymous()
508 @NotAnonymous()
509 @HasRepoPermissionAnyDecorator(
509 @HasRepoPermissionAnyDecorator(
510 'repository.read', 'repository.write', 'repository.admin')
510 'repository.read', 'repository.write', 'repository.admin')
511 @CSRFRequired()
511 @CSRFRequired()
512 @view_config(
512 @view_config(
513 route_name='repo_commit_comment_delete', request_method='POST',
513 route_name='repo_commit_comment_delete', request_method='POST',
514 renderer='json_ext')
514 renderer='json_ext')
515 def repo_commit_comment_delete(self):
515 def repo_commit_comment_delete(self):
516 commit_id = self.request.matchdict['commit_id']
516 commit_id = self.request.matchdict['commit_id']
517 comment_id = self.request.matchdict['comment_id']
517 comment_id = self.request.matchdict['comment_id']
518
518
519 comment = ChangesetComment.get_or_404(comment_id)
519 comment = ChangesetComment.get_or_404(comment_id)
520 if not comment:
520 if not comment:
521 log.debug('Comment with id:%s not found, skipping', comment_id)
521 log.debug('Comment with id:%s not found, skipping', comment_id)
522 # comment already deleted in another call probably
522 # comment already deleted in another call probably
523 return True
523 return True
524
524
525 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
525 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
526 super_admin = h.HasPermissionAny('hg.admin')()
526 super_admin = h.HasPermissionAny('hg.admin')()
527 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
527 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
528 is_repo_comment = comment.repo.repo_name == self.db_repo_name
528 is_repo_comment = comment.repo.repo_name == self.db_repo_name
529 comment_repo_admin = is_repo_admin and is_repo_comment
529 comment_repo_admin = is_repo_admin and is_repo_comment
530
530
531 if super_admin or comment_owner or comment_repo_admin:
531 if super_admin or comment_owner or comment_repo_admin:
532 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
532 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
533 Session().commit()
533 Session().commit()
534 return True
534 return True
535 else:
535 else:
536 log.warning('No permissions for user %s to delete comment_id: %s',
536 log.warning('No permissions for user %s to delete comment_id: %s',
537 self._rhodecode_db_user, comment_id)
537 self._rhodecode_db_user, comment_id)
538 raise HTTPNotFound()
538 raise HTTPNotFound()
539
539
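# Standalone sketch of the delete-permission rule applied above: a comment may
# be removed by a super admin, by its author, or by an admin of the repository
# the comment belongs to.
def can_delete_comment(super_admin, comment_owner, repo_admin, is_repo_comment):
    return super_admin or comment_owner or (repo_admin and is_repo_comment)

# can_delete_comment(False, True, False, False) -> True   (author)
# can_delete_comment(False, False, True, True)  -> True   (repo admin, same repo)
# can_delete_comment(False, False, True, False) -> False  (admin of another repo)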
540 @LoginRequired()
540 @LoginRequired()
541 @HasRepoPermissionAnyDecorator(
541 @HasRepoPermissionAnyDecorator(
542 'repository.read', 'repository.write', 'repository.admin')
542 'repository.read', 'repository.write', 'repository.admin')
543 @view_config(
543 @view_config(
544 route_name='repo_commit_data', request_method='GET',
544 route_name='repo_commit_data', request_method='GET',
545 renderer='json_ext', xhr=True)
545 renderer='json_ext', xhr=True)
546 def repo_commit_data(self):
546 def repo_commit_data(self):
547 commit_id = self.request.matchdict['commit_id']
547 commit_id = self.request.matchdict['commit_id']
548 self.load_default_context()
548 self.load_default_context()
549
549
550 try:
550 try:
551 return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
551 return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
552 except CommitDoesNotExistError as e:
552 except CommitDoesNotExistError as e:
553 return EmptyCommit(message=str(e))
553 return EmptyCommit(message=str(e))
554
554
555 @LoginRequired()
555 @LoginRequired()
556 @HasRepoPermissionAnyDecorator(
556 @HasRepoPermissionAnyDecorator(
557 'repository.read', 'repository.write', 'repository.admin')
557 'repository.read', 'repository.write', 'repository.admin')
558 @view_config(
558 @view_config(
559 route_name='repo_commit_children', request_method='GET',
559 route_name='repo_commit_children', request_method='GET',
560 renderer='json_ext', xhr=True)
560 renderer='json_ext', xhr=True)
561 def repo_commit_children(self):
561 def repo_commit_children(self):
562 commit_id = self.request.matchdict['commit_id']
562 commit_id = self.request.matchdict['commit_id']
563 self.load_default_context()
563 self.load_default_context()
564
564
565 try:
565 try:
566 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
566 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
567 children = commit.children
567 children = commit.children
568 except CommitDoesNotExistError:
568 except CommitDoesNotExistError:
569 children = []
569 children = []
570
570
571 result = {"results": children}
571 result = {"results": children}
572 return result
572 return result
573
573
574 @LoginRequired()
574 @LoginRequired()
575 @HasRepoPermissionAnyDecorator(
575 @HasRepoPermissionAnyDecorator(
576 'repository.read', 'repository.write', 'repository.admin')
576 'repository.read', 'repository.write', 'repository.admin')
577 @view_config(
577 @view_config(
578 route_name='repo_commit_parents', request_method='GET',
578 route_name='repo_commit_parents', request_method='GET',
579 renderer='json_ext')
579 renderer='json_ext')
580 def repo_commit_parents(self):
580 def repo_commit_parents(self):
581 commit_id = self.request.matchdict['commit_id']
581 commit_id = self.request.matchdict['commit_id']
582 self.load_default_context()
582 self.load_default_context()
583
583
584 try:
584 try:
585 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
585 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
586 parents = commit.parents
586 parents = commit.parents
587 except CommitDoesNotExistError:
587 except CommitDoesNotExistError:
588 parents = []
588 parents = []
589 result = {"results": parents}
589 result = {"results": parents}
590 return result
590 return result
@@ -1,1326 +1,1393 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2018 RhodeCode GmbH
3 # Copyright (C) 2011-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import collections
22 import collections
23
23
24 import formencode
24 import formencode
25 import formencode.htmlfill
25 import formencode.htmlfill
26 import peppercorn
26 import peppercorn
27 from pyramid.httpexceptions import (
27 from pyramid.httpexceptions import (
28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest)
28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest)
29 from pyramid.view import view_config
29 from pyramid.view import view_config
30 from pyramid.renderers import render
30 from pyramid.renderers import render
31
31
32 from rhodecode import events
32 from rhodecode import events
33 from rhodecode.apps._base import RepoAppView, DataGridAppView
33 from rhodecode.apps._base import RepoAppView, DataGridAppView
34
34
35 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
35 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
36 from rhodecode.lib.base import vcs_operation_context
36 from rhodecode.lib.base import vcs_operation_context
37 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
37 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
38 from rhodecode.lib.ext_json import json
38 from rhodecode.lib.ext_json import json
39 from rhodecode.lib.auth import (
39 from rhodecode.lib.auth import (
40 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
40 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
41 NotAnonymous, CSRFRequired)
41 NotAnonymous, CSRFRequired)
42 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
42 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
43 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
43 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
44 from rhodecode.lib.vcs.exceptions import (CommitDoesNotExistError,
44 from rhodecode.lib.vcs.exceptions import (CommitDoesNotExistError,
45 RepositoryRequirementError, EmptyRepositoryError)
45 RepositoryRequirementError, EmptyRepositoryError)
46 from rhodecode.model.changeset_status import ChangesetStatusModel
46 from rhodecode.model.changeset_status import ChangesetStatusModel
47 from rhodecode.model.comment import CommentsModel
47 from rhodecode.model.comment import CommentsModel
48 from rhodecode.model.db import (func, or_, PullRequest, PullRequestVersion,
48 from rhodecode.model.db import (func, or_, PullRequest, PullRequestVersion,
49 ChangesetComment, ChangesetStatus, Repository)
49 ChangesetComment, ChangesetStatus, Repository)
50 from rhodecode.model.forms import PullRequestForm
50 from rhodecode.model.forms import PullRequestForm
51 from rhodecode.model.meta import Session
51 from rhodecode.model.meta import Session
52 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
52 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
53 from rhodecode.model.scm import ScmModel
53 from rhodecode.model.scm import ScmModel
54
54
55 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
56
56
57
57
58 class RepoPullRequestsView(RepoAppView, DataGridAppView):
58 class RepoPullRequestsView(RepoAppView, DataGridAppView):
59
59
60 def load_default_context(self):
60 def load_default_context(self):
61 c = self._get_local_tmpl_context(include_app_defaults=True)
61 c = self._get_local_tmpl_context(include_app_defaults=True)
62 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
62 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
63 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
63 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
64 # backward compat.: for OLD PRs we use a plain renderer
64 # backward compat.: for OLD PRs we use a plain renderer
65 c.renderer = 'plain'
65 c.renderer = 'plain'
66 return c
66 return c
67
67
68 def _get_pull_requests_list(
68 def _get_pull_requests_list(
69 self, repo_name, source, filter_type, opened_by, statuses):
69 self, repo_name, source, filter_type, opened_by, statuses):
70
70
71 draw, start, limit = self._extract_chunk(self.request)
71 draw, start, limit = self._extract_chunk(self.request)
72 search_q, order_by, order_dir = self._extract_ordering(self.request)
72 search_q, order_by, order_dir = self._extract_ordering(self.request)
73 _render = self.request.get_partial_renderer(
73 _render = self.request.get_partial_renderer(
74 'rhodecode:templates/data_table/_dt_elements.mako')
74 'rhodecode:templates/data_table/_dt_elements.mako')
75
75
76 # pagination
76 # pagination
77
77
78 if filter_type == 'awaiting_review':
78 if filter_type == 'awaiting_review':
79 pull_requests = PullRequestModel().get_awaiting_review(
79 pull_requests = PullRequestModel().get_awaiting_review(
80 repo_name, source=source, opened_by=opened_by,
80 repo_name, source=source, opened_by=opened_by,
81 statuses=statuses, offset=start, length=limit,
81 statuses=statuses, offset=start, length=limit,
82 order_by=order_by, order_dir=order_dir)
82 order_by=order_by, order_dir=order_dir)
83 pull_requests_total_count = PullRequestModel().count_awaiting_review(
83 pull_requests_total_count = PullRequestModel().count_awaiting_review(
84 repo_name, source=source, statuses=statuses,
84 repo_name, source=source, statuses=statuses,
85 opened_by=opened_by)
85 opened_by=opened_by)
86 elif filter_type == 'awaiting_my_review':
86 elif filter_type == 'awaiting_my_review':
87 pull_requests = PullRequestModel().get_awaiting_my_review(
87 pull_requests = PullRequestModel().get_awaiting_my_review(
88 repo_name, source=source, opened_by=opened_by,
88 repo_name, source=source, opened_by=opened_by,
89 user_id=self._rhodecode_user.user_id, statuses=statuses,
89 user_id=self._rhodecode_user.user_id, statuses=statuses,
90 offset=start, length=limit, order_by=order_by,
90 offset=start, length=limit, order_by=order_by,
91 order_dir=order_dir)
91 order_dir=order_dir)
92 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
92 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
93 repo_name, source=source, user_id=self._rhodecode_user.user_id,
93 repo_name, source=source, user_id=self._rhodecode_user.user_id,
94 statuses=statuses, opened_by=opened_by)
94 statuses=statuses, opened_by=opened_by)
95 else:
95 else:
96 pull_requests = PullRequestModel().get_all(
96 pull_requests = PullRequestModel().get_all(
97 repo_name, source=source, opened_by=opened_by,
97 repo_name, source=source, opened_by=opened_by,
98 statuses=statuses, offset=start, length=limit,
98 statuses=statuses, offset=start, length=limit,
99 order_by=order_by, order_dir=order_dir)
99 order_by=order_by, order_dir=order_dir)
100 pull_requests_total_count = PullRequestModel().count_all(
100 pull_requests_total_count = PullRequestModel().count_all(
101 repo_name, source=source, statuses=statuses,
101 repo_name, source=source, statuses=statuses,
102 opened_by=opened_by)
102 opened_by=opened_by)
103
103
104 data = []
104 data = []
105 comments_model = CommentsModel()
105 comments_model = CommentsModel()
106 for pr in pull_requests:
106 for pr in pull_requests:
107 comments = comments_model.get_all_comments(
107 comments = comments_model.get_all_comments(
108 self.db_repo.repo_id, pull_request=pr)
108 self.db_repo.repo_id, pull_request=pr)
109
109
110 data.append({
110 data.append({
111 'name': _render('pullrequest_name',
111 'name': _render('pullrequest_name',
112 pr.pull_request_id, pr.target_repo.repo_name),
112 pr.pull_request_id, pr.target_repo.repo_name),
113 'name_raw': pr.pull_request_id,
113 'name_raw': pr.pull_request_id,
114 'status': _render('pullrequest_status',
114 'status': _render('pullrequest_status',
115 pr.calculated_review_status()),
115 pr.calculated_review_status()),
116 'title': _render(
116 'title': _render(
117 'pullrequest_title', pr.title, pr.description),
117 'pullrequest_title', pr.title, pr.description),
118 'description': h.escape(pr.description),
118 'description': h.escape(pr.description),
119 'updated_on': _render('pullrequest_updated_on',
119 'updated_on': _render('pullrequest_updated_on',
120 h.datetime_to_time(pr.updated_on)),
120 h.datetime_to_time(pr.updated_on)),
121 'updated_on_raw': h.datetime_to_time(pr.updated_on),
121 'updated_on_raw': h.datetime_to_time(pr.updated_on),
122 'created_on': _render('pullrequest_updated_on',
122 'created_on': _render('pullrequest_updated_on',
123 h.datetime_to_time(pr.created_on)),
123 h.datetime_to_time(pr.created_on)),
124 'created_on_raw': h.datetime_to_time(pr.created_on),
124 'created_on_raw': h.datetime_to_time(pr.created_on),
125 'author': _render('pullrequest_author',
125 'author': _render('pullrequest_author',
126 pr.author.full_contact, ),
126 pr.author.full_contact, ),
127 'author_raw': pr.author.full_name,
127 'author_raw': pr.author.full_name,
128 'comments': _render('pullrequest_comments', len(comments)),
128 'comments': _render('pullrequest_comments', len(comments)),
129 'comments_raw': len(comments),
129 'comments_raw': len(comments),
130 'closed': pr.is_closed(),
130 'closed': pr.is_closed(),
131 })
131 })
132
132
133 data = ({
133 data = ({
134 'draw': draw,
134 'draw': draw,
135 'data': data,
135 'data': data,
136 'recordsTotal': pull_requests_total_count,
136 'recordsTotal': pull_requests_total_count,
137 'recordsFiltered': pull_requests_total_count,
137 'recordsFiltered': pull_requests_total_count,
138 })
138 })
139 return data
139 return data
140
140
141 def get_recache_flag(self):
142 for flag_name in ['force_recache', 'force-recache', 'no-cache']:
143 flag_val = self.request.GET.get(flag_name)
144 if str2bool(flag_val):
145 return True
146 return False
147
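# Standalone sketch of the get_recache_flag helper added above: any of the
# query-string aliases may force a diff recache. _to_bool is a minimal
# stand-in for rhodecode.lib.utils2.str2bool, used only to keep the example
# self-contained.
def _to_bool(value):
    return str(value).lower() in ('1', 'true', 'yes', 'on')

def recache_requested(get_params):
    for flag_name in ('force_recache', 'force-recache', 'no-cache'):
        if _to_bool(get_params.get(flag_name)):
            return True
    return False

# recache_requested({'no-cache': '1'})       -> True
# recache_requested({'force_recache': '0'})  -> False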
141 @LoginRequired()
148 @LoginRequired()
142 @HasRepoPermissionAnyDecorator(
149 @HasRepoPermissionAnyDecorator(
143 'repository.read', 'repository.write', 'repository.admin')
150 'repository.read', 'repository.write', 'repository.admin')
144 @view_config(
151 @view_config(
145 route_name='pullrequest_show_all', request_method='GET',
152 route_name='pullrequest_show_all', request_method='GET',
146 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
153 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
147 def pull_request_list(self):
154 def pull_request_list(self):
148 c = self.load_default_context()
155 c = self.load_default_context()
149
156
150 req_get = self.request.GET
157 req_get = self.request.GET
151 c.source = str2bool(req_get.get('source'))
158 c.source = str2bool(req_get.get('source'))
152 c.closed = str2bool(req_get.get('closed'))
159 c.closed = str2bool(req_get.get('closed'))
153 c.my = str2bool(req_get.get('my'))
160 c.my = str2bool(req_get.get('my'))
154 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
161 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
155 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
162 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
156
163
157 c.active = 'open'
164 c.active = 'open'
158 if c.my:
165 if c.my:
159 c.active = 'my'
166 c.active = 'my'
160 if c.closed:
167 if c.closed:
161 c.active = 'closed'
168 c.active = 'closed'
162 if c.awaiting_review and not c.source:
169 if c.awaiting_review and not c.source:
163 c.active = 'awaiting'
170 c.active = 'awaiting'
164 if c.source and not c.awaiting_review:
171 if c.source and not c.awaiting_review:
165 c.active = 'source'
172 c.active = 'source'
166 if c.awaiting_my_review:
173 if c.awaiting_my_review:
167 c.active = 'awaiting_my'
174 c.active = 'awaiting_my'
168
175
169 return self._get_template_context(c)
176 return self._get_template_context(c)
170
177
171 @LoginRequired()
178 @LoginRequired()
172 @HasRepoPermissionAnyDecorator(
179 @HasRepoPermissionAnyDecorator(
173 'repository.read', 'repository.write', 'repository.admin')
180 'repository.read', 'repository.write', 'repository.admin')
174 @view_config(
181 @view_config(
175 route_name='pullrequest_show_all_data', request_method='GET',
182 route_name='pullrequest_show_all_data', request_method='GET',
176 renderer='json_ext', xhr=True)
183 renderer='json_ext', xhr=True)
177 def pull_request_list_data(self):
184 def pull_request_list_data(self):
178 self.load_default_context()
185 self.load_default_context()
179
186
180 # additional filters
187 # additional filters
181 req_get = self.request.GET
188 req_get = self.request.GET
182 source = str2bool(req_get.get('source'))
189 source = str2bool(req_get.get('source'))
183 closed = str2bool(req_get.get('closed'))
190 closed = str2bool(req_get.get('closed'))
184 my = str2bool(req_get.get('my'))
191 my = str2bool(req_get.get('my'))
185 awaiting_review = str2bool(req_get.get('awaiting_review'))
192 awaiting_review = str2bool(req_get.get('awaiting_review'))
186 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
193 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
187
194
188 filter_type = 'awaiting_review' if awaiting_review \
195 filter_type = 'awaiting_review' if awaiting_review \
189 else 'awaiting_my_review' if awaiting_my_review \
196 else 'awaiting_my_review' if awaiting_my_review \
190 else None
197 else None
191
198
192 opened_by = None
199 opened_by = None
193 if my:
200 if my:
194 opened_by = [self._rhodecode_user.user_id]
201 opened_by = [self._rhodecode_user.user_id]
195
202
196 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
203 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
197 if closed:
204 if closed:
198 statuses = [PullRequest.STATUS_CLOSED]
205 statuses = [PullRequest.STATUS_CLOSED]
199
206
200 data = self._get_pull_requests_list(
207 data = self._get_pull_requests_list(
201 repo_name=self.db_repo_name, source=source,
208 repo_name=self.db_repo_name, source=source,
202 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
209 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
203
210
204 return data
211 return data
205
212
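# Standalone sketch of the filter/status selection performed in
# pull_request_list_data above (illustrative only; the status strings are
# placeholders for the PullRequest.STATUS_* constants):
def pick_pr_filters(awaiting_review, awaiting_my_review, closed, my, user_id,
                    status_new='new', status_open='open', status_closed='closed'):
    if awaiting_review:
        filter_type = 'awaiting_review'
    elif awaiting_my_review:
        filter_type = 'awaiting_my_review'
    else:
        filter_type = None
    opened_by = [user_id] if my else None
    statuses = [status_closed] if closed else [status_new, status_open]
    return filter_type, opened_by, statuses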
206 def _is_diff_cache_enabled(self, target_repo):
213 def _is_diff_cache_enabled(self, target_repo):
207 caching_enabled = self._get_general_setting(
214 caching_enabled = self._get_general_setting(
208 target_repo, 'rhodecode_diff_cache')
215 target_repo, 'rhodecode_diff_cache')
209 log.debug('Diff caching enabled: %s', caching_enabled)
216 log.debug('Diff caching enabled: %s', caching_enabled)
210 return caching_enabled
217 return caching_enabled
211
218
212 def _get_diffset(self, source_repo_name, source_repo,
219 def _get_diffset(self, source_repo_name, source_repo,
213 source_ref_id, target_ref_id,
220 source_ref_id, target_ref_id,
214 target_commit, source_commit, diff_limit, file_limit,
221 target_commit, source_commit, diff_limit, file_limit,
215 fulldiff):
222 fulldiff):
216
223
217 vcs_diff = PullRequestModel().get_diff(
224 vcs_diff = PullRequestModel().get_diff(
218 source_repo, source_ref_id, target_ref_id)
225 source_repo, source_ref_id, target_ref_id)
219
226
220 diff_processor = diffs.DiffProcessor(
227 diff_processor = diffs.DiffProcessor(
221 vcs_diff, format='newdiff', diff_limit=diff_limit,
228 vcs_diff, format='newdiff', diff_limit=diff_limit,
222 file_limit=file_limit, show_full_diff=fulldiff)
229 file_limit=file_limit, show_full_diff=fulldiff)
223
230
224 _parsed = diff_processor.prepare()
231 _parsed = diff_processor.prepare()
225
232
226 diffset = codeblocks.DiffSet(
233 diffset = codeblocks.DiffSet(
227 repo_name=self.db_repo_name,
234 repo_name=self.db_repo_name,
228 source_repo_name=source_repo_name,
235 source_repo_name=source_repo_name,
229 source_node_getter=codeblocks.diffset_node_getter(target_commit),
236 source_node_getter=codeblocks.diffset_node_getter(target_commit),
230 target_node_getter=codeblocks.diffset_node_getter(source_commit),
237 target_node_getter=codeblocks.diffset_node_getter(source_commit),
231 )
238 )
232 diffset = self.path_filter.render_patchset_filtered(
239 diffset = self.path_filter.render_patchset_filtered(
233 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
240 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
234
241
235 return diffset
242 return diffset
236
243
244 def _get_range_diffset(self, source_scm, source_repo,
245 commit1, commit2, diff_limit, file_limit,
246 fulldiff, ign_whitespace_lcl, context_lcl):
247 vcs_diff = source_scm.get_diff(
248 commit1, commit2,
249 ignore_whitespace=ign_whitespace_lcl,
250 context=context_lcl)
251
252 diff_processor = diffs.DiffProcessor(
253 vcs_diff, format='newdiff', diff_limit=diff_limit,
254 file_limit=file_limit, show_full_diff=fulldiff)
255
256 _parsed = diff_processor.prepare()
257
258 diffset = codeblocks.DiffSet(
259 repo_name=source_repo.repo_name,
260 source_node_getter=codeblocks.diffset_node_getter(commit1),
261 target_node_getter=codeblocks.diffset_node_getter(commit2))
262
263 diffset = self.path_filter.render_patchset_filtered(
264 diffset, _parsed, commit1.raw_id, commit2.raw_id)
265
266 return diffset
267
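# Illustrative loop (not the view code) showing how _get_range_diffset above is
# typically driven when the range-diff view is on: one diffset per commit in
# the pull request, each computed against the commit's first parent.
# `commits`, `make_diffset` and `empty_commit` are placeholders.
import collections

def build_range_changes(commits, make_diffset, empty_commit):
    changes = collections.OrderedDict()
    for commit in commits:
        parent = commit.parents[0] if commit.parents else empty_commit
        changes[commit.raw_id] = make_diffset(parent, commit)
    return changes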
237 @LoginRequired()
268 @LoginRequired()
238 @HasRepoPermissionAnyDecorator(
269 @HasRepoPermissionAnyDecorator(
239 'repository.read', 'repository.write', 'repository.admin')
270 'repository.read', 'repository.write', 'repository.admin')
240 @view_config(
271 @view_config(
241 route_name='pullrequest_show', request_method='GET',
272 route_name='pullrequest_show', request_method='GET',
242 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
273 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
243 def pull_request_show(self):
274 def pull_request_show(self):
244 pull_request_id = self.request.matchdict['pull_request_id']
275 pull_request_id = self.request.matchdict['pull_request_id']
245
276
246 c = self.load_default_context()
277 c = self.load_default_context()
247
278
248 version = self.request.GET.get('version')
279 version = self.request.GET.get('version')
249 from_version = self.request.GET.get('from_version') or version
280 from_version = self.request.GET.get('from_version') or version
250 merge_checks = self.request.GET.get('merge_checks')
281 merge_checks = self.request.GET.get('merge_checks')
251 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
282 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
252 force_refresh = str2bool(self.request.GET.get('force_refresh'))
283 force_refresh = str2bool(self.request.GET.get('force_refresh'))
253
284
254 (pull_request_latest,
285 (pull_request_latest,
255 pull_request_at_ver,
286 pull_request_at_ver,
256 pull_request_display_obj,
287 pull_request_display_obj,
257 at_version) = PullRequestModel().get_pr_version(
288 at_version) = PullRequestModel().get_pr_version(
258 pull_request_id, version=version)
289 pull_request_id, version=version)
259 pr_closed = pull_request_latest.is_closed()
290 pr_closed = pull_request_latest.is_closed()
260
291
261 if pr_closed and (version or from_version):
292 if pr_closed and (version or from_version):
262 # do not allow browsing versions
293 # do not allow browsing versions
263 raise HTTPFound(h.route_path(
294 raise HTTPFound(h.route_path(
264 'pullrequest_show', repo_name=self.db_repo_name,
295 'pullrequest_show', repo_name=self.db_repo_name,
265 pull_request_id=pull_request_id))
296 pull_request_id=pull_request_id))
266
297
267 versions = pull_request_display_obj.versions()
298 versions = pull_request_display_obj.versions()
299 # used to store per-commit range diffs
300 c.changes = collections.OrderedDict()
301 c.range_diff_on = self.request.GET.get('range-diff') == "1"
268
302
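# The three added lines above wire up the new per-commit "range diff" mode; it
# is driven purely by the query string. A minimal standalone check of the same
# condition:
def range_diff_requested(get_params):
    return get_params.get('range-diff') == "1"

# range_diff_requested({'range-diff': '1'}) -> True
# range_diff_requested({})                  -> False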
269 c.at_version = at_version
303 c.at_version = at_version
270 c.at_version_num = (at_version
304 c.at_version_num = (at_version
271 if at_version and at_version != 'latest'
305 if at_version and at_version != 'latest'
272 else None)
306 else None)
273 c.at_version_pos = ChangesetComment.get_index_from_version(
307 c.at_version_pos = ChangesetComment.get_index_from_version(
274 c.at_version_num, versions)
308 c.at_version_num, versions)
275
309
276 (prev_pull_request_latest,
310 (prev_pull_request_latest,
277 prev_pull_request_at_ver,
311 prev_pull_request_at_ver,
278 prev_pull_request_display_obj,
312 prev_pull_request_display_obj,
279 prev_at_version) = PullRequestModel().get_pr_version(
313 prev_at_version) = PullRequestModel().get_pr_version(
280 pull_request_id, version=from_version)
314 pull_request_id, version=from_version)
281
315
282 c.from_version = prev_at_version
316 c.from_version = prev_at_version
283 c.from_version_num = (prev_at_version
317 c.from_version_num = (prev_at_version
284 if prev_at_version and prev_at_version != 'latest'
318 if prev_at_version and prev_at_version != 'latest'
285 else None)
319 else None)
286 c.from_version_pos = ChangesetComment.get_index_from_version(
320 c.from_version_pos = ChangesetComment.get_index_from_version(
287 c.from_version_num, versions)
321 c.from_version_num, versions)
288
322
289 # define if we're in COMPARE mode or VIEW at version mode
323 # define if we're in COMPARE mode or VIEW at version mode
290 compare = at_version != prev_at_version
324 compare = at_version != prev_at_version
291
325
292 # the repo_name this pull request was opened against,
326 # the repo_name this pull request was opened against,
293 # i.e. target_repo must match
327 # i.e. target_repo must match
294 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
328 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
295 raise HTTPNotFound()
329 raise HTTPNotFound()
296
330
297 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
331 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
298 pull_request_at_ver)
332 pull_request_at_ver)
299
333
300 c.pull_request = pull_request_display_obj
334 c.pull_request = pull_request_display_obj
301 c.renderer = pull_request_at_ver.description_renderer or c.renderer
335 c.renderer = pull_request_at_ver.description_renderer or c.renderer
302 c.pull_request_latest = pull_request_latest
336 c.pull_request_latest = pull_request_latest
303
337
304 if compare or (at_version and not at_version == 'latest'):
338 if compare or (at_version and not at_version == 'latest'):
305 c.allowed_to_change_status = False
339 c.allowed_to_change_status = False
306 c.allowed_to_update = False
340 c.allowed_to_update = False
307 c.allowed_to_merge = False
341 c.allowed_to_merge = False
308 c.allowed_to_delete = False
342 c.allowed_to_delete = False
309 c.allowed_to_comment = False
343 c.allowed_to_comment = False
310 c.allowed_to_close = False
344 c.allowed_to_close = False
311 else:
345 else:
312 can_change_status = PullRequestModel().check_user_change_status(
346 can_change_status = PullRequestModel().check_user_change_status(
313 pull_request_at_ver, self._rhodecode_user)
347 pull_request_at_ver, self._rhodecode_user)
314 c.allowed_to_change_status = can_change_status and not pr_closed
348 c.allowed_to_change_status = can_change_status and not pr_closed
315
349
316 c.allowed_to_update = PullRequestModel().check_user_update(
350 c.allowed_to_update = PullRequestModel().check_user_update(
317 pull_request_latest, self._rhodecode_user) and not pr_closed
351 pull_request_latest, self._rhodecode_user) and not pr_closed
318 c.allowed_to_merge = PullRequestModel().check_user_merge(
352 c.allowed_to_merge = PullRequestModel().check_user_merge(
319 pull_request_latest, self._rhodecode_user) and not pr_closed
353 pull_request_latest, self._rhodecode_user) and not pr_closed
320 c.allowed_to_delete = PullRequestModel().check_user_delete(
354 c.allowed_to_delete = PullRequestModel().check_user_delete(
321 pull_request_latest, self._rhodecode_user) and not pr_closed
355 pull_request_latest, self._rhodecode_user) and not pr_closed
322 c.allowed_to_comment = not pr_closed
356 c.allowed_to_comment = not pr_closed
323 c.allowed_to_close = c.allowed_to_merge and not pr_closed
357 c.allowed_to_close = c.allowed_to_merge and not pr_closed
324
358
325 c.forbid_adding_reviewers = False
359 c.forbid_adding_reviewers = False
326 c.forbid_author_to_review = False
360 c.forbid_author_to_review = False
327 c.forbid_commit_author_to_review = False
361 c.forbid_commit_author_to_review = False
328
362
329 if pull_request_latest.reviewer_data and \
363 if pull_request_latest.reviewer_data and \
330 'rules' in pull_request_latest.reviewer_data:
364 'rules' in pull_request_latest.reviewer_data:
331 rules = pull_request_latest.reviewer_data['rules'] or {}
365 rules = pull_request_latest.reviewer_data['rules'] or {}
332 try:
366 try:
333 c.forbid_adding_reviewers = rules.get(
367 c.forbid_adding_reviewers = rules.get(
334 'forbid_adding_reviewers')
368 'forbid_adding_reviewers')
335 c.forbid_author_to_review = rules.get(
369 c.forbid_author_to_review = rules.get(
336 'forbid_author_to_review')
370 'forbid_author_to_review')
337 c.forbid_commit_author_to_review = rules.get(
371 c.forbid_commit_author_to_review = rules.get(
338 'forbid_commit_author_to_review')
372 'forbid_commit_author_to_review')
339 except Exception:
373 except Exception:
340 pass
374 pass
341
375
342 # check merge capabilities
376 # check merge capabilities
343 _merge_check = MergeCheck.validate(
377 _merge_check = MergeCheck.validate(
344 pull_request_latest, auth_user=self._rhodecode_user,
378 pull_request_latest, auth_user=self._rhodecode_user,
345 translator=self.request.translate,
379 translator=self.request.translate,
346 force_shadow_repo_refresh=force_refresh)
380 force_shadow_repo_refresh=force_refresh)
347 c.pr_merge_errors = _merge_check.error_details
381 c.pr_merge_errors = _merge_check.error_details
348 c.pr_merge_possible = not _merge_check.failed
382 c.pr_merge_possible = not _merge_check.failed
349 c.pr_merge_message = _merge_check.merge_msg
383 c.pr_merge_message = _merge_check.merge_msg
350
384
351 c.pr_merge_info = MergeCheck.get_merge_conditions(
385 c.pr_merge_info = MergeCheck.get_merge_conditions(
352 pull_request_latest, translator=self.request.translate)
386 pull_request_latest, translator=self.request.translate)
353
387
354 c.pull_request_review_status = _merge_check.review_status
388 c.pull_request_review_status = _merge_check.review_status
355 if merge_checks:
389 if merge_checks:
356 self.request.override_renderer = \
390 self.request.override_renderer = \
357 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
391 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
358 return self._get_template_context(c)
392 return self._get_template_context(c)
359
393
360 comments_model = CommentsModel()
394 comments_model = CommentsModel()
361
395
362 # reviewers and statuses
396 # reviewers and statuses
363 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
397 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
364 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
398 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
365
399
366 # GENERAL COMMENTS with versions #
400 # GENERAL COMMENTS with versions #
367 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
401 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
368 q = q.order_by(ChangesetComment.comment_id.asc())
402 q = q.order_by(ChangesetComment.comment_id.asc())
369 general_comments = q
403 general_comments = q
370
404
371 # pick comments we want to render at current version
405 # pick comments we want to render at current version
372 c.comment_versions = comments_model.aggregate_comments(
406 c.comment_versions = comments_model.aggregate_comments(
373 general_comments, versions, c.at_version_num)
407 general_comments, versions, c.at_version_num)
374 c.comments = c.comment_versions[c.at_version_num]['until']
408 c.comments = c.comment_versions[c.at_version_num]['until']
375
409
376 # INLINE COMMENTS with versions #
410 # INLINE COMMENTS with versions #
377 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
411 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
378 q = q.order_by(ChangesetComment.comment_id.asc())
412 q = q.order_by(ChangesetComment.comment_id.asc())
379 inline_comments = q
413 inline_comments = q
380
414
381 c.inline_versions = comments_model.aggregate_comments(
415 c.inline_versions = comments_model.aggregate_comments(
382 inline_comments, versions, c.at_version_num, inline=True)
416 inline_comments, versions, c.at_version_num, inline=True)
383
417
384 # inject latest version
418 # inject latest version
385 latest_ver = PullRequest.get_pr_display_object(
419 latest_ver = PullRequest.get_pr_display_object(
386 pull_request_latest, pull_request_latest)
420 pull_request_latest, pull_request_latest)
387
421
388 c.versions = versions + [latest_ver]
422 c.versions = versions + [latest_ver]
389
423
390 # if a specific version is shown, do not display comments
424 # if a specific version is shown, do not display comments
391 # newer than that version
425 # newer than that version
392 display_inline_comments = collections.defaultdict(
426 display_inline_comments = collections.defaultdict(
393 lambda: collections.defaultdict(list))
427 lambda: collections.defaultdict(list))
394 for co in inline_comments:
428 for co in inline_comments:
395 if c.at_version_num:
429 if c.at_version_num:
396 # pick comments created up to the given version, so we
430 # pick comments created up to the given version, so we
397 # don't render comments from a higher version
431 # don't render comments from a higher version
398 should_render = co.pull_request_version_id and \
432 should_render = co.pull_request_version_id and \
399 co.pull_request_version_id <= c.at_version_num
433 co.pull_request_version_id <= c.at_version_num
400 else:
434 else:
401 # showing all, for 'latest'
435 # showing all, for 'latest'
402 should_render = True
436 should_render = True
403
437
404 if should_render:
438 if should_render:
405 display_inline_comments[co.f_path][co.line_no].append(co)
439 display_inline_comments[co.f_path][co.line_no].append(co)
406
440
407 # load diff data into template context; if we use compare mode, the
441 # load diff data into template context; if we use compare mode, the
408 # diff is calculated based on changes between versions of the PR
442 # diff is calculated based on changes between versions of the PR
409
443
410 source_repo = pull_request_at_ver.source_repo
444 source_repo = pull_request_at_ver.source_repo
411 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
445 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
412
446
413 target_repo = pull_request_at_ver.target_repo
447 target_repo = pull_request_at_ver.target_repo
414 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
448 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
415
449
416 if compare:
450 if compare:
417 # in compare switch the diff base to latest commit from prev version
451 # in compare switch the diff base to latest commit from prev version
418 target_ref_id = prev_pull_request_display_obj.revisions[0]
452 target_ref_id = prev_pull_request_display_obj.revisions[0]
419
453
420 # even though PRs are opened against bookmarks/branches/tags, we always
454 # even though PRs are opened against bookmarks/branches/tags, we always
421 # convert the ref to a rev, so later bookmark or branch moves do not change the diff
455 # convert the ref to a rev, so later bookmark or branch moves do not change the diff
422 c.source_ref_type = 'rev'
456 c.source_ref_type = 'rev'
423 c.source_ref = source_ref_id
457 c.source_ref = source_ref_id
424
458
425 c.target_ref_type = 'rev'
459 c.target_ref_type = 'rev'
426 c.target_ref = target_ref_id
460 c.target_ref = target_ref_id
427
461
428 c.source_repo = source_repo
462 c.source_repo = source_repo
429 c.target_repo = target_repo
463 c.target_repo = target_repo
430
464
431 c.commit_ranges = []
465 c.commit_ranges = []
432 source_commit = EmptyCommit()
466 source_commit = EmptyCommit()
433 target_commit = EmptyCommit()
467 target_commit = EmptyCommit()
434 c.missing_requirements = False
468 c.missing_requirements = False
435
469
436 source_scm = source_repo.scm_instance()
470 source_scm = source_repo.scm_instance()
437 target_scm = target_repo.scm_instance()
471 target_scm = target_repo.scm_instance()
438
472
439 shadow_scm = None
473 shadow_scm = None
440 try:
474 try:
441 shadow_scm = pull_request_latest.get_shadow_repo()
475 shadow_scm = pull_request_latest.get_shadow_repo()
442 except Exception:
476 except Exception:
443 log.debug('Failed to get shadow repo', exc_info=True)
477 log.debug('Failed to get shadow repo', exc_info=True)
444 # try first the existing source_repo, and then shadow
478 # try first the existing source_repo, and then shadow
445 # repo if we can obtain one
479 # repo if we can obtain one
446 commits_source_repo = source_scm or shadow_scm
480 commits_source_repo = source_scm or shadow_scm
447
481
448 c.commits_source_repo = commits_source_repo
482 c.commits_source_repo = commits_source_repo
449 c.ancestor = None # set it to None, to hide it from PR view
483 c.ancestor = None # set it to None, to hide it from PR view
450
484
451 # empty version means latest, so we keep this to prevent
485 # empty version means latest, so we keep this to prevent
452 # double caching
486 # double caching
453 version_normalized = version or 'latest'
487 version_normalized = version or 'latest'
454 from_version_normalized = from_version or 'latest'
488 from_version_normalized = from_version or 'latest'
455
489
456 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
490 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo)
457 target_repo)
458 cache_file_path = diff_cache_exist(
491 cache_file_path = diff_cache_exist(
459 cache_path, 'pull_request', pull_request_id, version_normalized,
492 cache_path, 'pull_request', pull_request_id, version_normalized,
460 from_version_normalized, source_ref_id, target_ref_id, c.fulldiff)
493 from_version_normalized, source_ref_id, target_ref_id, c.fulldiff)
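# note: the whole-PR diff cache entry is keyed by the pull request id, the
# shown and compared-from versions, both ref commit ids and the fulldiff flag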
461
494
462 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
495 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
463 force_recache = str2bool(self.request.GET.get('force_recache'))
496 force_recache = self.get_recache_flag()
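# force_recache makes the view ignore any cached commit/diff data below
# and recompute it from the repositories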
464
497
465 cached_diff = None
498 cached_diff = None
466 if caching_enabled:
499 if caching_enabled:
467 cached_diff = load_cached_diff(cache_file_path)
500 cached_diff = load_cached_diff(cache_file_path)
468
501
469 has_proper_commit_cache = (
502 has_proper_commit_cache = (
470 cached_diff and cached_diff.get('commits')
503 cached_diff and cached_diff.get('commits')
471 and len(cached_diff.get('commits', [])) == 5
504 and len(cached_diff.get('commits', [])) == 5
472 and cached_diff.get('commits')[0]
505 and cached_diff.get('commits')[0]
473 and cached_diff.get('commits')[3])
506 and cached_diff.get('commits')[3])
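# the cached 'commits' entry is the 5-tuple (ancestor_commit, commit_cache,
# missing_requirements, source_commit, target_commit); the [0] and [3] checks
# guard against empty ancestor/source entries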
474 if not force_recache and has_proper_commit_cache:
507
508 if not force_recache and not c.range_diff_on and has_proper_commit_cache:
475 diff_commit_cache = \
509 diff_commit_cache = \
476 (ancestor_commit, commit_cache, missing_requirements,
510 (ancestor_commit, commit_cache, missing_requirements,
477 source_commit, target_commit) = cached_diff['commits']
511 source_commit, target_commit) = cached_diff['commits']
478 else:
512 else:
479 diff_commit_cache = \
513 diff_commit_cache = \
480 (ancestor_commit, commit_cache, missing_requirements,
514 (ancestor_commit, commit_cache, missing_requirements,
481 source_commit, target_commit) = self.get_commits(
515 source_commit, target_commit) = self.get_commits(
482 commits_source_repo,
516 commits_source_repo,
483 pull_request_at_ver,
517 pull_request_at_ver,
484 source_commit,
518 source_commit,
485 source_ref_id,
519 source_ref_id,
486 source_scm,
520 source_scm,
487 target_commit,
521 target_commit,
488 target_ref_id,
522 target_ref_id,
489 target_scm)
523 target_scm)
490
524
491 # register our commit range
525 # register our commit range
492 for comm in commit_cache.values():
526 for comm in commit_cache.values():
493 c.commit_ranges.append(comm)
527 c.commit_ranges.append(comm)
494
528
495 c.missing_requirements = missing_requirements
529 c.missing_requirements = missing_requirements
496 c.ancestor_commit = ancestor_commit
530 c.ancestor_commit = ancestor_commit
497 c.statuses = source_repo.statuses(
531 c.statuses = source_repo.statuses(
498 [x.raw_id for x in c.commit_ranges])
532 [x.raw_id for x in c.commit_ranges])
499
533
500 # auto collapse if we have more than limit
534 # auto collapse if we have more than limit
501 collapse_limit = diffs.DiffProcessor._collapse_commits_over
535 collapse_limit = diffs.DiffProcessor._collapse_commits_over
502 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
536 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
503 c.compare_mode = compare
537 c.compare_mode = compare
504
538
505 # diff_limit is the old behavior: when hit, it cuts off the whole diff;
539 # diff_limit is the old behavior: when hit, it cuts off the whole diff;
506 # the file limit instead only hides the
540 # the file limit instead only hides the
507 # big files from the front-end
541 # big files from the front-end
508 diff_limit = c.visual.cut_off_limit_diff
542 diff_limit = c.visual.cut_off_limit_diff
509 file_limit = c.visual.cut_off_limit_file
543 file_limit = c.visual.cut_off_limit_file
510
544
511 c.missing_commits = False
545 c.missing_commits = False
512 if (c.missing_requirements
546 if (c.missing_requirements
513 or isinstance(source_commit, EmptyCommit)
547 or isinstance(source_commit, EmptyCommit)
514 or source_commit == target_commit):
548 or source_commit == target_commit):
515
549
516 c.missing_commits = True
550 c.missing_commits = True
517 else:
551 else:
518 c.inline_comments = display_inline_comments
552 c.inline_comments = display_inline_comments
519
553
520 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
554 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
521 if not force_recache and has_proper_diff_cache:
555 if not force_recache and has_proper_diff_cache:
522 c.diffset = cached_diff['diff']
556 c.diffset = cached_diff['diff']
523 (ancestor_commit, commit_cache, missing_requirements,
557 (ancestor_commit, commit_cache, missing_requirements,
524 source_commit, target_commit) = cached_diff['commits']
558 source_commit, target_commit) = cached_diff['commits']
525 else:
559 else:
526 c.diffset = self._get_diffset(
560 c.diffset = self._get_diffset(
527 c.source_repo.repo_name, commits_source_repo,
561 c.source_repo.repo_name, commits_source_repo,
528 source_ref_id, target_ref_id,
562 source_ref_id, target_ref_id,
529 target_commit, source_commit,
563 target_commit, source_commit,
530 diff_limit, file_limit, c.fulldiff)
564 diff_limit, file_limit, c.fulldiff)
531
565
532 # save cached diff
566 # save cached diff
533 if caching_enabled:
567 if caching_enabled:
534 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
568 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
535
569
536 c.limited_diff = c.diffset.limited_diff
570 c.limited_diff = c.diffset.limited_diff
537
571
538 # calculate removed files that are bound to comments
572 # calculate removed files that are bound to comments
539 comment_deleted_files = [
573 comment_deleted_files = [
540 fname for fname in display_inline_comments
574 fname for fname in display_inline_comments
541 if fname not in c.diffset.file_stats]
575 if fname not in c.diffset.file_stats]
542
576
543 c.deleted_files_comments = collections.defaultdict(dict)
577 c.deleted_files_comments = collections.defaultdict(dict)
544 for fname, per_line_comments in display_inline_comments.items():
578 for fname, per_line_comments in display_inline_comments.items():
545 if fname in comment_deleted_files:
579 if fname in comment_deleted_files:
546 c.deleted_files_comments[fname]['stats'] = 0
580 c.deleted_files_comments[fname]['stats'] = 0
547 c.deleted_files_comments[fname]['comments'] = list()
581 c.deleted_files_comments[fname]['comments'] = list()
548 for lno, comments in per_line_comments.items():
582 for lno, comments in per_line_comments.items():
549 c.deleted_files_comments[fname]['comments'].extend(
583 c.deleted_files_comments[fname]['comments'].extend(comments)
550 comments)
584
585 # maybe calculate the range diff
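# in range-diff mode each commit of the PR is diffed against its first
# parent and cached per commit (keyed by commit id, whitespace and context
# settings), instead of rendering one single target..source diff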
586 if c.range_diff_on:
587 # TODO(marcink): set whitespace/context
588 context_lcl = 3
589 ign_whitespace_lcl = False
590
591 for commit in c.commit_ranges:
592 commit2 = commit
593 commit1 = commit.first_parent
594
595 range_diff_cache_file_path = diff_cache_exist(
596 cache_path, 'diff', commit.raw_id,
597 ign_whitespace_lcl, context_lcl, c.fulldiff)
598
599 cached_diff = None
600 if caching_enabled:
601 cached_diff = load_cached_diff(range_diff_cache_file_path)
602
603 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
604 if not force_recache and has_proper_diff_cache:
605 diffset = cached_diff['diff']
606 else:
607 diffset = self._get_range_diffset(
608 source_scm, source_repo,
609 commit1, commit2, diff_limit, file_limit,
610 c.fulldiff, ign_whitespace_lcl, context_lcl
611 )
612
613 # save cached diff
614 if caching_enabled:
615 cache_diff(range_diff_cache_file_path, diffset, None)
616
617 c.changes[commit.raw_id] = diffset
551
618
552 # this is a hack to properly display links: when creating a PR, the
619 # this is a hack to properly display links: when creating a PR, the
553 # compare view and others use a different notation, and
620 # compare view and others use a different notation, and
554 # compare_commits.mako renders links based on the target_repo.
621 # compare_commits.mako renders links based on the target_repo.
555 # We need to swap that here to generate it properly on the html side
622 # We need to swap that here to generate it properly on the html side
556 c.target_repo = c.source_repo
623 c.target_repo = c.source_repo
557
624
558 c.commit_statuses = ChangesetStatus.STATUSES
625 c.commit_statuses = ChangesetStatus.STATUSES
559
626
560 c.show_version_changes = not pr_closed
627 c.show_version_changes = not pr_closed
561 if c.show_version_changes:
628 if c.show_version_changes:
562 cur_obj = pull_request_at_ver
629 cur_obj = pull_request_at_ver
563 prev_obj = prev_pull_request_at_ver
630 prev_obj = prev_pull_request_at_ver
564
631
565 old_commit_ids = prev_obj.revisions
632 old_commit_ids = prev_obj.revisions
566 new_commit_ids = cur_obj.revisions
633 new_commit_ids = cur_obj.revisions
567 commit_changes = PullRequestModel()._calculate_commit_id_changes(
634 commit_changes = PullRequestModel()._calculate_commit_id_changes(
568 old_commit_ids, new_commit_ids)
635 old_commit_ids, new_commit_ids)
569 c.commit_changes_summary = commit_changes
636 c.commit_changes_summary = commit_changes
570
637
571 # calculate the diff for commits between versions
638 # calculate the diff for commits between versions
572 c.commit_changes = []
639 c.commit_changes = []
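# mark() tags every commit id with a change-type flag ('a' added,
# 'r' removed, 'c' common) by zipping the ids against an empty list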
573 mark = lambda cs, fw: list(
640 mark = lambda cs, fw: list(
574 h.itertools.izip_longest([], cs, fillvalue=fw))
641 h.itertools.izip_longest([], cs, fillvalue=fw))
575 for c_type, raw_id in mark(commit_changes.added, 'a') \
642 for c_type, raw_id in mark(commit_changes.added, 'a') \
576 + mark(commit_changes.removed, 'r') \
643 + mark(commit_changes.removed, 'r') \
577 + mark(commit_changes.common, 'c'):
644 + mark(commit_changes.common, 'c'):
578
645
579 if raw_id in commit_cache:
646 if raw_id in commit_cache:
580 commit = commit_cache[raw_id]
647 commit = commit_cache[raw_id]
581 else:
648 else:
582 try:
649 try:
583 commit = commits_source_repo.get_commit(raw_id)
650 commit = commits_source_repo.get_commit(raw_id)
584 except CommitDoesNotExistError:
651 except CommitDoesNotExistError:
585 # if extracting the commit fails, still use a "dummy" commit
652 # if extracting the commit fails, still use a "dummy" commit
586 # for display in the commit diff
653 # for display in the commit diff
587 commit = h.AttributeDict(
654 commit = h.AttributeDict(
588 {'raw_id': raw_id,
655 {'raw_id': raw_id,
589 'message': 'EMPTY or MISSING COMMIT'})
656 'message': 'EMPTY or MISSING COMMIT'})
590 c.commit_changes.append([c_type, commit])
657 c.commit_changes.append([c_type, commit])
591
658
592 # current user review statuses for each version
659 # current user review statuses for each version
593 c.review_versions = {}
660 c.review_versions = {}
594 if self._rhodecode_user.user_id in allowed_reviewers:
661 if self._rhodecode_user.user_id in allowed_reviewers:
595 for co in general_comments:
662 for co in general_comments:
596 if co.author.user_id == self._rhodecode_user.user_id:
663 if co.author.user_id == self._rhodecode_user.user_id:
597 status = co.status_change
664 status = co.status_change
598 if status:
665 if status:
599 _ver_pr = status[0].comment.pull_request_version_id
666 _ver_pr = status[0].comment.pull_request_version_id
600 c.review_versions[_ver_pr] = status[0]
667 c.review_versions[_ver_pr] = status[0]
601
668
602 return self._get_template_context(c)
669 return self._get_template_context(c)
603
670
604 def get_commits(
671 def get_commits(
605 self, commits_source_repo, pull_request_at_ver, source_commit,
672 self, commits_source_repo, pull_request_at_ver, source_commit,
606 source_ref_id, source_scm, target_commit, target_ref_id, target_scm):
673 source_ref_id, source_scm, target_commit, target_ref_id, target_scm):
607 commit_cache = collections.OrderedDict()
674 commit_cache = collections.OrderedDict()
608 missing_requirements = False
675 missing_requirements = False
609 try:
676 try:
610 pre_load = ["author", "branch", "date", "message"]
677 pre_load = ["author", "branch", "date", "message", "parents"]
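# note: 'parents' is pre-loaded here, presumably so range-diff mode can
# read commit.first_parent without extra VCS round-trips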
611 show_revs = pull_request_at_ver.revisions
678 show_revs = pull_request_at_ver.revisions
612 for rev in show_revs:
679 for rev in show_revs:
613 comm = commits_source_repo.get_commit(
680 comm = commits_source_repo.get_commit(
614 commit_id=rev, pre_load=pre_load)
681 commit_id=rev, pre_load=pre_load)
615 commit_cache[comm.raw_id] = comm
682 commit_cache[comm.raw_id] = comm
616
683
617 # Order here matters, we first need to get target, and then
684 # Order here matters, we first need to get target, and then
618 # the source
685 # the source
619 target_commit = commits_source_repo.get_commit(
686 target_commit = commits_source_repo.get_commit(
620 commit_id=safe_str(target_ref_id))
687 commit_id=safe_str(target_ref_id))
621
688
622 source_commit = commits_source_repo.get_commit(
689 source_commit = commits_source_repo.get_commit(
623 commit_id=safe_str(source_ref_id))
690 commit_id=safe_str(source_ref_id))
624 except CommitDoesNotExistError:
691 except CommitDoesNotExistError:
625 log.warning(
692 log.warning(
626 'Failed to get commit from `{}` repo'.format(
693 'Failed to get commit from `{}` repo'.format(
627 commits_source_repo), exc_info=True)
694 commits_source_repo), exc_info=True)
628 except RepositoryRequirementError:
695 except RepositoryRequirementError:
629 log.warning(
696 log.warning(
630 'Failed to get all required data from repo', exc_info=True)
697 'Failed to get all required data from repo', exc_info=True)
631 missing_requirements = True
698 missing_requirements = True
632 ancestor_commit = None
699 ancestor_commit = None
633 try:
700 try:
634 ancestor_id = source_scm.get_common_ancestor(
701 ancestor_id = source_scm.get_common_ancestor(
635 source_commit.raw_id, target_commit.raw_id, target_scm)
702 source_commit.raw_id, target_commit.raw_id, target_scm)
636 ancestor_commit = source_scm.get_commit(ancestor_id)
703 ancestor_commit = source_scm.get_commit(ancestor_id)
637 except Exception:
704 except Exception:
638 ancestor_commit = None
705 ancestor_commit = None
639 return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
706 return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
640
707
641 def assure_not_empty_repo(self):
708 def assure_not_empty_repo(self):
642 _ = self.request.translate
709 _ = self.request.translate
643
710
644 try:
711 try:
645 self.db_repo.scm_instance().get_commit()
712 self.db_repo.scm_instance().get_commit()
646 except EmptyRepositoryError:
713 except EmptyRepositoryError:
647 h.flash(h.literal(_('There are no commits yet')),
714 h.flash(h.literal(_('There are no commits yet')),
648 category='warning')
715 category='warning')
649 raise HTTPFound(
716 raise HTTPFound(
650 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
717 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
651
718
652 @LoginRequired()
719 @LoginRequired()
653 @NotAnonymous()
720 @NotAnonymous()
654 @HasRepoPermissionAnyDecorator(
721 @HasRepoPermissionAnyDecorator(
655 'repository.read', 'repository.write', 'repository.admin')
722 'repository.read', 'repository.write', 'repository.admin')
656 @view_config(
723 @view_config(
657 route_name='pullrequest_new', request_method='GET',
724 route_name='pullrequest_new', request_method='GET',
658 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
725 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
659 def pull_request_new(self):
726 def pull_request_new(self):
660 _ = self.request.translate
727 _ = self.request.translate
661 c = self.load_default_context()
728 c = self.load_default_context()
662
729
663 self.assure_not_empty_repo()
730 self.assure_not_empty_repo()
664 source_repo = self.db_repo
731 source_repo = self.db_repo
665
732
666 commit_id = self.request.GET.get('commit')
733 commit_id = self.request.GET.get('commit')
667 branch_ref = self.request.GET.get('branch')
734 branch_ref = self.request.GET.get('branch')
668 bookmark_ref = self.request.GET.get('bookmark')
735 bookmark_ref = self.request.GET.get('bookmark')
669
736
670 try:
737 try:
671 source_repo_data = PullRequestModel().generate_repo_data(
738 source_repo_data = PullRequestModel().generate_repo_data(
672 source_repo, commit_id=commit_id,
739 source_repo, commit_id=commit_id,
673 branch=branch_ref, bookmark=bookmark_ref,
740 branch=branch_ref, bookmark=bookmark_ref,
674 translator=self.request.translate)
741 translator=self.request.translate)
675 except CommitDoesNotExistError as e:
742 except CommitDoesNotExistError as e:
676 log.exception(e)
743 log.exception(e)
677 h.flash(_('Commit does not exist'), 'error')
744 h.flash(_('Commit does not exist'), 'error')
678 raise HTTPFound(
745 raise HTTPFound(
679 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
746 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
680
747
681 default_target_repo = source_repo
748 default_target_repo = source_repo
682
749
683 if source_repo.parent:
750 if source_repo.parent:
684 parent_vcs_obj = source_repo.parent.scm_instance()
751 parent_vcs_obj = source_repo.parent.scm_instance()
685 if parent_vcs_obj and not parent_vcs_obj.is_empty():
752 if parent_vcs_obj and not parent_vcs_obj.is_empty():
686 # change default if we have a parent repo
753 # change default if we have a parent repo
687 default_target_repo = source_repo.parent
754 default_target_repo = source_repo.parent
688
755
689 target_repo_data = PullRequestModel().generate_repo_data(
756 target_repo_data = PullRequestModel().generate_repo_data(
690 default_target_repo, translator=self.request.translate)
757 default_target_repo, translator=self.request.translate)
691
758
692 selected_source_ref = source_repo_data['refs']['selected_ref']
759 selected_source_ref = source_repo_data['refs']['selected_ref']
693 title_source_ref = ''
760 title_source_ref = ''
694 if selected_source_ref:
761 if selected_source_ref:
695 title_source_ref = selected_source_ref.split(':', 2)[1]
762 title_source_ref = selected_source_ref.split(':', 2)[1]
696 c.default_title = PullRequestModel().generate_pullrequest_title(
763 c.default_title = PullRequestModel().generate_pullrequest_title(
697 source=source_repo.repo_name,
764 source=source_repo.repo_name,
698 source_ref=title_source_ref,
765 source_ref=title_source_ref,
699 target=default_target_repo.repo_name
766 target=default_target_repo.repo_name
700 )
767 )
701
768
702 c.default_repo_data = {
769 c.default_repo_data = {
703 'source_repo_name': source_repo.repo_name,
770 'source_repo_name': source_repo.repo_name,
704 'source_refs_json': json.dumps(source_repo_data),
771 'source_refs_json': json.dumps(source_repo_data),
705 'target_repo_name': default_target_repo.repo_name,
772 'target_repo_name': default_target_repo.repo_name,
706 'target_refs_json': json.dumps(target_repo_data),
773 'target_refs_json': json.dumps(target_repo_data),
707 }
774 }
708 c.default_source_ref = selected_source_ref
775 c.default_source_ref = selected_source_ref
709
776
710 return self._get_template_context(c)
777 return self._get_template_context(c)
711
778
712 @LoginRequired()
779 @LoginRequired()
713 @NotAnonymous()
780 @NotAnonymous()
714 @HasRepoPermissionAnyDecorator(
781 @HasRepoPermissionAnyDecorator(
715 'repository.read', 'repository.write', 'repository.admin')
782 'repository.read', 'repository.write', 'repository.admin')
716 @view_config(
783 @view_config(
717 route_name='pullrequest_repo_refs', request_method='GET',
784 route_name='pullrequest_repo_refs', request_method='GET',
718 renderer='json_ext', xhr=True)
785 renderer='json_ext', xhr=True)
719 def pull_request_repo_refs(self):
786 def pull_request_repo_refs(self):
720 self.load_default_context()
787 self.load_default_context()
721 target_repo_name = self.request.matchdict['target_repo_name']
788 target_repo_name = self.request.matchdict['target_repo_name']
722 repo = Repository.get_by_repo_name(target_repo_name)
789 repo = Repository.get_by_repo_name(target_repo_name)
723 if not repo:
790 if not repo:
724 raise HTTPNotFound()
791 raise HTTPNotFound()
725
792
726 target_perm = HasRepoPermissionAny(
793 target_perm = HasRepoPermissionAny(
727 'repository.read', 'repository.write', 'repository.admin')(
794 'repository.read', 'repository.write', 'repository.admin')(
728 target_repo_name)
795 target_repo_name)
729 if not target_perm:
796 if not target_perm:
730 raise HTTPNotFound()
797 raise HTTPNotFound()
731
798
732 return PullRequestModel().generate_repo_data(
799 return PullRequestModel().generate_repo_data(
733 repo, translator=self.request.translate)
800 repo, translator=self.request.translate)
734
801
735 @LoginRequired()
802 @LoginRequired()
736 @NotAnonymous()
803 @NotAnonymous()
737 @HasRepoPermissionAnyDecorator(
804 @HasRepoPermissionAnyDecorator(
738 'repository.read', 'repository.write', 'repository.admin')
805 'repository.read', 'repository.write', 'repository.admin')
739 @view_config(
806 @view_config(
740 route_name='pullrequest_repo_destinations', request_method='GET',
807 route_name='pullrequest_repo_destinations', request_method='GET',
741 renderer='json_ext', xhr=True)
808 renderer='json_ext', xhr=True)
742 def pull_request_repo_destinations(self):
809 def pull_request_repo_destinations(self):
743 _ = self.request.translate
810 _ = self.request.translate
744 filter_query = self.request.GET.get('query')
811 filter_query = self.request.GET.get('query')
745
812
746 query = Repository.query() \
813 query = Repository.query() \
747 .order_by(func.length(Repository.repo_name)) \
814 .order_by(func.length(Repository.repo_name)) \
748 .filter(
815 .filter(
749 or_(Repository.repo_name == self.db_repo.repo_name,
816 or_(Repository.repo_name == self.db_repo.repo_name,
750 Repository.fork_id == self.db_repo.repo_id))
817 Repository.fork_id == self.db_repo.repo_id))
751
818
752 if filter_query:
819 if filter_query:
753 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
820 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
754 query = query.filter(
821 query = query.filter(
755 Repository.repo_name.ilike(ilike_expression))
822 Repository.repo_name.ilike(ilike_expression))
756
823
757 add_parent = False
824 add_parent = False
758 if self.db_repo.parent:
825 if self.db_repo.parent:
759 if filter_query in self.db_repo.parent.repo_name:
826 if filter_query in self.db_repo.parent.repo_name:
760 parent_vcs_obj = self.db_repo.parent.scm_instance()
827 parent_vcs_obj = self.db_repo.parent.scm_instance()
761 if parent_vcs_obj and not parent_vcs_obj.is_empty():
828 if parent_vcs_obj and not parent_vcs_obj.is_empty():
762 add_parent = True
829 add_parent = True
763
830
764 limit = 20 - 1 if add_parent else 20
831 limit = 20 - 1 if add_parent else 20
765 all_repos = query.limit(limit).all()
832 all_repos = query.limit(limit).all()
766 if add_parent:
833 if add_parent:
767 all_repos += [self.db_repo.parent]
834 all_repos += [self.db_repo.parent]
768
835
769 repos = []
836 repos = []
770 for obj in ScmModel().get_repos(all_repos):
837 for obj in ScmModel().get_repos(all_repos):
771 repos.append({
838 repos.append({
772 'id': obj['name'],
839 'id': obj['name'],
773 'text': obj['name'],
840 'text': obj['name'],
774 'type': 'repo',
841 'type': 'repo',
775 'repo_id': obj['dbrepo']['repo_id'],
842 'repo_id': obj['dbrepo']['repo_id'],
776 'repo_type': obj['dbrepo']['repo_type'],
843 'repo_type': obj['dbrepo']['repo_type'],
777 'private': obj['dbrepo']['private'],
844 'private': obj['dbrepo']['private'],
778
845
779 })
846 })
780
847
781 data = {
848 data = {
782 'more': False,
849 'more': False,
783 'results': [{
850 'results': [{
784 'text': _('Repositories'),
851 'text': _('Repositories'),
785 'children': repos
852 'children': repos
786 }] if repos else []
853 }] if repos else []
787 }
854 }
788 return data
855 return data
789
856
790 @LoginRequired()
857 @LoginRequired()
791 @NotAnonymous()
858 @NotAnonymous()
792 @HasRepoPermissionAnyDecorator(
859 @HasRepoPermissionAnyDecorator(
793 'repository.read', 'repository.write', 'repository.admin')
860 'repository.read', 'repository.write', 'repository.admin')
794 @CSRFRequired()
861 @CSRFRequired()
795 @view_config(
862 @view_config(
796 route_name='pullrequest_create', request_method='POST',
863 route_name='pullrequest_create', request_method='POST',
797 renderer=None)
864 renderer=None)
798 def pull_request_create(self):
865 def pull_request_create(self):
799 _ = self.request.translate
866 _ = self.request.translate
800 self.assure_not_empty_repo()
867 self.assure_not_empty_repo()
801 self.load_default_context()
868 self.load_default_context()
802
869
803 controls = peppercorn.parse(self.request.POST.items())
870 controls = peppercorn.parse(self.request.POST.items())
804
871
805 try:
872 try:
806 form = PullRequestForm(
873 form = PullRequestForm(
807 self.request.translate, self.db_repo.repo_id)()
874 self.request.translate, self.db_repo.repo_id)()
808 _form = form.to_python(controls)
875 _form = form.to_python(controls)
809 except formencode.Invalid as errors:
876 except formencode.Invalid as errors:
810 if errors.error_dict.get('revisions'):
877 if errors.error_dict.get('revisions'):
811 msg = 'Revisions: %s' % errors.error_dict['revisions']
878 msg = 'Revisions: %s' % errors.error_dict['revisions']
812 elif errors.error_dict.get('pullrequest_title'):
879 elif errors.error_dict.get('pullrequest_title'):
813 msg = errors.error_dict.get('pullrequest_title')
880 msg = errors.error_dict.get('pullrequest_title')
814 else:
881 else:
815 msg = _('Error creating pull request: {}').format(errors)
882 msg = _('Error creating pull request: {}').format(errors)
816 log.exception(msg)
883 log.exception(msg)
817 h.flash(msg, 'error')
884 h.flash(msg, 'error')
818
885
819 # would rather just go back to form ...
886 # would rather just go back to form ...
820 raise HTTPFound(
887 raise HTTPFound(
821 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
888 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
822
889
823 source_repo = _form['source_repo']
890 source_repo = _form['source_repo']
824 source_ref = _form['source_ref']
891 source_ref = _form['source_ref']
825 target_repo = _form['target_repo']
892 target_repo = _form['target_repo']
826 target_ref = _form['target_ref']
893 target_ref = _form['target_ref']
827 commit_ids = _form['revisions'][::-1]
894 commit_ids = _form['revisions'][::-1]
828
895
829 # find the ancestor for this pr
896 # find the ancestor for this pr
830 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
897 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
831 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
898 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
832
899
833 # re-check permissions again here
900 # re-check permissions again here
834 # source_repo we must have read permissions
901 # source_repo we must have read permissions
835
902
836 source_perm = HasRepoPermissionAny(
903 source_perm = HasRepoPermissionAny(
837 'repository.read',
904 'repository.read',
838 'repository.write', 'repository.admin')(source_db_repo.repo_name)
905 'repository.write', 'repository.admin')(source_db_repo.repo_name)
839 if not source_perm:
906 if not source_perm:
840 msg = _('Not enough permissions to source repo `{}`.').format(
907 msg = _('Not enough permissions to source repo `{}`.').format(
841 source_db_repo.repo_name)
908 source_db_repo.repo_name)
842 h.flash(msg, category='error')
909 h.flash(msg, category='error')
843 # copy the args back to redirect
910 # copy the args back to redirect
844 org_query = self.request.GET.mixed()
911 org_query = self.request.GET.mixed()
845 raise HTTPFound(
912 raise HTTPFound(
846 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
913 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
847 _query=org_query))
914 _query=org_query))
848
915
849 # target repo we must have read permissions, and also later on
916 # target repo we must have read permissions, and also later on
850 # we want to check branch permissions here
917 # we want to check branch permissions here
851 target_perm = HasRepoPermissionAny(
918 target_perm = HasRepoPermissionAny(
852 'repository.read',
919 'repository.read',
853 'repository.write', 'repository.admin')(target_db_repo.repo_name)
920 'repository.write', 'repository.admin')(target_db_repo.repo_name)
854 if not target_perm:
921 if not target_perm:
855 msg = _('Not enough permissions to target repo `{}`.').format(
922 msg = _('Not enough permissions to target repo `{}`.').format(
856 target_db_repo.repo_name)
923 target_db_repo.repo_name)
857 h.flash(msg, category='error')
924 h.flash(msg, category='error')
858 # copy the args back to redirect
925 # copy the args back to redirect
859 org_query = self.request.GET.mixed()
926 org_query = self.request.GET.mixed()
860 raise HTTPFound(
927 raise HTTPFound(
861 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
928 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
862 _query=org_query))
929 _query=org_query))
863
930
864 source_scm = source_db_repo.scm_instance()
931 source_scm = source_db_repo.scm_instance()
865 target_scm = target_db_repo.scm_instance()
932 target_scm = target_db_repo.scm_instance()
866
933
867 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
934 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
868 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
935 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
869
936
870 ancestor = source_scm.get_common_ancestor(
937 ancestor = source_scm.get_common_ancestor(
871 source_commit.raw_id, target_commit.raw_id, target_scm)
938 source_commit.raw_id, target_commit.raw_id, target_scm)
872
939
873 # recalculate target ref based on ancestor
940 # recalculate target ref based on ancestor
874 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
941 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
875 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
942 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
876
943
877 get_default_reviewers_data, validate_default_reviewers = \
944 get_default_reviewers_data, validate_default_reviewers = \
878 PullRequestModel().get_reviewer_functions()
945 PullRequestModel().get_reviewer_functions()
879
946
880 # recalculate reviewers logic, to make sure we can validate this
947 # recalculate reviewers logic, to make sure we can validate this
881 reviewer_rules = get_default_reviewers_data(
948 reviewer_rules = get_default_reviewers_data(
882 self._rhodecode_db_user, source_db_repo,
949 self._rhodecode_db_user, source_db_repo,
883 source_commit, target_db_repo, target_commit)
950 source_commit, target_db_repo, target_commit)
884
951
885 given_reviewers = _form['review_members']
952 given_reviewers = _form['review_members']
886 reviewers = validate_default_reviewers(
953 reviewers = validate_default_reviewers(
887 given_reviewers, reviewer_rules)
954 given_reviewers, reviewer_rules)
888
955
889 pullrequest_title = _form['pullrequest_title']
956 pullrequest_title = _form['pullrequest_title']
890 title_source_ref = source_ref.split(':', 2)[1]
957 title_source_ref = source_ref.split(':', 2)[1]
891 if not pullrequest_title:
958 if not pullrequest_title:
892 pullrequest_title = PullRequestModel().generate_pullrequest_title(
959 pullrequest_title = PullRequestModel().generate_pullrequest_title(
893 source=source_repo,
960 source=source_repo,
894 source_ref=title_source_ref,
961 source_ref=title_source_ref,
895 target=target_repo
962 target=target_repo
896 )
963 )
897
964
898 description = _form['pullrequest_desc']
965 description = _form['pullrequest_desc']
899 description_renderer = _form['description_renderer']
966 description_renderer = _form['description_renderer']
900
967
901 try:
968 try:
902 pull_request = PullRequestModel().create(
969 pull_request = PullRequestModel().create(
903 created_by=self._rhodecode_user.user_id,
970 created_by=self._rhodecode_user.user_id,
904 source_repo=source_repo,
971 source_repo=source_repo,
905 source_ref=source_ref,
972 source_ref=source_ref,
906 target_repo=target_repo,
973 target_repo=target_repo,
907 target_ref=target_ref,
974 target_ref=target_ref,
908 revisions=commit_ids,
975 revisions=commit_ids,
909 reviewers=reviewers,
976 reviewers=reviewers,
910 title=pullrequest_title,
977 title=pullrequest_title,
911 description=description,
978 description=description,
912 description_renderer=description_renderer,
979 description_renderer=description_renderer,
913 reviewer_data=reviewer_rules,
980 reviewer_data=reviewer_rules,
914 auth_user=self._rhodecode_user
981 auth_user=self._rhodecode_user
915 )
982 )
916 Session().commit()
983 Session().commit()
917
984
918 h.flash(_('Successfully opened new pull request'),
985 h.flash(_('Successfully opened new pull request'),
919 category='success')
986 category='success')
920 except Exception:
987 except Exception:
921 msg = _('Error occurred during creation of this pull request.')
988 msg = _('Error occurred during creation of this pull request.')
922 log.exception(msg)
989 log.exception(msg)
923 h.flash(msg, category='error')
990 h.flash(msg, category='error')
924
991
925 # copy the args back to redirect
992 # copy the args back to redirect
926 org_query = self.request.GET.mixed()
993 org_query = self.request.GET.mixed()
927 raise HTTPFound(
994 raise HTTPFound(
928 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
995 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
929 _query=org_query))
996 _query=org_query))
930
997
931 raise HTTPFound(
998 raise HTTPFound(
932 h.route_path('pullrequest_show', repo_name=target_repo,
999 h.route_path('pullrequest_show', repo_name=target_repo,
933 pull_request_id=pull_request.pull_request_id))
1000 pull_request_id=pull_request.pull_request_id))
934
1001
935 @LoginRequired()
1002 @LoginRequired()
936 @NotAnonymous()
1003 @NotAnonymous()
937 @HasRepoPermissionAnyDecorator(
1004 @HasRepoPermissionAnyDecorator(
938 'repository.read', 'repository.write', 'repository.admin')
1005 'repository.read', 'repository.write', 'repository.admin')
939 @CSRFRequired()
1006 @CSRFRequired()
940 @view_config(
1007 @view_config(
941 route_name='pullrequest_update', request_method='POST',
1008 route_name='pullrequest_update', request_method='POST',
942 renderer='json_ext')
1009 renderer='json_ext')
943 def pull_request_update(self):
1010 def pull_request_update(self):
944 pull_request = PullRequest.get_or_404(
1011 pull_request = PullRequest.get_or_404(
945 self.request.matchdict['pull_request_id'])
1012 self.request.matchdict['pull_request_id'])
946 _ = self.request.translate
1013 _ = self.request.translate
947
1014
948 self.load_default_context()
1015 self.load_default_context()
949
1016
950 if pull_request.is_closed():
1017 if pull_request.is_closed():
951 log.debug('update: forbidden because pull request is closed')
1018 log.debug('update: forbidden because pull request is closed')
952 msg = _(u'Cannot update closed pull requests.')
1019 msg = _(u'Cannot update closed pull requests.')
953 h.flash(msg, category='error')
1020 h.flash(msg, category='error')
954 return True
1021 return True
955
1022
956 # only owner or admin can update it
1023 # only owner or admin can update it
957 allowed_to_update = PullRequestModel().check_user_update(
1024 allowed_to_update = PullRequestModel().check_user_update(
958 pull_request, self._rhodecode_user)
1025 pull_request, self._rhodecode_user)
959 if allowed_to_update:
1026 if allowed_to_update:
960 controls = peppercorn.parse(self.request.POST.items())
1027 controls = peppercorn.parse(self.request.POST.items())
961
1028
962 if 'review_members' in controls:
1029 if 'review_members' in controls:
963 self._update_reviewers(
1030 self._update_reviewers(
964 pull_request, controls['review_members'],
1031 pull_request, controls['review_members'],
965 pull_request.reviewer_data)
1032 pull_request.reviewer_data)
966 elif str2bool(self.request.POST.get('update_commits', 'false')):
1033 elif str2bool(self.request.POST.get('update_commits', 'false')):
967 self._update_commits(pull_request)
1034 self._update_commits(pull_request)
968 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
1035 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
969 self._edit_pull_request(pull_request)
1036 self._edit_pull_request(pull_request)
970 else:
1037 else:
971 raise HTTPBadRequest()
1038 raise HTTPBadRequest()
972 return True
1039 return True
973 raise HTTPForbidden()
1040 raise HTTPForbidden()
974
1041
975 def _edit_pull_request(self, pull_request):
1042 def _edit_pull_request(self, pull_request):
976 _ = self.request.translate
1043 _ = self.request.translate
977
1044
978 try:
1045 try:
979 PullRequestModel().edit(
1046 PullRequestModel().edit(
980 pull_request,
1047 pull_request,
981 self.request.POST.get('title'),
1048 self.request.POST.get('title'),
982 self.request.POST.get('description'),
1049 self.request.POST.get('description'),
983 self.request.POST.get('description_renderer'),
1050 self.request.POST.get('description_renderer'),
984 self._rhodecode_user)
1051 self._rhodecode_user)
985 except ValueError:
1052 except ValueError:
986 msg = _(u'Cannot update closed pull requests.')
1053 msg = _(u'Cannot update closed pull requests.')
987 h.flash(msg, category='error')
1054 h.flash(msg, category='error')
988 return
1055 return
989 else:
1056 else:
990 Session().commit()
1057 Session().commit()
991
1058
992 msg = _(u'Pull request title & description updated.')
1059 msg = _(u'Pull request title & description updated.')
993 h.flash(msg, category='success')
1060 h.flash(msg, category='success')
994 return
1061 return
995
1062
996 def _update_commits(self, pull_request):
1063 def _update_commits(self, pull_request):
997 _ = self.request.translate
1064 _ = self.request.translate
998 resp = PullRequestModel().update_commits(pull_request)
1065 resp = PullRequestModel().update_commits(pull_request)
999
1066
1000 if resp.executed:
1067 if resp.executed:
1001
1068
1002 if resp.target_changed and resp.source_changed:
1069 if resp.target_changed and resp.source_changed:
1003 changed = 'target and source repositories'
1070 changed = 'target and source repositories'
1004 elif resp.target_changed and not resp.source_changed:
1071 elif resp.target_changed and not resp.source_changed:
1005 changed = 'target repository'
1072 changed = 'target repository'
1006 elif not resp.target_changed and resp.source_changed:
1073 elif not resp.target_changed and resp.source_changed:
1007 changed = 'source repository'
1074 changed = 'source repository'
1008 else:
1075 else:
1009 changed = 'nothing'
1076 changed = 'nothing'
1010
1077
1011 msg = _(
1078 msg = _(
1012 u'Pull request updated to "{source_commit_id}" with '
1079 u'Pull request updated to "{source_commit_id}" with '
1013 u'{count_added} added, {count_removed} removed commits. '
1080 u'{count_added} added, {count_removed} removed commits. '
1014 u'Source of changes: {change_source}')
1081 u'Source of changes: {change_source}')
1015 msg = msg.format(
1082 msg = msg.format(
1016 source_commit_id=pull_request.source_ref_parts.commit_id,
1083 source_commit_id=pull_request.source_ref_parts.commit_id,
1017 count_added=len(resp.changes.added),
1084 count_added=len(resp.changes.added),
1018 count_removed=len(resp.changes.removed),
1085 count_removed=len(resp.changes.removed),
1019 change_source=changed)
1086 change_source=changed)
1020 h.flash(msg, category='success')
1087 h.flash(msg, category='success')
1021
1088
1022 channel = '/repo${}$/pr/{}'.format(
1089 channel = '/repo${}$/pr/{}'.format(
1023 pull_request.target_repo.repo_name,
1090 pull_request.target_repo.repo_name,
1024 pull_request.pull_request_id)
1091 pull_request.pull_request_id)
1025 message = msg + (
1092 message = msg + (
1026 ' - <a onclick="window.location.reload()">'
1093 ' - <a onclick="window.location.reload()">'
1027 '<strong>{}</strong></a>'.format(_('Reload page')))
1094 '<strong>{}</strong></a>'.format(_('Reload page')))
1028 channelstream.post_message(
1095 channelstream.post_message(
1029 channel, message, self._rhodecode_user.username,
1096 channel, message, self._rhodecode_user.username,
1030 registry=self.request.registry)
1097 registry=self.request.registry)
1031 else:
1098 else:
1032 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
1099 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
1033 warning_reasons = [
1100 warning_reasons = [
1034 UpdateFailureReason.NO_CHANGE,
1101 UpdateFailureReason.NO_CHANGE,
1035 UpdateFailureReason.WRONG_REF_TYPE,
1102 UpdateFailureReason.WRONG_REF_TYPE,
1036 ]
1103 ]
1037 category = 'warning' if resp.reason in warning_reasons else 'error'
1104 category = 'warning' if resp.reason in warning_reasons else 'error'
1038 h.flash(msg, category=category)
1105 h.flash(msg, category=category)
1039
1106
1040 @LoginRequired()
1107 @LoginRequired()
1041 @NotAnonymous()
1108 @NotAnonymous()
1042 @HasRepoPermissionAnyDecorator(
1109 @HasRepoPermissionAnyDecorator(
1043 'repository.read', 'repository.write', 'repository.admin')
1110 'repository.read', 'repository.write', 'repository.admin')
1044 @CSRFRequired()
1111 @CSRFRequired()
1045 @view_config(
1112 @view_config(
1046 route_name='pullrequest_merge', request_method='POST',
1113 route_name='pullrequest_merge', request_method='POST',
1047 renderer='json_ext')
1114 renderer='json_ext')
1048 def pull_request_merge(self):
1115 def pull_request_merge(self):
1049 """
1116 """
1050 Merge will perform a server-side merge of the specified
1117 Merge will perform a server-side merge of the specified
1051 pull request, if the pull request is approved and mergeable.
1118 pull request, if the pull request is approved and mergeable.
1052 After successful merging, the pull request is automatically
1119 After successful merging, the pull request is automatically
1053 closed, with a relevant comment.
1120 closed, with a relevant comment.
1054 """
1121 """
1055 pull_request = PullRequest.get_or_404(
1122 pull_request = PullRequest.get_or_404(
1056 self.request.matchdict['pull_request_id'])
1123 self.request.matchdict['pull_request_id'])
1057
1124
1058 self.load_default_context()
1125 self.load_default_context()
1059 check = MergeCheck.validate(
1126 check = MergeCheck.validate(
1060 pull_request, auth_user=self._rhodecode_user,
1127 pull_request, auth_user=self._rhodecode_user,
1061 translator=self.request.translate)
1128 translator=self.request.translate)
1062 merge_possible = not check.failed
1129 merge_possible = not check.failed
1063
1130
1064 for err_type, error_msg in check.errors:
1131 for err_type, error_msg in check.errors:
1065 h.flash(error_msg, category=err_type)
1132 h.flash(error_msg, category=err_type)
1066
1133
1067 if merge_possible:
1134 if merge_possible:
1068 log.debug("Pre-conditions checked, trying to merge.")
1135 log.debug("Pre-conditions checked, trying to merge.")
1069 extras = vcs_operation_context(
1136 extras = vcs_operation_context(
1070 self.request.environ, repo_name=pull_request.target_repo.repo_name,
1137 self.request.environ, repo_name=pull_request.target_repo.repo_name,
1071 username=self._rhodecode_db_user.username, action='push',
1138 username=self._rhodecode_db_user.username, action='push',
1072 scm=pull_request.target_repo.repo_type)
1139 scm=pull_request.target_repo.repo_type)
1073 self._merge_pull_request(
1140 self._merge_pull_request(
1074 pull_request, self._rhodecode_db_user, extras)
1141 pull_request, self._rhodecode_db_user, extras)
1075 else:
1142 else:
1076 log.debug("Pre-conditions failed, NOT merging.")
1143 log.debug("Pre-conditions failed, NOT merging.")
1077
1144
1078 raise HTTPFound(
1145 raise HTTPFound(
1079 h.route_path('pullrequest_show',
1146 h.route_path('pullrequest_show',
1080 repo_name=pull_request.target_repo.repo_name,
1147 repo_name=pull_request.target_repo.repo_name,
1081 pull_request_id=pull_request.pull_request_id))
1148 pull_request_id=pull_request.pull_request_id))
1082
1149
1083 def _merge_pull_request(self, pull_request, user, extras):
1150 def _merge_pull_request(self, pull_request, user, extras):
1084 _ = self.request.translate
1151 _ = self.request.translate
1085 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1152 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1086
1153
1087 if merge_resp.executed:
1154 if merge_resp.executed:
1088 log.debug("The merge was successful, closing the pull request.")
1155 log.debug("The merge was successful, closing the pull request.")
1089 PullRequestModel().close_pull_request(
1156 PullRequestModel().close_pull_request(
1090 pull_request.pull_request_id, user)
1157 pull_request.pull_request_id, user)
1091 Session().commit()
1158 Session().commit()
1092 msg = _('Pull request was successfully merged and closed.')
1159 msg = _('Pull request was successfully merged and closed.')
1093 h.flash(msg, category='success')
1160 h.flash(msg, category='success')
1094 else:
1161 else:
1095 log.debug(
1162 log.debug(
1096 "The merge was not successful. Merge response: %s",
1163 "The merge was not successful. Merge response: %s",
1097 merge_resp)
1164 merge_resp)
1098 msg = PullRequestModel().merge_status_message(
1165 msg = PullRequestModel().merge_status_message(
1099 merge_resp.failure_reason)
1166 merge_resp.failure_reason)
1100 h.flash(msg, category='error')
1167 h.flash(msg, category='error')
1101
1168
1102 def _update_reviewers(self, pull_request, review_members, reviewer_rules):
1169 def _update_reviewers(self, pull_request, review_members, reviewer_rules):
1103 _ = self.request.translate
1170 _ = self.request.translate
1104 get_default_reviewers_data, validate_default_reviewers = \
1171 get_default_reviewers_data, validate_default_reviewers = \
1105 PullRequestModel().get_reviewer_functions()
1172 PullRequestModel().get_reviewer_functions()
1106
1173
1107 try:
1174 try:
1108 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1175 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1109 except ValueError as e:
1176 except ValueError as e:
1110 log.error('Reviewers Validation: {}'.format(e))
1177 log.error('Reviewers Validation: {}'.format(e))
1111 h.flash(e, category='error')
1178 h.flash(e, category='error')
1112 return
1179 return
1113
1180
1114 PullRequestModel().update_reviewers(
1181 PullRequestModel().update_reviewers(
1115 pull_request, reviewers, self._rhodecode_user)
1182 pull_request, reviewers, self._rhodecode_user)
1116 h.flash(_('Pull request reviewers updated.'), category='success')
1183 h.flash(_('Pull request reviewers updated.'), category='success')
1117 Session().commit()
1184 Session().commit()
1118
1185
1119 @LoginRequired()
1186 @LoginRequired()
1120 @NotAnonymous()
1187 @NotAnonymous()
1121 @HasRepoPermissionAnyDecorator(
1188 @HasRepoPermissionAnyDecorator(
1122 'repository.read', 'repository.write', 'repository.admin')
1189 'repository.read', 'repository.write', 'repository.admin')
1123 @CSRFRequired()
1190 @CSRFRequired()
1124 @view_config(
1191 @view_config(
1125 route_name='pullrequest_delete', request_method='POST',
1192 route_name='pullrequest_delete', request_method='POST',
1126 renderer='json_ext')
1193 renderer='json_ext')
1127 def pull_request_delete(self):
1194 def pull_request_delete(self):
1128 _ = self.request.translate
1195 _ = self.request.translate
1129
1196
1130 pull_request = PullRequest.get_or_404(
1197 pull_request = PullRequest.get_or_404(
1131 self.request.matchdict['pull_request_id'])
1198 self.request.matchdict['pull_request_id'])
1132 self.load_default_context()
1199 self.load_default_context()
1133
1200
1134 pr_closed = pull_request.is_closed()
1201 pr_closed = pull_request.is_closed()
1135 allowed_to_delete = PullRequestModel().check_user_delete(
1202 allowed_to_delete = PullRequestModel().check_user_delete(
1136 pull_request, self._rhodecode_user) and not pr_closed
1203 pull_request, self._rhodecode_user) and not pr_closed
1137
1204
1138 # only owner can delete it !
1205 # only owner can delete it !
1139 if allowed_to_delete:
1206 if allowed_to_delete:
1140 PullRequestModel().delete(pull_request, self._rhodecode_user)
1207 PullRequestModel().delete(pull_request, self._rhodecode_user)
1141 Session().commit()
1208 Session().commit()
1142 h.flash(_('Successfully deleted pull request'),
1209 h.flash(_('Successfully deleted pull request'),
1143 category='success')
1210 category='success')
1144 raise HTTPFound(h.route_path('pullrequest_show_all',
1211 raise HTTPFound(h.route_path('pullrequest_show_all',
1145 repo_name=self.db_repo_name))
1212 repo_name=self.db_repo_name))
1146
1213
1147 log.warning('user %s tried to delete pull request without access',
1214 log.warning('user %s tried to delete pull request without access',
1148 self._rhodecode_user)
1215 self._rhodecode_user)
1149 raise HTTPNotFound()
1216 raise HTTPNotFound()
1150
1217
1151 @LoginRequired()
1218 @LoginRequired()
1152 @NotAnonymous()
1219 @NotAnonymous()
1153 @HasRepoPermissionAnyDecorator(
1220 @HasRepoPermissionAnyDecorator(
1154 'repository.read', 'repository.write', 'repository.admin')
1221 'repository.read', 'repository.write', 'repository.admin')
1155 @CSRFRequired()
1222 @CSRFRequired()
1156 @view_config(
1223 @view_config(
1157 route_name='pullrequest_comment_create', request_method='POST',
1224 route_name='pullrequest_comment_create', request_method='POST',
1158 renderer='json_ext')
1225 renderer='json_ext')
1159 def pull_request_comment_create(self):
1226 def pull_request_comment_create(self):
1160 _ = self.request.translate
1227 _ = self.request.translate
1161
1228
1162 pull_request = PullRequest.get_or_404(
1229 pull_request = PullRequest.get_or_404(
1163 self.request.matchdict['pull_request_id'])
1230 self.request.matchdict['pull_request_id'])
1164 pull_request_id = pull_request.pull_request_id
1231 pull_request_id = pull_request.pull_request_id
1165
1232
1166 if pull_request.is_closed():
1233 if pull_request.is_closed():
1167 log.debug('comment: forbidden because pull request is closed')
1234 log.debug('comment: forbidden because pull request is closed')
1168 raise HTTPForbidden()
1235 raise HTTPForbidden()
1169
1236
1170 allowed_to_comment = PullRequestModel().check_user_comment(
1237 allowed_to_comment = PullRequestModel().check_user_comment(
1171 pull_request, self._rhodecode_user)
1238 pull_request, self._rhodecode_user)
1172 if not allowed_to_comment:
1239 if not allowed_to_comment:
1173 log.debug(
1240 log.debug(
1174 'comment: forbidden because pull request is from forbidden repo')
1241 'comment: forbidden because pull request is from forbidden repo')
1175 raise HTTPForbidden()
1242 raise HTTPForbidden()
1176
1243
1177 c = self.load_default_context()
1244 c = self.load_default_context()
1178
1245
1179 status = self.request.POST.get('changeset_status', None)
1246 status = self.request.POST.get('changeset_status', None)
1180 text = self.request.POST.get('text')
1247 text = self.request.POST.get('text')
1181 comment_type = self.request.POST.get('comment_type')
1248 comment_type = self.request.POST.get('comment_type')
1182 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
1249 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
1183 close_pull_request = self.request.POST.get('close_pull_request')
1250 close_pull_request = self.request.POST.get('close_pull_request')
1184
1251
1185 # the logic here works as follows: if we submit a close-PR
1252 # the logic here works as follows: if we submit a close-PR
1186 # comment, use the `close_pull_request_with_comment` function,
1253 # comment, use the `close_pull_request_with_comment` function,
1187 # otherwise handle the regular comment logic
1254 # otherwise handle the regular comment logic
1188
1255
1189 if close_pull_request:
1256 if close_pull_request:
1190 # only owner or admin or person with write permissions
1257 # only owner or admin or person with write permissions
1191 allowed_to_close = PullRequestModel().check_user_update(
1258 allowed_to_close = PullRequestModel().check_user_update(
1192 pull_request, self._rhodecode_user)
1259 pull_request, self._rhodecode_user)
1193 if not allowed_to_close:
1260 if not allowed_to_close:
1194 log.debug('comment: forbidden because not allowed to close '
1261 log.debug('comment: forbidden because not allowed to close '
1195 'pull request %s', pull_request_id)
1262 'pull request %s', pull_request_id)
1196 raise HTTPForbidden()
1263 raise HTTPForbidden()
1197 comment, status = PullRequestModel().close_pull_request_with_comment(
1264 comment, status = PullRequestModel().close_pull_request_with_comment(
1198 pull_request, self._rhodecode_user, self.db_repo, message=text,
1265 pull_request, self._rhodecode_user, self.db_repo, message=text,
1199 auth_user=self._rhodecode_user)
1266 auth_user=self._rhodecode_user)
1200 Session().flush()
1267 Session().flush()
1201 events.trigger(
1268 events.trigger(
1202 events.PullRequestCommentEvent(pull_request, comment))
1269 events.PullRequestCommentEvent(pull_request, comment))
1203
1270
1204 else:
1271 else:
1205 # regular comment case, could be inline, or one with status.
1272 # regular comment case, could be inline, or one with status.
1206 # for that one we check also permissions
1273 # for that one we check also permissions
1207
1274
1208 allowed_to_change_status = PullRequestModel().check_user_change_status(
1275 allowed_to_change_status = PullRequestModel().check_user_change_status(
1209 pull_request, self._rhodecode_user)
1276 pull_request, self._rhodecode_user)
1210
1277
1211 if status and allowed_to_change_status:
1278 if status and allowed_to_change_status:
1212 message = (_('Status change %(transition_icon)s %(status)s')
1279 message = (_('Status change %(transition_icon)s %(status)s')
1213 % {'transition_icon': '>',
1280 % {'transition_icon': '>',
1214 'status': ChangesetStatus.get_status_lbl(status)})
1281 'status': ChangesetStatus.get_status_lbl(status)})
1215 text = text or message
1282 text = text or message
1216
1283
1217 comment = CommentsModel().create(
1284 comment = CommentsModel().create(
1218 text=text,
1285 text=text,
1219 repo=self.db_repo.repo_id,
1286 repo=self.db_repo.repo_id,
1220 user=self._rhodecode_user.user_id,
1287 user=self._rhodecode_user.user_id,
1221 pull_request=pull_request,
1288 pull_request=pull_request,
1222 f_path=self.request.POST.get('f_path'),
1289 f_path=self.request.POST.get('f_path'),
1223 line_no=self.request.POST.get('line'),
1290 line_no=self.request.POST.get('line'),
1224 status_change=(ChangesetStatus.get_status_lbl(status)
1291 status_change=(ChangesetStatus.get_status_lbl(status)
1225 if status and allowed_to_change_status else None),
1292 if status and allowed_to_change_status else None),
1226 status_change_type=(status
1293 status_change_type=(status
1227 if status and allowed_to_change_status else None),
1294 if status and allowed_to_change_status else None),
1228 comment_type=comment_type,
1295 comment_type=comment_type,
1229 resolves_comment_id=resolves_comment_id,
1296 resolves_comment_id=resolves_comment_id,
1230 auth_user=self._rhodecode_user
1297 auth_user=self._rhodecode_user
1231 )
1298 )
1232
1299
1233 if allowed_to_change_status:
1300 if allowed_to_change_status:
1234 # calculate old status before we change it
1301 # calculate old status before we change it
1235 old_calculated_status = pull_request.calculated_review_status()
1302 old_calculated_status = pull_request.calculated_review_status()
1236
1303
1237 # get status if set !
1304 # get status if set !
1238 if status:
1305 if status:
1239 ChangesetStatusModel().set_status(
1306 ChangesetStatusModel().set_status(
1240 self.db_repo.repo_id,
1307 self.db_repo.repo_id,
1241 status,
1308 status,
1242 self._rhodecode_user.user_id,
1309 self._rhodecode_user.user_id,
1243 comment,
1310 comment,
1244 pull_request=pull_request
1311 pull_request=pull_request
1245 )
1312 )
1246
1313
1247 Session().flush()
1314 Session().flush()
1248 # this is somehow required to get access to some relationship
1315 # this is somehow required to get access to some relationship
1249 # loaded on comment
1316 # loaded on comment
1250 Session().refresh(comment)
1317 Session().refresh(comment)
1251
1318
1252 events.trigger(
1319 events.trigger(
1253 events.PullRequestCommentEvent(pull_request, comment))
1320 events.PullRequestCommentEvent(pull_request, comment))
1254
1321
1255 # we now calculate the status of pull request, and based on that
1322 # we now calculate the status of pull request, and based on that
1256 # calculation we set the commits status
1323 # calculation we set the commits status
1257 calculated_status = pull_request.calculated_review_status()
1324 calculated_status = pull_request.calculated_review_status()
1258 if old_calculated_status != calculated_status:
1325 if old_calculated_status != calculated_status:
1259 PullRequestModel()._trigger_pull_request_hook(
1326 PullRequestModel()._trigger_pull_request_hook(
1260 pull_request, self._rhodecode_user, 'review_status_change')
1327 pull_request, self._rhodecode_user, 'review_status_change')
1261
1328
1262 Session().commit()
1329 Session().commit()
1263
1330
1264 data = {
1331 data = {
1265 'target_id': h.safeid(h.safe_unicode(
1332 'target_id': h.safeid(h.safe_unicode(
1266 self.request.POST.get('f_path'))),
1333 self.request.POST.get('f_path'))),
1267 }
1334 }
1268 if comment:
1335 if comment:
1269 c.co = comment
1336 c.co = comment
1270 rendered_comment = render(
1337 rendered_comment = render(
1271 'rhodecode:templates/changeset/changeset_comment_block.mako',
1338 'rhodecode:templates/changeset/changeset_comment_block.mako',
1272 self._get_template_context(c), self.request)
1339 self._get_template_context(c), self.request)
1273
1340
1274 data.update(comment.get_dict())
1341 data.update(comment.get_dict())
1275 data.update({'rendered_text': rendered_comment})
1342 data.update({'rendered_text': rendered_comment})
1276
1343
1277 return data
1344 return data
1278
1345
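As a rough sketch of the form submission this view handles: the field names below mirror the self.request.POST.get(...) calls above, while the URL shape, the csrf_token field name and all sample values are assumptions for illustration only, not taken from this changeset.

    # Hypothetical client-side POST to the pullrequest_comment_create route.
    import requests

    form = {
        'text': 'Looks good, one nitpick inline.',
        'comment_type': 'note',                    # assumed value
        'changeset_status': 'approved',            # optional status change (assumed label)
        'f_path': 'rhodecode/lib/codeblocks.py',   # inline comment target (optional)
        'line': 'n120',                            # inline comment line (assumed format)
        'csrf_token': '<token>',                   # @CSRFRequired enforces a CSRF check
        # a non-empty 'close_pull_request' value would close the PR via
        # close_pull_request_with_comment instead of adding a regular comment
    }
    requests.post('https://code.example.com/myrepo/pull-request/42/comment', data=form)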
1279 @LoginRequired()
1346 @LoginRequired()
1280 @NotAnonymous()
1347 @NotAnonymous()
1281 @HasRepoPermissionAnyDecorator(
1348 @HasRepoPermissionAnyDecorator(
1282 'repository.read', 'repository.write', 'repository.admin')
1349 'repository.read', 'repository.write', 'repository.admin')
1283 @CSRFRequired()
1350 @CSRFRequired()
1284 @view_config(
1351 @view_config(
1285 route_name='pullrequest_comment_delete', request_method='POST',
1352 route_name='pullrequest_comment_delete', request_method='POST',
1286 renderer='json_ext')
1353 renderer='json_ext')
1287 def pull_request_comment_delete(self):
1354 def pull_request_comment_delete(self):
1288 pull_request = PullRequest.get_or_404(
1355 pull_request = PullRequest.get_or_404(
1289 self.request.matchdict['pull_request_id'])
1356 self.request.matchdict['pull_request_id'])
1290
1357
1291 comment = ChangesetComment.get_or_404(
1358 comment = ChangesetComment.get_or_404(
1292 self.request.matchdict['comment_id'])
1359 self.request.matchdict['comment_id'])
1293 comment_id = comment.comment_id
1360 comment_id = comment.comment_id
1294
1361
1295 if pull_request.is_closed():
1362 if pull_request.is_closed():
1296 log.debug('comment: forbidden because pull request is closed')
1363 log.debug('comment: forbidden because pull request is closed')
1297 raise HTTPForbidden()
1364 raise HTTPForbidden()
1298
1365
1299 if not comment:
1366 if not comment:
1300 log.debug('Comment with id:%s not found, skipping', comment_id)
1367 log.debug('Comment with id:%s not found, skipping', comment_id)
1301 # comment already deleted in another call probably
1368 # comment already deleted in another call probably
1302 return True
1369 return True
1303
1370
1304 if comment.pull_request.is_closed():
1371 if comment.pull_request.is_closed():
1305 # don't allow deleting comments on closed pull request
1372 # don't allow deleting comments on closed pull request
1306 raise HTTPForbidden()
1373 raise HTTPForbidden()
1307
1374
1308 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1375 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1309 super_admin = h.HasPermissionAny('hg.admin')()
1376 super_admin = h.HasPermissionAny('hg.admin')()
1310 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1377 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1311 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1378 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1312 comment_repo_admin = is_repo_admin and is_repo_comment
1379 comment_repo_admin = is_repo_admin and is_repo_comment
1313
1380
1314 if super_admin or comment_owner or comment_repo_admin:
1381 if super_admin or comment_owner or comment_repo_admin:
1315 old_calculated_status = comment.pull_request.calculated_review_status()
1382 old_calculated_status = comment.pull_request.calculated_review_status()
1316 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1383 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1317 Session().commit()
1384 Session().commit()
1318 calculated_status = comment.pull_request.calculated_review_status()
1385 calculated_status = comment.pull_request.calculated_review_status()
1319 if old_calculated_status != calculated_status:
1386 if old_calculated_status != calculated_status:
1320 PullRequestModel()._trigger_pull_request_hook(
1387 PullRequestModel()._trigger_pull_request_hook(
1321 comment.pull_request, self._rhodecode_user, 'review_status_change')
1388 comment.pull_request, self._rhodecode_user, 'review_status_change')
1322 return True
1389 return True
1323 else:
1390 else:
1324 log.warning('No permissions for user %s to delete comment_id: %s',
1391 log.warning('No permissions for user %s to delete comment_id: %s',
1325 self._rhodecode_db_user, comment_id)
1392 self._rhodecode_db_user, comment_id)
1326 raise HTTPNotFound()
1393 raise HTTPNotFound()
@@ -1,762 +1,771 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2018 RhodeCode GmbH
3 # Copyright (C) 2011-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import difflib
22 import difflib
23 from itertools import groupby
23 from itertools import groupby
24
24
25 from pygments import lex
25 from pygments import lex
26 from pygments.formatters.html import _get_ttype_class as pygment_token_class
26 from pygments.formatters.html import _get_ttype_class as pygment_token_class
27 from pygments.lexers.special import TextLexer, Token
27 from pygments.lexers.special import TextLexer, Token
28 from pygments.lexers import get_lexer_by_name
28 from pygments.lexers import get_lexer_by_name
29
29
30 from rhodecode.lib.helpers import (
30 from rhodecode.lib.helpers import (
31 get_lexer_for_filenode, html_escape, get_custom_lexer)
31 get_lexer_for_filenode, html_escape, get_custom_lexer)
32 from rhodecode.lib.utils2 import AttributeDict, StrictAttributeDict, safe_unicode
32 from rhodecode.lib.utils2 import AttributeDict, StrictAttributeDict, safe_unicode
33 from rhodecode.lib.vcs.nodes import FileNode
33 from rhodecode.lib.vcs.nodes import FileNode
34 from rhodecode.lib.vcs.exceptions import VCSError, NodeDoesNotExistError
34 from rhodecode.lib.vcs.exceptions import VCSError, NodeDoesNotExistError
35 from rhodecode.lib.diff_match_patch import diff_match_patch
35 from rhodecode.lib.diff_match_patch import diff_match_patch
36 from rhodecode.lib.diffs import LimitedDiffContainer, DEL_FILENODE, BIN_FILENODE
36 from rhodecode.lib.diffs import LimitedDiffContainer, DEL_FILENODE, BIN_FILENODE
37
37
38
38
39 plain_text_lexer = get_lexer_by_name(
39 plain_text_lexer = get_lexer_by_name(
40 'text', stripall=False, stripnl=False, ensurenl=False)
40 'text', stripall=False, stripnl=False, ensurenl=False)
41
41
42
42
43 log = logging.getLogger(__name__)
43 log = logging.getLogger(__name__)
44
44
45
45
46 def filenode_as_lines_tokens(filenode, lexer=None):
46 def filenode_as_lines_tokens(filenode, lexer=None):
47 org_lexer = lexer
47 org_lexer = lexer
48 lexer = lexer or get_lexer_for_filenode(filenode)
48 lexer = lexer or get_lexer_for_filenode(filenode)
49 log.debug('Generating file node pygment tokens for %s, %s, org_lexer:%s',
49 log.debug('Generating file node pygment tokens for %s, %s, org_lexer:%s',
50 lexer, filenode, org_lexer)
50 lexer, filenode, org_lexer)
51 tokens = tokenize_string(filenode.content, lexer)
51 tokens = tokenize_string(filenode.content, lexer)
52 lines = split_token_stream(tokens)
52 lines = split_token_stream(tokens)
53 rv = list(lines)
53 rv = list(lines)
54 return rv
54 return rv
55
55
56
56
57 def tokenize_string(content, lexer):
57 def tokenize_string(content, lexer):
58 """
58 """
59 Use pygments to tokenize some content based on a lexer
59 Use pygments to tokenize some content based on a lexer
60 ensuring all original newlines and whitespace are preserved
60 ensuring all original newlines and whitespace are preserved
61 """
61 """
62
62
63 lexer.stripall = False
63 lexer.stripall = False
64 lexer.stripnl = False
64 lexer.stripnl = False
65 lexer.ensurenl = False
65 lexer.ensurenl = False
66
66
67 if isinstance(lexer, TextLexer):
67 if isinstance(lexer, TextLexer):
68 lexed = [(Token.Text, content)]
68 lexed = [(Token.Text, content)]
69 else:
69 else:
70 lexed = lex(content, lexer)
70 lexed = lex(content, lexer)
71
71
72 for token_type, token_text in lexed:
72 for token_type, token_text in lexed:
73 yield pygment_token_class(token_type), token_text
73 yield pygment_token_class(token_type), token_text
74
74
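A minimal sketch of using tokenize_string on its own; the snippet content and the 'python' lexer choice are arbitrary, and pygment_token_class maps pygments token types to short CSS class names (e.g. 'k' for keywords).

    from pygments.lexers import get_lexer_by_name

    lexer = get_lexer_by_name('python')
    for css_class, text in tokenize_string(u"def foo():\n    return 1\n", lexer):
        # text keeps the original whitespace and newlines untouched
        print(css_class, repr(text))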
75
75
76 def split_token_stream(tokens):
76 def split_token_stream(tokens):
77 """
77 """
78 Take a list of (TokenType, text) tuples and split them by newlines into per-line lists
78 Take a list of (TokenType, text) tuples and split them by newlines into per-line lists
79
79
80 split_token_stream([(TEXT, 'some\ntext'), (TEXT, 'more\n')])
80 split_token_stream([(TEXT, 'some\ntext'), (TEXT, 'more\n')])
81 [[(TEXT, 'some')], [(TEXT, 'text'), (TEXT, 'more')],
81 [[(TEXT, 'some')], [(TEXT, 'text'), (TEXT, 'more')],
82 [(TEXT, '')]]
82 [(TEXT, '')]]
83 """
83 """
84
84
85 buffer = []
85 buffer = []
86 for token_class, token_text in tokens:
86 for token_class, token_text in tokens:
87 parts = token_text.split('\n')
87 parts = token_text.split('\n')
88 for part in parts[:-1]:
88 for part in parts[:-1]:
89 buffer.append((token_class, part))
89 buffer.append((token_class, part))
90 yield buffer
90 yield buffer
91 buffer = []
91 buffer = []
92
92
93 buffer.append((token_class, parts[-1]))
93 buffer.append((token_class, parts[-1]))
94
94
95 if buffer:
95 if buffer:
96 yield buffer
96 yield buffer
97
97
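A small usage sketch of split_token_stream, consistent with the docstring above; the empty token class '' stands in for whatever pygment_token_class returned.

    tokens = [('', u'some\ntext'), ('', u'more\n')]
    lines = list(split_token_stream(tokens))
    # lines == [[('', u'some')], [('', u'text'), ('', u'more')], [('', u'')]]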
98
98
99 def filenode_as_annotated_lines_tokens(filenode):
99 def filenode_as_annotated_lines_tokens(filenode):
100 """
100 """
101 Take a file node and return a list of (annotation, lines) pairs; if no
101 Take a file node and return a list of (annotation, lines) pairs; if no
102 annotation is found for a group of lines, the annotation is None.
102 annotation is found for a group of lines, the annotation is None.
103
103
104 eg:
104 eg:
105
105
106 [
106 [
107 (annotation1, [
107 (annotation1, [
108 (1, line1_tokens_list),
108 (1, line1_tokens_list),
109 (2, line2_tokens_list),
109 (2, line2_tokens_list),
110 ]),
110 ]),
111 (annotation2, [
111 (annotation2, [
112 (3, line1_tokens_list),
112 (3, line1_tokens_list),
113 ]),
113 ]),
114 (None, [
114 (None, [
115 (4, line1_tokens_list),
115 (4, line1_tokens_list),
116 ]),
116 ]),
117 (annotation1, [
117 (annotation1, [
118 (5, line1_tokens_list),
118 (5, line1_tokens_list),
119 (6, line2_tokens_list),
119 (6, line2_tokens_list),
120 ])
120 ])
121 ]
121 ]
122 """
122 """
123
123
124 commit_cache = {} # cache commit_getter lookups
124 commit_cache = {} # cache commit_getter lookups
125
125
126 def _get_annotation(commit_id, commit_getter):
126 def _get_annotation(commit_id, commit_getter):
127 if commit_id not in commit_cache:
127 if commit_id not in commit_cache:
128 commit_cache[commit_id] = commit_getter()
128 commit_cache[commit_id] = commit_getter()
129 return commit_cache[commit_id]
129 return commit_cache[commit_id]
130
130
131 annotation_lookup = {
131 annotation_lookup = {
132 line_no: _get_annotation(commit_id, commit_getter)
132 line_no: _get_annotation(commit_id, commit_getter)
133 for line_no, commit_id, commit_getter, line_content
133 for line_no, commit_id, commit_getter, line_content
134 in filenode.annotate
134 in filenode.annotate
135 }
135 }
136
136
137 annotations_lines = ((annotation_lookup.get(line_no), line_no, tokens)
137 annotations_lines = ((annotation_lookup.get(line_no), line_no, tokens)
138 for line_no, tokens
138 for line_no, tokens
139 in enumerate(filenode_as_lines_tokens(filenode), 1))
139 in enumerate(filenode_as_lines_tokens(filenode), 1))
140
140
141 grouped_annotations_lines = groupby(annotations_lines, lambda x: x[0])
141 grouped_annotations_lines = groupby(annotations_lines, lambda x: x[0])
142
142
143 for annotation, group in grouped_annotations_lines:
143 for annotation, group in grouped_annotations_lines:
144 yield (
144 yield (
145 annotation, [(line_no, tokens)
145 annotation, [(line_no, tokens)
146 for (_, line_no, tokens) in group]
146 for (_, line_no, tokens) in group]
147 )
147 )
148
148
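A hedged sketch of consuming this generator; filenode is assumed to be any FileNode taken from a commit (the path below is hypothetical), and render_tokenstream is the helper defined next in this module.

    # e.g. filenode = commit.get_node('setup.py')
    for annotation, annotated_lines in filenode_as_annotated_lines_tokens(filenode):
        # annotation is the commit that last touched this block of lines, or None
        for line_no, tokens in annotated_lines:
            print(line_no, annotation, render_tokenstream(tokens))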
149
149
150 def render_tokenstream(tokenstream):
150 def render_tokenstream(tokenstream):
151 result = []
151 result = []
152 for token_class, token_ops_texts in rollup_tokenstream(tokenstream):
152 for token_class, token_ops_texts in rollup_tokenstream(tokenstream):
153
153
154 if token_class:
154 if token_class:
155 result.append(u'<span class="%s">' % token_class)
155 result.append(u'<span class="%s">' % token_class)
156 else:
156 else:
157 result.append(u'<span>')
157 result.append(u'<span>')
158
158
159 for op_tag, token_text in token_ops_texts:
159 for op_tag, token_text in token_ops_texts:
160
160
161 if op_tag:
161 if op_tag:
162 result.append(u'<%s>' % op_tag)
162 result.append(u'<%s>' % op_tag)
163
163
164 escaped_text = html_escape(token_text)
164 escaped_text = html_escape(token_text)
165
165
166 # TODO: dan: investigate showing hidden characters like space/nl/tab
166 # TODO: dan: investigate showing hidden characters like space/nl/tab
167 # escaped_text = escaped_text.replace(' ', '<sp> </sp>')
167 # escaped_text = escaped_text.replace(' ', '<sp> </sp>')
168 # escaped_text = escaped_text.replace('\n', '<nl>\n</nl>')
168 # escaped_text = escaped_text.replace('\n', '<nl>\n</nl>')
169 # escaped_text = escaped_text.replace('\t', '<tab>\t</tab>')
169 # escaped_text = escaped_text.replace('\t', '<tab>\t</tab>')
170
170
171 result.append(escaped_text)
171 result.append(escaped_text)
172
172
173 if op_tag:
173 if op_tag:
174 result.append(u'</%s>' % op_tag)
174 result.append(u'</%s>' % op_tag)
175
175
176 result.append(u'</span>')
176 result.append(u'</span>')
177
177
178 html = ''.join(result)
178 html = ''.join(result)
179 return html
179 return html
180
180
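A short worked example of render_tokenstream; because rollup_tokenstream (defined just below) merges adjacent tokens with the same class and op, the output uses as few tags as possible.

    stream = [('A', '', u'he'), ('A', 'ins', u'll'), ('A', 'ins', u'o')]
    html = render_tokenstream(stream)
    # html == u'<span class="A">he<ins>llo</ins></span>'

    # 2-tuples (class, text) are accepted too and rendered without op tags:
    # render_tokenstream([('A', u'hi')]) == u'<span class="A">hi</span>'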
181
181
182 def rollup_tokenstream(tokenstream):
182 def rollup_tokenstream(tokenstream):
183 """
183 """
184 Group a token stream of the format:
184 Group a token stream of the format:
185
185
186 ('class', 'op', 'text')
186 ('class', 'op', 'text')
187 or
187 or
188 ('class', 'text')
188 ('class', 'text')
189
189
190 into
190 into
191
191
192 [('class1',
192 [('class1',
193 [('op1', 'text'),
193 [('op1', 'text'),
194 ('op2', 'text')]),
194 ('op2', 'text')]),
195 ('class2',
195 ('class2',
196 [('op3', 'text')])]
196 [('op3', 'text')])]
197
197
198 This is used to emit the minimal set of tags necessary when
198 This is used to emit the minimal set of tags necessary when
199 rendering to html, e.g. for a given token stream this produces
199 rendering to html, e.g. for a given token stream this produces
200
200
201 <span class="A"><ins>he</ins>llo</span>
201 <span class="A"><ins>he</ins>llo</span>
202 vs
202 vs
203 <span class="A"><ins>he</ins></span><span class="A">llo</span>
203 <span class="A"><ins>he</ins></span><span class="A">llo</span>
204
204
205 If a 2 tuple is passed in, the output op will be an empty string.
205 If a 2 tuple is passed in, the output op will be an empty string.
206
206
207 eg:
207 eg:
208
208
209 >>> rollup_tokenstream([('classA', '', 'h'),
209 >>> rollup_tokenstream([('classA', '', 'h'),
210 ('classA', 'del', 'ell'),
210 ('classA', 'del', 'ell'),
211 ('classA', '', 'o'),
211 ('classA', '', 'o'),
212 ('classB', '', ' '),
212 ('classB', '', ' '),
213 ('classA', '', 'the'),
213 ('classA', '', 'the'),
214 ('classA', '', 're'),
214 ('classA', '', 're'),
215 ])
215 ])
216
216
217 [('classA', [('', 'h'), ('del', 'ell'), ('', 'o')]),
217 [('classA', [('', 'h'), ('del', 'ell'), ('', 'o')]),
218 ('classB', [('', ' ')]),
218 ('classB', [('', ' ')]),
219 ('classA', [('', 'there')])]
219 ('classA', [('', 'there')])]
220
220
221 """
221 """
222 if tokenstream and len(tokenstream[0]) == 2:
222 if tokenstream and len(tokenstream[0]) == 2:
223 tokenstream = ((t[0], '', t[1]) for t in tokenstream)
223 tokenstream = ((t[0], '', t[1]) for t in tokenstream)
224
224
225 result = []
225 result = []
226 for token_class, op_list in groupby(tokenstream, lambda t: t[0]):
226 for token_class, op_list in groupby(tokenstream, lambda t: t[0]):
227 ops = []
227 ops = []
228 for token_op, token_text_list in groupby(op_list, lambda o: o[1]):
228 for token_op, token_text_list in groupby(op_list, lambda o: o[1]):
229 text_buffer = []
229 text_buffer = []
230 for t_class, t_op, t_text in token_text_list:
230 for t_class, t_op, t_text in token_text_list:
231 text_buffer.append(t_text)
231 text_buffer.append(t_text)
232 ops.append((token_op, ''.join(text_buffer)))
232 ops.append((token_op, ''.join(text_buffer)))
233 result.append((token_class, ops))
233 result.append((token_class, ops))
234 return result
234 return result
235
235
236
236
237 def tokens_diff(old_tokens, new_tokens, use_diff_match_patch=True):
237 def tokens_diff(old_tokens, new_tokens, use_diff_match_patch=True):
238 """
238 """
239 Converts a list of (token_class, token_text) tuples to a list of
239 Converts a list of (token_class, token_text) tuples to a list of
240 (token_class, token_op, token_text) tuples where token_op is one of
240 (token_class, token_op, token_text) tuples where token_op is one of
241 ('ins', 'del', '')
241 ('ins', 'del', '')
242
242
243 :param old_tokens: list of (token_class, token_text) tuples of old line
243 :param old_tokens: list of (token_class, token_text) tuples of old line
244 :param new_tokens: list of (token_class, token_text) tuples of new line
244 :param new_tokens: list of (token_class, token_text) tuples of new line
245 :param use_diff_match_patch: boolean; if True, use Google's diff-match-patch
245 :param use_diff_match_patch: boolean; if True, use Google's diff-match-patch
246 library, which can 'smooth' out the character-by-character
246 library, which can 'smooth' out the character-by-character
247 differences to produce nicer ins/del blocks
247 differences to produce nicer ins/del blocks
248 """
248 """
249
249
250 old_tokens_result = []
250 old_tokens_result = []
251 new_tokens_result = []
251 new_tokens_result = []
252
252
253 similarity = difflib.SequenceMatcher(None,
253 similarity = difflib.SequenceMatcher(None,
254 ''.join(token_text for token_class, token_text in old_tokens),
254 ''.join(token_text for token_class, token_text in old_tokens),
255 ''.join(token_text for token_class, token_text in new_tokens)
255 ''.join(token_text for token_class, token_text in new_tokens)
256 ).ratio()
256 ).ratio()
257
257
258 if similarity < 0.6: # return, the blocks are too different
258 if similarity < 0.6: # return, the blocks are too different
259 for token_class, token_text in old_tokens:
259 for token_class, token_text in old_tokens:
260 old_tokens_result.append((token_class, '', token_text))
260 old_tokens_result.append((token_class, '', token_text))
261 for token_class, token_text in new_tokens:
261 for token_class, token_text in new_tokens:
262 new_tokens_result.append((token_class, '', token_text))
262 new_tokens_result.append((token_class, '', token_text))
263 return old_tokens_result, new_tokens_result, similarity
263 return old_tokens_result, new_tokens_result, similarity
264
264
265 token_sequence_matcher = difflib.SequenceMatcher(None,
265 token_sequence_matcher = difflib.SequenceMatcher(None,
266 [x[1] for x in old_tokens],
266 [x[1] for x in old_tokens],
267 [x[1] for x in new_tokens])
267 [x[1] for x in new_tokens])
268
268
269 for tag, o1, o2, n1, n2 in token_sequence_matcher.get_opcodes():
269 for tag, o1, o2, n1, n2 in token_sequence_matcher.get_opcodes():
270 # check the differences by token block types first to give a
270 # check the differences by token block types first to give a
271 # nicer "block" level replacement vs character diffs
271 # nicer "block" level replacement vs character diffs
272
272
273 if tag == 'equal':
273 if tag == 'equal':
274 for token_class, token_text in old_tokens[o1:o2]:
274 for token_class, token_text in old_tokens[o1:o2]:
275 old_tokens_result.append((token_class, '', token_text))
275 old_tokens_result.append((token_class, '', token_text))
276 for token_class, token_text in new_tokens[n1:n2]:
276 for token_class, token_text in new_tokens[n1:n2]:
277 new_tokens_result.append((token_class, '', token_text))
277 new_tokens_result.append((token_class, '', token_text))
278 elif tag == 'delete':
278 elif tag == 'delete':
279 for token_class, token_text in old_tokens[o1:o2]:
279 for token_class, token_text in old_tokens[o1:o2]:
280 old_tokens_result.append((token_class, 'del', token_text))
280 old_tokens_result.append((token_class, 'del', token_text))
281 elif tag == 'insert':
281 elif tag == 'insert':
282 for token_class, token_text in new_tokens[n1:n2]:
282 for token_class, token_text in new_tokens[n1:n2]:
283 new_tokens_result.append((token_class, 'ins', token_text))
283 new_tokens_result.append((token_class, 'ins', token_text))
284 elif tag == 'replace':
284 elif tag == 'replace':
285 # if same type token blocks must be replaced, do a diff on the
285 # if same type token blocks must be replaced, do a diff on the
286 # characters in the token blocks to show individual changes
286 # characters in the token blocks to show individual changes
287
287
288 old_char_tokens = []
288 old_char_tokens = []
289 new_char_tokens = []
289 new_char_tokens = []
290 for token_class, token_text in old_tokens[o1:o2]:
290 for token_class, token_text in old_tokens[o1:o2]:
291 for char in token_text:
291 for char in token_text:
292 old_char_tokens.append((token_class, char))
292 old_char_tokens.append((token_class, char))
293
293
294 for token_class, token_text in new_tokens[n1:n2]:
294 for token_class, token_text in new_tokens[n1:n2]:
295 for char in token_text:
295 for char in token_text:
296 new_char_tokens.append((token_class, char))
296 new_char_tokens.append((token_class, char))
297
297
298 old_string = ''.join([token_text for
298 old_string = ''.join([token_text for
299 token_class, token_text in old_char_tokens])
299 token_class, token_text in old_char_tokens])
300 new_string = ''.join([token_text for
300 new_string = ''.join([token_text for
301 token_class, token_text in new_char_tokens])
301 token_class, token_text in new_char_tokens])
302
302
303 char_sequence = difflib.SequenceMatcher(
303 char_sequence = difflib.SequenceMatcher(
304 None, old_string, new_string)
304 None, old_string, new_string)
305 copcodes = char_sequence.get_opcodes()
305 copcodes = char_sequence.get_opcodes()
306 obuffer, nbuffer = [], []
306 obuffer, nbuffer = [], []
307
307
308 if use_diff_match_patch:
308 if use_diff_match_patch:
309 dmp = diff_match_patch()
309 dmp = diff_match_patch()
310 dmp.Diff_EditCost = 11 # TODO: dan: extract this to a setting
310 dmp.Diff_EditCost = 11 # TODO: dan: extract this to a setting
311 reps = dmp.diff_main(old_string, new_string)
311 reps = dmp.diff_main(old_string, new_string)
312 dmp.diff_cleanupEfficiency(reps)
312 dmp.diff_cleanupEfficiency(reps)
313
313
314 a, b = 0, 0
314 a, b = 0, 0
315 for op, rep in reps:
315 for op, rep in reps:
316 l = len(rep)
316 l = len(rep)
317 if op == 0:
317 if op == 0:
318 for i, c in enumerate(rep):
318 for i, c in enumerate(rep):
319 obuffer.append((old_char_tokens[a+i][0], '', c))
319 obuffer.append((old_char_tokens[a+i][0], '', c))
320 nbuffer.append((new_char_tokens[b+i][0], '', c))
320 nbuffer.append((new_char_tokens[b+i][0], '', c))
321 a += l
321 a += l
322 b += l
322 b += l
323 elif op == -1:
323 elif op == -1:
324 for i, c in enumerate(rep):
324 for i, c in enumerate(rep):
325 obuffer.append((old_char_tokens[a+i][0], 'del', c))
325 obuffer.append((old_char_tokens[a+i][0], 'del', c))
326 a += l
326 a += l
327 elif op == 1:
327 elif op == 1:
328 for i, c in enumerate(rep):
328 for i, c in enumerate(rep):
329 nbuffer.append((new_char_tokens[b+i][0], 'ins', c))
329 nbuffer.append((new_char_tokens[b+i][0], 'ins', c))
330 b += l
330 b += l
331 else:
331 else:
332 for ctag, co1, co2, cn1, cn2 in copcodes:
332 for ctag, co1, co2, cn1, cn2 in copcodes:
333 if ctag == 'equal':
333 if ctag == 'equal':
334 for token_class, token_text in old_char_tokens[co1:co2]:
334 for token_class, token_text in old_char_tokens[co1:co2]:
335 obuffer.append((token_class, '', token_text))
335 obuffer.append((token_class, '', token_text))
336 for token_class, token_text in new_char_tokens[cn1:cn2]:
336 for token_class, token_text in new_char_tokens[cn1:cn2]:
337 nbuffer.append((token_class, '', token_text))
337 nbuffer.append((token_class, '', token_text))
338 elif ctag == 'delete':
338 elif ctag == 'delete':
339 for token_class, token_text in old_char_tokens[co1:co2]:
339 for token_class, token_text in old_char_tokens[co1:co2]:
340 obuffer.append((token_class, 'del', token_text))
340 obuffer.append((token_class, 'del', token_text))
341 elif ctag == 'insert':
341 elif ctag == 'insert':
342 for token_class, token_text in new_char_tokens[cn1:cn2]:
342 for token_class, token_text in new_char_tokens[cn1:cn2]:
343 nbuffer.append((token_class, 'ins', token_text))
343 nbuffer.append((token_class, 'ins', token_text))
344 elif ctag == 'replace':
344 elif ctag == 'replace':
345 for token_class, token_text in old_char_tokens[co1:co2]:
345 for token_class, token_text in old_char_tokens[co1:co2]:
346 obuffer.append((token_class, 'del', token_text))
346 obuffer.append((token_class, 'del', token_text))
347 for token_class, token_text in new_char_tokens[cn1:cn2]:
347 for token_class, token_text in new_char_tokens[cn1:cn2]:
348 nbuffer.append((token_class, 'ins', token_text))
348 nbuffer.append((token_class, 'ins', token_text))
349
349
350 old_tokens_result.extend(obuffer)
350 old_tokens_result.extend(obuffer)
351 new_tokens_result.extend(nbuffer)
351 new_tokens_result.extend(nbuffer)
352
352
353 return old_tokens_result, new_tokens_result, similarity
353 return old_tokens_result, new_tokens_result, similarity
354
354
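A rough sketch of what tokens_diff returns for two similar one-token lines, run with use_diff_match_patch=False so the result comes straight from difflib and is easy to predict.

    old_line = [('', u'hello')]
    new_line = [('', u'hallo')]
    old_res, new_res, similarity = tokens_diff(old_line, new_line,
                                               use_diff_match_patch=False)
    # similarity == 0.8; the replaced character is marked per character:
    # old_res == [('', '', u'h'), ('', 'del', u'e'), ('', '', u'l'), ('', '', u'l'), ('', '', u'o')]
    # new_res == [('', '', u'h'), ('', 'ins', u'a'), ('', '', u'l'), ('', '', u'l'), ('', '', u'o')]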
355
355
356 def diffset_node_getter(commit):
356 def diffset_node_getter(commit):
357 def get_node(fname):
357 def get_node(fname):
358 try:
358 try:
359 return commit.get_node(fname)
359 return commit.get_node(fname)
360 except NodeDoesNotExistError:
360 except NodeDoesNotExistError:
361 return None
361 return None
362
362
363 return get_node
363 return get_node
364
364
365
365
366 class DiffSet(object):
366 class DiffSet(object):
367 """
367 """
368 An object for parsing the diff result from diffs.DiffProcessor and
368 An object for parsing the diff result from diffs.DiffProcessor and
369 adding highlighting, side by side/unified renderings and line diffs
369 adding highlighting, side by side/unified renderings and line diffs
370 """
370 """
371
371
372 HL_REAL = 'REAL' # highlights using original file, slow
372 HL_REAL = 'REAL' # highlights using original file, slow
373 HL_FAST = 'FAST' # highlights using just the line, fast but not correct
373 HL_FAST = 'FAST' # highlights using just the line, fast but not correct
374 # in the case of multiline code
374 # in the case of multiline code
375 HL_NONE = 'NONE' # no highlighting, fastest
375 HL_NONE = 'NONE' # no highlighting, fastest
376
376
377 def __init__(self, highlight_mode=HL_REAL, repo_name=None,
377 def __init__(self, highlight_mode=HL_REAL, repo_name=None,
378 source_repo_name=None,
378 source_repo_name=None,
379 source_node_getter=lambda filename: None,
379 source_node_getter=lambda filename: None,
380 target_node_getter=lambda filename: None,
380 target_node_getter=lambda filename: None,
381 source_nodes=None, target_nodes=None,
381 source_nodes=None, target_nodes=None,
382 # files over this size will use fast highlighting
382 # files over this size will use fast highlighting
383 max_file_size_limit=150 * 1024,
383 max_file_size_limit=150 * 1024,
384 ):
384 ):
385
385
386 self.highlight_mode = highlight_mode
386 self.highlight_mode = highlight_mode
387 self.highlighted_filenodes = {}
387 self.highlighted_filenodes = {}
388 self.source_node_getter = source_node_getter
388 self.source_node_getter = source_node_getter
389 self.target_node_getter = target_node_getter
389 self.target_node_getter = target_node_getter
390 self.source_nodes = source_nodes or {}
390 self.source_nodes = source_nodes or {}
391 self.target_nodes = target_nodes or {}
391 self.target_nodes = target_nodes or {}
392 self.repo_name = repo_name
392 self.repo_name = repo_name
393 self.source_repo_name = source_repo_name or repo_name
393 self.source_repo_name = source_repo_name or repo_name
394 self.max_file_size_limit = max_file_size_limit
394 self.max_file_size_limit = max_file_size_limit
395
395
396 def render_patchset(self, patchset, source_ref=None, target_ref=None):
396 def render_patchset(self, patchset, source_ref=None, target_ref=None):
397 diffset = AttributeDict(dict(
397 diffset = AttributeDict(dict(
398 lines_added=0,
398 lines_added=0,
399 lines_deleted=0,
399 lines_deleted=0,
400 changed_files=0,
400 changed_files=0,
401 files=[],
401 files=[],
402 file_stats={},
402 file_stats={},
403 limited_diff=isinstance(patchset, LimitedDiffContainer),
403 limited_diff=isinstance(patchset, LimitedDiffContainer),
404 repo_name=self.repo_name,
404 repo_name=self.repo_name,
405 source_repo_name=self.source_repo_name,
405 source_repo_name=self.source_repo_name,
406 source_ref=source_ref,
406 source_ref=source_ref,
407 target_ref=target_ref,
407 target_ref=target_ref,
408 ))
408 ))
409 for patch in patchset:
409 for patch in patchset:
410 diffset.file_stats[patch['filename']] = patch['stats']
410 diffset.file_stats[patch['filename']] = patch['stats']
411 filediff = self.render_patch(patch)
411 filediff = self.render_patch(patch)
412 filediff.diffset = StrictAttributeDict(dict(
412 filediff.diffset = StrictAttributeDict(dict(
413 source_ref=diffset.source_ref,
413 source_ref=diffset.source_ref,
414 target_ref=diffset.target_ref,
414 target_ref=diffset.target_ref,
415 repo_name=diffset.repo_name,
415 repo_name=diffset.repo_name,
416 source_repo_name=diffset.source_repo_name,
416 source_repo_name=diffset.source_repo_name,
417 ))
417 ))
418 diffset.files.append(filediff)
418 diffset.files.append(filediff)
419 diffset.changed_files += 1
419 diffset.changed_files += 1
420 if not patch['stats']['binary']:
420 if not patch['stats']['binary']:
421 diffset.lines_added += patch['stats']['added']
421 diffset.lines_added += patch['stats']['added']
422 diffset.lines_deleted += patch['stats']['deleted']
422 diffset.lines_deleted += patch['stats']['deleted']
423
423
424 return diffset
424 return diffset
425
425
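A hedged sketch of wiring a DiffSet to two commits; old_commit, new_commit and the patchset (produced elsewhere by diffs.DiffProcessor) are assumed inputs, and the repository names are hypothetical.

    diffset = DiffSet(
        highlight_mode=DiffSet.HL_FAST,
        repo_name='myrepo',
        source_repo_name='fork/myrepo',
        # "source" nodes are the pre-change file versions, "target" the post-change ones
        source_node_getter=diffset_node_getter(old_commit),
        target_node_getter=diffset_node_getter(new_commit),
    )
    rendered = diffset.render_patchset(
        patchset, source_ref=old_commit.raw_id, target_ref=new_commit.raw_id)
    # rendered.files is a list of per-file AttributeDicts with .hunks ready for templates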
426 _lexer_cache = {}
426 _lexer_cache = {}
427
427
428 def _get_lexer_for_filename(self, filename, filenode=None):
428 def _get_lexer_for_filename(self, filename, filenode=None):
429 # cached because we might need to call it twice for source/target
429 # cached because we might need to call it twice for source/target
430 if filename not in self._lexer_cache:
430 if filename not in self._lexer_cache:
431 if filenode:
431 if filenode:
432 lexer = filenode.lexer
432 lexer = filenode.lexer
433 extension = filenode.extension
433 extension = filenode.extension
434 else:
434 else:
435 lexer = FileNode.get_lexer(filename=filename)
435 lexer = FileNode.get_lexer(filename=filename)
436 extension = filename.split('.')[-1]
436 extension = filename.split('.')[-1]
437
437
438 lexer = get_custom_lexer(extension) or lexer
438 lexer = get_custom_lexer(extension) or lexer
439 self._lexer_cache[filename] = lexer
439 self._lexer_cache[filename] = lexer
440 return self._lexer_cache[filename]
440 return self._lexer_cache[filename]
441
441
442 def render_patch(self, patch):
442 def render_patch(self, patch):
443 log.debug('rendering diff for %r', patch['filename'])
443 log.debug('rendering diff for %r', patch['filename'])
444
444
445 source_filename = patch['original_filename']
445 source_filename = patch['original_filename']
446 target_filename = patch['filename']
446 target_filename = patch['filename']
447
447
448 source_lexer = plain_text_lexer
448 source_lexer = plain_text_lexer
449 target_lexer = plain_text_lexer
449 target_lexer = plain_text_lexer
450
450
451 if not patch['stats']['binary']:
451 if not patch['stats']['binary']:
452 node_hl_mode = self.HL_NONE if patch['chunks'] == [] else None
452 node_hl_mode = self.HL_NONE if patch['chunks'] == [] else None
453 hl_mode = node_hl_mode or self.highlight_mode
453 hl_mode = node_hl_mode or self.highlight_mode
454
454
455 if hl_mode == self.HL_REAL:
455 if hl_mode == self.HL_REAL:
456 if (source_filename and patch['operation'] in ('D', 'M')
456 if (source_filename and patch['operation'] in ('D', 'M')
457 and source_filename not in self.source_nodes):
457 and source_filename not in self.source_nodes):
458 self.source_nodes[source_filename] = (
458 self.source_nodes[source_filename] = (
459 self.source_node_getter(source_filename))
459 self.source_node_getter(source_filename))
460
460
461 if (target_filename and patch['operation'] in ('A', 'M')
461 if (target_filename and patch['operation'] in ('A', 'M')
462 and target_filename not in self.target_nodes):
462 and target_filename not in self.target_nodes):
463 self.target_nodes[target_filename] = (
463 self.target_nodes[target_filename] = (
464 self.target_node_getter(target_filename))
464 self.target_node_getter(target_filename))
465
465
466 elif hl_mode == self.HL_FAST:
466 elif hl_mode == self.HL_FAST:
467 source_lexer = self._get_lexer_for_filename(source_filename)
467 source_lexer = self._get_lexer_for_filename(source_filename)
468 target_lexer = self._get_lexer_for_filename(target_filename)
468 target_lexer = self._get_lexer_for_filename(target_filename)
469
469
470 source_file = self.source_nodes.get(source_filename, source_filename)
470 source_file = self.source_nodes.get(source_filename, source_filename)
471 target_file = self.target_nodes.get(target_filename, target_filename)
471 target_file = self.target_nodes.get(target_filename, target_filename)
472 raw_id_uid = ''
473 if self.source_nodes.get(source_filename):
474 raw_id_uid = self.source_nodes[source_filename].commit.raw_id
475
476 if not raw_id_uid and self.target_nodes.get(target_filename):
477 # in case this is a new file we only have it in target
478 raw_id_uid = self.target_nodes[target_filename].commit.raw_id
472
479
473 source_filenode, target_filenode = None, None
480 source_filenode, target_filenode = None, None
474
481
475 # TODO: dan: FileNode.lexer works on the content of the file - which
482 # TODO: dan: FileNode.lexer works on the content of the file - which
476 # can be slow - issue #4289 explains a lexer clean up - which once
483 # can be slow - issue #4289 explains a lexer clean up - which once
477 # done can allow caching a lexer for a filenode to avoid the file lookup
484 # done can allow caching a lexer for a filenode to avoid the file lookup
478 if isinstance(source_file, FileNode):
485 if isinstance(source_file, FileNode):
479 source_filenode = source_file
486 source_filenode = source_file
480 #source_lexer = source_file.lexer
487 #source_lexer = source_file.lexer
481 source_lexer = self._get_lexer_for_filename(source_filename)
488 source_lexer = self._get_lexer_for_filename(source_filename)
482 source_file.lexer = source_lexer
489 source_file.lexer = source_lexer
483
490
484 if isinstance(target_file, FileNode):
491 if isinstance(target_file, FileNode):
485 target_filenode = target_file
492 target_filenode = target_file
486 #target_lexer = target_file.lexer
493 #target_lexer = target_file.lexer
487 target_lexer = self._get_lexer_for_filename(target_filename)
494 target_lexer = self._get_lexer_for_filename(target_filename)
488 target_file.lexer = target_lexer
495 target_file.lexer = target_lexer
489
496
490 source_file_path, target_file_path = None, None
497 source_file_path, target_file_path = None, None
491
498
492 if source_filename != '/dev/null':
499 if source_filename != '/dev/null':
493 source_file_path = source_filename
500 source_file_path = source_filename
494 if target_filename != '/dev/null':
501 if target_filename != '/dev/null':
495 target_file_path = target_filename
502 target_file_path = target_filename
496
503
497 source_file_type = source_lexer.name
504 source_file_type = source_lexer.name
498 target_file_type = target_lexer.name
505 target_file_type = target_lexer.name
499
506
500 filediff = AttributeDict({
507 filediff = AttributeDict({
501 'source_file_path': source_file_path,
508 'source_file_path': source_file_path,
502 'target_file_path': target_file_path,
509 'target_file_path': target_file_path,
503 'source_filenode': source_filenode,
510 'source_filenode': source_filenode,
504 'target_filenode': target_filenode,
511 'target_filenode': target_filenode,
505 'source_file_type': target_file_type,
512 'source_file_type': target_file_type,
506 'target_file_type': source_file_type,
513 'target_file_type': source_file_type,
507 'patch': {'filename': patch['filename'], 'stats': patch['stats']},
514 'patch': {'filename': patch['filename'], 'stats': patch['stats']},
508 'operation': patch['operation'],
515 'operation': patch['operation'],
509 'source_mode': patch['stats']['old_mode'],
516 'source_mode': patch['stats']['old_mode'],
510 'target_mode': patch['stats']['new_mode'],
517 'target_mode': patch['stats']['new_mode'],
511 'limited_diff': isinstance(patch, LimitedDiffContainer),
518 'limited_diff': isinstance(patch, LimitedDiffContainer),
512 'hunks': [],
519 'hunks': [],
513 'hunk_ops': None,
520 'hunk_ops': None,
514 'diffset': self,
521 'diffset': self,
522 'raw_id': raw_id_uid,
515 })
523 })
524
516 file_chunks = patch['chunks'][1:]
525 file_chunks = patch['chunks'][1:]
517 for hunk in file_chunks:
526 for hunk in file_chunks:
518 hunkbit = self.parse_hunk(hunk, source_file, target_file)
527 hunkbit = self.parse_hunk(hunk, source_file, target_file)
519 hunkbit.source_file_path = source_file_path
528 hunkbit.source_file_path = source_file_path
520 hunkbit.target_file_path = target_file_path
529 hunkbit.target_file_path = target_file_path
521 filediff.hunks.append(hunkbit)
530 filediff.hunks.append(hunkbit)
522
531
523 # Simulate hunk on OPS type line which doesn't really contain any diff
532 # Simulate hunk on OPS type line which doesn't really contain any diff
524 # this allows commenting on those
533 # this allows commenting on those
525 if not file_chunks:
534 if not file_chunks:
526 actions = []
535 actions = []
527 for op_id, op_text in filediff.patch['stats']['ops'].items():
536 for op_id, op_text in filediff.patch['stats']['ops'].items():
528 if op_id == DEL_FILENODE:
537 if op_id == DEL_FILENODE:
529 actions.append(u'file was removed')
538 actions.append(u'file was removed')
530 elif op_id == BIN_FILENODE:
539 elif op_id == BIN_FILENODE:
531 actions.append(u'binary diff hidden')
540 actions.append(u'binary diff hidden')
532 else:
541 else:
533 actions.append(safe_unicode(op_text))
542 actions.append(safe_unicode(op_text))
534 action_line = u'NO CONTENT: ' + \
543 action_line = u'NO CONTENT: ' + \
535 (u', '.join(actions) or u'UNDEFINED_ACTION')
544 (u', '.join(actions) or u'UNDEFINED_ACTION')
536
545
537 hunk_ops = {'source_length': 0, 'source_start': 0,
546 hunk_ops = {'source_length': 0, 'source_start': 0,
538 'lines': [
547 'lines': [
539 {'new_lineno': 0, 'old_lineno': 1,
548 {'new_lineno': 0, 'old_lineno': 1,
540 'action': 'unmod-no-hl', 'line': action_line}
549 'action': 'unmod-no-hl', 'line': action_line}
541 ],
550 ],
542 'section_header': u'', 'target_start': 1, 'target_length': 1}
551 'section_header': u'', 'target_start': 1, 'target_length': 1}
543
552
544 hunkbit = self.parse_hunk(hunk_ops, source_file, target_file)
553 hunkbit = self.parse_hunk(hunk_ops, source_file, target_file)
545 hunkbit.source_file_path = source_file_path
554 hunkbit.source_file_path = source_file_path
546 hunkbit.target_file_path = target_file_path
555 hunkbit.target_file_path = target_file_path
547 filediff.hunk_ops = hunkbit
556 filediff.hunk_ops = hunkbit
548 return filediff
557 return filediff
549
558
550 def parse_hunk(self, hunk, source_file, target_file):
559 def parse_hunk(self, hunk, source_file, target_file):
551 result = AttributeDict(dict(
560 result = AttributeDict(dict(
552 source_start=hunk['source_start'],
561 source_start=hunk['source_start'],
553 source_length=hunk['source_length'],
562 source_length=hunk['source_length'],
554 target_start=hunk['target_start'],
563 target_start=hunk['target_start'],
555 target_length=hunk['target_length'],
564 target_length=hunk['target_length'],
556 section_header=hunk['section_header'],
565 section_header=hunk['section_header'],
557 lines=[],
566 lines=[],
558 ))
567 ))
559 before, after = [], []
568 before, after = [], []
560
569
561 for line in hunk['lines']:
570 for line in hunk['lines']:
562 if line['action'] in ['unmod', 'unmod-no-hl']:
571 if line['action'] in ['unmod', 'unmod-no-hl']:
563 no_hl = line['action'] == 'unmod-no-hl'
572 no_hl = line['action'] == 'unmod-no-hl'
564 result.lines.extend(
573 result.lines.extend(
565 self.parse_lines(before, after, source_file, target_file, no_hl=no_hl))
574 self.parse_lines(before, after, source_file, target_file, no_hl=no_hl))
566 after.append(line)
575 after.append(line)
567 before.append(line)
576 before.append(line)
568 elif line['action'] == 'add':
577 elif line['action'] == 'add':
569 after.append(line)
578 after.append(line)
570 elif line['action'] == 'del':
579 elif line['action'] == 'del':
571 before.append(line)
580 before.append(line)
572 elif line['action'] == 'old-no-nl':
581 elif line['action'] == 'old-no-nl':
573 before.append(line)
582 before.append(line)
574 elif line['action'] == 'new-no-nl':
583 elif line['action'] == 'new-no-nl':
575 after.append(line)
584 after.append(line)
576
585
577 all_actions = [x['action'] for x in after] + [x['action'] for x in before]
586 all_actions = [x['action'] for x in after] + [x['action'] for x in before]
578 no_hl = {x for x in all_actions} == {'unmod-no-hl'}
587 no_hl = {x for x in all_actions} == {'unmod-no-hl'}
579 result.lines.extend(
588 result.lines.extend(
580 self.parse_lines(before, after, source_file, target_file, no_hl=no_hl))
589 self.parse_lines(before, after, source_file, target_file, no_hl=no_hl))
581 # NOTE(marcink): we must keep list() call here so we can cache the result...
590 # NOTE(marcink): we must keep list() call here so we can cache the result...
582 result.unified = list(self.as_unified(result.lines))
591 result.unified = list(self.as_unified(result.lines))
583 result.sideside = result.lines
592 result.sideside = result.lines
584
593
585 return result
594 return result
586
595
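For reference, a minimal example of the hunk dictionary parse_hunk consumes, inferred from the keys read above and from the synthetic hunk_ops built in render_patch; the line numbers and contents are made up.

    hunk = {
        'source_start': 10, 'source_length': 2,
        'target_start': 10, 'target_length': 3,
        'section_header': u'def example():',
        'lines': [
            {'action': 'unmod', 'old_lineno': 10, 'new_lineno': 10, 'line': u'    x = 1'},
            {'action': 'del', 'old_lineno': 11, 'new_lineno': '', 'line': u'    return x'},
            {'action': 'add', 'old_lineno': '', 'new_lineno': 11, 'line': u'    y = 2'},
            {'action': 'add', 'old_lineno': '', 'new_lineno': 12, 'line': u'    return x + y'},
        ],
    }
    # hunkbit = diffset.parse_hunk(hunk, source_file, target_file)
    # hunkbit.lines pairs up original/modified sides; hunkbit.unified flattens
    # them back into unified-diff order via as_unified()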
587 def parse_lines(self, before_lines, after_lines, source_file, target_file,
596 def parse_lines(self, before_lines, after_lines, source_file, target_file,
588 no_hl=False):
597 no_hl=False):
589 # TODO: dan: investigate doing the diff comparison and fast highlighting
598 # TODO: dan: investigate doing the diff comparison and fast highlighting
590 # on the entire before and after buffered block lines rather than by
599 # on the entire before and after buffered block lines rather than by
591 # line, this means we can get better 'fast' highlighting if the context
600 # line, this means we can get better 'fast' highlighting if the context
592 # allows it - eg.
601 # allows it - eg.
593 # line 4: """
602 # line 4: """
594 # line 5: this gets highlighted as a string
603 # line 5: this gets highlighted as a string
595 # line 6: """
604 # line 6: """
596
605
597 lines = []
606 lines = []
598
607
599 before_newline = AttributeDict()
608 before_newline = AttributeDict()
600 after_newline = AttributeDict()
609 after_newline = AttributeDict()
601 if before_lines and before_lines[-1]['action'] == 'old-no-nl':
610 if before_lines and before_lines[-1]['action'] == 'old-no-nl':
602 before_newline_line = before_lines.pop(-1)
611 before_newline_line = before_lines.pop(-1)
603 before_newline.content = '\n {}'.format(
612 before_newline.content = '\n {}'.format(
604 render_tokenstream(
613 render_tokenstream(
605 [(x[0], '', x[1])
614 [(x[0], '', x[1])
606 for x in [('nonl', before_newline_line['line'])]]))
615 for x in [('nonl', before_newline_line['line'])]]))
607
616
608 if after_lines and after_lines[-1]['action'] == 'new-no-nl':
617 if after_lines and after_lines[-1]['action'] == 'new-no-nl':
609 after_newline_line = after_lines.pop(-1)
618 after_newline_line = after_lines.pop(-1)
610 after_newline.content = '\n {}'.format(
619 after_newline.content = '\n {}'.format(
611 render_tokenstream(
620 render_tokenstream(
612 [(x[0], '', x[1])
621 [(x[0], '', x[1])
613 for x in [('nonl', after_newline_line['line'])]]))
622 for x in [('nonl', after_newline_line['line'])]]))
614
623
615 while before_lines or after_lines:
624 while before_lines or after_lines:
616 before, after = None, None
625 before, after = None, None
617 before_tokens, after_tokens = None, None
626 before_tokens, after_tokens = None, None
618
627
619 if before_lines:
628 if before_lines:
620 before = before_lines.pop(0)
629 before = before_lines.pop(0)
621 if after_lines:
630 if after_lines:
622 after = after_lines.pop(0)
631 after = after_lines.pop(0)
623
632
624 original = AttributeDict()
633 original = AttributeDict()
625 modified = AttributeDict()
634 modified = AttributeDict()
626
635
627 if before:
636 if before:
628 if before['action'] == 'old-no-nl':
637 if before['action'] == 'old-no-nl':
629 before_tokens = [('nonl', before['line'])]
638 before_tokens = [('nonl', before['line'])]
630 else:
639 else:
631 before_tokens = self.get_line_tokens(
640 before_tokens = self.get_line_tokens(
632 line_text=before['line'], line_number=before['old_lineno'],
641 line_text=before['line'], line_number=before['old_lineno'],
633 input_file=source_file, no_hl=no_hl)
642 input_file=source_file, no_hl=no_hl)
634 original.lineno = before['old_lineno']
643 original.lineno = before['old_lineno']
635 original.content = before['line']
644 original.content = before['line']
636 original.action = self.action_to_op(before['action'])
645 original.action = self.action_to_op(before['action'])
637
646
638 original.get_comment_args = (
647 original.get_comment_args = (
639 source_file, 'o', before['old_lineno'])
648 source_file, 'o', before['old_lineno'])
640
649
641 if after:
650 if after:
642 if after['action'] == 'new-no-nl':
651 if after['action'] == 'new-no-nl':
643 after_tokens = [('nonl', after['line'])]
652 after_tokens = [('nonl', after['line'])]
644 else:
653 else:
645 after_tokens = self.get_line_tokens(
654 after_tokens = self.get_line_tokens(
646 line_text=after['line'], line_number=after['new_lineno'],
655 line_text=after['line'], line_number=after['new_lineno'],
647 input_file=target_file, no_hl=no_hl)
656 input_file=target_file, no_hl=no_hl)
648 modified.lineno = after['new_lineno']
657 modified.lineno = after['new_lineno']
649 modified.content = after['line']
658 modified.content = after['line']
650 modified.action = self.action_to_op(after['action'])
659 modified.action = self.action_to_op(after['action'])
651
660
652 modified.get_comment_args = (target_file, 'n', after['new_lineno'])
661 modified.get_comment_args = (target_file, 'n', after['new_lineno'])
653
662
654 # diff the lines
663 # diff the lines
655 if before_tokens and after_tokens:
664 if before_tokens and after_tokens:
656 o_tokens, m_tokens, similarity = tokens_diff(
665 o_tokens, m_tokens, similarity = tokens_diff(
657 before_tokens, after_tokens)
666 before_tokens, after_tokens)
658 original.content = render_tokenstream(o_tokens)
667 original.content = render_tokenstream(o_tokens)
659 modified.content = render_tokenstream(m_tokens)
668 modified.content = render_tokenstream(m_tokens)
660 elif before_tokens:
669 elif before_tokens:
661 original.content = render_tokenstream(
670 original.content = render_tokenstream(
662 [(x[0], '', x[1]) for x in before_tokens])
671 [(x[0], '', x[1]) for x in before_tokens])
663 elif after_tokens:
672 elif after_tokens:
664 modified.content = render_tokenstream(
673 modified.content = render_tokenstream(
665 [(x[0], '', x[1]) for x in after_tokens])
674 [(x[0], '', x[1]) for x in after_tokens])
666
675
667 if not before_lines and before_newline:
676 if not before_lines and before_newline:
668 original.content += before_newline.content
677 original.content += before_newline.content
669 before_newline = None
678 before_newline = None
670 if not after_lines and after_newline:
679 if not after_lines and after_newline:
671 modified.content += after_newline.content
680 modified.content += after_newline.content
672 after_newline = None
681 after_newline = None
673
682
674 lines.append(AttributeDict({
683 lines.append(AttributeDict({
675 'original': original,
684 'original': original,
676 'modified': modified,
685 'modified': modified,
677 }))
686 }))
678
687
679 return lines
688 return lines
680
689
681 def get_line_tokens(self, line_text, line_number, input_file=None, no_hl=False):
690 def get_line_tokens(self, line_text, line_number, input_file=None, no_hl=False):
682 filenode = None
691 filenode = None
683 filename = None
692 filename = None
684
693
685 if isinstance(input_file, basestring):
694 if isinstance(input_file, basestring):
686 filename = input_file
695 filename = input_file
687 elif isinstance(input_file, FileNode):
696 elif isinstance(input_file, FileNode):
688 filenode = input_file
697 filenode = input_file
689 filename = input_file.unicode_path
698 filename = input_file.unicode_path
690
699
691 hl_mode = self.HL_NONE if no_hl else self.highlight_mode
700 hl_mode = self.HL_NONE if no_hl else self.highlight_mode
692 if hl_mode == self.HL_REAL and filenode:
701 if hl_mode == self.HL_REAL and filenode:
693 lexer = self._get_lexer_for_filename(filename)
702 lexer = self._get_lexer_for_filename(filename)
694 file_size_allowed = input_file.size < self.max_file_size_limit
703 file_size_allowed = input_file.size < self.max_file_size_limit
695 if line_number and file_size_allowed:
704 if line_number and file_size_allowed:
696 return self.get_tokenized_filenode_line(
705 return self.get_tokenized_filenode_line(
697 input_file, line_number, lexer)
706 input_file, line_number, lexer)
698
707
699 if hl_mode in (self.HL_REAL, self.HL_FAST) and filename:
708 if hl_mode in (self.HL_REAL, self.HL_FAST) and filename:
700 lexer = self._get_lexer_for_filename(filename)
709 lexer = self._get_lexer_for_filename(filename)
701 return list(tokenize_string(line_text, lexer))
710 return list(tokenize_string(line_text, lexer))
702
711
703 return list(tokenize_string(line_text, plain_text_lexer))
712 return list(tokenize_string(line_text, plain_text_lexer))
704
713
705 def get_tokenized_filenode_line(self, filenode, line_number, lexer=None):
714 def get_tokenized_filenode_line(self, filenode, line_number, lexer=None):
706
715
707 if filenode not in self.highlighted_filenodes:
716 if filenode not in self.highlighted_filenodes:
708 tokenized_lines = filenode_as_lines_tokens(filenode, lexer)
717 tokenized_lines = filenode_as_lines_tokens(filenode, lexer)
709 self.highlighted_filenodes[filenode] = tokenized_lines
718 self.highlighted_filenodes[filenode] = tokenized_lines
710 return self.highlighted_filenodes[filenode][line_number - 1]
719 return self.highlighted_filenodes[filenode][line_number - 1]
711
720
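A minimal sketch of how these tokenization helpers fit together; `diff_set` (an instance of this class) and the repository `FileNode` named `node` are illustrative names, not part of this changeset:

    # With only a filename, highlighting falls back to a lexer guessed from the
    # name (HL_FAST) or to the plain-text lexer:
    tokens = diff_set.get_line_tokens(
        line_text=u'print("hello")', line_number=1, input_file='example.py')
    # With a FileNode and HL_REAL, the whole file is tokenized once and cached in
    # `highlighted_filenodes`, so later lines of the same file are cheap lookups:
    tokens = diff_set.get_line_tokens(
        line_text=u'print("hello")', line_number=1, input_file=node)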
712 def action_to_op(self, action):
721 def action_to_op(self, action):
713 return {
722 return {
714 'add': '+',
723 'add': '+',
715 'del': '-',
724 'del': '-',
716 'unmod': ' ',
725 'unmod': ' ',
717 'unmod-no-hl': ' ',
726 'unmod-no-hl': ' ',
718 'old-no-nl': ' ',
727 'old-no-nl': ' ',
719 'new-no-nl': ' ',
728 'new-no-nl': ' ',
720 }.get(action, action)
729 }.get(action, action)
721
730
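For illustration, the mapping above behaves roughly as follows (`diff_set` is an assumed instance):

    assert diff_set.action_to_op('add') == '+'
    assert diff_set.action_to_op('del') == '-'
    assert diff_set.action_to_op('unmod') == ' '
    # unknown actions fall through unchanged because of `.get(action, action)`
    assert diff_set.action_to_op('something-else') == 'something-else'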
722 def as_unified(self, lines):
731 def as_unified(self, lines):
723 """
732 """
724 Return a generator that yields the lines of a diff in unified order
733 Return a generator that yields the lines of a diff in unified order
725 """
734 """
726 def generator():
735 def generator():
727 buf = []
736 buf = []
728 for line in lines:
737 for line in lines:
729
738
730 if buf and not line.original or line.original.action == ' ':
739 if buf and not line.original or line.original.action == ' ':
731 for b in buf:
740 for b in buf:
732 yield b
741 yield b
733 buf = []
742 buf = []
734
743
735 if line.original:
744 if line.original:
736 if line.original.action == ' ':
745 if line.original.action == ' ':
737 yield (line.original.lineno, line.modified.lineno,
746 yield (line.original.lineno, line.modified.lineno,
738 line.original.action, line.original.content,
747 line.original.action, line.original.content,
739 line.original.get_comment_args)
748 line.original.get_comment_args)
740 continue
749 continue
741
750
742 if line.original.action == '-':
751 if line.original.action == '-':
743 yield (line.original.lineno, None,
752 yield (line.original.lineno, None,
744 line.original.action, line.original.content,
753 line.original.action, line.original.content,
745 line.original.get_comment_args)
754 line.original.get_comment_args)
746
755
747 if line.modified.action == '+':
756 if line.modified.action == '+':
748 buf.append((
757 buf.append((
749 None, line.modified.lineno,
758 None, line.modified.lineno,
750 line.modified.action, line.modified.content,
759 line.modified.action, line.modified.content,
751 line.modified.get_comment_args))
760 line.modified.get_comment_args))
752 continue
761 continue
753
762
754 if line.modified:
763 if line.modified:
755 yield (None, line.modified.lineno,
764 yield (None, line.modified.lineno,
756 line.modified.action, line.modified.content,
765 line.modified.action, line.modified.content,
757 line.modified.get_comment_args)
766 line.modified.get_comment_args)
758
767
759 for b in buf:
768 for b in buf:
760 yield b
769 yield b
761
770
762 return generator()
771 return generator()
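A hedged sketch of consuming the generator returned by `as_unified`; each yielded item is an `(old_lineno, new_lineno, action, content, comment_args)` tuple, and `diff_set`/`lines` are assumed names:

    for old_lineno, new_lineno, action, content, comment_args in diff_set.as_unified(lines):
        # removed lines have new_lineno=None, added lines have old_lineno=None
        print('%4s %4s %s %s' % (old_lineno or '', new_lineno or '', action, content))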
@@ -1,1749 +1,1755 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Base module for all VCS systems
22 Base module for all VCS systems
23 """
23 """
24
24
25 import collections
25 import collections
26 import datetime
26 import datetime
27 import fnmatch
27 import fnmatch
28 import itertools
28 import itertools
29 import logging
29 import logging
30 import os
30 import os
31 import re
31 import re
32 import time
32 import time
33 import warnings
33 import warnings
34 import shutil
34 import shutil
35
35
36 from zope.cachedescriptors.property import Lazy as LazyProperty
36 from zope.cachedescriptors.property import Lazy as LazyProperty
37
37
38 from rhodecode.lib.utils2 import safe_str, safe_unicode
38 from rhodecode.lib.utils2 import safe_str, safe_unicode
39 from rhodecode.lib.vcs import connection
39 from rhodecode.lib.vcs import connection
40 from rhodecode.lib.vcs.utils import author_name, author_email
40 from rhodecode.lib.vcs.utils import author_name, author_email
41 from rhodecode.lib.vcs.conf import settings
41 from rhodecode.lib.vcs.conf import settings
42 from rhodecode.lib.vcs.exceptions import (
42 from rhodecode.lib.vcs.exceptions import (
43 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
43 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
44 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
44 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
45 NodeDoesNotExistError, NodeNotChangedError, VCSError,
45 NodeDoesNotExistError, NodeNotChangedError, VCSError,
46 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
46 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
47 RepositoryError)
47 RepositoryError)
48
48
49
49
50 log = logging.getLogger(__name__)
50 log = logging.getLogger(__name__)
51
51
52
52
53 FILEMODE_DEFAULT = 0100644
53 FILEMODE_DEFAULT = 0100644
54 FILEMODE_EXECUTABLE = 0100755
54 FILEMODE_EXECUTABLE = 0100755
55
55
56 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
56 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
57 MergeResponse = collections.namedtuple(
57 MergeResponse = collections.namedtuple(
58 'MergeResponse',
58 'MergeResponse',
59 ('possible', 'executed', 'merge_ref', 'failure_reason'))
59 ('possible', 'executed', 'merge_ref', 'failure_reason'))
60
60
61
61
62 class MergeFailureReason(object):
62 class MergeFailureReason(object):
63 """
63 """
64 Enumeration with all the reasons why the server side merge could fail.
64 Enumeration with all the reasons why the server side merge could fail.
65
65
66 DO NOT change the number of the reasons, as they may be stored in the
66 DO NOT change the number of the reasons, as they may be stored in the
67 database.
67 database.
68
68
69 Changing the name of a reason is acceptable and encouraged to deprecate old
69 Changing the name of a reason is acceptable and encouraged to deprecate old
70 reasons.
70 reasons.
71 """
71 """
72
72
73 # Everything went well.
73 # Everything went well.
74 NONE = 0
74 NONE = 0
75
75
76 # An unexpected exception was raised. Check the logs for more details.
76 # An unexpected exception was raised. Check the logs for more details.
77 UNKNOWN = 1
77 UNKNOWN = 1
78
78
79 # The merge was not successful, there are conflicts.
79 # The merge was not successful, there are conflicts.
80 MERGE_FAILED = 2
80 MERGE_FAILED = 2
81
81
82 # The merge succeeded but we could not push it to the target repository.
82 # The merge succeeded but we could not push it to the target repository.
83 PUSH_FAILED = 3
83 PUSH_FAILED = 3
84
84
85 # The specified target is not a head in the target repository.
85 # The specified target is not a head in the target repository.
86 TARGET_IS_NOT_HEAD = 4
86 TARGET_IS_NOT_HEAD = 4
87
87
88 # The source repository contains more branches than the target. Pushing
88 # The source repository contains more branches than the target. Pushing
89 # the merge will create additional branches in the target.
89 # the merge will create additional branches in the target.
90 HG_SOURCE_HAS_MORE_BRANCHES = 5
90 HG_SOURCE_HAS_MORE_BRANCHES = 5
91
91
92 # The target reference has multiple heads, which makes it impossible to
92 # The target reference has multiple heads, which makes it impossible to
93 # correctly identify the target location. This can only happen for mercurial
93 # correctly identify the target location. This can only happen for mercurial
94 # branches.
94 # branches.
95 HG_TARGET_HAS_MULTIPLE_HEADS = 6
95 HG_TARGET_HAS_MULTIPLE_HEADS = 6
96
96
97 # The target repository is locked
97 # The target repository is locked
98 TARGET_IS_LOCKED = 7
98 TARGET_IS_LOCKED = 7
99
99
100 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
100 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
101 # An involved commit could not be found.
101 # An involved commit could not be found.
102 _DEPRECATED_MISSING_COMMIT = 8
102 _DEPRECATED_MISSING_COMMIT = 8
103
103
104 # The target repo reference is missing.
104 # The target repo reference is missing.
105 MISSING_TARGET_REF = 9
105 MISSING_TARGET_REF = 9
106
106
107 # The source repo reference is missing.
107 # The source repo reference is missing.
108 MISSING_SOURCE_REF = 10
108 MISSING_SOURCE_REF = 10
109
109
110 # The merge was not successful, there are conflicts related to sub
110 # The merge was not successful, there are conflicts related to sub
111 # repositories.
111 # repositories.
112 SUBREPO_MERGE_FAILED = 11
112 SUBREPO_MERGE_FAILED = 11
113
113
114
114
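A small sketch, not part of this changeset, showing how the `MergeResponse` namedtuple and the failure-reason codes above are typically combined:

    response = MergeResponse(
        possible=True, executed=False, merge_ref=None,
        failure_reason=MergeFailureReason.MERGE_FAILED)
    if response.failure_reason != MergeFailureReason.NONE:
        # only the numeric code is persisted, so names may change but values must not
        print('merge failed with reason code %d' % response.failure_reason)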
115 class UpdateFailureReason(object):
115 class UpdateFailureReason(object):
116 """
116 """
117 Enumeration with all the reasons why the pull request update could fail.
117 Enumeration with all the reasons why the pull request update could fail.
118
118
119 DO NOT change the number of the reasons, as they may be stored in the
119 DO NOT change the number of the reasons, as they may be stored in the
120 database.
120 database.
121
121
122 Changing the name of a reason is acceptable and encouraged to deprecate old
122 Changing the name of a reason is acceptable and encouraged to deprecate old
123 reasons.
123 reasons.
124 """
124 """
125
125
126 # Everything went well.
126 # Everything went well.
127 NONE = 0
127 NONE = 0
128
128
129 # An unexpected exception was raised. Check the logs for more details.
129 # An unexpected exception was raised. Check the logs for more details.
130 UNKNOWN = 1
130 UNKNOWN = 1
131
131
132 # The pull request is up to date.
132 # The pull request is up to date.
133 NO_CHANGE = 2
133 NO_CHANGE = 2
134
134
135 # The pull request has a reference type that is not supported for update.
135 # The pull request has a reference type that is not supported for update.
136 WRONG_REF_TYPE = 3
136 WRONG_REF_TYPE = 3
137
137
138 # Update failed because the target reference is missing.
138 # Update failed because the target reference is missing.
139 MISSING_TARGET_REF = 4
139 MISSING_TARGET_REF = 4
140
140
141 # Update failed because the source reference is missing.
141 # Update failed because the source reference is missing.
142 MISSING_SOURCE_REF = 5
142 MISSING_SOURCE_REF = 5
143
143
144
144
145 class BaseRepository(object):
145 class BaseRepository(object):
146 """
146 """
147 Base Repository for final backends
147 Base Repository for final backends
148
148
149 .. attribute:: DEFAULT_BRANCH_NAME
149 .. attribute:: DEFAULT_BRANCH_NAME
150
150
151 name of default branch (i.e. "trunk" for svn, "master" for git, etc.)
151 name of default branch (i.e. "trunk" for svn, "master" for git, etc.)
152
152
153 .. attribute:: commit_ids
153 .. attribute:: commit_ids
154
154
155 list of all available commit ids, in ascending order
155 list of all available commit ids, in ascending order
156
156
157 .. attribute:: path
157 .. attribute:: path
158
158
159 absolute path to the repository
159 absolute path to the repository
160
160
161 .. attribute:: bookmarks
161 .. attribute:: bookmarks
162
162
163 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
163 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
164 there are no bookmarks or the backend implementation does not support
164 there are no bookmarks or the backend implementation does not support
165 bookmarks.
165 bookmarks.
166
166
167 .. attribute:: tags
167 .. attribute:: tags
168
168
169 Mapping from name to :term:`Commit ID` of the tag.
169 Mapping from name to :term:`Commit ID` of the tag.
170
170
171 """
171 """
172
172
173 DEFAULT_BRANCH_NAME = None
173 DEFAULT_BRANCH_NAME = None
174 DEFAULT_CONTACT = u"Unknown"
174 DEFAULT_CONTACT = u"Unknown"
175 DEFAULT_DESCRIPTION = u"unknown"
175 DEFAULT_DESCRIPTION = u"unknown"
176 EMPTY_COMMIT_ID = '0' * 40
176 EMPTY_COMMIT_ID = '0' * 40
177
177
178 path = None
178 path = None
179
179
180 def __init__(self, repo_path, config=None, create=False, **kwargs):
180 def __init__(self, repo_path, config=None, create=False, **kwargs):
181 """
181 """
182 Initializes repository. Raises RepositoryError if the repository cannot
182 Initializes repository. Raises RepositoryError if the repository cannot
183 be found at the given ``repo_path``, or if a directory at ``repo_path``
183 be found at the given ``repo_path``, or if a directory at ``repo_path``
184 already exists and ``create`` is set to True.
184 already exists and ``create`` is set to True.
185
185
186 :param repo_path: local path of the repository
186 :param repo_path: local path of the repository
187 :param config: repository configuration
187 :param config: repository configuration
188 :param create=False: if set to True, would try to create repository.
188 :param create=False: if set to True, would try to create repository.
189 :param src_url=None: if set, should be proper url from which repository
189 :param src_url=None: if set, should be proper url from which repository
190 would be cloned; requires ``create`` parameter to be set to True -
190 would be cloned; requires ``create`` parameter to be set to True -
191 raises RepositoryError if src_url is set and create evaluates to
191 raises RepositoryError if src_url is set and create evaluates to
192 False
192 False
193 """
193 """
194 raise NotImplementedError
194 raise NotImplementedError
195
195
196 def __repr__(self):
196 def __repr__(self):
197 return '<%s at %s>' % (self.__class__.__name__, self.path)
197 return '<%s at %s>' % (self.__class__.__name__, self.path)
198
198
199 def __len__(self):
199 def __len__(self):
200 return self.count()
200 return self.count()
201
201
202 def __eq__(self, other):
202 def __eq__(self, other):
203 same_instance = isinstance(other, self.__class__)
203 same_instance = isinstance(other, self.__class__)
204 return same_instance and other.path == self.path
204 return same_instance and other.path == self.path
205
205
206 def __ne__(self, other):
206 def __ne__(self, other):
207 return not self.__eq__(other)
207 return not self.__eq__(other)
208
208
209 def get_create_shadow_cache_pr_path(self, db_repo):
209 def get_create_shadow_cache_pr_path(self, db_repo):
210 path = db_repo.cached_diffs_dir
210 path = db_repo.cached_diffs_dir
211 if not os.path.exists(path):
211 if not os.path.exists(path):
212 os.makedirs(path, 0755)
212 os.makedirs(path, 0755)
213 return path
213 return path
214
214
215 @classmethod
215 @classmethod
216 def get_default_config(cls, default=None):
216 def get_default_config(cls, default=None):
217 config = Config()
217 config = Config()
218 if default and isinstance(default, list):
218 if default and isinstance(default, list):
219 for section, key, val in default:
219 for section, key, val in default:
220 config.set(section, key, val)
220 config.set(section, key, val)
221 return config
221 return config
222
222
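As a hedged example, `default` is expected to be a list of `(section, key, value)` tuples; the values below are illustrative only:

    config = BaseRepository.get_default_config(
        default=[('extensions', 'largefiles', '1')])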
223 @LazyProperty
223 @LazyProperty
224 def _remote(self):
224 def _remote(self):
225 raise NotImplementedError
225 raise NotImplementedError
226
226
227 @LazyProperty
227 @LazyProperty
228 def EMPTY_COMMIT(self):
228 def EMPTY_COMMIT(self):
229 return EmptyCommit(self.EMPTY_COMMIT_ID)
229 return EmptyCommit(self.EMPTY_COMMIT_ID)
230
230
231 @LazyProperty
231 @LazyProperty
232 def alias(self):
232 def alias(self):
233 for k, v in settings.BACKENDS.items():
233 for k, v in settings.BACKENDS.items():
234 if v.split('.')[-1] == str(self.__class__.__name__):
234 if v.split('.')[-1] == str(self.__class__.__name__):
235 return k
235 return k
236
236
237 @LazyProperty
237 @LazyProperty
238 def name(self):
238 def name(self):
239 return safe_unicode(os.path.basename(self.path))
239 return safe_unicode(os.path.basename(self.path))
240
240
241 @LazyProperty
241 @LazyProperty
242 def description(self):
242 def description(self):
243 raise NotImplementedError
243 raise NotImplementedError
244
244
245 def refs(self):
245 def refs(self):
246 """
246 """
247 returns a `dict` with branches, bookmarks, tags, and closed_branches
247 returns a `dict` with branches, bookmarks, tags, and closed_branches
248 for this repository
248 for this repository
249 """
249 """
250 return dict(
250 return dict(
251 branches=self.branches,
251 branches=self.branches,
252 branches_closed=self.branches_closed,
252 branches_closed=self.branches_closed,
253 tags=self.tags,
253 tags=self.tags,
254 bookmarks=self.bookmarks
254 bookmarks=self.bookmarks
255 )
255 )
256
256
257 @LazyProperty
257 @LazyProperty
258 def branches(self):
258 def branches(self):
259 """
259 """
260 A `dict` which maps branch names to commit ids.
260 A `dict` which maps branch names to commit ids.
261 """
261 """
262 raise NotImplementedError
262 raise NotImplementedError
263
263
264 @LazyProperty
264 @LazyProperty
265 def branches_closed(self):
265 def branches_closed(self):
266 """
266 """
267 A `dict` which maps closed branch names to commit ids.
267 A `dict` which maps closed branch names to commit ids.
268 """
268 """
269 raise NotImplementedError
269 raise NotImplementedError
270
270
271 @LazyProperty
271 @LazyProperty
272 def bookmarks(self):
272 def bookmarks(self):
273 """
273 """
274 A `dict` which maps bookmark names to commit ids.
274 A `dict` which maps bookmark names to commit ids.
275 """
275 """
276 raise NotImplementedError
276 raise NotImplementedError
277
277
278 @LazyProperty
278 @LazyProperty
279 def tags(self):
279 def tags(self):
280 """
280 """
281 A `dict` which maps tag names to commit ids.
281 A `dict` which maps tag names to commit ids.
282 """
282 """
283 raise NotImplementedError
283 raise NotImplementedError
284
284
285 @LazyProperty
285 @LazyProperty
286 def size(self):
286 def size(self):
287 """
287 """
288 Returns combined size in bytes for all repository files
288 Returns combined size in bytes for all repository files
289 """
289 """
290 tip = self.get_commit()
290 tip = self.get_commit()
291 return tip.size
291 return tip.size
292
292
293 def size_at_commit(self, commit_id):
293 def size_at_commit(self, commit_id):
294 commit = self.get_commit(commit_id)
294 commit = self.get_commit(commit_id)
295 return commit.size
295 return commit.size
296
296
297 def is_empty(self):
297 def is_empty(self):
298 return not bool(self.commit_ids)
298 return not bool(self.commit_ids)
299
299
300 @staticmethod
300 @staticmethod
301 def check_url(url, config):
301 def check_url(url, config):
302 """
302 """
303 Check the given url and try to verify that it is a valid
303 Check the given url and try to verify that it is a valid
304 link.
304 link.
305 """
305 """
306 raise NotImplementedError
306 raise NotImplementedError
307
307
308 @staticmethod
308 @staticmethod
309 def is_valid_repository(path):
309 def is_valid_repository(path):
310 """
310 """
311 Check if given `path` contains a valid repository of this backend
311 Check if given `path` contains a valid repository of this backend
312 """
312 """
313 raise NotImplementedError
313 raise NotImplementedError
314
314
315 # ==========================================================================
315 # ==========================================================================
316 # COMMITS
316 # COMMITS
317 # ==========================================================================
317 # ==========================================================================
318
318
319 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
319 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
320 """
320 """
321 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
321 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
322 are both None, most recent commit is returned.
322 are both None, most recent commit is returned.
323
323
324 :param pre_load: Optional. List of commit attributes to load.
324 :param pre_load: Optional. List of commit attributes to load.
325
325
326 :raises ``EmptyRepositoryError``: if there are no commits
326 :raises ``EmptyRepositoryError``: if there are no commits
327 """
327 """
328 raise NotImplementedError
328 raise NotImplementedError
329
329
330 def __iter__(self):
330 def __iter__(self):
331 for commit_id in self.commit_ids:
331 for commit_id in self.commit_ids:
332 yield self.get_commit(commit_id=commit_id)
332 yield self.get_commit(commit_id=commit_id)
333
333
334 def get_commits(
334 def get_commits(
335 self, start_id=None, end_id=None, start_date=None, end_date=None,
335 self, start_id=None, end_id=None, start_date=None, end_date=None,
336 branch_name=None, show_hidden=False, pre_load=None):
336 branch_name=None, show_hidden=False, pre_load=None):
337 """
337 """
338 Returns iterator of `BaseCommit` objects from start to end
338 Returns iterator of `BaseCommit` objects from start to end
339 not inclusive. This should behave just like a list, i.e. end is not
339 not inclusive. This should behave just like a list, i.e. end is not
340 inclusive.
340 inclusive.
341
341
342 :param start_id: None or str, must be a valid commit id
342 :param start_id: None or str, must be a valid commit id
343 :param end_id: None or str, must be a valid commit id
343 :param end_id: None or str, must be a valid commit id
344 :param start_date:
344 :param start_date:
345 :param end_date:
345 :param end_date:
346 :param branch_name:
346 :param branch_name:
347 :param show_hidden:
347 :param show_hidden:
348 :param pre_load:
348 :param pre_load:
349 """
349 """
350 raise NotImplementedError
350 raise NotImplementedError
351
351
352 def __getitem__(self, key):
352 def __getitem__(self, key):
353 """
353 """
354 Allows index based access to the commit objects of this repository.
354 Allows index based access to the commit objects of this repository.
355 """
355 """
356 pre_load = ["author", "branch", "date", "message", "parents"]
356 pre_load = ["author", "branch", "date", "message", "parents"]
357 if isinstance(key, slice):
357 if isinstance(key, slice):
358 return self._get_range(key, pre_load)
358 return self._get_range(key, pre_load)
359 return self.get_commit(commit_idx=key, pre_load=pre_load)
359 return self.get_commit(commit_idx=key, pre_load=pre_load)
360
360
361 def _get_range(self, slice_obj, pre_load):
361 def _get_range(self, slice_obj, pre_load):
362 for commit_id in self.commit_ids.__getitem__(slice_obj):
362 for commit_id in self.commit_ids.__getitem__(slice_obj):
363 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
363 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
364
364
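A usage sketch for the index/slice access defined above (`repo` stands for any concrete backend instance):

    first_commit = repo[0]         # same as repo.get_commit(commit_idx=0, pre_load=...)
    first_ten = list(repo[0:10])   # slices go through _get_range and yield commits lazily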
365 def count(self):
365 def count(self):
366 return len(self.commit_ids)
366 return len(self.commit_ids)
367
367
368 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
368 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
369 """
369 """
370 Creates and returns a tag for the given ``commit_id``.
370 Creates and returns a tag for the given ``commit_id``.
371
371
372 :param name: name for new tag
372 :param name: name for new tag
373 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
373 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
374 :param commit_id: commit id for which new tag would be created
374 :param commit_id: commit id for which new tag would be created
375 :param message: message of the tag's commit
375 :param message: message of the tag's commit
376 :param date: date of tag's commit
376 :param date: date of tag's commit
377
377
378 :raises TagAlreadyExistError: if tag with same name already exists
378 :raises TagAlreadyExistError: if tag with same name already exists
379 """
379 """
380 raise NotImplementedError
380 raise NotImplementedError
381
381
382 def remove_tag(self, name, user, message=None, date=None):
382 def remove_tag(self, name, user, message=None, date=None):
383 """
383 """
384 Removes tag with the given ``name``.
384 Removes tag with the given ``name``.
385
385
386 :param name: name of the tag to be removed
386 :param name: name of the tag to be removed
387 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
387 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
388 :param message: message of the tag's removal commit
388 :param message: message of the tag's removal commit
389 :param date: date of tag's removal commit
389 :param date: date of tag's removal commit
390
390
391 :raises TagDoesNotExistError: if tag with given name does not exist
391 :raises TagDoesNotExistError: if tag with given name does not exist
392 """
392 """
393 raise NotImplementedError
393 raise NotImplementedError
394
394
395 def get_diff(
395 def get_diff(
396 self, commit1, commit2, path=None, ignore_whitespace=False,
396 self, commit1, commit2, path=None, ignore_whitespace=False,
397 context=3, path1=None):
397 context=3, path1=None):
398 """
398 """
399 Returns (git like) *diff*, as plain text. Shows changes introduced by
399 Returns (git like) *diff*, as plain text. Shows changes introduced by
400 `commit2` since `commit1`.
400 `commit2` since `commit1`.
401
401
402 :param commit1: Entry point from which diff is shown. Can be
402 :param commit1: Entry point from which diff is shown. Can be
403 ``self.EMPTY_COMMIT`` - in this case, patch showing all
403 ``self.EMPTY_COMMIT`` - in this case, patch showing all
404 the changes since empty state of the repository until `commit2`
404 the changes since empty state of the repository until `commit2`
405 :param commit2: Until which commit changes should be shown.
405 :param commit2: Until which commit changes should be shown.
406 :param path: Can be set to a path of a file to create a diff of that
406 :param path: Can be set to a path of a file to create a diff of that
407 file. If `path1` is also set, this value is only associated to
407 file. If `path1` is also set, this value is only associated to
408 `commit2`.
408 `commit2`.
409 :param ignore_whitespace: If set to ``True``, would not show whitespace
409 :param ignore_whitespace: If set to ``True``, would not show whitespace
410 changes. Defaults to ``False``.
410 changes. Defaults to ``False``.
411 :param context: How many lines before/after changed lines should be
411 :param context: How many lines before/after changed lines should be
412 shown. Defaults to ``3``.
412 shown. Defaults to ``3``.
413 :param path1: Can be set to a path to associate with `commit1`. This
413 :param path1: Can be set to a path to associate with `commit1`. This
414 parameter works only for backends which support diff generation for
414 parameter works only for backends which support diff generation for
415 different paths. Other backends will raise a `ValueError` if `path1`
415 different paths. Other backends will raise a `ValueError` if `path1`
416 is set and has a different value than `path`.
416 is set and has a different value than `path`.
417 :param file_path: filter this diff by given path pattern
417 :param file_path: filter this diff by given path pattern
418 """
418 """
419 raise NotImplementedError
419 raise NotImplementedError
420
420
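A hedged usage sketch of `get_diff`, limiting the diff to a single file ('setup.py' is an illustrative path):

    tip = repo.get_commit()
    diff = repo.get_diff(
        repo.EMPTY_COMMIT, tip, path='setup.py',
        ignore_whitespace=True, context=5)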
421 def strip(self, commit_id, branch=None):
421 def strip(self, commit_id, branch=None):
422 """
422 """
423 Strip given commit_id from the repository
423 Strip given commit_id from the repository
424 """
424 """
425 raise NotImplementedError
425 raise NotImplementedError
426
426
427 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
427 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
428 """
428 """
429 Return the latest common ancestor commit, if one exists, between this
429 Return the latest common ancestor commit, if one exists, between this
430 repo's `commit_id1` and `commit_id2` from `repo2`.
430 repo's `commit_id1` and `commit_id2` from `repo2`.
431
431
432 :param commit_id1: Commit id from this repository to use as a
432 :param commit_id1: Commit id from this repository to use as a
433 target for the comparison.
433 target for the comparison.
434 :param commit_id2: Source commit id to use for comparison.
434 :param commit_id2: Source commit id to use for comparison.
435 :param repo2: Source repository to use for comparison.
435 :param repo2: Source repository to use for comparison.
436 """
436 """
437 raise NotImplementedError
437 raise NotImplementedError
438
438
439 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
439 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
440 """
440 """
441 Compare this repository's revision `commit_id1` with `commit_id2`.
441 Compare this repository's revision `commit_id1` with `commit_id2`.
442
442
443 Returns a tuple(commits, ancestor) that would be merged from
443 Returns a tuple(commits, ancestor) that would be merged from
444 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
444 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
445 will be returned as ancestor.
445 will be returned as ancestor.
446
446
447 :param commit_id1: Commit id from this repository to use as a
447 :param commit_id1: Commit id from this repository to use as a
448 target for the comparison.
448 target for the comparison.
449 :param commit_id2: Source commit id to use for comparison.
449 :param commit_id2: Source commit id to use for comparison.
450 :param repo2: Source repository to use for comparison.
450 :param repo2: Source repository to use for comparison.
451 :param merge: If set to ``True`` will do a merge compare which also
451 :param merge: If set to ``True`` will do a merge compare which also
452 returns the common ancestor.
452 returns the common ancestor.
453 :param pre_load: Optional. List of commit attributes to load.
453 :param pre_load: Optional. List of commit attributes to load.
454 """
454 """
455 raise NotImplementedError
455 raise NotImplementedError
456
456
457 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
457 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
458 user_name='', user_email='', message='', dry_run=False,
458 user_name='', user_email='', message='', dry_run=False,
459 use_rebase=False, close_branch=False):
459 use_rebase=False, close_branch=False):
460 """
460 """
461 Merge the revisions specified in `source_ref` from `source_repo`
461 Merge the revisions specified in `source_ref` from `source_repo`
462 onto the `target_ref` of this repository.
462 onto the `target_ref` of this repository.
463
463
464 `source_ref` and `target_ref` are named tuples with the following
464 `source_ref` and `target_ref` are named tuples with the following
465 fields `type`, `name` and `commit_id`.
465 fields `type`, `name` and `commit_id`.
466
466
467 Returns a MergeResponse named tuple with the following fields
467 Returns a MergeResponse named tuple with the following fields
468 'possible', 'executed', 'source_commit', 'target_commit',
468 'possible', 'executed', 'source_commit', 'target_commit',
469 'merge_commit'.
469 'merge_commit'.
470
470
471 :param repo_id: `repo_id` target repo id.
471 :param repo_id: `repo_id` target repo id.
472 :param workspace_id: `workspace_id` unique identifier.
472 :param workspace_id: `workspace_id` unique identifier.
473 :param target_ref: `target_ref` points to the commit on top of which
473 :param target_ref: `target_ref` points to the commit on top of which
474 the `source_ref` should be merged.
474 the `source_ref` should be merged.
475 :param source_repo: The repository that contains the commits to be
475 :param source_repo: The repository that contains the commits to be
476 merged.
476 merged.
477 :param source_ref: `source_ref` points to the topmost commit from
477 :param source_ref: `source_ref` points to the topmost commit from
478 the `source_repo` which should be merged.
478 the `source_repo` which should be merged.
479 :param user_name: Merge commit `user_name`.
479 :param user_name: Merge commit `user_name`.
480 :param user_email: Merge commit `user_email`.
480 :param user_email: Merge commit `user_email`.
481 :param message: Merge commit `message`.
481 :param message: Merge commit `message`.
482 :param dry_run: If `True` the merge will not take place.
482 :param dry_run: If `True` the merge will not take place.
483 :param use_rebase: If `True` commits from the source will be rebased
483 :param use_rebase: If `True` commits from the source will be rebased
484 on top of the target instead of being merged.
484 on top of the target instead of being merged.
485 :param close_branch: If `True` the branch will be closed before merging it
485 :param close_branch: If `True` the branch will be closed before merging it
486 """
486 """
487 if dry_run:
487 if dry_run:
488 message = message or settings.MERGE_DRY_RUN_MESSAGE
488 message = message or settings.MERGE_DRY_RUN_MESSAGE
489 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
489 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
490 user_name = user_name or settings.MERGE_DRY_RUN_USER
490 user_name = user_name or settings.MERGE_DRY_RUN_USER
491 else:
491 else:
492 if not user_name:
492 if not user_name:
493 raise ValueError('user_name cannot be empty')
493 raise ValueError('user_name cannot be empty')
494 if not user_email:
494 if not user_email:
495 raise ValueError('user_email cannot be empty')
495 raise ValueError('user_email cannot be empty')
496 if not message:
496 if not message:
497 raise ValueError('message cannot be empty')
497 raise ValueError('message cannot be empty')
498
498
499 try:
499 try:
500 return self._merge_repo(
500 return self._merge_repo(
501 repo_id, workspace_id, target_ref, source_repo,
501 repo_id, workspace_id, target_ref, source_repo,
502 source_ref, message, user_name, user_email, dry_run=dry_run,
502 source_ref, message, user_name, user_email, dry_run=dry_run,
503 use_rebase=use_rebase, close_branch=close_branch)
503 use_rebase=use_rebase, close_branch=close_branch)
504 except RepositoryError:
504 except RepositoryError:
505 log.exception(
505 log.exception(
506 'Unexpected failure when running merge, dry-run=%s',
506 'Unexpected failure when running merge, dry-run=%s',
507 dry_run)
507 dry_run)
508 return MergeResponse(
508 return MergeResponse(
509 False, False, None, MergeFailureReason.UNKNOWN)
509 False, False, None, MergeFailureReason.UNKNOWN)
510
510
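A dry-run sketch of `merge` using the `Reference` namedtuple defined at the top of this module; repository objects, ids and commit hashes are illustrative:

    target_ref = Reference('branch', 'master', 'a' * 40)
    source_ref = Reference('branch', 'feature', 'b' * 40)
    response = target_repo.merge(
        repo_id=1, workspace_id='pr-1', target_ref=target_ref,
        source_repo=source_repo, source_ref=source_ref, dry_run=True)
    # with dry_run=True the message/user fields fall back to the MERGE_DRY_RUN_* settings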
511 def _merge_repo(self, repo_id, workspace_id, target_ref,
511 def _merge_repo(self, repo_id, workspace_id, target_ref,
512 source_repo, source_ref, merge_message,
512 source_repo, source_ref, merge_message,
513 merger_name, merger_email, dry_run=False,
513 merger_name, merger_email, dry_run=False,
514 use_rebase=False, close_branch=False):
514 use_rebase=False, close_branch=False):
515 """Internal implementation of merge."""
515 """Internal implementation of merge."""
516 raise NotImplementedError
516 raise NotImplementedError
517
517
518 def _maybe_prepare_merge_workspace(
518 def _maybe_prepare_merge_workspace(
519 self, repo_id, workspace_id, target_ref, source_ref):
519 self, repo_id, workspace_id, target_ref, source_ref):
520 """
520 """
521 Create the merge workspace.
521 Create the merge workspace.
522
522
523 :param workspace_id: `workspace_id` unique identifier.
523 :param workspace_id: `workspace_id` unique identifier.
524 """
524 """
525 raise NotImplementedError
525 raise NotImplementedError
526
526
527 def _get_legacy_shadow_repository_path(self, workspace_id):
527 def _get_legacy_shadow_repository_path(self, workspace_id):
528 """
528 """
529 Legacy version that was used before. We still need it for
529 Legacy version that was used before. We still need it for
530 backward compatibility.
530 backward compatibility.
531 """
531 """
532 return os.path.join(
532 return os.path.join(
533 os.path.dirname(self.path),
533 os.path.dirname(self.path),
534 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
534 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
535
535
536 def _get_shadow_repository_path(self, repo_id, workspace_id):
536 def _get_shadow_repository_path(self, repo_id, workspace_id):
537 # The name of the shadow repository must start with '.', so it is
537 # The name of the shadow repository must start with '.', so it is
538 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
538 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
539 legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id)
539 legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id)
540 if os.path.exists(legacy_repository_path):
540 if os.path.exists(legacy_repository_path):
541 return legacy_repository_path
541 return legacy_repository_path
542 else:
542 else:
543 return os.path.join(
543 return os.path.join(
544 os.path.dirname(self.path),
544 os.path.dirname(self.path),
545 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
545 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
546
546
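A worked example of the path layout produced above, with illustrative values:

    # For self.path = '/srv/repos/project', repo_id = 42, workspace_id = 'pr-7':
    #   legacy path : /srv/repos/.__shadow_project_pr-7
    #   current path: /srv/repos/.__shadow_repo_42_pr-7
    # The legacy path wins only if it already exists on disk.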
547 def cleanup_merge_workspace(self, repo_id, workspace_id):
547 def cleanup_merge_workspace(self, repo_id, workspace_id):
548 """
548 """
549 Remove merge workspace.
549 Remove merge workspace.
550
550
551 This function MUST not fail in case there is no workspace associated to
551 This function MUST not fail in case there is no workspace associated to
552 the given `workspace_id`.
552 the given `workspace_id`.
553
553
554 :param workspace_id: `workspace_id` unique identifier.
554 :param workspace_id: `workspace_id` unique identifier.
555 """
555 """
556 shadow_repository_path = self._get_shadow_repository_path(repo_id, workspace_id)
556 shadow_repository_path = self._get_shadow_repository_path(repo_id, workspace_id)
557 shadow_repository_path_del = '{}.{}.delete'.format(
557 shadow_repository_path_del = '{}.{}.delete'.format(
558 shadow_repository_path, time.time())
558 shadow_repository_path, time.time())
559
559
560 # move the shadow repo, so it never conflicts with the one used.
560 # move the shadow repo, so it never conflicts with the one used.
561 # we use this method because shutil.rmtree had some edge case problems
561 # we use this method because shutil.rmtree had some edge case problems
562 # removing symlinked repositories
562 # removing symlinked repositories
563 if not os.path.isdir(shadow_repository_path):
563 if not os.path.isdir(shadow_repository_path):
564 return
564 return
565
565
566 shutil.move(shadow_repository_path, shadow_repository_path_del)
566 shutil.move(shadow_repository_path, shadow_repository_path_del)
567 try:
567 try:
568 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
568 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
569 except Exception:
569 except Exception:
570 log.exception('Failed to gracefully remove shadow repo under %s',
570 log.exception('Failed to gracefully remove shadow repo under %s',
571 shadow_repository_path_del)
571 shadow_repository_path_del)
572 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
572 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
573
573
574 # ========== #
574 # ========== #
575 # COMMIT API #
575 # COMMIT API #
576 # ========== #
576 # ========== #
577
577
578 @LazyProperty
578 @LazyProperty
579 def in_memory_commit(self):
579 def in_memory_commit(self):
580 """
580 """
581 Returns :class:`InMemoryCommit` object for this repository.
581 Returns :class:`InMemoryCommit` object for this repository.
582 """
582 """
583 raise NotImplementedError
583 raise NotImplementedError
584
584
585 # ======================== #
585 # ======================== #
586 # UTILITIES FOR SUBCLASSES #
586 # UTILITIES FOR SUBCLASSES #
587 # ======================== #
587 # ======================== #
588
588
589 def _validate_diff_commits(self, commit1, commit2):
589 def _validate_diff_commits(self, commit1, commit2):
590 """
590 """
591 Validates that the given commits are related to this repository.
591 Validates that the given commits are related to this repository.
592
592
593 Intended as a utility for subclasses to have a consistent validation
593 Intended as a utility for subclasses to have a consistent validation
594 of input parameters in methods like :meth:`get_diff`.
594 of input parameters in methods like :meth:`get_diff`.
595 """
595 """
596 self._validate_commit(commit1)
596 self._validate_commit(commit1)
597 self._validate_commit(commit2)
597 self._validate_commit(commit2)
598 if (isinstance(commit1, EmptyCommit) and
598 if (isinstance(commit1, EmptyCommit) and
599 isinstance(commit2, EmptyCommit)):
599 isinstance(commit2, EmptyCommit)):
600 raise ValueError("Cannot compare two empty commits")
600 raise ValueError("Cannot compare two empty commits")
601
601
602 def _validate_commit(self, commit):
602 def _validate_commit(self, commit):
603 if not isinstance(commit, BaseCommit):
603 if not isinstance(commit, BaseCommit):
604 raise TypeError(
604 raise TypeError(
605 "%s is not of type BaseCommit" % repr(commit))
605 "%s is not of type BaseCommit" % repr(commit))
606 if commit.repository != self and not isinstance(commit, EmptyCommit):
606 if commit.repository != self and not isinstance(commit, EmptyCommit):
607 raise ValueError(
607 raise ValueError(
608 "Commit %s must be a valid commit from this repository %s, "
608 "Commit %s must be a valid commit from this repository %s, "
609 "related to this repository instead %s." %
609 "related to this repository instead %s." %
610 (commit, self, commit.repository))
610 (commit, self, commit.repository))
611
611
612 def _validate_commit_id(self, commit_id):
612 def _validate_commit_id(self, commit_id):
613 if not isinstance(commit_id, basestring):
613 if not isinstance(commit_id, basestring):
614 raise TypeError("commit_id must be a string value")
614 raise TypeError("commit_id must be a string value")
615
615
616 def _validate_commit_idx(self, commit_idx):
616 def _validate_commit_idx(self, commit_idx):
617 if not isinstance(commit_idx, (int, long)):
617 if not isinstance(commit_idx, (int, long)):
618 raise TypeError("commit_idx must be a numeric value")
618 raise TypeError("commit_idx must be a numeric value")
619
619
620 def _validate_branch_name(self, branch_name):
620 def _validate_branch_name(self, branch_name):
621 if branch_name and branch_name not in self.branches_all:
621 if branch_name and branch_name not in self.branches_all:
622 msg = ("Branch %s not found in %s" % (branch_name, self))
622 msg = ("Branch %s not found in %s" % (branch_name, self))
623 raise BranchDoesNotExistError(msg)
623 raise BranchDoesNotExistError(msg)
624
624
625 #
625 #
626 # Supporting deprecated API parts
626 # Supporting deprecated API parts
627 # TODO: johbo: consider to move this into a mixin
627 # TODO: johbo: consider to move this into a mixin
628 #
628 #
629
629
630 @property
630 @property
631 def EMPTY_CHANGESET(self):
631 def EMPTY_CHANGESET(self):
632 warnings.warn(
632 warnings.warn(
633 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
633 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
634 return self.EMPTY_COMMIT_ID
634 return self.EMPTY_COMMIT_ID
635
635
636 @property
636 @property
637 def revisions(self):
637 def revisions(self):
638 warnings.warn("Use commits attribute instead", DeprecationWarning)
638 warnings.warn("Use commits attribute instead", DeprecationWarning)
639 return self.commit_ids
639 return self.commit_ids
640
640
641 @revisions.setter
641 @revisions.setter
642 def revisions(self, value):
642 def revisions(self, value):
643 warnings.warn("Use commits attribute instead", DeprecationWarning)
643 warnings.warn("Use commits attribute instead", DeprecationWarning)
644 self.commit_ids = value
644 self.commit_ids = value
645
645
646 def get_changeset(self, revision=None, pre_load=None):
646 def get_changeset(self, revision=None, pre_load=None):
647 warnings.warn("Use get_commit instead", DeprecationWarning)
647 warnings.warn("Use get_commit instead", DeprecationWarning)
648 commit_id = None
648 commit_id = None
649 commit_idx = None
649 commit_idx = None
650 if isinstance(revision, basestring):
650 if isinstance(revision, basestring):
651 commit_id = revision
651 commit_id = revision
652 else:
652 else:
653 commit_idx = revision
653 commit_idx = revision
654 return self.get_commit(
654 return self.get_commit(
655 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
655 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
656
656
657 def get_changesets(
657 def get_changesets(
658 self, start=None, end=None, start_date=None, end_date=None,
658 self, start=None, end=None, start_date=None, end_date=None,
659 branch_name=None, pre_load=None):
659 branch_name=None, pre_load=None):
660 warnings.warn("Use get_commits instead", DeprecationWarning)
660 warnings.warn("Use get_commits instead", DeprecationWarning)
661 start_id = self._revision_to_commit(start)
661 start_id = self._revision_to_commit(start)
662 end_id = self._revision_to_commit(end)
662 end_id = self._revision_to_commit(end)
663 return self.get_commits(
663 return self.get_commits(
664 start_id=start_id, end_id=end_id, start_date=start_date,
664 start_id=start_id, end_id=end_id, start_date=start_date,
665 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
665 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
666
666
667 def _revision_to_commit(self, revision):
667 def _revision_to_commit(self, revision):
668 """
668 """
669 Translates a revision to a commit_id
669 Translates a revision to a commit_id
670
670
671 Helps to support the old changeset-based API, which allows commit ids
671 Helps to support the old changeset-based API, which allows commit ids
672 and commit indices to be used interchangeably.
672 and commit indices to be used interchangeably.
673 """
673 """
674 if revision is None:
674 if revision is None:
675 return revision
675 return revision
676
676
677 if isinstance(revision, basestring):
677 if isinstance(revision, basestring):
678 commit_id = revision
678 commit_id = revision
679 else:
679 else:
680 commit_id = self.commit_ids[revision]
680 commit_id = self.commit_ids[revision]
681 return commit_id
681 return commit_id
682
682
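A sketch of how the deprecated API accepts both forms (`repo` and the commit id are illustrative):

    commit_by_idx = repo.get_changeset(0)         # integer -> treated as commit_idx
    commit_by_id = repo.get_changeset('a' * 40)   # string  -> treated as commit_id
    # both emit a DeprecationWarning and delegate to get_commit()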
683 @property
683 @property
684 def in_memory_changeset(self):
684 def in_memory_changeset(self):
685 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
685 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
686 return self.in_memory_commit
686 return self.in_memory_commit
687
687
688 def get_path_permissions(self, username):
688 def get_path_permissions(self, username):
689 """
689 """
690 Returns a path permission checker or None if not supported
690 Returns a path permission checker or None if not supported
691
691
692 :param username: session user name
692 :param username: session user name
693 :return: an instance of BasePathPermissionChecker or None
693 :return: an instance of BasePathPermissionChecker or None
694 """
694 """
695 return None
695 return None
696
696
697 def install_hooks(self, force=False):
697 def install_hooks(self, force=False):
698 return self._remote.install_hooks(force)
698 return self._remote.install_hooks(force)
699
699
700
700
701 class BaseCommit(object):
701 class BaseCommit(object):
702 """
702 """
703 Each backend should implement its commit representation.
703 Each backend should implement its commit representation.
704
704
705 **Attributes**
705 **Attributes**
706
706
707 ``repository``
707 ``repository``
708 repository object within which commit exists
708 repository object within which commit exists
709
709
710 ``id``
710 ``id``
711 The commit id; may be ``raw_id`` or, e.g. for mercurial's tip,
711 The commit id; may be ``raw_id`` or, e.g. for mercurial's tip,
712 just ``tip``.
712 just ``tip``.
713
713
714 ``raw_id``
714 ``raw_id``
715 raw commit representation (i.e. full 40 length sha for git
715 raw commit representation (i.e. full 40 length sha for git
716 backend)
716 backend)
717
717
718 ``short_id``
718 ``short_id``
719 shortened (if applicable) version of ``raw_id``; it would be a simple
719 shortened (if applicable) version of ``raw_id``; it would be a simple
720 shortcut for ``raw_id[:12]`` for git/mercurial backends, or the same
720 shortcut for ``raw_id[:12]`` for git/mercurial backends, or the same
721 as ``raw_id`` for subversion
721 as ``raw_id`` for subversion
722
722
723 ``idx``
723 ``idx``
724 commit index
724 commit index
725
725
726 ``files``
726 ``files``
727 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
727 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
728
728
729 ``dirs``
729 ``dirs``
730 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
730 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
731
731
732 ``nodes``
732 ``nodes``
733 combined list of ``Node`` objects
733 combined list of ``Node`` objects
734
734
735 ``author``
735 ``author``
736 author of the commit, as unicode
736 author of the commit, as unicode
737
737
738 ``message``
738 ``message``
739 message of the commit, as unicode
739 message of the commit, as unicode
740
740
741 ``parents``
741 ``parents``
742 list of parent commits
742 list of parent commits
743
743
744 """
744 """
745
745
746 branch = None
746 branch = None
747 """
747 """
748 Depending on the backend this should be set to the branch name of the
748 Depending on the backend this should be set to the branch name of the
749 commit. Backends not supporting branches on commits should leave this
749 commit. Backends not supporting branches on commits should leave this
750 value as ``None``.
750 value as ``None``.
751 """
751 """
752
752
753 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
753 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
754 """
754 """
755 This template is used to generate a default prefix for repository archives
755 This template is used to generate a default prefix for repository archives
756 if no prefix has been specified.
756 if no prefix has been specified.
757 """
757 """
758
758
759 def __str__(self):
759 def __str__(self):
760 return '<%s at %s:%s>' % (
760 return '<%s at %s:%s>' % (
761 self.__class__.__name__, self.idx, self.short_id)
761 self.__class__.__name__, self.idx, self.short_id)
762
762
763 def __repr__(self):
763 def __repr__(self):
764 return self.__str__()
764 return self.__str__()
765
765
766 def __unicode__(self):
766 def __unicode__(self):
767 return u'%s:%s' % (self.idx, self.short_id)
767 return u'%s:%s' % (self.idx, self.short_id)
768
768
769 def __eq__(self, other):
769 def __eq__(self, other):
770 same_instance = isinstance(other, self.__class__)
770 same_instance = isinstance(other, self.__class__)
771 return same_instance and self.raw_id == other.raw_id
771 return same_instance and self.raw_id == other.raw_id
772
772
773 def __json__(self):
773 def __json__(self):
774 parents = []
774 parents = []
775 try:
775 try:
776 for parent in self.parents:
776 for parent in self.parents:
777 parents.append({'raw_id': parent.raw_id})
777 parents.append({'raw_id': parent.raw_id})
778 except NotImplementedError:
778 except NotImplementedError:
779 # empty commit doesn't have parents implemented
779 # empty commit doesn't have parents implemented
780 pass
780 pass
781
781
782 return {
782 return {
783 'short_id': self.short_id,
783 'short_id': self.short_id,
784 'raw_id': self.raw_id,
784 'raw_id': self.raw_id,
785 'revision': self.idx,
785 'revision': self.idx,
786 'message': self.message,
786 'message': self.message,
787 'date': self.date,
787 'date': self.date,
788 'author': self.author,
788 'author': self.author,
789 'parents': parents,
789 'parents': parents,
790 'branch': self.branch
790 'branch': self.branch
791 }
791 }
792
792
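For illustration, the dictionary returned by `__json__` can be handed straight to a JSON encoder (`commit` is any concrete `BaseCommit` instance; `default=str` is one way to cope with the datetime in `date`):

    import json
    payload = json.dumps(commit.__json__(), default=str)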
793 def __getstate__(self):
793 def __getstate__(self):
794 d = self.__dict__.copy()
794 d = self.__dict__.copy()
795 d.pop('_remote', None)
795 d.pop('_remote', None)
796 d.pop('repository', None)
796 d.pop('repository', None)
797 return d
797 return d
798
798
799 def _get_refs(self):
799 def _get_refs(self):
800 return {
800 return {
801 'branches': [self.branch] if self.branch else [],
801 'branches': [self.branch] if self.branch else [],
802 'bookmarks': getattr(self, 'bookmarks', []),
802 'bookmarks': getattr(self, 'bookmarks', []),
803 'tags': self.tags
803 'tags': self.tags
804 }
804 }
805
805
806 @LazyProperty
806 @LazyProperty
807 def last(self):
807 def last(self):
808 """
808 """
809 ``True`` if this is the last commit in the repository, ``False``
809 ``True`` if this is the last commit in the repository, ``False``
810 otherwise; trying to access this attribute when there are no
810 otherwise; trying to access this attribute when there are no
811 commits would raise `EmptyRepositoryError`
811 commits would raise `EmptyRepositoryError`
812 """
812 """
813 if self.repository is None:
813 if self.repository is None:
814 raise CommitError("Cannot check if it's most recent commit")
814 raise CommitError("Cannot check if it's most recent commit")
815 return self.raw_id == self.repository.commit_ids[-1]
815 return self.raw_id == self.repository.commit_ids[-1]
816
816
817 @LazyProperty
817 @LazyProperty
818 def parents(self):
818 def parents(self):
819 """
819 """
820 Returns list of parent commits.
820 Returns list of parent commits.
821 """
821 """
822 raise NotImplementedError
822 raise NotImplementedError
823
823
824 @LazyProperty
825 def first_parent(self):
826 """
827 Returns the first parent commit, or ``EmptyCommit`` if this commit has none.
828 """
829 return self.parents[0] if self.parents else EmptyCommit()
830
824 @property
831 @property
825 def merge(self):
832 def merge(self):
826 """
833 """
827 Returns boolean if commit is a merge.
834 Returns boolean if commit is a merge.
828 """
835 """
829 return len(self.parents) > 1
836 return len(self.parents) > 1
830
837
831 @LazyProperty
838 @LazyProperty
832 def children(self):
839 def children(self):
833 """
840 """
834 Returns list of child commits.
841 Returns list of child commits.
835 """
842 """
836 raise NotImplementedError
843 raise NotImplementedError
837
844
838 @LazyProperty
845 @LazyProperty
839 def id(self):
846 def id(self):
840 """
847 """
841 Returns string identifying this commit.
848 Returns string identifying this commit.
842 """
849 """
843 raise NotImplementedError
850 raise NotImplementedError
844
851
845 @LazyProperty
852 @LazyProperty
846 def raw_id(self):
853 def raw_id(self):
847 """
854 """
848 Returns raw string identifying this commit.
855 Returns raw string identifying this commit.
849 """
856 """
850 raise NotImplementedError
857 raise NotImplementedError
851
858
852 @LazyProperty
859 @LazyProperty
853 def short_id(self):
860 def short_id(self):
854 """
861 """
855 Returns shortened version of ``raw_id`` attribute, as string,
862 Returns shortened version of ``raw_id`` attribute, as string,
856 identifying this commit, useful for presentation to users.
863 identifying this commit, useful for presentation to users.
857 """
864 """
858 raise NotImplementedError
865 raise NotImplementedError
859
866
860 @LazyProperty
867 @LazyProperty
861 def idx(self):
868 def idx(self):
862 """
869 """
863 Returns integer identifying this commit.
870 Returns integer identifying this commit.
864 """
871 """
865 raise NotImplementedError
872 raise NotImplementedError
866
873
867 @LazyProperty
874 @LazyProperty
868 def committer(self):
875 def committer(self):
869 """
876 """
870 Returns committer for this commit
877 Returns committer for this commit
871 """
878 """
872 raise NotImplementedError
879 raise NotImplementedError
873
880
874 @LazyProperty
881 @LazyProperty
875 def committer_name(self):
882 def committer_name(self):
876 """
883 """
877 Returns committer name for this commit
884 Returns committer name for this commit
878 """
885 """
879
886
880 return author_name(self.committer)
887 return author_name(self.committer)
881
888
882 @LazyProperty
889 @LazyProperty
883 def committer_email(self):
890 def committer_email(self):
884 """
891 """
885 Returns committer email address for this commit
892 Returns committer email address for this commit
886 """
893 """
887
894
888 return author_email(self.committer)
895 return author_email(self.committer)
889
896
890 @LazyProperty
897 @LazyProperty
891 def author(self):
898 def author(self):
892 """
899 """
893 Returns author for this commit
900 Returns author for this commit
894 """
901 """
895
902
896 raise NotImplementedError
903 raise NotImplementedError
897
904
898 @LazyProperty
905 @LazyProperty
899 def author_name(self):
906 def author_name(self):
900 """
907 """
901 Returns author name for this commit
908 Returns author name for this commit
902 """
909 """
903
910
904 return author_name(self.author)
911 return author_name(self.author)
905
912
906 @LazyProperty
913 @LazyProperty
907 def author_email(self):
914 def author_email(self):
908 """
915 """
909 Returns author email address for this commit
916 Returns author email address for this commit
910 """
917 """
911
918
912 return author_email(self.author)
919 return author_email(self.author)
913
920
914 def get_file_mode(self, path):
921 def get_file_mode(self, path):
915 """
922 """
916 Returns stat mode of the file at `path`.
923 Returns stat mode of the file at `path`.
917 """
924 """
918 raise NotImplementedError
925 raise NotImplementedError
919
926
920 def is_link(self, path):
927 def is_link(self, path):
921 """
928 """
922 Returns ``True`` if given `path` is a symlink
929 Returns ``True`` if given `path` is a symlink
923 """
930 """
924 raise NotImplementedError
931 raise NotImplementedError
925
932
926 def get_file_content(self, path):
933 def get_file_content(self, path):
927 """
934 """
928 Returns content of the file at the given `path`.
935 Returns content of the file at the given `path`.
929 """
936 """
930 raise NotImplementedError
937 raise NotImplementedError
931
938
932 def get_file_size(self, path):
939 def get_file_size(self, path):
933 """
940 """
934 Returns size of the file at the given `path`.
941 Returns size of the file at the given `path`.
935 """
942 """
936 raise NotImplementedError
943 raise NotImplementedError
937
944
938 def get_file_commit(self, path, pre_load=None):
945 def get_file_commit(self, path, pre_load=None):
939 """
946 """
940 Returns last commit of the file at the given `path`.
947 Returns last commit of the file at the given `path`.
941
948
942 :param pre_load: Optional. List of commit attributes to load.
949 :param pre_load: Optional. List of commit attributes to load.
943 """
950 """
944 commits = self.get_file_history(path, limit=1, pre_load=pre_load)
951 commits = self.get_file_history(path, limit=1, pre_load=pre_load)
945 if not commits:
952 if not commits:
946 raise RepositoryError(
953 raise RepositoryError(
947 'Failed to fetch history for path {}. '
954 'Failed to fetch history for path {}. '
948 'Please check if such path exists in your repository'.format(
955 'Please check if such path exists in your repository'.format(
949 path))
956 path))
950 return commits[0]
957 return commits[0]
951
958
952 def get_file_history(self, path, limit=None, pre_load=None):
959 def get_file_history(self, path, limit=None, pre_load=None):
953 """
960 """
954 Returns history of file as reversed list of :class:`BaseCommit`
961 Returns history of file as reversed list of :class:`BaseCommit`
955 objects for which file at given `path` has been modified.
962 objects for which file at given `path` has been modified.
956
963
957 :param limit: Optional. Allows to limit the size of the returned
964 :param limit: Optional. Allows to limit the size of the returned
958 history. This is intended as a hint to the underlying backend, so
965 history. This is intended as a hint to the underlying backend, so
959 that it can apply optimizations depending on the limit.
966 that it can apply optimizations depending on the limit.
960 :param pre_load: Optional. List of commit attributes to load.
967 :param pre_load: Optional. List of commit attributes to load.
961 """
968 """
962 raise NotImplementedError
969 raise NotImplementedError
963
970
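A minimal usage sketch of the file-history API above, assuming `repo` is an already initialized backend repository and that 'setup.py' exists in it:

tip = repo.get_commit()  # most recent commit of the repository
last_change = tip.get_file_commit('setup.py')  # last commit touching the file
history = tip.get_file_history('setup.py', limit=10, pre_load=['author', 'date'])
for commit in history:
    print('%s %s %s' % (commit.short_id, commit.author_name, commit.date))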
964 def get_file_annotate(self, path, pre_load=None):
971 def get_file_annotate(self, path, pre_load=None):
965 """
972 """
966 Returns a generator of four element tuples with
973 Returns a generator of four element tuples with
967 lineno, sha, commit lazy loader and line
974 lineno, sha, commit lazy loader and line
968
975
969 :param pre_load: Optional. List of commit attributes to load.
976 :param pre_load: Optional. List of commit attributes to load.
970 """
977 """
971 raise NotImplementedError
978 raise NotImplementedError
972
979
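A sketch of consuming the annotation generator, assuming `commit` is a concrete BaseCommit instance; the third tuple element is treated here as a callable lazy loader, per the docstring above:

for lineno, sha, lazy_commit, line in commit.get_file_annotate('setup.py'):
    # lazy_commit is assumed to load the full commit only when called
    print('%4d %s %s' % (lineno, sha[:12], line.rstrip()))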
973 def get_nodes(self, path):
980 def get_nodes(self, path):
974 """
981 """
975 Returns combined ``DirNode`` and ``FileNode`` objects list representing
982 Returns combined ``DirNode`` and ``FileNode`` objects list representing
976 state of commit at the given ``path``.
983 state of commit at the given ``path``.
977
984
978 :raises ``CommitError``: if node at the given ``path`` is not
985 :raises ``CommitError``: if node at the given ``path`` is not
979 instance of ``DirNode``
986 instance of ``DirNode``
980 """
987 """
981 raise NotImplementedError
988 raise NotImplementedError
982
989
983 def get_node(self, path):
990 def get_node(self, path):
984 """
991 """
985 Returns ``Node`` object from the given ``path``.
992 Returns ``Node`` object from the given ``path``.
986
993
987 :raises ``NodeDoesNotExistError``: if there is no node at the given
994 :raises ``NodeDoesNotExistError``: if there is no node at the given
988 ``path``
995 ``path``
989 """
996 """
990 raise NotImplementedError
997 raise NotImplementedError
991
998
992 def get_largefile_node(self, path):
999 def get_largefile_node(self, path):
993 """
1000 """
994 Returns the path to the largefile from Mercurial/Git-LFS storage,
1001 Returns the path to the largefile from Mercurial/Git-LFS storage,
995 or None if it's not a largefile node.
1002 or None if it's not a largefile node.
996 """
1003 """
997 return None
1004 return None
998
1005
999 def archive_repo(self, file_path, kind='tgz', subrepos=None,
1006 def archive_repo(self, file_path, kind='tgz', subrepos=None,
1000 prefix=None, write_metadata=False, mtime=None):
1007 prefix=None, write_metadata=False, mtime=None):
1001 """
1008 """
1002 Creates an archive containing the contents of the repository.
1009 Creates an archive containing the contents of the repository.
1003
1010
1004 :param file_path: path of the file in which to create the archive.
1011 :param file_path: path of the file in which to create the archive.
1005 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1012 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1006 :param prefix: name of root directory in archive.
1013 :param prefix: name of root directory in archive.
1007 Default is repository name and commit's short_id joined with dash:
1014 Default is repository name and commit's short_id joined with dash:
1008 ``"{repo_name}-{short_id}"``.
1015 ``"{repo_name}-{short_id}"``.
1009 :param write_metadata: write a metadata file into archive.
1016 :param write_metadata: write a metadata file into archive.
1010 :param mtime: custom modification time for archive creation, defaults
1017 :param mtime: custom modification time for archive creation, defaults
1011 to time.time() if not given.
1018 to time.time() if not given.
1012
1019
1013 :raise VCSError: If prefix has a problem.
1020 :raise VCSError: If prefix has a problem.
1014 """
1021 """
1015 allowed_kinds = settings.ARCHIVE_SPECS.keys()
1022 allowed_kinds = settings.ARCHIVE_SPECS.keys()
1016 if kind not in allowed_kinds:
1023 if kind not in allowed_kinds:
1017 raise ImproperArchiveTypeError(
1024 raise ImproperArchiveTypeError(
1018 'Archive kind (%s) not supported use one of %s' %
1025 'Archive kind (%s) not supported use one of %s' %
1019 (kind, allowed_kinds))
1026 (kind, allowed_kinds))
1020
1027
1021 prefix = self._validate_archive_prefix(prefix)
1028 prefix = self._validate_archive_prefix(prefix)
1022
1029
1023 mtime = mtime or time.mktime(self.date.timetuple())
1030 mtime = mtime or time.mktime(self.date.timetuple())
1024
1031
1025 file_info = []
1032 file_info = []
1026 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1033 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1027 for _r, _d, files in cur_rev.walk('/'):
1034 for _r, _d, files in cur_rev.walk('/'):
1028 for f in files:
1035 for f in files:
1029 f_path = os.path.join(prefix, f.path)
1036 f_path = os.path.join(prefix, f.path)
1030 file_info.append(
1037 file_info.append(
1031 (f_path, f.mode, f.is_link(), f.raw_bytes))
1038 (f_path, f.mode, f.is_link(), f.raw_bytes))
1032
1039
1033 if write_metadata:
1040 if write_metadata:
1034 metadata = [
1041 metadata = [
1035 ('repo_name', self.repository.name),
1042 ('repo_name', self.repository.name),
1036 ('rev', self.raw_id),
1043 ('rev', self.raw_id),
1037 ('create_time', mtime),
1044 ('create_time', mtime),
1038 ('branch', self.branch),
1045 ('branch', self.branch),
1039 ('tags', ','.join(self.tags)),
1046 ('tags', ','.join(self.tags)),
1040 ]
1047 ]
1041 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1048 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1042 file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))
1049 file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))
1043
1050
1044 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
1051 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
1045
1052
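A hedged sketch of creating an archive from a commit; the target path and prefix below are made up for illustration:

commit.archive_repo(
    '/tmp/myrepo-snapshot.tgz', kind='tgz',
    prefix='myrepo-snapshot', write_metadata=True)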
1046 def _validate_archive_prefix(self, prefix):
1053 def _validate_archive_prefix(self, prefix):
1047 if prefix is None:
1054 if prefix is None:
1048 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1055 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1049 repo_name=safe_str(self.repository.name),
1056 repo_name=safe_str(self.repository.name),
1050 short_id=self.short_id)
1057 short_id=self.short_id)
1051 elif not isinstance(prefix, str):
1058 elif not isinstance(prefix, str):
1052 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1059 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1053 elif prefix.startswith('/'):
1060 elif prefix.startswith('/'):
1054 raise VCSError("Prefix cannot start with leading slash")
1061 raise VCSError("Prefix cannot start with leading slash")
1055 elif prefix.strip() == '':
1062 elif prefix.strip() == '':
1056 raise VCSError("Prefix cannot be empty")
1063 raise VCSError("Prefix cannot be empty")
1057 return prefix
1064 return prefix
1058
1065
1059 @LazyProperty
1066 @LazyProperty
1060 def root(self):
1067 def root(self):
1061 """
1068 """
1062 Returns ``RootNode`` object for this commit.
1069 Returns ``RootNode`` object for this commit.
1063 """
1070 """
1064 return self.get_node('')
1071 return self.get_node('')
1065
1072
1066 def next(self, branch=None):
1073 def next(self, branch=None):
1067 """
1074 """
1068 Returns next commit from current, if branch is gives it will return
1075 Returns next commit from current, if branch is gives it will return
1069 next commit belonging to this branch
1076 next commit belonging to this branch
1070
1077
1071 :param branch: show commits within the given named branch
1078 :param branch: show commits within the given named branch
1072 """
1079 """
1073 indexes = xrange(self.idx + 1, self.repository.count())
1080 indexes = xrange(self.idx + 1, self.repository.count())
1074 return self._find_next(indexes, branch)
1081 return self._find_next(indexes, branch)
1075
1082
1076 def prev(self, branch=None):
1083 def prev(self, branch=None):
1077 """
1084 """
1078 Returns previous commit from current, if branch is gives it will
1085 Returns previous commit from current, if branch is gives it will
1079 return previous commit belonging to this branch
1086 return previous commit belonging to this branch
1080
1087
1081 :param branch: show commit within the given named branch
1088 :param branch: show commit within the given named branch
1082 """
1089 """
1083 indexes = xrange(self.idx - 1, -1, -1)
1090 indexes = xrange(self.idx - 1, -1, -1)
1084 return self._find_next(indexes, branch)
1091 return self._find_next(indexes, branch)
1085
1092
1086 def _find_next(self, indexes, branch=None):
1093 def _find_next(self, indexes, branch=None):
1087 if branch and self.branch != branch:
1094 if branch and self.branch != branch:
1088 raise VCSError('Branch option used on commit not belonging '
1095 raise VCSError('Branch option used on commit not belonging '
1089 'to that branch')
1096 'to that branch')
1090
1097
1091 for next_idx in indexes:
1098 for next_idx in indexes:
1092 commit = self.repository.get_commit(commit_idx=next_idx)
1099 commit = self.repository.get_commit(commit_idx=next_idx)
1093 if branch and branch != commit.branch:
1100 if branch and branch != commit.branch:
1094 continue
1101 continue
1095 return commit
1102 return commit
1096 raise CommitDoesNotExistError
1103 raise CommitDoesNotExistError
1097
1104
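A sketch of walking forward through a branch with next(), stopping when CommitDoesNotExistError signals the end of the range (repo is assumed to be a loaded repository):

commit = repo.get_commit(commit_idx=0)
try:
    while True:
        print(commit.short_id)
        commit = commit.next(branch=commit.branch)
except CommitDoesNotExistError:
    pass  # reached the newest commit of this branch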
1098 def diff(self, ignore_whitespace=True, context=3):
1105 def diff(self, ignore_whitespace=True, context=3):
1099 """
1106 """
1100 Returns a `Diff` object representing the change made by this commit.
1107 Returns a `Diff` object representing the change made by this commit.
1101 """
1108 """
1102 parent = (
1109 parent = self.first_parent
1103 self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
1104 diff = self.repository.get_diff(
1110 diff = self.repository.get_diff(
1105 parent, self,
1111 parent, self,
1106 ignore_whitespace=ignore_whitespace,
1112 ignore_whitespace=ignore_whitespace,
1107 context=context)
1113 context=context)
1108 return diff
1114 return diff
1109
1115
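A sketch of the per-commit diff; when the commit has no parents, the comparison base is the EmptyCommit returned by first_parent:

diff = commit.diff(ignore_whitespace=False, context=5)
print(diff.raw[:200])  # raw unified diff text produced by the backend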
1110 @LazyProperty
1116 @LazyProperty
1111 def added(self):
1117 def added(self):
1112 """
1118 """
1113 Returns list of added ``FileNode`` objects.
1119 Returns list of added ``FileNode`` objects.
1114 """
1120 """
1115 raise NotImplementedError
1121 raise NotImplementedError
1116
1122
1117 @LazyProperty
1123 @LazyProperty
1118 def changed(self):
1124 def changed(self):
1119 """
1125 """
1120 Returns list of modified ``FileNode`` objects.
1126 Returns list of modified ``FileNode`` objects.
1121 """
1127 """
1122 raise NotImplementedError
1128 raise NotImplementedError
1123
1129
1124 @LazyProperty
1130 @LazyProperty
1125 def removed(self):
1131 def removed(self):
1126 """
1132 """
1127 Returns list of removed ``FileNode`` objects.
1133 Returns list of removed ``FileNode`` objects.
1128 """
1134 """
1129 raise NotImplementedError
1135 raise NotImplementedError
1130
1136
1131 @LazyProperty
1137 @LazyProperty
1132 def size(self):
1138 def size(self):
1133 """
1139 """
1134 Returns total number of bytes from contents of all filenodes.
1140 Returns total number of bytes from contents of all filenodes.
1135 """
1141 """
1136 return sum((node.size for node in self.get_filenodes_generator()))
1142 return sum((node.size for node in self.get_filenodes_generator()))
1137
1143
1138 def walk(self, topurl=''):
1144 def walk(self, topurl=''):
1139 """
1145 """
1140 Similar to the os.walk method. Instead of a filesystem it walks through
1146 Similar to the os.walk method. Instead of a filesystem it walks through
1141 the commit starting at the given ``topurl``. Returns a generator of tuples
1147 the commit starting at the given ``topurl``. Returns a generator of tuples
1142 (topnode, dirnodes, filenodes).
1148 (topnode, dirnodes, filenodes).
1143 """
1149 """
1144 topnode = self.get_node(topurl)
1150 topnode = self.get_node(topurl)
1145 if not topnode.is_dir():
1151 if not topnode.is_dir():
1146 return
1152 return
1147 yield (topnode, topnode.dirs, topnode.files)
1153 yield (topnode, topnode.dirs, topnode.files)
1148 for dirnode in topnode.dirs:
1154 for dirnode in topnode.dirs:
1149 for tup in self.walk(dirnode.path):
1155 for tup in self.walk(dirnode.path):
1150 yield tup
1156 yield tup
1151
1157
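A sketch of walking the whole tree of a commit, following the os.walk-like contract described above:

for topnode, dirnodes, filenodes in commit.walk('/'):
    for filenode in filenodes:
        print('%s (%d bytes)' % (filenode.path, filenode.size))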
1152 def get_filenodes_generator(self):
1158 def get_filenodes_generator(self):
1153 """
1159 """
1154 Returns generator that yields *all* file nodes.
1160 Returns generator that yields *all* file nodes.
1155 """
1161 """
1156 for topnode, dirs, files in self.walk():
1162 for topnode, dirs, files in self.walk():
1157 for node in files:
1163 for node in files:
1158 yield node
1164 yield node
1159
1165
1160 #
1166 #
1161 # Utilities for sub classes to support consistent behavior
1167 # Utilities for sub classes to support consistent behavior
1162 #
1168 #
1163
1169
1164 def no_node_at_path(self, path):
1170 def no_node_at_path(self, path):
1165 return NodeDoesNotExistError(
1171 return NodeDoesNotExistError(
1166 u"There is no file nor directory at the given path: "
1172 u"There is no file nor directory at the given path: "
1167 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1173 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1168
1174
1169 def _fix_path(self, path):
1175 def _fix_path(self, path):
1170 """
1176 """
1171 Paths are stored without a trailing slash, so we need to get rid of it if
1177 Paths are stored without a trailing slash, so we need to get rid of it if
1172 needed.
1178 needed.
1173 """
1179 """
1174 return path.rstrip('/')
1180 return path.rstrip('/')
1175
1181
1176 #
1182 #
1177 # Deprecated API based on changesets
1183 # Deprecated API based on changesets
1178 #
1184 #
1179
1185
1180 @property
1186 @property
1181 def revision(self):
1187 def revision(self):
1182 warnings.warn("Use idx instead", DeprecationWarning)
1188 warnings.warn("Use idx instead", DeprecationWarning)
1183 return self.idx
1189 return self.idx
1184
1190
1185 @revision.setter
1191 @revision.setter
1186 def revision(self, value):
1192 def revision(self, value):
1187 warnings.warn("Use idx instead", DeprecationWarning)
1193 warnings.warn("Use idx instead", DeprecationWarning)
1188 self.idx = value
1194 self.idx = value
1189
1195
1190 def get_file_changeset(self, path):
1196 def get_file_changeset(self, path):
1191 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1197 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1192 return self.get_file_commit(path)
1198 return self.get_file_commit(path)
1193
1199
1194
1200
1195 class BaseChangesetClass(type):
1201 class BaseChangesetClass(type):
1196
1202
1197 def __instancecheck__(self, instance):
1203 def __instancecheck__(self, instance):
1198 return isinstance(instance, BaseCommit)
1204 return isinstance(instance, BaseCommit)
1199
1205
1200
1206
1201 class BaseChangeset(BaseCommit):
1207 class BaseChangeset(BaseCommit):
1202
1208
1203 __metaclass__ = BaseChangesetClass
1209 __metaclass__ = BaseChangesetClass
1204
1210
1205 def __new__(cls, *args, **kwargs):
1211 def __new__(cls, *args, **kwargs):
1206 warnings.warn(
1212 warnings.warn(
1207 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1213 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1208 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1214 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1209
1215
1210
1216
1211 class BaseInMemoryCommit(object):
1217 class BaseInMemoryCommit(object):
1212 """
1218 """
1213 Represents differences between repository's state (most recent head) and
1219 Represents differences between repository's state (most recent head) and
1214 changes made *in place*.
1220 changes made *in place*.
1215
1221
1216 **Attributes**
1222 **Attributes**
1217
1223
1218 ``repository``
1224 ``repository``
1219 repository object for this in-memory-commit
1225 repository object for this in-memory-commit
1220
1226
1221 ``added``
1227 ``added``
1222 list of ``FileNode`` objects marked as *added*
1228 list of ``FileNode`` objects marked as *added*
1223
1229
1224 ``changed``
1230 ``changed``
1225 list of ``FileNode`` objects marked as *changed*
1231 list of ``FileNode`` objects marked as *changed*
1226
1232
1227 ``removed``
1233 ``removed``
1228 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1234 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1229 *removed*
1235 *removed*
1230
1236
1231 ``parents``
1237 ``parents``
1232 list of :class:`BaseCommit` instances representing parents of
1238 list of :class:`BaseCommit` instances representing parents of
1233 in-memory commit. Should always be 2-element sequence.
1239 in-memory commit. Should always be 2-element sequence.
1234
1240
1235 """
1241 """
1236
1242
1237 def __init__(self, repository):
1243 def __init__(self, repository):
1238 self.repository = repository
1244 self.repository = repository
1239 self.added = []
1245 self.added = []
1240 self.changed = []
1246 self.changed = []
1241 self.removed = []
1247 self.removed = []
1242 self.parents = []
1248 self.parents = []
1243
1249
1244 def add(self, *filenodes):
1250 def add(self, *filenodes):
1245 """
1251 """
1246 Marks given ``FileNode`` objects as *to be committed*.
1252 Marks given ``FileNode`` objects as *to be committed*.
1247
1253
1248 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1254 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1249 latest commit
1255 latest commit
1250 :raises ``NodeAlreadyAddedError``: if node with same path is already
1256 :raises ``NodeAlreadyAddedError``: if node with same path is already
1251 marked as *added*
1257 marked as *added*
1252 """
1258 """
1253 # Check if not already marked as *added* first
1259 # Check if not already marked as *added* first
1254 for node in filenodes:
1260 for node in filenodes:
1255 if node.path in (n.path for n in self.added):
1261 if node.path in (n.path for n in self.added):
1256 raise NodeAlreadyAddedError(
1262 raise NodeAlreadyAddedError(
1257 "Such FileNode %s is already marked for addition"
1263 "Such FileNode %s is already marked for addition"
1258 % node.path)
1264 % node.path)
1259 for node in filenodes:
1265 for node in filenodes:
1260 self.added.append(node)
1266 self.added.append(node)
1261
1267
1262 def change(self, *filenodes):
1268 def change(self, *filenodes):
1263 """
1269 """
1264 Marks given ``FileNode`` objects to be *changed* in next commit.
1270 Marks given ``FileNode`` objects to be *changed* in next commit.
1265
1271
1266 :raises ``EmptyRepositoryError``: if there are no commits yet
1272 :raises ``EmptyRepositoryError``: if there are no commits yet
1267 :raises ``NodeAlreadyExistsError``: if node with same path is already
1273 :raises ``NodeAlreadyExistsError``: if node with same path is already
1268 marked to be *changed*
1274 marked to be *changed*
1269 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1275 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1270 marked to be *removed*
1276 marked to be *removed*
1271 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1277 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1272 commit
1278 commit
1273 :raises ``NodeNotChangedError``: if node hasn't really been changed
1279 :raises ``NodeNotChangedError``: if node hasn't really been changed
1274 """
1280 """
1275 for node in filenodes:
1281 for node in filenodes:
1276 if node.path in (n.path for n in self.removed):
1282 if node.path in (n.path for n in self.removed):
1277 raise NodeAlreadyRemovedError(
1283 raise NodeAlreadyRemovedError(
1278 "Node at %s is already marked as removed" % node.path)
1284 "Node at %s is already marked as removed" % node.path)
1279 try:
1285 try:
1280 self.repository.get_commit()
1286 self.repository.get_commit()
1281 except EmptyRepositoryError:
1287 except EmptyRepositoryError:
1282 raise EmptyRepositoryError(
1288 raise EmptyRepositoryError(
1283 "Nothing to change - try to *add* new nodes rather than "
1289 "Nothing to change - try to *add* new nodes rather than "
1284 "changing them")
1290 "changing them")
1285 for node in filenodes:
1291 for node in filenodes:
1286 if node.path in (n.path for n in self.changed):
1292 if node.path in (n.path for n in self.changed):
1287 raise NodeAlreadyChangedError(
1293 raise NodeAlreadyChangedError(
1288 "Node at '%s' is already marked as changed" % node.path)
1294 "Node at '%s' is already marked as changed" % node.path)
1289 self.changed.append(node)
1295 self.changed.append(node)
1290
1296
1291 def remove(self, *filenodes):
1297 def remove(self, *filenodes):
1292 """
1298 """
1293 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1299 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1294 *removed* in next commit.
1300 *removed* in next commit.
1295
1301
1296 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1302 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1297 be *removed*
1303 be *removed*
1298 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1304 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1299 be *changed*
1305 be *changed*
1300 """
1306 """
1301 for node in filenodes:
1307 for node in filenodes:
1302 if node.path in (n.path for n in self.removed):
1308 if node.path in (n.path for n in self.removed):
1303 raise NodeAlreadyRemovedError(
1309 raise NodeAlreadyRemovedError(
1304 "Node is already marked to for removal at %s" % node.path)
1310 "Node is already marked to for removal at %s" % node.path)
1305 if node.path in (n.path for n in self.changed):
1311 if node.path in (n.path for n in self.changed):
1306 raise NodeAlreadyChangedError(
1312 raise NodeAlreadyChangedError(
1307 "Node is already marked to be changed at %s" % node.path)
1313 "Node is already marked to be changed at %s" % node.path)
1308 # We only mark node as *removed* - real removal is done by
1314 # We only mark node as *removed* - real removal is done by
1309 # commit method
1315 # commit method
1310 self.removed.append(node)
1316 self.removed.append(node)
1311
1317
1312 def reset(self):
1318 def reset(self):
1313 """
1319 """
1314 Resets this instance to initial state (cleans ``added``, ``changed``
1320 Resets this instance to initial state (cleans ``added``, ``changed``
1315 and ``removed`` lists).
1321 and ``removed`` lists).
1316 """
1322 """
1317 self.added = []
1323 self.added = []
1318 self.changed = []
1324 self.changed = []
1319 self.removed = []
1325 self.removed = []
1320 self.parents = []
1326 self.parents = []
1321
1327
1322 def get_ipaths(self):
1328 def get_ipaths(self):
1323 """
1329 """
1324 Returns generator of paths from nodes marked as added, changed or
1330 Returns generator of paths from nodes marked as added, changed or
1325 removed.
1331 removed.
1326 """
1332 """
1327 for node in itertools.chain(self.added, self.changed, self.removed):
1333 for node in itertools.chain(self.added, self.changed, self.removed):
1328 yield node.path
1334 yield node.path
1329
1335
1330 def get_paths(self):
1336 def get_paths(self):
1331 """
1337 """
1332 Returns list of paths from nodes marked as added, changed or removed.
1338 Returns list of paths from nodes marked as added, changed or removed.
1333 """
1339 """
1334 return list(self.get_ipaths())
1340 return list(self.get_ipaths())
1335
1341
1336 def check_integrity(self, parents=None):
1342 def check_integrity(self, parents=None):
1337 """
1343 """
1338 Checks in-memory commit's integrity. Also, sets parents if not
1344 Checks in-memory commit's integrity. Also, sets parents if not
1339 already set.
1345 already set.
1340
1346
1341 :raises CommitError: if any error occurs (i.e.
1347 :raises CommitError: if any error occurs (i.e.
1342 ``NodeDoesNotExistError``).
1348 ``NodeDoesNotExistError``).
1343 """
1349 """
1344 if not self.parents:
1350 if not self.parents:
1345 parents = parents or []
1351 parents = parents or []
1346 if len(parents) == 0:
1352 if len(parents) == 0:
1347 try:
1353 try:
1348 parents = [self.repository.get_commit(), None]
1354 parents = [self.repository.get_commit(), None]
1349 except EmptyRepositoryError:
1355 except EmptyRepositoryError:
1350 parents = [None, None]
1356 parents = [None, None]
1351 elif len(parents) == 1:
1357 elif len(parents) == 1:
1352 parents += [None]
1358 parents += [None]
1353 self.parents = parents
1359 self.parents = parents
1354
1360
1355 # Local parents, only if not None
1361 # Local parents, only if not None
1356 parents = [p for p in self.parents if p]
1362 parents = [p for p in self.parents if p]
1357
1363
1358 # Check nodes marked as added
1364 # Check nodes marked as added
1359 for p in parents:
1365 for p in parents:
1360 for node in self.added:
1366 for node in self.added:
1361 try:
1367 try:
1362 p.get_node(node.path)
1368 p.get_node(node.path)
1363 except NodeDoesNotExistError:
1369 except NodeDoesNotExistError:
1364 pass
1370 pass
1365 else:
1371 else:
1366 raise NodeAlreadyExistsError(
1372 raise NodeAlreadyExistsError(
1367 "Node `%s` already exists at %s" % (node.path, p))
1373 "Node `%s` already exists at %s" % (node.path, p))
1368
1374
1369 # Check nodes marked as changed
1375 # Check nodes marked as changed
1370 missing = set(self.changed)
1376 missing = set(self.changed)
1371 not_changed = set(self.changed)
1377 not_changed = set(self.changed)
1372 if self.changed and not parents:
1378 if self.changed and not parents:
1373 raise NodeDoesNotExistError(str(self.changed[0].path))
1379 raise NodeDoesNotExistError(str(self.changed[0].path))
1374 for p in parents:
1380 for p in parents:
1375 for node in self.changed:
1381 for node in self.changed:
1376 try:
1382 try:
1377 old = p.get_node(node.path)
1383 old = p.get_node(node.path)
1378 missing.remove(node)
1384 missing.remove(node)
1379 # if content actually changed, remove node from not_changed
1385 # if content actually changed, remove node from not_changed
1380 if old.content != node.content:
1386 if old.content != node.content:
1381 not_changed.remove(node)
1387 not_changed.remove(node)
1382 except NodeDoesNotExistError:
1388 except NodeDoesNotExistError:
1383 pass
1389 pass
1384 if self.changed and missing:
1390 if self.changed and missing:
1385 raise NodeDoesNotExistError(
1391 raise NodeDoesNotExistError(
1386 "Node `%s` marked as modified but missing in parents: %s"
1392 "Node `%s` marked as modified but missing in parents: %s"
1387 % (node.path, parents))
1393 % (node.path, parents))
1388
1394
1389 if self.changed and not_changed:
1395 if self.changed and not_changed:
1390 raise NodeNotChangedError(
1396 raise NodeNotChangedError(
1391 "Node `%s` wasn't actually changed (parents: %s)"
1397 "Node `%s` wasn't actually changed (parents: %s)"
1392 % (not_changed.pop().path, parents))
1398 % (not_changed.pop().path, parents))
1393
1399
1394 # Check nodes marked as removed
1400 # Check nodes marked as removed
1395 if self.removed and not parents:
1401 if self.removed and not parents:
1396 raise NodeDoesNotExistError(
1402 raise NodeDoesNotExistError(
1397 "Cannot remove node at %s as there "
1403 "Cannot remove node at %s as there "
1398 "were no parents specified" % self.removed[0].path)
1404 "were no parents specified" % self.removed[0].path)
1399 really_removed = set()
1405 really_removed = set()
1400 for p in parents:
1406 for p in parents:
1401 for node in self.removed:
1407 for node in self.removed:
1402 try:
1408 try:
1403 p.get_node(node.path)
1409 p.get_node(node.path)
1404 really_removed.add(node)
1410 really_removed.add(node)
1405 except CommitError:
1411 except CommitError:
1406 pass
1412 pass
1407 not_removed = set(self.removed) - really_removed
1413 not_removed = set(self.removed) - really_removed
1408 if not_removed:
1414 if not_removed:
1409 # TODO: johbo: This code branch does not seem to be covered
1415 # TODO: johbo: This code branch does not seem to be covered
1410 raise NodeDoesNotExistError(
1416 raise NodeDoesNotExistError(
1411 "Cannot remove node at %s from "
1417 "Cannot remove node at %s from "
1412 "following parents: %s" % (not_removed, parents))
1418 "following parents: %s" % (not_removed, parents))
1413
1419
1414 def commit(
1420 def commit(
1415 self, message, author, parents=None, branch=None, date=None,
1421 self, message, author, parents=None, branch=None, date=None,
1416 **kwargs):
1422 **kwargs):
1417 """
1423 """
1418 Performs in-memory commit (doesn't check workdir in any way) and
1424 Performs in-memory commit (doesn't check workdir in any way) and
1419 returns newly created :class:`BaseCommit`. Updates repository's
1425 returns newly created :class:`BaseCommit`. Updates repository's
1420 attribute `commits`.
1426 attribute `commits`.
1421
1427
1422 .. note::
1428 .. note::
1423
1429
1424 While overriding this method each backend should call
1430 While overriding this method each backend should call
1425 ``self.check_integrity(parents)`` in the first place.
1431 ``self.check_integrity(parents)`` in the first place.
1426
1432
1427 :param message: message of the commit
1433 :param message: message of the commit
1428 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1434 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1429 :param parents: single parent or sequence of parents from which commit
1435 :param parents: single parent or sequence of parents from which commit
1430 would be derived
1436 would be derived
1431 :param date: ``datetime.datetime`` instance. Defaults to
1437 :param date: ``datetime.datetime`` instance. Defaults to
1432 ``datetime.datetime.now()``.
1438 ``datetime.datetime.now()``.
1433 :param branch: branch name, as string. If none given, default backend's
1439 :param branch: branch name, as string. If none given, default backend's
1434 branch would be used.
1440 branch would be used.
1435
1441
1436 :raises ``CommitError``: if any error occurs while committing
1442 :raises ``CommitError``: if any error occurs while committing
1437 """
1443 """
1438 raise NotImplementedError
1444 raise NotImplementedError
1439
1445
1440
1446
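A hedged sketch of the in-memory commit workflow; repo.in_memory_commit and the FileNode constructor are assumed to be provided by the concrete backend:

imc = repo.in_memory_commit
imc.add(FileNode('docs/README.rst', content='hello'))
new_commit = imc.commit(
    message=u'Add README',
    author=u'Joe Doe <joe.doe@example.com>')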
1441 class BaseInMemoryChangesetClass(type):
1447 class BaseInMemoryChangesetClass(type):
1442
1448
1443 def __instancecheck__(self, instance):
1449 def __instancecheck__(self, instance):
1444 return isinstance(instance, BaseInMemoryCommit)
1450 return isinstance(instance, BaseInMemoryCommit)
1445
1451
1446
1452
1447 class BaseInMemoryChangeset(BaseInMemoryCommit):
1453 class BaseInMemoryChangeset(BaseInMemoryCommit):
1448
1454
1449 __metaclass__ = BaseInMemoryChangesetClass
1455 __metaclass__ = BaseInMemoryChangesetClass
1450
1456
1451 def __new__(cls, *args, **kwargs):
1457 def __new__(cls, *args, **kwargs):
1452 warnings.warn(
1458 warnings.warn(
1453 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1459 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1454 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1460 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1455
1461
1456
1462
1457 class EmptyCommit(BaseCommit):
1463 class EmptyCommit(BaseCommit):
1458 """
1464 """
1459 A dummy empty commit. It's possible to pass a hash when creating
1465 A dummy empty commit. It's possible to pass a hash when creating
1460 an EmptyCommit
1466 an EmptyCommit
1461 """
1467 """
1462
1468
1463 def __init__(
1469 def __init__(
1464 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1470 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1465 message='', author='', date=None):
1471 message='', author='', date=None):
1466 self._empty_commit_id = commit_id
1472 self._empty_commit_id = commit_id
1467 # TODO: johbo: Solve idx parameter, default value does not make
1473 # TODO: johbo: Solve idx parameter, default value does not make
1468 # too much sense
1474 # too much sense
1469 self.idx = idx
1475 self.idx = idx
1470 self.message = message
1476 self.message = message
1471 self.author = author
1477 self.author = author
1472 self.date = date or datetime.datetime.fromtimestamp(0)
1478 self.date = date or datetime.datetime.fromtimestamp(0)
1473 self.repository = repo
1479 self.repository = repo
1474 self.alias = alias
1480 self.alias = alias
1475
1481
1476 @LazyProperty
1482 @LazyProperty
1477 def raw_id(self):
1483 def raw_id(self):
1478 """
1484 """
1479 Returns raw string identifying this commit, useful for web
1485 Returns raw string identifying this commit, useful for web
1480 representation.
1486 representation.
1481 """
1487 """
1482
1488
1483 return self._empty_commit_id
1489 return self._empty_commit_id
1484
1490
1485 @LazyProperty
1491 @LazyProperty
1486 def branch(self):
1492 def branch(self):
1487 if self.alias:
1493 if self.alias:
1488 from rhodecode.lib.vcs.backends import get_backend
1494 from rhodecode.lib.vcs.backends import get_backend
1489 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1495 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1490
1496
1491 @LazyProperty
1497 @LazyProperty
1492 def short_id(self):
1498 def short_id(self):
1493 return self.raw_id[:12]
1499 return self.raw_id[:12]
1494
1500
1495 @LazyProperty
1501 @LazyProperty
1496 def id(self):
1502 def id(self):
1497 return self.raw_id
1503 return self.raw_id
1498
1504
1499 def get_file_commit(self, path):
1505 def get_file_commit(self, path):
1500 return self
1506 return self
1501
1507
1502 def get_file_content(self, path):
1508 def get_file_content(self, path):
1503 return u''
1509 return u''
1504
1510
1505 def get_file_size(self, path):
1511 def get_file_size(self, path):
1506 return 0
1512 return 0
1507
1513
1508
1514
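A sketch of EmptyCommit as a stand-in base commit, e.g. for root commits; the alias only decides which backend's default branch name is reported:

base = EmptyCommit(repo=repo, alias='git')
assert base.raw_id == '0' * 40
assert base.short_id == '0' * 12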
1509 class EmptyChangesetClass(type):
1515 class EmptyChangesetClass(type):
1510
1516
1511 def __instancecheck__(self, instance):
1517 def __instancecheck__(self, instance):
1512 return isinstance(instance, EmptyCommit)
1518 return isinstance(instance, EmptyCommit)
1513
1519
1514
1520
1515 class EmptyChangeset(EmptyCommit):
1521 class EmptyChangeset(EmptyCommit):
1516
1522
1517 __metaclass__ = EmptyChangesetClass
1523 __metaclass__ = EmptyChangesetClass
1518
1524
1519 def __new__(cls, *args, **kwargs):
1525 def __new__(cls, *args, **kwargs):
1520 warnings.warn(
1526 warnings.warn(
1521 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1527 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1522 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1528 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1523
1529
1524 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1530 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1525 alias=None, revision=-1, message='', author='', date=None):
1531 alias=None, revision=-1, message='', author='', date=None):
1526 if requested_revision is not None:
1532 if requested_revision is not None:
1527 warnings.warn(
1533 warnings.warn(
1528 "Parameter requested_revision not supported anymore",
1534 "Parameter requested_revision not supported anymore",
1529 DeprecationWarning)
1535 DeprecationWarning)
1530 super(EmptyChangeset, self).__init__(
1536 super(EmptyChangeset, self).__init__(
1531 commit_id=cs, repo=repo, alias=alias, idx=revision,
1537 commit_id=cs, repo=repo, alias=alias, idx=revision,
1532 message=message, author=author, date=date)
1538 message=message, author=author, date=date)
1533
1539
1534 @property
1540 @property
1535 def revision(self):
1541 def revision(self):
1536 warnings.warn("Use idx instead", DeprecationWarning)
1542 warnings.warn("Use idx instead", DeprecationWarning)
1537 return self.idx
1543 return self.idx
1538
1544
1539 @revision.setter
1545 @revision.setter
1540 def revision(self, value):
1546 def revision(self, value):
1541 warnings.warn("Use idx instead", DeprecationWarning)
1547 warnings.warn("Use idx instead", DeprecationWarning)
1542 self.idx = value
1548 self.idx = value
1543
1549
1544
1550
1545 class EmptyRepository(BaseRepository):
1551 class EmptyRepository(BaseRepository):
1546 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1552 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1547 pass
1553 pass
1548
1554
1549 def get_diff(self, *args, **kwargs):
1555 def get_diff(self, *args, **kwargs):
1550 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1556 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1551 return GitDiff('')
1557 return GitDiff('')
1552
1558
1553
1559
1554 class CollectionGenerator(object):
1560 class CollectionGenerator(object):
1555
1561
1556 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
1562 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
1557 self.repo = repo
1563 self.repo = repo
1558 self.commit_ids = commit_ids
1564 self.commit_ids = commit_ids
1559 # TODO: (oliver) this isn't currently hooked up
1565 # TODO: (oliver) this isn't currently hooked up
1560 self.collection_size = None
1566 self.collection_size = None
1561 self.pre_load = pre_load
1567 self.pre_load = pre_load
1562
1568
1563 def __len__(self):
1569 def __len__(self):
1564 if self.collection_size is not None:
1570 if self.collection_size is not None:
1565 return self.collection_size
1571 return self.collection_size
1566 return self.commit_ids.__len__()
1572 return self.commit_ids.__len__()
1567
1573
1568 def __iter__(self):
1574 def __iter__(self):
1569 for commit_id in self.commit_ids:
1575 for commit_id in self.commit_ids:
1570 # TODO: johbo: Mercurial passes in commit indices or commit ids
1576 # TODO: johbo: Mercurial passes in commit indices or commit ids
1571 yield self._commit_factory(commit_id)
1577 yield self._commit_factory(commit_id)
1572
1578
1573 def _commit_factory(self, commit_id):
1579 def _commit_factory(self, commit_id):
1574 """
1580 """
1575 Allows backends to override the way commits are generated.
1581 Allows backends to override the way commits are generated.
1576 """
1582 """
1577 return self.repo.get_commit(commit_id=commit_id,
1583 return self.repo.get_commit(commit_id=commit_id,
1578 pre_load=self.pre_load)
1584 pre_load=self.pre_load)
1579
1585
1580 def __getslice__(self, i, j):
1586 def __getslice__(self, i, j):
1581 """
1587 """
1582 Returns a new collection generator over the sliced commit range
1588 Returns a new collection generator over the sliced commit range
1583 """
1589 """
1584 commit_ids = self.commit_ids[i:j]
1590 commit_ids = self.commit_ids[i:j]
1585 return self.__class__(
1591 return self.__class__(
1586 self.repo, commit_ids, pre_load=self.pre_load)
1592 self.repo, commit_ids, pre_load=self.pre_load)
1587
1593
1588 def __repr__(self):
1594 def __repr__(self):
1589 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1595 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1590
1596
1591
1597
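A sketch of the lazy commit collection; slicing returns another generator of the same class instead of materializing commits:

commits = CollectionGenerator(repo, repo.commit_ids, pre_load=['message'])
for commit in commits[:5]:
    print(commit.message)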
1592 class Config(object):
1598 class Config(object):
1593 """
1599 """
1594 Represents the configuration for a repository.
1600 Represents the configuration for a repository.
1595
1601
1596 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1602 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1597 standard library. It implements only the needed subset.
1603 standard library. It implements only the needed subset.
1598 """
1604 """
1599
1605
1600 def __init__(self):
1606 def __init__(self):
1601 self._values = {}
1607 self._values = {}
1602
1608
1603 def copy(self):
1609 def copy(self):
1604 clone = Config()
1610 clone = Config()
1605 for section, values in self._values.items():
1611 for section, values in self._values.items():
1606 clone._values[section] = values.copy()
1612 clone._values[section] = values.copy()
1607 return clone
1613 return clone
1608
1614
1609 def __repr__(self):
1615 def __repr__(self):
1610 return '<Config(%s sections) at %s>' % (
1616 return '<Config(%s sections) at %s>' % (
1611 len(self._values), hex(id(self)))
1617 len(self._values), hex(id(self)))
1612
1618
1613 def items(self, section):
1619 def items(self, section):
1614 return self._values.get(section, {}).iteritems()
1620 return self._values.get(section, {}).iteritems()
1615
1621
1616 def get(self, section, option):
1622 def get(self, section, option):
1617 return self._values.get(section, {}).get(option)
1623 return self._values.get(section, {}).get(option)
1618
1624
1619 def set(self, section, option, value):
1625 def set(self, section, option, value):
1620 section_values = self._values.setdefault(section, {})
1626 section_values = self._values.setdefault(section, {})
1621 section_values[option] = value
1627 section_values[option] = value
1622
1628
1623 def clear_section(self, section):
1629 def clear_section(self, section):
1624 self._values[section] = {}
1630 self._values[section] = {}
1625
1631
1626 def serialize(self):
1632 def serialize(self):
1627 """
1633 """
1628 Creates a list of three-element tuples (section, key, value) representing
1634 Creates a list of three-element tuples (section, key, value) representing
1629 this config object.
1635 this config object.
1630 """
1636 """
1631 items = []
1637 items = []
1632 for section in self._values:
1638 for section in self._values:
1633 for option, value in self._values[section].items():
1639 for option, value in self._values[section].items():
1634 items.append(
1640 items.append(
1635 (safe_str(section), safe_str(option), safe_str(value)))
1641 (safe_str(section), safe_str(option), safe_str(value)))
1636 return items
1642 return items
1637
1643
1638
1644
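A sketch of the minimal ConfigParser-like API implemented above:

config = Config()
config.set('extensions', 'largefiles', '')
assert config.get('extensions', 'largefiles') == ''
for section, option, value in config.serialize():
    print('%s.%s = %s' % (section, option, value))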
1639 class Diff(object):
1645 class Diff(object):
1640 """
1646 """
1641 Represents a diff result from a repository backend.
1647 Represents a diff result from a repository backend.
1642
1648
1643 Subclasses have to provide a backend specific value for
1649 Subclasses have to provide a backend specific value for
1644 :attr:`_header_re` and :attr:`_meta_re`.
1650 :attr:`_header_re` and :attr:`_meta_re`.
1645 """
1651 """
1646 _meta_re = None
1652 _meta_re = None
1647 _header_re = None
1653 _header_re = None
1648
1654
1649 def __init__(self, raw_diff):
1655 def __init__(self, raw_diff):
1650 self.raw = raw_diff
1656 self.raw = raw_diff
1651
1657
1652 def chunks(self):
1658 def chunks(self):
1653 """
1659 """
1654 split the diff into separate --git a/file b/file chunks;
1660 split the diff into separate --git a/file b/file chunks;
1655 to make diffs consistent we must prepend with \n, and make sure
1661 to make diffs consistent we must prepend with \n, and make sure
1656 we can detect the last chunk, as it also has a special rule
1662 we can detect the last chunk, as it also has a special rule
1657 """
1663 """
1658
1664
1659 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1665 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1660 header = diff_parts[0]
1666 header = diff_parts[0]
1661
1667
1662 if self._meta_re:
1668 if self._meta_re:
1663 match = self._meta_re.match(header)
1669 match = self._meta_re.match(header)
1664
1670
1665 chunks = diff_parts[1:]
1671 chunks = diff_parts[1:]
1666 total_chunks = len(chunks)
1672 total_chunks = len(chunks)
1667
1673
1668 return (
1674 return (
1669 DiffChunk(chunk, self, cur_chunk == total_chunks)
1675 DiffChunk(chunk, self, cur_chunk == total_chunks)
1670 for cur_chunk, chunk in enumerate(chunks, start=1))
1676 for cur_chunk, chunk in enumerate(chunks, start=1))
1671
1677
1672
1678
1673 class DiffChunk(object):
1679 class DiffChunk(object):
1674
1680
1675 def __init__(self, chunk, diff, last_chunk):
1681 def __init__(self, chunk, diff, last_chunk):
1676 self._diff = diff
1682 self._diff = diff
1677
1683
1678 # since we split by \ndiff --git that part is lost from original diff
1684 # since we split by \ndiff --git that part is lost from original diff
1679 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1685 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1680 if not last_chunk:
1686 if not last_chunk:
1681 chunk += '\n'
1687 chunk += '\n'
1682
1688
1683 match = self._diff._header_re.match(chunk)
1689 match = self._diff._header_re.match(chunk)
1684 self.header = match.groupdict()
1690 self.header = match.groupdict()
1685 self.diff = chunk[match.end():]
1691 self.diff = chunk[match.end():]
1686 self.raw = chunk
1692 self.raw = chunk
1687
1693
1688
1694
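A sketch of splitting a backend diff into per-file chunks; GitDiff is the concrete subclass imported elsewhere in this module, and raw_diff_text is assumed to hold git-style unified diff output:

from rhodecode.lib.vcs.backends.git.diff import GitDiff

for chunk in GitDiff(raw_diff_text).chunks():
    print(chunk.header)      # parsed per-file header fields
    print(chunk.diff[:120])  # diff body without the header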
1689 class BasePathPermissionChecker(object):
1695 class BasePathPermissionChecker(object):
1690
1696
1691 @staticmethod
1697 @staticmethod
1692 def create_from_patterns(includes, excludes):
1698 def create_from_patterns(includes, excludes):
1693 if includes and '*' in includes and not excludes:
1699 if includes and '*' in includes and not excludes:
1694 return AllPathPermissionChecker()
1700 return AllPathPermissionChecker()
1695 elif excludes and '*' in excludes:
1701 elif excludes and '*' in excludes:
1696 return NonePathPermissionChecker()
1702 return NonePathPermissionChecker()
1697 else:
1703 else:
1698 return PatternPathPermissionChecker(includes, excludes)
1704 return PatternPathPermissionChecker(includes, excludes)
1699
1705
1700 @property
1706 @property
1701 def has_full_access(self):
1707 def has_full_access(self):
1702 raise NotImplementedError()
1708 raise NotImplementedError()
1703
1709
1704 def has_access(self, path):
1710 def has_access(self, path):
1705 raise NotImplementedError()
1711 raise NotImplementedError()
1706
1712
1707
1713
1708 class AllPathPermissionChecker(BasePathPermissionChecker):
1714 class AllPathPermissionChecker(BasePathPermissionChecker):
1709
1715
1710 @property
1716 @property
1711 def has_full_access(self):
1717 def has_full_access(self):
1712 return True
1718 return True
1713
1719
1714 def has_access(self, path):
1720 def has_access(self, path):
1715 return True
1721 return True
1716
1722
1717
1723
1718 class NonePathPermissionChecker(BasePathPermissionChecker):
1724 class NonePathPermissionChecker(BasePathPermissionChecker):
1719
1725
1720 @property
1726 @property
1721 def has_full_access(self):
1727 def has_full_access(self):
1722 return False
1728 return False
1723
1729
1724 def has_access(self, path):
1730 def has_access(self, path):
1725 return False
1731 return False
1726
1732
1727
1733
1728 class PatternPathPermissionChecker(BasePathPermissionChecker):
1734 class PatternPathPermissionChecker(BasePathPermissionChecker):
1729
1735
1730 def __init__(self, includes, excludes):
1736 def __init__(self, includes, excludes):
1731 self.includes = includes
1737 self.includes = includes
1732 self.excludes = excludes
1738 self.excludes = excludes
1733 self.includes_re = [] if not includes else [
1739 self.includes_re = [] if not includes else [
1734 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1740 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1735 self.excludes_re = [] if not excludes else [
1741 self.excludes_re = [] if not excludes else [
1736 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1742 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1737
1743
1738 @property
1744 @property
1739 def has_full_access(self):
1745 def has_full_access(self):
1740 return '*' in self.includes and not self.excludes
1746 return '*' in self.includes and not self.excludes
1741
1747
1742 def has_access(self, path):
1748 def has_access(self, path):
1743 for regex in self.excludes_re:
1749 for regex in self.excludes_re:
1744 if regex.match(path):
1750 if regex.match(path):
1745 return False
1751 return False
1746 for regex in self.includes_re:
1752 for regex in self.includes_re:
1747 if regex.match(path):
1753 if regex.match(path):
1748 return True
1754 return True
1749 return False
1755 return False
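A sketch of building a path permission checker from glob patterns via the factory above:

checker = BasePathPermissionChecker.create_from_patterns(
    includes=['docs/*', 'README.rst'], excludes=['docs/secret/*'])
assert not checker.has_full_access
assert checker.has_access('docs/index.rst')
assert not checker.has_access('docs/secret/key')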
@@ -1,869 +1,884 b''
1 <%namespace name="commentblock" file="/changeset/changeset_file_comment.mako"/>
1 <%namespace name="commentblock" file="/changeset/changeset_file_comment.mako"/>
2
2
3 <%def name="diff_line_anchor(filename, line, type)"><%
3 <%def name="diff_line_anchor(filename, line, type)"><%
4 return '%s_%s_%i' % (h.safeid(filename), type, line)
4 return '%s_%s_%i' % (h.safeid(filename), type, line)
5 %></%def>
5 %></%def>
6
6
7 <%def name="action_class(action)">
7 <%def name="action_class(action)">
8 <%
8 <%
9 return {
9 return {
10 '-': 'cb-deletion',
10 '-': 'cb-deletion',
11 '+': 'cb-addition',
11 '+': 'cb-addition',
12 ' ': 'cb-context',
12 ' ': 'cb-context',
13 }.get(action, 'cb-empty')
13 }.get(action, 'cb-empty')
14 %>
14 %>
15 </%def>
15 </%def>
16
16
17 <%def name="op_class(op_id)">
17 <%def name="op_class(op_id)">
18 <%
18 <%
19 return {
19 return {
20 DEL_FILENODE: 'deletion', # file deleted
20 DEL_FILENODE: 'deletion', # file deleted
21 BIN_FILENODE: 'warning' # binary diff hidden
21 BIN_FILENODE: 'warning' # binary diff hidden
22 }.get(op_id, 'addition')
22 }.get(op_id, 'addition')
23 %>
23 %>
24 </%def>
24 </%def>
25
25
26
26
27
27
28 <%def name="render_diffset(diffset, commit=None,
28 <%def name="render_diffset(diffset, commit=None,
29
29
30 # collapse all file diff entries when there are more than this amount of files in the diff
30 # collapse all file diff entries when there are more than this amount of files in the diff
31 collapse_when_files_over=20,
31 collapse_when_files_over=20,
32
32
33 # collapse lines in the diff when more than this amount of lines changed in the file diff
33 # collapse lines in the diff when more than this amount of lines changed in the file diff
34 lines_changed_limit=500,
34 lines_changed_limit=500,
35
35
36 # add a ruler to the output
36 # add a ruler to the output
37 ruler_at_chars=0,
37 ruler_at_chars=0,
38
38
39 # show inline comments
39 # show inline comments
40 use_comments=False,
40 use_comments=False,
41
41
42 # disable new comments
42 # disable new comments
43 disable_new_comments=False,
43 disable_new_comments=False,
44
44
45 # special file-comments that were deleted in previous versions
45 # special file-comments that were deleted in previous versions
46 # it's used for showing outdated comments for deleted files in a PR
46 # it's used for showing outdated comments for deleted files in a PR
47 deleted_files_comments=None,
47 deleted_files_comments=None,
48
48
49 # for cache purpose
49 # for cache purpose
50 inline_comments=None
50 inline_comments=None,
51
51
52 )">
52 )">
53 %if use_comments:
53 %if use_comments:
54 <div id="cb-comments-inline-container-template" class="js-template">
54 <div id="cb-comments-inline-container-template" class="js-template">
55 ${inline_comments_container([], inline_comments)}
55 ${inline_comments_container([], inline_comments)}
56 </div>
56 </div>
57 <div class="js-template" id="cb-comment-inline-form-template">
57 <div class="js-template" id="cb-comment-inline-form-template">
58 <div class="comment-inline-form ac">
58 <div class="comment-inline-form ac">
59
59
60 %if c.rhodecode_user.username != h.DEFAULT_USER:
60 %if c.rhodecode_user.username != h.DEFAULT_USER:
61 ## render template for inline comments
61 ## render template for inline comments
62 ${commentblock.comment_form(form_type='inline')}
62 ${commentblock.comment_form(form_type='inline')}
63 %else:
63 %else:
64 ${h.form('', class_='inline-form comment-form-login', method='get')}
64 ${h.form('', class_='inline-form comment-form-login', method='get')}
65 <div class="pull-left">
65 <div class="pull-left">
66 <div class="comment-help pull-right">
66 <div class="comment-help pull-right">
67 ${_('You need to be logged in to leave comments.')} <a href="${h.route_path('login', _query={'came_from': h.current_route_path(request)})}">${_('Login now')}</a>
67 ${_('You need to be logged in to leave comments.')} <a href="${h.route_path('login', _query={'came_from': h.current_route_path(request)})}">${_('Login now')}</a>
68 </div>
68 </div>
69 </div>
69 </div>
70 <div class="comment-button pull-right">
70 <div class="comment-button pull-right">
71 <button type="button" class="cb-comment-cancel" onclick="return Rhodecode.comments.cancelComment(this);">
71 <button type="button" class="cb-comment-cancel" onclick="return Rhodecode.comments.cancelComment(this);">
72 ${_('Cancel')}
72 ${_('Cancel')}
73 </button>
73 </button>
74 </div>
74 </div>
75 <div class="clearfix"></div>
75 <div class="clearfix"></div>
76 ${h.end_form()}
76 ${h.end_form()}
77 %endif
77 %endif
78 </div>
78 </div>
79 </div>
79 </div>
80
80
81 %endif
81 %endif
82 <%
82 <%
83 collapse_all = len(diffset.files) > collapse_when_files_over
83 collapse_all = len(diffset.files) > collapse_when_files_over
84 %>
84 %>
85
85
86 %if c.user_session_attrs["diffmode"] == 'sideside':
86 %if c.user_session_attrs["diffmode"] == 'sideside':
87 <style>
87 <style>
88 .wrapper {
88 .wrapper {
89 max-width: 1600px !important;
89 max-width: 1600px !important;
90 }
90 }
91 </style>
91 </style>
92 %endif
92 %endif
93
93
94 %if ruler_at_chars:
94 %if ruler_at_chars:
95 <style>
95 <style>
96 .diff table.cb .cb-content:after {
96 .diff table.cb .cb-content:after {
97 content: "";
97 content: "";
98 border-left: 1px solid blue;
98 border-left: 1px solid blue;
99 position: absolute;
99 position: absolute;
100 top: 0;
100 top: 0;
101 height: 18px;
101 height: 18px;
102 opacity: .2;
102 opacity: .2;
103 z-index: 10;
103 z-index: 10;
104 //## +5 to account for diff action (+/-)
104 //## +5 to account for diff action (+/-)
105 left: ${ruler_at_chars + 5}ch;
105 left: ${ruler_at_chars + 5}ch;
106 </style>
106 </style>
107 %endif
107 %endif
108
108
109 <div class="diffset ${disable_new_comments and 'diffset-comments-disabled'}">
109 <div class="diffset ${disable_new_comments and 'diffset-comments-disabled'}">
110 <div class="diffset-heading ${diffset.limited_diff and 'diffset-heading-warning' or ''}">
110 <div class="diffset-heading ${diffset.limited_diff and 'diffset-heading-warning' or ''}">
111 %if commit:
111 %if commit:
112 <div class="pull-right">
112 <div class="pull-right">
113 <a class="btn tooltip" title="${h.tooltip(_('Browse Files at revision {}').format(commit.raw_id))}" href="${h.route_path('repo_files',repo_name=diffset.repo_name, commit_id=commit.raw_id, f_path='')}">
113 <a class="btn tooltip" title="${h.tooltip(_('Browse Files at revision {}').format(commit.raw_id))}" href="${h.route_path('repo_files',repo_name=diffset.repo_name, commit_id=commit.raw_id, f_path='')}">
114 ${_('Browse Files')}
114 ${_('Browse Files')}
115 </a>
115 </a>
116 </div>
116 </div>
117 %endif
117 %endif
118 <h2 class="clearinner">
118 <h2 class="clearinner">
119 ## individual commit
119 ## individual commit
120 % if commit:
120 % if commit:
121 <a class="tooltip revision" title="${h.tooltip(commit.message)}" href="${h.route_path('repo_commit',repo_name=c.repo_name,commit_id=commit.raw_id)}">${('r%s:%s' % (commit.idx,h.short_id(commit.raw_id)))}</a> -
121 <a class="tooltip revision" title="${h.tooltip(commit.message)}" href="${h.route_path('repo_commit',repo_name=diffset.repo_name,commit_id=commit.raw_id)}">${('r%s:%s' % (commit.idx,h.short_id(commit.raw_id)))}</a> -
122 ${h.age_component(commit.date)}
122 ${h.age_component(commit.date)}
123 % if diffset.limited_diff:
123 % if diffset.limited_diff:
124 - ${_('The requested commit is too big and content was truncated.')}
124 - ${_('The requested commit is too big and content was truncated.')}
125 ${_ungettext('%(num)s file changed.', '%(num)s files changed.', diffset.changed_files) % {'num': diffset.changed_files}}
125 ${_ungettext('%(num)s file changed.', '%(num)s files changed.', diffset.changed_files) % {'num': diffset.changed_files}}
126 <a href="${h.current_route_path(request, fulldiff=1)}" onclick="return confirm('${_("Showing a big diff might take some time and resources, continue?")}')">${_('Show full diff')}</a>
126 <a href="${h.current_route_path(request, fulldiff=1)}" onclick="return confirm('${_("Showing a big diff might take some time and resources, continue?")}')">${_('Show full diff')}</a>
127 % elif hasattr(c, 'commit_ranges') and len(c.commit_ranges) > 1:
127 % elif hasattr(c, 'commit_ranges') and len(c.commit_ranges) > 1:
128 ## compare diff, has no file-selector and we want to show stats anyway
128 ## compare diff, has no file-selector and we want to show stats anyway
129 ${_ungettext('{num} file changed: {linesadd} inserted, ''{linesdel} deleted',
129 ${_ungettext('{num} file changed: {linesadd} inserted, ''{linesdel} deleted',
130 '{num} files changed: {linesadd} inserted, {linesdel} deleted', diffset.changed_files) \
130 '{num} files changed: {linesadd} inserted, {linesdel} deleted', diffset.changed_files) \
131 .format(num=diffset.changed_files, linesadd=diffset.lines_added, linesdel=diffset.lines_deleted)}
131 .format(num=diffset.changed_files, linesadd=diffset.lines_added, linesdel=diffset.lines_deleted)}
132 % endif
132 % endif
133 % else:
133 % else:
134 ## pull requests/compare
134 ## pull requests/compare
135 ${_('File Changes')}
135 ${_('File Changes')}
136 % endif
136 % endif
137
137
138 </h2>
138 </h2>
139 </div>
139 </div>
140
140
141 %if diffset.has_hidden_changes:
141 %if diffset.has_hidden_changes:
142 <p class="empty_data">${_('Some changes may be hidden')}</p>
142 <p class="empty_data">${_('Some changes may be hidden')}</p>
143 %elif not diffset.files:
143 %elif not diffset.files:
144 <p class="empty_data">${_('No files')}</p>
144 <p class="empty_data">${_('No files')}</p>
145 %endif
145 %endif
146
146
147 <div class="filediffs">
147 <div class="filediffs">
148 ## initial value, may be set per file in the loop below
148 ## initial value, may be set per file in the loop below
149 <% over_lines_changed_limit = False %>
149 <% over_lines_changed_limit = False %>
150 %for i, filediff in enumerate(diffset.files):
150 %for i, filediff in enumerate(diffset.files):
151
151
152 <%
152 <%
153 lines_changed = filediff.patch['stats']['added'] + filediff.patch['stats']['deleted']
153 lines_changed = filediff.patch['stats']['added'] + filediff.patch['stats']['deleted']
154 over_lines_changed_limit = lines_changed > lines_changed_limit
154 over_lines_changed_limit = lines_changed > lines_changed_limit
155 %>
155 %>
156
156
157 <input ${(collapse_all and 'checked' or '')} class="filediff-collapse-state" id="filediff-collapse-${id(filediff)}" type="checkbox">
157 <input ${(collapse_all and 'checked' or '')} class="filediff-collapse-state" id="filediff-collapse-${id(filediff)}" type="checkbox">
158 <div
158 <div
159 class="filediff"
159 class="filediff"
160 data-f-path="${filediff.patch['filename']}"
160 data-f-path="${filediff.patch['filename']}"
161 id="a_${h.FID('', filediff.patch['filename'])}"
161 id="a_${h.FID(filediff.raw_id, filediff.patch['filename'])}"
162 >
162 >
163
163
164 <label for="filediff-collapse-${id(filediff)}" class="filediff-heading">
164 <label for="filediff-collapse-${id(filediff)}" class="filediff-heading">
165 <div class="filediff-collapse-indicator"></div>
165 <div class="filediff-collapse-indicator"></div>
166 ${diff_ops(filediff)}
166 ${diff_ops(filediff)}
167 </label>
167 </label>
168 ${diff_menu(filediff, use_comments=use_comments)}
168 ${diff_menu(filediff, use_comments=use_comments)}
169 <table class="cb cb-diff-${c.user_session_attrs["diffmode"]} code-highlight ${(over_lines_changed_limit and 'cb-collapsed' or '')}">
169 <table class="cb cb-diff-${c.user_session_attrs["diffmode"]} code-highlight ${(over_lines_changed_limit and 'cb-collapsed' or '')}">
170
170
171 ## new/deleted/empty content case
171 ## new/deleted/empty content case
172 % if not filediff.hunks:
172 % if not filediff.hunks:
173 ## Comment container, on a "fake" hunk that contains all data to render comments
173 ## Comment container, on a "fake" hunk that contains all data to render comments
174 ${render_hunk_lines(c.user_session_attrs["diffmode"], filediff.hunk_ops, use_comments=use_comments, inline_comments=inline_comments)}
174 ${render_hunk_lines(c.user_session_attrs["diffmode"], filediff.hunk_ops, use_comments=use_comments, inline_comments=inline_comments)}
175 % endif
175 % endif
176
176
177 %if filediff.limited_diff:
177 %if filediff.limited_diff:
178 <tr class="cb-warning cb-collapser">
178 <tr class="cb-warning cb-collapser">
179 <td class="cb-text" ${(c.user_session_attrs["diffmode"] == 'unified' and 'colspan=4' or 'colspan=6')}>
179 <td class="cb-text" ${(c.user_session_attrs["diffmode"] == 'unified' and 'colspan=4' or 'colspan=6')}>
180 ${_('The requested commit is too big and content was truncated.')} <a href="${h.current_route_path(request, fulldiff=1)}" onclick="return confirm('${_("Showing a big diff might take some time and resources, continue?")}')">${_('Show full diff')}</a>
180 ${_('The requested commit is too big and content was truncated.')} <a href="${h.current_route_path(request, fulldiff=1)}" onclick="return confirm('${_("Showing a big diff might take some time and resources, continue?")}')">${_('Show full diff')}</a>
181 </td>
181 </td>
182 </tr>
182 </tr>
183 %else:
183 %else:
184 %if over_lines_changed_limit:
184 %if over_lines_changed_limit:
185 <tr class="cb-warning cb-collapser">
185 <tr class="cb-warning cb-collapser">
186 <td class="cb-text" ${(c.user_session_attrs["diffmode"] == 'unified' and 'colspan=4' or 'colspan=6')}>
186 <td class="cb-text" ${(c.user_session_attrs["diffmode"] == 'unified' and 'colspan=4' or 'colspan=6')}>
187 ${_('This diff has been collapsed as it changes many lines, (%i lines changed)' % lines_changed)}
187 ${_('This diff has been collapsed as it changes many lines, (%i lines changed)' % lines_changed)}
188 <a href="#" class="cb-expand"
188 <a href="#" class="cb-expand"
189 onclick="$(this).closest('table').removeClass('cb-collapsed'); return false;">${_('Show them')}
189 onclick="$(this).closest('table').removeClass('cb-collapsed'); return false;">${_('Show them')}
190 </a>
190 </a>
191 <a href="#" class="cb-collapse"
191 <a href="#" class="cb-collapse"
192 onclick="$(this).closest('table').addClass('cb-collapsed'); return false;">${_('Hide them')}
192 onclick="$(this).closest('table').addClass('cb-collapsed'); return false;">${_('Hide them')}
193 </a>
193 </a>
194 </td>
194 </td>
195 </tr>
195 </tr>
196 %endif
196 %endif
197 %endif
197 %endif
198
198
199 % for hunk in filediff.hunks:
199 % for hunk in filediff.hunks:
200 <tr class="cb-hunk">
200 <tr class="cb-hunk">
201 <td ${(c.user_session_attrs["diffmode"] == 'unified' and 'colspan=3' or '')}>
201 <td ${(c.user_session_attrs["diffmode"] == 'unified' and 'colspan=3' or '')}>
202 ## TODO: dan: add ajax loading of more context here
202 ## TODO: dan: add ajax loading of more context here
203 ## <a href="#">
203 ## <a href="#">
204 <i class="icon-more"></i>
204 <i class="icon-more"></i>
205 ## </a>
205 ## </a>
206 </td>
206 </td>
207 <td ${(c.user_session_attrs["diffmode"] == 'sideside' and 'colspan=5' or '')}>
207 <td ${(c.user_session_attrs["diffmode"] == 'sideside' and 'colspan=5' or '')}>
208 @@
208 @@
209 -${hunk.source_start},${hunk.source_length}
209 -${hunk.source_start},${hunk.source_length}
210 +${hunk.target_start},${hunk.target_length}
210 +${hunk.target_start},${hunk.target_length}
211 ${hunk.section_header}
211 ${hunk.section_header}
212 </td>
212 </td>
213 </tr>
213 </tr>
214 ${render_hunk_lines(c.user_session_attrs["diffmode"], hunk, use_comments=use_comments, inline_comments=inline_comments)}
214 ${render_hunk_lines(c.user_session_attrs["diffmode"], hunk, use_comments=use_comments, inline_comments=inline_comments)}
215 % endfor
215 % endfor
216
216
217 <% unmatched_comments = (inline_comments or {}).get(filediff.patch['filename'], {}) %>
217 <% unmatched_comments = (inline_comments or {}).get(filediff.patch['filename'], {}) %>
218
218
219 ## outdated comments that do not fit into currently displayed lines
219 ## outdated comments that do not fit into currently displayed lines
220 % for lineno, comments in unmatched_comments.items():
220 % for lineno, comments in unmatched_comments.items():
221
221
222 %if c.user_session_attrs["diffmode"] == 'unified':
222 %if c.user_session_attrs["diffmode"] == 'unified':
223 % if loop.index == 0:
223 % if loop.index == 0:
224 <tr class="cb-hunk">
224 <tr class="cb-hunk">
225 <td colspan="3"></td>
225 <td colspan="3"></td>
226 <td>
226 <td>
227 <div>
227 <div>
228 ${_('Unmatched inline comments below')}
228 ${_('Unmatched inline comments below')}
229 </div>
229 </div>
230 </td>
230 </td>
231 </tr>
231 </tr>
232 % endif
232 % endif
233 <tr class="cb-line">
233 <tr class="cb-line">
234 <td class="cb-data cb-context"></td>
234 <td class="cb-data cb-context"></td>
235 <td class="cb-lineno cb-context"></td>
235 <td class="cb-lineno cb-context"></td>
236 <td class="cb-lineno cb-context"></td>
236 <td class="cb-lineno cb-context"></td>
237 <td class="cb-content cb-context">
237 <td class="cb-content cb-context">
238 ${inline_comments_container(comments, inline_comments)}
238 ${inline_comments_container(comments, inline_comments)}
239 </td>
239 </td>
240 </tr>
240 </tr>
241 %elif c.user_session_attrs["diffmode"] == 'sideside':
241 %elif c.user_session_attrs["diffmode"] == 'sideside':
242 % if loop.index == 0:
242 % if loop.index == 0:
243 <tr class="cb-comment-info">
243 <tr class="cb-comment-info">
244 <td colspan="2"></td>
244 <td colspan="2"></td>
245 <td class="cb-line">
245 <td class="cb-line">
246 <div>
246 <div>
247 ${_('Unmatched inline comments below')}
247 ${_('Unmatched inline comments below')}
248 </div>
248 </div>
249 </td>
249 </td>
250 <td colspan="2"></td>
250 <td colspan="2"></td>
251 <td class="cb-line">
251 <td class="cb-line">
252 <div>
252 <div>
253 ${_('Unmatched comments below')}
253 ${_('Unmatched comments below')}
254 </div>
254 </div>
255 </td>
255 </td>
256 </tr>
256 </tr>
257 % endif
257 % endif
258 <tr class="cb-line">
258 <tr class="cb-line">
259 <td class="cb-data cb-context"></td>
259 <td class="cb-data cb-context"></td>
260 <td class="cb-lineno cb-context"></td>
260 <td class="cb-lineno cb-context"></td>
261 <td class="cb-content cb-context">
261 <td class="cb-content cb-context">
262 % if lineno.startswith('o'):
262 % if lineno.startswith('o'):
263 ${inline_comments_container(comments, inline_comments)}
263 ${inline_comments_container(comments, inline_comments)}
264 % endif
264 % endif
265 </td>
265 </td>
266
266
267 <td class="cb-data cb-context"></td>
267 <td class="cb-data cb-context"></td>
268 <td class="cb-lineno cb-context"></td>
268 <td class="cb-lineno cb-context"></td>
269 <td class="cb-content cb-context">
269 <td class="cb-content cb-context">
270 % if lineno.startswith('n'):
270 % if lineno.startswith('n'):
271 ${inline_comments_container(comments, inline_comments)}
271 ${inline_comments_container(comments, inline_comments)}
272 % endif
272 % endif
273 </td>
273 </td>
274 </tr>
274 </tr>
275 %endif
275 %endif
276
276
277 % endfor
277 % endfor
278
278
279 </table>
279 </table>
280 </div>
280 </div>
281 %endfor
281 %endfor
282
282
283 ## outdated comments that are made for a file that has been deleted
283 ## outdated comments that are made for a file that has been deleted
284 % for filename, comments_dict in (deleted_files_comments or {}).items():
284 % for filename, comments_dict in (deleted_files_comments or {}).items():
285 <%
285 <%
286 display_state = 'display: none'
286 display_state = 'display: none'
287 open_comments_in_file = [x for x in comments_dict['comments'] if x.outdated is False]
287 open_comments_in_file = [x for x in comments_dict['comments'] if x.outdated is False]
288 if open_comments_in_file:
288 if open_comments_in_file:
289 display_state = ''
289 display_state = ''
290 %>
290 %>
291 <div class="filediffs filediff-outdated" style="${display_state}">
291 <div class="filediffs filediff-outdated" style="${display_state}">
292 <input ${(collapse_all and 'checked' or '')} class="filediff-collapse-state" id="filediff-collapse-${id(filename)}" type="checkbox">
292 <input ${(collapse_all and 'checked' or '')} class="filediff-collapse-state" id="filediff-collapse-${id(filename)}" type="checkbox">
293 <div class="filediff" data-f-path="${filename}" id="a_${h.FID('', filename)}">
293 <div class="filediff" data-f-path="${filename}" id="a_${h.FID(filediff.raw_id, filename)}">
294 <label for="filediff-collapse-${id(filename)}" class="filediff-heading">
294 <label for="filediff-collapse-${id(filename)}" class="filediff-heading">
295 <div class="filediff-collapse-indicator"></div>
295 <div class="filediff-collapse-indicator"></div>
296 <span class="pill">
296 <span class="pill">
297 ## file was deleted
297 ## file was deleted
298 <strong>${filename}</strong>
298 <strong>${filename}</strong>
299 </span>
299 </span>
300 <span class="pill-group" style="float: left">
300 <span class="pill-group" style="float: left">
301 ## file op, doesn't need translation
301 ## file op, doesn't need translation
302 <span class="pill" op="removed">removed in this version</span>
302 <span class="pill" op="removed">removed in this version</span>
303 </span>
303 </span>
304 <a class="pill filediff-anchor" href="#a_${h.FID('', filename)}">ΒΆ</a>
304 <a class="pill filediff-anchor" href="#a_${h.FID(filediff.raw_id, filename)}">ΒΆ</a>
305 <span class="pill-group" style="float: right">
305 <span class="pill-group" style="float: right">
306 <span class="pill" op="deleted">-${comments_dict['stats']}</span>
306 <span class="pill" op="deleted">-${comments_dict['stats']}</span>
307 </span>
307 </span>
308 </label>
308 </label>
309
309
310 <table class="cb cb-diff-${c.user_session_attrs["diffmode"]} code-highlight ${over_lines_changed_limit and 'cb-collapsed' or ''}">
310 <table class="cb cb-diff-${c.user_session_attrs["diffmode"]} code-highlight ${over_lines_changed_limit and 'cb-collapsed' or ''}">
311 <tr>
311 <tr>
312 % if c.user_session_attrs["diffmode"] == 'unified':
312 % if c.user_session_attrs["diffmode"] == 'unified':
313 <td></td>
313 <td></td>
314 %endif
314 %endif
315
315
316 <td></td>
316 <td></td>
317 <td class="cb-text cb-${op_class(BIN_FILENODE)}" ${(c.user_session_attrs["diffmode"] == 'unified' and 'colspan=4' or 'colspan=5')}>
317 <td class="cb-text cb-${op_class(BIN_FILENODE)}" ${(c.user_session_attrs["diffmode"] == 'unified' and 'colspan=4' or 'colspan=5')}>
318 ${_('File was deleted in this version. There are still outdated/unresolved comments attached to it.')}
318 ${_('File was deleted in this version. There are still outdated/unresolved comments attached to it.')}
319 </td>
319 </td>
320 </tr>
320 </tr>
321 %if c.user_session_attrs["diffmode"] == 'unified':
321 %if c.user_session_attrs["diffmode"] == 'unified':
322 <tr class="cb-line">
322 <tr class="cb-line">
323 <td class="cb-data cb-context"></td>
323 <td class="cb-data cb-context"></td>
324 <td class="cb-lineno cb-context"></td>
324 <td class="cb-lineno cb-context"></td>
325 <td class="cb-lineno cb-context"></td>
325 <td class="cb-lineno cb-context"></td>
326 <td class="cb-content cb-context">
326 <td class="cb-content cb-context">
327 ${inline_comments_container(comments_dict['comments'], inline_comments)}
327 ${inline_comments_container(comments_dict['comments'], inline_comments)}
328 </td>
328 </td>
329 </tr>
329 </tr>
330 %elif c.user_session_attrs["diffmode"] == 'sideside':
330 %elif c.user_session_attrs["diffmode"] == 'sideside':
331 <tr class="cb-line">
331 <tr class="cb-line">
332 <td class="cb-data cb-context"></td>
332 <td class="cb-data cb-context"></td>
333 <td class="cb-lineno cb-context"></td>
333 <td class="cb-lineno cb-context"></td>
334 <td class="cb-content cb-context"></td>
334 <td class="cb-content cb-context"></td>
335
335
336 <td class="cb-data cb-context"></td>
336 <td class="cb-data cb-context"></td>
337 <td class="cb-lineno cb-context"></td>
337 <td class="cb-lineno cb-context"></td>
338 <td class="cb-content cb-context">
338 <td class="cb-content cb-context">
339 ${inline_comments_container(comments_dict['comments'], inline_comments)}
339 ${inline_comments_container(comments_dict['comments'], inline_comments)}
340 </td>
340 </td>
341 </tr>
341 </tr>
342 %endif
342 %endif
343 </table>
343 </table>
344 </div>
344 </div>
345 </div>
345 </div>
346 % endfor
346 % endfor
347
347
348 </div>
348 </div>
349 </div>
349 </div>
350 </%def>
350 </%def>
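
The render_diffset def above makes two collapse decisions: every file entry starts collapsed when the diff touches more than collapse_when_files_over files, and a single file body is collapsed when it changes more than lines_changed_limit lines. A small Python sketch of those two checks, using descriptive stand-in names rather than the template's actual data structures:

def collapse_flags(files_stats, collapse_when_files_over=20, lines_changed_limit=500):
    """Sketch of the collapse logic in render_diffset; names are illustrative."""
    collapse_all = len(files_stats) > collapse_when_files_over
    flags = []
    for stats in files_stats:
        lines_changed = stats['added'] + stats['deleted']
        flags.append({
            'filename': stats['filename'],
            'entry_collapsed': collapse_all,  # whole file entry folded
            'body_collapsed': lines_changed > lines_changed_limit,  # hunks hidden behind "Show them"
        })
    return flags


# hypothetical usage with two files, one of them very large
print(collapse_flags([
    {'filename': 'setup.py', 'added': 3, 'deleted': 1},
    {'filename': 'rhodecode/lib/diffs.py', 'added': 800, 'deleted': 120},
]))
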
351
351
352 <%def name="diff_ops(filediff)">
352 <%def name="diff_ops(filediff)">
353 <%
353 <%
354 from rhodecode.lib.diffs import NEW_FILENODE, DEL_FILENODE, \
354 from rhodecode.lib.diffs import NEW_FILENODE, DEL_FILENODE, \
355 MOD_FILENODE, RENAMED_FILENODE, CHMOD_FILENODE, BIN_FILENODE, COPIED_FILENODE
355 MOD_FILENODE, RENAMED_FILENODE, CHMOD_FILENODE, BIN_FILENODE, COPIED_FILENODE
356 %>
356 %>
357 <span class="pill">
357 <span class="pill">
358 %if filediff.source_file_path and filediff.target_file_path:
358 %if filediff.source_file_path and filediff.target_file_path:
359 %if filediff.source_file_path != filediff.target_file_path:
359 %if filediff.source_file_path != filediff.target_file_path:
360 ## file was renamed, or copied
360 ## file was renamed, or copied
361 %if RENAMED_FILENODE in filediff.patch['stats']['ops']:
361 %if RENAMED_FILENODE in filediff.patch['stats']['ops']:
362 <strong>${filediff.target_file_path}</strong> β¬… <del>${filediff.source_file_path}</del>
362 <strong>${filediff.target_file_path}</strong> β¬… <del>${filediff.source_file_path}</del>
363 <% final_path = filediff.target_file_path %>
363 <% final_path = filediff.target_file_path %>
364 %elif COPIED_FILENODE in filediff.patch['stats']['ops']:
364 %elif COPIED_FILENODE in filediff.patch['stats']['ops']:
365 <strong>${filediff.target_file_path}</strong> β¬… ${filediff.source_file_path}
365 <strong>${filediff.target_file_path}</strong> β¬… ${filediff.source_file_path}
366 <% final_path = filediff.target_file_path %>
366 <% final_path = filediff.target_file_path %>
367 %endif
367 %endif
368 %else:
368 %else:
369 ## file was modified
369 ## file was modified
370 <strong>${filediff.source_file_path}</strong>
370 <strong>${filediff.source_file_path}</strong>
371 <% final_path = filediff.source_file_path %>
371 <% final_path = filediff.source_file_path %>
372 %endif
372 %endif
373 %else:
373 %else:
374 %if filediff.source_file_path:
374 %if filediff.source_file_path:
375 ## file was deleted
375 ## file was deleted
376 <strong>${filediff.source_file_path}</strong>
376 <strong>${filediff.source_file_path}</strong>
377 <% final_path = filediff.source_file_path %>
377 <% final_path = filediff.source_file_path %>
378 %else:
378 %else:
379 ## file was added
379 ## file was added
380 <strong>${filediff.target_file_path}</strong>
380 <strong>${filediff.target_file_path}</strong>
381 <% final_path = filediff.target_file_path %>
381 <% final_path = filediff.target_file_path %>
382 %endif
382 %endif
383 %endif
383 %endif
384 <i style="color: #aaa" class="tooltip icon-clipboard clipboard-action" data-clipboard-text="${final_path}" title="${_('Copy the full path')}" onclick="return false;"></i>
384 <i style="color: #aaa" class="tooltip icon-clipboard clipboard-action" data-clipboard-text="${final_path}" title="${_('Copy the full path')}" onclick="return false;"></i>
385 </span>
385 </span>
386 ## anchor link
386 ## anchor link
387 <a class="pill filediff-anchor" href="#a_${h.FID('', filediff.patch['filename'])}">ΒΆ</a>
387 <a class="pill filediff-anchor" href="#a_${h.FID(filediff.raw_id, filediff.patch['filename'])}">ΒΆ</a>
388
388
389 <span class="pill-group" style="float: right">
389 <span class="pill-group" style="float: right">
390
390
391 ## ops pills
391 ## ops pills
392 %if filediff.limited_diff:
392 %if filediff.limited_diff:
393 <span class="pill tooltip" op="limited" title="The stats for this diff are not complete">limited diff</span>
393 <span class="pill tooltip" op="limited" title="The stats for this diff are not complete">limited diff</span>
394 %endif
394 %endif
395
395
396 %if NEW_FILENODE in filediff.patch['stats']['ops']:
396 %if NEW_FILENODE in filediff.patch['stats']['ops']:
397 <span class="pill" op="created">created</span>
397 <span class="pill" op="created">created</span>
398 %if filediff['target_mode'].startswith('120'):
398 %if filediff['target_mode'].startswith('120'):
399 <span class="pill" op="symlink">symlink</span>
399 <span class="pill" op="symlink">symlink</span>
400 %else:
400 %else:
401 <span class="pill" op="mode">${nice_mode(filediff['target_mode'])}</span>
401 <span class="pill" op="mode">${nice_mode(filediff['target_mode'])}</span>
402 %endif
402 %endif
403 %endif
403 %endif
404
404
405 %if RENAMED_FILENODE in filediff.patch['stats']['ops']:
405 %if RENAMED_FILENODE in filediff.patch['stats']['ops']:
406 <span class="pill" op="renamed">renamed</span>
406 <span class="pill" op="renamed">renamed</span>
407 %endif
407 %endif
408
408
409 %if COPIED_FILENODE in filediff.patch['stats']['ops']:
409 %if COPIED_FILENODE in filediff.patch['stats']['ops']:
410 <span class="pill" op="copied">copied</span>
410 <span class="pill" op="copied">copied</span>
411 %endif
411 %endif
412
412
413 %if DEL_FILENODE in filediff.patch['stats']['ops']:
413 %if DEL_FILENODE in filediff.patch['stats']['ops']:
414 <span class="pill" op="removed">removed</span>
414 <span class="pill" op="removed">removed</span>
415 %endif
415 %endif
416
416
417 %if CHMOD_FILENODE in filediff.patch['stats']['ops']:
417 %if CHMOD_FILENODE in filediff.patch['stats']['ops']:
418 <span class="pill" op="mode">
418 <span class="pill" op="mode">
419 ${nice_mode(filediff['source_mode'])} ➑ ${nice_mode(filediff['target_mode'])}
419 ${nice_mode(filediff['source_mode'])} ➑ ${nice_mode(filediff['target_mode'])}
420 </span>
420 </span>
421 %endif
421 %endif
422
422
423 %if BIN_FILENODE in filediff.patch['stats']['ops']:
423 %if BIN_FILENODE in filediff.patch['stats']['ops']:
424 <span class="pill" op="binary">binary</span>
424 <span class="pill" op="binary">binary</span>
425 %if MOD_FILENODE in filediff.patch['stats']['ops']:
425 %if MOD_FILENODE in filediff.patch['stats']['ops']:
426 <span class="pill" op="modified">modified</span>
426 <span class="pill" op="modified">modified</span>
427 %endif
427 %endif
428 %endif
428 %endif
429
429
430 <span class="pill" op="added">${('+' if filediff.patch['stats']['added'] else '')}${filediff.patch['stats']['added']}</span>
430 <span class="pill" op="added">${('+' if filediff.patch['stats']['added'] else '')}${filediff.patch['stats']['added']}</span>
431 <span class="pill" op="deleted">${((h.safe_int(filediff.patch['stats']['deleted']) or 0) * -1)}</span>
431 <span class="pill" op="deleted">${((h.safe_int(filediff.patch['stats']['deleted']) or 0) * -1)}</span>
432
432
433 </span>
433 </span>
434
434
435 </%def>
435 </%def>
436
436
437 <%def name="nice_mode(filemode)">
437 <%def name="nice_mode(filemode)">
438 ${(filemode.startswith('100') and filemode[3:] or filemode)}
438 ${(filemode.startswith('100') and filemode[3:] or filemode)}
439 </%def>
439 </%def>
440
440
441 <%def name="diff_menu(filediff, use_comments=False)">
441 <%def name="diff_menu(filediff, use_comments=False)">
442 <div class="filediff-menu">
442 <div class="filediff-menu">
443 %if filediff.diffset.source_ref:
443 %if filediff.diffset.source_ref:
444 %if filediff.operation in ['D', 'M']:
444 %if filediff.operation in ['D', 'M']:
445 <a
445 <a
446 class="tooltip"
446 class="tooltip"
447 href="${h.route_path('repo_files',repo_name=filediff.diffset.repo_name,commit_id=filediff.diffset.source_ref,f_path=filediff.source_file_path)}"
447 href="${h.route_path('repo_files',repo_name=filediff.diffset.repo_name,commit_id=filediff.diffset.source_ref,f_path=filediff.source_file_path)}"
448 title="${h.tooltip(_('Show file at commit: %(commit_id)s') % {'commit_id': filediff.diffset.source_ref[:12]})}"
448 title="${h.tooltip(_('Show file at commit: %(commit_id)s') % {'commit_id': filediff.diffset.source_ref[:12]})}"
449 >
449 >
450 ${_('Show file before')}
450 ${_('Show file before')}
451 </a> |
451 </a> |
452 %else:
452 %else:
453 <span
453 <span
454 class="tooltip"
454 class="tooltip"
455 title="${h.tooltip(_('File no longer present at commit: %(commit_id)s') % {'commit_id': filediff.diffset.source_ref[:12]})}"
455 title="${h.tooltip(_('File no longer present at commit: %(commit_id)s') % {'commit_id': filediff.diffset.source_ref[:12]})}"
456 >
456 >
457 ${_('Show file before')}
457 ${_('Show file before')}
458 </span> |
458 </span> |
459 %endif
459 %endif
460 %if filediff.operation in ['A', 'M']:
460 %if filediff.operation in ['A', 'M']:
461 <a
461 <a
462 class="tooltip"
462 class="tooltip"
463 href="${h.route_path('repo_files',repo_name=filediff.diffset.source_repo_name,commit_id=filediff.diffset.target_ref,f_path=filediff.target_file_path)}"
463 href="${h.route_path('repo_files',repo_name=filediff.diffset.source_repo_name,commit_id=filediff.diffset.target_ref,f_path=filediff.target_file_path)}"
464 title="${h.tooltip(_('Show file at commit: %(commit_id)s') % {'commit_id': filediff.diffset.target_ref[:12]})}"
464 title="${h.tooltip(_('Show file at commit: %(commit_id)s') % {'commit_id': filediff.diffset.target_ref[:12]})}"
465 >
465 >
466 ${_('Show file after')}
466 ${_('Show file after')}
467 </a> |
467 </a> |
468 %else:
468 %else:
469 <span
469 <span
470 class="tooltip"
470 class="tooltip"
471 title="${h.tooltip(_('File no longer present at commit: %(commit_id)s') % {'commit_id': filediff.diffset.target_ref[:12]})}"
471 title="${h.tooltip(_('File no longer present at commit: %(commit_id)s') % {'commit_id': filediff.diffset.target_ref[:12]})}"
472 >
472 >
473 ${_('Show file after')}
473 ${_('Show file after')}
474 </span> |
474 </span> |
475 %endif
475 %endif
476 <a
476 <a
477 class="tooltip"
477 class="tooltip"
478 title="${h.tooltip(_('Raw diff'))}"
478 title="${h.tooltip(_('Raw diff'))}"
479 href="${h.route_path('repo_files_diff',repo_name=filediff.diffset.repo_name,f_path=filediff.target_file_path, _query=dict(diff2=filediff.diffset.target_ref,diff1=filediff.diffset.source_ref,diff='raw'))}"
479 href="${h.route_path('repo_files_diff',repo_name=filediff.diffset.repo_name,f_path=filediff.target_file_path, _query=dict(diff2=filediff.diffset.target_ref,diff1=filediff.diffset.source_ref,diff='raw'))}"
480 >
480 >
481 ${_('Raw diff')}
481 ${_('Raw diff')}
482 </a> |
482 </a> |
483 <a
483 <a
484 class="tooltip"
484 class="tooltip"
485 title="${h.tooltip(_('Download diff'))}"
485 title="${h.tooltip(_('Download diff'))}"
486 href="${h.route_path('repo_files_diff',repo_name=filediff.diffset.repo_name,f_path=filediff.target_file_path, _query=dict(diff2=filediff.diffset.target_ref,diff1=filediff.diffset.source_ref,diff='download'))}"
486 href="${h.route_path('repo_files_diff',repo_name=filediff.diffset.repo_name,f_path=filediff.target_file_path, _query=dict(diff2=filediff.diffset.target_ref,diff1=filediff.diffset.source_ref,diff='download'))}"
487 >
487 >
488 ${_('Download diff')}
488 ${_('Download diff')}
489 </a>
489 </a>
490 % if use_comments:
490 % if use_comments:
491 |
491 |
492 % endif
492 % endif
493
493
494 ## TODO: dan: refactor ignorews_url and context_url into the diff renderer same as diffmode=unified/sideside. Also use ajax to load more context (by clicking hunks)
494 ## TODO: dan: refactor ignorews_url and context_url into the diff renderer same as diffmode=unified/sideside. Also use ajax to load more context (by clicking hunks)
495 %if hasattr(c, 'ignorews_url'):
495 %if hasattr(c, 'ignorews_url'):
496 ${c.ignorews_url(request, h.FID('', filediff.patch['filename']))}
496 ${c.ignorews_url(request, h.FID(filediff.raw_id, filediff.patch['filename']))}
497 %endif
497 %endif
498 %if hasattr(c, 'context_url'):
498 %if hasattr(c, 'context_url'):
499 ${c.context_url(request, h.FID('', filediff.patch['filename']))}
499 ${c.context_url(request, h.FID(filediff.raw_id, filediff.patch['filename']))}
500 %endif
500 %endif
501
501
502 %if use_comments:
502 %if use_comments:
503 <a href="#" onclick="return Rhodecode.comments.toggleComments(this);">
503 <a href="#" onclick="return Rhodecode.comments.toggleComments(this);">
504 <span class="show-comment-button">${_('Show comments')}</span><span class="hide-comment-button">${_('Hide comments')}</span>
504 <span class="show-comment-button">${_('Show comments')}</span><span class="hide-comment-button">${_('Hide comments')}</span>
505 </a>
505 </a>
506 %endif
506 %endif
507 %endif
507 %endif
508 </div>
508 </div>
509 </%def>
509 </%def>
510
510
511
511
512 <%def name="inline_comments_container(comments, inline_comments)">
512 <%def name="inline_comments_container(comments, inline_comments)">
513 <div class="inline-comments">
513 <div class="inline-comments">
514 %for comment in comments:
514 %for comment in comments:
515 ${commentblock.comment_block(comment, inline=True)}
515 ${commentblock.comment_block(comment, inline=True)}
516 %endfor
516 %endfor
517 % if comments and comments[-1].outdated:
517 % if comments and comments[-1].outdated:
518 <span class="btn btn-secondary cb-comment-add-button comment-outdated}"
518 <span class="btn btn-secondary cb-comment-add-button comment-outdated}"
519 style="display: none;}">
519 style="display: none;}">
520 ${_('Add another comment')}
520 ${_('Add another comment')}
521 </span>
521 </span>
522 % else:
522 % else:
523 <span onclick="return Rhodecode.comments.createComment(this)"
523 <span onclick="return Rhodecode.comments.createComment(this)"
524 class="btn btn-secondary cb-comment-add-button">
524 class="btn btn-secondary cb-comment-add-button">
525 ${_('Add another comment')}
525 ${_('Add another comment')}
526 </span>
526 </span>
527 % endif
527 % endif
528
528
529 </div>
529 </div>
530 </%def>
530 </%def>
531
531
532 <%!
532 <%!
533 def get_comments_for(diff_type, comments, filename, line_version, line_number):
533 def get_comments_for(diff_type, comments, filename, line_version, line_number):
534 if hasattr(filename, 'unicode_path'):
534 if hasattr(filename, 'unicode_path'):
535 filename = filename.unicode_path
535 filename = filename.unicode_path
536
536
537 if not isinstance(filename, basestring):
537 if not isinstance(filename, basestring):
538 return None
538 return None
539
539
540 line_key = '{}{}'.format(line_version, line_number) ## e.g. o37, n12
540 line_key = '{}{}'.format(line_version, line_number) ## e.g. o37, n12
541
541
542 if comments and filename in comments:
542 if comments and filename in comments:
543 file_comments = comments[filename]
543 file_comments = comments[filename]
544 if line_key in file_comments:
544 if line_key in file_comments:
545 data = file_comments.pop(line_key)
545 data = file_comments.pop(line_key)
546 return data
546 return data
547 %>
547 %>
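
get_comments_for above resolves inline comments by file name plus a line key such as o37 (line 37 on the old side) or n12 (line 12 on the new side), and pops each hit so whatever remains can later be rendered as unmatched comments. A hedged sketch of that lookup outside the template; the function name and sample data are invented for illustration:

def pop_line_comments(comments, filename, line_version, line_number):
    """Sketch of the get_comments_for lookup; not the template's actual helper."""
    line_key = '{}{}'.format(line_version, line_number)  # e.g. 'o37', 'n12'
    file_comments = (comments or {}).get(filename)
    if file_comments and line_key in file_comments:
        # popping marks the comments as consumed, leftovers become "unmatched"
        return file_comments.pop(line_key)
    return None


inline = {'setup.py': {'n12': ['first comment']}}
assert pop_line_comments(inline, 'setup.py', 'n', 12) == ['first comment']
assert pop_line_comments(inline, 'setup.py', 'n', 12) is None  # already consumed
assert pop_line_comments(inline, 'README.rst', 'o', 3) is None
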
548
548
549 <%def name="render_hunk_lines_sideside(hunk, use_comments=False, inline_comments=None)">
549 <%def name="render_hunk_lines_sideside(hunk, use_comments=False, inline_comments=None)">
550
550
551 %for i, line in enumerate(hunk.sideside):
551 %for i, line in enumerate(hunk.sideside):
552 <%
552 <%
553 old_line_anchor, new_line_anchor = None, None
553 old_line_anchor, new_line_anchor = None, None
554 if line.original.lineno:
554 if line.original.lineno:
555 old_line_anchor = diff_line_anchor(hunk.source_file_path, line.original.lineno, 'o')
555 old_line_anchor = diff_line_anchor(hunk.source_file_path, line.original.lineno, 'o')
556 if line.modified.lineno:
556 if line.modified.lineno:
557 new_line_anchor = diff_line_anchor(hunk.target_file_path, line.modified.lineno, 'n')
557 new_line_anchor = diff_line_anchor(hunk.target_file_path, line.modified.lineno, 'n')
558 %>
558 %>
559
559
560 <tr class="cb-line">
560 <tr class="cb-line">
561 <td class="cb-data ${action_class(line.original.action)}"
561 <td class="cb-data ${action_class(line.original.action)}"
562 data-line-no="${line.original.lineno}"
562 data-line-no="${line.original.lineno}"
563 >
563 >
564 <div>
564 <div>
565
565
566 <% line_old_comments = None %>
566 <% line_old_comments = None %>
567 %if line.original.get_comment_args:
567 %if line.original.get_comment_args:
568 <% line_old_comments = get_comments_for('side-by-side', inline_comments, *line.original.get_comment_args) %>
568 <% line_old_comments = get_comments_for('side-by-side', inline_comments, *line.original.get_comment_args) %>
569 %endif
569 %endif
570 %if line_old_comments:
570 %if line_old_comments:
571 <% has_outdated = any([x.outdated for x in line_old_comments]) %>
571 <% has_outdated = any([x.outdated for x in line_old_comments]) %>
572 % if has_outdated:
572 % if has_outdated:
573 <i title="${_('comments including outdated')}:${len(line_old_comments)}" class="icon-comment_toggle" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
573 <i title="${_('comments including outdated')}:${len(line_old_comments)}" class="icon-comment_toggle" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
574 % else:
574 % else:
575 <i title="${_('comments')}: ${len(line_old_comments)}" class="icon-comment" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
575 <i title="${_('comments')}: ${len(line_old_comments)}" class="icon-comment" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
576 % endif
576 % endif
577 %endif
577 %endif
578 </div>
578 </div>
579 </td>
579 </td>
580 <td class="cb-lineno ${action_class(line.original.action)}"
580 <td class="cb-lineno ${action_class(line.original.action)}"
581 data-line-no="${line.original.lineno}"
581 data-line-no="${line.original.lineno}"
582 %if old_line_anchor:
582 %if old_line_anchor:
583 id="${old_line_anchor}"
583 id="${old_line_anchor}"
584 %endif
584 %endif
585 >
585 >
586 %if line.original.lineno:
586 %if line.original.lineno:
587 <a name="${old_line_anchor}" href="#${old_line_anchor}">${line.original.lineno}</a>
587 <a name="${old_line_anchor}" href="#${old_line_anchor}">${line.original.lineno}</a>
588 %endif
588 %endif
589 </td>
589 </td>
590 <td class="cb-content ${action_class(line.original.action)}"
590 <td class="cb-content ${action_class(line.original.action)}"
591 data-line-no="o${line.original.lineno}"
591 data-line-no="o${line.original.lineno}"
592 >
592 >
593 %if use_comments and line.original.lineno:
593 %if use_comments and line.original.lineno:
594 ${render_add_comment_button()}
594 ${render_add_comment_button()}
595 %endif
595 %endif
596 <span class="cb-code">${line.original.action} ${line.original.content or '' | n}</span>
596 <span class="cb-code">${line.original.action} ${line.original.content or '' | n}</span>
597
597
598 %if use_comments and line.original.lineno and line_old_comments:
598 %if use_comments and line.original.lineno and line_old_comments:
599 ${inline_comments_container(line_old_comments, inline_comments)}
599 ${inline_comments_container(line_old_comments, inline_comments)}
600 %endif
600 %endif
601
601
602 </td>
602 </td>
603 <td class="cb-data ${action_class(line.modified.action)}"
603 <td class="cb-data ${action_class(line.modified.action)}"
604 data-line-no="${line.modified.lineno}"
604 data-line-no="${line.modified.lineno}"
605 >
605 >
606 <div>
606 <div>
607
607
608 %if line.modified.get_comment_args:
608 %if line.modified.get_comment_args:
609 <% line_new_comments = get_comments_for('side-by-side', inline_comments, *line.modified.get_comment_args) %>
609 <% line_new_comments = get_comments_for('side-by-side', inline_comments, *line.modified.get_comment_args) %>
610 %else:
610 %else:
611 <% line_new_comments = None%>
611 <% line_new_comments = None%>
612 %endif
612 %endif
613 %if line_new_comments:
613 %if line_new_comments:
614 <% has_outdated = any([x.outdated for x in line_new_comments]) %>
614 <% has_outdated = any([x.outdated for x in line_new_comments]) %>
615 % if has_outdated:
615 % if has_outdated:
616 <i title="${_('comments including outdated')}:${len(line_new_comments)}" class="icon-comment_toggle" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
616 <i title="${_('comments including outdated')}:${len(line_new_comments)}" class="icon-comment_toggle" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
617 % else:
617 % else:
618 <i title="${_('comments')}: ${len(line_new_comments)}" class="icon-comment" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
618 <i title="${_('comments')}: ${len(line_new_comments)}" class="icon-comment" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
619 % endif
619 % endif
620 %endif
620 %endif
621 </div>
621 </div>
622 </td>
622 </td>
623 <td class="cb-lineno ${action_class(line.modified.action)}"
623 <td class="cb-lineno ${action_class(line.modified.action)}"
624 data-line-no="${line.modified.lineno}"
624 data-line-no="${line.modified.lineno}"
625 %if new_line_anchor:
625 %if new_line_anchor:
626 id="${new_line_anchor}"
626 id="${new_line_anchor}"
627 %endif
627 %endif
628 >
628 >
629 %if line.modified.lineno:
629 %if line.modified.lineno:
630 <a name="${new_line_anchor}" href="#${new_line_anchor}">${line.modified.lineno}</a>
630 <a name="${new_line_anchor}" href="#${new_line_anchor}">${line.modified.lineno}</a>
631 %endif
631 %endif
632 </td>
632 </td>
633 <td class="cb-content ${action_class(line.modified.action)}"
633 <td class="cb-content ${action_class(line.modified.action)}"
634 data-line-no="n${line.modified.lineno}"
634 data-line-no="n${line.modified.lineno}"
635 >
635 >
636 %if use_comments and line.modified.lineno:
636 %if use_comments and line.modified.lineno:
637 ${render_add_comment_button()}
637 ${render_add_comment_button()}
638 %endif
638 %endif
639 <span class="cb-code">${line.modified.action} ${line.modified.content or '' | n}</span>
639 <span class="cb-code">${line.modified.action} ${line.modified.content or '' | n}</span>
640 %if use_comments and line.modified.lineno and line_new_comments:
640 %if use_comments and line.modified.lineno and line_new_comments:
641 ${inline_comments_container(line_new_comments, inline_comments)}
641 ${inline_comments_container(line_new_comments, inline_comments)}
642 %endif
642 %endif
643 </td>
643 </td>
644 </tr>
644 </tr>
645 %endfor
645 %endfor
646 </%def>
646 </%def>
647
647
648
648
649 <%def name="render_hunk_lines_unified(hunk, use_comments=False, inline_comments=None)">
649 <%def name="render_hunk_lines_unified(hunk, use_comments=False, inline_comments=None)">
650 %for old_line_no, new_line_no, action, content, comments_args in hunk.unified:
650 %for old_line_no, new_line_no, action, content, comments_args in hunk.unified:
651 <%
651 <%
652 old_line_anchor, new_line_anchor = None, None
652 old_line_anchor, new_line_anchor = None, None
653 if old_line_no:
653 if old_line_no:
654 old_line_anchor = diff_line_anchor(hunk.source_file_path, old_line_no, 'o')
654 old_line_anchor = diff_line_anchor(hunk.source_file_path, old_line_no, 'o')
655 if new_line_no:
655 if new_line_no:
656 new_line_anchor = diff_line_anchor(hunk.target_file_path, new_line_no, 'n')
656 new_line_anchor = diff_line_anchor(hunk.target_file_path, new_line_no, 'n')
657 %>
657 %>
658 <tr class="cb-line">
658 <tr class="cb-line">
659 <td class="cb-data ${action_class(action)}">
659 <td class="cb-data ${action_class(action)}">
660 <div>
660 <div>
661
661
662 %if comments_args:
662 %if comments_args:
663 <% comments = get_comments_for('unified', inline_comments, *comments_args) %>
663 <% comments = get_comments_for('unified', inline_comments, *comments_args) %>
664 %else:
664 %else:
665 <% comments = None %>
665 <% comments = None %>
666 %endif
666 %endif
667
667
668 % if comments:
668 % if comments:
669 <% has_outdated = any([x.outdated for x in comments]) %>
669 <% has_outdated = any([x.outdated for x in comments]) %>
670 % if has_outdated:
670 % if has_outdated:
671 <i title="${_('comments including outdated')}:${len(comments)}" class="icon-comment_toggle" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
671 <i title="${_('comments including outdated')}:${len(comments)}" class="icon-comment_toggle" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
672 % else:
672 % else:
673 <i title="${_('comments')}: ${len(comments)}" class="icon-comment" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
673 <i title="${_('comments')}: ${len(comments)}" class="icon-comment" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
674 % endif
674 % endif
675 % endif
675 % endif
676 </div>
676 </div>
677 </td>
677 </td>
678 <td class="cb-lineno ${action_class(action)}"
678 <td class="cb-lineno ${action_class(action)}"
679 data-line-no="${old_line_no}"
679 data-line-no="${old_line_no}"
680 %if old_line_anchor:
680 %if old_line_anchor:
681 id="${old_line_anchor}"
681 id="${old_line_anchor}"
682 %endif
682 %endif
683 >
683 >
684 %if old_line_anchor:
684 %if old_line_anchor:
685 <a name="${old_line_anchor}" href="#${old_line_anchor}">${old_line_no}</a>
685 <a name="${old_line_anchor}" href="#${old_line_anchor}">${old_line_no}</a>
686 %endif
686 %endif
687 </td>
687 </td>
688 <td class="cb-lineno ${action_class(action)}"
688 <td class="cb-lineno ${action_class(action)}"
689 data-line-no="${new_line_no}"
689 data-line-no="${new_line_no}"
690 %if new_line_anchor:
690 %if new_line_anchor:
691 id="${new_line_anchor}"
691 id="${new_line_anchor}"
692 %endif
692 %endif
693 >
693 >
694 %if new_line_anchor:
694 %if new_line_anchor:
695 <a name="${new_line_anchor}" href="#${new_line_anchor}">${new_line_no}</a>
695 <a name="${new_line_anchor}" href="#${new_line_anchor}">${new_line_no}</a>
696 %endif
696 %endif
697 </td>
697 </td>
698 <td class="cb-content ${action_class(action)}"
698 <td class="cb-content ${action_class(action)}"
699 data-line-no="${(new_line_no and 'n' or 'o')}${(new_line_no or old_line_no)}"
699 data-line-no="${(new_line_no and 'n' or 'o')}${(new_line_no or old_line_no)}"
700 >
700 >
701 %if use_comments:
701 %if use_comments:
702 ${render_add_comment_button()}
702 ${render_add_comment_button()}
703 %endif
703 %endif
704 <span class="cb-code">${action} ${content or '' | n}</span>
704 <span class="cb-code">${action} ${content or '' | n}</span>
705 %if use_comments and comments:
705 %if use_comments and comments:
706 ${inline_comments_container(comments, inline_comments)}
706 ${inline_comments_container(comments, inline_comments)}
707 %endif
707 %endif
708 </td>
708 </td>
709 </tr>
709 </tr>
710 %endfor
710 %endfor
711 </%def>
711 </%def>
712
712
713
713
714 <%def name="render_hunk_lines(diff_mode, hunk, use_comments, inline_comments)">
714 <%def name="render_hunk_lines(diff_mode, hunk, use_comments, inline_comments)">
715 % if diff_mode == 'unified':
715 % if diff_mode == 'unified':
716 ${render_hunk_lines_unified(hunk, use_comments=use_comments, inline_comments=inline_comments)}
716 ${render_hunk_lines_unified(hunk, use_comments=use_comments, inline_comments=inline_comments)}
717 % elif diff_mode == 'sideside':
717 % elif diff_mode == 'sideside':
718 ${render_hunk_lines_sideside(hunk, use_comments=use_comments, inline_comments=inline_comments)}
718 ${render_hunk_lines_sideside(hunk, use_comments=use_comments, inline_comments=inline_comments)}
719 % else:
719 % else:
720 <tr class="cb-line">
720 <tr class="cb-line">
721 <td>unknown diff mode</td>
721 <td>unknown diff mode</td>
722 </tr>
722 </tr>
723 % endif
723 % endif
724 </%def>
724 </%def>
724 </%def>
725
725
726
726
727 <%def name="render_add_comment_button()">
727 <%def name="render_add_comment_button()">
728 <button class="btn btn-small btn-primary cb-comment-box-opener" onclick="return Rhodecode.comments.createComment(this)">
728 <button class="btn btn-small btn-primary cb-comment-box-opener" onclick="return Rhodecode.comments.createComment(this)">
729 <span><i class="icon-comment"></i></span>
729 <span><i class="icon-comment"></i></span>
730 </button>
730 </button>
731 </%def>
731 </%def>
732
732
733 <%def name="render_diffset_menu(diffset=None)">
733 <%def name="render_diffset_menu(diffset=None, range_diff_on=None)">
734
734
735 <div class="diffset-menu clearinner">
735 <div class="diffset-menu clearinner">
736 <div class="pull-right">
736 <div class="pull-right">
737 <div class="btn-group">
737 <div class="btn-group">
738
738
739 <a
739 <a
740 class="btn ${(c.user_session_attrs["diffmode"] == 'sideside' and 'btn-primary')} tooltip"
740 class="btn ${(c.user_session_attrs["diffmode"] == 'sideside' and 'btn-primary')} tooltip"
741 title="${h.tooltip(_('View side by side'))}"
741 title="${h.tooltip(_('View side by side'))}"
742 href="${h.current_route_path(request, diffmode='sideside')}">
742 href="${h.current_route_path(request, diffmode='sideside')}">
743 <span>${_('Side by Side')}</span>
743 <span>${_('Side by Side')}</span>
744 </a>
744 </a>
745 <a
745 <a
746 class="btn ${(c.user_session_attrs["diffmode"] == 'unified' and 'btn-primary')} tooltip"
746 class="btn ${(c.user_session_attrs["diffmode"] == 'unified' and 'btn-primary')} tooltip"
747 title="${h.tooltip(_('View unified'))}" href="${h.current_route_path(request, diffmode='unified')}">
747 title="${h.tooltip(_('View unified'))}" href="${h.current_route_path(request, diffmode='unified')}">
748 <span>${_('Unified')}</span>
748 <span>${_('Unified')}</span>
749 </a>
749 </a>
750 % if range_diff_on is True:
751 <a
752 title="${_('Turn off: Show the diff as commit range')}"
753 class="btn btn-primary"
754 href="${h.current_route_path(request, **{"range-diff":"0"})}">
755 <span>${_('Range Diff')}</span>
756 </a>
757 % elif range_diff_on is False:
758 <a
759 title="${_('Show the diff as commit range')}"
760 class="btn"
761 href="${h.current_route_path(request, **{"range-diff":"1"})}">
762 <span>${_('Range Diff')}</span>
763 </a>
764 % endif
750 </div>
765 </div>
751 </div>
766 </div>
752
767
753 <div class="pull-left">
768 <div class="pull-left">
754 <div class="btn-group">
769 <div class="btn-group">
755 <div class="pull-left">
770 <div class="pull-left">
756 ${h.hidden('file_filter')}
771 ${h.hidden('file_filter')}
757 </div>
772 </div>
758 <a
773 <a
759 class="btn"
774 class="btn"
760 href="#"
775 href="#"
761 onclick="$('input[class=filediff-collapse-state]').prop('checked', false); return false">${_('Expand All Files')}</a>
776 onclick="$('input[class=filediff-collapse-state]').prop('checked', false); return false">${_('Expand All Files')}</a>
762 <a
777 <a
763 class="btn"
778 class="btn"
764 href="#"
779 href="#"
765 onclick="$('input[class=filediff-collapse-state]').prop('checked', true); return false">${_('Collapse All Files')}</a>
780 onclick="$('input[class=filediff-collapse-state]').prop('checked', true); return false">${_('Collapse All Files')}</a>
766 <a
781 <a
767 class="btn"
782 class="btn"
768 href="#"
783 href="#"
769 onclick="return Rhodecode.comments.toggleWideMode(this)">${_('Wide Mode Diff')}</a>
784 onclick="return Rhodecode.comments.toggleWideMode(this)">${_('Wide Mode Diff')}</a>
770
785
771 </div>
786 </div>
772 </div>
787 </div>
773 </div>
788 </div>
774
789
775 % if diffset:
790 % if diffset:
776
791
777 %if diffset.limited_diff:
792 %if diffset.limited_diff:
778 <% file_placeholder = _ungettext('%(num)s file changed', '%(num)s files changed', diffset.changed_files) % {'num': diffset.changed_files}%>
793 <% file_placeholder = _ungettext('%(num)s file changed', '%(num)s files changed', diffset.changed_files) % {'num': diffset.changed_files}%>
779 %else:
794 %else:
780 <% file_placeholder = _ungettext('%(num)s file changed: %(linesadd)s inserted, ''%(linesdel)s deleted', '%(num)s files changed: %(linesadd)s inserted, %(linesdel)s deleted', diffset.changed_files) % {'num': diffset.changed_files, 'linesadd': diffset.lines_added, 'linesdel': diffset.lines_deleted}%>
795 <% file_placeholder = _ungettext('%(num)s file changed: %(linesadd)s inserted, ''%(linesdel)s deleted', '%(num)s files changed: %(linesadd)s inserted, %(linesdel)s deleted', diffset.changed_files) % {'num': diffset.changed_files, 'linesadd': diffset.lines_added, 'linesdel': diffset.lines_deleted}%>
781 %endif
796 %endif
782
797
783 <script>
798 <script>
784
799
785 var feedFilesOptions = function (query, initialData) {
800 var feedFilesOptions = function (query, initialData) {
786 var data = {results: []};
801 var data = {results: []};
787 var isQuery = typeof query.term !== 'undefined';
802 var isQuery = typeof query.term !== 'undefined';
788
803
789 var section = _gettext('Changed files');
804 var section = _gettext('Changed files');
790 var filteredData = [];
805 var filteredData = [];
791
806
792 //filter results
807 //filter results
793 $.each(initialData.results, function (idx, value) {
808 $.each(initialData.results, function (idx, value) {
794
809
795 if (!isQuery || query.term.length === 0 || value.text.toUpperCase().indexOf(query.term.toUpperCase()) >= 0) {
810 if (!isQuery || query.term.length === 0 || value.text.toUpperCase().indexOf(query.term.toUpperCase()) >= 0) {
796 filteredData.push({
811 filteredData.push({
797 'id': this.id,
812 'id': this.id,
798 'text': this.text,
813 'text': this.text,
799 "ops": this.ops,
814 "ops": this.ops,
800 })
815 })
801 }
816 }
802
817
803 });
818 });
804
819
805 data.results = filteredData;
820 data.results = filteredData;
806
821
807 query.callback(data);
822 query.callback(data);
808 };
823 };
809
824
810 var formatFileResult = function(result, container, query, escapeMarkup) {
825 var formatFileResult = function(result, container, query, escapeMarkup) {
811 return function(data, escapeMarkup) {
826 return function(data, escapeMarkup) {
812 var container = '<div class="filelist" style="padding-right:100px">{0}</div>';
827 var container = '<div class="filelist" style="padding-right:100px">{0}</div>';
813 var tmpl = '<span style="margin-right:-50px"><strong>{0}</strong></span>'.format(escapeMarkup(data['text']));
828 var tmpl = '<span style="margin-right:-50px"><strong>{0}</strong></span>'.format(escapeMarkup(data['text']));
814 var pill = '<span class="pill-group" style="float: right;margin-right: -100px">' +
829 var pill = '<span class="pill-group" style="float: right;margin-right: -100px">' +
815 '<span class="pill" op="added">{0}</span>' +
830 '<span class="pill" op="added">{0}</span>' +
816 '<span class="pill" op="deleted">{1}</span>' +
831 '<span class="pill" op="deleted">{1}</span>' +
817 '</span>'
832 '</span>'
818 ;
833 ;
819 var added = data['ops']['added'];
834 var added = data['ops']['added'];
820 if (added === 0) {
835 if (added === 0) {
821 // don't show +0
836 // don't show +0
822 added = 0;
837 added = 0;
823 } else {
838 } else {
824 added = '+' + added;
839 added = '+' + added;
825 }
840 }
826
841
827 var deleted = -1*data['ops']['deleted'];
842 var deleted = -1*data['ops']['deleted'];
828
843
829 tmpl += pill.format(added, deleted);
844 tmpl += pill.format(added, deleted);
830 return container.format(tmpl);
845 return container.format(tmpl);
831
846
832 }(result, escapeMarkup);
847 }(result, escapeMarkup);
833 };
848 };
834 var preloadData = {
849 var preloadData = {
835 results: [
850 results: [
836 % for filediff in diffset.files:
851 % for filediff in diffset.files:
837 {id:"a_${h.FID('', filediff.patch['filename'])}",
852 {id:"a_${h.FID(filediff.raw_id, filediff.patch['filename'])}",
838 text:"${filediff.patch['filename']}",
853 text:"${filediff.patch['filename']}",
839 ops:${h.json.dumps(filediff.patch['stats'])|n}}${('' if loop.last else ',')}
854 ops:${h.json.dumps(filediff.patch['stats'])|n}}${('' if loop.last else ',')}
840 % endfor
855 % endfor
841 ]
856 ]
842 };
857 };
843
858
844 $("#file_filter").select2({
859 $("#file_filter").select2({
845 'dropdownAutoWidth': true,
860 'dropdownAutoWidth': true,
846 'width': 'auto',
861 'width': 'auto',
847 'placeholder': "${file_placeholder}",
862 'placeholder': "${file_placeholder}",
848 containerCssClass: "drop-menu",
863 containerCssClass: "drop-menu",
849 dropdownCssClass: "drop-menu-dropdown",
864 dropdownCssClass: "drop-menu-dropdown",
850 data: preloadData,
865 data: preloadData,
851 query: function(query) {
866 query: function(query) {
852 feedFilesOptions(query, preloadData);
867 feedFilesOptions(query, preloadData);
853 },
868 },
854 formatResult: formatFileResult
869 formatResult: formatFileResult
855 });
870 });
856
871
857 $("#file_filter").on('click', function (e) {
872 $("#file_filter").on('click', function (e) {
858 e.preventDefault();
873 e.preventDefault();
859 var selected = $('#file_filter').select2('data');
874 var selected = $('#file_filter').select2('data');
860 var idSelector = "#"+selected.id;
875 var idSelector = "#"+selected.id;
861 window.location.hash = idSelector;
876 window.location.hash = idSelector;
862 // expand the container if we quick-select the field
877 // expand the container if we quick-select the field
863 $(idSelector).prev().prop('checked', false);
878 $(idSelector).prev().prop('checked', false);
864 })
879 })
865
880
866 </script>
881 </script>
867 % endif
882 % endif
868
883
869 </%def>
884 </%def>
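The one functional change in the hunk above is that the file-filter preload data now builds its anchor ids with h.FID(filediff.raw_id, ...) instead of h.FID('', ...), presumably so that each commit's diffset keeps unique DOM ids when several diffsets appear on one page (as in the range diff mode introduced further down). A minimal, hypothetical Python sketch of such an anchor helper; the real h.FID implementation is not part of this changeset and may differ:

# --- illustrative sketch, not part of this changeset ---
# Assumption: an FID-style helper derives a stable, HTML-safe anchor id from
# the commit id plus the file path, so the same file rendered for two
# different commits gets two distinct ids. Names and truncation lengths here
# are illustrative only.
import hashlib

def file_anchor_id(raw_id, path):
    """Return a stable anchor id for one file of one commit."""
    digest = hashlib.sha1('{}_{}'.format(raw_id, path).encode('utf-8')).hexdigest()
    return '{}_{}'.format(raw_id[:12] or 'x', digest[:12])

# The template would then emit id="a_" + file_anchor_id(...), which the
# #file_filter select2 widget turns into a window.location.hash jump.
# --- end sketch ---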
@@ -1,869 +1,874 b''
1 <%inherit file="/base/base.mako"/>
1 <%inherit file="/base/base.mako"/>
2 <%namespace name="base" file="/base/base.mako"/>
2 <%namespace name="base" file="/base/base.mako"/>
3 <%namespace name="dt" file="/data_table/_dt_elements.mako"/>
3 <%namespace name="dt" file="/data_table/_dt_elements.mako"/>
4
4
5 <%def name="title()">
5 <%def name="title()">
6 ${_('%s Pull Request #%s') % (c.repo_name, c.pull_request.pull_request_id)}
6 ${_('%s Pull Request #%s') % (c.repo_name, c.pull_request.pull_request_id)}
7 %if c.rhodecode_name:
7 %if c.rhodecode_name:
8 &middot; ${h.branding(c.rhodecode_name)}
8 &middot; ${h.branding(c.rhodecode_name)}
9 %endif
9 %endif
10 </%def>
10 </%def>
11
11
12 <%def name="breadcrumbs_links()">
12 <%def name="breadcrumbs_links()">
13 <span id="pr-title">
13 <span id="pr-title">
14 ${c.pull_request.title}
14 ${c.pull_request.title}
15 %if c.pull_request.is_closed():
15 %if c.pull_request.is_closed():
16 (${_('Closed')})
16 (${_('Closed')})
17 %endif
17 %endif
18 </span>
18 </span>
19 <div id="pr-title-edit" class="input" style="display: none;">
19 <div id="pr-title-edit" class="input" style="display: none;">
20 ${h.text('pullrequest_title', id_="pr-title-input", class_="large", value=c.pull_request.title)}
20 ${h.text('pullrequest_title', id_="pr-title-input", class_="large", value=c.pull_request.title)}
21 </div>
21 </div>
22 </%def>
22 </%def>
23
23
24 <%def name="menu_bar_nav()">
24 <%def name="menu_bar_nav()">
25 ${self.menu_items(active='repositories')}
25 ${self.menu_items(active='repositories')}
26 </%def>
26 </%def>
27
27
28 <%def name="menu_bar_subnav()">
28 <%def name="menu_bar_subnav()">
29 ${self.repo_menu(active='showpullrequest')}
29 ${self.repo_menu(active='showpullrequest')}
30 </%def>
30 </%def>
31
31
32 <%def name="main()">
32 <%def name="main()">
33
33
34 <script type="text/javascript">
34 <script type="text/javascript">
35 // TODO: marcink switch this to pyroutes
35 // TODO: marcink switch this to pyroutes
36 AJAX_COMMENT_DELETE_URL = "${h.route_path('pullrequest_comment_delete',repo_name=c.repo_name,pull_request_id=c.pull_request.pull_request_id,comment_id='__COMMENT_ID__')}";
36 AJAX_COMMENT_DELETE_URL = "${h.route_path('pullrequest_comment_delete',repo_name=c.repo_name,pull_request_id=c.pull_request.pull_request_id,comment_id='__COMMENT_ID__')}";
37 templateContext.pull_request_data.pull_request_id = ${c.pull_request.pull_request_id};
37 templateContext.pull_request_data.pull_request_id = ${c.pull_request.pull_request_id};
38 </script>
38 </script>
39 <div class="box">
39 <div class="box">
40
40
41 <div class="title">
41 <div class="title">
42 ${self.repo_page_title(c.rhodecode_db_repo)}
42 ${self.repo_page_title(c.rhodecode_db_repo)}
43 </div>
43 </div>
44
44
45 ${self.breadcrumbs()}
45 ${self.breadcrumbs()}
46
46
47 <div class="box pr-summary">
47 <div class="box pr-summary">
48
48
49 <div class="summary-details block-left">
49 <div class="summary-details block-left">
50 <% summary = lambda n:{False:'summary-short'}.get(n) %>
50 <% summary = lambda n:{False:'summary-short'}.get(n) %>
51 <div class="pr-details-title">
51 <div class="pr-details-title">
52 <a href="${h.route_path('pull_requests_global', pull_request_id=c.pull_request.pull_request_id)}">${_('Pull request #%s') % c.pull_request.pull_request_id}</a> ${_('From')} ${h.format_date(c.pull_request.created_on)}
52 <a href="${h.route_path('pull_requests_global', pull_request_id=c.pull_request.pull_request_id)}">${_('Pull request #%s') % c.pull_request.pull_request_id}</a> ${_('From')} ${h.format_date(c.pull_request.created_on)}
53 %if c.allowed_to_update:
53 %if c.allowed_to_update:
54 <div id="delete_pullrequest" class="pull-right action_button ${'' if c.allowed_to_delete else 'disabled' }" style="clear:inherit;padding: 0">
54 <div id="delete_pullrequest" class="pull-right action_button ${'' if c.allowed_to_delete else 'disabled' }" style="clear:inherit;padding: 0">
55 % if c.allowed_to_delete:
55 % if c.allowed_to_delete:
56 ${h.secure_form(h.route_path('pullrequest_delete', repo_name=c.pull_request.target_repo.repo_name, pull_request_id=c.pull_request.pull_request_id), request=request)}
56 ${h.secure_form(h.route_path('pullrequest_delete', repo_name=c.pull_request.target_repo.repo_name, pull_request_id=c.pull_request.pull_request_id), request=request)}
57 ${h.submit('remove_%s' % c.pull_request.pull_request_id, _('Delete'),
57 ${h.submit('remove_%s' % c.pull_request.pull_request_id, _('Delete'),
58 class_="btn btn-link btn-danger no-margin",onclick="return confirm('"+_('Confirm to delete this pull request')+"');")}
58 class_="btn btn-link btn-danger no-margin",onclick="return confirm('"+_('Confirm to delete this pull request')+"');")}
59 ${h.end_form()}
59 ${h.end_form()}
60 % else:
60 % else:
61 ${_('Delete')}
61 ${_('Delete')}
62 % endif
62 % endif
63 </div>
63 </div>
64 <div id="open_edit_pullrequest" class="pull-right action_button">${_('Edit')}</div>
64 <div id="open_edit_pullrequest" class="pull-right action_button">${_('Edit')}</div>
65 <div id="close_edit_pullrequest" class="pull-right action_button" style="display: none;padding: 0">${_('Cancel')}</div>
65 <div id="close_edit_pullrequest" class="pull-right action_button" style="display: none;padding: 0">${_('Cancel')}</div>
66 %endif
66 %endif
67 </div>
67 </div>
68
68
69 <div id="summary" class="fields pr-details-content">
69 <div id="summary" class="fields pr-details-content">
70 <div class="field">
70 <div class="field">
71 <div class="label-summary">
71 <div class="label-summary">
72 <label>${_('Source')}:</label>
72 <label>${_('Source')}:</label>
73 </div>
73 </div>
74 <div class="input">
74 <div class="input">
75 <div class="pr-origininfo">
75 <div class="pr-origininfo">
76 ## branch link is only valid if it is a branch
76 ## branch link is only valid if it is a branch
77 <span class="tag">
77 <span class="tag">
78 %if c.pull_request.source_ref_parts.type == 'branch':
78 %if c.pull_request.source_ref_parts.type == 'branch':
79 <a href="${h.route_path('repo_changelog', repo_name=c.pull_request.source_repo.repo_name, _query=dict(branch=c.pull_request.source_ref_parts.name))}">${c.pull_request.source_ref_parts.type}: ${c.pull_request.source_ref_parts.name}</a>
79 <a href="${h.route_path('repo_changelog', repo_name=c.pull_request.source_repo.repo_name, _query=dict(branch=c.pull_request.source_ref_parts.name))}">${c.pull_request.source_ref_parts.type}: ${c.pull_request.source_ref_parts.name}</a>
80 %else:
80 %else:
81 ${c.pull_request.source_ref_parts.type}: ${c.pull_request.source_ref_parts.name}
81 ${c.pull_request.source_ref_parts.type}: ${c.pull_request.source_ref_parts.name}
82 %endif
82 %endif
83 </span>
83 </span>
84 <span class="clone-url">
84 <span class="clone-url">
85 <a href="${h.route_path('repo_summary', repo_name=c.pull_request.source_repo.repo_name)}">${c.pull_request.source_repo.clone_url()}</a>
85 <a href="${h.route_path('repo_summary', repo_name=c.pull_request.source_repo.repo_name)}">${c.pull_request.source_repo.clone_url()}</a>
86 </span>
86 </span>
87 <br/>
87 <br/>
88 % if c.ancestor_commit:
88 % if c.ancestor_commit:
89 ${_('Common ancestor')}:
89 ${_('Common ancestor')}:
90 <code><a href="${h.route_path('repo_commit', repo_name=c.target_repo.repo_name, commit_id=c.ancestor_commit.raw_id)}">${h.show_id(c.ancestor_commit)}</a></code>
90 <code><a href="${h.route_path('repo_commit', repo_name=c.target_repo.repo_name, commit_id=c.ancestor_commit.raw_id)}">${h.show_id(c.ancestor_commit)}</a></code>
91 % endif
91 % endif
92 </div>
92 </div>
93 %if h.is_hg(c.pull_request.source_repo):
93 %if h.is_hg(c.pull_request.source_repo):
94 <% clone_url = 'hg pull -r {} {}'.format(h.short_id(c.source_ref), c.pull_request.source_repo.clone_url()) %>
94 <% clone_url = 'hg pull -r {} {}'.format(h.short_id(c.source_ref), c.pull_request.source_repo.clone_url()) %>
95 %elif h.is_git(c.pull_request.source_repo):
95 %elif h.is_git(c.pull_request.source_repo):
96 <% clone_url = 'git pull {} {}'.format(c.pull_request.source_repo.clone_url(), c.pull_request.source_ref_parts.name) %>
96 <% clone_url = 'git pull {} {}'.format(c.pull_request.source_repo.clone_url(), c.pull_request.source_ref_parts.name) %>
97 %endif
97 %endif
98
98
99 <div class="">
99 <div class="">
100 <input type="text" class="input-monospace pr-pullinfo" value="${clone_url}" readonly="readonly">
100 <input type="text" class="input-monospace pr-pullinfo" value="${clone_url}" readonly="readonly">
101 <i class="tooltip icon-clipboard clipboard-action pull-right pr-pullinfo-copy" data-clipboard-text="${clone_url}" title="${_('Copy the pull url')}"></i>
101 <i class="tooltip icon-clipboard clipboard-action pull-right pr-pullinfo-copy" data-clipboard-text="${clone_url}" title="${_('Copy the pull url')}"></i>
102 </div>
102 </div>
103
103
104 </div>
104 </div>
105 </div>
105 </div>
106 <div class="field">
106 <div class="field">
107 <div class="label-summary">
107 <div class="label-summary">
108 <label>${_('Target')}:</label>
108 <label>${_('Target')}:</label>
109 </div>
109 </div>
110 <div class="input">
110 <div class="input">
111 <div class="pr-targetinfo">
111 <div class="pr-targetinfo">
112 ## branch link is only valid if it is a branch
112 ## branch link is only valid if it is a branch
113 <span class="tag">
113 <span class="tag">
114 %if c.pull_request.target_ref_parts.type == 'branch':
114 %if c.pull_request.target_ref_parts.type == 'branch':
115 <a href="${h.route_path('repo_changelog', repo_name=c.pull_request.target_repo.repo_name, _query=dict(branch=c.pull_request.target_ref_parts.name))}">${c.pull_request.target_ref_parts.type}: ${c.pull_request.target_ref_parts.name}</a>
115 <a href="${h.route_path('repo_changelog', repo_name=c.pull_request.target_repo.repo_name, _query=dict(branch=c.pull_request.target_ref_parts.name))}">${c.pull_request.target_ref_parts.type}: ${c.pull_request.target_ref_parts.name}</a>
116 %else:
116 %else:
117 ${c.pull_request.target_ref_parts.type}: ${c.pull_request.target_ref_parts.name}
117 ${c.pull_request.target_ref_parts.type}: ${c.pull_request.target_ref_parts.name}
118 %endif
118 %endif
119 </span>
119 </span>
120 <span class="clone-url">
120 <span class="clone-url">
121 <a href="${h.route_path('repo_summary', repo_name=c.pull_request.target_repo.repo_name)}">${c.pull_request.target_repo.clone_url()}</a>
121 <a href="${h.route_path('repo_summary', repo_name=c.pull_request.target_repo.repo_name)}">${c.pull_request.target_repo.clone_url()}</a>
122 </span>
122 </span>
123 </div>
123 </div>
124 </div>
124 </div>
125 </div>
125 </div>
126
126
127 ## Link to the shadow repository.
127 ## Link to the shadow repository.
128 <div class="field">
128 <div class="field">
129 <div class="label-summary">
129 <div class="label-summary">
130 <label>${_('Merge')}:</label>
130 <label>${_('Merge')}:</label>
131 </div>
131 </div>
132 <div class="input">
132 <div class="input">
133 % if not c.pull_request.is_closed() and c.pull_request.shadow_merge_ref:
133 % if not c.pull_request.is_closed() and c.pull_request.shadow_merge_ref:
134 %if h.is_hg(c.pull_request.target_repo):
134 %if h.is_hg(c.pull_request.target_repo):
135 <% clone_url = 'hg clone --update {} {} pull-request-{}'.format(c.pull_request.shadow_merge_ref.name, c.shadow_clone_url, c.pull_request.pull_request_id) %>
135 <% clone_url = 'hg clone --update {} {} pull-request-{}'.format(c.pull_request.shadow_merge_ref.name, c.shadow_clone_url, c.pull_request.pull_request_id) %>
136 %elif h.is_git(c.pull_request.target_repo):
136 %elif h.is_git(c.pull_request.target_repo):
137 <% clone_url = 'git clone --branch {} {} pull-request-{}'.format(c.pull_request.shadow_merge_ref.name, c.shadow_clone_url, c.pull_request.pull_request_id) %>
137 <% clone_url = 'git clone --branch {} {} pull-request-{}'.format(c.pull_request.shadow_merge_ref.name, c.shadow_clone_url, c.pull_request.pull_request_id) %>
138 %endif
138 %endif
139 <div class="">
139 <div class="">
140 <input type="text" class="input-monospace pr-mergeinfo" value="${clone_url}" readonly="readonly">
140 <input type="text" class="input-monospace pr-mergeinfo" value="${clone_url}" readonly="readonly">
141 <i class="tooltip icon-clipboard clipboard-action pull-right pr-mergeinfo-copy" data-clipboard-text="${clone_url}" title="${_('Copy the clone url')}"></i>
141 <i class="tooltip icon-clipboard clipboard-action pull-right pr-mergeinfo-copy" data-clipboard-text="${clone_url}" title="${_('Copy the clone url')}"></i>
142 </div>
142 </div>
143 % else:
143 % else:
144 <div class="">
144 <div class="">
145 ${_('Shadow repository data not available')}.
145 ${_('Shadow repository data not available')}.
146 </div>
146 </div>
147 % endif
147 % endif
148 </div>
148 </div>
149 </div>
149 </div>
150
150
151 <div class="field">
151 <div class="field">
152 <div class="label-summary">
152 <div class="label-summary">
153 <label>${_('Review')}:</label>
153 <label>${_('Review')}:</label>
154 </div>
154 </div>
155 <div class="input">
155 <div class="input">
156 %if c.pull_request_review_status:
156 %if c.pull_request_review_status:
157 <div class="${'flag_status %s' % c.pull_request_review_status} tooltip pull-left"></div>
157 <div class="${'flag_status %s' % c.pull_request_review_status} tooltip pull-left"></div>
158 <span class="changeset-status-lbl tooltip">
158 <span class="changeset-status-lbl tooltip">
159 %if c.pull_request.is_closed():
159 %if c.pull_request.is_closed():
160 ${_('Closed')},
160 ${_('Closed')},
161 %endif
161 %endif
162 ${h.commit_status_lbl(c.pull_request_review_status)}
162 ${h.commit_status_lbl(c.pull_request_review_status)}
163 </span>
163 </span>
164 - ${_ungettext('calculated based on %s reviewer vote', 'calculated based on %s reviewers votes', len(c.pull_request_reviewers)) % len(c.pull_request_reviewers)}
164 - ${_ungettext('calculated based on %s reviewer vote', 'calculated based on %s reviewers votes', len(c.pull_request_reviewers)) % len(c.pull_request_reviewers)}
165 %endif
165 %endif
166 </div>
166 </div>
167 </div>
167 </div>
168 <div class="field">
168 <div class="field">
169 <div class="pr-description-label label-summary" title="${_('Rendered using {} renderer').format(c.renderer)}">
169 <div class="pr-description-label label-summary" title="${_('Rendered using {} renderer').format(c.renderer)}">
170 <label>${_('Description')}:</label>
170 <label>${_('Description')}:</label>
171 </div>
171 </div>
172 <div id="pr-desc" class="input">
172 <div id="pr-desc" class="input">
173 <div class="pr-description">${h.render(c.pull_request.description, renderer=c.renderer)}</div>
173 <div class="pr-description">${h.render(c.pull_request.description, renderer=c.renderer)}</div>
174 </div>
174 </div>
175 <div id="pr-desc-edit" class="input textarea editor" style="display: none;">
175 <div id="pr-desc-edit" class="input textarea editor" style="display: none;">
176 <input id="pr-renderer-input" type="hidden" name="description_renderer" value="${c.visual.default_renderer}">
176 <input id="pr-renderer-input" type="hidden" name="description_renderer" value="${c.visual.default_renderer}">
177 ${dt.markup_form('pr-description-input', form_text=c.pull_request.description)}
177 ${dt.markup_form('pr-description-input', form_text=c.pull_request.description)}
178 </div>
178 </div>
179 </div>
179 </div>
180
180
181 <div class="field">
181 <div class="field">
182 <div class="label-summary">
182 <div class="label-summary">
183 <label>${_('Versions')}:</label>
183 <label>${_('Versions')}:</label>
184 </div>
184 </div>
185
185
186 <% outdated_comm_count_ver = len(c.inline_versions[None]['outdated']) %>
186 <% outdated_comm_count_ver = len(c.inline_versions[None]['outdated']) %>
187 <% general_outdated_comm_count_ver = len(c.comment_versions[None]['outdated']) %>
187 <% general_outdated_comm_count_ver = len(c.comment_versions[None]['outdated']) %>
188
188
189 <div class="pr-versions">
189 <div class="pr-versions">
190 % if c.show_version_changes:
190 % if c.show_version_changes:
191 <% outdated_comm_count_ver = len(c.inline_versions[c.at_version_num]['outdated']) %>
191 <% outdated_comm_count_ver = len(c.inline_versions[c.at_version_num]['outdated']) %>
192 <% general_outdated_comm_count_ver = len(c.comment_versions[c.at_version_num]['outdated']) %>
192 <% general_outdated_comm_count_ver = len(c.comment_versions[c.at_version_num]['outdated']) %>
193 <a id="show-pr-versions" class="input" onclick="return versionController.toggleVersionView(this)" href="#show-pr-versions"
193 <a id="show-pr-versions" class="input" onclick="return versionController.toggleVersionView(this)" href="#show-pr-versions"
194 data-toggle-on="${_ungettext('{} version available for this pull request, show it.', '{} versions available for this pull request, show them.', len(c.versions)).format(len(c.versions))}"
194 data-toggle-on="${_ungettext('{} version available for this pull request, show it.', '{} versions available for this pull request, show them.', len(c.versions)).format(len(c.versions))}"
195 data-toggle-off="${_('Hide all versions of this pull request')}">
195 data-toggle-off="${_('Hide all versions of this pull request')}">
196 ${_ungettext('{} version available for this pull request, show it.', '{} versions available for this pull request, show them.', len(c.versions)).format(len(c.versions))}
196 ${_ungettext('{} version available for this pull request, show it.', '{} versions available for this pull request, show them.', len(c.versions)).format(len(c.versions))}
197 </a>
197 </a>
198 <table>
198 <table>
199 ## SHOW ALL VERSIONS OF PR
199 ## SHOW ALL VERSIONS OF PR
200 <% ver_pr = None %>
200 <% ver_pr = None %>
201
201
202 % for data in reversed(list(enumerate(c.versions, 1))):
202 % for data in reversed(list(enumerate(c.versions, 1))):
203 <% ver_pos = data[0] %>
203 <% ver_pos = data[0] %>
204 <% ver = data[1] %>
204 <% ver = data[1] %>
205 <% ver_pr = ver.pull_request_version_id %>
205 <% ver_pr = ver.pull_request_version_id %>
206 <% display_row = '' if c.at_version and (c.at_version_num == ver_pr or c.from_version_num == ver_pr) else 'none' %>
206 <% display_row = '' if c.at_version and (c.at_version_num == ver_pr or c.from_version_num == ver_pr) else 'none' %>
207
207
208 <tr class="version-pr" style="display: ${display_row}">
208 <tr class="version-pr" style="display: ${display_row}">
209 <td>
209 <td>
210 <code>
210 <code>
211 <a href="${request.current_route_path(_query=dict(version=ver_pr or 'latest'))}">v${ver_pos}</a>
211 <a href="${request.current_route_path(_query=dict(version=ver_pr or 'latest'))}">v${ver_pos}</a>
212 </code>
212 </code>
213 </td>
213 </td>
214 <td>
214 <td>
215 <input ${'checked="checked"' if c.from_version_num == ver_pr else ''} class="compare-radio-button" type="radio" name="ver_source" value="${ver_pr or 'latest'}" data-ver-pos="${ver_pos}"/>
215 <input ${'checked="checked"' if c.from_version_num == ver_pr else ''} class="compare-radio-button" type="radio" name="ver_source" value="${ver_pr or 'latest'}" data-ver-pos="${ver_pos}"/>
216 <input ${'checked="checked"' if c.at_version_num == ver_pr else ''} class="compare-radio-button" type="radio" name="ver_target" value="${ver_pr or 'latest'}" data-ver-pos="${ver_pos}"/>
216 <input ${'checked="checked"' if c.at_version_num == ver_pr else ''} class="compare-radio-button" type="radio" name="ver_target" value="${ver_pr or 'latest'}" data-ver-pos="${ver_pos}"/>
217 </td>
217 </td>
218 <td>
218 <td>
219 <% review_status = c.review_versions[ver_pr].status if ver_pr in c.review_versions else 'not_reviewed' %>
219 <% review_status = c.review_versions[ver_pr].status if ver_pr in c.review_versions else 'not_reviewed' %>
220 <div class="${'flag_status %s' % review_status} tooltip pull-left" title="${_('Your review status at this version')}">
220 <div class="${'flag_status %s' % review_status} tooltip pull-left" title="${_('Your review status at this version')}">
221 </div>
221 </div>
222 </td>
222 </td>
223 <td>
223 <td>
224 % if c.at_version_num != ver_pr:
224 % if c.at_version_num != ver_pr:
225 <i class="icon-comment"></i>
225 <i class="icon-comment"></i>
226 <code class="tooltip" title="${_('Comment from pull request version v{0}, general:{1} inline:{2}').format(ver_pos, len(c.comment_versions[ver_pr]['at']), len(c.inline_versions[ver_pr]['at']))}">
226 <code class="tooltip" title="${_('Comment from pull request version v{0}, general:{1} inline:{2}').format(ver_pos, len(c.comment_versions[ver_pr]['at']), len(c.inline_versions[ver_pr]['at']))}">
227 G:${len(c.comment_versions[ver_pr]['at'])} / I:${len(c.inline_versions[ver_pr]['at'])}
227 G:${len(c.comment_versions[ver_pr]['at'])} / I:${len(c.inline_versions[ver_pr]['at'])}
228 </code>
228 </code>
229 % endif
229 % endif
230 </td>
230 </td>
231 <td>
231 <td>
232 ##<code>${ver.source_ref_parts.commit_id[:6]}</code>
232 ##<code>${ver.source_ref_parts.commit_id[:6]}</code>
233 </td>
233 </td>
234 <td>
234 <td>
235 ${h.age_component(ver.updated_on, time_is_local=True)}
235 ${h.age_component(ver.updated_on, time_is_local=True)}
236 </td>
236 </td>
237 </tr>
237 </tr>
238 % endfor
238 % endfor
239
239
240 <tr>
240 <tr>
241 <td colspan="6">
241 <td colspan="6">
242 <button id="show-version-diff" onclick="return versionController.showVersionDiff()" class="btn btn-sm" style="display: none"
242 <button id="show-version-diff" onclick="return versionController.showVersionDiff()" class="btn btn-sm" style="display: none"
243 data-label-text-locked="${_('select versions to show changes')}"
243 data-label-text-locked="${_('select versions to show changes')}"
244 data-label-text-diff="${_('show changes between versions')}"
244 data-label-text-diff="${_('show changes between versions')}"
245 data-label-text-show="${_('show pull request for this version')}"
245 data-label-text-show="${_('show pull request for this version')}"
246 >
246 >
247 ${_('select versions to show changes')}
247 ${_('select versions to show changes')}
248 </button>
248 </button>
249 </td>
249 </td>
250 </tr>
250 </tr>
251
251
252 ## show comment/inline comments summary
252 ## show comment/inline comments summary
253 <%def name="comments_summary()">
253 <%def name="comments_summary()">
254 <tr>
254 <tr>
255 <td colspan="6" class="comments-summary-td">
255 <td colspan="6" class="comments-summary-td">
256
256
257 % if c.at_version:
257 % if c.at_version:
258 <% inline_comm_count_ver = len(c.inline_versions[c.at_version_num]['display']) %>
258 <% inline_comm_count_ver = len(c.inline_versions[c.at_version_num]['display']) %>
259 <% general_comm_count_ver = len(c.comment_versions[c.at_version_num]['display']) %>
259 <% general_comm_count_ver = len(c.comment_versions[c.at_version_num]['display']) %>
260 ${_('Comments at this version')}:
260 ${_('Comments at this version')}:
261 % else:
261 % else:
262 <% inline_comm_count_ver = len(c.inline_versions[c.at_version_num]['until']) %>
262 <% inline_comm_count_ver = len(c.inline_versions[c.at_version_num]['until']) %>
263 <% general_comm_count_ver = len(c.comment_versions[c.at_version_num]['until']) %>
263 <% general_comm_count_ver = len(c.comment_versions[c.at_version_num]['until']) %>
264 ${_('Comments for this pull request')}:
264 ${_('Comments for this pull request')}:
265 % endif
265 % endif
266
266
267
267
268 %if general_comm_count_ver:
268 %if general_comm_count_ver:
269 <a href="#comments">${_("%d General ") % general_comm_count_ver}</a>
269 <a href="#comments">${_("%d General ") % general_comm_count_ver}</a>
270 %else:
270 %else:
271 ${_("%d General ") % general_comm_count_ver}
271 ${_("%d General ") % general_comm_count_ver}
272 %endif
272 %endif
273
273
274 %if inline_comm_count_ver:
274 %if inline_comm_count_ver:
275 , <a href="#" onclick="return Rhodecode.comments.nextComment();" id="inline-comments-counter">${_("%d Inline") % inline_comm_count_ver}</a>
275 , <a href="#" onclick="return Rhodecode.comments.nextComment();" id="inline-comments-counter">${_("%d Inline") % inline_comm_count_ver}</a>
276 %else:
276 %else:
277 , ${_("%d Inline") % inline_comm_count_ver}
277 , ${_("%d Inline") % inline_comm_count_ver}
278 %endif
278 %endif
279
279
280 %if outdated_comm_count_ver:
280 %if outdated_comm_count_ver:
281 , <a href="#" onclick="showOutdated(); Rhodecode.comments.nextOutdatedComment(); return false;">${_("%d Outdated") % outdated_comm_count_ver}</a>
281 , <a href="#" onclick="showOutdated(); Rhodecode.comments.nextOutdatedComment(); return false;">${_("%d Outdated") % outdated_comm_count_ver}</a>
282 <a href="#" class="showOutdatedComments" onclick="showOutdated(this); return false;"> | ${_('show outdated comments')}</a>
282 <a href="#" class="showOutdatedComments" onclick="showOutdated(this); return false;"> | ${_('show outdated comments')}</a>
283 <a href="#" class="hideOutdatedComments" style="display: none" onclick="hideOutdated(this); return false;"> | ${_('hide outdated comments')}</a>
283 <a href="#" class="hideOutdatedComments" style="display: none" onclick="hideOutdated(this); return false;"> | ${_('hide outdated comments')}</a>
284 %else:
284 %else:
285 , ${_("%d Outdated") % outdated_comm_count_ver}
285 , ${_("%d Outdated") % outdated_comm_count_ver}
286 %endif
286 %endif
287 </td>
287 </td>
288 </tr>
288 </tr>
289 </%def>
289 </%def>
290 ${comments_summary()}
290 ${comments_summary()}
291 </table>
291 </table>
292 % else:
292 % else:
293 <div class="input">
293 <div class="input">
294 ${_('Pull request versions not available')}.
294 ${_('Pull request versions not available')}.
295 </div>
295 </div>
296 <div>
296 <div>
297 <table>
297 <table>
298 ${comments_summary()}
298 ${comments_summary()}
299 </table>
299 </table>
300 </div>
300 </div>
301 % endif
301 % endif
302 </div>
302 </div>
303 </div>
303 </div>
304
304
305 <div id="pr-save" class="field" style="display: none;">
305 <div id="pr-save" class="field" style="display: none;">
306 <div class="label-summary"></div>
306 <div class="label-summary"></div>
307 <div class="input">
307 <div class="input">
308 <span id="edit_pull_request" class="btn btn-small no-margin">${_('Save Changes')}</span>
308 <span id="edit_pull_request" class="btn btn-small no-margin">${_('Save Changes')}</span>
309 </div>
309 </div>
310 </div>
310 </div>
311 </div>
311 </div>
312 </div>
312 </div>
313 <div>
313 <div>
314 ## AUTHOR
314 ## AUTHOR
315 <div class="reviewers-title block-right">
315 <div class="reviewers-title block-right">
316 <div class="pr-details-title">
316 <div class="pr-details-title">
317 ${_('Author of this pull request')}
317 ${_('Author of this pull request')}
318 </div>
318 </div>
319 </div>
319 </div>
320 <div class="block-right pr-details-content reviewers">
320 <div class="block-right pr-details-content reviewers">
321 <ul class="group_members">
321 <ul class="group_members">
322 <li>
322 <li>
323 ${self.gravatar_with_user(c.pull_request.author.email, 16)}
323 ${self.gravatar_with_user(c.pull_request.author.email, 16)}
324 </li>
324 </li>
325 </ul>
325 </ul>
326 </div>
326 </div>
327
327
328 ## REVIEW RULES
328 ## REVIEW RULES
329 <div id="review_rules" style="display: none" class="reviewers-title block-right">
329 <div id="review_rules" style="display: none" class="reviewers-title block-right">
330 <div class="pr-details-title">
330 <div class="pr-details-title">
331 ${_('Reviewer rules')}
331 ${_('Reviewer rules')}
332 %if c.allowed_to_update:
332 %if c.allowed_to_update:
333 <span id="close_edit_reviewers" class="block-right action_button last-item" style="display: none;">${_('Close')}</span>
333 <span id="close_edit_reviewers" class="block-right action_button last-item" style="display: none;">${_('Close')}</span>
334 %endif
334 %endif
335 </div>
335 </div>
336 <div class="pr-reviewer-rules">
336 <div class="pr-reviewer-rules">
337 ## review rules will be appended here, by default reviewers logic
337 ## review rules will be appended here, by default reviewers logic
338 </div>
338 </div>
339 <input id="review_data" type="hidden" name="review_data" value="">
339 <input id="review_data" type="hidden" name="review_data" value="">
340 </div>
340 </div>
341
341
342 ## REVIEWERS
342 ## REVIEWERS
343 <div class="reviewers-title block-right">
343 <div class="reviewers-title block-right">
344 <div class="pr-details-title">
344 <div class="pr-details-title">
345 ${_('Pull request reviewers')}
345 ${_('Pull request reviewers')}
346 %if c.allowed_to_update:
346 %if c.allowed_to_update:
347 <span id="open_edit_reviewers" class="block-right action_button last-item">${_('Edit')}</span>
347 <span id="open_edit_reviewers" class="block-right action_button last-item">${_('Edit')}</span>
348 %endif
348 %endif
349 </div>
349 </div>
350 </div>
350 </div>
351 <div id="reviewers" class="block-right pr-details-content reviewers">
351 <div id="reviewers" class="block-right pr-details-content reviewers">
352
352
353 ## members rendering block
353 ## members rendering block
354 <input type="hidden" name="__start__" value="review_members:sequence">
354 <input type="hidden" name="__start__" value="review_members:sequence">
355 <ul id="review_members" class="group_members">
355 <ul id="review_members" class="group_members">
356
356
357 % for review_obj, member, reasons, mandatory, status in c.pull_request_reviewers:
357 % for review_obj, member, reasons, mandatory, status in c.pull_request_reviewers:
358 <script>
358 <script>
359 var member = ${h.json.dumps(h.reviewer_as_json(member, reasons=reasons, mandatory=mandatory, user_group=review_obj.rule_user_group_data()))|n};
359 var member = ${h.json.dumps(h.reviewer_as_json(member, reasons=reasons, mandatory=mandatory, user_group=review_obj.rule_user_group_data()))|n};
360 var status = "${(status[0][1].status if status else 'not_reviewed')}";
360 var status = "${(status[0][1].status if status else 'not_reviewed')}";
361 var status_lbl = "${h.commit_status_lbl(status[0][1].status if status else 'not_reviewed')}";
361 var status_lbl = "${h.commit_status_lbl(status[0][1].status if status else 'not_reviewed')}";
362 var allowed_to_update = ${h.json.dumps(c.allowed_to_update)};
362 var allowed_to_update = ${h.json.dumps(c.allowed_to_update)};
363
363
364 var entry = renderTemplate('reviewMemberEntry', {
364 var entry = renderTemplate('reviewMemberEntry', {
365 'member': member,
365 'member': member,
366 'mandatory': member.mandatory,
366 'mandatory': member.mandatory,
367 'reasons': member.reasons,
367 'reasons': member.reasons,
368 'allowed_to_update': allowed_to_update,
368 'allowed_to_update': allowed_to_update,
369 'review_status': status,
369 'review_status': status,
370 'review_status_label': status_lbl,
370 'review_status_label': status_lbl,
371 'user_group': member.user_group,
371 'user_group': member.user_group,
372 'create': false
372 'create': false
373 });
373 });
374 $('#review_members').append(entry)
374 $('#review_members').append(entry)
375 </script>
375 </script>
376
376
377 % endfor
377 % endfor
378
378
379 </ul>
379 </ul>
380 <input type="hidden" name="__end__" value="review_members:sequence">
380 <input type="hidden" name="__end__" value="review_members:sequence">
381 ## end members rendering block
381 ## end members rendering block
382
382
383 %if not c.pull_request.is_closed():
383 %if not c.pull_request.is_closed():
384 <div id="add_reviewer" class="ac" style="display: none;">
384 <div id="add_reviewer" class="ac" style="display: none;">
385 %if c.allowed_to_update:
385 %if c.allowed_to_update:
386 % if not c.forbid_adding_reviewers:
386 % if not c.forbid_adding_reviewers:
387 <div id="add_reviewer_input" class="reviewer_ac">
387 <div id="add_reviewer_input" class="reviewer_ac">
388 ${h.text('user', class_='ac-input', placeholder=_('Add reviewer or reviewer group'))}
388 ${h.text('user', class_='ac-input', placeholder=_('Add reviewer or reviewer group'))}
389 <div id="reviewers_container"></div>
389 <div id="reviewers_container"></div>
390 </div>
390 </div>
391 % endif
391 % endif
392 <div class="pull-right">
392 <div class="pull-right">
393 <button id="update_pull_request" class="btn btn-small no-margin">${_('Save Changes')}</button>
393 <button id="update_pull_request" class="btn btn-small no-margin">${_('Save Changes')}</button>
394 </div>
394 </div>
395 %endif
395 %endif
396 </div>
396 </div>
397 %endif
397 %endif
398 </div>
398 </div>
399 </div>
399 </div>
400 </div>
400 </div>
401 <div class="box">
401 <div class="box">
402 ##DIFF
402 ##DIFF
403 <div class="table" >
403 <div class="table" >
404 <div id="changeset_compare_view_content">
404 <div id="changeset_compare_view_content">
405 ##CS
405 ##CS
406 % if c.missing_requirements:
406 % if c.missing_requirements:
407 <div class="box">
407 <div class="box">
408 <div class="alert alert-warning">
408 <div class="alert alert-warning">
409 <div>
409 <div>
410 <strong>${_('Missing requirements:')}</strong>
410 <strong>${_('Missing requirements:')}</strong>
411 ${_('These commits cannot be displayed, because this repository uses the Mercurial largefiles extension, which was not enabled.')}
411 ${_('These commits cannot be displayed, because this repository uses the Mercurial largefiles extension, which was not enabled.')}
412 </div>
412 </div>
413 </div>
413 </div>
414 </div>
414 </div>
415 % elif c.missing_commits:
415 % elif c.missing_commits:
416 <div class="box">
416 <div class="box">
417 <div class="alert alert-warning">
417 <div class="alert alert-warning">
418 <div>
418 <div>
419 <strong>${_('Missing commits')}:</strong>
419 <strong>${_('Missing commits')}:</strong>
420 ${_('This pull request cannot be displayed, because one or more commits no longer exist in the source repository.')}
420 ${_('This pull request cannot be displayed, because one or more commits no longer exist in the source repository.')}
421 ${_('Please update this pull request, push the commits back into the source repository, or consider closing this pull request.')}
421 ${_('Please update this pull request, push the commits back into the source repository, or consider closing this pull request.')}
422 ${_('Consider doing a {force_refresh_url} in case you think this is an error.').format(force_refresh_url=h.link_to('force refresh', h.current_route_path(request, force_refresh='1')))|n}
422 ${_('Consider doing a {force_refresh_url} in case you think this is an error.').format(force_refresh_url=h.link_to('force refresh', h.current_route_path(request, force_refresh='1')))|n}
423 </div>
423 </div>
424 </div>
424 </div>
425 </div>
425 </div>
426 % endif
426 % endif
427
427
428 <div class="compare_view_commits_title">
428 <div class="compare_view_commits_title">
429 % if not c.compare_mode:
429 % if not c.compare_mode:
430
430
431 % if c.at_version_pos:
431 % if c.at_version_pos:
432 <h4>
432 <h4>
433 ${_('Showing changes at v%d, commenting is disabled.') % c.at_version_pos}
433 ${_('Showing changes at v%d, commenting is disabled.') % c.at_version_pos}
434 </h4>
434 </h4>
435 % endif
435 % endif
436
436
437 <div class="pull-left">
437 <div class="pull-left">
438 <div class="btn-group">
438 <div class="btn-group">
439 <a
439 <a
440 class="btn"
440 class="btn"
441 href="#"
441 href="#"
442 onclick="$('.compare_select').show();$('.compare_select_hidden').hide(); return false">
442 onclick="$('.compare_select').show();$('.compare_select_hidden').hide(); return false">
443 ${_ungettext('Expand %s commit','Expand %s commits', len(c.commit_ranges)) % len(c.commit_ranges)}
443 ${_ungettext('Expand %s commit','Expand %s commits', len(c.commit_ranges)) % len(c.commit_ranges)}
444 </a>
444 </a>
445 <a
445 <a
446 class="btn"
446 class="btn"
447 href="#"
447 href="#"
448 onclick="$('.compare_select').hide();$('.compare_select_hidden').show(); return false">
448 onclick="$('.compare_select').hide();$('.compare_select_hidden').show(); return false">
449 ${_ungettext('Collapse %s commit','Collapse %s commits', len(c.commit_ranges)) % len(c.commit_ranges)}
449 ${_ungettext('Collapse %s commit','Collapse %s commits', len(c.commit_ranges)) % len(c.commit_ranges)}
450 </a>
450 </a>
451 <%
452 range_commit_id = '{}...{}'.format(c.commit_ranges[-1].raw_id, c.commit_ranges[0].raw_id)
453 %>
454 <a
455 class="btn"
456 href="${request.route_path('repo_commit', repo_name=c.source_repo.repo_name, commit_id=range_commit_id)}"
457 >
458 ${_('Show range diff')}
459 </a>
460
461 </div>
451 </div>
462 </div>
452 </div>
463
453
464 <div class="pull-right">
454 <div class="pull-right">
465 % if c.allowed_to_update and not c.pull_request.is_closed():
455 % if c.allowed_to_update and not c.pull_request.is_closed():
466 <a id="update_commits" class="btn btn-primary no-margin pull-right">${_('Update commits')}</a>
456 <a id="update_commits" class="btn btn-primary no-margin pull-right">${_('Update commits')}</a>
467 % else:
457 % else:
468 <a class="tooltip btn disabled pull-right" disabled="disabled" title="${_('Update is disabled for current view')}">${_('Update commits')}</a>
458 <a class="tooltip btn disabled pull-right" disabled="disabled" title="${_('Update is disabled for current view')}">${_('Update commits')}</a>
469 % endif
459 % endif
470
460
471 </div>
461 </div>
472 % endif
462 % endif
473 </div>
463 </div>
474
464
475 % if not c.missing_commits:
465 % if not c.missing_commits:
476 % if c.compare_mode:
466 % if c.compare_mode:
477 % if c.at_version:
467 % if c.at_version:
478 <h4>
468 <h4>
479 ${_('Commits and changes between v{ver_from} and {ver_to} of this pull request, commenting is disabled').format(ver_from=c.from_version_pos, ver_to=c.at_version_pos if c.at_version_pos else 'latest')}:
469 ${_('Commits and changes between v{ver_from} and {ver_to} of this pull request, commenting is disabled').format(ver_from=c.from_version_pos, ver_to=c.at_version_pos if c.at_version_pos else 'latest')}:
480 </h4>
470 </h4>
481
471
482 <div class="subtitle-compare">
472 <div class="subtitle-compare">
483 ${_('commits added: {}, removed: {}').format(len(c.commit_changes_summary.added), len(c.commit_changes_summary.removed))}
473 ${_('commits added: {}, removed: {}').format(len(c.commit_changes_summary.added), len(c.commit_changes_summary.removed))}
484 </div>
474 </div>
485
475
486 <div class="container">
476 <div class="container">
487 <table class="rctable compare_view_commits">
477 <table class="rctable compare_view_commits">
488 <tr>
478 <tr>
489 <th></th>
479 <th></th>
490 <th>${_('Time')}</th>
480 <th>${_('Time')}</th>
491 <th>${_('Author')}</th>
481 <th>${_('Author')}</th>
492 <th>${_('Commit')}</th>
482 <th>${_('Commit')}</th>
493 <th></th>
483 <th></th>
494 <th>${_('Description')}</th>
484 <th>${_('Description')}</th>
495 </tr>
485 </tr>
496
486
497 % for c_type, commit in c.commit_changes:
487 % for c_type, commit in c.commit_changes:
498 % if c_type in ['a', 'r']:
488 % if c_type in ['a', 'r']:
499 <%
489 <%
500 if c_type == 'a':
490 if c_type == 'a':
501 cc_title = _('Commit added in displayed changes')
491 cc_title = _('Commit added in displayed changes')
502 elif c_type == 'r':
492 elif c_type == 'r':
503 cc_title = _('Commit removed in displayed changes')
493 cc_title = _('Commit removed in displayed changes')
504 else:
494 else:
505 cc_title = ''
495 cc_title = ''
506 %>
496 %>
507 <tr id="row-${commit.raw_id}" commit_id="${commit.raw_id}" class="compare_select">
497 <tr id="row-${commit.raw_id}" commit_id="${commit.raw_id}" class="compare_select">
508 <td>
498 <td>
509 <div class="commit-change-indicator color-${c_type}-border">
499 <div class="commit-change-indicator color-${c_type}-border">
510 <div class="commit-change-content color-${c_type} tooltip" title="${h.tooltip(cc_title)}">
500 <div class="commit-change-content color-${c_type} tooltip" title="${h.tooltip(cc_title)}">
511 ${c_type.upper()}
501 ${c_type.upper()}
512 </div>
502 </div>
513 </div>
503 </div>
514 </td>
504 </td>
515 <td class="td-time">
505 <td class="td-time">
516 ${h.age_component(commit.date)}
506 ${h.age_component(commit.date)}
517 </td>
507 </td>
518 <td class="td-user">
508 <td class="td-user">
519 ${base.gravatar_with_user(commit.author, 16)}
509 ${base.gravatar_with_user(commit.author, 16)}
520 </td>
510 </td>
521 <td class="td-hash">
511 <td class="td-hash">
522 <code>
512 <code>
523 <a href="${h.route_path('repo_commit', repo_name=c.target_repo.repo_name, commit_id=commit.raw_id)}">
513 <a href="${h.route_path('repo_commit', repo_name=c.target_repo.repo_name, commit_id=commit.raw_id)}">
524 r${commit.idx}:${h.short_id(commit.raw_id)}
514 r${commit.idx}:${h.short_id(commit.raw_id)}
525 </a>
515 </a>
526 ${h.hidden('revisions', commit.raw_id)}
516 ${h.hidden('revisions', commit.raw_id)}
527 </code>
517 </code>
528 </td>
518 </td>
529 <td class="expand_commit" data-commit-id="${commit.raw_id}" title="${_( 'Expand commit message')}">
519 <td class="expand_commit" data-commit-id="${commit.raw_id}" title="${_( 'Expand commit message')}">
530 <div class="show_more_col">
520 <div class="show_more_col">
531 <i class="show_more"></i>
521 <i class="show_more"></i>
532 </div>
522 </div>
533 </td>
523 </td>
534 <td class="mid td-description">
524 <td class="mid td-description">
535 <div class="log-container truncate-wrap">
525 <div class="log-container truncate-wrap">
536 <div class="message truncate" id="c-${commit.raw_id}" data-message-raw="${commit.message}">
526 <div class="message truncate" id="c-${commit.raw_id}" data-message-raw="${commit.message}">
537 ${h.urlify_commit_message(commit.message, c.repo_name)}
527 ${h.urlify_commit_message(commit.message, c.repo_name)}
538 </div>
528 </div>
539 </div>
529 </div>
540 </td>
530 </td>
541 </tr>
531 </tr>
542 % endif
532 % endif
543 % endfor
533 % endfor
544 </table>
534 </table>
545 </div>
535 </div>
546
536
547 <script>
537 <script>
548 $('.expand_commit').on('click',function(e){
538 $('.expand_commit').on('click',function(e){
549 var target_expand = $(this);
539 var target_expand = $(this);
550 var cid = target_expand.data('commitId');
540 var cid = target_expand.data('commitId');
551
541
552 if (target_expand.hasClass('open')){
542 if (target_expand.hasClass('open')){
553 $('#c-'+cid).css({
543 $('#c-'+cid).css({
554 'height': '1.5em',
544 'height': '1.5em',
555 'white-space': 'nowrap',
545 'white-space': 'nowrap',
556 'text-overflow': 'ellipsis',
546 'text-overflow': 'ellipsis',
557 'overflow':'hidden'
547 'overflow':'hidden'
558 });
548 });
559 target_expand.removeClass('open');
549 target_expand.removeClass('open');
560 }
550 }
561 else {
551 else {
562 $('#c-'+cid).css({
552 $('#c-'+cid).css({
563 'height': 'auto',
553 'height': 'auto',
564 'white-space': 'pre-line',
554 'white-space': 'pre-line',
565 'text-overflow': 'initial',
555 'text-overflow': 'initial',
566 'overflow':'visible'
556 'overflow':'visible'
567 });
557 });
568 target_expand.addClass('open');
558 target_expand.addClass('open');
569 }
559 }
570 });
560 });
571 </script>
561 </script>
572
562
573 % endif
563 % endif
574
564
575 % else:
565 % else:
576 <%include file="/compare/compare_commits.mako" />
566 <%include file="/compare/compare_commits.mako" />
577 % endif
567 % endif
578
568
579 <div class="cs_files">
569 <div class="cs_files">
580 <%namespace name="cbdiffs" file="/codeblocks/diffs.mako"/>
570 <%namespace name="cbdiffs" file="/codeblocks/diffs.mako"/>
581 ${cbdiffs.render_diffset_menu(c.diffset)}
571
582 ${cbdiffs.render_diffset(
572 ${cbdiffs.render_diffset_menu(c.diffset, range_diff_on=c.range_diff_on)}
583 c.diffset, use_comments=True,
573
584 collapse_when_files_over=30,
574 % if c.range_diff_on:
585 disable_new_comments=not c.allowed_to_comment,
575 % for commit in c.commit_ranges:
586 deleted_files_comments=c.deleted_files_comments,
576 ${cbdiffs.render_diffset(
587 inline_comments=c.inline_comments)}
577 c.changes[commit.raw_id],
578 commit=commit, use_comments=True,
579 collapse_when_files_over=5,
580 disable_new_comments=True,
581 deleted_files_comments=c.deleted_files_comments,
582 inline_comments=c.inline_comments)}
583 % endfor
584 % else:
585 ${cbdiffs.render_diffset(
586 c.diffset, use_comments=True,
587 collapse_when_files_over=30,
588 disable_new_comments=not c.allowed_to_comment,
589 deleted_files_comments=c.deleted_files_comments,
590 inline_comments=c.inline_comments)}
591 % endif
592
588 </div>
593 </div>
589 % else:
594 % else:
590 ## skipping commits we need to clear the view for missing commits
595 ## skipping commits we need to clear the view for missing commits
591 <div style="clear:both;"></div>
596 <div style="clear:both;"></div>
592 % endif
597 % endif
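In the hunk above, the template now branches on c.range_diff_on: when it is set, one diffset per commit is rendered from c.changes[commit.raw_id], with new comments disabled and a lower collapse threshold (5 files instead of 30); otherwise the single combined c.diffset is rendered as before. A hedged, illustrative sketch of the context shape that branch expects; the controller code that actually fills these values is not part of this template diff:

# --- illustrative sketch, not part of this changeset ---
def build_diff_context(commit_ranges, diff_for_commit, combined_diffset, range_diff_requested):
    """Return a dict mirroring the c.* names read by the Mako branch above."""
    context = {
        'range_diff_on': bool(range_diff_requested),
        'diffset': combined_diffset,   # single combined diff, normal mode
        'changes': {},                 # per-commit diffsets, range diff mode
    }
    if context['range_diff_on']:
        for commit in commit_ranges:
            context['changes'][commit.raw_id] = diff_for_commit(commit)
    return context
# --- end sketch ---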
593
598
594 </div>
599 </div>
595 </div>
600 </div>
596
601
597 ## template for inline comment form
602 ## template for inline comment form
598 <%namespace name="comment" file="/changeset/changeset_file_comment.mako"/>
603 <%namespace name="comment" file="/changeset/changeset_file_comment.mako"/>
599
604
600 ## render general comments
605 ## render general comments
601
606
602 <div id="comment-tr-show">
607 <div id="comment-tr-show">
603 <div class="comment">
608 <div class="comment">
604 % if general_outdated_comm_count_ver:
609 % if general_outdated_comm_count_ver:
605 <div class="meta">
610 <div class="meta">
606 % if general_outdated_comm_count_ver == 1:
611 % if general_outdated_comm_count_ver == 1:
607 ${_('there is {num} general comment from older versions').format(num=general_outdated_comm_count_ver)},
612 ${_('there is {num} general comment from older versions').format(num=general_outdated_comm_count_ver)},
608 <a href="#show-hidden-comments" onclick="$('.comment-general.comment-outdated').show(); $(this).parent().hide(); return false;">${_('show it')}</a>
613 <a href="#show-hidden-comments" onclick="$('.comment-general.comment-outdated').show(); $(this).parent().hide(); return false;">${_('show it')}</a>
609 % else:
614 % else:
610 ${_('there are {num} general comments from older versions').format(num=general_outdated_comm_count_ver)},
615 ${_('there are {num} general comments from older versions').format(num=general_outdated_comm_count_ver)},
611 <a href="#show-hidden-comments" onclick="$('.comment-general.comment-outdated').show(); $(this).parent().hide(); return false;">${_('show them')}</a>
616 <a href="#show-hidden-comments" onclick="$('.comment-general.comment-outdated').show(); $(this).parent().hide(); return false;">${_('show them')}</a>
612 % endif
617 % endif
613 </div>
618 </div>
614 % endif
619 % endif
615 </div>
620 </div>
616 </div>
621 </div>
617
622
618 ${comment.generate_comments(c.comments, include_pull_request=True, is_pull_request=True)}
623 ${comment.generate_comments(c.comments, include_pull_request=True, is_pull_request=True)}
619
624
620 % if not c.pull_request.is_closed():
625 % if not c.pull_request.is_closed():
621 ## merge status, and merge action
626 ## merge status, and merge action
622 <div class="pull-request-merge">
627 <div class="pull-request-merge">
623 <%include file="/pullrequests/pullrequest_merge_checks.mako"/>
628 <%include file="/pullrequests/pullrequest_merge_checks.mako"/>
624 </div>
629 </div>
625
630
626 ## main comment form and it status
626 ## main comment form and its status
631 ## main comment form and its status
632 ${comment.comments(h.route_path('pullrequest_comment_create', repo_name=c.repo_name,
628 pull_request_id=c.pull_request.pull_request_id),
633 pull_request_id=c.pull_request.pull_request_id),
629 c.pull_request_review_status,
634 c.pull_request_review_status,
630 is_pull_request=True, change_status=c.allowed_to_change_status)}
635 is_pull_request=True, change_status=c.allowed_to_change_status)}
631 %endif
636 %endif
632
637
633 <script type="text/javascript">
638 <script type="text/javascript">
634 if (location.hash) {
639 if (location.hash) {
635 var result = splitDelimitedHash(location.hash);
640 var result = splitDelimitedHash(location.hash);
636 var line = $('html').find(result.loc);
641 var line = $('html').find(result.loc);
637 // show hidden comments if we use location.hash
642 // show hidden comments if we use location.hash
638 if (line.hasClass('comment-general')) {
643 if (line.hasClass('comment-general')) {
639 $(line).show();
644 $(line).show();
640 } else if (line.hasClass('comment-inline')) {
645 } else if (line.hasClass('comment-inline')) {
641 $(line).show();
646 $(line).show();
642 var $cb = $(line).closest('.cb');
647 var $cb = $(line).closest('.cb');
643 $cb.removeClass('cb-collapsed')
648 $cb.removeClass('cb-collapsed')
644 }
649 }
645 if (line.length > 0){
650 if (line.length > 0){
646 offsetScroll(line, 70);
651 offsetScroll(line, 70);
647 }
652 }
648 }
653 }
649
654
650 versionController = new VersionController();
655 versionController = new VersionController();
651 versionController.init();
656 versionController.init();
652
657
653 reviewersController = new ReviewersController();
658 reviewersController = new ReviewersController();
654
659
655 $(function(){
660 $(function(){
656
661
657 // custom code mirror
662 // custom code mirror
658 var codeMirrorInstance = $('#pr-description-input').get(0).MarkupForm.cm;
663 var codeMirrorInstance = $('#pr-description-input').get(0).MarkupForm.cm;
659
664
660 var PRDetails = {
665 var PRDetails = {
661 editButton: $('#open_edit_pullrequest'),
666 editButton: $('#open_edit_pullrequest'),
662 closeButton: $('#close_edit_pullrequest'),
667 closeButton: $('#close_edit_pullrequest'),
663 deleteButton: $('#delete_pullrequest'),
668 deleteButton: $('#delete_pullrequest'),
664 viewFields: $('#pr-desc, #pr-title'),
669 viewFields: $('#pr-desc, #pr-title'),
665 editFields: $('#pr-desc-edit, #pr-title-edit, #pr-save'),
670 editFields: $('#pr-desc-edit, #pr-title-edit, #pr-save'),
666
671
667 init: function() {
672 init: function() {
668 var that = this;
673 var that = this;
669 this.editButton.on('click', function(e) { that.edit(); });
674 this.editButton.on('click', function(e) { that.edit(); });
670 this.closeButton.on('click', function(e) { that.view(); });
675 this.closeButton.on('click', function(e) { that.view(); });
671 },
676 },
672
677
673 edit: function(event) {
678 edit: function(event) {
674 this.viewFields.hide();
679 this.viewFields.hide();
675 this.editButton.hide();
680 this.editButton.hide();
676 this.deleteButton.hide();
681 this.deleteButton.hide();
677 this.closeButton.show();
682 this.closeButton.show();
678 this.editFields.show();
683 this.editFields.show();
679 codeMirrorInstance.refresh();
684 codeMirrorInstance.refresh();
680 },
685 },
681
686
682 view: function(event) {
687 view: function(event) {
683 this.editButton.show();
688 this.editButton.show();
684 this.deleteButton.show();
689 this.deleteButton.show();
685 this.editFields.hide();
690 this.editFields.hide();
686 this.closeButton.hide();
691 this.closeButton.hide();
687 this.viewFields.show();
692 this.viewFields.show();
688 }
693 }
689 };
694 };
690
695
691 var ReviewersPanel = {
696 var ReviewersPanel = {
692 editButton: $('#open_edit_reviewers'),
697 editButton: $('#open_edit_reviewers'),
693 closeButton: $('#close_edit_reviewers'),
698 closeButton: $('#close_edit_reviewers'),
694 addButton: $('#add_reviewer'),
699 addButton: $('#add_reviewer'),
695 removeButtons: $('.reviewer_member_remove,.reviewer_member_mandatory_remove'),
700 removeButtons: $('.reviewer_member_remove,.reviewer_member_mandatory_remove'),
696
701
697 init: function() {
702 init: function() {
698 var self = this;
703 var self = this;
699 this.editButton.on('click', function(e) { self.edit(); });
704 this.editButton.on('click', function(e) { self.edit(); });
700 this.closeButton.on('click', function(e) { self.close(); });
705 this.closeButton.on('click', function(e) { self.close(); });
701 },
706 },
702
707
703 edit: function(event) {
708 edit: function(event) {
704 this.editButton.hide();
709 this.editButton.hide();
705 this.closeButton.show();
710 this.closeButton.show();
706 this.addButton.show();
711 this.addButton.show();
707 this.removeButtons.css('visibility', 'visible');
712 this.removeButtons.css('visibility', 'visible');
708 // review rules
713 // review rules
709 reviewersController.loadReviewRules(
714 reviewersController.loadReviewRules(
710 ${c.pull_request.reviewer_data_json | n});
715 ${c.pull_request.reviewer_data_json | n});
711 },
716 },
712
717
713 close: function(event) {
718 close: function(event) {
714 this.editButton.show();
719 this.editButton.show();
715 this.closeButton.hide();
720 this.closeButton.hide();
716 this.addButton.hide();
721 this.addButton.hide();
717 this.removeButtons.css('visibility', 'hidden');
722 this.removeButtons.css('visibility', 'hidden');
718 // hide review rules
723 // hide review rules
719 reviewersController.hideReviewRules()
724 reviewersController.hideReviewRules()
720 }
725 }
721 };
726 };
722
727
723 PRDetails.init();
728 PRDetails.init();
724 ReviewersPanel.init();
729 ReviewersPanel.init();
725
730
726 showOutdated = function(self){
731 showOutdated = function(self){
727 $('.comment-inline.comment-outdated').show();
732 $('.comment-inline.comment-outdated').show();
728 $('.filediff-outdated').show();
733 $('.filediff-outdated').show();
729 $('.showOutdatedComments').hide();
734 $('.showOutdatedComments').hide();
730 $('.hideOutdatedComments').show();
735 $('.hideOutdatedComments').show();
731 };
736 };
732
737
733 hideOutdated = function(self){
738 hideOutdated = function(self){
734 $('.comment-inline.comment-outdated').hide();
739 $('.comment-inline.comment-outdated').hide();
735 $('.filediff-outdated').hide();
740 $('.filediff-outdated').hide();
736 $('.hideOutdatedComments').hide();
741 $('.hideOutdatedComments').hide();
737 $('.showOutdatedComments').show();
742 $('.showOutdatedComments').show();
738 };
743 };
739
744
740 refreshMergeChecks = function(){
745 refreshMergeChecks = function(){
741 var loadUrl = "${request.current_route_path(_query=dict(merge_checks=1))}";
746 var loadUrl = "${request.current_route_path(_query=dict(merge_checks=1))}";
742 $('.pull-request-merge').css('opacity', 0.3);
747 $('.pull-request-merge').css('opacity', 0.3);
743 $('.action-buttons-extra').css('opacity', 0.3);
748 $('.action-buttons-extra').css('opacity', 0.3);
744
749
745 $('.pull-request-merge').load(
750 $('.pull-request-merge').load(
746 loadUrl, function() {
751 loadUrl, function() {
747 $('.pull-request-merge').css('opacity', 1);
752 $('.pull-request-merge').css('opacity', 1);
748
753
749 $('.action-buttons-extra').css('opacity', 1);
754 $('.action-buttons-extra').css('opacity', 1);
750 injectCloseAction();
755 injectCloseAction();
751 }
756 }
752 );
757 );
753 };
758 };
754
759
755 injectCloseAction = function() {
760 injectCloseAction = function() {
756 var closeAction = $('#close-pull-request-action').html();
761 var closeAction = $('#close-pull-request-action').html();
757 var $actionButtons = $('.action-buttons-extra');
762 var $actionButtons = $('.action-buttons-extra');
758 // clear the action before
763 // clear the action before
759 $actionButtons.html("");
764 $actionButtons.html("");
760 $actionButtons.html(closeAction);
765 $actionButtons.html(closeAction);
761 };
766 };

  closePullRequest = function (status) {
    // inject closing flag
    $('.action-buttons-extra').append('<input type="hidden" class="close-pr-input" id="close_pull_request" value="1">');
    $(generalCommentForm.statusChange).select2("val", status).trigger('change');
    $(generalCommentForm.submitForm).submit();
  };
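  // closePullRequest(status) closes the PR through the general comment form: it
  // plants a hidden close_pull_request=1 flag, forces the status select2 to the
  // given value, and submits the form. Illustrative call (the accepted values are
  // whatever the status select offers, e.g. 'rejected'):
  //   closePullRequest('rejected');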

  $('#show-outdated-comments').on('click', function(e){
    var button = $(this);
    var outdated = $('.comment-outdated');

    if (button.html() === "(Show)") {
      button.html("(Hide)");
      outdated.show();
    } else {
      button.html("(Show)");
      outdated.hide();
    }
  });
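  // The "(Show)"/"(Hide)" label doubles as the toggle state, which is why the
  // handler compares the button text rather than keeping a separate flag.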

  $('.show-inline-comments').on('change', function(e){
    var show = 'none';
    var target = e.currentTarget;
    if(target.checked){
      show = '';
    }
    var boxid = $(target).attr('id_for');
    var comments = $('#{0} .inline-comments'.format(boxid));
    var fn_display = function(idx){
      $(this).css('display', show);
    };
    $(comments).each(fn_display);
    var btns = $('#{0} .inline-comments-button'.format(boxid));
    $(btns).each(fn_display);
  });
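  // Note: '#{0} ...'.format(boxid) relies on a String.prototype.format helper
  // provided elsewhere in RhodeCode's JS (it is not standard JavaScript), and the
  // id_for attribute is assumed to carry the id of the filediff box the checkbox controls.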

  $('#merge_pull_request_form').submit(function() {
    if (!$('#merge_pull_request').attr('disabled')) {
      $('#merge_pull_request').attr('disabled', 'disabled');
    }
    return true;
  });
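  // Disabling #merge_pull_request on the first submit guards against double
  // clicks; returning true still lets the merge form reach the server.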

  $('#edit_pull_request').on('click', function(e){
    var title = $('#pr-title-input').val();
    var description = codeMirrorInstance.getValue();
    var renderer = $('#pr-renderer-input').val();
    editPullRequest(
      "${c.repo_name}", "${c.pull_request.pull_request_id}",
      title, description, renderer);
  });

  $('#update_pull_request').on('click', function(e){
    $(this).attr('disabled', 'disabled');
    $(this).addClass('disabled');
    $(this).html(_gettext('Saving...'));
    reviewersController.updateReviewers(
      "${c.repo_name}", "${c.pull_request.pull_request_id}");
  });

  $('#update_commits').on('click', function(e){
    var isDisabled = !$(e.currentTarget).attr('disabled');
    $(e.currentTarget).attr('disabled', 'disabled');
    $(e.currentTarget).addClass('disabled');
    $(e.currentTarget).removeClass('btn-primary');
    $(e.currentTarget).text(_gettext('Updating...'));
    if(isDisabled){
      updateCommits(
        "${c.repo_name}", "${c.pull_request.pull_request_id}");
    }
  });
  // fixing issue with caches on firefox
  $('#update_commits').removeAttr("disabled");
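  // isDisabled (true when the button is still enabled, despite the name) is read
  // before the attribute is set, so only the first click reaches updateCommits().
  // The removeAttr("disabled") above undoes the state Firefox restores from its
  // page cache on back navigation, per the original "caches on firefox" note.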

  $('.show-inline-comments').on('click', function(e){
    var boxid = $(this).attr('data-comment-id');
    var button = $(this);

    if(button.hasClass("comments-visible")) {
      $('#{0} .inline-comments'.format(boxid)).each(function(index){
        $(this).hide();
      });
      button.removeClass("comments-visible");
    } else {
      $('#{0} .inline-comments'.format(boxid)).each(function(index){
        $(this).show();
      });
      button.addClass("comments-visible");
    }
  });
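  // Second binding on .show-inline-comments: the 'change' handler above appears
  // to serve checkbox toggles keyed by id_for, while this 'click' handler serves
  // button-style toggles keyed by data-comment-id.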

  // register submit callback on commentForm form to track TODOs
  window.commentFormGlobalSubmitSuccessCallback = function(){
    refreshMergeChecks();
  };
  // initial injection
  injectCloseAction();

  ReviewerAutoComplete('#user');
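  // The comment form invokes window.commentFormGlobalSubmitSuccessCallback after
  // a successful post, so adding or resolving a TODO immediately re-renders the
  // merge checks. ReviewerAutoComplete('#user') wires the reviewer search box;
  // both hooks are assumed to live in RhodeCode's shared JS bundle.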

})
</script>

</div>
</div>

</%def>