##// END OF EJS Templates
diffs: inline prepare() into __init__ and make the result available as .parsed...
Mads Kiilerich -
r6838:24a9bec8 default
parent child Browse files
Show More
@@ -1,460 +1,459 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 # This program is free software: you can redistribute it and/or modify
2 # This program is free software: you can redistribute it and/or modify
3 # it under the terms of the GNU General Public License as published by
3 # it under the terms of the GNU General Public License as published by
4 # the Free Software Foundation, either version 3 of the License, or
4 # the Free Software Foundation, either version 3 of the License, or
5 # (at your option) any later version.
5 # (at your option) any later version.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU General Public License
12 # You should have received a copy of the GNU General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 """
14 """
15 kallithea.controllers.changeset
15 kallithea.controllers.changeset
16 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
16 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
17
17
18 changeset controller showing changes between revisions
18 changeset controller showing changes between revisions
19
19
20 This file was forked by the Kallithea project in July 2014.
20 This file was forked by the Kallithea project in July 2014.
21 Original author and date, and relevant copyright and licensing information is below:
21 Original author and date, and relevant copyright and licensing information is below:
22 :created_on: Apr 25, 2010
22 :created_on: Apr 25, 2010
23 :author: marcink
23 :author: marcink
24 :copyright: (c) 2013 RhodeCode GmbH, and others.
24 :copyright: (c) 2013 RhodeCode GmbH, and others.
25 :license: GPLv3, see LICENSE.md for more details.
25 :license: GPLv3, see LICENSE.md for more details.
26 """
26 """
27
27
28 import logging
28 import logging
29 import traceback
29 import traceback
30 from collections import defaultdict
30 from collections import defaultdict
31
31
32 from tg import tmpl_context as c, request, response
32 from tg import tmpl_context as c, request, response
33 from tg.i18n import ugettext as _
33 from tg.i18n import ugettext as _
34 from webob.exc import HTTPFound, HTTPForbidden, HTTPBadRequest, HTTPNotFound
34 from webob.exc import HTTPFound, HTTPForbidden, HTTPBadRequest, HTTPNotFound
35
35
36 from kallithea.lib.vcs.exceptions import RepositoryError, \
36 from kallithea.lib.vcs.exceptions import RepositoryError, \
37 ChangesetDoesNotExistError, EmptyRepositoryError
37 ChangesetDoesNotExistError, EmptyRepositoryError
38
38
39 import kallithea.lib.helpers as h
39 import kallithea.lib.helpers as h
40 from kallithea.lib.auth import LoginRequired, HasRepoPermissionLevelDecorator, \
40 from kallithea.lib.auth import LoginRequired, HasRepoPermissionLevelDecorator, \
41 NotAnonymous
41 NotAnonymous
42 from kallithea.lib.base import BaseRepoController, render, jsonify
42 from kallithea.lib.base import BaseRepoController, render, jsonify
43 from kallithea.lib.utils import action_logger
43 from kallithea.lib.utils import action_logger
44 from kallithea.lib.compat import OrderedDict
44 from kallithea.lib.compat import OrderedDict
45 from kallithea.lib import diffs
45 from kallithea.lib import diffs
46 from kallithea.model.db import ChangesetComment, ChangesetStatus
46 from kallithea.model.db import ChangesetComment, ChangesetStatus
47 from kallithea.model.comment import ChangesetCommentsModel
47 from kallithea.model.comment import ChangesetCommentsModel
48 from kallithea.model.changeset_status import ChangesetStatusModel
48 from kallithea.model.changeset_status import ChangesetStatusModel
49 from kallithea.model.meta import Session
49 from kallithea.model.meta import Session
50 from kallithea.model.repo import RepoModel
50 from kallithea.model.repo import RepoModel
51 from kallithea.lib.diffs import LimitedDiffContainer
51 from kallithea.lib.diffs import LimitedDiffContainer
52 from kallithea.lib.exceptions import StatusChangeOnClosedPullRequestError
52 from kallithea.lib.exceptions import StatusChangeOnClosedPullRequestError
53 from kallithea.lib.vcs.backends.base import EmptyChangeset
53 from kallithea.lib.vcs.backends.base import EmptyChangeset
54 from kallithea.lib.utils2 import safe_unicode
54 from kallithea.lib.utils2 import safe_unicode
55 from kallithea.lib.graphmod import graph_data
55 from kallithea.lib.graphmod import graph_data
56
56
57 log = logging.getLogger(__name__)
57 log = logging.getLogger(__name__)
58
58
59
59
60 def _update_with_GET(params, GET):
60 def _update_with_GET(params, GET):
61 for k in ['diff1', 'diff2', 'diff']:
61 for k in ['diff1', 'diff2', 'diff']:
62 params[k] += GET.getall(k)
62 params[k] += GET.getall(k)
63
63
64
64
65 def anchor_url(revision, path, GET):
65 def anchor_url(revision, path, GET):
66 fid = h.FID(revision, path)
66 fid = h.FID(revision, path)
67 return h.url.current(anchor=fid, **dict(GET))
67 return h.url.current(anchor=fid, **dict(GET))
68
68
69
69
70 def get_ignore_ws(fid, GET):
70 def get_ignore_ws(fid, GET):
71 ig_ws_global = GET.get('ignorews')
71 ig_ws_global = GET.get('ignorews')
72 ig_ws = filter(lambda k: k.startswith('WS'), GET.getall(fid))
72 ig_ws = filter(lambda k: k.startswith('WS'), GET.getall(fid))
73 if ig_ws:
73 if ig_ws:
74 try:
74 try:
75 return int(ig_ws[0].split(':')[-1])
75 return int(ig_ws[0].split(':')[-1])
76 except ValueError:
76 except ValueError:
77 raise HTTPBadRequest()
77 raise HTTPBadRequest()
78 return ig_ws_global
78 return ig_ws_global
79
79
80
80
81 def _ignorews_url(GET, fileid=None):
81 def _ignorews_url(GET, fileid=None):
82 fileid = str(fileid) if fileid else None
82 fileid = str(fileid) if fileid else None
83 params = defaultdict(list)
83 params = defaultdict(list)
84 _update_with_GET(params, GET)
84 _update_with_GET(params, GET)
85 lbl = _('Show whitespace')
85 lbl = _('Show whitespace')
86 ig_ws = get_ignore_ws(fileid, GET)
86 ig_ws = get_ignore_ws(fileid, GET)
87 ln_ctx = get_line_ctx(fileid, GET)
87 ln_ctx = get_line_ctx(fileid, GET)
88 # global option
88 # global option
89 if fileid is None:
89 if fileid is None:
90 if ig_ws is None:
90 if ig_ws is None:
91 params['ignorews'] += [1]
91 params['ignorews'] += [1]
92 lbl = _('Ignore whitespace')
92 lbl = _('Ignore whitespace')
93 ctx_key = 'context'
93 ctx_key = 'context'
94 ctx_val = ln_ctx
94 ctx_val = ln_ctx
95 # per file options
95 # per file options
96 else:
96 else:
97 if ig_ws is None:
97 if ig_ws is None:
98 params[fileid] += ['WS:1']
98 params[fileid] += ['WS:1']
99 lbl = _('Ignore whitespace')
99 lbl = _('Ignore whitespace')
100
100
101 ctx_key = fileid
101 ctx_key = fileid
102 ctx_val = 'C:%s' % ln_ctx
102 ctx_val = 'C:%s' % ln_ctx
103 # if we have passed in ln_ctx pass it along to our params
103 # if we have passed in ln_ctx pass it along to our params
104 if ln_ctx:
104 if ln_ctx:
105 params[ctx_key] += [ctx_val]
105 params[ctx_key] += [ctx_val]
106
106
107 params['anchor'] = fileid
107 params['anchor'] = fileid
108 icon = h.literal('<i class="icon-strike"></i>')
108 icon = h.literal('<i class="icon-strike"></i>')
109 return h.link_to(icon, h.url.current(**params), title=lbl, **{'data-toggle': 'tooltip'})
109 return h.link_to(icon, h.url.current(**params), title=lbl, **{'data-toggle': 'tooltip'})
110
110
111
111
112 def get_line_ctx(fid, GET):
112 def get_line_ctx(fid, GET):
113 ln_ctx_global = GET.get('context')
113 ln_ctx_global = GET.get('context')
114 if fid:
114 if fid:
115 ln_ctx = filter(lambda k: k.startswith('C'), GET.getall(fid))
115 ln_ctx = filter(lambda k: k.startswith('C'), GET.getall(fid))
116 else:
116 else:
117 _ln_ctx = filter(lambda k: k.startswith('C'), GET)
117 _ln_ctx = filter(lambda k: k.startswith('C'), GET)
118 ln_ctx = GET.get(_ln_ctx[0]) if _ln_ctx else ln_ctx_global
118 ln_ctx = GET.get(_ln_ctx[0]) if _ln_ctx else ln_ctx_global
119 if ln_ctx:
119 if ln_ctx:
120 ln_ctx = [ln_ctx]
120 ln_ctx = [ln_ctx]
121
121
122 if ln_ctx:
122 if ln_ctx:
123 retval = ln_ctx[0].split(':')[-1]
123 retval = ln_ctx[0].split(':')[-1]
124 else:
124 else:
125 retval = ln_ctx_global
125 retval = ln_ctx_global
126
126
127 try:
127 try:
128 return int(retval)
128 return int(retval)
129 except Exception:
129 except Exception:
130 return 3
130 return 3
131
131
132
132
133 def _context_url(GET, fileid=None):
133 def _context_url(GET, fileid=None):
134 """
134 """
135 Generates url for context lines
135 Generates url for context lines
136
136
137 :param fileid:
137 :param fileid:
138 """
138 """
139
139
140 fileid = str(fileid) if fileid else None
140 fileid = str(fileid) if fileid else None
141 ig_ws = get_ignore_ws(fileid, GET)
141 ig_ws = get_ignore_ws(fileid, GET)
142 ln_ctx = (get_line_ctx(fileid, GET) or 3) * 2
142 ln_ctx = (get_line_ctx(fileid, GET) or 3) * 2
143
143
144 params = defaultdict(list)
144 params = defaultdict(list)
145 _update_with_GET(params, GET)
145 _update_with_GET(params, GET)
146
146
147 # global option
147 # global option
148 if fileid is None:
148 if fileid is None:
149 if ln_ctx > 0:
149 if ln_ctx > 0:
150 params['context'] += [ln_ctx]
150 params['context'] += [ln_ctx]
151
151
152 if ig_ws:
152 if ig_ws:
153 ig_ws_key = 'ignorews'
153 ig_ws_key = 'ignorews'
154 ig_ws_val = 1
154 ig_ws_val = 1
155
155
156 # per file option
156 # per file option
157 else:
157 else:
158 params[fileid] += ['C:%s' % ln_ctx]
158 params[fileid] += ['C:%s' % ln_ctx]
159 ig_ws_key = fileid
159 ig_ws_key = fileid
160 ig_ws_val = 'WS:%s' % 1
160 ig_ws_val = 'WS:%s' % 1
161
161
162 if ig_ws:
162 if ig_ws:
163 params[ig_ws_key] += [ig_ws_val]
163 params[ig_ws_key] += [ig_ws_val]
164
164
165 lbl = _('Increase diff context to %(num)s lines') % {'num': ln_ctx}
165 lbl = _('Increase diff context to %(num)s lines') % {'num': ln_ctx}
166
166
167 params['anchor'] = fileid
167 params['anchor'] = fileid
168 icon = h.literal('<i class="icon-sort"></i>')
168 icon = h.literal('<i class="icon-sort"></i>')
169 return h.link_to(icon, h.url.current(**params), title=lbl, **{'data-toggle': 'tooltip'})
169 return h.link_to(icon, h.url.current(**params), title=lbl, **{'data-toggle': 'tooltip'})
170
170
171
171
172 # Could perhaps be nice to have in the model but is too high level ...
172 # Could perhaps be nice to have in the model but is too high level ...
173 def create_comment(text, status, f_path, line_no, revision=None, pull_request_id=None, closing_pr=None):
173 def create_comment(text, status, f_path, line_no, revision=None, pull_request_id=None, closing_pr=None):
174 """Comment functionality shared between changesets and pullrequests"""
174 """Comment functionality shared between changesets and pullrequests"""
175 f_path = f_path or None
175 f_path = f_path or None
176 line_no = line_no or None
176 line_no = line_no or None
177
177
178 comment = ChangesetCommentsModel().create(
178 comment = ChangesetCommentsModel().create(
179 text=text,
179 text=text,
180 repo=c.db_repo.repo_id,
180 repo=c.db_repo.repo_id,
181 author=request.authuser.user_id,
181 author=request.authuser.user_id,
182 revision=revision,
182 revision=revision,
183 pull_request=pull_request_id,
183 pull_request=pull_request_id,
184 f_path=f_path,
184 f_path=f_path,
185 line_no=line_no,
185 line_no=line_no,
186 status_change=ChangesetStatus.get_status_lbl(status) if status else None,
186 status_change=ChangesetStatus.get_status_lbl(status) if status else None,
187 closing_pr=closing_pr,
187 closing_pr=closing_pr,
188 )
188 )
189
189
190 return comment
190 return comment
191
191
192
192
193 class ChangesetController(BaseRepoController):
193 class ChangesetController(BaseRepoController):
194
194
195 def _before(self, *args, **kwargs):
195 def _before(self, *args, **kwargs):
196 super(ChangesetController, self)._before(*args, **kwargs)
196 super(ChangesetController, self)._before(*args, **kwargs)
197 c.affected_files_cut_off = 60
197 c.affected_files_cut_off = 60
198
198
199 def __load_data(self):
199 def __load_data(self):
200 repo_model = RepoModel()
200 repo_model = RepoModel()
201 c.users_array = repo_model.get_users_js()
201 c.users_array = repo_model.get_users_js()
202 c.user_groups_array = repo_model.get_user_groups_js()
202 c.user_groups_array = repo_model.get_user_groups_js()
203
203
204 def _index(self, revision, method):
204 def _index(self, revision, method):
205 c.pull_request = None
205 c.pull_request = None
206 c.anchor_url = anchor_url
206 c.anchor_url = anchor_url
207 c.ignorews_url = _ignorews_url
207 c.ignorews_url = _ignorews_url
208 c.context_url = _context_url
208 c.context_url = _context_url
209 c.fulldiff = request.GET.get('fulldiff') # for reporting number of changed files
209 c.fulldiff = request.GET.get('fulldiff') # for reporting number of changed files
210 # get ranges of revisions if preset
210 # get ranges of revisions if preset
211 rev_range = revision.split('...')[:2]
211 rev_range = revision.split('...')[:2]
212 enable_comments = True
212 enable_comments = True
213 c.cs_repo = c.db_repo
213 c.cs_repo = c.db_repo
214 try:
214 try:
215 if len(rev_range) == 2:
215 if len(rev_range) == 2:
216 enable_comments = False
216 enable_comments = False
217 rev_start = rev_range[0]
217 rev_start = rev_range[0]
218 rev_end = rev_range[1]
218 rev_end = rev_range[1]
219 rev_ranges = c.db_repo_scm_instance.get_changesets(start=rev_start,
219 rev_ranges = c.db_repo_scm_instance.get_changesets(start=rev_start,
220 end=rev_end)
220 end=rev_end)
221 else:
221 else:
222 rev_ranges = [c.db_repo_scm_instance.get_changeset(revision)]
222 rev_ranges = [c.db_repo_scm_instance.get_changeset(revision)]
223
223
224 c.cs_ranges = list(rev_ranges)
224 c.cs_ranges = list(rev_ranges)
225 if not c.cs_ranges:
225 if not c.cs_ranges:
226 raise RepositoryError('Changeset range returned empty result')
226 raise RepositoryError('Changeset range returned empty result')
227
227
228 except (ChangesetDoesNotExistError, EmptyRepositoryError):
228 except (ChangesetDoesNotExistError, EmptyRepositoryError):
229 log.debug(traceback.format_exc())
229 log.debug(traceback.format_exc())
230 msg = _('Such revision does not exist for this repository')
230 msg = _('Such revision does not exist for this repository')
231 h.flash(msg, category='error')
231 h.flash(msg, category='error')
232 raise HTTPNotFound()
232 raise HTTPNotFound()
233
233
234 c.changes = OrderedDict()
234 c.changes = OrderedDict()
235
235
236 c.lines_added = 0 # count of lines added
236 c.lines_added = 0 # count of lines added
237 c.lines_deleted = 0 # count of lines removes
237 c.lines_deleted = 0 # count of lines removes
238
238
239 c.changeset_statuses = ChangesetStatus.STATUSES
239 c.changeset_statuses = ChangesetStatus.STATUSES
240 comments = dict()
240 comments = dict()
241 c.statuses = []
241 c.statuses = []
242 c.inline_comments = []
242 c.inline_comments = []
243 c.inline_cnt = 0
243 c.inline_cnt = 0
244
244
245 # Iterate over ranges (default changeset view is always one changeset)
245 # Iterate over ranges (default changeset view is always one changeset)
246 for changeset in c.cs_ranges:
246 for changeset in c.cs_ranges:
247 if method == 'show':
247 if method == 'show':
248 c.statuses.extend([ChangesetStatusModel().get_status(
248 c.statuses.extend([ChangesetStatusModel().get_status(
249 c.db_repo.repo_id, changeset.raw_id)])
249 c.db_repo.repo_id, changeset.raw_id)])
250
250
251 # Changeset comments
251 # Changeset comments
252 comments.update((com.comment_id, com)
252 comments.update((com.comment_id, com)
253 for com in ChangesetCommentsModel()
253 for com in ChangesetCommentsModel()
254 .get_comments(c.db_repo.repo_id,
254 .get_comments(c.db_repo.repo_id,
255 revision=changeset.raw_id))
255 revision=changeset.raw_id))
256
256
257 # Status change comments - mostly from pull requests
257 # Status change comments - mostly from pull requests
258 comments.update((st.comment_id, st.comment)
258 comments.update((st.comment_id, st.comment)
259 for st in ChangesetStatusModel()
259 for st in ChangesetStatusModel()
260 .get_statuses(c.db_repo.repo_id,
260 .get_statuses(c.db_repo.repo_id,
261 changeset.raw_id, with_revisions=True)
261 changeset.raw_id, with_revisions=True)
262 if st.comment_id is not None)
262 if st.comment_id is not None)
263
263
264 inlines = ChangesetCommentsModel() \
264 inlines = ChangesetCommentsModel() \
265 .get_inline_comments(c.db_repo.repo_id,
265 .get_inline_comments(c.db_repo.repo_id,
266 revision=changeset.raw_id)
266 revision=changeset.raw_id)
267 c.inline_comments.extend(inlines)
267 c.inline_comments.extend(inlines)
268
268
269 cs2 = changeset.raw_id
269 cs2 = changeset.raw_id
270 cs1 = changeset.parents[0].raw_id if changeset.parents else EmptyChangeset().raw_id
270 cs1 = changeset.parents[0].raw_id if changeset.parents else EmptyChangeset().raw_id
271 context_lcl = get_line_ctx('', request.GET)
271 context_lcl = get_line_ctx('', request.GET)
272 ign_whitespace_lcl = get_ignore_ws('', request.GET)
272 ign_whitespace_lcl = get_ignore_ws('', request.GET)
273
273
274 raw_diff = c.db_repo_scm_instance.get_diff(cs1, cs2,
274 raw_diff = c.db_repo_scm_instance.get_diff(cs1, cs2,
275 ignore_whitespace=ign_whitespace_lcl, context=context_lcl)
275 ignore_whitespace=ign_whitespace_lcl, context=context_lcl)
276 diff_limit = None if c.fulldiff else self.cut_off_limit
276 diff_limit = None if c.fulldiff else self.cut_off_limit
277 file_diff_data = []
277 file_diff_data = []
278 if method == 'show':
278 if method == 'show':
279 diff_processor = diffs.DiffProcessor(raw_diff,
279 diff_processor = diffs.DiffProcessor(raw_diff,
280 vcs=c.db_repo_scm_instance.alias,
280 vcs=c.db_repo_scm_instance.alias,
281 diff_limit=diff_limit)
281 diff_limit=diff_limit)
282 _parsed = diff_processor.prepare()
283 c.limited_diff = False
282 c.limited_diff = False
284 if isinstance(_parsed, LimitedDiffContainer):
283 if isinstance(diff_processor.parsed, LimitedDiffContainer):
285 c.limited_diff = True
284 c.limited_diff = True
286 for f in _parsed:
285 for f in diff_processor.parsed:
287 st = f['stats']
286 st = f['stats']
288 c.lines_added += st['added']
287 c.lines_added += st['added']
289 c.lines_deleted += st['deleted']
288 c.lines_deleted += st['deleted']
290 filename = f['filename']
289 filename = f['filename']
291 fid = h.FID(changeset.raw_id, filename)
290 fid = h.FID(changeset.raw_id, filename)
292 url_fid = h.FID('', filename)
291 url_fid = h.FID('', filename)
293 diff = diff_processor.as_html(enable_comments=enable_comments,
292 diff = diff_processor.as_html(enable_comments=enable_comments,
294 parsed_lines=[f])
293 parsed_lines=[f])
295 file_diff_data.append((fid, url_fid, f['operation'], f['old_filename'], filename, diff, st))
294 file_diff_data.append((fid, url_fid, f['operation'], f['old_filename'], filename, diff, st))
296 else:
295 else:
297 # downloads/raw we only need RAW diff nothing else
296 # downloads/raw we only need RAW diff nothing else
298 file_diff_data.append(('', None, None, None, raw_diff, None))
297 file_diff_data.append(('', None, None, None, raw_diff, None))
299 c.changes[changeset.raw_id] = (cs1, cs2, file_diff_data)
298 c.changes[changeset.raw_id] = (cs1, cs2, file_diff_data)
300
299
301 # sort comments in creation order
300 # sort comments in creation order
302 c.comments = [com for com_id, com in sorted(comments.items())]
301 c.comments = [com for com_id, com in sorted(comments.items())]
303
302
304 # count inline comments
303 # count inline comments
305 for __, lines in c.inline_comments:
304 for __, lines in c.inline_comments:
306 for comments in lines.values():
305 for comments in lines.values():
307 c.inline_cnt += len(comments)
306 c.inline_cnt += len(comments)
308
307
309 if len(c.cs_ranges) == 1:
308 if len(c.cs_ranges) == 1:
310 c.changeset = c.cs_ranges[0]
309 c.changeset = c.cs_ranges[0]
311 c.parent_tmpl = ''.join(['# Parent %s\n' % x.raw_id
310 c.parent_tmpl = ''.join(['# Parent %s\n' % x.raw_id
312 for x in c.changeset.parents])
311 for x in c.changeset.parents])
313 if method == 'download':
312 if method == 'download':
314 response.content_type = 'text/plain'
313 response.content_type = 'text/plain'
315 response.content_disposition = 'attachment; filename=%s.diff' \
314 response.content_disposition = 'attachment; filename=%s.diff' \
316 % revision[:12]
315 % revision[:12]
317 return raw_diff
316 return raw_diff
318 elif method == 'patch':
317 elif method == 'patch':
319 response.content_type = 'text/plain'
318 response.content_type = 'text/plain'
320 c.diff = safe_unicode(raw_diff)
319 c.diff = safe_unicode(raw_diff)
321 return render('changeset/patch_changeset.html')
320 return render('changeset/patch_changeset.html')
322 elif method == 'raw':
321 elif method == 'raw':
323 response.content_type = 'text/plain'
322 response.content_type = 'text/plain'
324 return raw_diff
323 return raw_diff
325 elif method == 'show':
324 elif method == 'show':
326 self.__load_data()
325 self.__load_data()
327 if len(c.cs_ranges) == 1:
326 if len(c.cs_ranges) == 1:
328 return render('changeset/changeset.html')
327 return render('changeset/changeset.html')
329 else:
328 else:
330 c.cs_ranges_org = None
329 c.cs_ranges_org = None
331 c.cs_comments = {}
330 c.cs_comments = {}
332 revs = [ctx.revision for ctx in reversed(c.cs_ranges)]
331 revs = [ctx.revision for ctx in reversed(c.cs_ranges)]
333 c.jsdata = graph_data(c.db_repo_scm_instance, revs)
332 c.jsdata = graph_data(c.db_repo_scm_instance, revs)
334 return render('changeset/changeset_range.html')
333 return render('changeset/changeset_range.html')
335
334
336 @LoginRequired()
335 @LoginRequired()
337 @HasRepoPermissionLevelDecorator('read')
336 @HasRepoPermissionLevelDecorator('read')
338 def index(self, revision, method='show'):
337 def index(self, revision, method='show'):
339 return self._index(revision, method=method)
338 return self._index(revision, method=method)
340
339
341 @LoginRequired()
340 @LoginRequired()
342 @HasRepoPermissionLevelDecorator('read')
341 @HasRepoPermissionLevelDecorator('read')
343 def changeset_raw(self, revision):
342 def changeset_raw(self, revision):
344 return self._index(revision, method='raw')
343 return self._index(revision, method='raw')
345
344
346 @LoginRequired()
345 @LoginRequired()
347 @HasRepoPermissionLevelDecorator('read')
346 @HasRepoPermissionLevelDecorator('read')
348 def changeset_patch(self, revision):
347 def changeset_patch(self, revision):
349 return self._index(revision, method='patch')
348 return self._index(revision, method='patch')
350
349
351 @LoginRequired()
350 @LoginRequired()
352 @HasRepoPermissionLevelDecorator('read')
351 @HasRepoPermissionLevelDecorator('read')
353 def changeset_download(self, revision):
352 def changeset_download(self, revision):
354 return self._index(revision, method='download')
353 return self._index(revision, method='download')
355
354
356 @LoginRequired()
355 @LoginRequired()
357 @NotAnonymous()
356 @NotAnonymous()
358 @HasRepoPermissionLevelDecorator('read')
357 @HasRepoPermissionLevelDecorator('read')
359 @jsonify
358 @jsonify
360 def comment(self, repo_name, revision):
359 def comment(self, repo_name, revision):
361 assert request.environ.get('HTTP_X_PARTIAL_XHR')
360 assert request.environ.get('HTTP_X_PARTIAL_XHR')
362
361
363 status = request.POST.get('changeset_status')
362 status = request.POST.get('changeset_status')
364 text = request.POST.get('text', '').strip()
363 text = request.POST.get('text', '').strip()
365
364
366 c.comment = create_comment(
365 c.comment = create_comment(
367 text,
366 text,
368 status,
367 status,
369 revision=revision,
368 revision=revision,
370 f_path=request.POST.get('f_path'),
369 f_path=request.POST.get('f_path'),
371 line_no=request.POST.get('line'),
370 line_no=request.POST.get('line'),
372 )
371 )
373
372
374 # get status if set !
373 # get status if set !
375 if status:
374 if status:
376 # if latest status was from pull request and it's closed
375 # if latest status was from pull request and it's closed
377 # disallow changing status ! RLY?
376 # disallow changing status ! RLY?
378 try:
377 try:
379 ChangesetStatusModel().set_status(
378 ChangesetStatusModel().set_status(
380 c.db_repo.repo_id,
379 c.db_repo.repo_id,
381 status,
380 status,
382 request.authuser.user_id,
381 request.authuser.user_id,
383 c.comment,
382 c.comment,
384 revision=revision,
383 revision=revision,
385 dont_allow_on_closed_pull_request=True,
384 dont_allow_on_closed_pull_request=True,
386 )
385 )
387 except StatusChangeOnClosedPullRequestError:
386 except StatusChangeOnClosedPullRequestError:
388 log.debug('cannot change status on %s with closed pull request', revision)
387 log.debug('cannot change status on %s with closed pull request', revision)
389 raise HTTPBadRequest()
388 raise HTTPBadRequest()
390
389
391 action_logger(request.authuser,
390 action_logger(request.authuser,
392 'user_commented_revision:%s' % revision,
391 'user_commented_revision:%s' % revision,
393 c.db_repo, request.ip_addr)
392 c.db_repo, request.ip_addr)
394
393
395 Session().commit()
394 Session().commit()
396
395
397 data = {
396 data = {
398 'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))),
397 'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))),
399 }
398 }
400 if c.comment is not None:
399 if c.comment is not None:
401 data.update(c.comment.get_dict())
400 data.update(c.comment.get_dict())
402 data.update({'rendered_text':
401 data.update({'rendered_text':
403 render('changeset/changeset_comment_block.html')})
402 render('changeset/changeset_comment_block.html')})
404
403
405 return data
404 return data
406
405
407 @LoginRequired()
406 @LoginRequired()
408 @NotAnonymous()
407 @NotAnonymous()
409 @HasRepoPermissionLevelDecorator('read')
408 @HasRepoPermissionLevelDecorator('read')
410 @jsonify
409 @jsonify
411 def delete_comment(self, repo_name, comment_id):
410 def delete_comment(self, repo_name, comment_id):
412 co = ChangesetComment.get_or_404(comment_id)
411 co = ChangesetComment.get_or_404(comment_id)
413 if co.repo.repo_name != repo_name:
412 if co.repo.repo_name != repo_name:
414 raise HTTPNotFound()
413 raise HTTPNotFound()
415 owner = co.author_id == request.authuser.user_id
414 owner = co.author_id == request.authuser.user_id
416 repo_admin = h.HasRepoPermissionLevel('admin')(repo_name)
415 repo_admin = h.HasRepoPermissionLevel('admin')(repo_name)
417 if h.HasPermissionAny('hg.admin')() or repo_admin or owner:
416 if h.HasPermissionAny('hg.admin')() or repo_admin or owner:
418 ChangesetCommentsModel().delete(comment=co)
417 ChangesetCommentsModel().delete(comment=co)
419 Session().commit()
418 Session().commit()
420 return True
419 return True
421 else:
420 else:
422 raise HTTPForbidden()
421 raise HTTPForbidden()
423
422
424 @LoginRequired()
423 @LoginRequired()
425 @HasRepoPermissionLevelDecorator('read')
424 @HasRepoPermissionLevelDecorator('read')
426 @jsonify
425 @jsonify
427 def changeset_info(self, repo_name, revision):
426 def changeset_info(self, repo_name, revision):
428 if request.is_xhr:
427 if request.is_xhr:
429 try:
428 try:
430 return c.db_repo_scm_instance.get_changeset(revision)
429 return c.db_repo_scm_instance.get_changeset(revision)
431 except ChangesetDoesNotExistError as e:
430 except ChangesetDoesNotExistError as e:
432 return EmptyChangeset(message=str(e))
431 return EmptyChangeset(message=str(e))
433 else:
432 else:
434 raise HTTPBadRequest()
433 raise HTTPBadRequest()
435
434
436 @LoginRequired()
435 @LoginRequired()
437 @HasRepoPermissionLevelDecorator('read')
436 @HasRepoPermissionLevelDecorator('read')
438 @jsonify
437 @jsonify
439 def changeset_children(self, repo_name, revision):
438 def changeset_children(self, repo_name, revision):
440 if request.is_xhr:
439 if request.is_xhr:
441 changeset = c.db_repo_scm_instance.get_changeset(revision)
440 changeset = c.db_repo_scm_instance.get_changeset(revision)
442 result = {"results": []}
441 result = {"results": []}
443 if changeset.children:
442 if changeset.children:
444 result = {"results": changeset.children}
443 result = {"results": changeset.children}
445 return result
444 return result
446 else:
445 else:
447 raise HTTPBadRequest()
446 raise HTTPBadRequest()
448
447
449 @LoginRequired()
448 @LoginRequired()
450 @HasRepoPermissionLevelDecorator('read')
449 @HasRepoPermissionLevelDecorator('read')
451 @jsonify
450 @jsonify
452 def changeset_parents(self, repo_name, revision):
451 def changeset_parents(self, repo_name, revision):
453 if request.is_xhr:
452 if request.is_xhr:
454 changeset = c.db_repo_scm_instance.get_changeset(revision)
453 changeset = c.db_repo_scm_instance.get_changeset(revision)
455 result = {"results": []}
454 result = {"results": []}
456 if changeset.parents:
455 if changeset.parents:
457 result = {"results": changeset.parents}
456 result = {"results": changeset.parents}
458 return result
457 return result
459 else:
458 else:
460 raise HTTPBadRequest()
459 raise HTTPBadRequest()
@@ -1,294 +1,293 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 # This program is free software: you can redistribute it and/or modify
2 # This program is free software: you can redistribute it and/or modify
3 # it under the terms of the GNU General Public License as published by
3 # it under the terms of the GNU General Public License as published by
4 # the Free Software Foundation, either version 3 of the License, or
4 # the Free Software Foundation, either version 3 of the License, or
5 # (at your option) any later version.
5 # (at your option) any later version.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU General Public License
12 # You should have received a copy of the GNU General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 """
14 """
15 kallithea.controllers.compare
15 kallithea.controllers.compare
16 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
16 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
17
17
18 compare controller showing differences between two
18 compare controller showing differences between two
19 repos, branches, bookmarks or tips
19 repos, branches, bookmarks or tips
20
20
21 This file was forked by the Kallithea project in July 2014.
21 This file was forked by the Kallithea project in July 2014.
22 Original author and date, and relevant copyright and licensing information is below:
22 Original author and date, and relevant copyright and licensing information is below:
23 :created_on: May 6, 2012
23 :created_on: May 6, 2012
24 :author: marcink
24 :author: marcink
25 :copyright: (c) 2013 RhodeCode GmbH, and others.
25 :copyright: (c) 2013 RhodeCode GmbH, and others.
26 :license: GPLv3, see LICENSE.md for more details.
26 :license: GPLv3, see LICENSE.md for more details.
27 """
27 """
28
28
29
29
30 import logging
30 import logging
31 import re
31 import re
32
32
33 from tg import request, tmpl_context as c
33 from tg import request, tmpl_context as c
34 from tg.i18n import ugettext as _
34 from tg.i18n import ugettext as _
35 from webob.exc import HTTPFound, HTTPBadRequest, HTTPNotFound
35 from webob.exc import HTTPFound, HTTPBadRequest, HTTPNotFound
36
36
37 from kallithea.config.routing import url
37 from kallithea.config.routing import url
38 from kallithea.lib.utils2 import safe_str, safe_int
38 from kallithea.lib.utils2 import safe_str, safe_int
39 from kallithea.lib.vcs.utils.hgcompat import unionrepo
39 from kallithea.lib.vcs.utils.hgcompat import unionrepo
40 from kallithea.lib import helpers as h
40 from kallithea.lib import helpers as h
41 from kallithea.lib.base import BaseRepoController, render
41 from kallithea.lib.base import BaseRepoController, render
42 from kallithea.lib.auth import LoginRequired, HasRepoPermissionLevelDecorator
42 from kallithea.lib.auth import LoginRequired, HasRepoPermissionLevelDecorator
43 from kallithea.lib import diffs
43 from kallithea.lib import diffs
44 from kallithea.model.db import Repository
44 from kallithea.model.db import Repository
45 from kallithea.lib.diffs import LimitedDiffContainer
45 from kallithea.lib.diffs import LimitedDiffContainer
46 from kallithea.controllers.changeset import _ignorews_url, _context_url
46 from kallithea.controllers.changeset import _ignorews_url, _context_url
47 from kallithea.lib.graphmod import graph_data
47 from kallithea.lib.graphmod import graph_data
48
48
49 log = logging.getLogger(__name__)
49 log = logging.getLogger(__name__)
50
50
51
51
52 class CompareController(BaseRepoController):
52 class CompareController(BaseRepoController):
53
53
54 def _before(self, *args, **kwargs):
54 def _before(self, *args, **kwargs):
55 super(CompareController, self)._before(*args, **kwargs)
55 super(CompareController, self)._before(*args, **kwargs)
56
56
57 # The base repository has already been retrieved.
57 # The base repository has already been retrieved.
58 c.a_repo = c.db_repo
58 c.a_repo = c.db_repo
59
59
60 # Retrieve the "changeset" repository (default: same as base).
60 # Retrieve the "changeset" repository (default: same as base).
61 other_repo = request.GET.get('other_repo', None)
61 other_repo = request.GET.get('other_repo', None)
62 if other_repo is None:
62 if other_repo is None:
63 c.cs_repo = c.a_repo
63 c.cs_repo = c.a_repo
64 else:
64 else:
65 c.cs_repo = Repository.get_by_repo_name(other_repo)
65 c.cs_repo = Repository.get_by_repo_name(other_repo)
66 if c.cs_repo is None:
66 if c.cs_repo is None:
67 msg = _('Could not find other repository %s') % other_repo
67 msg = _('Could not find other repository %s') % other_repo
68 h.flash(msg, category='error')
68 h.flash(msg, category='error')
69 raise HTTPFound(location=url('compare_home', repo_name=c.a_repo.repo_name))
69 raise HTTPFound(location=url('compare_home', repo_name=c.a_repo.repo_name))
70
70
71 # Verify that it's even possible to compare these two repositories.
71 # Verify that it's even possible to compare these two repositories.
72 if c.a_repo.scm_instance.alias != c.cs_repo.scm_instance.alias:
72 if c.a_repo.scm_instance.alias != c.cs_repo.scm_instance.alias:
73 msg = _('Cannot compare repositories of different types')
73 msg = _('Cannot compare repositories of different types')
74 h.flash(msg, category='error')
74 h.flash(msg, category='error')
75 raise HTTPFound(location=url('compare_home', repo_name=c.a_repo.repo_name))
75 raise HTTPFound(location=url('compare_home', repo_name=c.a_repo.repo_name))
76
76
77 @staticmethod
77 @staticmethod
78 def _get_changesets(alias, org_repo, org_rev, other_repo, other_rev):
78 def _get_changesets(alias, org_repo, org_rev, other_repo, other_rev):
79 """
79 """
80 Returns lists of changesets that can be merged from org_repo@org_rev
80 Returns lists of changesets that can be merged from org_repo@org_rev
81 to other_repo@other_rev
81 to other_repo@other_rev
82 ... and the other way
82 ... and the other way
83 ... and the ancestors that would be used for merge
83 ... and the ancestors that would be used for merge
84
84
85 :param org_repo: repo object, that is most likely the original repo we forked from
85 :param org_repo: repo object, that is most likely the original repo we forked from
86 :param org_rev: the revision we want our compare to be made
86 :param org_rev: the revision we want our compare to be made
87 :param other_repo: repo object, most likely the fork of org_repo. It has
87 :param other_repo: repo object, most likely the fork of org_repo. It has
88 all changesets that we need to obtain
88 all changesets that we need to obtain
89 :param other_rev: revision we want out compare to be made on other_repo
89 :param other_rev: revision we want out compare to be made on other_repo
90 """
90 """
91 ancestors = None
91 ancestors = None
92 if org_rev == other_rev:
92 if org_rev == other_rev:
93 org_changesets = []
93 org_changesets = []
94 other_changesets = []
94 other_changesets = []
95
95
96 elif alias == 'hg':
96 elif alias == 'hg':
97 # case two independent repos
97 # case two independent repos
98 if org_repo != other_repo:
98 if org_repo != other_repo:
99 hgrepo = unionrepo.unionrepository(other_repo.baseui,
99 hgrepo = unionrepo.unionrepository(other_repo.baseui,
100 other_repo.path,
100 other_repo.path,
101 org_repo.path)
101 org_repo.path)
102 # all ancestors of other_rev will be in other_repo and
102 # all ancestors of other_rev will be in other_repo and
103 # rev numbers from hgrepo can be used in other_repo - org_rev ancestors cannot
103 # rev numbers from hgrepo can be used in other_repo - org_rev ancestors cannot
104
104
105 # no remote compare do it on the same repository
105 # no remote compare do it on the same repository
106 else:
106 else:
107 hgrepo = other_repo._repo
107 hgrepo = other_repo._repo
108
108
109 ancestors = [hgrepo[ancestor].hex() for ancestor in
109 ancestors = [hgrepo[ancestor].hex() for ancestor in
110 hgrepo.revs("id(%s) & ::id(%s)", other_rev, org_rev)]
110 hgrepo.revs("id(%s) & ::id(%s)", other_rev, org_rev)]
111 if ancestors:
111 if ancestors:
112 log.debug("shortcut found: %s is already an ancestor of %s", other_rev, org_rev)
112 log.debug("shortcut found: %s is already an ancestor of %s", other_rev, org_rev)
113 else:
113 else:
114 log.debug("no shortcut found: %s is not an ancestor of %s", other_rev, org_rev)
114 log.debug("no shortcut found: %s is not an ancestor of %s", other_rev, org_rev)
115 ancestors = [hgrepo[ancestor].hex() for ancestor in
115 ancestors = [hgrepo[ancestor].hex() for ancestor in
116 hgrepo.revs("heads(::id(%s) & ::id(%s))", org_rev, other_rev)] # FIXME: expensive!
116 hgrepo.revs("heads(::id(%s) & ::id(%s))", org_rev, other_rev)] # FIXME: expensive!
117
117
118 other_revs = hgrepo.revs("ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)",
118 other_revs = hgrepo.revs("ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)",
119 other_rev, org_rev, org_rev)
119 other_rev, org_rev, org_rev)
120 other_changesets = [other_repo.get_changeset(rev) for rev in other_revs]
120 other_changesets = [other_repo.get_changeset(rev) for rev in other_revs]
121 org_revs = hgrepo.revs("ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)",
121 org_revs = hgrepo.revs("ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)",
122 org_rev, other_rev, other_rev)
122 org_rev, other_rev, other_rev)
123 org_changesets = [org_repo.get_changeset(hgrepo[rev].hex()) for rev in org_revs]
123 org_changesets = [org_repo.get_changeset(hgrepo[rev].hex()) for rev in org_revs]
124
124
125 elif alias == 'git':
125 elif alias == 'git':
126 if org_repo != other_repo:
126 if org_repo != other_repo:
127 from dulwich.repo import Repo
127 from dulwich.repo import Repo
128 from dulwich.client import SubprocessGitClient
128 from dulwich.client import SubprocessGitClient
129
129
130 gitrepo = Repo(org_repo.path)
130 gitrepo = Repo(org_repo.path)
131 SubprocessGitClient(thin_packs=False).fetch(safe_str(other_repo.path), gitrepo)
131 SubprocessGitClient(thin_packs=False).fetch(safe_str(other_repo.path), gitrepo)
132
132
133 gitrepo_remote = Repo(other_repo.path)
133 gitrepo_remote = Repo(other_repo.path)
134 SubprocessGitClient(thin_packs=False).fetch(safe_str(org_repo.path), gitrepo_remote)
134 SubprocessGitClient(thin_packs=False).fetch(safe_str(org_repo.path), gitrepo_remote)
135
135
136 revs = [
136 revs = [
137 x.commit.id
137 x.commit.id
138 for x in gitrepo_remote.get_walker(include=[other_rev],
138 for x in gitrepo_remote.get_walker(include=[other_rev],
139 exclude=[org_rev])
139 exclude=[org_rev])
140 ]
140 ]
141 other_changesets = [other_repo.get_changeset(rev) for rev in reversed(revs)]
141 other_changesets = [other_repo.get_changeset(rev) for rev in reversed(revs)]
142 if other_changesets:
142 if other_changesets:
143 ancestors = [other_changesets[0].parents[0].raw_id]
143 ancestors = [other_changesets[0].parents[0].raw_id]
144 else:
144 else:
145 # no changesets from other repo, ancestor is the other_rev
145 # no changesets from other repo, ancestor is the other_rev
146 ancestors = [other_rev]
146 ancestors = [other_rev]
147
147
148 gitrepo.close()
148 gitrepo.close()
149 gitrepo_remote.close()
149 gitrepo_remote.close()
150
150
151 else:
151 else:
152 so, se = org_repo.run_git_command(
152 so, se = org_repo.run_git_command(
153 ['log', '--reverse', '--pretty=format:%H',
153 ['log', '--reverse', '--pretty=format:%H',
154 '-s', '%s..%s' % (org_rev, other_rev)]
154 '-s', '%s..%s' % (org_rev, other_rev)]
155 )
155 )
156 other_changesets = [org_repo.get_changeset(cs)
156 other_changesets = [org_repo.get_changeset(cs)
157 for cs in re.findall(r'[0-9a-fA-F]{40}', so)]
157 for cs in re.findall(r'[0-9a-fA-F]{40}', so)]
158 so, se = org_repo.run_git_command(
158 so, se = org_repo.run_git_command(
159 ['merge-base', org_rev, other_rev]
159 ['merge-base', org_rev, other_rev]
160 )
160 )
161 ancestors = [re.findall(r'[0-9a-fA-F]{40}', so)[0]]
161 ancestors = [re.findall(r'[0-9a-fA-F]{40}', so)[0]]
162 org_changesets = []
162 org_changesets = []
163
163
164 else:
164 else:
165 raise Exception('Bad alias only git and hg is allowed')
165 raise Exception('Bad alias only git and hg is allowed')
166
166
167 return other_changesets, org_changesets, ancestors
167 return other_changesets, org_changesets, ancestors
168
168
169 @LoginRequired()
169 @LoginRequired()
170 @HasRepoPermissionLevelDecorator('read')
170 @HasRepoPermissionLevelDecorator('read')
171 def index(self, repo_name):
171 def index(self, repo_name):
172 c.compare_home = True
172 c.compare_home = True
173 c.a_ref_name = c.cs_ref_name = None
173 c.a_ref_name = c.cs_ref_name = None
174 return render('compare/compare_diff.html')
174 return render('compare/compare_diff.html')
175
175
176 @LoginRequired()
176 @LoginRequired()
177 @HasRepoPermissionLevelDecorator('read')
177 @HasRepoPermissionLevelDecorator('read')
178 def compare(self, repo_name, org_ref_type, org_ref_name, other_ref_type, other_ref_name):
178 def compare(self, repo_name, org_ref_type, org_ref_name, other_ref_type, other_ref_name):
179 org_ref_name = org_ref_name.strip()
179 org_ref_name = org_ref_name.strip()
180 other_ref_name = other_ref_name.strip()
180 other_ref_name = other_ref_name.strip()
181
181
182 # If merge is True:
182 # If merge is True:
183 # Show what org would get if merged with other:
183 # Show what org would get if merged with other:
184 # List changesets that are ancestors of other but not of org.
184 # List changesets that are ancestors of other but not of org.
185 # New changesets in org is thus ignored.
185 # New changesets in org is thus ignored.
186 # Diff will be from common ancestor, and merges of org to other will thus be ignored.
186 # Diff will be from common ancestor, and merges of org to other will thus be ignored.
187 # If merge is False:
187 # If merge is False:
188 # Make a raw diff from org to other, no matter if related or not.
188 # Make a raw diff from org to other, no matter if related or not.
189 # Changesets in one and not in the other will be ignored
189 # Changesets in one and not in the other will be ignored
190 merge = bool(request.GET.get('merge'))
190 merge = bool(request.GET.get('merge'))
191 # fulldiff disables cut_off_limit
191 # fulldiff disables cut_off_limit
192 fulldiff = request.GET.get('fulldiff')
192 fulldiff = request.GET.get('fulldiff')
193 # partial uses compare_cs.html template directly
193 # partial uses compare_cs.html template directly
194 partial = request.environ.get('HTTP_X_PARTIAL_XHR')
194 partial = request.environ.get('HTTP_X_PARTIAL_XHR')
195 # is_ajax_preview puts hidden input field with changeset revisions
195 # is_ajax_preview puts hidden input field with changeset revisions
196 c.is_ajax_preview = partial and request.GET.get('is_ajax_preview')
196 c.is_ajax_preview = partial and request.GET.get('is_ajax_preview')
197 # swap url for compare_diff page - never partial and never is_ajax_preview
197 # swap url for compare_diff page - never partial and never is_ajax_preview
198 c.swap_url = h.url('compare_url',
198 c.swap_url = h.url('compare_url',
199 repo_name=c.cs_repo.repo_name,
199 repo_name=c.cs_repo.repo_name,
200 org_ref_type=other_ref_type, org_ref_name=other_ref_name,
200 org_ref_type=other_ref_type, org_ref_name=other_ref_name,
201 other_repo=c.a_repo.repo_name,
201 other_repo=c.a_repo.repo_name,
202 other_ref_type=org_ref_type, other_ref_name=org_ref_name,
202 other_ref_type=org_ref_type, other_ref_name=org_ref_name,
203 merge=merge or '')
203 merge=merge or '')
204
204
205 # set callbacks for generating markup for icons
205 # set callbacks for generating markup for icons
206 c.ignorews_url = _ignorews_url
206 c.ignorews_url = _ignorews_url
207 c.context_url = _context_url
207 c.context_url = _context_url
208 ignore_whitespace = request.GET.get('ignorews') == '1'
208 ignore_whitespace = request.GET.get('ignorews') == '1'
209 line_context = safe_int(request.GET.get('context'), 3)
209 line_context = safe_int(request.GET.get('context'), 3)
210
210
211 c.a_rev = self._get_ref_rev(c.a_repo, org_ref_type, org_ref_name,
211 c.a_rev = self._get_ref_rev(c.a_repo, org_ref_type, org_ref_name,
212 returnempty=True)
212 returnempty=True)
213 c.cs_rev = self._get_ref_rev(c.cs_repo, other_ref_type, other_ref_name)
213 c.cs_rev = self._get_ref_rev(c.cs_repo, other_ref_type, other_ref_name)
214
214
215 c.compare_home = False
215 c.compare_home = False
216 c.a_ref_name = org_ref_name
216 c.a_ref_name = org_ref_name
217 c.a_ref_type = org_ref_type
217 c.a_ref_type = org_ref_type
218 c.cs_ref_name = other_ref_name
218 c.cs_ref_name = other_ref_name
219 c.cs_ref_type = other_ref_type
219 c.cs_ref_type = other_ref_type
220
220
221 c.cs_ranges, c.cs_ranges_org, c.ancestors = self._get_changesets(
221 c.cs_ranges, c.cs_ranges_org, c.ancestors = self._get_changesets(
222 c.a_repo.scm_instance.alias, c.a_repo.scm_instance, c.a_rev,
222 c.a_repo.scm_instance.alias, c.a_repo.scm_instance, c.a_rev,
223 c.cs_repo.scm_instance, c.cs_rev)
223 c.cs_repo.scm_instance, c.cs_rev)
224 raw_ids = [x.raw_id for x in c.cs_ranges]
224 raw_ids = [x.raw_id for x in c.cs_ranges]
225 c.cs_comments = c.cs_repo.get_comments(raw_ids)
225 c.cs_comments = c.cs_repo.get_comments(raw_ids)
226 c.cs_statuses = c.cs_repo.statuses(raw_ids)
226 c.cs_statuses = c.cs_repo.statuses(raw_ids)
227
227
228 revs = [ctx.revision for ctx in reversed(c.cs_ranges)]
228 revs = [ctx.revision for ctx in reversed(c.cs_ranges)]
229 c.jsdata = graph_data(c.cs_repo.scm_instance, revs)
229 c.jsdata = graph_data(c.cs_repo.scm_instance, revs)
230
230
231 if partial:
231 if partial:
232 return render('compare/compare_cs.html')
232 return render('compare/compare_cs.html')
233
233
234 org_repo = c.a_repo
234 org_repo = c.a_repo
235 other_repo = c.cs_repo
235 other_repo = c.cs_repo
236
236
237 if merge:
237 if merge:
238 rev1 = msg = None
238 rev1 = msg = None
239 if not c.cs_ranges:
239 if not c.cs_ranges:
240 msg = _('Cannot show empty diff')
240 msg = _('Cannot show empty diff')
241 elif not c.ancestors:
241 elif not c.ancestors:
242 msg = _('No ancestor found for merge diff')
242 msg = _('No ancestor found for merge diff')
243 elif len(c.ancestors) == 1:
243 elif len(c.ancestors) == 1:
244 rev1 = c.ancestors[0]
244 rev1 = c.ancestors[0]
245 else:
245 else:
246 msg = _('Multiple merge ancestors found for merge compare')
246 msg = _('Multiple merge ancestors found for merge compare')
247 if rev1 is None:
247 if rev1 is None:
248 h.flash(msg, category='error')
248 h.flash(msg, category='error')
249 log.error(msg)
249 log.error(msg)
250 raise HTTPNotFound
250 raise HTTPNotFound
251
251
252 # case we want a simple diff without incoming changesets,
252 # case we want a simple diff without incoming changesets,
253 # previewing what will be merged.
253 # previewing what will be merged.
254 # Make the diff on the other repo (which is known to have other_rev)
254 # Make the diff on the other repo (which is known to have other_rev)
255 log.debug('Using ancestor %s as rev1 instead of %s',
255 log.debug('Using ancestor %s as rev1 instead of %s',
256 rev1, c.a_rev)
256 rev1, c.a_rev)
257 org_repo = other_repo
257 org_repo = other_repo
258 else: # comparing tips, not necessarily linearly related
258 else: # comparing tips, not necessarily linearly related
259 if org_repo != other_repo:
259 if org_repo != other_repo:
260 # TODO: we could do this by using hg unionrepo
260 # TODO: we could do this by using hg unionrepo
261 log.error('cannot compare across repos %s and %s', org_repo, other_repo)
261 log.error('cannot compare across repos %s and %s', org_repo, other_repo)
262 h.flash(_('Cannot compare repositories without using common ancestor'), category='error')
262 h.flash(_('Cannot compare repositories without using common ancestor'), category='error')
263 raise HTTPBadRequest
263 raise HTTPBadRequest
264 rev1 = c.a_rev
264 rev1 = c.a_rev
265
265
266 diff_limit = None if fulldiff else self.cut_off_limit
266 diff_limit = None if fulldiff else self.cut_off_limit
267
267
268 log.debug('running diff between %s and %s in %s',
268 log.debug('running diff between %s and %s in %s',
269 rev1, c.cs_rev, org_repo.scm_instance.path)
269 rev1, c.cs_rev, org_repo.scm_instance.path)
270 txtdiff = org_repo.scm_instance.get_diff(rev1=rev1, rev2=c.cs_rev,
270 raw_diff = org_repo.scm_instance.get_diff(rev1=rev1, rev2=c.cs_rev,
271 ignore_whitespace=ignore_whitespace,
271 ignore_whitespace=ignore_whitespace,
272 context=line_context)
272 context=line_context)
273
273
274 diff_processor = diffs.DiffProcessor(txtdiff or '', diff_limit=diff_limit)
274 diff_processor = diffs.DiffProcessor(raw_diff or '', diff_limit=diff_limit)
275 _parsed = diff_processor.prepare()
276
275
277 c.limited_diff = False
276 c.limited_diff = False
278 if isinstance(_parsed, LimitedDiffContainer):
277 if isinstance(diff_processor.parsed, LimitedDiffContainer):
279 c.limited_diff = True
278 c.limited_diff = True
280
279
281 c.file_diff_data = []
280 c.file_diff_data = []
282 c.lines_added = 0
281 c.lines_added = 0
283 c.lines_deleted = 0
282 c.lines_deleted = 0
284 for f in _parsed:
283 for f in diff_processor.parsed:
285 st = f['stats']
284 st = f['stats']
286 c.lines_added += st['added']
285 c.lines_added += st['added']
287 c.lines_deleted += st['deleted']
286 c.lines_deleted += st['deleted']
288 filename = f['filename']
287 filename = f['filename']
289 fid = h.FID('', filename)
288 fid = h.FID('', filename)
290 diff = diff_processor.as_html(enable_comments=False,
289 diff = diff_processor.as_html(enable_comments=False,
291 parsed_lines=[f])
290 parsed_lines=[f])
292 c.file_diff_data.append((fid, None, f['operation'], f['old_filename'], filename, diff, st))
291 c.file_diff_data.append((fid, None, f['operation'], f['old_filename'], filename, diff, st))
293
292
294 return render('compare/compare_diff.html')
293 return render('compare/compare_diff.html')
@@ -1,170 +1,170 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 # This program is free software: you can redistribute it and/or modify
2 # This program is free software: you can redistribute it and/or modify
3 # it under the terms of the GNU General Public License as published by
3 # it under the terms of the GNU General Public License as published by
4 # the Free Software Foundation, either version 3 of the License, or
4 # the Free Software Foundation, either version 3 of the License, or
5 # (at your option) any later version.
5 # (at your option) any later version.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU General Public License
12 # You should have received a copy of the GNU General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 """
14 """
15 kallithea.controllers.feed
15 kallithea.controllers.feed
16 ~~~~~~~~~~~~~~~~~~~~~~~~~~
16 ~~~~~~~~~~~~~~~~~~~~~~~~~~
17
17
18 Feed controller for Kallithea
18 Feed controller for Kallithea
19
19
20 This file was forked by the Kallithea project in July 2014.
20 This file was forked by the Kallithea project in July 2014.
21 Original author and date, and relevant copyright and licensing information is below:
21 Original author and date, and relevant copyright and licensing information is below:
22 :created_on: Apr 23, 2010
22 :created_on: Apr 23, 2010
23 :author: marcink
23 :author: marcink
24 :copyright: (c) 2013 RhodeCode GmbH, and others.
24 :copyright: (c) 2013 RhodeCode GmbH, and others.
25 :license: GPLv3, see LICENSE.md for more details.
25 :license: GPLv3, see LICENSE.md for more details.
26 """
26 """
27
27
28
28
29 import logging
29 import logging
30
30
31 from tg import response, tmpl_context as c
31 from tg import response, tmpl_context as c
32 from tg.i18n import ugettext as _
32 from tg.i18n import ugettext as _
33
33
34 from beaker.cache import cache_region, region_invalidate
34 from beaker.cache import cache_region, region_invalidate
35 from webhelpers.feedgenerator import Atom1Feed, Rss201rev2Feed
35 from webhelpers.feedgenerator import Atom1Feed, Rss201rev2Feed
36
36
37 from kallithea import CONFIG
37 from kallithea import CONFIG
38 from kallithea.lib import helpers as h
38 from kallithea.lib import helpers as h
39 from kallithea.lib.auth import LoginRequired, HasRepoPermissionLevelDecorator
39 from kallithea.lib.auth import LoginRequired, HasRepoPermissionLevelDecorator
40 from kallithea.lib.base import BaseRepoController
40 from kallithea.lib.base import BaseRepoController
41 from kallithea.lib.diffs import DiffProcessor, LimitedDiffContainer
41 from kallithea.lib.diffs import DiffProcessor, LimitedDiffContainer
42 from kallithea.model.db import CacheInvalidation
42 from kallithea.model.db import CacheInvalidation
43 from kallithea.lib.utils2 import safe_int, str2bool, safe_unicode
43 from kallithea.lib.utils2 import safe_int, str2bool, safe_unicode
44
44
45 log = logging.getLogger(__name__)
45 log = logging.getLogger(__name__)
46
46
47
47
48 language = 'en-us'
48 language = 'en-us'
49 ttl = "5"
49 ttl = "5"
50
50
51
51
52 class FeedController(BaseRepoController):
52 class FeedController(BaseRepoController):
53
53
54 @LoginRequired(api_access=True)
54 @LoginRequired(api_access=True)
55 @HasRepoPermissionLevelDecorator('read')
55 @HasRepoPermissionLevelDecorator('read')
56 def _before(self, *args, **kwargs):
56 def _before(self, *args, **kwargs):
57 super(FeedController, self)._before(*args, **kwargs)
57 super(FeedController, self)._before(*args, **kwargs)
58
58
59 def _get_title(self, cs):
59 def _get_title(self, cs):
60 return h.shorter(cs.message, 160)
60 return h.shorter(cs.message, 160)
61
61
62 def __get_desc(self, cs):
62 def __get_desc(self, cs):
63 desc_msg = [(_('%s committed on %s')
63 desc_msg = [(_('%s committed on %s')
64 % (h.person(cs.author), h.fmt_date(cs.date))) + '<br/>']
64 % (h.person(cs.author), h.fmt_date(cs.date))) + '<br/>']
65 # branches, tags, bookmarks
65 # branches, tags, bookmarks
66 if cs.branch:
66 if cs.branch:
67 desc_msg.append('branch: %s<br/>' % cs.branch)
67 desc_msg.append('branch: %s<br/>' % cs.branch)
68 for book in cs.bookmarks:
68 for book in cs.bookmarks:
69 desc_msg.append('bookmark: %s<br/>' % book)
69 desc_msg.append('bookmark: %s<br/>' % book)
70 for tag in cs.tags:
70 for tag in cs.tags:
71 desc_msg.append('tag: %s<br/>' % tag)
71 desc_msg.append('tag: %s<br/>' % tag)
72
72
73 changes = []
73 changes = []
74 diff_limit = safe_int(CONFIG.get('rss_cut_off_limit', 32 * 1024))
74 diff_limit = safe_int(CONFIG.get('rss_cut_off_limit', 32 * 1024))
75 raw_diff = cs.diff()
75 raw_diff = cs.diff()
76 diff_processor = DiffProcessor(raw_diff,
76 diff_processor = DiffProcessor(raw_diff,
77 diff_limit=diff_limit)
77 diff_limit=diff_limit,
78 _parsed = diff_processor.prepare(inline_diff=False)
78 inline_diff=False)
79 limited_diff = False
79 limited_diff = False
80 if isinstance(_parsed, LimitedDiffContainer):
80 if isinstance(diff_processor.parsed, LimitedDiffContainer):
81 limited_diff = True
81 limited_diff = True
82
82
83 for st in _parsed:
83 for st in diff_processor.parsed:
84 st.update({'added': st['stats']['added'],
84 st.update({'added': st['stats']['added'],
85 'removed': st['stats']['deleted']})
85 'removed': st['stats']['deleted']})
86 changes.append('\n %(operation)s %(filename)s '
86 changes.append('\n %(operation)s %(filename)s '
87 '(%(added)s lines added, %(removed)s lines removed)'
87 '(%(added)s lines added, %(removed)s lines removed)'
88 % st)
88 % st)
89 if limited_diff:
89 if limited_diff:
90 changes = changes + ['\n ' +
90 changes = changes + ['\n ' +
91 _('Changeset was too big and was cut off...')]
91 _('Changeset was too big and was cut off...')]
92
92
93 # rev link
93 # rev link
94 _url = h.canonical_url('changeset_home', repo_name=c.db_repo.repo_name,
94 _url = h.canonical_url('changeset_home', repo_name=c.db_repo.repo_name,
95 revision=cs.raw_id)
95 revision=cs.raw_id)
96 desc_msg.append('changeset: <a href="%s">%s</a>' % (_url, cs.raw_id[:8]))
96 desc_msg.append('changeset: <a href="%s">%s</a>' % (_url, cs.raw_id[:8]))
97
97
98 desc_msg.append('<pre>')
98 desc_msg.append('<pre>')
99 desc_msg.append(h.urlify_text(cs.message))
99 desc_msg.append(h.urlify_text(cs.message))
100 desc_msg.append('\n')
100 desc_msg.append('\n')
101 desc_msg.extend(changes)
101 desc_msg.extend(changes)
102 if str2bool(CONFIG.get('rss_include_diff', False)):
102 if str2bool(CONFIG.get('rss_include_diff', False)):
103 desc_msg.append('\n\n')
103 desc_msg.append('\n\n')
104 desc_msg.append(raw_diff)
104 desc_msg.append(raw_diff)
105 desc_msg.append('</pre>')
105 desc_msg.append('</pre>')
106 return map(safe_unicode, desc_msg)
106 return map(safe_unicode, desc_msg)
107
107
108 def atom(self, repo_name):
108 def atom(self, repo_name):
109 """Produce an atom-1.0 feed via feedgenerator module"""
109 """Produce an atom-1.0 feed via feedgenerator module"""
110
110
111 @cache_region('long_term', '_get_feed_from_cache')
111 @cache_region('long_term', '_get_feed_from_cache')
112 def _get_feed_from_cache(key, kind):
112 def _get_feed_from_cache(key, kind):
113 feed = Atom1Feed(
113 feed = Atom1Feed(
114 title=_('%s %s feed') % (c.site_name, repo_name),
114 title=_('%s %s feed') % (c.site_name, repo_name),
115 link=h.canonical_url('summary_home', repo_name=repo_name),
115 link=h.canonical_url('summary_home', repo_name=repo_name),
116 description=_('Changes on %s repository') % repo_name,
116 description=_('Changes on %s repository') % repo_name,
117 language=language,
117 language=language,
118 ttl=ttl
118 ttl=ttl
119 )
119 )
120
120
121 rss_items_per_page = safe_int(CONFIG.get('rss_items_per_page', 20))
121 rss_items_per_page = safe_int(CONFIG.get('rss_items_per_page', 20))
122 for cs in reversed(list(c.db_repo_scm_instance[-rss_items_per_page:])):
122 for cs in reversed(list(c.db_repo_scm_instance[-rss_items_per_page:])):
123 feed.add_item(title=self._get_title(cs),
123 feed.add_item(title=self._get_title(cs),
124 link=h.canonical_url('changeset_home', repo_name=repo_name,
124 link=h.canonical_url('changeset_home', repo_name=repo_name,
125 revision=cs.raw_id),
125 revision=cs.raw_id),
126 author_name=cs.author,
126 author_name=cs.author,
127 description=''.join(self.__get_desc(cs)),
127 description=''.join(self.__get_desc(cs)),
128 pubdate=cs.date,
128 pubdate=cs.date,
129 )
129 )
130
130
131 response.content_type = feed.mime_type
131 response.content_type = feed.mime_type
132 return feed.writeString('utf-8')
132 return feed.writeString('utf-8')
133
133
134 kind = 'ATOM'
134 kind = 'ATOM'
135 valid = CacheInvalidation.test_and_set_valid(repo_name, kind)
135 valid = CacheInvalidation.test_and_set_valid(repo_name, kind)
136 if not valid:
136 if not valid:
137 region_invalidate(_get_feed_from_cache, None, '_get_feed_from_cache', repo_name, kind)
137 region_invalidate(_get_feed_from_cache, None, '_get_feed_from_cache', repo_name, kind)
138 return _get_feed_from_cache(repo_name, kind)
138 return _get_feed_from_cache(repo_name, kind)
139
139
140 def rss(self, repo_name):
140 def rss(self, repo_name):
141 """Produce an rss2 feed via feedgenerator module"""
141 """Produce an rss2 feed via feedgenerator module"""
142
142
143 @cache_region('long_term', '_get_feed_from_cache')
143 @cache_region('long_term', '_get_feed_from_cache')
144 def _get_feed_from_cache(key, kind):
144 def _get_feed_from_cache(key, kind):
145 feed = Rss201rev2Feed(
145 feed = Rss201rev2Feed(
146 title=_('%s %s feed') % (c.site_name, repo_name),
146 title=_('%s %s feed') % (c.site_name, repo_name),
147 link=h.canonical_url('summary_home', repo_name=repo_name),
147 link=h.canonical_url('summary_home', repo_name=repo_name),
148 description=_('Changes on %s repository') % repo_name,
148 description=_('Changes on %s repository') % repo_name,
149 language=language,
149 language=language,
150 ttl=ttl
150 ttl=ttl
151 )
151 )
152
152
153 rss_items_per_page = safe_int(CONFIG.get('rss_items_per_page', 20))
153 rss_items_per_page = safe_int(CONFIG.get('rss_items_per_page', 20))
154 for cs in reversed(list(c.db_repo_scm_instance[-rss_items_per_page:])):
154 for cs in reversed(list(c.db_repo_scm_instance[-rss_items_per_page:])):
155 feed.add_item(title=self._get_title(cs),
155 feed.add_item(title=self._get_title(cs),
156 link=h.canonical_url('changeset_home', repo_name=repo_name,
156 link=h.canonical_url('changeset_home', repo_name=repo_name,
157 revision=cs.raw_id),
157 revision=cs.raw_id),
158 author_name=cs.author,
158 author_name=cs.author,
159 description=''.join(self.__get_desc(cs)),
159 description=''.join(self.__get_desc(cs)),
160 pubdate=cs.date,
160 pubdate=cs.date,
161 )
161 )
162
162
163 response.content_type = feed.mime_type
163 response.content_type = feed.mime_type
164 return feed.writeString('utf-8')
164 return feed.writeString('utf-8')
165
165
166 kind = 'RSS'
166 kind = 'RSS'
167 valid = CacheInvalidation.test_and_set_valid(repo_name, kind)
167 valid = CacheInvalidation.test_and_set_valid(repo_name, kind)
168 if not valid:
168 if not valid:
169 region_invalidate(_get_feed_from_cache, None, '_get_feed_from_cache', repo_name, kind)
169 region_invalidate(_get_feed_from_cache, None, '_get_feed_from_cache', repo_name, kind)
170 return _get_feed_from_cache(repo_name, kind)
170 return _get_feed_from_cache(repo_name, kind)
@@ -1,746 +1,745 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 # This program is free software: you can redistribute it and/or modify
2 # This program is free software: you can redistribute it and/or modify
3 # it under the terms of the GNU General Public License as published by
3 # it under the terms of the GNU General Public License as published by
4 # the Free Software Foundation, either version 3 of the License, or
4 # the Free Software Foundation, either version 3 of the License, or
5 # (at your option) any later version.
5 # (at your option) any later version.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU General Public License
12 # You should have received a copy of the GNU General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 """
14 """
15 kallithea.controllers.pullrequests
15 kallithea.controllers.pullrequests
16 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
16 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
17
17
18 pull requests controller for Kallithea for initializing pull requests
18 pull requests controller for Kallithea for initializing pull requests
19
19
20 This file was forked by the Kallithea project in July 2014.
20 This file was forked by the Kallithea project in July 2014.
21 Original author and date, and relevant copyright and licensing information is below:
21 Original author and date, and relevant copyright and licensing information is below:
22 :created_on: May 7, 2012
22 :created_on: May 7, 2012
23 :author: marcink
23 :author: marcink
24 :copyright: (c) 2013 RhodeCode GmbH, and others.
24 :copyright: (c) 2013 RhodeCode GmbH, and others.
25 :license: GPLv3, see LICENSE.md for more details.
25 :license: GPLv3, see LICENSE.md for more details.
26 """
26 """
27
27
28 import logging
28 import logging
29 import traceback
29 import traceback
30 import formencode
30 import formencode
31
31
32 from tg import request, tmpl_context as c
32 from tg import request, tmpl_context as c
33 from tg.i18n import ugettext as _
33 from tg.i18n import ugettext as _
34 from webob.exc import HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest
34 from webob.exc import HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest
35
35
36 from kallithea.config.routing import url
36 from kallithea.config.routing import url
37 from kallithea.lib import helpers as h
37 from kallithea.lib import helpers as h
38 from kallithea.lib import diffs
38 from kallithea.lib import diffs
39 from kallithea.lib.auth import LoginRequired, HasRepoPermissionLevelDecorator, \
39 from kallithea.lib.auth import LoginRequired, HasRepoPermissionLevelDecorator, \
40 NotAnonymous
40 NotAnonymous
41 from kallithea.lib.base import BaseRepoController, render, jsonify
41 from kallithea.lib.base import BaseRepoController, render, jsonify
42 from kallithea.lib.diffs import LimitedDiffContainer
42 from kallithea.lib.diffs import LimitedDiffContainer
43 from kallithea.lib.page import Page
43 from kallithea.lib.page import Page
44 from kallithea.lib.utils import action_logger
44 from kallithea.lib.utils import action_logger
45 from kallithea.lib.vcs.exceptions import EmptyRepositoryError, ChangesetDoesNotExistError
45 from kallithea.lib.vcs.exceptions import EmptyRepositoryError, ChangesetDoesNotExistError
46 from kallithea.lib.vcs.utils import safe_str
46 from kallithea.lib.vcs.utils import safe_str
47 from kallithea.lib.vcs.utils.hgcompat import unionrepo
47 from kallithea.lib.vcs.utils.hgcompat import unionrepo
48 from kallithea.model.db import PullRequest, ChangesetStatus, ChangesetComment, \
48 from kallithea.model.db import PullRequest, ChangesetStatus, ChangesetComment, \
49 PullRequestReviewer, Repository, User
49 PullRequestReviewer, Repository, User
50 from kallithea.model.pull_request import CreatePullRequestAction, CreatePullRequestIterationAction, PullRequestModel
50 from kallithea.model.pull_request import CreatePullRequestAction, CreatePullRequestIterationAction, PullRequestModel
51 from kallithea.model.meta import Session
51 from kallithea.model.meta import Session
52 from kallithea.model.repo import RepoModel
52 from kallithea.model.repo import RepoModel
53 from kallithea.model.comment import ChangesetCommentsModel
53 from kallithea.model.comment import ChangesetCommentsModel
54 from kallithea.model.changeset_status import ChangesetStatusModel
54 from kallithea.model.changeset_status import ChangesetStatusModel
55 from kallithea.model.forms import PullRequestForm, PullRequestPostForm
55 from kallithea.model.forms import PullRequestForm, PullRequestPostForm
56 from kallithea.lib.utils2 import safe_int
56 from kallithea.lib.utils2 import safe_int
57 from kallithea.controllers.changeset import _ignorews_url, _context_url, \
57 from kallithea.controllers.changeset import _ignorews_url, _context_url, \
58 create_comment
58 create_comment
59 from kallithea.controllers.compare import CompareController
59 from kallithea.controllers.compare import CompareController
60 from kallithea.lib.graphmod import graph_data
60 from kallithea.lib.graphmod import graph_data
61
61
62 log = logging.getLogger(__name__)
62 log = logging.getLogger(__name__)
63
63
64
64
def _get_reviewer(user_id):
    """Resolve *user_id* to a ``User`` acceptable as a reviewer.

    Flashes an error and aborts with HTTP 400 when the id is not an
    integer, does not match any user, or refers to the default
    (anonymous) user.
    """
    try:
        numeric_id = int(user_id)
    except ValueError:
        numeric_id = None

    reviewer = User.get(numeric_id) if numeric_id is not None else None
    if reviewer is not None and not reviewer.is_default_user:
        return reviewer

    h.flash(_('Invalid reviewer "%s" specified') % user_id, category='error')
    raise HTTPBadRequest()
77
77
78
78
79 class PullrequestsController(BaseRepoController):
79 class PullrequestsController(BaseRepoController):
80
80
    def _get_repo_refs(self, repo, rev=None, branch=None, branch_rev=None):
        """return a structure with repo's interesting changesets, suitable for
        the selectors in pullrequest.html

        rev: a revision that must be in the list somehow and selected by default
        branch: a branch that must be in the list and selected by default - even if closed
        branch_rev: a revision of which peers should be preferred and available.

        Returns a 2-tuple ``(groups, selected)`` where *groups* is a list of
        ``(options, label)`` pairs (only non-empty option lists are kept) and
        *selected* is the ``type:name:rev`` key of the default option.
        """
        # list named branches that has been merged to this named branch - it should probably merge back
        peers = []

        # normalize all incoming refs to plain (byte) strings
        if rev:
            rev = safe_str(rev)

        if branch:
            branch = safe_str(branch)

        if branch_rev:
            branch_rev = safe_str(branch_rev)
            # a revset not restricting to merge() would be better
            # (especially because it would get the branch point)
            # ... but is currently too expensive
            # including branches of children could be nice too
            peerbranches = set()
            for i in repo._repo.revs(
                "sort(parents(branch(id(%s)) and merge()) - branch(id(%s)), -rev)",
                branch_rev, branch_rev):
                abranch = repo.get_changeset(i).branch
                if abranch not in peerbranches:
                    n = 'branch:%s:%s' % (abranch, repo.get_changeset(abranch).raw_id)
                    peers.append((n, abranch))
                    peerbranches.add(abranch)

        selected = None
        tiprev = repo.tags.get('tip')
        tipbranch = None

        # open named branches; remember which one holds tip and whether the
        # requested rev/branch matches an entry (making it the default)
        branches = []
        for abranch, branchrev in repo.branches.iteritems():
            n = 'branch:%s:%s' % (abranch, branchrev)
            desc = abranch
            if branchrev == tiprev:
                tipbranch = abranch
                desc = '%s (current tip)' % desc
            branches.append((n, desc))
            if rev == branchrev:
                selected = n
            if branch == abranch:
                if not rev:
                    selected = n
                branch = None  # consumed - prevents the fallbacks below
        if branch:  # branch not in list - it is probably closed
            branchrev = repo.closed_branches.get(branch)
            if branchrev:
                n = 'branch:%s:%s' % (branch, branchrev)
                branches.append((n, _('%s (closed)') % branch))
                selected = n
                branch = None
        if branch:
            log.debug('branch %r not found in %s', branch, repo)

        bookmarks = []
        for bookmark, bookmarkrev in repo.bookmarks.iteritems():
            n = 'book:%s:%s' % (bookmark, bookmarkrev)
            bookmarks.append((n, bookmark))
            if rev == bookmarkrev:
                selected = n

        tags = []
        for tag, tagrev in repo.tags.iteritems():
            if tag == 'tip':
                continue
            n = 'tag:%s:%s' % (tag, tagrev)
            tags.append((n, tag))
            # note: even if rev == tagrev, don't select the static tag - it must be chosen explicitly

        # prio 1: rev was selected as existing entry above

        # prio 2: create special entry for rev; rev _must_ be used
        specials = []
        if rev and selected is None:
            selected = 'rev:%s:%s' % (rev, rev)
            specials = [(selected, '%s: %s' % (_("Changeset"), rev[:12]))]

        # prio 3: most recent peer branch
        if peers and not selected:
            selected = peers[0][0]

        # prio 4: tip revision
        if not selected:
            if h.is_hg(repo):
                if tipbranch:
                    selected = 'branch:%s:%s' % (tipbranch, tiprev)
                else:
                    # empty hg repo - fall back to the null changeset
                    selected = 'tag:null:' + repo.EMPTY_CHANGESET
                    tags.append((selected, 'null'))
            else:
                # git: prefer 'master', otherwise any branch
                if 'master' in repo.branches:
                    selected = 'branch:master:%s' % repo.branches['master']
                else:
                    k, v = repo.branches.items()[0]
                    selected = 'branch:%s:%s' % (k, v)

        groups = [(specials, _("Special")),
                  (peers, _("Peer branches")),
                  (bookmarks, _("Bookmarks")),
                  (branches, _("Branches")),
                  (tags, _("Tags")),
                  ]
        return [g for g in groups if g[0]], selected
189 return [g for g in groups if g[0]], selected
190
190
191 def _get_is_allowed_change_status(self, pull_request):
191 def _get_is_allowed_change_status(self, pull_request):
192 if pull_request.is_closed():
192 if pull_request.is_closed():
193 return False
193 return False
194
194
195 owner = request.authuser.user_id == pull_request.owner_id
195 owner = request.authuser.user_id == pull_request.owner_id
196 reviewer = PullRequestReviewer.query() \
196 reviewer = PullRequestReviewer.query() \
197 .filter(PullRequestReviewer.pull_request == pull_request) \
197 .filter(PullRequestReviewer.pull_request == pull_request) \
198 .filter(PullRequestReviewer.user_id == request.authuser.user_id) \
198 .filter(PullRequestReviewer.user_id == request.authuser.user_id) \
199 .count() != 0
199 .count() != 0
200
200
201 return request.authuser.admin or owner or reviewer
201 return request.authuser.admin or owner or reviewer
202
202
203 @LoginRequired()
203 @LoginRequired()
204 @HasRepoPermissionLevelDecorator('read')
204 @HasRepoPermissionLevelDecorator('read')
205 def show_all(self, repo_name):
205 def show_all(self, repo_name):
206 c.from_ = request.GET.get('from_') or ''
206 c.from_ = request.GET.get('from_') or ''
207 c.closed = request.GET.get('closed') or ''
207 c.closed = request.GET.get('closed') or ''
208 p = safe_int(request.GET.get('page'), 1)
208 p = safe_int(request.GET.get('page'), 1)
209
209
210 q = PullRequest.query(include_closed=c.closed, sorted=True)
210 q = PullRequest.query(include_closed=c.closed, sorted=True)
211 if c.from_:
211 if c.from_:
212 q = q.filter_by(org_repo=c.db_repo)
212 q = q.filter_by(org_repo=c.db_repo)
213 else:
213 else:
214 q = q.filter_by(other_repo=c.db_repo)
214 q = q.filter_by(other_repo=c.db_repo)
215 c.pull_requests = q.all()
215 c.pull_requests = q.all()
216
216
217 c.pullrequests_pager = Page(c.pull_requests, page=p, items_per_page=100)
217 c.pullrequests_pager = Page(c.pull_requests, page=p, items_per_page=100)
218
218
219 return render('/pullrequests/pullrequest_show_all.html')
219 return render('/pullrequests/pullrequest_show_all.html')
220
220
221 @LoginRequired()
221 @LoginRequired()
222 @NotAnonymous()
222 @NotAnonymous()
223 def show_my(self):
223 def show_my(self):
224 c.closed = request.GET.get('closed') or ''
224 c.closed = request.GET.get('closed') or ''
225
225
226 c.my_pull_requests = PullRequest.query(
226 c.my_pull_requests = PullRequest.query(
227 include_closed=c.closed,
227 include_closed=c.closed,
228 sorted=True,
228 sorted=True,
229 ).filter_by(owner_id=request.authuser.user_id).all()
229 ).filter_by(owner_id=request.authuser.user_id).all()
230
230
231 c.participate_in_pull_requests = []
231 c.participate_in_pull_requests = []
232 c.participate_in_pull_requests_todo = []
232 c.participate_in_pull_requests_todo = []
233 done_status = set([ChangesetStatus.STATUS_APPROVED, ChangesetStatus.STATUS_REJECTED])
233 done_status = set([ChangesetStatus.STATUS_APPROVED, ChangesetStatus.STATUS_REJECTED])
234 for pr in PullRequest.query(
234 for pr in PullRequest.query(
235 include_closed=c.closed,
235 include_closed=c.closed,
236 reviewer_id=request.authuser.user_id,
236 reviewer_id=request.authuser.user_id,
237 sorted=True,
237 sorted=True,
238 ):
238 ):
239 status = pr.user_review_status(request.authuser.user_id) # very inefficient!!!
239 status = pr.user_review_status(request.authuser.user_id) # very inefficient!!!
240 if status in done_status:
240 if status in done_status:
241 c.participate_in_pull_requests.append(pr)
241 c.participate_in_pull_requests.append(pr)
242 else:
242 else:
243 c.participate_in_pull_requests_todo.append(pr)
243 c.participate_in_pull_requests_todo.append(pr)
244
244
245 return render('/pullrequests/pullrequest_show_my.html')
245 return render('/pullrequests/pullrequest_show_my.html')
246
246
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionLevelDecorator('read')
    def index(self):
        """Render the "create pull request" form with sensible defaults.

        Pre-selects source ('cs') and destination ('a') repositories and
        refs from the ``rev_start``/``rev_end``/``branch`` GET parameters,
        preferring the fork parent as destination when there is one.
        """
        org_repo = c.db_repo
        org_scm_instance = org_repo.scm_instance
        try:
            # probe for an empty repository - no changesets means no PR
            org_scm_instance.get_changeset()
        except EmptyRepositoryError as e:
            h.flash(h.literal(_('There are no changesets yet')),
                    category='warning')
            raise HTTPFound(location=url('summary_home', repo_name=org_repo.repo_name))

        org_rev = request.GET.get('rev_end')
        # rev_start is not directly useful - its parent could however be used
        # as default for other and thus give a simple compare view
        rev_start = request.GET.get('rev_start')
        other_rev = None
        if rev_start:
            starters = org_repo.get_changeset(rev_start).parents
            if starters:
                other_rev = starters[0].raw_id
            else:
                # rev_start is a root changeset - compare against empty repo
                other_rev = org_repo.scm_instance.EMPTY_CHANGESET
        branch = request.GET.get('branch')

        c.cs_repos = [(org_repo.repo_name, org_repo.repo_name)]
        c.default_cs_repo = org_repo.repo_name
        c.cs_refs, c.default_cs_ref = self._get_repo_refs(org_scm_instance, rev=org_rev, branch=branch)

        # default ref keys are 'type:name:rev' - resolve the branch name so
        # the destination side can default to the same branch
        default_cs_ref_type, default_cs_branch, default_cs_rev = c.default_cs_ref.split(':')
        if default_cs_ref_type != 'branch':
            default_cs_branch = org_repo.get_changeset(default_cs_rev).branch

        # add org repo to other so we can open pull request against peer branches on itself
        c.a_repos = [(org_repo.repo_name, '%s (self)' % org_repo.repo_name)]

        if org_repo.parent:
            # add parent of this fork also and select it.
            # use the same branch on destination as on source, if available.
            c.a_repos.append((org_repo.parent.repo_name, '%s (parent)' % org_repo.parent.repo_name))
            c.a_repo = org_repo.parent
            c.a_refs, c.default_a_ref = self._get_repo_refs(
                org_repo.parent.scm_instance, branch=default_cs_branch, rev=other_rev)

        else:
            c.a_repo = org_repo
            c.a_refs, c.default_a_ref = self._get_repo_refs(org_scm_instance, rev=other_rev)

        # gather forks and add to this list ... even though it is rare to
        # request forks to pull from their parent
        for fork in org_repo.forks:
            c.a_repos.append((fork.repo_name, fork.repo_name))

        return render('/pullrequests/pullrequest.html')
301 return render('/pullrequests/pullrequest.html')
302
302
303 @LoginRequired()
303 @LoginRequired()
304 @NotAnonymous()
304 @NotAnonymous()
305 @HasRepoPermissionLevelDecorator('read')
305 @HasRepoPermissionLevelDecorator('read')
306 @jsonify
306 @jsonify
307 def repo_info(self, repo_name):
307 def repo_info(self, repo_name):
308 repo = c.db_repo
308 repo = c.db_repo
309 refs, selected_ref = self._get_repo_refs(repo.scm_instance)
309 refs, selected_ref = self._get_repo_refs(repo.scm_instance)
310 return {
310 return {
311 'description': repo.description.split('\n', 1)[0],
311 'description': repo.description.split('\n', 1)[0],
312 'selected_ref': selected_ref,
312 'selected_ref': selected_ref,
313 'refs': refs,
313 'refs': refs,
314 }
314 }
315
315
316 @LoginRequired()
316 @LoginRequired()
317 @NotAnonymous()
317 @NotAnonymous()
318 @HasRepoPermissionLevelDecorator('read')
318 @HasRepoPermissionLevelDecorator('read')
319 def create(self, repo_name):
319 def create(self, repo_name):
320 repo = c.db_repo
320 repo = c.db_repo
321 try:
321 try:
322 _form = PullRequestForm(repo.repo_id)().to_python(request.POST)
322 _form = PullRequestForm(repo.repo_id)().to_python(request.POST)
323 except formencode.Invalid as errors:
323 except formencode.Invalid as errors:
324 log.error(traceback.format_exc())
324 log.error(traceback.format_exc())
325 log.error(str(errors))
325 log.error(str(errors))
326 msg = _('Error creating pull request: %s') % errors.msg
326 msg = _('Error creating pull request: %s') % errors.msg
327 h.flash(msg, 'error')
327 h.flash(msg, 'error')
328 raise HTTPBadRequest
328 raise HTTPBadRequest
329
329
330 # heads up: org and other might seem backward here ...
330 # heads up: org and other might seem backward here ...
331 org_ref = _form['org_ref'] # will have merge_rev as rev but symbolic name
331 org_ref = _form['org_ref'] # will have merge_rev as rev but symbolic name
332 org_repo = Repository.guess_instance(_form['org_repo'])
332 org_repo = Repository.guess_instance(_form['org_repo'])
333
333
334 other_ref = _form['other_ref'] # will have symbolic name and head revision
334 other_ref = _form['other_ref'] # will have symbolic name and head revision
335 other_repo = Repository.guess_instance(_form['other_repo'])
335 other_repo = Repository.guess_instance(_form['other_repo'])
336
336
337 reviewers = []
337 reviewers = []
338
338
339 title = _form['pullrequest_title']
339 title = _form['pullrequest_title']
340 description = _form['pullrequest_desc'].strip()
340 description = _form['pullrequest_desc'].strip()
341 owner = User.get(request.authuser.user_id)
341 owner = User.get(request.authuser.user_id)
342
342
343 try:
343 try:
344 cmd = CreatePullRequestAction(org_repo, other_repo, org_ref, other_ref, title, description, owner, reviewers)
344 cmd = CreatePullRequestAction(org_repo, other_repo, org_ref, other_ref, title, description, owner, reviewers)
345 except CreatePullRequestAction.ValidationError as e:
345 except CreatePullRequestAction.ValidationError as e:
346 h.flash(str(e), category='error', logf=log.error)
346 h.flash(str(e), category='error', logf=log.error)
347 raise HTTPNotFound
347 raise HTTPNotFound
348
348
349 try:
349 try:
350 pull_request = cmd.execute()
350 pull_request = cmd.execute()
351 Session().commit()
351 Session().commit()
352 except Exception:
352 except Exception:
353 h.flash(_('Error occurred while creating pull request'),
353 h.flash(_('Error occurred while creating pull request'),
354 category='error')
354 category='error')
355 log.error(traceback.format_exc())
355 log.error(traceback.format_exc())
356 raise HTTPFound(location=url('pullrequest_home', repo_name=repo_name))
356 raise HTTPFound(location=url('pullrequest_home', repo_name=repo_name))
357
357
358 h.flash(_('Successfully opened new pull request'),
358 h.flash(_('Successfully opened new pull request'),
359 category='success')
359 category='success')
360 raise HTTPFound(location=pull_request.url())
360 raise HTTPFound(location=pull_request.url())
361
361
362 def create_new_iteration(self, old_pull_request, new_rev, title, description, reviewers):
362 def create_new_iteration(self, old_pull_request, new_rev, title, description, reviewers):
363 owner = User.get(request.authuser.user_id)
363 owner = User.get(request.authuser.user_id)
364 new_org_rev = self._get_ref_rev(old_pull_request.org_repo, 'rev', new_rev)
364 new_org_rev = self._get_ref_rev(old_pull_request.org_repo, 'rev', new_rev)
365 new_other_rev = self._get_ref_rev(old_pull_request.other_repo, old_pull_request.other_ref_parts[0], old_pull_request.other_ref_parts[1])
365 new_other_rev = self._get_ref_rev(old_pull_request.other_repo, old_pull_request.other_ref_parts[0], old_pull_request.other_ref_parts[1])
366 try:
366 try:
367 cmd = CreatePullRequestIterationAction(old_pull_request, new_org_rev, new_other_rev, title, description, owner, reviewers)
367 cmd = CreatePullRequestIterationAction(old_pull_request, new_org_rev, new_other_rev, title, description, owner, reviewers)
368 except CreatePullRequestAction.ValidationError as e:
368 except CreatePullRequestAction.ValidationError as e:
369 h.flash(str(e), category='error', logf=log.error)
369 h.flash(str(e), category='error', logf=log.error)
370 raise HTTPNotFound
370 raise HTTPNotFound
371
371
372 try:
372 try:
373 pull_request = cmd.execute()
373 pull_request = cmd.execute()
374 Session().commit()
374 Session().commit()
375 except Exception:
375 except Exception:
376 h.flash(_('Error occurred while creating pull request'),
376 h.flash(_('Error occurred while creating pull request'),
377 category='error')
377 category='error')
378 log.error(traceback.format_exc())
378 log.error(traceback.format_exc())
379 raise HTTPFound(location=old_pull_request.url())
379 raise HTTPFound(location=old_pull_request.url())
380
380
381 h.flash(_('New pull request iteration created'),
381 h.flash(_('New pull request iteration created'),
382 category='success')
382 category='success')
383 raise HTTPFound(location=pull_request.url())
383 raise HTTPFound(location=pull_request.url())
384
384
    # pullrequest_post for PR editing
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionLevelDecorator('read')
    def post(self, repo_name, pull_request_id):
        """Update an open pull request from the posted edit form.

        Handles title/description/owner changes and reviewer add/remove;
        when ``updaterev`` is set, delegates to ``create_new_iteration``
        instead. Only the owner or a (repo/site) admin may update.
        """
        pull_request = PullRequest.get_or_404(pull_request_id)
        if pull_request.is_closed():
            raise HTTPForbidden()
        assert pull_request.other_repo.repo_name == repo_name
        # only owner or admin can update it
        owner = pull_request.owner_id == request.authuser.user_id
        repo_admin = h.HasRepoPermissionLevel('admin')(c.repo_name)
        if not (h.HasPermissionAny('hg.admin')() or repo_admin or owner):
            raise HTTPForbidden()

        _form = PullRequestPostForm()().to_python(request.POST)

        # three reviewer sets: current DB state, the submitted list, and the
        # list as it was when the form was rendered - used to detect
        # concurrent edits by other users
        cur_reviewers = set(pull_request.get_reviewer_users())
        new_reviewers = set(_get_reviewer(s) for s in _form['review_members'])
        old_reviewers = set(_get_reviewer(s) for s in _form['org_review_members'])

        # reviewers changed by someone else since this form was loaded
        other_added = cur_reviewers - old_reviewers
        other_removed = old_reviewers - cur_reviewers

        if other_added:
            h.flash(_('Meanwhile, the following reviewers have been added: %s') %
                    (', '.join(u.username for u in other_added)),
                    category='warning')
        if other_removed:
            h.flash(_('Meanwhile, the following reviewers have been removed: %s') %
                    (', '.join(u.username for u in other_removed)),
                    category='warning')

        if _form['updaterev']:
            return self.create_new_iteration(pull_request,
                                             _form['updaterev'],
                                             _form['pullrequest_title'],
                                             _form['pullrequest_desc'],
                                             new_reviewers)

        # apply only this user's own changes, preserving concurrent edits:
        # add what they added (unless already present), remove what they
        # removed (only if still present)
        added_reviewers = new_reviewers - old_reviewers - cur_reviewers
        removed_reviewers = (old_reviewers - new_reviewers) & cur_reviewers

        old_description = pull_request.description
        pull_request.title = _form['pullrequest_title']
        pull_request.description = _form['pullrequest_desc'].strip() or _('No description')
        pull_request.owner = User.get_by_username(_form['owner'])
        user = User.get(request.authuser.user_id)

        # notify users newly mentioned in the updated description
        PullRequestModel().mention_from_description(user, pull_request, old_description)
        PullRequestModel().add_reviewers(user, pull_request, added_reviewers)
        PullRequestModel().remove_reviewers(user, pull_request, removed_reviewers)

        Session().commit()
        h.flash(_('Pull request updated'), category='success')

        raise HTTPFound(location=pull_request.url())
442
442
443 @LoginRequired()
443 @LoginRequired()
444 @NotAnonymous()
444 @NotAnonymous()
445 @HasRepoPermissionLevelDecorator('read')
445 @HasRepoPermissionLevelDecorator('read')
446 @jsonify
446 @jsonify
447 def delete(self, repo_name, pull_request_id):
447 def delete(self, repo_name, pull_request_id):
448 pull_request = PullRequest.get_or_404(pull_request_id)
448 pull_request = PullRequest.get_or_404(pull_request_id)
449 # only owner can delete it !
449 # only owner can delete it !
450 if pull_request.owner_id == request.authuser.user_id:
450 if pull_request.owner_id == request.authuser.user_id:
451 PullRequestModel().delete(pull_request)
451 PullRequestModel().delete(pull_request)
452 Session().commit()
452 Session().commit()
453 h.flash(_('Successfully deleted pull request'),
453 h.flash(_('Successfully deleted pull request'),
454 category='success')
454 category='success')
455 raise HTTPFound(location=url('my_pullrequests'))
455 raise HTTPFound(location=url('my_pullrequests'))
456 raise HTTPForbidden()
456 raise HTTPForbidden()
457
457
458 @LoginRequired()
458 @LoginRequired()
459 @HasRepoPermissionLevelDecorator('read')
459 @HasRepoPermissionLevelDecorator('read')
460 def show(self, repo_name, pull_request_id, extra=None):
460 def show(self, repo_name, pull_request_id, extra=None):
461 repo_model = RepoModel()
461 repo_model = RepoModel()
462 c.users_array = repo_model.get_users_js()
462 c.users_array = repo_model.get_users_js()
463 c.user_groups_array = repo_model.get_user_groups_js()
463 c.user_groups_array = repo_model.get_user_groups_js()
464 c.pull_request = PullRequest.get_or_404(pull_request_id)
464 c.pull_request = PullRequest.get_or_404(pull_request_id)
465 c.allowed_to_change_status = self._get_is_allowed_change_status(c.pull_request)
465 c.allowed_to_change_status = self._get_is_allowed_change_status(c.pull_request)
466 cc_model = ChangesetCommentsModel()
466 cc_model = ChangesetCommentsModel()
467 cs_model = ChangesetStatusModel()
467 cs_model = ChangesetStatusModel()
468
468
469 # pull_requests repo_name we opened it against
469 # pull_requests repo_name we opened it against
470 # ie. other_repo must match
470 # ie. other_repo must match
471 if repo_name != c.pull_request.other_repo.repo_name:
471 if repo_name != c.pull_request.other_repo.repo_name:
472 raise HTTPNotFound
472 raise HTTPNotFound
473
473
474 # load compare data into template context
474 # load compare data into template context
475 c.cs_repo = c.pull_request.org_repo
475 c.cs_repo = c.pull_request.org_repo
476 (c.cs_ref_type,
476 (c.cs_ref_type,
477 c.cs_ref_name,
477 c.cs_ref_name,
478 c.cs_rev) = c.pull_request.org_ref.split(':')
478 c.cs_rev) = c.pull_request.org_ref.split(':')
479
479
480 c.a_repo = c.pull_request.other_repo
480 c.a_repo = c.pull_request.other_repo
481 (c.a_ref_type,
481 (c.a_ref_type,
482 c.a_ref_name,
482 c.a_ref_name,
483 c.a_rev) = c.pull_request.other_ref.split(':') # a_rev is ancestor
483 c.a_rev) = c.pull_request.other_ref.split(':') # a_rev is ancestor
484
484
485 org_scm_instance = c.cs_repo.scm_instance # property with expensive cache invalidation check!!!
485 org_scm_instance = c.cs_repo.scm_instance # property with expensive cache invalidation check!!!
486 try:
486 try:
487 c.cs_ranges = []
487 c.cs_ranges = []
488 for x in c.pull_request.revisions:
488 for x in c.pull_request.revisions:
489 c.cs_ranges.append(org_scm_instance.get_changeset(x))
489 c.cs_ranges.append(org_scm_instance.get_changeset(x))
490 except ChangesetDoesNotExistError:
490 except ChangesetDoesNotExistError:
491 c.cs_ranges = []
491 c.cs_ranges = []
492 h.flash(_('Revision %s not found in %s') % (x, c.cs_repo.repo_name),
492 h.flash(_('Revision %s not found in %s') % (x, c.cs_repo.repo_name),
493 'error')
493 'error')
494 c.cs_ranges_org = None # not stored and not important and moving target - could be calculated ...
494 c.cs_ranges_org = None # not stored and not important and moving target - could be calculated ...
495 revs = [ctx.revision for ctx in reversed(c.cs_ranges)]
495 revs = [ctx.revision for ctx in reversed(c.cs_ranges)]
496 c.jsdata = graph_data(org_scm_instance, revs)
496 c.jsdata = graph_data(org_scm_instance, revs)
497
497
498 c.is_range = False
498 c.is_range = False
499 try:
499 try:
500 if c.a_ref_type == 'rev': # this looks like a free range where target is ancestor
500 if c.a_ref_type == 'rev': # this looks like a free range where target is ancestor
501 cs_a = org_scm_instance.get_changeset(c.a_rev)
501 cs_a = org_scm_instance.get_changeset(c.a_rev)
502 root_parents = c.cs_ranges[0].parents
502 root_parents = c.cs_ranges[0].parents
503 c.is_range = cs_a in root_parents
503 c.is_range = cs_a in root_parents
504 #c.merge_root = len(root_parents) > 1 # a range starting with a merge might deserve a warning
504 #c.merge_root = len(root_parents) > 1 # a range starting with a merge might deserve a warning
505 except ChangesetDoesNotExistError: # probably because c.a_rev not found
505 except ChangesetDoesNotExistError: # probably because c.a_rev not found
506 pass
506 pass
507 except IndexError: # probably because c.cs_ranges is empty, probably because revisions are missing
507 except IndexError: # probably because c.cs_ranges is empty, probably because revisions are missing
508 pass
508 pass
509
509
510 avail_revs = set()
510 avail_revs = set()
511 avail_show = []
511 avail_show = []
512 c.cs_branch_name = c.cs_ref_name
512 c.cs_branch_name = c.cs_ref_name
513 c.a_branch_name = None
513 c.a_branch_name = None
514 other_scm_instance = c.a_repo.scm_instance
514 other_scm_instance = c.a_repo.scm_instance
515 c.update_msg = ""
515 c.update_msg = ""
516 c.update_msg_other = ""
516 c.update_msg_other = ""
517 try:
517 try:
518 if not c.cs_ranges:
518 if not c.cs_ranges:
519 c.update_msg = _('Error: changesets not found when displaying pull request from %s.') % c.cs_rev
519 c.update_msg = _('Error: changesets not found when displaying pull request from %s.') % c.cs_rev
520 elif org_scm_instance.alias == 'hg' and c.a_ref_name != 'ancestor':
520 elif org_scm_instance.alias == 'hg' and c.a_ref_name != 'ancestor':
521 if c.cs_ref_type != 'branch':
521 if c.cs_ref_type != 'branch':
522 c.cs_branch_name = org_scm_instance.get_changeset(c.cs_ref_name).branch # use ref_type ?
522 c.cs_branch_name = org_scm_instance.get_changeset(c.cs_ref_name).branch # use ref_type ?
523 c.a_branch_name = c.a_ref_name
523 c.a_branch_name = c.a_ref_name
524 if c.a_ref_type != 'branch':
524 if c.a_ref_type != 'branch':
525 try:
525 try:
526 c.a_branch_name = other_scm_instance.get_changeset(c.a_ref_name).branch # use ref_type ?
526 c.a_branch_name = other_scm_instance.get_changeset(c.a_ref_name).branch # use ref_type ?
527 except EmptyRepositoryError:
527 except EmptyRepositoryError:
528 c.a_branch_name = 'null' # not a branch name ... but close enough
528 c.a_branch_name = 'null' # not a branch name ... but close enough
529 # candidates: descendants of old head that are on the right branch
529 # candidates: descendants of old head that are on the right branch
530 # and not are the old head itself ...
530 # and not are the old head itself ...
531 # and nothing at all if old head is a descendant of target ref name
531 # and nothing at all if old head is a descendant of target ref name
532 if not c.is_range and other_scm_instance._repo.revs('present(%s)::&%s', c.cs_ranges[-1].raw_id, c.a_branch_name):
532 if not c.is_range and other_scm_instance._repo.revs('present(%s)::&%s', c.cs_ranges[-1].raw_id, c.a_branch_name):
533 c.update_msg = _('This pull request has already been merged to %s.') % c.a_branch_name
533 c.update_msg = _('This pull request has already been merged to %s.') % c.a_branch_name
534 elif c.pull_request.is_closed():
534 elif c.pull_request.is_closed():
535 c.update_msg = _('This pull request has been closed and can not be updated.')
535 c.update_msg = _('This pull request has been closed and can not be updated.')
536 else: # look for descendants of PR head on source branch in org repo
536 else: # look for descendants of PR head on source branch in org repo
537 avail_revs = org_scm_instance._repo.revs('%s:: & branch(%s)',
537 avail_revs = org_scm_instance._repo.revs('%s:: & branch(%s)',
538 revs[0], c.cs_branch_name)
538 revs[0], c.cs_branch_name)
539 if len(avail_revs) > 1: # more than just revs[0]
539 if len(avail_revs) > 1: # more than just revs[0]
540 # also show changesets that not are descendants but would be merged in
540 # also show changesets that not are descendants but would be merged in
541 targethead = other_scm_instance.get_changeset(c.a_branch_name).raw_id
541 targethead = other_scm_instance.get_changeset(c.a_branch_name).raw_id
542 if org_scm_instance.path != other_scm_instance.path:
542 if org_scm_instance.path != other_scm_instance.path:
543 # Note: org_scm_instance.path must come first so all
543 # Note: org_scm_instance.path must come first so all
544 # valid revision numbers are 100% org_scm compatible
544 # valid revision numbers are 100% org_scm compatible
545 # - both for avail_revs and for revset results
545 # - both for avail_revs and for revset results
546 hgrepo = unionrepo.unionrepository(org_scm_instance.baseui,
546 hgrepo = unionrepo.unionrepository(org_scm_instance.baseui,
547 org_scm_instance.path,
547 org_scm_instance.path,
548 other_scm_instance.path)
548 other_scm_instance.path)
549 else:
549 else:
550 hgrepo = org_scm_instance._repo
550 hgrepo = org_scm_instance._repo
551 show = set(hgrepo.revs('::%ld & !::parents(%s) & !::%s',
551 show = set(hgrepo.revs('::%ld & !::parents(%s) & !::%s',
552 avail_revs, revs[0], targethead))
552 avail_revs, revs[0], targethead))
553 c.update_msg = _('The following additional changes are available on %s:') % c.cs_branch_name
553 c.update_msg = _('The following additional changes are available on %s:') % c.cs_branch_name
554 else:
554 else:
555 show = set()
555 show = set()
556 avail_revs = set() # drop revs[0]
556 avail_revs = set() # drop revs[0]
557 c.update_msg = _('No additional changesets found for iterating on this pull request.')
557 c.update_msg = _('No additional changesets found for iterating on this pull request.')
558
558
559 # TODO: handle branch heads that not are tip-most
559 # TODO: handle branch heads that not are tip-most
560 brevs = org_scm_instance._repo.revs('%s - %ld - %s', c.cs_branch_name, avail_revs, revs[0])
560 brevs = org_scm_instance._repo.revs('%s - %ld - %s', c.cs_branch_name, avail_revs, revs[0])
561 if brevs:
561 if brevs:
562 # also show changesets that are on branch but neither ancestors nor descendants
562 # also show changesets that are on branch but neither ancestors nor descendants
563 show.update(org_scm_instance._repo.revs('::%ld - ::%ld - ::%s', brevs, avail_revs, c.a_branch_name))
563 show.update(org_scm_instance._repo.revs('::%ld - ::%ld - ::%s', brevs, avail_revs, c.a_branch_name))
564 show.add(revs[0]) # make sure graph shows this so we can see how they relate
564 show.add(revs[0]) # make sure graph shows this so we can see how they relate
565 c.update_msg_other = _('Note: Branch %s has another head: %s.') % (c.cs_branch_name,
565 c.update_msg_other = _('Note: Branch %s has another head: %s.') % (c.cs_branch_name,
566 h.short_id(org_scm_instance.get_changeset((max(brevs))).raw_id))
566 h.short_id(org_scm_instance.get_changeset((max(brevs))).raw_id))
567
567
568 avail_show = sorted(show, reverse=True)
568 avail_show = sorted(show, reverse=True)
569
569
570 elif org_scm_instance.alias == 'git':
570 elif org_scm_instance.alias == 'git':
571 c.cs_repo.scm_instance.get_changeset(c.cs_rev) # check it exists - raise ChangesetDoesNotExistError if not
571 c.cs_repo.scm_instance.get_changeset(c.cs_rev) # check it exists - raise ChangesetDoesNotExistError if not
572 c.update_msg = _("Git pull requests don't support iterating yet.")
572 c.update_msg = _("Git pull requests don't support iterating yet.")
573 except ChangesetDoesNotExistError:
573 except ChangesetDoesNotExistError:
574 c.update_msg = _('Error: some changesets not found when displaying pull request from %s.') % c.cs_rev
574 c.update_msg = _('Error: some changesets not found when displaying pull request from %s.') % c.cs_rev
575
575
576 c.avail_revs = avail_revs
576 c.avail_revs = avail_revs
577 c.avail_cs = [org_scm_instance.get_changeset(r) for r in avail_show]
577 c.avail_cs = [org_scm_instance.get_changeset(r) for r in avail_show]
578 c.avail_jsdata = graph_data(org_scm_instance, avail_show)
578 c.avail_jsdata = graph_data(org_scm_instance, avail_show)
579
579
580 raw_ids = [x.raw_id for x in c.cs_ranges]
580 raw_ids = [x.raw_id for x in c.cs_ranges]
581 c.cs_comments = c.cs_repo.get_comments(raw_ids)
581 c.cs_comments = c.cs_repo.get_comments(raw_ids)
582 c.cs_statuses = c.cs_repo.statuses(raw_ids)
582 c.cs_statuses = c.cs_repo.statuses(raw_ids)
583
583
584 ignore_whitespace = request.GET.get('ignorews') == '1'
584 ignore_whitespace = request.GET.get('ignorews') == '1'
585 line_context = safe_int(request.GET.get('context'), 3)
585 line_context = safe_int(request.GET.get('context'), 3)
586 c.ignorews_url = _ignorews_url
586 c.ignorews_url = _ignorews_url
587 c.context_url = _context_url
587 c.context_url = _context_url
588 fulldiff = request.GET.get('fulldiff')
588 fulldiff = request.GET.get('fulldiff')
589 diff_limit = None if fulldiff else self.cut_off_limit
589 diff_limit = None if fulldiff else self.cut_off_limit
590
590
591 # we swap org/other ref since we run a simple diff on one repo
591 # we swap org/other ref since we run a simple diff on one repo
592 log.debug('running diff between %s and %s in %s',
592 log.debug('running diff between %s and %s in %s',
593 c.a_rev, c.cs_rev, org_scm_instance.path)
593 c.a_rev, c.cs_rev, org_scm_instance.path)
594 try:
594 try:
595 txtdiff = org_scm_instance.get_diff(rev1=safe_str(c.a_rev), rev2=safe_str(c.cs_rev),
595 raw_diff = org_scm_instance.get_diff(rev1=safe_str(c.a_rev), rev2=safe_str(c.cs_rev),
596 ignore_whitespace=ignore_whitespace,
596 ignore_whitespace=ignore_whitespace,
597 context=line_context)
597 context=line_context)
598 except ChangesetDoesNotExistError:
598 except ChangesetDoesNotExistError:
599 txtdiff = _("The diff can't be shown - the PR revisions could not be found.")
599 raw_diff = _("The diff can't be shown - the PR revisions could not be found.")
600 diff_processor = diffs.DiffProcessor(txtdiff or '', diff_limit=diff_limit)
600 diff_processor = diffs.DiffProcessor(raw_diff or '', diff_limit=diff_limit)
601 _parsed = diff_processor.prepare()
602
601
603 c.limited_diff = False
602 c.limited_diff = False
604 if isinstance(_parsed, LimitedDiffContainer):
603 if isinstance(diff_processor.parsed, LimitedDiffContainer):
605 c.limited_diff = True
604 c.limited_diff = True
606
605
607 c.file_diff_data = []
606 c.file_diff_data = []
608 c.lines_added = 0
607 c.lines_added = 0
609 c.lines_deleted = 0
608 c.lines_deleted = 0
610
609
611 for f in _parsed:
610 for f in diff_processor.parsed:
612 st = f['stats']
611 st = f['stats']
613 c.lines_added += st['added']
612 c.lines_added += st['added']
614 c.lines_deleted += st['deleted']
613 c.lines_deleted += st['deleted']
615 filename = f['filename']
614 filename = f['filename']
616 fid = h.FID('', filename)
615 fid = h.FID('', filename)
617 diff = diff_processor.as_html(enable_comments=True,
616 diff = diff_processor.as_html(enable_comments=True,
618 parsed_lines=[f])
617 parsed_lines=[f])
619 c.file_diff_data.append((fid, None, f['operation'], f['old_filename'], filename, diff, st))
618 c.file_diff_data.append((fid, None, f['operation'], f['old_filename'], filename, diff, st))
620
619
621 # inline comments
620 # inline comments
622 c.inline_cnt = 0
621 c.inline_cnt = 0
623 c.inline_comments = cc_model.get_inline_comments(
622 c.inline_comments = cc_model.get_inline_comments(
624 c.db_repo.repo_id,
623 c.db_repo.repo_id,
625 pull_request=pull_request_id)
624 pull_request=pull_request_id)
626 # count inline comments
625 # count inline comments
627 for __, lines in c.inline_comments:
626 for __, lines in c.inline_comments:
628 for comments in lines.values():
627 for comments in lines.values():
629 c.inline_cnt += len(comments)
628 c.inline_cnt += len(comments)
630 # comments
629 # comments
631 c.comments = cc_model.get_comments(c.db_repo.repo_id, pull_request=pull_request_id)
630 c.comments = cc_model.get_comments(c.db_repo.repo_id, pull_request=pull_request_id)
632
631
633 # (badly named) pull-request status calculation based on reviewer votes
632 # (badly named) pull-request status calculation based on reviewer votes
634 (c.pull_request_reviewers,
633 (c.pull_request_reviewers,
635 c.pull_request_pending_reviewers,
634 c.pull_request_pending_reviewers,
636 c.current_voting_result,
635 c.current_voting_result,
637 ) = cs_model.calculate_pull_request_result(c.pull_request)
636 ) = cs_model.calculate_pull_request_result(c.pull_request)
638 c.changeset_statuses = ChangesetStatus.STATUSES
637 c.changeset_statuses = ChangesetStatus.STATUSES
639
638
640 c.is_ajax_preview = False
639 c.is_ajax_preview = False
641 c.ancestors = None # [c.a_rev] ... but that is shown in an other way
640 c.ancestors = None # [c.a_rev] ... but that is shown in an other way
642 return render('/pullrequests/pullrequest_show.html')
641 return render('/pullrequests/pullrequest_show.html')
643
642
644 @LoginRequired()
643 @LoginRequired()
645 @NotAnonymous()
644 @NotAnonymous()
646 @HasRepoPermissionLevelDecorator('read')
645 @HasRepoPermissionLevelDecorator('read')
647 @jsonify
646 @jsonify
648 def comment(self, repo_name, pull_request_id):
647 def comment(self, repo_name, pull_request_id):
649 pull_request = PullRequest.get_or_404(pull_request_id)
648 pull_request = PullRequest.get_or_404(pull_request_id)
650
649
651 status = request.POST.get('changeset_status')
650 status = request.POST.get('changeset_status')
652 close_pr = request.POST.get('save_close')
651 close_pr = request.POST.get('save_close')
653 delete = request.POST.get('save_delete')
652 delete = request.POST.get('save_delete')
654 f_path = request.POST.get('f_path')
653 f_path = request.POST.get('f_path')
655 line_no = request.POST.get('line')
654 line_no = request.POST.get('line')
656
655
657 if (status or close_pr or delete) and (f_path or line_no):
656 if (status or close_pr or delete) and (f_path or line_no):
658 # status votes and closing is only possible in general comments
657 # status votes and closing is only possible in general comments
659 raise HTTPBadRequest()
658 raise HTTPBadRequest()
660
659
661 allowed_to_change_status = self._get_is_allowed_change_status(pull_request)
660 allowed_to_change_status = self._get_is_allowed_change_status(pull_request)
662 if not allowed_to_change_status:
661 if not allowed_to_change_status:
663 if status or close_pr:
662 if status or close_pr:
664 h.flash(_('No permission to change pull request status'), 'error')
663 h.flash(_('No permission to change pull request status'), 'error')
665 raise HTTPForbidden()
664 raise HTTPForbidden()
666
665
667 if delete == "delete":
666 if delete == "delete":
668 if (pull_request.owner_id == request.authuser.user_id or
667 if (pull_request.owner_id == request.authuser.user_id or
669 h.HasPermissionAny('hg.admin')() or
668 h.HasPermissionAny('hg.admin')() or
670 h.HasRepoPermissionLevel('admin')(pull_request.org_repo.repo_name) or
669 h.HasRepoPermissionLevel('admin')(pull_request.org_repo.repo_name) or
671 h.HasRepoPermissionLevel('admin')(pull_request.other_repo.repo_name)
670 h.HasRepoPermissionLevel('admin')(pull_request.other_repo.repo_name)
672 ) and not pull_request.is_closed():
671 ) and not pull_request.is_closed():
673 PullRequestModel().delete(pull_request)
672 PullRequestModel().delete(pull_request)
674 Session().commit()
673 Session().commit()
675 h.flash(_('Successfully deleted pull request %s') % pull_request_id,
674 h.flash(_('Successfully deleted pull request %s') % pull_request_id,
676 category='success')
675 category='success')
677 return {
676 return {
678 'location': url('my_pullrequests'), # or repo pr list?
677 'location': url('my_pullrequests'), # or repo pr list?
679 }
678 }
680 raise HTTPFound(location=url('my_pullrequests')) # or repo pr list?
679 raise HTTPFound(location=url('my_pullrequests')) # or repo pr list?
681 raise HTTPForbidden()
680 raise HTTPForbidden()
682
681
683 text = request.POST.get('text', '').strip()
682 text = request.POST.get('text', '').strip()
684
683
685 comment = create_comment(
684 comment = create_comment(
686 text,
685 text,
687 status,
686 status,
688 pull_request_id=pull_request_id,
687 pull_request_id=pull_request_id,
689 f_path=f_path,
688 f_path=f_path,
690 line_no=line_no,
689 line_no=line_no,
691 closing_pr=close_pr,
690 closing_pr=close_pr,
692 )
691 )
693
692
694 action_logger(request.authuser,
693 action_logger(request.authuser,
695 'user_commented_pull_request:%s' % pull_request_id,
694 'user_commented_pull_request:%s' % pull_request_id,
696 c.db_repo, request.ip_addr)
695 c.db_repo, request.ip_addr)
697
696
698 if status:
697 if status:
699 ChangesetStatusModel().set_status(
698 ChangesetStatusModel().set_status(
700 c.db_repo.repo_id,
699 c.db_repo.repo_id,
701 status,
700 status,
702 request.authuser.user_id,
701 request.authuser.user_id,
703 comment,
702 comment,
704 pull_request=pull_request_id
703 pull_request=pull_request_id
705 )
704 )
706
705
707 if close_pr:
706 if close_pr:
708 PullRequestModel().close_pull_request(pull_request_id)
707 PullRequestModel().close_pull_request(pull_request_id)
709 action_logger(request.authuser,
708 action_logger(request.authuser,
710 'user_closed_pull_request:%s' % pull_request_id,
709 'user_closed_pull_request:%s' % pull_request_id,
711 c.db_repo, request.ip_addr)
710 c.db_repo, request.ip_addr)
712
711
713 Session().commit()
712 Session().commit()
714
713
715 if not request.environ.get('HTTP_X_PARTIAL_XHR'):
714 if not request.environ.get('HTTP_X_PARTIAL_XHR'):
716 raise HTTPFound(location=pull_request.url())
715 raise HTTPFound(location=pull_request.url())
717
716
718 data = {
717 data = {
719 'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))),
718 'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))),
720 }
719 }
721 if comment is not None:
720 if comment is not None:
722 c.comment = comment
721 c.comment = comment
723 data.update(comment.get_dict())
722 data.update(comment.get_dict())
724 data.update({'rendered_text':
723 data.update({'rendered_text':
725 render('changeset/changeset_comment_block.html')})
724 render('changeset/changeset_comment_block.html')})
726
725
727 return data
726 return data
728
727
729 @LoginRequired()
728 @LoginRequired()
730 @NotAnonymous()
729 @NotAnonymous()
731 @HasRepoPermissionLevelDecorator('read')
730 @HasRepoPermissionLevelDecorator('read')
732 @jsonify
731 @jsonify
733 def delete_comment(self, repo_name, comment_id):
732 def delete_comment(self, repo_name, comment_id):
734 co = ChangesetComment.get(comment_id)
733 co = ChangesetComment.get(comment_id)
735 if co.pull_request.is_closed():
734 if co.pull_request.is_closed():
736 # don't allow deleting comments on closed pull request
735 # don't allow deleting comments on closed pull request
737 raise HTTPForbidden()
736 raise HTTPForbidden()
738
737
739 owner = co.author_id == request.authuser.user_id
738 owner = co.author_id == request.authuser.user_id
740 repo_admin = h.HasRepoPermissionLevel('admin')(c.repo_name)
739 repo_admin = h.HasRepoPermissionLevel('admin')(c.repo_name)
741 if h.HasPermissionAny('hg.admin')() or repo_admin or owner:
740 if h.HasPermissionAny('hg.admin')() or repo_admin or owner:
742 ChangesetCommentsModel().delete(comment=co)
741 ChangesetCommentsModel().delete(comment=co)
743 Session().commit()
742 Session().commit()
744 return True
743 return True
745 else:
744 else:
746 raise HTTPForbidden()
745 raise HTTPForbidden()
@@ -1,700 +1,685 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 # This program is free software: you can redistribute it and/or modify
2 # This program is free software: you can redistribute it and/or modify
3 # it under the terms of the GNU General Public License as published by
3 # it under the terms of the GNU General Public License as published by
4 # the Free Software Foundation, either version 3 of the License, or
4 # the Free Software Foundation, either version 3 of the License, or
5 # (at your option) any later version.
5 # (at your option) any later version.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU General Public License
12 # You should have received a copy of the GNU General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 """
14 """
15 kallithea.lib.diffs
15 kallithea.lib.diffs
16 ~~~~~~~~~~~~~~~~~~~
16 ~~~~~~~~~~~~~~~~~~~
17
17
18 Set of diffing helpers, previously part of vcs
18 Set of diffing helpers, previously part of vcs
19
19
20
20
21 This file was forked by the Kallithea project in July 2014.
21 This file was forked by the Kallithea project in July 2014.
22 Original author and date, and relevant copyright and licensing information is below:
22 Original author and date, and relevant copyright and licensing information is below:
23 :created_on: Dec 4, 2011
23 :created_on: Dec 4, 2011
24 :author: marcink
24 :author: marcink
25 :copyright: (c) 2013 RhodeCode GmbH, and others.
25 :copyright: (c) 2013 RhodeCode GmbH, and others.
26 :license: GPLv3, see LICENSE.md for more details.
26 :license: GPLv3, see LICENSE.md for more details.
27 """
27 """
28 import re
28 import re
29 import difflib
29 import difflib
30 import logging
30 import logging
31
31
32 from tg.i18n import ugettext as _
32 from tg.i18n import ugettext as _
33
33
34 from kallithea.lib.vcs.exceptions import VCSError
34 from kallithea.lib.vcs.exceptions import VCSError
35 from kallithea.lib.vcs.nodes import FileNode, SubModuleNode
35 from kallithea.lib.vcs.nodes import FileNode, SubModuleNode
36 from kallithea.lib.vcs.backends.base import EmptyChangeset
36 from kallithea.lib.vcs.backends.base import EmptyChangeset
37 from kallithea.lib.helpers import escape
37 from kallithea.lib.helpers import escape
38 from kallithea.lib.utils2 import safe_unicode
38 from kallithea.lib.utils2 import safe_unicode
39
39
40 log = logging.getLogger(__name__)
40 log = logging.getLogger(__name__)
41
41
42
42
43 def wrap_to_table(html):
43 def wrap_to_table(html):
44 """Given a string with html, return it wrapped in a table, similar to what
44 """Given a string with html, return it wrapped in a table, similar to what
45 DiffProcessor returns."""
45 DiffProcessor returns."""
46 return '''\
46 return '''\
47 <table class="code-difftable">
47 <table class="code-difftable">
48 <tr class="line no-comment">
48 <tr class="line no-comment">
49 <td class="lineno new"></td>
49 <td class="lineno new"></td>
50 <td class="code no-comment"><pre>%s</pre></td>
50 <td class="code no-comment"><pre>%s</pre></td>
51 </tr>
51 </tr>
52 </table>''' % html
52 </table>''' % html
53
53
54
54
55 def wrapped_diff(filenode_old, filenode_new, diff_limit=None,
55 def wrapped_diff(filenode_old, filenode_new, diff_limit=None,
56 ignore_whitespace=True, line_context=3,
56 ignore_whitespace=True, line_context=3,
57 enable_comments=False):
57 enable_comments=False):
58 """
58 """
59 Returns a file diff wrapped into a table.
59 Returns a file diff wrapped into a table.
60 Checks for diff_limit and presents a message if the diff is too big.
60 Checks for diff_limit and presents a message if the diff is too big.
61 """
61 """
62 if filenode_old is None:
62 if filenode_old is None:
63 filenode_old = FileNode(filenode_new.path, '', EmptyChangeset())
63 filenode_old = FileNode(filenode_new.path, '', EmptyChangeset())
64
64
65 op = None
65 op = None
66 a_path = filenode_old.path # default, might be overriden by actual rename in diff
66 a_path = filenode_old.path # default, might be overriden by actual rename in diff
67 if filenode_old.is_binary or filenode_new.is_binary:
67 if filenode_old.is_binary or filenode_new.is_binary:
68 diff = wrap_to_table(_('Binary file'))
68 diff = wrap_to_table(_('Binary file'))
69 stats = (0, 0)
69 stats = (0, 0)
70
70
71 elif diff_limit != -1 and (
71 elif diff_limit != -1 and (
72 diff_limit is None or
72 diff_limit is None or
73 (filenode_old.size < diff_limit and filenode_new.size < diff_limit)):
73 (filenode_old.size < diff_limit and filenode_new.size < diff_limit)):
74
74
75 f_gitdiff = get_gitdiff(filenode_old, filenode_new,
75 raw_diff = get_gitdiff(filenode_old, filenode_new,
76 ignore_whitespace=ignore_whitespace,
76 ignore_whitespace=ignore_whitespace,
77 context=line_context)
77 context=line_context)
78 diff_processor = DiffProcessor(f_gitdiff)
78 diff_processor = DiffProcessor(raw_diff)
79 _parsed = diff_processor.prepare()
79 if diff_processor.parsed: # there should be exactly one element, for the specified file
80 if _parsed: # there should be exactly one element, for the specified file
80 f = diff_processor.parsed[0]
81 f = _parsed[0]
82 op = f['operation']
81 op = f['operation']
83 a_path = f['old_filename']
82 a_path = f['old_filename']
84
83
85 diff = diff_processor.as_html(enable_comments=enable_comments)
84 diff = diff_processor.as_html(enable_comments=enable_comments)
86 stats = diff_processor.stat()
85 stats = diff_processor.stat()
87
86
88 else:
87 else:
89 diff = wrap_to_table(_('Changeset was too big and was cut off, use '
88 diff = wrap_to_table(_('Changeset was too big and was cut off, use '
90 'diff menu to display this diff'))
89 'diff menu to display this diff'))
91 stats = (0, 0)
90 stats = (0, 0)
92
91
93 if not diff:
92 if not diff:
94 submodules = filter(lambda o: isinstance(o, SubModuleNode),
93 submodules = filter(lambda o: isinstance(o, SubModuleNode),
95 [filenode_new, filenode_old])
94 [filenode_new, filenode_old])
96 if submodules:
95 if submodules:
97 diff = wrap_to_table(escape('Submodule %r' % submodules[0]))
96 diff = wrap_to_table(escape('Submodule %r' % submodules[0]))
98 else:
97 else:
99 diff = wrap_to_table(_('No changes detected'))
98 diff = wrap_to_table(_('No changes detected'))
100
99
101 cs1 = filenode_old.changeset.raw_id
100 cs1 = filenode_old.changeset.raw_id
102 cs2 = filenode_new.changeset.raw_id
101 cs2 = filenode_new.changeset.raw_id
103
102
104 return cs1, cs2, a_path, diff, stats, op
103 return cs1, cs2, a_path, diff, stats, op
105
104
106
105
def get_gitdiff(filenode_old, filenode_new, ignore_whitespace=True, context=3):
    """
    Returns git style diff between given ``filenode_old`` and ``filenode_new``.
    """
    # fall back to the default amount of context when a falsy value is given
    context = context or 3

    # submodule nodes cannot be diffed like regular file content
    if filter(lambda o: isinstance(o, SubModuleNode),
              [filenode_new, filenode_old]):
        return ''

    for node in (filenode_old, filenode_new):
        if not isinstance(node, FileNode):
            raise VCSError("Given object should be FileNode object, not %s"
                % node.__class__)

    repo = filenode_new.changeset.repository
    # NOTE(review): presumably a side without a real changeset (added/removed
    # file) lacks raw_id and is diffed against the empty changeset - confirm
    old_raw_id = getattr(filenode_old.changeset, 'raw_id', repo.EMPTY_CHANGESET)
    new_raw_id = getattr(filenode_new.changeset, 'raw_id', repo.EMPTY_CHANGESET)

    return repo.get_diff(old_raw_id, new_raw_id, filenode_new.path,
                         ignore_whitespace, context)
130
129
131
130
# Operation codes used as keys of the per-file ``stats['ops']`` dict built by
# DiffProcessor._parse_gitdiff, each mapped to a human-readable message.
NEW_FILENODE = 1      # file was added
DEL_FILENODE = 2      # file was deleted
MOD_FILENODE = 3      # file content was modified
RENAMED_FILENODE = 4  # file was renamed
COPIED_FILENODE = 5   # file was copied
CHMOD_FILENODE = 6    # file mode (permission bits) changed
BIN_FILENODE = 7      # binary patch - diff body not shown
139
138
140
139
class DiffLimitExceeded(Exception):
    """Raised when the accumulated diff size passes the configured diff_limit.

    Raised by DiffProcessor._escaper and caught in _parse_gitdiff, which then
    truncates the parsed diff and wraps it in a LimitedDiffContainer.
    """
    pass
143
142
144
143
class LimitedDiffContainer(object):
    """Wrapper around a truncated parsed diff.

    Iterating yields the (partial) parsed per-file diff entries; the extra
    attributes record the limit that was hit and the size reached, so callers
    can tell the user the diff was cut off.
    """

    def __init__(self, diff_limit, cur_diff_size, diff):
        self.diff = diff
        self.diff_limit = diff_limit
        self.cur_diff_size = cur_diff_size

    def __iter__(self):
        # delegate straight to the wrapped list of parsed entries
        return iter(self.diff)
155
154
156
155
class DiffProcessor(object):
    """
    Give it a unified or git diff and it returns a list of the files that were
    mentioned in the diff together with a dict of meta information that
    can be used to render it in a HTML template.
    """
    # start of a per-file section in a git-style diff
    _diff_git_re = re.compile('^diff --git', re.MULTILINE)
    # hunk header: @@ -old_start[,old_len] +new_start[,new_len] @@ trailing
    _chunk_re = re.compile(r'^@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@(.*)')
    # marker line emitted when a file does not end with a newline
    _newline_marker = re.compile(r'^\\ No newline at end of file')
    # git extended diff header: all lines between "diff --git" and the first
    # hunk; every sub-header is optional, hence the (?:...)? groups
    _git_header_re = re.compile(r"""
        ^diff[ ]--git
            [ ]a/(?P<a_path>.+?)[ ]b/(?P<b_path>.+?)\n
        (?:^old[ ]mode[ ](?P<old_mode>\d+)\n
           ^new[ ]mode[ ](?P<new_mode>\d+)(?:\n|$))?
        (?:^similarity[ ]index[ ](?P<similarity_index>\d+)%\n
           ^rename[ ]from[ ](?P<rename_from>.+)\n
           ^rename[ ]to[ ](?P<rename_to>.+)(?:\n|$))?
        (?:^new[ ]file[ ]mode[ ](?P<new_file_mode>.+)(?:\n|$))?
        (?:^deleted[ ]file[ ]mode[ ](?P<deleted_file_mode>.+)(?:\n|$))?
        (?:^index[ ](?P<a_blob_id>[0-9A-Fa-f]+)
            \.\.(?P<b_blob_id>[0-9A-Fa-f]+)[ ]?(?P<b_mode>.+)?(?:\n|$))?
        (?:^(?P<bin_patch>GIT[ ]binary[ ]patch)(?:\n|$))?
        (?:^---[ ](a/(?P<a_file>.+?)|/dev/null)\t?(?:\n|$))?
        (?:^\+\+\+[ ](b/(?P<b_file>.+?)|/dev/null)\t?(?:\n|$))?
    """, re.VERBOSE | re.MULTILINE)
    # same as _git_header_re but for Mercurial's git-style diffs: similarity
    # index stands alone, and copy from/to lines may appear
    _hg_header_re = re.compile(r"""
        ^diff[ ]--git
            [ ]a/(?P<a_path>.+?)[ ]b/(?P<b_path>.+?)\n
        (?:^old[ ]mode[ ](?P<old_mode>\d+)\n
           ^new[ ]mode[ ](?P<new_mode>\d+)(?:\n|$))?
        (?:^similarity[ ]index[ ](?P<similarity_index>\d+)%(?:\n|$))?
        (?:^rename[ ]from[ ](?P<rename_from>.+)\n
           ^rename[ ]to[ ](?P<rename_to>.+)(?:\n|$))?
        (?:^copy[ ]from[ ](?P<copy_from>.+)\n
           ^copy[ ]to[ ](?P<copy_to>.+)(?:\n|$))?
        (?:^new[ ]file[ ]mode[ ](?P<new_file_mode>.+)(?:\n|$))?
        (?:^deleted[ ]file[ ]mode[ ](?P<deleted_file_mode>.+)(?:\n|$))?
        (?:^index[ ](?P<a_blob_id>[0-9A-Fa-f]+)
            \.\.(?P<b_blob_id>[0-9A-Fa-f]+)[ ]?(?P<b_mode>.+)?(?:\n|$))?
        (?:^(?P<bin_patch>GIT[ ]binary[ ]patch)(?:\n|$))?
        (?:^---[ ](a/(?P<a_file>.+?)|/dev/null)\t?(?:\n|$))?
        (?:^\+\+\+[ ](b/(?P<b_file>.+?)|/dev/null)\t?(?:\n|$))?
    """, re.VERBOSE | re.MULTILINE)

    # Used for inline highlighter word split, must match the substitutions in _escaper
    _token_re = re.compile(r'()(&amp;|&lt;|&gt;|<u>\t</u>|<u class="cr"></u>| <i></i>|\W+?)')

    # characters _escaper replaces with HTML markup; group order must stay in
    # sync with the groups[0..5] checks in _escaper's substitute()
    _escape_re = re.compile(r'(&)|(<)|(>)|(\t)|(\r)|(?<=.)( \n| $)')
205
204
    def __init__(self, diff, vcs='hg', diff_limit=None, inline_diff=True):
        """
        :param diff: a text in diff format
        :param vcs: type of version control hg or git
        :param diff_limit: define the size of diff that is considered "big"
            based on that parameter cut off will be triggered, set to None
            to show full diff
        :param inline_diff: also mark up one-liner word-level changes with
            <del>/<ins> while parsing
        """
        if not isinstance(diff, basestring):
            raise Exception('Diff must be a basestring got %s instead' % type(diff))

        self._diff = diff
        self.adds = 0
        self.removes = 0
        self.diff_limit = diff_limit
        # running total of escaped diff text, checked against diff_limit
        self.cur_diff_size = 0
        self.vcs = vcs
        # parse eagerly; the parsed result is available to callers as .parsed
        self.parsed = self._parse_gitdiff(inline_diff=inline_diff)
227
224
    def _escaper(self, string):
        """
        Do HTML escaping/markup and check the diff limit

        :raises DiffLimitExceeded: when the accumulated size of escaped lines
            passes ``self.diff_limit``
        """
        self.cur_diff_size += len(string)

        # escaper gets iterated on each .next() call and it checks if each
        # parsed line doesn't exceed the diff limit
        if self.diff_limit is not None and self.cur_diff_size > self.diff_limit:
            raise DiffLimitExceeded('Diff Limit Exceeded')

        def substitute(m):
            # exactly one group of _escape_re matched; the group index tells
            # which character to replace (order must match _escape_re)
            groups = m.groups()
            if groups[0]:
                return '&amp;'
            if groups[1]:
                return '&lt;'
            if groups[2]:
                return '&gt;'
            if groups[3]:
                return '<u>\t</u>'
            if groups[4]:
                return '<u class="cr"></u>'
            if groups[5]:
                return ' <i></i>'
            assert False

        return self._escape_re.sub(substitute, safe_unicode(string))
256
253
257 def _highlight_inline_diff(self, old, new):
254 def _highlight_inline_diff(self, old, new):
258 """
255 """
259 Highlight simple add/remove in two lines given as info dicts. They are
256 Highlight simple add/remove in two lines given as info dicts. They are
260 modified in place and given markup with <del>/<ins>.
257 modified in place and given markup with <del>/<ins>.
261 """
258 """
262 assert old['action'] == 'del'
259 assert old['action'] == 'del'
263 assert new['action'] == 'add'
260 assert new['action'] == 'add'
264
261
265 oldwords = self._token_re.split(old['line'])
262 oldwords = self._token_re.split(old['line'])
266 newwords = self._token_re.split(new['line'])
263 newwords = self._token_re.split(new['line'])
267 sequence = difflib.SequenceMatcher(None, oldwords, newwords)
264 sequence = difflib.SequenceMatcher(None, oldwords, newwords)
268
265
269 oldfragments, newfragments = [], []
266 oldfragments, newfragments = [], []
270 for tag, i1, i2, j1, j2 in sequence.get_opcodes():
267 for tag, i1, i2, j1, j2 in sequence.get_opcodes():
271 oldfrag = ''.join(oldwords[i1:i2])
268 oldfrag = ''.join(oldwords[i1:i2])
272 newfrag = ''.join(newwords[j1:j2])
269 newfrag = ''.join(newwords[j1:j2])
273 if tag != 'equal':
270 if tag != 'equal':
274 if oldfrag:
271 if oldfrag:
275 oldfrag = '<del>%s</del>' % oldfrag
272 oldfrag = '<del>%s</del>' % oldfrag
276 if newfrag:
273 if newfrag:
277 newfrag = '<ins>%s</ins>' % newfrag
274 newfrag = '<ins>%s</ins>' % newfrag
278 oldfragments.append(oldfrag)
275 oldfragments.append(oldfrag)
279 newfragments.append(newfrag)
276 newfragments.append(newfrag)
280
277
281 old['line'] = "".join(oldfragments)
278 old['line'] = "".join(oldfragments)
282 new['line'] = "".join(newfragments)
279 new['line'] = "".join(newfragments)
283
280
    def _get_header(self, diff_chunk):
        """
        Parses a Git diff for a single file (header and chunks) and returns a tuple with:

        1. A dict with meta info:

            a_path, b_path, similarity_index, rename_from, rename_to,
            old_mode, new_mode, new_file_mode, deleted_file_mode,
            a_blob_id, b_blob_id, b_mode, a_file, b_file

        2. An iterator yielding lines with simple HTML markup.

        :raises Exception: when the chunk does not start with a recognizable
            header for the configured vcs, or the header is followed by
            something other than a hunk or a binary patch body
        """
        # pick the header regex matching the diff flavour
        match = None
        if self.vcs == 'git':
            match = self._git_header_re.match(diff_chunk)
        elif self.vcs == 'hg':
            match = self._hg_header_re.match(diff_chunk)
        if match is None:
            raise Exception('diff not recognized as valid %s diff' % self.vcs)
        meta_info = match.groupdict()
        rest = diff_chunk[match.end():]
        # after the header only a hunk ('@'), or a binary patch body
        # ('literal '/'delta '), or nothing at all may follow
        if rest and not rest.startswith('@') and not rest.startswith('literal ') and not rest.startswith('delta '):
            raise Exception('cannot parse %s diff header: %r followed by %r' % (self.vcs, diff_chunk[:match.end()], rest[:1000]))
        # lazily escape line by line so the diff limit check in _escaper
        # can trigger while iterating
        diff_lines = (self._escaper(m.group(0)) for m in re.finditer(r'.*\n|.+$', rest)) # don't split on \r as str.splitlines do
        return meta_info, diff_lines
309
306
    def _parse_gitdiff(self, inline_diff):
        """Parse self._diff and return a list of dicts with meta info and chunks for each file.
        If diff is truncated, wrap it in LimitedDiffContainer.
        Optionally, do an extra pass and to extra markup of one-liner changes.
        """
        _files = [] # list of dicts with meta info and chunks
        # identity by default; replaced with a LimitedDiffContainer factory
        # when the diff limit is exceeded
        diff_container = lambda arg: arg

        # split the whole diff into per-file sections at 'diff --git' markers
        starts = [m.start() for m in self._diff_git_re.finditer(self._diff)]
        starts.append(len(self._diff))

        for start, end in zip(starts, starts[1:]):
            # buffer() gives a zero-copy slice of the (possibly big) diff text
            head, diff_lines = self._get_header(buffer(self._diff, start, end - start))

            op = None
            stats = {
                'added': 0,
                'deleted': 0,
                'binary': False,
                'ops': {},
            }

            if head['deleted_file_mode']:
                op = 'D'
                stats['binary'] = True
                stats['ops'][DEL_FILENODE] = 'deleted file'

            elif head['new_file_mode']:
                op = 'A'
                stats['binary'] = True
                stats['ops'][NEW_FILENODE] = 'new file %s' % head['new_file_mode']
            else: # modify operation, can be cp, rename, chmod
                # CHMOD
                if head['new_mode'] and head['old_mode']:
                    op = 'M'
                    stats['binary'] = True
                    stats['ops'][CHMOD_FILENODE] = ('modified file chmod %s => %s'
                                        % (head['old_mode'], head['new_mode']))
                # RENAME
                if (head['rename_from'] and head['rename_to']
                      and head['rename_from'] != head['rename_to']):
                    op = 'R'
                    stats['binary'] = True
                    stats['ops'][RENAMED_FILENODE] = ('file renamed from %s to %s'
                                        % (head['rename_from'], head['rename_to']))
                # COPY
                if head.get('copy_from') and head.get('copy_to'):
                    op = 'M'
                    stats['binary'] = True
                    stats['ops'][COPIED_FILENODE] = ('file copied from %s to %s'
                                        % (head['copy_from'], head['copy_to']))
                # FALL BACK: detect missed old style add or remove
                if op is None:
                    if not head['a_file'] and head['b_file']:
                        op = 'A'
                        stats['binary'] = True
                        stats['ops'][NEW_FILENODE] = 'new file'

                    elif head['a_file'] and not head['b_file']:
                        op = 'D'
                        stats['binary'] = True
                        stats['ops'][DEL_FILENODE] = 'deleted file'

                # it's not ADD not DELETE
                if op is None:
                    op = 'M'
                    stats['binary'] = True
                    stats['ops'][MOD_FILENODE] = 'modified file'

            # a real non-binary diff
            if head['a_file'] or head['b_file']:
                try:
                    chunks, added, deleted = self._parse_lines(diff_lines)
                    stats['binary'] = False
                    stats['added'] = added
                    stats['deleted'] = deleted
                    # explicit mark that it's a modified file
                    if op == 'M':
                        stats['ops'][MOD_FILENODE] = 'modified file'

                except DiffLimitExceeded:
                    # too big: return what was parsed so far, wrapped so the
                    # caller can tell the diff was truncated
                    diff_container = lambda _diff: \
                        LimitedDiffContainer(self.diff_limit,
                                            self.cur_diff_size, _diff)
                    break
            else: # Git binary patch (or empty diff)
                # Git binary patch
                if head['bin_patch']:
                    stats['ops'][BIN_FILENODE] = 'binary diff not shown'
                chunks = []

            if op == 'D' and chunks:
                # a way of seeing deleted content could perhaps be nice - but
                # not with the current UI
                chunks = []

            # prepend a pseudo-chunk of context lines describing the non-MOD
            # operations (new/deleted/renamed/copied/chmod/binary)
            chunks.insert(0, [{
                'old_lineno': '',
                'new_lineno': '',
                'action': 'context',
                'line': msg,
                } for _op, msg in stats['ops'].iteritems()
                       if _op not in [MOD_FILENODE]])

            _files.append({
                'old_filename': head['a_path'],
                'filename': head['b_path'],
                'old_revision': head['a_blob_id'],
                'new_revision': head['b_blob_id'],
                'chunks': chunks,
                'operation': op,
                'stats': stats,
            })

        if not inline_diff:
            return diff_container(_files)

        # highlight inline changes when one del is followed by one add
        for diff_data in _files:
            for chunk in diff_data['chunks']:
                lineiter = iter(chunk)
                try:
                    peekline = lineiter.next()
                    while True:
                        # find a first del line
                        while peekline['action'] != 'del':
                            peekline = lineiter.next()
                        delline = peekline
                        peekline = lineiter.next()
                        # if not followed by add, eat all following del lines
                        if peekline['action'] != 'add':
                            while peekline['action'] == 'del':
                                peekline = lineiter.next()
                            continue
                        # found an add - make sure it is the only one
                        addline = peekline
                        try:
                            peekline = lineiter.next()
                        except StopIteration:
                            # add was last line - ok
                            self._highlight_inline_diff(delline, addline)
                            raise
                        if peekline['action'] != 'add':
                            # there was only one add line - ok
                            self._highlight_inline_diff(delline, addline)
                except StopIteration:
                    pass

        return diff_container(_files)
459
456
    def _parse_lines(self, diff_lines):
        """
        Given an iterator of diff body lines, parse them and return a dict per
        line and added/removed totals.

        Returns ``(chunks, added, deleted)`` where chunks is a list of hunks,
        each hunk a list of per-line dicts with keys old_lineno, new_lineno,
        action ('add'/'del'/'unmod'/'context') and line.

        :raises Exception: on malformed hunks or line counts that don't match
            the hunk header
        :raises DiffLimitExceeded: propagated from the escaping iterator
        """
        added = deleted = 0
        old_line = old_end = new_line = new_end = None

        try:
            chunks = []
            line = diff_lines.next()

            while True:
                # each iteration consumes one '@@ ... @@' hunk
                lines = []
                chunks.append(lines)

                match = self._chunk_re.match(line)

                if not match:
                    raise Exception('error parsing diff @@ line %r' % line)

                gr = match.groups()
                # missing lengths default to 1 per unified diff convention
                (old_line, old_end,
                 new_line, new_end) = [int(x or 1) for x in gr[:-1]]
                old_line -= 1
                new_line -= 1

                context = len(gr) == 5
                # convert start+length into exclusive end line numbers
                old_end += old_line
                new_end += new_line

                if context:
                    # skip context only if it's first line
                    if int(gr[0]) > 1:
                        lines.append({
                            'old_lineno': '...',
                            'new_lineno': '...',
                            'action': 'context',
                            'line': line,
                        })

                line = diff_lines.next()

                while old_line < old_end or new_line < new_end:
                    if not line:
                        raise Exception('error parsing diff - empty line at -%s+%s' % (old_line, new_line))

                    affects_old = affects_new = False

                    # first character of the line tells which side(s) it hits
                    command = line[0]
                    if command == '+':
                        affects_new = True
                        action = 'add'
                        added += 1
                    elif command == '-':
                        affects_old = True
                        action = 'del'
                        deleted += 1
                    elif command == ' ':
                        affects_old = affects_new = True
                        action = 'unmod'
                    else:
                        raise Exception('error parsing diff - unknown command in line %r at -%s+%s' % (line, old_line, new_line))

                    if not self._newline_marker.match(line):
                        # bools add as 0/1, advancing only the affected side
                        old_line += affects_old
                        new_line += affects_new
                        lines.append({
                            'old_lineno': affects_old and old_line or '',
                            'new_lineno': affects_new and new_line or '',
                            'action': action,
                            'line': line[1:],
                        })

                    line = diff_lines.next()

                    if self._newline_marker.match(line):
                        # we need to append to lines, since this is not
                        # counted in the line specs of diff
                        lines.append({
                            'old_lineno': '...',
                            'new_lineno': '...',
                            'action': 'context',
                            'line': line,
                        })
                        line = diff_lines.next()
                if old_line > old_end:
                    raise Exception('error parsing diff - more than %s "-" lines at -%s+%s' % (old_end, old_line, new_line))
                if new_line > new_end:
                    raise Exception('error parsing diff - more than %s "+" lines at -%s+%s' % (new_end, old_line, new_line))
        except StopIteration:
            # normal termination: the line iterator is exhausted
            pass
        if old_line != old_end or new_line != new_end:
            raise Exception('diff processing broken when old %s<>%s or new %s<>%s line %r' % (old_line, old_end, new_line, new_end, line))

        return chunks, added, deleted
556
553
557 def _safe_id(self, idstring):
554 def _safe_id(self, idstring):
558 """Make a string safe for including in an id attribute.
555 """Make a string safe for including in an id attribute.
559
556
560 The HTML spec says that id attributes 'must begin with
557 The HTML spec says that id attributes 'must begin with
561 a letter ([A-Za-z]) and may be followed by any number
558 a letter ([A-Za-z]) and may be followed by any number
562 of letters, digits ([0-9]), hyphens ("-"), underscores
559 of letters, digits ([0-9]), hyphens ("-"), underscores
563 ("_"), colons (":"), and periods (".")'. These regexps
560 ("_"), colons (":"), and periods (".")'. These regexps
564 are slightly over-zealous, in that they remove colons
561 are slightly over-zealous, in that they remove colons
565 and periods unnecessarily.
562 and periods unnecessarily.
566
563
567 Whitespace is transformed into underscores, and then
564 Whitespace is transformed into underscores, and then
568 anything which is not a hyphen or a character that
565 anything which is not a hyphen or a character that
569 matches \w (alphanumerics and underscore) is removed.
566 matches \w (alphanumerics and underscore) is removed.
570
567
571 """
568 """
572 # Transform all whitespace to underscore
569 # Transform all whitespace to underscore
573 idstring = re.sub(r'\s', "_", idstring)
570 idstring = re.sub(r'\s', "_", idstring)
574 # Remove everything that is not a hyphen or a member of \w
571 # Remove everything that is not a hyphen or a member of \w
575 idstring = re.sub(r'(?!-)\W', "", idstring).lower()
572 idstring = re.sub(r'(?!-)\W', "", idstring).lower()
576 return idstring
573 return idstring
577
574
578 def prepare(self, inline_diff=True):
579 """
580 Prepare the passed udiff for HTML rendering. It'll return a list
581 of dicts with diff information
582 """
583 parsed = self._parse_gitdiff(inline_diff=inline_diff)
584 self.parsed = True
585 self.parsed_diff = parsed
586 return parsed
587
588 def as_html(self, table_class='code-difftable', line_class='line',
575 def as_html(self, table_class='code-difftable', line_class='line',
589 old_lineno_class='lineno old', new_lineno_class='lineno new',
576 old_lineno_class='lineno old', new_lineno_class='lineno new',
590 no_lineno_class='lineno',
577 no_lineno_class='lineno',
591 code_class='code', enable_comments=False, parsed_lines=None):
578 code_class='code', enable_comments=False, parsed_lines=None):
592 """
579 """
593 Return given diff as html table with customized css classes
580 Return given diff as html table with customized css classes
594 """
581 """
595 def _link_to_if(condition, label, url):
582 def _link_to_if(condition, label, url):
596 """
583 """
597 Generates a link if condition is meet or just the label if not.
584 Generates a link if condition is meet or just the label if not.
598 """
585 """
599
586
600 if condition:
587 if condition:
601 return '''<a href="%(url)s">%(label)s</a>''' % {
588 return '''<a href="%(url)s">%(label)s</a>''' % {
602 'url': url,
589 'url': url,
603 'label': label
590 'label': label
604 }
591 }
605 else:
592 else:
606 return label
593 return label
607 if not self.parsed:
608 self.prepare()
609
594
610 diff_lines = self.parsed_diff
595 diff_lines = self.parsed
611 if parsed_lines:
596 if parsed_lines:
612 diff_lines = parsed_lines
597 diff_lines = parsed_lines
613
598
614 _html_empty = True
599 _html_empty = True
615 _html = []
600 _html = []
616 _html.append('''<table class="%(table_class)s">\n''' % {
601 _html.append('''<table class="%(table_class)s">\n''' % {
617 'table_class': table_class
602 'table_class': table_class
618 })
603 })
619
604
620 for diff in diff_lines:
605 for diff in diff_lines:
621 for line in diff['chunks']:
606 for line in diff['chunks']:
622 _html_empty = False
607 _html_empty = False
623 for change in line:
608 for change in line:
624 _html.append('''<tr class="%(lc)s %(action)s">\n''' % {
609 _html.append('''<tr class="%(lc)s %(action)s">\n''' % {
625 'lc': line_class,
610 'lc': line_class,
626 'action': change['action']
611 'action': change['action']
627 })
612 })
628 anchor_old_id = ''
613 anchor_old_id = ''
629 anchor_new_id = ''
614 anchor_new_id = ''
630 anchor_old = "%(filename)s_o%(oldline_no)s" % {
615 anchor_old = "%(filename)s_o%(oldline_no)s" % {
631 'filename': self._safe_id(diff['filename']),
616 'filename': self._safe_id(diff['filename']),
632 'oldline_no': change['old_lineno']
617 'oldline_no': change['old_lineno']
633 }
618 }
634 anchor_new = "%(filename)s_n%(oldline_no)s" % {
619 anchor_new = "%(filename)s_n%(oldline_no)s" % {
635 'filename': self._safe_id(diff['filename']),
620 'filename': self._safe_id(diff['filename']),
636 'oldline_no': change['new_lineno']
621 'oldline_no': change['new_lineno']
637 }
622 }
638 cond_old = (change['old_lineno'] != '...' and
623 cond_old = (change['old_lineno'] != '...' and
639 change['old_lineno'])
624 change['old_lineno'])
640 cond_new = (change['new_lineno'] != '...' and
625 cond_new = (change['new_lineno'] != '...' and
641 change['new_lineno'])
626 change['new_lineno'])
642 no_lineno = (change['old_lineno'] == '...' and
627 no_lineno = (change['old_lineno'] == '...' and
643 change['new_lineno'] == '...')
628 change['new_lineno'] == '...')
644 if cond_old:
629 if cond_old:
645 anchor_old_id = 'id="%s"' % anchor_old
630 anchor_old_id = 'id="%s"' % anchor_old
646 if cond_new:
631 if cond_new:
647 anchor_new_id = 'id="%s"' % anchor_new
632 anchor_new_id = 'id="%s"' % anchor_new
648 ###########################################################
633 ###########################################################
649 # OLD LINE NUMBER
634 # OLD LINE NUMBER
650 ###########################################################
635 ###########################################################
651 _html.append('''\t<td %(a_id)s class="%(olc)s" %(colspan)s>''' % {
636 _html.append('''\t<td %(a_id)s class="%(olc)s" %(colspan)s>''' % {
652 'a_id': anchor_old_id,
637 'a_id': anchor_old_id,
653 'olc': no_lineno_class if no_lineno else old_lineno_class,
638 'olc': no_lineno_class if no_lineno else old_lineno_class,
654 'colspan': 'colspan="2"' if no_lineno else ''
639 'colspan': 'colspan="2"' if no_lineno else ''
655 })
640 })
656
641
657 _html.append('''%(link)s''' % {
642 _html.append('''%(link)s''' % {
658 'link': _link_to_if(not no_lineno, change['old_lineno'],
643 'link': _link_to_if(not no_lineno, change['old_lineno'],
659 '#%s' % anchor_old)
644 '#%s' % anchor_old)
660 })
645 })
661 _html.append('''</td>\n''')
646 _html.append('''</td>\n''')
662 ###########################################################
647 ###########################################################
663 # NEW LINE NUMBER
648 # NEW LINE NUMBER
664 ###########################################################
649 ###########################################################
665
650
666 if not no_lineno:
651 if not no_lineno:
667 _html.append('''\t<td %(a_id)s class="%(nlc)s">''' % {
652 _html.append('''\t<td %(a_id)s class="%(nlc)s">''' % {
668 'a_id': anchor_new_id,
653 'a_id': anchor_new_id,
669 'nlc': new_lineno_class
654 'nlc': new_lineno_class
670 })
655 })
671
656
672 _html.append('''%(link)s''' % {
657 _html.append('''%(link)s''' % {
673 'link': _link_to_if(True, change['new_lineno'],
658 'link': _link_to_if(True, change['new_lineno'],
674 '#%s' % anchor_new)
659 '#%s' % anchor_new)
675 })
660 })
676 _html.append('''</td>\n''')
661 _html.append('''</td>\n''')
677 ###########################################################
662 ###########################################################
678 # CODE
663 # CODE
679 ###########################################################
664 ###########################################################
680 comments = '' if enable_comments else 'no-comment'
665 comments = '' if enable_comments else 'no-comment'
681 _html.append('''\t<td class="%(cc)s %(inc)s">''' % {
666 _html.append('''\t<td class="%(cc)s %(inc)s">''' % {
682 'cc': code_class,
667 'cc': code_class,
683 'inc': comments
668 'inc': comments
684 })
669 })
685 _html.append('''\n\t\t<div class="add-bubble"><div>&nbsp;</div></div><pre>%(code)s</pre>\n''' % {
670 _html.append('''\n\t\t<div class="add-bubble"><div>&nbsp;</div></div><pre>%(code)s</pre>\n''' % {
686 'code': change['line']
671 'code': change['line']
687 })
672 })
688
673
689 _html.append('''\t</td>''')
674 _html.append('''\t</td>''')
690 _html.append('''\n</tr>\n''')
675 _html.append('''\n</tr>\n''')
691 _html.append('''</table>''')
676 _html.append('''</table>''')
692 if _html_empty:
677 if _html_empty:
693 return None
678 return None
694 return ''.join(_html)
679 return ''.join(_html)
695
680
696 def stat(self):
681 def stat(self):
697 """
682 """
698 Returns tuple of added, and removed lines for this instance
683 Returns tuple of added, and removed lines for this instance
699 """
684 """
700 return self.adds, self.removes
685 return self.adds, self.removes
@@ -1,317 +1,315 b''
1 from kallithea.tests.base import *
1 from kallithea.tests.base import *
2 from kallithea.lib.diffs import DiffProcessor, NEW_FILENODE, DEL_FILENODE, \
2 from kallithea.lib.diffs import DiffProcessor, NEW_FILENODE, DEL_FILENODE, \
3 MOD_FILENODE, RENAMED_FILENODE, CHMOD_FILENODE, BIN_FILENODE, COPIED_FILENODE
3 MOD_FILENODE, RENAMED_FILENODE, CHMOD_FILENODE, BIN_FILENODE, COPIED_FILENODE
4 from kallithea.tests.fixture import Fixture
4 from kallithea.tests.fixture import Fixture
5
5
6 fixture = Fixture()
6 fixture = Fixture()
7
7
8
8
9 DIFF_FIXTURES = {
9 DIFF_FIXTURES = {
10 'hg_diff_add_single_binary_file.diff': [
10 'hg_diff_add_single_binary_file.diff': [
11 ('US Warszawa.jpg', 'A',
11 ('US Warszawa.jpg', 'A',
12 {'added': 0,
12 {'added': 0,
13 'deleted': 0,
13 'deleted': 0,
14 'binary': True,
14 'binary': True,
15 'ops': {NEW_FILENODE: 'new file 100755',
15 'ops': {NEW_FILENODE: 'new file 100755',
16 BIN_FILENODE: 'binary diff not shown'}}),
16 BIN_FILENODE: 'binary diff not shown'}}),
17 ],
17 ],
18 'hg_diff_mod_single_binary_file.diff': [
18 'hg_diff_mod_single_binary_file.diff': [
19 ('US Warszawa.jpg', 'M',
19 ('US Warszawa.jpg', 'M',
20 {'added': 0,
20 {'added': 0,
21 'deleted': 0,
21 'deleted': 0,
22 'binary': True,
22 'binary': True,
23 'ops': {MOD_FILENODE: 'modified file',
23 'ops': {MOD_FILENODE: 'modified file',
24 BIN_FILENODE: 'binary diff not shown'}}),
24 BIN_FILENODE: 'binary diff not shown'}}),
25 ],
25 ],
26
26
27 'hg_diff_mod_single_file_and_rename_and_chmod.diff': [
27 'hg_diff_mod_single_file_and_rename_and_chmod.diff': [
28 ('README', 'R',
28 ('README', 'R',
29 {'added': 3,
29 {'added': 3,
30 'deleted': 0,
30 'deleted': 0,
31 'binary': False,
31 'binary': False,
32 'ops': {RENAMED_FILENODE: 'file renamed from README.rst to README',
32 'ops': {RENAMED_FILENODE: 'file renamed from README.rst to README',
33 CHMOD_FILENODE: 'modified file chmod 100755 => 100644'}}),
33 CHMOD_FILENODE: 'modified file chmod 100755 => 100644'}}),
34 ],
34 ],
35 'hg_diff_mod_file_and_rename.diff': [
35 'hg_diff_mod_file_and_rename.diff': [
36 ('README.rst', 'R',
36 ('README.rst', 'R',
37 {'added': 3,
37 {'added': 3,
38 'deleted': 0,
38 'deleted': 0,
39 'binary': False,
39 'binary': False,
40 'ops': {RENAMED_FILENODE: 'file renamed from README to README.rst'}}),
40 'ops': {RENAMED_FILENODE: 'file renamed from README to README.rst'}}),
41 ],
41 ],
42 'hg_diff_del_single_binary_file.diff': [
42 'hg_diff_del_single_binary_file.diff': [
43 ('US Warszawa.jpg', 'D',
43 ('US Warszawa.jpg', 'D',
44 {'added': 0,
44 {'added': 0,
45 'deleted': 0,
45 'deleted': 0,
46 'binary': True,
46 'binary': True,
47 'ops': {DEL_FILENODE: 'deleted file',
47 'ops': {DEL_FILENODE: 'deleted file',
48 BIN_FILENODE: 'binary diff not shown'}}),
48 BIN_FILENODE: 'binary diff not shown'}}),
49 ],
49 ],
50 'hg_diff_chmod_and_mod_single_binary_file.diff': [
50 'hg_diff_chmod_and_mod_single_binary_file.diff': [
51 ('gravatar.png', 'M',
51 ('gravatar.png', 'M',
52 {'added': 0,
52 {'added': 0,
53 'deleted': 0,
53 'deleted': 0,
54 'binary': True,
54 'binary': True,
55 'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755',
55 'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755',
56 BIN_FILENODE: 'binary diff not shown'}}),
56 BIN_FILENODE: 'binary diff not shown'}}),
57 ],
57 ],
58 'hg_diff_chmod.diff': [
58 'hg_diff_chmod.diff': [
59 ('file', 'M',
59 ('file', 'M',
60 {'added': 0,
60 {'added': 0,
61 'deleted': 0,
61 'deleted': 0,
62 'binary': True,
62 'binary': True,
63 'ops': {CHMOD_FILENODE: 'modified file chmod 100755 => 100644'}}),
63 'ops': {CHMOD_FILENODE: 'modified file chmod 100755 => 100644'}}),
64 ],
64 ],
65 'hg_diff_rename_file.diff': [
65 'hg_diff_rename_file.diff': [
66 ('file_renamed', 'R',
66 ('file_renamed', 'R',
67 {'added': 0,
67 {'added': 0,
68 'deleted': 0,
68 'deleted': 0,
69 'binary': True,
69 'binary': True,
70 'ops': {RENAMED_FILENODE: 'file renamed from file to file_renamed'}}),
70 'ops': {RENAMED_FILENODE: 'file renamed from file to file_renamed'}}),
71 ],
71 ],
72 'hg_diff_rename_and_chmod_file.diff': [
72 'hg_diff_rename_and_chmod_file.diff': [
73 ('README', 'R',
73 ('README', 'R',
74 {'added': 0,
74 {'added': 0,
75 'deleted': 0,
75 'deleted': 0,
76 'binary': True,
76 'binary': True,
77 'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755',
77 'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755',
78 RENAMED_FILENODE: 'file renamed from README.rst to README'}}),
78 RENAMED_FILENODE: 'file renamed from README.rst to README'}}),
79 ],
79 ],
80 'hg_diff_binary_and_normal.diff': [
80 'hg_diff_binary_and_normal.diff': [
81 ('img/baseline-10px.png', 'A',
81 ('img/baseline-10px.png', 'A',
82 {'added': 0,
82 {'added': 0,
83 'deleted': 0,
83 'deleted': 0,
84 'binary': True,
84 'binary': True,
85 'ops': {NEW_FILENODE: 'new file 100644',
85 'ops': {NEW_FILENODE: 'new file 100644',
86 BIN_FILENODE: 'binary diff not shown'}}),
86 BIN_FILENODE: 'binary diff not shown'}}),
87 ('img/baseline-20px.png', 'D',
87 ('img/baseline-20px.png', 'D',
88 {'added': 0,
88 {'added': 0,
89 'deleted': 0,
89 'deleted': 0,
90 'binary': True,
90 'binary': True,
91 'ops': {DEL_FILENODE: 'deleted file',
91 'ops': {DEL_FILENODE: 'deleted file',
92 BIN_FILENODE: 'binary diff not shown'}}),
92 BIN_FILENODE: 'binary diff not shown'}}),
93 ('index.html', 'M',
93 ('index.html', 'M',
94 {'added': 3,
94 {'added': 3,
95 'deleted': 2,
95 'deleted': 2,
96 'binary': False,
96 'binary': False,
97 'ops': {MOD_FILENODE: 'modified file'}}),
97 'ops': {MOD_FILENODE: 'modified file'}}),
98 ('js/global.js', 'D',
98 ('js/global.js', 'D',
99 {'added': 0,
99 {'added': 0,
100 'deleted': 75,
100 'deleted': 75,
101 'binary': False,
101 'binary': False,
102 'ops': {DEL_FILENODE: 'deleted file'}}),
102 'ops': {DEL_FILENODE: 'deleted file'}}),
103 ('js/jquery/hashgrid.js', 'A',
103 ('js/jquery/hashgrid.js', 'A',
104 {'added': 340,
104 {'added': 340,
105 'deleted': 0,
105 'deleted': 0,
106 'binary': False,
106 'binary': False,
107 'ops': {NEW_FILENODE: 'new file 100755'}}),
107 'ops': {NEW_FILENODE: 'new file 100755'}}),
108 ('less/docs.less', 'M',
108 ('less/docs.less', 'M',
109 {'added': 34,
109 {'added': 34,
110 'deleted': 0,
110 'deleted': 0,
111 'binary': False,
111 'binary': False,
112 'ops': {MOD_FILENODE: 'modified file'}}),
112 'ops': {MOD_FILENODE: 'modified file'}}),
113 ('less/scaffolding.less', 'M',
113 ('less/scaffolding.less', 'M',
114 {'added': 1,
114 {'added': 1,
115 'deleted': 3,
115 'deleted': 3,
116 'binary': False,
116 'binary': False,
117 'ops': {MOD_FILENODE: 'modified file'}}),
117 'ops': {MOD_FILENODE: 'modified file'}}),
118 ('readme.markdown', 'M',
118 ('readme.markdown', 'M',
119 {'added': 1,
119 {'added': 1,
120 'deleted': 10,
120 'deleted': 10,
121 'binary': False,
121 'binary': False,
122 'ops': {MOD_FILENODE: 'modified file'}}),
122 'ops': {MOD_FILENODE: 'modified file'}}),
123 ],
123 ],
124 'git_diff_chmod.diff': [
124 'git_diff_chmod.diff': [
125 ('work-horus.xls', 'M',
125 ('work-horus.xls', 'M',
126 {'added': 0,
126 {'added': 0,
127 'deleted': 0,
127 'deleted': 0,
128 'binary': True,
128 'binary': True,
129 'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755'}})
129 'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755'}})
130 ],
130 ],
131 'git_diff_rename_file.diff': [
131 'git_diff_rename_file.diff': [
132 ('file.xls', 'R',
132 ('file.xls', 'R',
133 {'added': 0,
133 {'added': 0,
134 'deleted': 0,
134 'deleted': 0,
135 'binary': True,
135 'binary': True,
136 'ops': {RENAMED_FILENODE: 'file renamed from work-horus.xls to file.xls'}}),
136 'ops': {RENAMED_FILENODE: 'file renamed from work-horus.xls to file.xls'}}),
137 ('files/var/www/favicon.ico/DEFAULT',
137 ('files/var/www/favicon.ico/DEFAULT',
138 'R',
138 'R',
139 {'added': 0,
139 {'added': 0,
140 'binary': True,
140 'binary': True,
141 'deleted': 0,
141 'deleted': 0,
142 'ops': {4: 'file renamed from files/var/www/favicon.ico to files/var/www/favicon.ico/DEFAULT',
142 'ops': {4: 'file renamed from files/var/www/favicon.ico to files/var/www/favicon.ico/DEFAULT',
143 6: 'modified file chmod 100644 => 100755'}})
143 6: 'modified file chmod 100644 => 100755'}})
144 ],
144 ],
145 'git_diff_mod_single_binary_file.diff': [
145 'git_diff_mod_single_binary_file.diff': [
146 ('US Warszawa.jpg', 'M',
146 ('US Warszawa.jpg', 'M',
147 {'added': 0,
147 {'added': 0,
148 'deleted': 0,
148 'deleted': 0,
149 'binary': True,
149 'binary': True,
150 'ops': {MOD_FILENODE: 'modified file',
150 'ops': {MOD_FILENODE: 'modified file',
151 BIN_FILENODE: 'binary diff not shown'}})
151 BIN_FILENODE: 'binary diff not shown'}})
152 ],
152 ],
153 'git_diff_binary_and_normal.diff': [
153 'git_diff_binary_and_normal.diff': [
154 ('img/baseline-10px.png', 'A',
154 ('img/baseline-10px.png', 'A',
155 {'added': 0,
155 {'added': 0,
156 'deleted': 0,
156 'deleted': 0,
157 'binary': True,
157 'binary': True,
158 'ops': {NEW_FILENODE: 'new file 100644',
158 'ops': {NEW_FILENODE: 'new file 100644',
159 BIN_FILENODE: 'binary diff not shown'}}),
159 BIN_FILENODE: 'binary diff not shown'}}),
160 ('img/baseline-20px.png', 'D',
160 ('img/baseline-20px.png', 'D',
161 {'added': 0,
161 {'added': 0,
162 'deleted': 0,
162 'deleted': 0,
163 'binary': True,
163 'binary': True,
164 'ops': {DEL_FILENODE: 'deleted file',
164 'ops': {DEL_FILENODE: 'deleted file',
165 BIN_FILENODE: 'binary diff not shown'}}),
165 BIN_FILENODE: 'binary diff not shown'}}),
166 ('index.html', 'M',
166 ('index.html', 'M',
167 {'added': 3,
167 {'added': 3,
168 'deleted': 2,
168 'deleted': 2,
169 'binary': False,
169 'binary': False,
170 'ops': {MOD_FILENODE: 'modified file'}}),
170 'ops': {MOD_FILENODE: 'modified file'}}),
171 ('js/global.js', 'D',
171 ('js/global.js', 'D',
172 {'added': 0,
172 {'added': 0,
173 'deleted': 75,
173 'deleted': 75,
174 'binary': False,
174 'binary': False,
175 'ops': {DEL_FILENODE: 'deleted file'}}),
175 'ops': {DEL_FILENODE: 'deleted file'}}),
176 ('js/jquery/hashgrid.js', 'A',
176 ('js/jquery/hashgrid.js', 'A',
177 {'added': 340,
177 {'added': 340,
178 'deleted': 0,
178 'deleted': 0,
179 'binary': False,
179 'binary': False,
180 'ops': {NEW_FILENODE: 'new file 100755'}}),
180 'ops': {NEW_FILENODE: 'new file 100755'}}),
181 ('less/docs.less', 'M',
181 ('less/docs.less', 'M',
182 {'added': 34,
182 {'added': 34,
183 'deleted': 0,
183 'deleted': 0,
184 'binary': False,
184 'binary': False,
185 'ops': {MOD_FILENODE: 'modified file'}}),
185 'ops': {MOD_FILENODE: 'modified file'}}),
186 ('less/scaffolding.less', 'M',
186 ('less/scaffolding.less', 'M',
187 {'added': 1,
187 {'added': 1,
188 'deleted': 3,
188 'deleted': 3,
189 'binary': False,
189 'binary': False,
190 'ops': {MOD_FILENODE: 'modified file'}}),
190 'ops': {MOD_FILENODE: 'modified file'}}),
191 ('readme.markdown', 'M',
191 ('readme.markdown', 'M',
192 {'added': 1,
192 {'added': 1,
193 'deleted': 10,
193 'deleted': 10,
194 'binary': False,
194 'binary': False,
195 'ops': {MOD_FILENODE: 'modified file'}}),
195 'ops': {MOD_FILENODE: 'modified file'}}),
196 ],
196 ],
197 'diff_with_diff_data.diff': [
197 'diff_with_diff_data.diff': [
198 ('vcs/backends/base.py', 'M',
198 ('vcs/backends/base.py', 'M',
199 {'added': 18,
199 {'added': 18,
200 'deleted': 2,
200 'deleted': 2,
201 'binary': False,
201 'binary': False,
202 'ops': {MOD_FILENODE: 'modified file'}}),
202 'ops': {MOD_FILENODE: 'modified file'}}),
203 ('vcs/backends/git/repository.py', 'M',
203 ('vcs/backends/git/repository.py', 'M',
204 {'added': 46,
204 {'added': 46,
205 'deleted': 15,
205 'deleted': 15,
206 'binary': False,
206 'binary': False,
207 'ops': {MOD_FILENODE: 'modified file'}}),
207 'ops': {MOD_FILENODE: 'modified file'}}),
208 ('vcs/backends/hg.py', 'M',
208 ('vcs/backends/hg.py', 'M',
209 {'added': 22,
209 {'added': 22,
210 'deleted': 3,
210 'deleted': 3,
211 'binary': False,
211 'binary': False,
212 'ops': {MOD_FILENODE: 'modified file'}}),
212 'ops': {MOD_FILENODE: 'modified file'}}),
213 ('vcs/tests/test_git.py', 'M',
213 ('vcs/tests/test_git.py', 'M',
214 {'added': 5,
214 {'added': 5,
215 'deleted': 5,
215 'deleted': 5,
216 'binary': False,
216 'binary': False,
217 'ops': {MOD_FILENODE: 'modified file'}}),
217 'ops': {MOD_FILENODE: 'modified file'}}),
218 ('vcs/tests/test_repository.py', 'M',
218 ('vcs/tests/test_repository.py', 'M',
219 {'added': 174,
219 {'added': 174,
220 'deleted': 2,
220 'deleted': 2,
221 'binary': False,
221 'binary': False,
222 'ops': {MOD_FILENODE: 'modified file'}}),
222 'ops': {MOD_FILENODE: 'modified file'}}),
223 ],
223 ],
224 'git_diff_modify_binary_file.diff': [
224 'git_diff_modify_binary_file.diff': [
225 ('file.name', 'M',
225 ('file.name', 'M',
226 {'added': 0,
226 {'added': 0,
227 'deleted': 0,
227 'deleted': 0,
228 'binary': True,
228 'binary': True,
229 'ops': {MOD_FILENODE: 'modified file',
229 'ops': {MOD_FILENODE: 'modified file',
230 BIN_FILENODE: 'binary diff not shown'}})
230 BIN_FILENODE: 'binary diff not shown'}})
231 ],
231 ],
232 'hg_diff_copy_file.diff': [
232 'hg_diff_copy_file.diff': [
233 ('file2', 'M',
233 ('file2', 'M',
234 {'added': 0,
234 {'added': 0,
235 'deleted': 0,
235 'deleted': 0,
236 'binary': True,
236 'binary': True,
237 'ops': {COPIED_FILENODE: 'file copied from file1 to file2'}}),
237 'ops': {COPIED_FILENODE: 'file copied from file1 to file2'}}),
238 ],
238 ],
239 'hg_diff_copy_and_modify_file.diff': [
239 'hg_diff_copy_and_modify_file.diff': [
240 ('file3', 'M',
240 ('file3', 'M',
241 {'added': 1,
241 {'added': 1,
242 'deleted': 0,
242 'deleted': 0,
243 'binary': False,
243 'binary': False,
244 'ops': {COPIED_FILENODE: 'file copied from file2 to file3',
244 'ops': {COPIED_FILENODE: 'file copied from file2 to file3',
245 MOD_FILENODE: 'modified file'}}),
245 MOD_FILENODE: 'modified file'}}),
246 ],
246 ],
247 'hg_diff_copy_and_chmod_file.diff': [
247 'hg_diff_copy_and_chmod_file.diff': [
248 ('file4', 'M',
248 ('file4', 'M',
249 {'added': 0,
249 {'added': 0,
250 'deleted': 0,
250 'deleted': 0,
251 'binary': True,
251 'binary': True,
252 'ops': {COPIED_FILENODE: 'file copied from file3 to file4',
252 'ops': {COPIED_FILENODE: 'file copied from file3 to file4',
253 CHMOD_FILENODE: 'modified file chmod 100644 => 100755'}}),
253 CHMOD_FILENODE: 'modified file chmod 100644 => 100755'}}),
254 ],
254 ],
255 'hg_diff_copy_chmod_and_edit_file.diff': [
255 'hg_diff_copy_chmod_and_edit_file.diff': [
256 ('file5', 'M',
256 ('file5', 'M',
257 {'added': 2,
257 {'added': 2,
258 'deleted': 1,
258 'deleted': 1,
259 'binary': False,
259 'binary': False,
260 'ops': {COPIED_FILENODE: 'file copied from file4 to file5',
260 'ops': {COPIED_FILENODE: 'file copied from file4 to file5',
261 CHMOD_FILENODE: 'modified file chmod 100755 => 100644',
261 CHMOD_FILENODE: 'modified file chmod 100755 => 100644',
262 MOD_FILENODE: 'modified file'}}),
262 MOD_FILENODE: 'modified file'}}),
263 ],
263 ],
264 'hg_diff_rename_space_cr.diff': [
264 'hg_diff_rename_space_cr.diff': [
265 ('oh yes', 'R',
265 ('oh yes', 'R',
266 {'added': 3,
266 {'added': 3,
267 'deleted': 2,
267 'deleted': 2,
268 'binary': False,
268 'binary': False,
269 'ops': {RENAMED_FILENODE: 'file renamed from oh no to oh yes'}}),
269 'ops': {RENAMED_FILENODE: 'file renamed from oh no to oh yes'}}),
270 ],
270 ],
271 }
271 }
272
272
273
273
274 class TestDiffLib(TestController):
274 class TestDiffLib(TestController):
275
275
276 @parametrize('diff_fixture', DIFF_FIXTURES)
276 @parametrize('diff_fixture', DIFF_FIXTURES)
277 def test_diff(self, diff_fixture):
277 def test_diff(self, diff_fixture):
278 diff = fixture.load_resource(diff_fixture, strip=False)
278 raw_diff = fixture.load_resource(diff_fixture, strip=False)
279 vcs = 'hg'
279 vcs = 'hg'
280 if diff_fixture.startswith('git_'):
280 if diff_fixture.startswith('git_'):
281 vcs = 'git'
281 vcs = 'git'
282 diff_proc = DiffProcessor(diff, vcs=vcs)
282 diff_processor = DiffProcessor(raw_diff, vcs=vcs)
283 diff_proc_d = diff_proc.prepare()
283 data = [(x['filename'], x['operation'], x['stats']) for x in diff_processor.parsed]
284 data = [(x['filename'], x['operation'], x['stats']) for x in diff_proc_d]
285 expected_data = DIFF_FIXTURES[diff_fixture]
284 expected_data = DIFF_FIXTURES[diff_fixture]
286 assert expected_data == data
285 assert expected_data == data
287
286
288 def test_diff_markup(self):
287 def test_diff_markup(self):
289 diff = fixture.load_resource('markuptest.diff', strip=False)
288 raw_diff = fixture.load_resource('markuptest.diff', strip=False)
290 diff_proc = DiffProcessor(diff)
289 diff_processor = DiffProcessor(raw_diff)
291 diff_proc_d = diff_proc.prepare()
290 chunks = diff_processor.parsed[0]['chunks']
292 chunks = diff_proc_d[0]['chunks']
293 assert not chunks[0]
291 assert not chunks[0]
294 #from pprint import pprint; pprint(chunks[1])
292 #from pprint import pprint; pprint(chunks[1])
295 l = ['\n']
293 l = ['\n']
296 for d in chunks[1]:
294 for d in chunks[1]:
297 l.append('%(action)-7s %(new_lineno)3s %(old_lineno)3s %(line)r\n' % d)
295 l.append('%(action)-7s %(new_lineno)3s %(old_lineno)3s %(line)r\n' % d)
298 s = ''.join(l)
296 s = ''.join(l)
299 print s
297 print s
300 assert s == r'''
298 assert s == r'''
301 context ... ... u'@@ -51,6 +51,13 @@\n'
299 context ... ... u'@@ -51,6 +51,13 @@\n'
302 unmod 51 51 u'<u>\t</u>begin();\n'
300 unmod 51 51 u'<u>\t</u>begin();\n'
303 unmod 52 52 u'<u>\t</u>\n'
301 unmod 52 52 u'<u>\t</u>\n'
304 add 53 u'<u>\t</u>int foo;<u class="cr"></u>\n'
302 add 53 u'<u>\t</u>int foo;<u class="cr"></u>\n'
305 add 54 u'<u>\t</u>int bar; <u class="cr"></u>\n'
303 add 54 u'<u>\t</u>int bar; <u class="cr"></u>\n'
306 add 55 u'<u>\t</u>int baz;<u>\t</u><u class="cr"></u>\n'
304 add 55 u'<u>\t</u>int baz;<u>\t</u><u class="cr"></u>\n'
307 add 56 u'<u>\t</u>int space; <i></i>'
305 add 56 u'<u>\t</u>int space; <i></i>'
308 add 57 u'<u>\t</u>int tab;<u>\t</u>\n'
306 add 57 u'<u>\t</u>int tab;<u>\t</u>\n'
309 add 58 u'<u>\t</u>\n'
307 add 58 u'<u>\t</u>\n'
310 unmod 59 53 u' <i></i>'
308 unmod 59 53 u' <i></i>'
311 del 54 u'<u>\t</u>#define MAX_STEPS (48)\n'
309 del 54 u'<u>\t</u>#define MAX_STEPS (48)\n'
312 add 60 u'<u>\t</u><u class="cr"></u>\n'
310 add 60 u'<u>\t</u><u class="cr"></u>\n'
313 add 61 u'<u>\t</u>#define MAX_STEPS (64)<u class="cr"></u>\n'
311 add 61 u'<u>\t</u>#define MAX_STEPS (64)<u class="cr"></u>\n'
314 unmod 62 55 u'\n'
312 unmod 62 55 u'\n'
315 del 56 u'<u>\t</u>#define MIN_STEPS (<del>48</del>)\n'
313 del 56 u'<u>\t</u>#define MIN_STEPS (<del>48</del>)\n'
316 add 63 u'<u>\t</u>#define MIN_STEPS (<ins>42</ins>)\n'
314 add 63 u'<u>\t</u>#define MIN_STEPS (<ins>42</ins>)\n'
317 '''
315 '''
General Comments 0
You need to be logged in to leave comments. Login now