Bumped mercurial version to 2.3...
marcink
r2684:2b6939a7 beta
@@ -1,442 +1,443 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """
2 """
3 rhodecode.controllers.changeset
3 rhodecode.controllers.changeset
4 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
4 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
5
5
6 changeset controller for pylons showing changes between
6 changeset controller for pylons showing changes between
7 revisions
7 revisions
8
8
9 :created_on: Apr 25, 2010
9 :created_on: Apr 25, 2010
10 :author: marcink
10 :author: marcink
11 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
12 :license: GPLv3, see COPYING for more details.
12 :license: GPLv3, see COPYING for more details.
13 """
13 """
14 # This program is free software: you can redistribute it and/or modify
14 # This program is free software: you can redistribute it and/or modify
15 # it under the terms of the GNU General Public License as published by
15 # it under the terms of the GNU General Public License as published by
16 # the Free Software Foundation, either version 3 of the License, or
16 # the Free Software Foundation, either version 3 of the License, or
17 # (at your option) any later version.
17 # (at your option) any later version.
18 #
18 #
19 # This program is distributed in the hope that it will be useful,
19 # This program is distributed in the hope that it will be useful,
20 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # but WITHOUT ANY WARRANTY; without even the implied warranty of
21 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
22 # GNU General Public License for more details.
22 # GNU General Public License for more details.
23 #
23 #
24 # You should have received a copy of the GNU General Public License
24 # You should have received a copy of the GNU General Public License
25 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 # along with this program. If not, see <http://www.gnu.org/licenses/>.
26 import logging
26 import logging
27 import traceback
27 import traceback
28 from collections import defaultdict
28 from collections import defaultdict
29 from webob.exc import HTTPForbidden
29 from webob.exc import HTTPForbidden
30
30
31 from pylons import tmpl_context as c, url, request, response
31 from pylons import tmpl_context as c, url, request, response
32 from pylons.i18n.translation import _
32 from pylons.i18n.translation import _
33 from pylons.controllers.util import redirect
33 from pylons.controllers.util import redirect
34 from pylons.decorators import jsonify
34 from pylons.decorators import jsonify
35
35
36 from rhodecode.lib.vcs.exceptions import RepositoryError, ChangesetError, \
36 from rhodecode.lib.vcs.exceptions import RepositoryError, ChangesetError, \
37 ChangesetDoesNotExistError
37 ChangesetDoesNotExistError
38 from rhodecode.lib.vcs.nodes import FileNode
38 from rhodecode.lib.vcs.nodes import FileNode
39
39
40 import rhodecode.lib.helpers as h
40 import rhodecode.lib.helpers as h
41 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
41 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
42 from rhodecode.lib.base import BaseRepoController, render
42 from rhodecode.lib.base import BaseRepoController, render
43 from rhodecode.lib.utils import EmptyChangeset, action_logger
43 from rhodecode.lib.utils import action_logger
44 from rhodecode.lib.compat import OrderedDict
44 from rhodecode.lib.compat import OrderedDict
45 from rhodecode.lib import diffs
45 from rhodecode.lib import diffs
46 from rhodecode.model.db import ChangesetComment, ChangesetStatus
46 from rhodecode.model.db import ChangesetComment, ChangesetStatus
47 from rhodecode.model.comment import ChangesetCommentsModel
47 from rhodecode.model.comment import ChangesetCommentsModel
48 from rhodecode.model.changeset_status import ChangesetStatusModel
48 from rhodecode.model.changeset_status import ChangesetStatusModel
49 from rhodecode.model.meta import Session
49 from rhodecode.model.meta import Session
50 from rhodecode.lib.diffs import wrapped_diff
50 from rhodecode.lib.diffs import wrapped_diff
51 from rhodecode.model.repo import RepoModel
51 from rhodecode.model.repo import RepoModel
52 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
52 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
53 from rhodecode.lib.vcs.backends.base import EmptyChangeset
53
54
54 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
55
56
56
57
57 def _update_with_GET(params, GET):
58 def _update_with_GET(params, GET):
58 for k in ['diff1', 'diff2', 'diff']:
59 for k in ['diff1', 'diff2', 'diff']:
59 params[k] += GET.getall(k)
60 params[k] += GET.getall(k)
60
61
61
62
62 def anchor_url(revision, path, GET):
63 def anchor_url(revision, path, GET):
63 fid = h.FID(revision, path)
64 fid = h.FID(revision, path)
64 return h.url.current(anchor=fid, **dict(GET))
65 return h.url.current(anchor=fid, **dict(GET))
65
66
66
67
67 def get_ignore_ws(fid, GET):
68 def get_ignore_ws(fid, GET):
68 ig_ws_global = GET.get('ignorews')
69 ig_ws_global = GET.get('ignorews')
69 ig_ws = filter(lambda k: k.startswith('WS'), GET.getall(fid))
70 ig_ws = filter(lambda k: k.startswith('WS'), GET.getall(fid))
70 if ig_ws:
71 if ig_ws:
71 try:
72 try:
72 return int(ig_ws[0].split(':')[-1])
73 return int(ig_ws[0].split(':')[-1])
73 except:
74 except:
74 pass
75 pass
75 return ig_ws_global
76 return ig_ws_global
76
77
77
78
78 def _ignorews_url(GET, fileid=None):
79 def _ignorews_url(GET, fileid=None):
79 fileid = str(fileid) if fileid else None
80 fileid = str(fileid) if fileid else None
80 params = defaultdict(list)
81 params = defaultdict(list)
81 _update_with_GET(params, GET)
82 _update_with_GET(params, GET)
82 lbl = _('show white space')
83 lbl = _('show white space')
83 ig_ws = get_ignore_ws(fileid, GET)
84 ig_ws = get_ignore_ws(fileid, GET)
84 ln_ctx = get_line_ctx(fileid, GET)
85 ln_ctx = get_line_ctx(fileid, GET)
85 # global option
86 # global option
86 if fileid is None:
87 if fileid is None:
87 if ig_ws is None:
88 if ig_ws is None:
88 params['ignorews'] += [1]
89 params['ignorews'] += [1]
89 lbl = _('ignore white space')
90 lbl = _('ignore white space')
90 ctx_key = 'context'
91 ctx_key = 'context'
91 ctx_val = ln_ctx
92 ctx_val = ln_ctx
92 # per file options
93 # per file options
93 else:
94 else:
94 if ig_ws is None:
95 if ig_ws is None:
95 params[fileid] += ['WS:1']
96 params[fileid] += ['WS:1']
96 lbl = _('ignore white space')
97 lbl = _('ignore white space')
97
98
98 ctx_key = fileid
99 ctx_key = fileid
99 ctx_val = 'C:%s' % ln_ctx
100 ctx_val = 'C:%s' % ln_ctx
100 # if we have passed in ln_ctx pass it along to our params
101 # if we have passed in ln_ctx pass it along to our params
101 if ln_ctx:
102 if ln_ctx:
102 params[ctx_key] += [ctx_val]
103 params[ctx_key] += [ctx_val]
103
104
104 params['anchor'] = fileid
105 params['anchor'] = fileid
105 img = h.image(h.url('/images/icons/text_strikethrough.png'), lbl, class_='icon')
106 img = h.image(h.url('/images/icons/text_strikethrough.png'), lbl, class_='icon')
106 return h.link_to(img, h.url.current(**params), title=lbl, class_='tooltip')
107 return h.link_to(img, h.url.current(**params), title=lbl, class_='tooltip')
107
108
108
109
109 def get_line_ctx(fid, GET):
110 def get_line_ctx(fid, GET):
110 ln_ctx_global = GET.get('context')
111 ln_ctx_global = GET.get('context')
111 ln_ctx = filter(lambda k: k.startswith('C'), GET.getall(fid))
112 ln_ctx = filter(lambda k: k.startswith('C'), GET.getall(fid))
112
113
113 if ln_ctx:
114 if ln_ctx:
114 retval = ln_ctx[0].split(':')[-1]
115 retval = ln_ctx[0].split(':')[-1]
115 else:
116 else:
116 retval = ln_ctx_global
117 retval = ln_ctx_global
117
118
118 try:
119 try:
119 return int(retval)
120 return int(retval)
120 except:
121 except:
121 return
122 return
122
123
123
124
124 def _context_url(GET, fileid=None):
125 def _context_url(GET, fileid=None):
125 """
126 """
126 Generates url for context lines
127 Generates url for context lines
127
128
128 :param fileid:
129 :param fileid:
129 """
130 """
130
131
131 fileid = str(fileid) if fileid else None
132 fileid = str(fileid) if fileid else None
132 ig_ws = get_ignore_ws(fileid, GET)
133 ig_ws = get_ignore_ws(fileid, GET)
133 ln_ctx = (get_line_ctx(fileid, GET) or 3) * 2
134 ln_ctx = (get_line_ctx(fileid, GET) or 3) * 2
134
135
135 params = defaultdict(list)
136 params = defaultdict(list)
136 _update_with_GET(params, GET)
137 _update_with_GET(params, GET)
137
138
138 # global option
139 # global option
139 if fileid is None:
140 if fileid is None:
140 if ln_ctx > 0:
141 if ln_ctx > 0:
141 params['context'] += [ln_ctx]
142 params['context'] += [ln_ctx]
142
143
143 if ig_ws:
144 if ig_ws:
144 ig_ws_key = 'ignorews'
145 ig_ws_key = 'ignorews'
145 ig_ws_val = 1
146 ig_ws_val = 1
146
147
147 # per file option
148 # per file option
148 else:
149 else:
149 params[fileid] += ['C:%s' % ln_ctx]
150 params[fileid] += ['C:%s' % ln_ctx]
150 ig_ws_key = fileid
151 ig_ws_key = fileid
151 ig_ws_val = 'WS:%s' % 1
152 ig_ws_val = 'WS:%s' % 1
152
153
153 if ig_ws:
154 if ig_ws:
154 params[ig_ws_key] += [ig_ws_val]
155 params[ig_ws_key] += [ig_ws_val]
155
156
156 lbl = _('%s line context') % ln_ctx
157 lbl = _('%s line context') % ln_ctx
157
158
158 params['anchor'] = fileid
159 params['anchor'] = fileid
159 img = h.image(h.url('/images/icons/table_add.png'), lbl, class_='icon')
160 img = h.image(h.url('/images/icons/table_add.png'), lbl, class_='icon')
160 return h.link_to(img, h.url.current(**params), title=lbl, class_='tooltip')
161 return h.link_to(img, h.url.current(**params), title=lbl, class_='tooltip')
161
162
162
163
163 class ChangesetController(BaseRepoController):
164 class ChangesetController(BaseRepoController):
164
165
165 @LoginRequired()
166 @LoginRequired()
166 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
167 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
167 'repository.admin')
168 'repository.admin')
168 def __before__(self):
169 def __before__(self):
169 super(ChangesetController, self).__before__()
170 super(ChangesetController, self).__before__()
170 c.affected_files_cut_off = 60
171 c.affected_files_cut_off = 60
171 repo_model = RepoModel()
172 repo_model = RepoModel()
172 c.users_array = repo_model.get_users_js()
173 c.users_array = repo_model.get_users_js()
173 c.users_groups_array = repo_model.get_users_groups_js()
174 c.users_groups_array = repo_model.get_users_groups_js()
174
175
175 def index(self, revision):
176 def index(self, revision):
176
177
177 c.anchor_url = anchor_url
178 c.anchor_url = anchor_url
178 c.ignorews_url = _ignorews_url
179 c.ignorews_url = _ignorews_url
179 c.context_url = _context_url
180 c.context_url = _context_url
180 limit_off = request.GET.get('fulldiff')
181 limit_off = request.GET.get('fulldiff')
181 # get ranges of revisions if present
182 # get ranges of revisions if present
182 rev_range = revision.split('...')[:2]
183 rev_range = revision.split('...')[:2]
183 enable_comments = True
184 enable_comments = True
184 try:
185 try:
185 if len(rev_range) == 2:
186 if len(rev_range) == 2:
186 enable_comments = False
187 enable_comments = False
187 rev_start = rev_range[0]
188 rev_start = rev_range[0]
188 rev_end = rev_range[1]
189 rev_end = rev_range[1]
189 rev_ranges = c.rhodecode_repo.get_changesets(start=rev_start,
190 rev_ranges = c.rhodecode_repo.get_changesets(start=rev_start,
190 end=rev_end)
191 end=rev_end)
191 else:
192 else:
192 rev_ranges = [c.rhodecode_repo.get_changeset(revision)]
193 rev_ranges = [c.rhodecode_repo.get_changeset(revision)]
193
194
194 c.cs_ranges = list(rev_ranges)
195 c.cs_ranges = list(rev_ranges)
195 if not c.cs_ranges:
196 if not c.cs_ranges:
196 raise RepositoryError('Changeset range returned empty result')
197 raise RepositoryError('Changeset range returned empty result')
197
198
198 except (RepositoryError, ChangesetDoesNotExistError, Exception), e:
199 except (RepositoryError, ChangesetDoesNotExistError, Exception), e:
199 log.error(traceback.format_exc())
200 log.error(traceback.format_exc())
200 h.flash(str(e), category='warning')
201 h.flash(str(e), category='warning')
201 return redirect(url('home'))
202 return redirect(url('home'))
202
203
203 c.changes = OrderedDict()
204 c.changes = OrderedDict()
204
205
205 c.lines_added = 0 # count of lines added
206 c.lines_added = 0 # count of lines added
206 c.lines_deleted = 0 # count of lines removed
207 c.lines_deleted = 0 # count of lines removed
207
208
208 cumulative_diff = 0
209 cumulative_diff = 0
209 c.cut_off = False # defines if cut off limit is reached
210 c.cut_off = False # defines if cut off limit is reached
210 c.changeset_statuses = ChangesetStatus.STATUSES
211 c.changeset_statuses = ChangesetStatus.STATUSES
211 c.comments = []
212 c.comments = []
212 c.statuses = []
213 c.statuses = []
213 c.inline_comments = []
214 c.inline_comments = []
214 c.inline_cnt = 0
215 c.inline_cnt = 0
215 # Iterate over ranges (default changeset view is always one changeset)
216 # Iterate over ranges (default changeset view is always one changeset)
216 for changeset in c.cs_ranges:
217 for changeset in c.cs_ranges:
217
218
218 c.statuses.extend([ChangesetStatusModel()\
219 c.statuses.extend([ChangesetStatusModel()\
219 .get_status(c.rhodecode_db_repo.repo_id,
220 .get_status(c.rhodecode_db_repo.repo_id,
220 changeset.raw_id)])
221 changeset.raw_id)])
221
222
222 c.comments.extend(ChangesetCommentsModel()\
223 c.comments.extend(ChangesetCommentsModel()\
223 .get_comments(c.rhodecode_db_repo.repo_id,
224 .get_comments(c.rhodecode_db_repo.repo_id,
224 revision=changeset.raw_id))
225 revision=changeset.raw_id))
225 inlines = ChangesetCommentsModel()\
226 inlines = ChangesetCommentsModel()\
226 .get_inline_comments(c.rhodecode_db_repo.repo_id,
227 .get_inline_comments(c.rhodecode_db_repo.repo_id,
227 revision=changeset.raw_id)
228 revision=changeset.raw_id)
228 c.inline_comments.extend(inlines)
229 c.inline_comments.extend(inlines)
229 c.changes[changeset.raw_id] = []
230 c.changes[changeset.raw_id] = []
230 try:
231 try:
231 changeset_parent = changeset.parents[0]
232 changeset_parent = changeset.parents[0]
232 except IndexError:
233 except IndexError:
233 changeset_parent = None
234 changeset_parent = None
234
235
235 #==================================================================
236 #==================================================================
236 # ADDED FILES
237 # ADDED FILES
237 #==================================================================
238 #==================================================================
238 for node in changeset.added:
239 for node in changeset.added:
239 fid = h.FID(revision, node.path)
240 fid = h.FID(revision, node.path)
240 line_context_lcl = get_line_ctx(fid, request.GET)
241 line_context_lcl = get_line_ctx(fid, request.GET)
241 ign_whitespace_lcl = get_ignore_ws(fid, request.GET)
242 ign_whitespace_lcl = get_ignore_ws(fid, request.GET)
242 lim = self.cut_off_limit
243 lim = self.cut_off_limit
243 if cumulative_diff > self.cut_off_limit:
244 if cumulative_diff > self.cut_off_limit:
244 lim = -1 if limit_off is None else None
245 lim = -1 if limit_off is None else None
245 size, cs1, cs2, diff, st = wrapped_diff(
246 size, cs1, cs2, diff, st = wrapped_diff(
246 filenode_old=None,
247 filenode_old=None,
247 filenode_new=node,
248 filenode_new=node,
248 cut_off_limit=lim,
249 cut_off_limit=lim,
249 ignore_whitespace=ign_whitespace_lcl,
250 ignore_whitespace=ign_whitespace_lcl,
250 line_context=line_context_lcl,
251 line_context=line_context_lcl,
251 enable_comments=enable_comments
252 enable_comments=enable_comments
252 )
253 )
253 cumulative_diff += size
254 cumulative_diff += size
254 c.lines_added += st[0]
255 c.lines_added += st[0]
255 c.lines_deleted += st[1]
256 c.lines_deleted += st[1]
256 c.changes[changeset.raw_id].append(
257 c.changes[changeset.raw_id].append(
257 ('added', node, diff, cs1, cs2, st)
258 ('added', node, diff, cs1, cs2, st)
258 )
259 )
259
260
260 #==================================================================
261 #==================================================================
261 # CHANGED FILES
262 # CHANGED FILES
262 #==================================================================
263 #==================================================================
263 for node in changeset.changed:
264 for node in changeset.changed:
264 try:
265 try:
265 filenode_old = changeset_parent.get_node(node.path)
266 filenode_old = changeset_parent.get_node(node.path)
266 except ChangesetError:
267 except ChangesetError:
267 log.warning('Unable to fetch parent node for diff')
268 log.warning('Unable to fetch parent node for diff')
268 filenode_old = FileNode(node.path, '', EmptyChangeset())
269 filenode_old = FileNode(node.path, '', EmptyChangeset())
269
270
270 fid = h.FID(revision, node.path)
271 fid = h.FID(revision, node.path)
271 line_context_lcl = get_line_ctx(fid, request.GET)
272 line_context_lcl = get_line_ctx(fid, request.GET)
272 ign_whitespace_lcl = get_ignore_ws(fid, request.GET)
273 ign_whitespace_lcl = get_ignore_ws(fid, request.GET)
273 lim = self.cut_off_limit
274 lim = self.cut_off_limit
274 if cumulative_diff > self.cut_off_limit:
275 if cumulative_diff > self.cut_off_limit:
275 lim = -1 if limit_off is None else None
276 lim = -1 if limit_off is None else None
276 size, cs1, cs2, diff, st = wrapped_diff(
277 size, cs1, cs2, diff, st = wrapped_diff(
277 filenode_old=filenode_old,
278 filenode_old=filenode_old,
278 filenode_new=node,
279 filenode_new=node,
279 cut_off_limit=lim,
280 cut_off_limit=lim,
280 ignore_whitespace=ign_whitespace_lcl,
281 ignore_whitespace=ign_whitespace_lcl,
281 line_context=line_context_lcl,
282 line_context=line_context_lcl,
282 enable_comments=enable_comments
283 enable_comments=enable_comments
283 )
284 )
284 cumulative_diff += size
285 cumulative_diff += size
285 c.lines_added += st[0]
286 c.lines_added += st[0]
286 c.lines_deleted += st[1]
287 c.lines_deleted += st[1]
287 c.changes[changeset.raw_id].append(
288 c.changes[changeset.raw_id].append(
288 ('changed', node, diff, cs1, cs2, st)
289 ('changed', node, diff, cs1, cs2, st)
289 )
290 )
290 #==================================================================
291 #==================================================================
291 # REMOVED FILES
292 # REMOVED FILES
292 #==================================================================
293 #==================================================================
293 for node in changeset.removed:
294 for node in changeset.removed:
294 c.changes[changeset.raw_id].append(
295 c.changes[changeset.raw_id].append(
295 ('removed', node, None, None, None, (0, 0))
296 ('removed', node, None, None, None, (0, 0))
296 )
297 )
297
298
298 # count inline comments
299 # count inline comments
299 for __, lines in c.inline_comments:
300 for __, lines in c.inline_comments:
300 for comments in lines.values():
301 for comments in lines.values():
301 c.inline_cnt += len(comments)
302 c.inline_cnt += len(comments)
302
303
303 if len(c.cs_ranges) == 1:
304 if len(c.cs_ranges) == 1:
304 c.changeset = c.cs_ranges[0]
305 c.changeset = c.cs_ranges[0]
305 c.changes = c.changes[c.changeset.raw_id]
306 c.changes = c.changes[c.changeset.raw_id]
306
307
307 return render('changeset/changeset.html')
308 return render('changeset/changeset.html')
308 else:
309 else:
309 return render('changeset/changeset_range.html')
310 return render('changeset/changeset_range.html')
310
311
311 def raw_changeset(self, revision):
312 def raw_changeset(self, revision):
312
313
313 method = request.GET.get('diff', 'show')
314 method = request.GET.get('diff', 'show')
314 ignore_whitespace = request.GET.get('ignorews') == '1'
315 ignore_whitespace = request.GET.get('ignorews') == '1'
315 line_context = request.GET.get('context', 3)
316 line_context = request.GET.get('context', 3)
316 try:
317 try:
317 c.scm_type = c.rhodecode_repo.alias
318 c.scm_type = c.rhodecode_repo.alias
318 c.changeset = c.rhodecode_repo.get_changeset(revision)
319 c.changeset = c.rhodecode_repo.get_changeset(revision)
319 except RepositoryError:
320 except RepositoryError:
320 log.error(traceback.format_exc())
321 log.error(traceback.format_exc())
321 return redirect(url('home'))
322 return redirect(url('home'))
322 else:
323 else:
323 try:
324 try:
324 c.changeset_parent = c.changeset.parents[0]
325 c.changeset_parent = c.changeset.parents[0]
325 except IndexError:
326 except IndexError:
326 c.changeset_parent = None
327 c.changeset_parent = None
327 c.changes = []
328 c.changes = []
328
329
329 for node in c.changeset.added:
330 for node in c.changeset.added:
330 filenode_old = FileNode(node.path, '')
331 filenode_old = FileNode(node.path, '')
331 if filenode_old.is_binary or node.is_binary:
332 if filenode_old.is_binary or node.is_binary:
332 diff = _('binary file') + '\n'
333 diff = _('binary file') + '\n'
333 else:
334 else:
334 f_gitdiff = diffs.get_gitdiff(filenode_old, node,
335 f_gitdiff = diffs.get_gitdiff(filenode_old, node,
335 ignore_whitespace=ignore_whitespace,
336 ignore_whitespace=ignore_whitespace,
336 context=line_context)
337 context=line_context)
337 diff = diffs.DiffProcessor(f_gitdiff,
338 diff = diffs.DiffProcessor(f_gitdiff,
338 format='gitdiff').raw_diff()
339 format='gitdiff').raw_diff()
339
340
340 cs1 = None
341 cs1 = None
341 cs2 = node.changeset.raw_id
342 cs2 = node.changeset.raw_id
342 c.changes.append(('added', node, diff, cs1, cs2))
343 c.changes.append(('added', node, diff, cs1, cs2))
343
344
344 for node in c.changeset.changed:
345 for node in c.changeset.changed:
345 filenode_old = c.changeset_parent.get_node(node.path)
346 filenode_old = c.changeset_parent.get_node(node.path)
346 if filenode_old.is_binary or node.is_binary:
347 if filenode_old.is_binary or node.is_binary:
347 diff = _('binary file')
348 diff = _('binary file')
348 else:
349 else:
349 f_gitdiff = diffs.get_gitdiff(filenode_old, node,
350 f_gitdiff = diffs.get_gitdiff(filenode_old, node,
350 ignore_whitespace=ignore_whitespace,
351 ignore_whitespace=ignore_whitespace,
351 context=line_context)
352 context=line_context)
352 diff = diffs.DiffProcessor(f_gitdiff,
353 diff = diffs.DiffProcessor(f_gitdiff,
353 format='gitdiff').raw_diff()
354 format='gitdiff').raw_diff()
354
355
355 cs1 = filenode_old.changeset.raw_id
356 cs1 = filenode_old.changeset.raw_id
356 cs2 = node.changeset.raw_id
357 cs2 = node.changeset.raw_id
357 c.changes.append(('changed', node, diff, cs1, cs2))
358 c.changes.append(('changed', node, diff, cs1, cs2))
358
359
359 response.content_type = 'text/plain'
360 response.content_type = 'text/plain'
360
361
361 if method == 'download':
362 if method == 'download':
362 response.content_disposition = 'attachment; filename=%s.patch' \
363 response.content_disposition = 'attachment; filename=%s.patch' \
363 % revision
364 % revision
364
365
365 c.parent_tmpl = ''.join(['# Parent %s\n' % x.raw_id
366 c.parent_tmpl = ''.join(['# Parent %s\n' % x.raw_id
366 for x in c.changeset.parents])
367 for x in c.changeset.parents])
367
368
368 c.diffs = ''
369 c.diffs = ''
369 for x in c.changes:
370 for x in c.changes:
370 c.diffs += x[2]
371 c.diffs += x[2]
371
372
372 return render('changeset/raw_changeset.html')
373 return render('changeset/raw_changeset.html')
373
374
374 @jsonify
375 @jsonify
375 def comment(self, repo_name, revision):
376 def comment(self, repo_name, revision):
376 status = request.POST.get('changeset_status')
377 status = request.POST.get('changeset_status')
377 change_status = request.POST.get('change_changeset_status')
378 change_status = request.POST.get('change_changeset_status')
378
379
379 comm = ChangesetCommentsModel().create(
380 comm = ChangesetCommentsModel().create(
380 text=request.POST.get('text'),
381 text=request.POST.get('text'),
381 repo=c.rhodecode_db_repo.repo_id,
382 repo=c.rhodecode_db_repo.repo_id,
382 user=c.rhodecode_user.user_id,
383 user=c.rhodecode_user.user_id,
383 revision=revision,
384 revision=revision,
384 f_path=request.POST.get('f_path'),
385 f_path=request.POST.get('f_path'),
385 line_no=request.POST.get('line'),
386 line_no=request.POST.get('line'),
386 status_change=(ChangesetStatus.get_status_lbl(status)
387 status_change=(ChangesetStatus.get_status_lbl(status)
387 if status and change_status else None)
388 if status and change_status else None)
388 )
389 )
389
390
390 # get status if set !
391 # get status if set !
391 if status and change_status:
392 if status and change_status:
392 # if latest status was from pull request and it's closed
393 # if latest status was from pull request and it's closed
393 # disallow changing status !
394 # disallow changing status !
394 # dont_allow_on_closed_pull_request = True !
395 # dont_allow_on_closed_pull_request = True !
395
396
396 try:
397 try:
397 ChangesetStatusModel().set_status(
398 ChangesetStatusModel().set_status(
398 c.rhodecode_db_repo.repo_id,
399 c.rhodecode_db_repo.repo_id,
399 status,
400 status,
400 c.rhodecode_user.user_id,
401 c.rhodecode_user.user_id,
401 comm,
402 comm,
402 revision=revision,
403 revision=revision,
403 dont_allow_on_closed_pull_request=True
404 dont_allow_on_closed_pull_request=True
404 )
405 )
405 except StatusChangeOnClosedPullRequestError:
406 except StatusChangeOnClosedPullRequestError:
406 log.error(traceback.format_exc())
407 log.error(traceback.format_exc())
407 msg = _('Changing status on a changeset associated with '
408 msg = _('Changing status on a changeset associated with '
408 'a closed pull request is not allowed')
409 'a closed pull request is not allowed')
409 h.flash(msg, category='warning')
410 h.flash(msg, category='warning')
410 return redirect(h.url('changeset_home', repo_name=repo_name,
411 return redirect(h.url('changeset_home', repo_name=repo_name,
411 revision=revision))
412 revision=revision))
412 action_logger(self.rhodecode_user,
413 action_logger(self.rhodecode_user,
413 'user_commented_revision:%s' % revision,
414 'user_commented_revision:%s' % revision,
414 c.rhodecode_db_repo, self.ip_addr, self.sa)
415 c.rhodecode_db_repo, self.ip_addr, self.sa)
415
416
416 Session().commit()
417 Session().commit()
417
418
418 if not request.environ.get('HTTP_X_PARTIAL_XHR'):
419 if not request.environ.get('HTTP_X_PARTIAL_XHR'):
419 return redirect(h.url('changeset_home', repo_name=repo_name,
420 return redirect(h.url('changeset_home', repo_name=repo_name,
420 revision=revision))
421 revision=revision))
421
422
422 data = {
423 data = {
423 'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))),
424 'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))),
424 }
425 }
425 if comm:
426 if comm:
426 c.co = comm
427 c.co = comm
427 data.update(comm.get_dict())
428 data.update(comm.get_dict())
428 data.update({'rendered_text':
429 data.update({'rendered_text':
429 render('changeset/changeset_comment_block.html')})
430 render('changeset/changeset_comment_block.html')})
430
431
431 return data
432 return data
432
433
433 @jsonify
434 @jsonify
434 def delete_comment(self, repo_name, comment_id):
435 def delete_comment(self, repo_name, comment_id):
435 co = ChangesetComment.get(comment_id)
436 co = ChangesetComment.get(comment_id)
436 owner = lambda: co.author.user_id == c.rhodecode_user.user_id
437 owner = lambda: co.author.user_id == c.rhodecode_user.user_id
437 if h.HasPermissionAny('hg.admin', 'repository.admin')() or owner():
438 if h.HasPermissionAny('hg.admin', 'repository.admin')() or owner():
438 ChangesetCommentsModel().delete(comment=co)
439 ChangesetCommentsModel().delete(comment=co)
439 Session().commit()
440 Session().commit()
440 return True
441 return True
441 else:
442 else:
442 raise HTTPForbidden()
443 raise HTTPForbidden()
@@ -1,132 +1,133 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """
2 """
3 rhodecode.controllers.compare
3 rhodecode.controllers.compare
4 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
4 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
5
5
6 compare controller for pylons showing differences between two
6 compare controller for pylons showing differences between two
7 repos, branches, bookmarks or tips
7 repos, branches, bookmarks or tips
8
8
9 :created_on: May 6, 2012
9 :created_on: May 6, 2012
10 :author: marcink
10 :author: marcink
11 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
12 :license: GPLv3, see COPYING for more details.
12 :license: GPLv3, see COPYING for more details.
13 """
13 """
14 # This program is free software: you can redistribute it and/or modify
14 # This program is free software: you can redistribute it and/or modify
15 # it under the terms of the GNU General Public License as published by
15 # it under the terms of the GNU General Public License as published by
16 # the Free Software Foundation, either version 3 of the License, or
16 # the Free Software Foundation, either version 3 of the License, or
17 # (at your option) any later version.
17 # (at your option) any later version.
18 #
18 #
19 # This program is distributed in the hope that it will be useful,
19 # This program is distributed in the hope that it will be useful,
20 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # but WITHOUT ANY WARRANTY; without even the implied warranty of
21 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
22 # GNU General Public License for more details.
22 # GNU General Public License for more details.
23 #
23 #
24 # You should have received a copy of the GNU General Public License
24 # You should have received a copy of the GNU General Public License
25 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 # along with this program. If not, see <http://www.gnu.org/licenses/>.
26 import logging
26 import logging
27 import traceback
27 import traceback
28
28
29 from webob.exc import HTTPNotFound
29 from webob.exc import HTTPNotFound
30 from pylons import request, response, session, tmpl_context as c, url
30 from pylons import request, response, session, tmpl_context as c, url
31 from pylons.controllers.util import abort, redirect
31 from pylons.controllers.util import abort, redirect
32 from pylons.i18n.translation import _
32 from pylons.i18n.translation import _
33
33
34 from rhodecode.lib.vcs.exceptions import EmptyRepositoryError, RepositoryError
34 from rhodecode.lib.vcs.exceptions import EmptyRepositoryError, RepositoryError
35 from rhodecode.lib import helpers as h
35 from rhodecode.lib import helpers as h
36 from rhodecode.lib.base import BaseRepoController, render
36 from rhodecode.lib.base import BaseRepoController, render
37 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
37 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
38 from rhodecode.lib import diffs
38 from rhodecode.lib import diffs
39
39
40 from rhodecode.model.db import Repository
40 from rhodecode.model.db import Repository
41 from rhodecode.model.pull_request import PullRequestModel
41 from rhodecode.model.pull_request import PullRequestModel
42
42
43 log = logging.getLogger(__name__)
43 log = logging.getLogger(__name__)
44
44
45
45
46 class CompareController(BaseRepoController):
46 class CompareController(BaseRepoController):
47
47
48 @LoginRequired()
48 @LoginRequired()
49 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
49 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
50 'repository.admin')
50 'repository.admin')
51 def __before__(self):
51 def __before__(self):
52 super(CompareController, self).__before__()
52 super(CompareController, self).__before__()
53
53
54 def __get_cs_or_redirect(self, rev, repo, redirect_after=True):
54 def __get_cs_or_redirect(self, rev, repo, redirect_after=True):
55 """
55 """
56 Safe way to get a changeset; if an error occurs it redirects to the
56 Safe way to get a changeset; if an error occurs it redirects to the
57 changeset with a proper message
57 changeset with a proper message
58
58
59 :param rev: revision to fetch
59 :param rev: revision to fetch
60 :param repo: repo instance
60 :param repo: repo instance
61 """
61 """
62
62
63 try:
63 try:
64 type_, rev = rev
64 type_, rev = rev
65 return repo.scm_instance.get_changeset(rev)
65 return repo.scm_instance.get_changeset(rev)
66 except EmptyRepositoryError, e:
66 except EmptyRepositoryError, e:
67 if not redirect_after:
67 if not redirect_after:
68 return None
68 return None
69 h.flash(h.literal(_('There are no changesets yet')),
69 h.flash(h.literal(_('There are no changesets yet')),
70 category='warning')
70 category='warning')
71 redirect(url('summary_home', repo_name=repo.repo_name))
71 redirect(url('summary_home', repo_name=repo.repo_name))
72
72
73 except RepositoryError, e:
73 except RepositoryError, e:
74 log.error(traceback.format_exc())
74 h.flash(str(e), category='warning')
75 h.flash(str(e), category='warning')
75 redirect(h.url('summary_home', repo_name=repo.repo_name))
76 redirect(h.url('summary_home', repo_name=repo.repo_name))
76
77
77 def index(self, org_ref_type, org_ref, other_ref_type, other_ref):
78 def index(self, org_ref_type, org_ref, other_ref_type, other_ref):
78
79
79 org_repo = c.rhodecode_db_repo.repo_name
80 org_repo = c.rhodecode_db_repo.repo_name
80 org_ref = (org_ref_type, org_ref)
81 org_ref = (org_ref_type, org_ref)
81 other_ref = (other_ref_type, other_ref)
82 other_ref = (other_ref_type, other_ref)
82 other_repo = request.GET.get('repo', org_repo)
83 other_repo = request.GET.get('repo', org_repo)
83
84
84 c.swap_url = h.url('compare_url', repo_name=other_repo,
85 c.swap_url = h.url('compare_url', repo_name=other_repo,
85 org_ref_type=other_ref[0], org_ref=other_ref[1],
86 org_ref_type=other_ref[0], org_ref=other_ref[1],
86 other_ref_type=org_ref[0], other_ref=org_ref[1],
87 other_ref_type=org_ref[0], other_ref=org_ref[1],
87 repo=org_repo)
88 repo=org_repo)
88
89
89 c.org_repo = org_repo = Repository.get_by_repo_name(org_repo)
90 c.org_repo = org_repo = Repository.get_by_repo_name(org_repo)
90 c.other_repo = other_repo = Repository.get_by_repo_name(other_repo)
91 c.other_repo = other_repo = Repository.get_by_repo_name(other_repo)
91
92
92 if c.org_repo is None or c.other_repo is None:
93 if c.org_repo is None or c.other_repo is None:
93 log.error('Could not find repo %s or %s' % (org_repo, other_repo))
94 log.error('Could not find repo %s or %s' % (org_repo, other_repo))
94 raise HTTPNotFound
95 raise HTTPNotFound
95
96
96 if c.org_repo.scm_instance.alias != 'hg':
97 if c.org_repo.scm_instance.alias != 'hg':
97 log.error('Review not available for GIT REPOS')
98 log.error('Review not available for GIT REPOS')
98 raise HTTPNotFound
99 raise HTTPNotFound
99
100
100 self.__get_cs_or_redirect(rev=org_ref, repo=org_repo)
101 self.__get_cs_or_redirect(rev=org_ref, repo=org_repo)
101 self.__get_cs_or_redirect(rev=other_ref, repo=other_repo)
102 self.__get_cs_or_redirect(rev=other_ref, repo=other_repo)
102
103
103 c.cs_ranges, discovery_data = PullRequestModel().get_compare_data(
104 c.cs_ranges, discovery_data = PullRequestModel().get_compare_data(
104 org_repo, org_ref, other_repo, other_ref
105 org_repo, org_ref, other_repo, other_ref
105 )
106 )
106
107
107 c.statuses = c.rhodecode_db_repo.statuses([x.raw_id for x in
108 c.statuses = c.rhodecode_db_repo.statuses([x.raw_id for x in
108 c.cs_ranges])
109 c.cs_ranges])
109 c.target_repo = c.repo_name
110 c.target_repo = c.repo_name
110 # defines that we need hidden inputs with changesets
111 # defines that we need hidden inputs with changesets
111 c.as_form = request.GET.get('as_form', False)
112 c.as_form = request.GET.get('as_form', False)
112 if request.environ.get('HTTP_X_PARTIAL_XHR'):
113 if request.environ.get('HTTP_X_PARTIAL_XHR'):
113 return render('compare/compare_cs.html')
114 return render('compare/compare_cs.html')
114
115
115 c.org_ref = org_ref[1]
116 c.org_ref = org_ref[1]
116 c.other_ref = other_ref[1]
117 c.other_ref = other_ref[1]
117 # diff needs to have swapped org with other to generate proper diff
118 # diff needs to have swapped org with other to generate proper diff
118 _diff = diffs.differ(other_repo, other_ref, org_repo, org_ref,
119 _diff = diffs.differ(other_repo, other_ref, org_repo, org_ref,
119 discovery_data)
120 discovery_data)
120 diff_processor = diffs.DiffProcessor(_diff, format='gitdiff')
121 diff_processor = diffs.DiffProcessor(_diff, format='gitdiff')
121 _parsed = diff_processor.prepare()
122 _parsed = diff_processor.prepare()
122
123
123 c.files = []
124 c.files = []
124 c.changes = {}
125 c.changes = {}
125
126
126 for f in _parsed:
127 for f in _parsed:
127 fid = h.FID('', f['filename'])
128 fid = h.FID('', f['filename'])
128 c.files.append([fid, f['operation'], f['filename'], f['stats']])
129 c.files.append([fid, f['operation'], f['filename'], f['stats']])
129 diff = diff_processor.as_html(enable_comments=False, diff_lines=[f])
130 diff = diff_processor.as_html(enable_comments=False, diff_lines=[f])
130 c.changes[fid] = [f['operation'], f['filename'], diff]
131 c.changes[fid] = [f['operation'], f['filename'], diff]
131
132
132 return render('compare/compare_diff.html')
133 return render('compare/compare_diff.html')
@@ -1,502 +1,502 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """
2 """
3 rhodecode.controllers.files
3 rhodecode.controllers.files
4 ~~~~~~~~~~~~~~~~~~~~~~~~~~~
4 ~~~~~~~~~~~~~~~~~~~~~~~~~~~
5
5
6 Files controller for RhodeCode
6 Files controller for RhodeCode
7
7
8 :created_on: Apr 21, 2010
8 :created_on: Apr 21, 2010
9 :author: marcink
9 :author: marcink
10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 :license: GPLv3, see COPYING for more details.
11 :license: GPLv3, see COPYING for more details.
12 """
12 """
13 # This program is free software: you can redistribute it and/or modify
13 # This program is free software: you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation, either version 3 of the License, or
15 # the Free Software Foundation, either version 3 of the License, or
16 # (at your option) any later version.
16 # (at your option) any later version.
17 #
17 #
18 # This program is distributed in the hope that it will be useful,
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
21 # GNU General Public License for more details.
22 #
22 #
23 # You should have received a copy of the GNU General Public License
23 # You should have received a copy of the GNU General Public License
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 from __future__ import with_statement
25 from __future__ import with_statement
26 import os
26 import os
27 import logging
27 import logging
28 import traceback
28 import traceback
29 import tempfile
29 import tempfile
30
30
31 from pylons import request, response, tmpl_context as c, url
31 from pylons import request, response, tmpl_context as c, url
32 from pylons.i18n.translation import _
32 from pylons.i18n.translation import _
33 from pylons.controllers.util import redirect
33 from pylons.controllers.util import redirect
34 from pylons.decorators import jsonify
34 from pylons.decorators import jsonify
35
35
36 from rhodecode.lib import diffs
36 from rhodecode.lib import diffs
37 from rhodecode.lib import helpers as h
37 from rhodecode.lib import helpers as h
38
38
39 from rhodecode.lib.compat import OrderedDict
39 from rhodecode.lib.compat import OrderedDict
40 from rhodecode.lib.utils2 import convert_line_endings, detect_mode, safe_str
40 from rhodecode.lib.utils2 import convert_line_endings, detect_mode, safe_str
41 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
41 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
42 from rhodecode.lib.base import BaseRepoController, render
42 from rhodecode.lib.base import BaseRepoController, render
43 from rhodecode.lib.utils import EmptyChangeset
43 from rhodecode.lib.vcs.backends.base import EmptyChangeset
44 from rhodecode.lib.vcs.conf import settings
44 from rhodecode.lib.vcs.conf import settings
45 from rhodecode.lib.vcs.exceptions import RepositoryError, \
45 from rhodecode.lib.vcs.exceptions import RepositoryError, \
46 ChangesetDoesNotExistError, EmptyRepositoryError, \
46 ChangesetDoesNotExistError, EmptyRepositoryError, \
47 ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError
47 ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError
48 from rhodecode.lib.vcs.nodes import FileNode
48 from rhodecode.lib.vcs.nodes import FileNode
49
49
50 from rhodecode.model.repo import RepoModel
50 from rhodecode.model.repo import RepoModel
51 from rhodecode.model.scm import ScmModel
51 from rhodecode.model.scm import ScmModel
52 from rhodecode.model.db import Repository
52 from rhodecode.model.db import Repository
53
53
54 from rhodecode.controllers.changeset import anchor_url, _ignorews_url,\
54 from rhodecode.controllers.changeset import anchor_url, _ignorews_url,\
55 _context_url, get_line_ctx, get_ignore_ws
55 _context_url, get_line_ctx, get_ignore_ws
56
56
57
57
58 log = logging.getLogger(__name__)
58 log = logging.getLogger(__name__)
59
59
60
60
61 class FilesController(BaseRepoController):
61 class FilesController(BaseRepoController):
62
62
63 def __before__(self):
63 def __before__(self):
64 super(FilesController, self).__before__()
64 super(FilesController, self).__before__()
65 c.cut_off_limit = self.cut_off_limit
65 c.cut_off_limit = self.cut_off_limit
66
66
67 def __get_cs_or_redirect(self, rev, repo_name, redirect_after=True):
67 def __get_cs_or_redirect(self, rev, repo_name, redirect_after=True):
68 """
68 """
69 Safe way to get a changeset; if an error occurs it redirects to tip
69 Safe way to get a changeset; if an error occurs it redirects to tip
70 with a proper message
70 with a proper message
71
71
72 :param rev: revision to fetch
72 :param rev: revision to fetch
73 :param repo_name: repo name to redirect after
73 :param repo_name: repo name to redirect after
74 """
74 """
75
75
76 try:
76 try:
77 return c.rhodecode_repo.get_changeset(rev)
77 return c.rhodecode_repo.get_changeset(rev)
78 except EmptyRepositoryError, e:
78 except EmptyRepositoryError, e:
79 if not redirect_after:
79 if not redirect_after:
80 return None
80 return None
81 url_ = url('files_add_home',
81 url_ = url('files_add_home',
82 repo_name=c.repo_name,
82 repo_name=c.repo_name,
83 revision=0, f_path='')
83 revision=0, f_path='')
84 add_new = '<a href="%s">[%s]</a>' % (url_, _('add new'))
84 add_new = '<a href="%s">[%s]</a>' % (url_, _('add new'))
85 h.flash(h.literal(_('There are no files yet %s') % add_new),
85 h.flash(h.literal(_('There are no files yet %s') % add_new),
86 category='warning')
86 category='warning')
87 redirect(h.url('summary_home', repo_name=repo_name))
87 redirect(h.url('summary_home', repo_name=repo_name))
88
88
89 except RepositoryError, e:
89 except RepositoryError, e:
90 h.flash(str(e), category='warning')
90 h.flash(str(e), category='warning')
91 redirect(h.url('files_home', repo_name=repo_name, revision='tip'))
91 redirect(h.url('files_home', repo_name=repo_name, revision='tip'))
92
92
93 def __get_filenode_or_redirect(self, repo_name, cs, path):
93 def __get_filenode_or_redirect(self, repo_name, cs, path):
94 """
94 """
95 Returns file_node, if error occurs or given path is directory,
95 Returns file_node, if error occurs or given path is directory,
96 it'll redirect to top level path
96 it'll redirect to top level path
97
97
98 :param repo_name: repo_name
98 :param repo_name: repo_name
99 :param cs: given changeset
99 :param cs: given changeset
100 :param path: path to lookup
100 :param path: path to lookup
101 """
101 """
102
102
103 try:
103 try:
104 file_node = cs.get_node(path)
104 file_node = cs.get_node(path)
105 if file_node.is_dir():
105 if file_node.is_dir():
106 raise RepositoryError('given path is a directory')
106 raise RepositoryError('given path is a directory')
107 except RepositoryError, e:
107 except RepositoryError, e:
108 h.flash(str(e), category='warning')
108 h.flash(str(e), category='warning')
109 redirect(h.url('files_home', repo_name=repo_name,
109 redirect(h.url('files_home', repo_name=repo_name,
110 revision=cs.raw_id))
110 revision=cs.raw_id))
111
111
112 return file_node
112 return file_node
113
113
114 @LoginRequired()
114 @LoginRequired()
115 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
115 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
116 'repository.admin')
116 'repository.admin')
117 def index(self, repo_name, revision, f_path, annotate=False):
117 def index(self, repo_name, revision, f_path, annotate=False):
118 # redirect to given revision from form if given
118 # redirect to given revision from form if given
119 post_revision = request.POST.get('at_rev', None)
119 post_revision = request.POST.get('at_rev', None)
120 if post_revision:
120 if post_revision:
121 cs = self.__get_cs_or_redirect(post_revision, repo_name)
121 cs = self.__get_cs_or_redirect(post_revision, repo_name)
122 redirect(url('files_home', repo_name=c.repo_name,
122 redirect(url('files_home', repo_name=c.repo_name,
123 revision=cs.raw_id, f_path=f_path))
123 revision=cs.raw_id, f_path=f_path))
124
124
125 c.changeset = self.__get_cs_or_redirect(revision, repo_name)
125 c.changeset = self.__get_cs_or_redirect(revision, repo_name)
126 c.branch = request.GET.get('branch', None)
126 c.branch = request.GET.get('branch', None)
127 c.f_path = f_path
127 c.f_path = f_path
128 c.annotate = annotate
128 c.annotate = annotate
129 cur_rev = c.changeset.revision
129 cur_rev = c.changeset.revision
130
130
131 # prev link
131 # prev link
132 try:
132 try:
133 prev_rev = c.rhodecode_repo.get_changeset(cur_rev).prev(c.branch)
133 prev_rev = c.rhodecode_repo.get_changeset(cur_rev).prev(c.branch)
134 c.url_prev = url('files_home', repo_name=c.repo_name,
134 c.url_prev = url('files_home', repo_name=c.repo_name,
135 revision=prev_rev.raw_id, f_path=f_path)
135 revision=prev_rev.raw_id, f_path=f_path)
136 if c.branch:
136 if c.branch:
137 c.url_prev += '?branch=%s' % c.branch
137 c.url_prev += '?branch=%s' % c.branch
138 except (ChangesetDoesNotExistError, VCSError):
138 except (ChangesetDoesNotExistError, VCSError):
139 c.url_prev = '#'
139 c.url_prev = '#'
140
140
141 # next link
141 # next link
142 try:
142 try:
143 next_rev = c.rhodecode_repo.get_changeset(cur_rev).next(c.branch)
143 next_rev = c.rhodecode_repo.get_changeset(cur_rev).next(c.branch)
144 c.url_next = url('files_home', repo_name=c.repo_name,
144 c.url_next = url('files_home', repo_name=c.repo_name,
145 revision=next_rev.raw_id, f_path=f_path)
145 revision=next_rev.raw_id, f_path=f_path)
146 if c.branch:
146 if c.branch:
147 c.url_next += '?branch=%s' % c.branch
147 c.url_next += '?branch=%s' % c.branch
148 except (ChangesetDoesNotExistError, VCSError):
148 except (ChangesetDoesNotExistError, VCSError):
149 c.url_next = '#'
149 c.url_next = '#'
150
150
151 # files or dirs
151 # files or dirs
152 try:
152 try:
153 c.file = c.changeset.get_node(f_path)
153 c.file = c.changeset.get_node(f_path)
154
154
155 if c.file.is_file():
155 if c.file.is_file():
156 _hist = c.changeset.get_file_history(f_path)
156 _hist = c.changeset.get_file_history(f_path)
157 c.file_history = self._get_node_history(c.changeset, f_path,
157 c.file_history = self._get_node_history(c.changeset, f_path,
158 _hist)
158 _hist)
159 c.authors = []
159 c.authors = []
160 for a in set([x.author for x in _hist]):
160 for a in set([x.author for x in _hist]):
161 c.authors.append((h.email(a), h.person(a)))
161 c.authors.append((h.email(a), h.person(a)))
162 else:
162 else:
163 c.authors = c.file_history = []
163 c.authors = c.file_history = []
164 except RepositoryError, e:
164 except RepositoryError, e:
165 h.flash(str(e), category='warning')
165 h.flash(str(e), category='warning')
166 redirect(h.url('files_home', repo_name=repo_name,
166 redirect(h.url('files_home', repo_name=repo_name,
167 revision='tip'))
167 revision='tip'))
168
168
169 return render('files/files.html')
169 return render('files/files.html')
170
170
171 @LoginRequired()
171 @LoginRequired()
172 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
172 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
173 'repository.admin')
173 'repository.admin')
174 def rawfile(self, repo_name, revision, f_path):
174 def rawfile(self, repo_name, revision, f_path):
175 cs = self.__get_cs_or_redirect(revision, repo_name)
175 cs = self.__get_cs_or_redirect(revision, repo_name)
176 file_node = self.__get_filenode_or_redirect(repo_name, cs, f_path)
176 file_node = self.__get_filenode_or_redirect(repo_name, cs, f_path)
177
177
178 response.content_disposition = 'attachment; filename=%s' % \
178 response.content_disposition = 'attachment; filename=%s' % \
179 safe_str(f_path.split(Repository.url_sep())[-1])
179 safe_str(f_path.split(Repository.url_sep())[-1])
180
180
181 response.content_type = file_node.mimetype
181 response.content_type = file_node.mimetype
182 return file_node.content
182 return file_node.content
183
183
184 @LoginRequired()
184 @LoginRequired()
185 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
185 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
186 'repository.admin')
186 'repository.admin')
187 def raw(self, repo_name, revision, f_path):
187 def raw(self, repo_name, revision, f_path):
188 cs = self.__get_cs_or_redirect(revision, repo_name)
188 cs = self.__get_cs_or_redirect(revision, repo_name)
189 file_node = self.__get_filenode_or_redirect(repo_name, cs, f_path)
189 file_node = self.__get_filenode_or_redirect(repo_name, cs, f_path)
190
190
191 raw_mimetype_mapping = {
191 raw_mimetype_mapping = {
192 # map original mimetype to a mimetype used for "show as raw"
192 # map original mimetype to a mimetype used for "show as raw"
193 # you can also provide a content-disposition to override the
193 # you can also provide a content-disposition to override the
194 # default "attachment" disposition.
194 # default "attachment" disposition.
195 # orig_type: (new_type, new_dispo)
195 # orig_type: (new_type, new_dispo)
196
196
197 # show images inline:
197 # show images inline:
198 'image/x-icon': ('image/x-icon', 'inline'),
198 'image/x-icon': ('image/x-icon', 'inline'),
199 'image/png': ('image/png', 'inline'),
199 'image/png': ('image/png', 'inline'),
200 'image/gif': ('image/gif', 'inline'),
200 'image/gif': ('image/gif', 'inline'),
201 'image/jpeg': ('image/jpeg', 'inline'),
201 'image/jpeg': ('image/jpeg', 'inline'),
202 'image/svg+xml': ('image/svg+xml', 'inline'),
202 'image/svg+xml': ('image/svg+xml', 'inline'),
203 }
203 }
204
204
205 mimetype = file_node.mimetype
205 mimetype = file_node.mimetype
206 try:
206 try:
207 mimetype, dispo = raw_mimetype_mapping[mimetype]
207 mimetype, dispo = raw_mimetype_mapping[mimetype]
208 except KeyError:
208 except KeyError:
209 # we don't know anything special about this, handle it safely
209 # we don't know anything special about this, handle it safely
210 if file_node.is_binary:
210 if file_node.is_binary:
211 # do same as download raw for binary files
211 # do same as download raw for binary files
212 mimetype, dispo = 'application/octet-stream', 'attachment'
212 mimetype, dispo = 'application/octet-stream', 'attachment'
213 else:
213 else:
214 # do not just use the original mimetype, but force text/plain,
214 # do not just use the original mimetype, but force text/plain,
215 # otherwise it would serve text/html and that might be unsafe.
215 # otherwise it would serve text/html and that might be unsafe.
216 # Note: underlying vcs library fakes text/plain mimetype if the
216 # Note: underlying vcs library fakes text/plain mimetype if the
217 # mimetype cannot be determined and it thinks it is not
217 # mimetype cannot be determined and it thinks it is not
218 # binary. This might lead to erroneous text display in some
218 # binary. This might lead to erroneous text display in some
219 # cases, but helps in other cases, like with text files
219 # cases, but helps in other cases, like with text files
220 # without extension.
220 # without extension.
221 mimetype, dispo = 'text/plain', 'inline'
221 mimetype, dispo = 'text/plain', 'inline'
222
222
223 if dispo == 'attachment':
223 if dispo == 'attachment':
224 dispo = 'attachment; filename=%s' % \
224 dispo = 'attachment; filename=%s' % \
225 safe_str(f_path.split(os.sep)[-1])
225 safe_str(f_path.split(os.sep)[-1])
226
226
227 response.content_disposition = dispo
227 response.content_disposition = dispo
228 response.content_type = mimetype
228 response.content_type = mimetype
229 return file_node.content
229 return file_node.content
230
230
231 @LoginRequired()
231 @LoginRequired()
232 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
232 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
233 def edit(self, repo_name, revision, f_path):
233 def edit(self, repo_name, revision, f_path):
234 r_post = request.POST
234 r_post = request.POST
235
235
236 c.cs = self.__get_cs_or_redirect(revision, repo_name)
236 c.cs = self.__get_cs_or_redirect(revision, repo_name)
237 c.file = self.__get_filenode_or_redirect(repo_name, c.cs, f_path)
237 c.file = self.__get_filenode_or_redirect(repo_name, c.cs, f_path)
238
238
239 if c.file.is_binary:
239 if c.file.is_binary:
240 return redirect(url('files_home', repo_name=c.repo_name,
240 return redirect(url('files_home', repo_name=c.repo_name,
241 revision=c.cs.raw_id, f_path=f_path))
241 revision=c.cs.raw_id, f_path=f_path))
242
242
243 c.f_path = f_path
243 c.f_path = f_path
244
244
245 if r_post:
245 if r_post:
246
246
247 old_content = c.file.content
247 old_content = c.file.content
248 sl = old_content.splitlines(1)
248 sl = old_content.splitlines(1)
249 first_line = sl[0] if sl else ''
249 first_line = sl[0] if sl else ''
250 # modes: 0 - Unix, 1 - Mac, 2 - DOS
250 # modes: 0 - Unix, 1 - Mac, 2 - DOS
251 mode = detect_mode(first_line, 0)
251 mode = detect_mode(first_line, 0)
252 content = convert_line_endings(r_post.get('content'), mode)
252 content = convert_line_endings(r_post.get('content'), mode)
253
253
254 message = r_post.get('message') or (_('Edited %s via RhodeCode')
254 message = r_post.get('message') or (_('Edited %s via RhodeCode')
255 % (f_path))
255 % (f_path))
256 author = self.rhodecode_user.full_contact
256 author = self.rhodecode_user.full_contact
257
257
258 if content == old_content:
258 if content == old_content:
259 h.flash(_('No changes'),
259 h.flash(_('No changes'),
260 category='warning')
260 category='warning')
261 return redirect(url('changeset_home', repo_name=c.repo_name,
261 return redirect(url('changeset_home', repo_name=c.repo_name,
262 revision='tip'))
262 revision='tip'))
263
263
264 try:
264 try:
265 self.scm_model.commit_change(repo=c.rhodecode_repo,
265 self.scm_model.commit_change(repo=c.rhodecode_repo,
266 repo_name=repo_name, cs=c.cs,
266 repo_name=repo_name, cs=c.cs,
267 user=self.rhodecode_user,
267 user=self.rhodecode_user,
268 author=author, message=message,
268 author=author, message=message,
269 content=content, f_path=f_path)
269 content=content, f_path=f_path)
270 h.flash(_('Successfully committed to %s') % f_path,
270 h.flash(_('Successfully committed to %s') % f_path,
271 category='success')
271 category='success')
272
272
273 except Exception:
273 except Exception:
274 log.error(traceback.format_exc())
274 log.error(traceback.format_exc())
275 h.flash(_('Error occurred during commit'), category='error')
275 h.flash(_('Error occurred during commit'), category='error')
276 return redirect(url('changeset_home',
276 return redirect(url('changeset_home',
277 repo_name=c.repo_name, revision='tip'))
277 repo_name=c.repo_name, revision='tip'))
278
278
279 return render('files/files_edit.html')
279 return render('files/files_edit.html')
280
280
281 @LoginRequired()
281 @LoginRequired()
282 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
282 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
283 def add(self, repo_name, revision, f_path):
283 def add(self, repo_name, revision, f_path):
284 r_post = request.POST
284 r_post = request.POST
285 c.cs = self.__get_cs_or_redirect(revision, repo_name,
285 c.cs = self.__get_cs_or_redirect(revision, repo_name,
286 redirect_after=False)
286 redirect_after=False)
287 if c.cs is None:
287 if c.cs is None:
288 c.cs = EmptyChangeset(alias=c.rhodecode_repo.alias)
288 c.cs = EmptyChangeset(alias=c.rhodecode_repo.alias)
289
289
290 c.f_path = f_path
290 c.f_path = f_path
291
291
292 if r_post:
292 if r_post:
293 unix_mode = 0
293 unix_mode = 0
294 content = convert_line_endings(r_post.get('content'), unix_mode)
294 content = convert_line_endings(r_post.get('content'), unix_mode)
295
295
296 message = r_post.get('message') or (_('Added %s via RhodeCode')
296 message = r_post.get('message') or (_('Added %s via RhodeCode')
297 % (f_path))
297 % (f_path))
298 location = r_post.get('location')
298 location = r_post.get('location')
299 filename = r_post.get('filename')
299 filename = r_post.get('filename')
300 file_obj = r_post.get('upload_file', None)
300 file_obj = r_post.get('upload_file', None)
301
301
302 if file_obj is not None and hasattr(file_obj, 'filename'):
302 if file_obj is not None and hasattr(file_obj, 'filename'):
303 filename = file_obj.filename
303 filename = file_obj.filename
304 content = file_obj.file
304 content = file_obj.file
305
305
306 node_path = os.path.join(location, filename)
306 node_path = os.path.join(location, filename)
307 author = self.rhodecode_user.full_contact
307 author = self.rhodecode_user.full_contact
308
308
309 if not content:
309 if not content:
310 h.flash(_('No content'), category='warning')
310 h.flash(_('No content'), category='warning')
311 return redirect(url('changeset_home', repo_name=c.repo_name,
311 return redirect(url('changeset_home', repo_name=c.repo_name,
312 revision='tip'))
312 revision='tip'))
313 if not filename:
313 if not filename:
314 h.flash(_('No filename'), category='warning')
314 h.flash(_('No filename'), category='warning')
315 return redirect(url('changeset_home', repo_name=c.repo_name,
315 return redirect(url('changeset_home', repo_name=c.repo_name,
316 revision='tip'))
316 revision='tip'))
317
317
318 try:
318 try:
319 self.scm_model.create_node(repo=c.rhodecode_repo,
319 self.scm_model.create_node(repo=c.rhodecode_repo,
320 repo_name=repo_name, cs=c.cs,
320 repo_name=repo_name, cs=c.cs,
321 user=self.rhodecode_user,
321 user=self.rhodecode_user,
322 author=author, message=message,
322 author=author, message=message,
323 content=content, f_path=node_path)
323 content=content, f_path=node_path)
324 h.flash(_('Successfully committed to %s') % node_path,
324 h.flash(_('Successfully committed to %s') % node_path,
325 category='success')
325 category='success')
326 except NodeAlreadyExistsError, e:
326 except NodeAlreadyExistsError, e:
327 h.flash(_(e), category='error')
327 h.flash(_(e), category='error')
328 except Exception:
328 except Exception:
329 log.error(traceback.format_exc())
329 log.error(traceback.format_exc())
330 h.flash(_('Error occurred during commit'), category='error')
330 h.flash(_('Error occurred during commit'), category='error')
331 return redirect(url('changeset_home',
331 return redirect(url('changeset_home',
332 repo_name=c.repo_name, revision='tip'))
332 repo_name=c.repo_name, revision='tip'))
333
333
334 return render('files/files_add.html')
334 return render('files/files_add.html')
335
335
336 @LoginRequired()
336 @LoginRequired()
337 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
337 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
338 'repository.admin')
338 'repository.admin')
339 def archivefile(self, repo_name, fname):
339 def archivefile(self, repo_name, fname):
340
340
341 fileformat = None
341 fileformat = None
342 revision = None
342 revision = None
343 ext = None
343 ext = None
344 subrepos = request.GET.get('subrepos') == 'true'
344 subrepos = request.GET.get('subrepos') == 'true'
345
345
346 for a_type, ext_data in settings.ARCHIVE_SPECS.items():
346 for a_type, ext_data in settings.ARCHIVE_SPECS.items():
347 archive_spec = fname.split(ext_data[1])
347 archive_spec = fname.split(ext_data[1])
348 if len(archive_spec) == 2 and archive_spec[1] == '':
348 if len(archive_spec) == 2 and archive_spec[1] == '':
349 fileformat = a_type or ext_data[1]
349 fileformat = a_type or ext_data[1]
350 revision = archive_spec[0]
350 revision = archive_spec[0]
351 ext = ext_data[1]
351 ext = ext_data[1]
352
352
353 try:
353 try:
354 dbrepo = RepoModel().get_by_repo_name(repo_name)
354 dbrepo = RepoModel().get_by_repo_name(repo_name)
355 if dbrepo.enable_downloads is False:
355 if dbrepo.enable_downloads is False:
356 return _('downloads disabled')
356 return _('downloads disabled')
357
357
358 if c.rhodecode_repo.alias == 'hg':
358 if c.rhodecode_repo.alias == 'hg':
359 # patch and reset hooks section of UI config to not run any
359 # patch and reset hooks section of UI config to not run any
360 # hooks on fetching archives with subrepos
360 # hooks on fetching archives with subrepos
361 for k, v in c.rhodecode_repo._repo.ui.configitems('hooks'):
361 for k, v in c.rhodecode_repo._repo.ui.configitems('hooks'):
362 c.rhodecode_repo._repo.ui.setconfig('hooks', k, None)
362 c.rhodecode_repo._repo.ui.setconfig('hooks', k, None)
363
363
364 cs = c.rhodecode_repo.get_changeset(revision)
364 cs = c.rhodecode_repo.get_changeset(revision)
365 content_type = settings.ARCHIVE_SPECS[fileformat][0]
365 content_type = settings.ARCHIVE_SPECS[fileformat][0]
366 except ChangesetDoesNotExistError:
366 except ChangesetDoesNotExistError:
367 return _('Unknown revision %s') % revision
367 return _('Unknown revision %s') % revision
368 except EmptyRepositoryError:
368 except EmptyRepositoryError:
369 return _('Empty repository')
369 return _('Empty repository')
370 except (ImproperArchiveTypeError, KeyError):
370 except (ImproperArchiveTypeError, KeyError):
371 return _('Unknown archive type')
371 return _('Unknown archive type')
372
372
373 fd, archive = tempfile.mkstemp()
373 fd, archive = tempfile.mkstemp()
374 t = open(archive, 'wb')
374 t = open(archive, 'wb')
375 cs.fill_archive(stream=t, kind=fileformat, subrepos=subrepos)
375 cs.fill_archive(stream=t, kind=fileformat, subrepos=subrepos)
376 t.close()
376 t.close()
377
377
378 def get_chunked_archive(archive):
378 def get_chunked_archive(archive):
379 stream = open(archive, 'rb')
379 stream = open(archive, 'rb')
380 while True:
380 while True:
381 data = stream.read(16 * 1024)
381 data = stream.read(16 * 1024)
382 if not data:
382 if not data:
383 stream.close()
383 stream.close()
384 os.close(fd)
384 os.close(fd)
385 os.remove(archive)
385 os.remove(archive)
386 break
386 break
387 yield data
387 yield data
388
388
389 response.content_disposition = str('attachment; filename=%s-%s%s' \
389 response.content_disposition = str('attachment; filename=%s-%s%s' \
390 % (repo_name, revision[:12], ext))
390 % (repo_name, revision[:12], ext))
391 response.content_type = str(content_type)
391 response.content_type = str(content_type)
392 return get_chunked_archive(archive)
392 return get_chunked_archive(archive)
393
393
394 @LoginRequired()
394 @LoginRequired()
395 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
395 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
396 'repository.admin')
396 'repository.admin')
397 def diff(self, repo_name, f_path):
397 def diff(self, repo_name, f_path):
398 ignore_whitespace = request.GET.get('ignorews') == '1'
398 ignore_whitespace = request.GET.get('ignorews') == '1'
399 line_context = request.GET.get('context', 3)
399 line_context = request.GET.get('context', 3)
400 diff1 = request.GET.get('diff1', '')
400 diff1 = request.GET.get('diff1', '')
401 diff2 = request.GET.get('diff2', '')
401 diff2 = request.GET.get('diff2', '')
402 c.action = request.GET.get('diff')
402 c.action = request.GET.get('diff')
403 c.no_changes = diff1 == diff2
403 c.no_changes = diff1 == diff2
404 c.f_path = f_path
404 c.f_path = f_path
405 c.big_diff = False
405 c.big_diff = False
406 c.anchor_url = anchor_url
406 c.anchor_url = anchor_url
407 c.ignorews_url = _ignorews_url
407 c.ignorews_url = _ignorews_url
408 c.context_url = _context_url
408 c.context_url = _context_url
409 c.changes = OrderedDict()
409 c.changes = OrderedDict()
410 c.changes[diff2] = []
410 c.changes[diff2] = []
411 try:
411 try:
412 if diff1 not in ['', None, 'None', '0' * 12, '0' * 40]:
412 if diff1 not in ['', None, 'None', '0' * 12, '0' * 40]:
413 c.changeset_1 = c.rhodecode_repo.get_changeset(diff1)
413 c.changeset_1 = c.rhodecode_repo.get_changeset(diff1)
414 node1 = c.changeset_1.get_node(f_path)
414 node1 = c.changeset_1.get_node(f_path)
415 else:
415 else:
416 c.changeset_1 = EmptyChangeset(repo=c.rhodecode_repo)
416 c.changeset_1 = EmptyChangeset(repo=c.rhodecode_repo)
417 node1 = FileNode('.', '', changeset=c.changeset_1)
417 node1 = FileNode('.', '', changeset=c.changeset_1)
418
418
419 if diff2 not in ['', None, 'None', '0' * 12, '0' * 40]:
419 if diff2 not in ['', None, 'None', '0' * 12, '0' * 40]:
420 c.changeset_2 = c.rhodecode_repo.get_changeset(diff2)
420 c.changeset_2 = c.rhodecode_repo.get_changeset(diff2)
421 node2 = c.changeset_2.get_node(f_path)
421 node2 = c.changeset_2.get_node(f_path)
422 else:
422 else:
423 c.changeset_2 = EmptyChangeset(repo=c.rhodecode_repo)
423 c.changeset_2 = EmptyChangeset(repo=c.rhodecode_repo)
424 node2 = FileNode('.', '', changeset=c.changeset_2)
424 node2 = FileNode('.', '', changeset=c.changeset_2)
425 except RepositoryError:
425 except RepositoryError:
426 return redirect(url('files_home', repo_name=c.repo_name,
426 return redirect(url('files_home', repo_name=c.repo_name,
427 f_path=f_path))
427 f_path=f_path))
428
428
429 if c.action == 'download':
429 if c.action == 'download':
430 _diff = diffs.get_gitdiff(node1, node2,
430 _diff = diffs.get_gitdiff(node1, node2,
431 ignore_whitespace=ignore_whitespace,
431 ignore_whitespace=ignore_whitespace,
432 context=line_context)
432 context=line_context)
433 diff = diffs.DiffProcessor(_diff, format='gitdiff')
433 diff = diffs.DiffProcessor(_diff, format='gitdiff')
434
434
435 diff_name = '%s_vs_%s.diff' % (diff1, diff2)
435 diff_name = '%s_vs_%s.diff' % (diff1, diff2)
436 response.content_type = 'text/plain'
436 response.content_type = 'text/plain'
437 response.content_disposition = (
437 response.content_disposition = (
438 'attachment; filename=%s' % diff_name
438 'attachment; filename=%s' % diff_name
439 )
439 )
440 return diff.raw_diff()
440 return diff.raw_diff()
441
441
442 elif c.action == 'raw':
442 elif c.action == 'raw':
443 _diff = diffs.get_gitdiff(node1, node2,
443 _diff = diffs.get_gitdiff(node1, node2,
444 ignore_whitespace=ignore_whitespace,
444 ignore_whitespace=ignore_whitespace,
445 context=line_context)
445 context=line_context)
446 diff = diffs.DiffProcessor(_diff, format='gitdiff')
446 diff = diffs.DiffProcessor(_diff, format='gitdiff')
447 response.content_type = 'text/plain'
447 response.content_type = 'text/plain'
448 return diff.raw_diff()
448 return diff.raw_diff()
449
449
450 else:
450 else:
451 fid = h.FID(diff2, node2.path)
451 fid = h.FID(diff2, node2.path)
452 line_context_lcl = get_line_ctx(fid, request.GET)
452 line_context_lcl = get_line_ctx(fid, request.GET)
453 ign_whitespace_lcl = get_ignore_ws(fid, request.GET)
453 ign_whitespace_lcl = get_ignore_ws(fid, request.GET)
454
454
455 lim = request.GET.get('fulldiff') or self.cut_off_limit
455 lim = request.GET.get('fulldiff') or self.cut_off_limit
456 _, cs1, cs2, diff, st = diffs.wrapped_diff(filenode_old=node1,
456 _, cs1, cs2, diff, st = diffs.wrapped_diff(filenode_old=node1,
457 filenode_new=node2,
457 filenode_new=node2,
458 cut_off_limit=lim,
458 cut_off_limit=lim,
459 ignore_whitespace=ign_whitespace_lcl,
459 ignore_whitespace=ign_whitespace_lcl,
460 line_context=line_context_lcl,
460 line_context=line_context_lcl,
461 enable_comments=False)
461 enable_comments=False)
462
462
463 c.changes = [('', node2, diff, cs1, cs2, st,)]
463 c.changes = [('', node2, diff, cs1, cs2, st,)]
464
464
465 return render('files/file_diff.html')
465 return render('files/file_diff.html')
466
466
467 def _get_node_history(self, cs, f_path, changesets=None):
467 def _get_node_history(self, cs, f_path, changesets=None):
468 if changesets is None:
468 if changesets is None:
469 changesets = cs.get_file_history(f_path)
469 changesets = cs.get_file_history(f_path)
470 hist_l = []
470 hist_l = []
471
471
472 changesets_group = ([], _("Changesets"))
472 changesets_group = ([], _("Changesets"))
473 branches_group = ([], _("Branches"))
473 branches_group = ([], _("Branches"))
474 tags_group = ([], _("Tags"))
474 tags_group = ([], _("Tags"))
475 _hg = cs.repository.alias == 'hg'
475 _hg = cs.repository.alias == 'hg'
476 for chs in changesets:
476 for chs in changesets:
477 _branch = '(%s)' % chs.branch if _hg else ''
477 _branch = '(%s)' % chs.branch if _hg else ''
478 n_desc = 'r%s:%s %s' % (chs.revision, chs.short_id, _branch)
478 n_desc = 'r%s:%s %s' % (chs.revision, chs.short_id, _branch)
479 changesets_group[0].append((chs.raw_id, n_desc,))
479 changesets_group[0].append((chs.raw_id, n_desc,))
480
480
481 hist_l.append(changesets_group)
481 hist_l.append(changesets_group)
482
482
483 for name, chs in c.rhodecode_repo.branches.items():
483 for name, chs in c.rhodecode_repo.branches.items():
484 branches_group[0].append((chs, name),)
484 branches_group[0].append((chs, name),)
485 hist_l.append(branches_group)
485 hist_l.append(branches_group)
486
486
487 for name, chs in c.rhodecode_repo.tags.items():
487 for name, chs in c.rhodecode_repo.tags.items():
488 tags_group[0].append((chs, name),)
488 tags_group[0].append((chs, name),)
489 hist_l.append(tags_group)
489 hist_l.append(tags_group)
490
490
491 return hist_l
491 return hist_l
492
492
493 @LoginRequired()
493 @LoginRequired()
494 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
494 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
495 'repository.admin')
495 'repository.admin')
496 @jsonify
496 @jsonify
497 def nodelist(self, repo_name, revision, f_path):
497 def nodelist(self, repo_name, revision, f_path):
498 if request.environ.get('HTTP_X_PARTIAL_XHR'):
498 if request.environ.get('HTTP_X_PARTIAL_XHR'):
499 cs = self.__get_cs_or_redirect(revision, repo_name)
499 cs = self.__get_cs_or_redirect(revision, repo_name)
500 _d, _f = ScmModel().get_nodes(repo_name, cs.raw_id, f_path,
500 _d, _f = ScmModel().get_nodes(repo_name, cs.raw_id, f_path,
501 flat=False)
501 flat=False)
502 return {'nodes': _d + _f}
502 return {'nodes': _d + _f}
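
A note on the archive download handled by archivefile() above: the archive is first written to a temporary file, and the action then returns a generator, so the WSGI layer streams the response in fixed-size chunks and the temporary file is removed only after the last chunk has been read. Below is a minimal standalone sketch of that pattern; the helper name, the fill callback and the chunk size are illustrative assumptions, not part of RhodeCode.

import os
import tempfile

def stream_and_cleanup(fill, chunk_size=16 * 1024):
    # write the payload to a temp file first
    # (in the controller this is cs.fill_archive(stream=out, ...))
    fd, path = tempfile.mkstemp()
    out = os.fdopen(fd, 'wb')
    fill(out)
    out.close()

    def chunks():
        stream = open(path, 'rb')
        while True:
            data = stream.read(chunk_size)
            if not data:
                # last chunk read: close and clean up the temp file
                stream.close()
                os.remove(path)
                break
            yield data
    return chunks()

# usage sketch: a controller action would simply `return stream_and_cleanup(...)`
print(b''.join(stream_and_cleanup(lambda f: f.write(b'example payload'))))
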
@@ -1,245 +1,245 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """
2 """
3 rhodecode.controllers.summary
3 rhodecode.controllers.summary
4 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
4 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
5
5
6 Summary controller for Rhodecode
6 Summary controller for Rhodecode
7
7
8 :created_on: Apr 18, 2010
8 :created_on: Apr 18, 2010
9 :author: marcink
9 :author: marcink
10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 :license: GPLv3, see COPYING for more details.
11 :license: GPLv3, see COPYING for more details.
12 """
12 """
13 # This program is free software: you can redistribute it and/or modify
13 # This program is free software: you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation, either version 3 of the License, or
15 # the Free Software Foundation, either version 3 of the License, or
16 # (at your option) any later version.
16 # (at your option) any later version.
17 #
17 #
18 # This program is distributed in the hope that it will be useful,
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
21 # GNU General Public License for more details.
22 #
22 #
23 # You should have received a copy of the GNU General Public License
23 # You should have received a copy of the GNU General Public License
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25
25
26 import traceback
26 import traceback
27 import calendar
27 import calendar
28 import logging
28 import logging
29 import urllib
29 import urllib
30 from time import mktime
30 from time import mktime
31 from datetime import timedelta, date
31 from datetime import timedelta, date
32 from urlparse import urlparse
32 from urlparse import urlparse
33 from rhodecode.lib.compat import product
33 from rhodecode.lib.compat import product
34
34
35 from rhodecode.lib.vcs.exceptions import ChangesetError, EmptyRepositoryError, \
35 from rhodecode.lib.vcs.exceptions import ChangesetError, EmptyRepositoryError, \
36 NodeDoesNotExistError
36 NodeDoesNotExistError
37
37
38 from pylons import tmpl_context as c, request, url, config
38 from pylons import tmpl_context as c, request, url, config
39 from pylons.i18n.translation import _
39 from pylons.i18n.translation import _
40
40
41 from beaker.cache import cache_region, region_invalidate
41 from beaker.cache import cache_region, region_invalidate
42
42
43 from rhodecode.config.conf import ALL_READMES, ALL_EXTS, LANGUAGES_EXTENSIONS_MAP
43 from rhodecode.config.conf import ALL_READMES, ALL_EXTS, LANGUAGES_EXTENSIONS_MAP
44 from rhodecode.model.db import Statistics, CacheInvalidation
44 from rhodecode.model.db import Statistics, CacheInvalidation
45 from rhodecode.lib.utils2 import safe_unicode
45 from rhodecode.lib.utils2 import safe_unicode
46 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
46 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
47 from rhodecode.lib.base import BaseRepoController, render
47 from rhodecode.lib.base import BaseRepoController, render
48 from rhodecode.lib.utils import EmptyChangeset
48 from rhodecode.lib.vcs.backends.base import EmptyChangeset
49 from rhodecode.lib.markup_renderer import MarkupRenderer
49 from rhodecode.lib.markup_renderer import MarkupRenderer
50 from rhodecode.lib.celerylib import run_task
50 from rhodecode.lib.celerylib import run_task
51 from rhodecode.lib.celerylib.tasks import get_commits_stats
51 from rhodecode.lib.celerylib.tasks import get_commits_stats
52 from rhodecode.lib.helpers import RepoPage
52 from rhodecode.lib.helpers import RepoPage
53 from rhodecode.lib.compat import json, OrderedDict
53 from rhodecode.lib.compat import json, OrderedDict
54 from rhodecode.lib.vcs.nodes import FileNode
54 from rhodecode.lib.vcs.nodes import FileNode
55
55
56 log = logging.getLogger(__name__)
56 log = logging.getLogger(__name__)
57
57
58 README_FILES = [''.join([x[0][0], x[1][0]]) for x in
58 README_FILES = [''.join([x[0][0], x[1][0]]) for x in
59 sorted(list(product(ALL_READMES, ALL_EXTS)),
59 sorted(list(product(ALL_READMES, ALL_EXTS)),
60 key=lambda y:y[0][1] + y[1][1])]
60 key=lambda y:y[0][1] + y[1][1])]
61
61
62
62
63 class SummaryController(BaseRepoController):
63 class SummaryController(BaseRepoController):
64
64
65 @LoginRequired()
65 @LoginRequired()
66 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
66 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
67 'repository.admin')
67 'repository.admin')
68 def __before__(self):
68 def __before__(self):
69 super(SummaryController, self).__before__()
69 super(SummaryController, self).__before__()
70
70
71 def index(self, repo_name):
71 def index(self, repo_name):
72 c.dbrepo = dbrepo = c.rhodecode_db_repo
72 c.dbrepo = dbrepo = c.rhodecode_db_repo
73 c.following = self.scm_model.is_following_repo(repo_name,
73 c.following = self.scm_model.is_following_repo(repo_name,
74 self.rhodecode_user.user_id)
74 self.rhodecode_user.user_id)
75
75
76 def url_generator(**kw):
76 def url_generator(**kw):
77 return url('shortlog_home', repo_name=repo_name, size=10, **kw)
77 return url('shortlog_home', repo_name=repo_name, size=10, **kw)
78
78
79 c.repo_changesets = RepoPage(c.rhodecode_repo, page=1,
79 c.repo_changesets = RepoPage(c.rhodecode_repo, page=1,
80 items_per_page=10, url=url_generator)
80 items_per_page=10, url=url_generator)
81
81
82 if self.rhodecode_user.username == 'default':
82 if self.rhodecode_user.username == 'default':
83 # for the default (anonymous) user we don't need to pass credentials
83 # for the default (anonymous) user we don't need to pass credentials
84 username = ''
84 username = ''
85 password = ''
85 password = ''
86 else:
86 else:
87 username = str(self.rhodecode_user.username)
87 username = str(self.rhodecode_user.username)
88 password = '@'
88 password = '@'
89
89
90 parsed_url = urlparse(url.current(qualified=True))
90 parsed_url = urlparse(url.current(qualified=True))
91
91
92 default_clone_uri = '{scheme}://{user}{pass}{netloc}{path}'
92 default_clone_uri = '{scheme}://{user}{pass}{netloc}{path}'
93
93
94 uri_tmpl = config.get('clone_uri', default_clone_uri)
94 uri_tmpl = config.get('clone_uri', default_clone_uri)
95 uri_tmpl = uri_tmpl.replace('{', '%(').replace('}', ')s')
95 uri_tmpl = uri_tmpl.replace('{', '%(').replace('}', ')s')
96 decoded_path = safe_unicode(urllib.unquote(parsed_url.path))
96 decoded_path = safe_unicode(urllib.unquote(parsed_url.path))
97 uri_dict = {
97 uri_dict = {
98 'user': username,
98 'user': username,
99 'pass': password,
99 'pass': password,
100 'scheme': parsed_url.scheme,
100 'scheme': parsed_url.scheme,
101 'netloc': parsed_url.netloc,
101 'netloc': parsed_url.netloc,
102 'path': decoded_path
102 'path': decoded_path
103 }
103 }
104
104
105 uri = uri_tmpl % uri_dict
105 uri = uri_tmpl % uri_dict
106 # generate another clone url by id
106 # generate another clone url by id
107 uri_dict.update(
107 uri_dict.update(
108 {'path': decoded_path.replace(repo_name, '_%s' % c.dbrepo.repo_id)}
108 {'path': decoded_path.replace(repo_name, '_%s' % c.dbrepo.repo_id)}
109 )
109 )
110 uri_id = uri_tmpl % uri_dict
110 uri_id = uri_tmpl % uri_dict
111
111
112 c.clone_repo_url = uri
112 c.clone_repo_url = uri
113 c.clone_repo_url_id = uri_id
113 c.clone_repo_url_id = uri_id
114 c.repo_tags = OrderedDict()
114 c.repo_tags = OrderedDict()
115 for name, hash_ in c.rhodecode_repo.tags.items()[:10]:
115 for name, hash_ in c.rhodecode_repo.tags.items()[:10]:
116 try:
116 try:
117 c.repo_tags[name] = c.rhodecode_repo.get_changeset(hash_)
117 c.repo_tags[name] = c.rhodecode_repo.get_changeset(hash_)
118 except ChangesetError:
118 except ChangesetError:
119 c.repo_tags[name] = EmptyChangeset(hash_)
119 c.repo_tags[name] = EmptyChangeset(hash_)
120
120
121 c.repo_branches = OrderedDict()
121 c.repo_branches = OrderedDict()
122 for name, hash_ in c.rhodecode_repo.branches.items()[:10]:
122 for name, hash_ in c.rhodecode_repo.branches.items()[:10]:
123 try:
123 try:
124 c.repo_branches[name] = c.rhodecode_repo.get_changeset(hash_)
124 c.repo_branches[name] = c.rhodecode_repo.get_changeset(hash_)
125 except ChangesetError:
125 except ChangesetError:
126 c.repo_branches[name] = EmptyChangeset(hash_)
126 c.repo_branches[name] = EmptyChangeset(hash_)
127
127
128 td = date.today() + timedelta(days=1)
128 td = date.today() + timedelta(days=1)
129 td_1m = td - timedelta(days=calendar.mdays[td.month])
129 td_1m = td - timedelta(days=calendar.mdays[td.month])
130 td_1y = td - timedelta(days=365)
130 td_1y = td - timedelta(days=365)
131
131
132 ts_min_m = mktime(td_1m.timetuple())
132 ts_min_m = mktime(td_1m.timetuple())
133 ts_min_y = mktime(td_1y.timetuple())
133 ts_min_y = mktime(td_1y.timetuple())
134 ts_max_y = mktime(td.timetuple())
134 ts_max_y = mktime(td.timetuple())
135
135
136 if dbrepo.enable_statistics:
136 if dbrepo.enable_statistics:
137 c.show_stats = True
137 c.show_stats = True
138 c.no_data_msg = _('No data loaded yet')
138 c.no_data_msg = _('No data loaded yet')
139 run_task(get_commits_stats, c.dbrepo.repo_name, ts_min_y, ts_max_y)
139 run_task(get_commits_stats, c.dbrepo.repo_name, ts_min_y, ts_max_y)
140 else:
140 else:
141 c.show_stats = False
141 c.show_stats = False
142 c.no_data_msg = _('Statistics are disabled for this repository')
142 c.no_data_msg = _('Statistics are disabled for this repository')
143 c.ts_min = ts_min_m
143 c.ts_min = ts_min_m
144 c.ts_max = ts_max_y
144 c.ts_max = ts_max_y
145
145
146 stats = self.sa.query(Statistics)\
146 stats = self.sa.query(Statistics)\
147 .filter(Statistics.repository == dbrepo)\
147 .filter(Statistics.repository == dbrepo)\
148 .scalar()
148 .scalar()
149
149
150 c.stats_percentage = 0
150 c.stats_percentage = 0
151
151
152 if stats and stats.languages:
152 if stats and stats.languages:
153 c.no_data = False is dbrepo.enable_statistics
153 c.no_data = False is dbrepo.enable_statistics
154 lang_stats_d = json.loads(stats.languages)
154 lang_stats_d = json.loads(stats.languages)
155 c.commit_data = stats.commit_activity
155 c.commit_data = stats.commit_activity
156 c.overview_data = stats.commit_activity_combined
156 c.overview_data = stats.commit_activity_combined
157
157
158 lang_stats = ((x, {"count": y,
158 lang_stats = ((x, {"count": y,
159 "desc": LANGUAGES_EXTENSIONS_MAP.get(x)})
159 "desc": LANGUAGES_EXTENSIONS_MAP.get(x)})
160 for x, y in lang_stats_d.items())
160 for x, y in lang_stats_d.items())
161
161
162 c.trending_languages = json.dumps(
162 c.trending_languages = json.dumps(
163 sorted(lang_stats, reverse=True, key=lambda k: k[1])[:10]
163 sorted(lang_stats, reverse=True, key=lambda k: k[1])[:10]
164 )
164 )
165 last_rev = stats.stat_on_revision + 1
165 last_rev = stats.stat_on_revision + 1
166 c.repo_last_rev = c.rhodecode_repo.count()\
166 c.repo_last_rev = c.rhodecode_repo.count()\
167 if c.rhodecode_repo.revisions else 0
167 if c.rhodecode_repo.revisions else 0
168 if last_rev == 0 or c.repo_last_rev == 0:
168 if last_rev == 0 or c.repo_last_rev == 0:
169 pass
169 pass
170 else:
170 else:
171 c.stats_percentage = '%.2f' % ((float((last_rev)) /
171 c.stats_percentage = '%.2f' % ((float((last_rev)) /
172 c.repo_last_rev) * 100)
172 c.repo_last_rev) * 100)
173 else:
173 else:
174 c.commit_data = json.dumps({})
174 c.commit_data = json.dumps({})
175 c.overview_data = json.dumps([[ts_min_y, 0], [ts_max_y, 10]])
175 c.overview_data = json.dumps([[ts_min_y, 0], [ts_max_y, 10]])
176 c.trending_languages = json.dumps({})
176 c.trending_languages = json.dumps({})
177 c.no_data = True
177 c.no_data = True
178
178
179 c.enable_downloads = dbrepo.enable_downloads
179 c.enable_downloads = dbrepo.enable_downloads
180 if c.enable_downloads:
180 if c.enable_downloads:
181 c.download_options = self._get_download_links(c.rhodecode_repo)
181 c.download_options = self._get_download_links(c.rhodecode_repo)
182
182
183 c.readme_data, c.readme_file = \
183 c.readme_data, c.readme_file = \
184 self.__get_readme_data(c.rhodecode_db_repo)
184 self.__get_readme_data(c.rhodecode_db_repo)
185 return render('summary/summary.html')
185 return render('summary/summary.html')
186
186
187 def __get_readme_data(self, db_repo):
187 def __get_readme_data(self, db_repo):
188 repo_name = db_repo.repo_name
188 repo_name = db_repo.repo_name
189
189
190 @cache_region('long_term')
190 @cache_region('long_term')
191 def _get_readme_from_cache(key):
191 def _get_readme_from_cache(key):
192 readme_data = None
192 readme_data = None
193 readme_file = None
193 readme_file = None
194 log.debug('Looking for README file')
194 log.debug('Looking for README file')
195 try:
195 try:
196 # gets the landing revision, or tip if that fails
196 # gets the landing revision, or tip if that fails
197 cs = db_repo.get_landing_changeset()
197 cs = db_repo.get_landing_changeset()
198 renderer = MarkupRenderer()
198 renderer = MarkupRenderer()
199 for f in README_FILES:
199 for f in README_FILES:
200 try:
200 try:
201 readme = cs.get_node(f)
201 readme = cs.get_node(f)
202 if not isinstance(readme, FileNode):
202 if not isinstance(readme, FileNode):
203 continue
203 continue
204 readme_file = f
204 readme_file = f
205 log.debug('Found README file `%s` rendering...' %
205 log.debug('Found README file `%s` rendering...' %
206 readme_file)
206 readme_file)
207 readme_data = renderer.render(readme.content, f)
207 readme_data = renderer.render(readme.content, f)
208 break
208 break
209 except NodeDoesNotExistError:
209 except NodeDoesNotExistError:
210 continue
210 continue
211 except ChangesetError:
211 except ChangesetError:
212 log.error(traceback.format_exc())
212 log.error(traceback.format_exc())
213 pass
213 pass
214 except EmptyRepositoryError:
214 except EmptyRepositoryError:
215 pass
215 pass
216 except Exception:
216 except Exception:
217 log.error(traceback.format_exc())
217 log.error(traceback.format_exc())
218
218
219 return readme_data, readme_file
219 return readme_data, readme_file
220
220
221 key = repo_name + '_README'
221 key = repo_name + '_README'
222 inv = CacheInvalidation.invalidate(key)
222 inv = CacheInvalidation.invalidate(key)
223 if inv is not None:
223 if inv is not None:
224 region_invalidate(_get_readme_from_cache, None, key)
224 region_invalidate(_get_readme_from_cache, None, key)
225 CacheInvalidation.set_valid(inv.cache_key)
225 CacheInvalidation.set_valid(inv.cache_key)
226 return _get_readme_from_cache(key)
226 return _get_readme_from_cache(key)
227
227
228 def _get_download_links(self, repo):
228 def _get_download_links(self, repo):
229
229
230 download_l = []
230 download_l = []
231
231
232 branches_group = ([], _("Branches"))
232 branches_group = ([], _("Branches"))
233 tags_group = ([], _("Tags"))
233 tags_group = ([], _("Tags"))
234
234
235 for name, chs in c.rhodecode_repo.branches.items():
235 for name, chs in c.rhodecode_repo.branches.items():
236 #chs = chs.split(':')[-1]
236 #chs = chs.split(':')[-1]
237 branches_group[0].append((chs, name),)
237 branches_group[0].append((chs, name),)
238 download_l.append(branches_group)
238 download_l.append(branches_group)
239
239
240 for name, chs in c.rhodecode_repo.tags.items():
240 for name, chs in c.rhodecode_repo.tags.items():
241 #chs = chs.split(':')[-1]
241 #chs = chs.split(':')[-1]
242 tags_group[0].append((chs, name),)
242 tags_group[0].append((chs, name),)
243 download_l.append(tags_group)
243 download_l.append(tags_group)
244
244
245 return download_l
245 return download_l
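
For readers skimming SummaryController.index() above: the clone URL is produced by turning the `{key}` placeholders of the configured `clone_uri` template into `%(key)s` markers and applying ordinary string interpolation. A minimal sketch with made-up values (the host, user and repository path below are illustrative, not taken from a real configuration):

uri_tmpl = '{scheme}://{user}{pass}{netloc}{path}'          # default template
uri_tmpl = uri_tmpl.replace('{', '%(').replace('}', ')s')
# -> '%(scheme)s://%(user)s%(pass)s%(netloc)s%(path)s'

uri_dict = {
    'scheme': 'https',
    'user': 'marcink',      # empty string for the anonymous 'default' user
    'pass': '@',            # logged-in users get a literal '@' separator
    'netloc': 'example.com',
    'path': '/myrepo',
}
print(uri_tmpl % uri_dict)  # https://marcink@example.com/myrepo
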
@@ -1,635 +1,636 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """
2 """
3 rhodecode.lib.diffs
3 rhodecode.lib.diffs
4 ~~~~~~~~~~~~~~~~~~~
4 ~~~~~~~~~~~~~~~~~~~
5
5
6 Set of diffing helpers, previously part of vcs
6 Set of diffing helpers, previously part of vcs
7
7
8
8
9 :created_on: Dec 4, 2011
9 :created_on: Dec 4, 2011
10 :author: marcink
10 :author: marcink
11 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
12 :original copyright: 2007-2008 by Armin Ronacher
12 :original copyright: 2007-2008 by Armin Ronacher
13 :license: GPLv3, see COPYING for more details.
13 :license: GPLv3, see COPYING for more details.
14 """
14 """
15 # This program is free software: you can redistribute it and/or modify
15 # This program is free software: you can redistribute it and/or modify
16 # it under the terms of the GNU General Public License as published by
16 # it under the terms of the GNU General Public License as published by
17 # the Free Software Foundation, either version 3 of the License, or
17 # the Free Software Foundation, either version 3 of the License, or
18 # (at your option) any later version.
18 # (at your option) any later version.
19 #
19 #
20 # This program is distributed in the hope that it will be useful,
20 # This program is distributed in the hope that it will be useful,
21 # but WITHOUT ANY WARRANTY; without even the implied warranty of
21 # but WITHOUT ANY WARRANTY; without even the implied warranty of
22 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
22 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
23 # GNU General Public License for more details.
23 # GNU General Public License for more details.
24 #
24 #
25 # You should have received a copy of the GNU General Public License
25 # You should have received a copy of the GNU General Public License
26 # along with this program. If not, see <http://www.gnu.org/licenses/>.
26 # along with this program. If not, see <http://www.gnu.org/licenses/>.
27
27
28 import re
28 import re
29 import difflib
29 import difflib
30 import markupsafe
30 import markupsafe
31
31
32 from itertools import tee, imap
32 from itertools import tee, imap
33
33
34 from mercurial import patch
34 from mercurial import patch
35 from mercurial.mdiff import diffopts
35 from mercurial.mdiff import diffopts
36 from mercurial.bundlerepo import bundlerepository
36 from mercurial.bundlerepo import bundlerepository
37 from mercurial import localrepo
38
37
39 from pylons.i18n.translation import _
38 from pylons.i18n.translation import _
40
39
41 from rhodecode.lib.compat import BytesIO
40 from rhodecode.lib.compat import BytesIO
41 from rhodecode.lib.vcs.utils.hgcompat import localrepo
42 from rhodecode.lib.vcs.exceptions import VCSError
42 from rhodecode.lib.vcs.exceptions import VCSError
43 from rhodecode.lib.vcs.nodes import FileNode, SubModuleNode
43 from rhodecode.lib.vcs.nodes import FileNode, SubModuleNode
44 from rhodecode.lib.vcs.backends.base import EmptyChangeset
44 from rhodecode.lib.helpers import escape
45 from rhodecode.lib.helpers import escape
45 from rhodecode.lib.utils import EmptyChangeset, make_ui
46 from rhodecode.lib.utils import make_ui
46
47
47
48
48 def wrap_to_table(str_):
49 def wrap_to_table(str_):
49 return '''<table class="code-difftable">
50 return '''<table class="code-difftable">
50 <tr class="line no-comment">
51 <tr class="line no-comment">
51 <td class="lineno new"></td>
52 <td class="lineno new"></td>
52 <td class="code no-comment"><pre>%s</pre></td>
53 <td class="code no-comment"><pre>%s</pre></td>
53 </tr>
54 </tr>
54 </table>''' % str_
55 </table>''' % str_
55
56
56
57
57 def wrapped_diff(filenode_old, filenode_new, cut_off_limit=None,
58 def wrapped_diff(filenode_old, filenode_new, cut_off_limit=None,
58 ignore_whitespace=True, line_context=3,
59 ignore_whitespace=True, line_context=3,
59 enable_comments=False):
60 enable_comments=False):
60 """
61 """
61 returns a diff wrapped into a table, checks for cut_off_limit and presents
62 returns a diff wrapped into a table, checks for cut_off_limit and presents
62 a proper message
63 a proper message
63 """
64 """
64
65
65 if filenode_old is None:
66 if filenode_old is None:
66 filenode_old = FileNode(filenode_new.path, '', EmptyChangeset())
67 filenode_old = FileNode(filenode_new.path, '', EmptyChangeset())
67
68
68 if filenode_old.is_binary or filenode_new.is_binary:
69 if filenode_old.is_binary or filenode_new.is_binary:
69 diff = wrap_to_table(_('binary file'))
70 diff = wrap_to_table(_('binary file'))
70 stats = (0, 0)
71 stats = (0, 0)
71 size = 0
72 size = 0
72
73
73 elif cut_off_limit != -1 and (cut_off_limit is None or
74 elif cut_off_limit != -1 and (cut_off_limit is None or
74 (filenode_old.size < cut_off_limit and filenode_new.size < cut_off_limit)):
75 (filenode_old.size < cut_off_limit and filenode_new.size < cut_off_limit)):
75
76
76 f_gitdiff = get_gitdiff(filenode_old, filenode_new,
77 f_gitdiff = get_gitdiff(filenode_old, filenode_new,
77 ignore_whitespace=ignore_whitespace,
78 ignore_whitespace=ignore_whitespace,
78 context=line_context)
79 context=line_context)
79 diff_processor = DiffProcessor(f_gitdiff, format='gitdiff')
80 diff_processor = DiffProcessor(f_gitdiff, format='gitdiff')
80
81
81 diff = diff_processor.as_html(enable_comments=enable_comments)
82 diff = diff_processor.as_html(enable_comments=enable_comments)
82 stats = diff_processor.stat()
83 stats = diff_processor.stat()
83 size = len(diff or '')
84 size = len(diff or '')
84 else:
85 else:
85 diff = wrap_to_table(_('Changeset was too big and was cut off, use '
86 diff = wrap_to_table(_('Changeset was too big and was cut off, use '
86 'diff menu to display this diff'))
87 'diff menu to display this diff'))
87 stats = (0, 0)
88 stats = (0, 0)
88 size = 0
89 size = 0
89 if not diff:
90 if not diff:
90 submodules = filter(lambda o: isinstance(o, SubModuleNode),
91 submodules = filter(lambda o: isinstance(o, SubModuleNode),
91 [filenode_new, filenode_old])
92 [filenode_new, filenode_old])
92 if submodules:
93 if submodules:
93 diff = wrap_to_table(escape('Submodule %r' % submodules[0]))
94 diff = wrap_to_table(escape('Submodule %r' % submodules[0]))
94 else:
95 else:
95 diff = wrap_to_table(_('No changes detected'))
96 diff = wrap_to_table(_('No changes detected'))
96
97
97 cs1 = filenode_old.changeset.raw_id
98 cs1 = filenode_old.changeset.raw_id
98 cs2 = filenode_new.changeset.raw_id
99 cs2 = filenode_new.changeset.raw_id
99
100
100 return size, cs1, cs2, diff, stats
101 return size, cs1, cs2, diff, stats
101
102
102
103
103 def get_gitdiff(filenode_old, filenode_new, ignore_whitespace=True, context=3):
104 def get_gitdiff(filenode_old, filenode_new, ignore_whitespace=True, context=3):
104 """
105 """
105 Returns git style diff between given ``filenode_old`` and ``filenode_new``.
106 Returns git style diff between given ``filenode_old`` and ``filenode_new``.
106
107
107 :param ignore_whitespace: ignore whitespaces in diff
108 :param ignore_whitespace: ignore whitespaces in diff
108 """
109 """
109 # make sure we pass in default context
110 # make sure we pass in default context
110 context = context or 3
111 context = context or 3
111 submodules = filter(lambda o: isinstance(o, SubModuleNode),
112 submodules = filter(lambda o: isinstance(o, SubModuleNode),
112 [filenode_new, filenode_old])
113 [filenode_new, filenode_old])
113 if submodules:
114 if submodules:
114 return ''
115 return ''
115
116
116 for filenode in (filenode_old, filenode_new):
117 for filenode in (filenode_old, filenode_new):
117 if not isinstance(filenode, FileNode):
118 if not isinstance(filenode, FileNode):
118 raise VCSError("Given object should be FileNode object, not %s"
119 raise VCSError("Given object should be FileNode object, not %s"
119 % filenode.__class__)
120 % filenode.__class__)
120
121
121 repo = filenode_new.changeset.repository
122 repo = filenode_new.changeset.repository
122 old_raw_id = getattr(filenode_old.changeset, 'raw_id', repo.EMPTY_CHANGESET)
123 old_raw_id = getattr(filenode_old.changeset, 'raw_id', repo.EMPTY_CHANGESET)
123 new_raw_id = getattr(filenode_new.changeset, 'raw_id', repo.EMPTY_CHANGESET)
124 new_raw_id = getattr(filenode_new.changeset, 'raw_id', repo.EMPTY_CHANGESET)
124
125
125 vcs_gitdiff = repo.get_diff(old_raw_id, new_raw_id, filenode_new.path,
126 vcs_gitdiff = repo.get_diff(old_raw_id, new_raw_id, filenode_new.path,
126 ignore_whitespace, context)
127 ignore_whitespace, context)
127 return vcs_gitdiff
128 return vcs_gitdiff
128
129
129
130
130 class DiffProcessor(object):
131 class DiffProcessor(object):
131 """
132 """
132 Give it a unified diff and it returns a list of the files that were
133 Give it a unified diff and it returns a list of the files that were
133 mentioned in the diff together with a dict of meta information that
134 mentioned in the diff together with a dict of meta information that
134 can be used to render it in an HTML template.
135 can be used to render it in an HTML template.
135 """
136 """
136 _chunk_re = re.compile(r'@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@(.*)')
137 _chunk_re = re.compile(r'@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@(.*)')
137 _newline_marker = '\\ No newline at end of file\n'
138 _newline_marker = '\\ No newline at end of file\n'
138
139
139 def __init__(self, diff, differ='diff', format='gitdiff'):
140 def __init__(self, diff, differ='diff', format='gitdiff'):
140 """
141 """
141 :param diff: a text in diff format or generator
142 :param diff: a text in diff format or generator
142 :param format: format of diff passed, `udiff` or `gitdiff`
143 :param format: format of diff passed, `udiff` or `gitdiff`
143 """
144 """
144 if isinstance(diff, basestring):
145 if isinstance(diff, basestring):
145 diff = [diff]
146 diff = [diff]
146
147
147 self.__udiff = diff
148 self.__udiff = diff
148 self.__format = format
149 self.__format = format
149 self.adds = 0
150 self.adds = 0
150 self.removes = 0
151 self.removes = 0
151
152
152 if isinstance(self.__udiff, basestring):
153 if isinstance(self.__udiff, basestring):
153 self.lines = iter(self.__udiff.splitlines(1))
154 self.lines = iter(self.__udiff.splitlines(1))
154
155
155 elif self.__format == 'gitdiff':
156 elif self.__format == 'gitdiff':
156 udiff_copy = self.copy_iterator()
157 udiff_copy = self.copy_iterator()
157 self.lines = imap(self.escaper, self._parse_gitdiff(udiff_copy))
158 self.lines = imap(self.escaper, self._parse_gitdiff(udiff_copy))
158 else:
159 else:
159 udiff_copy = self.copy_iterator()
160 udiff_copy = self.copy_iterator()
160 self.lines = imap(self.escaper, udiff_copy)
161 self.lines = imap(self.escaper, udiff_copy)
161
162
162 # Select a differ.
163 # Select a differ.
163 if differ == 'difflib':
164 if differ == 'difflib':
164 self.differ = self._highlight_line_difflib
165 self.differ = self._highlight_line_difflib
165 else:
166 else:
166 self.differ = self._highlight_line_udiff
167 self.differ = self._highlight_line_udiff
167
168
168 def escaper(self, string):
169 def escaper(self, string):
169 return markupsafe.escape(string)
170 return markupsafe.escape(string)
170
171
171 def copy_iterator(self):
172 def copy_iterator(self):
172 """
173 """
173 make a fresh copy of the generator; we should not iterate through
174 make a fresh copy of the generator; we should not iterate through
174 the original, as it is needed for repeated operations on
175 the original, as it is needed for repeated operations on
175 this instance of DiffProcessor
176 this instance of DiffProcessor
176 """
177 """
177 self.__udiff, iterator_copy = tee(self.__udiff)
178 self.__udiff, iterator_copy = tee(self.__udiff)
178 return iterator_copy
179 return iterator_copy
179
180
180 def _extract_rev(self, line1, line2):
181 def _extract_rev(self, line1, line2):
181 """
182 """
182 Extract the operation (A/M/D), filename and revision hint from a line.
183 Extract the operation (A/M/D), filename and revision hint from a line.
183 """
184 """
184
185
185 try:
186 try:
186 if line1.startswith('--- ') and line2.startswith('+++ '):
187 if line1.startswith('--- ') and line2.startswith('+++ '):
187 l1 = line1[4:].split(None, 1)
188 l1 = line1[4:].split(None, 1)
188 old_filename = (l1[0].replace('a/', '', 1)
189 old_filename = (l1[0].replace('a/', '', 1)
189 if len(l1) >= 1 else None)
190 if len(l1) >= 1 else None)
190 old_rev = l1[1] if len(l1) == 2 else 'old'
191 old_rev = l1[1] if len(l1) == 2 else 'old'
191
192
192 l2 = line2[4:].split(None, 1)
193 l2 = line2[4:].split(None, 1)
193 new_filename = (l2[0].replace('b/', '', 1)
194 new_filename = (l2[0].replace('b/', '', 1)
194 if len(l1) >= 1 else None)
195 if len(l1) >= 1 else None)
195 new_rev = l2[1] if len(l2) == 2 else 'new'
196 new_rev = l2[1] if len(l2) == 2 else 'new'
196
197
197 filename = (old_filename
198 filename = (old_filename
198 if old_filename != '/dev/null' else new_filename)
199 if old_filename != '/dev/null' else new_filename)
199
200
200 operation = 'D' if new_filename == '/dev/null' else None
201 operation = 'D' if new_filename == '/dev/null' else None
201 if not operation:
202 if not operation:
202 operation = 'M' if old_filename != '/dev/null' else 'A'
203 operation = 'M' if old_filename != '/dev/null' else 'A'
203
204
204 return operation, filename, new_rev, old_rev
205 return operation, filename, new_rev, old_rev
205 except (ValueError, IndexError):
206 except (ValueError, IndexError):
206 pass
207 pass
207
208
208 return None, None, None, None
209 return None, None, None, None
209
210
210 def _parse_gitdiff(self, diffiterator):
211 def _parse_gitdiff(self, diffiterator):
211 def line_decoder(l):
212 def line_decoder(l):
212 if l.startswith('+') and not l.startswith('+++'):
213 if l.startswith('+') and not l.startswith('+++'):
213 self.adds += 1
214 self.adds += 1
214 elif l.startswith('-') and not l.startswith('---'):
215 elif l.startswith('-') and not l.startswith('---'):
215 self.removes += 1
216 self.removes += 1
216 return l.decode('utf8', 'replace')
217 return l.decode('utf8', 'replace')
217
218
218 output = list(diffiterator)
219 output = list(diffiterator)
219 size = len(output)
220 size = len(output)
220
221
221 if size == 2:
222 if size == 2:
222 l = []
223 l = []
223 l.extend([output[0]])
224 l.extend([output[0]])
224 l.extend(output[1].splitlines(1))
225 l.extend(output[1].splitlines(1))
225 return map(line_decoder, l)
226 return map(line_decoder, l)
226 elif size == 1:
227 elif size == 1:
227 return map(line_decoder, output[0].splitlines(1))
228 return map(line_decoder, output[0].splitlines(1))
228 elif size == 0:
229 elif size == 0:
229 return []
230 return []
230
231
231 raise Exception('wrong size of diff %s' % size)
232 raise Exception('wrong size of diff %s' % size)
232
233
233 def _highlight_line_difflib(self, line, next_):
234 def _highlight_line_difflib(self, line, next_):
234 """
235 """
235 Highlight inline changes in both lines.
236 Highlight inline changes in both lines.
236 """
237 """
237
238
238 if line['action'] == 'del':
239 if line['action'] == 'del':
239 old, new = line, next_
240 old, new = line, next_
240 else:
241 else:
241 old, new = next_, line
242 old, new = next_, line
242
243
243 oldwords = re.split(r'(\W)', old['line'])
244 oldwords = re.split(r'(\W)', old['line'])
244 newwords = re.split(r'(\W)', new['line'])
245 newwords = re.split(r'(\W)', new['line'])
245
246
246 sequence = difflib.SequenceMatcher(None, oldwords, newwords)
247 sequence = difflib.SequenceMatcher(None, oldwords, newwords)
247
248
248 oldfragments, newfragments = [], []
249 oldfragments, newfragments = [], []
249 for tag, i1, i2, j1, j2 in sequence.get_opcodes():
250 for tag, i1, i2, j1, j2 in sequence.get_opcodes():
250 oldfrag = ''.join(oldwords[i1:i2])
251 oldfrag = ''.join(oldwords[i1:i2])
251 newfrag = ''.join(newwords[j1:j2])
252 newfrag = ''.join(newwords[j1:j2])
252 if tag != 'equal':
253 if tag != 'equal':
253 if oldfrag:
254 if oldfrag:
254 oldfrag = '<del>%s</del>' % oldfrag
255 oldfrag = '<del>%s</del>' % oldfrag
255 if newfrag:
256 if newfrag:
256 newfrag = '<ins>%s</ins>' % newfrag
257 newfrag = '<ins>%s</ins>' % newfrag
257 oldfragments.append(oldfrag)
258 oldfragments.append(oldfrag)
258 newfragments.append(newfrag)
259 newfragments.append(newfrag)
259
260
260 old['line'] = "".join(oldfragments)
261 old['line'] = "".join(oldfragments)
261 new['line'] = "".join(newfragments)
262 new['line'] = "".join(newfragments)
262
263
263 def _highlight_line_udiff(self, line, next_):
264 def _highlight_line_udiff(self, line, next_):
264 """
265 """
265 Highlight inline changes in both lines.
266 Highlight inline changes in both lines.
266 """
267 """
267 start = 0
268 start = 0
268 limit = min(len(line['line']), len(next_['line']))
269 limit = min(len(line['line']), len(next_['line']))
269 while start < limit and line['line'][start] == next_['line'][start]:
270 while start < limit and line['line'][start] == next_['line'][start]:
270 start += 1
271 start += 1
271 end = -1
272 end = -1
272 limit -= start
273 limit -= start
273 while -end <= limit and line['line'][end] == next_['line'][end]:
274 while -end <= limit and line['line'][end] == next_['line'][end]:
274 end -= 1
275 end -= 1
275 end += 1
276 end += 1
276 if start or end:
277 if start or end:
277 def do(l):
278 def do(l):
278 last = end + len(l['line'])
279 last = end + len(l['line'])
279 if l['action'] == 'add':
280 if l['action'] == 'add':
280 tag = 'ins'
281 tag = 'ins'
281 else:
282 else:
282 tag = 'del'
283 tag = 'del'
283 l['line'] = '%s<%s>%s</%s>%s' % (
284 l['line'] = '%s<%s>%s</%s>%s' % (
284 l['line'][:start],
285 l['line'][:start],
285 tag,
286 tag,
286 l['line'][start:last],
287 l['line'][start:last],
287 tag,
288 tag,
288 l['line'][last:]
289 l['line'][last:]
289 )
290 )
290 do(line)
291 do(line)
291 do(next_)
292 do(next_)
292
293
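
The inline highlighting above works by scanning the common prefix and common suffix of the deleted and added line and wrapping only the differing middle in <del>/<ins> tags. A simplified standalone sketch of the same scan follows; the function name and the sample strings are illustrative only.

def mark_inline_change(old, new, old_tag='del', new_tag='ins'):
    start = 0
    limit = min(len(old), len(new))
    # advance over the common prefix
    while start < limit and old[start] == new[start]:
        start += 1
    end = -1
    limit -= start
    # walk back over the common suffix
    while -end <= limit and old[end] == new[end]:
        end -= 1
    end += 1
    if not (start or end):
        return old, new

    def wrap(s, tag):
        last = end + len(s)
        return '%s<%s>%s</%s>%s' % (s[:start], tag, s[start:last], tag, s[last:])

    return wrap(old, old_tag), wrap(new, new_tag)

print(mark_inline_change('foo bar baz', 'foo qux baz'))
# ('foo <del>bar</del> baz', 'foo <ins>qux</ins> baz')
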
293 def _parse_udiff(self, inline_diff=True):
294 def _parse_udiff(self, inline_diff=True):
294 """
295 """
295 Parse the diff and return data for the template.
296 Parse the diff and return data for the template.
296 """
297 """
297 lineiter = self.lines
298 lineiter = self.lines
298 files = []
299 files = []
299 try:
300 try:
300 line = lineiter.next()
301 line = lineiter.next()
301 while 1:
302 while 1:
302 # continue until we find the old file
303 # continue until we find the old file
303 if not line.startswith('--- '):
304 if not line.startswith('--- '):
304 line = lineiter.next()
305 line = lineiter.next()
305 continue
306 continue
306
307
307 chunks = []
308 chunks = []
308 stats = [0, 0]
309 stats = [0, 0]
309 operation, filename, old_rev, new_rev = \
310 operation, filename, old_rev, new_rev = \
310 self._extract_rev(line, lineiter.next())
311 self._extract_rev(line, lineiter.next())
311 files.append({
312 files.append({
312 'filename': filename,
313 'filename': filename,
313 'old_revision': old_rev,
314 'old_revision': old_rev,
314 'new_revision': new_rev,
315 'new_revision': new_rev,
315 'chunks': chunks,
316 'chunks': chunks,
316 'operation': operation,
317 'operation': operation,
317 'stats': stats,
318 'stats': stats,
318 })
319 })
319
320
320 line = lineiter.next()
321 line = lineiter.next()
321 while line:
322 while line:
322 match = self._chunk_re.match(line)
323 match = self._chunk_re.match(line)
323 if not match:
324 if not match:
324 break
325 break
325
326
326 lines = []
327 lines = []
327 chunks.append(lines)
328 chunks.append(lines)
328
329
329 old_line, old_end, new_line, new_end = \
330 old_line, old_end, new_line, new_end = \
330 [int(x or 1) for x in match.groups()[:-1]]
331 [int(x or 1) for x in match.groups()[:-1]]
331 old_line -= 1
332 old_line -= 1
332 new_line -= 1
333 new_line -= 1
333 gr = match.groups()
334 gr = match.groups()
334 context = len(gr) == 5
335 context = len(gr) == 5
335 old_end += old_line
336 old_end += old_line
336 new_end += new_line
337 new_end += new_line
337
338
338 if context:
339 if context:
339 # skip context only if it's first line
340 # skip context only if it's first line
340 if int(gr[0]) > 1:
341 if int(gr[0]) > 1:
341 lines.append({
342 lines.append({
342 'old_lineno': '...',
343 'old_lineno': '...',
343 'new_lineno': '...',
344 'new_lineno': '...',
344 'action': 'context',
345 'action': 'context',
345 'line': line,
346 'line': line,
346 })
347 })
347
348
348 line = lineiter.next()
349 line = lineiter.next()
349
350
350 while old_line < old_end or new_line < new_end:
351 while old_line < old_end or new_line < new_end:
351 if line:
352 if line:
352 command = line[0]
353 command = line[0]
353 if command in ['+', '-', ' ']:
354 if command in ['+', '-', ' ']:
354 # only modify the line if it's actually a diff
355 # only modify the line if it's actually a diff
355 # line
356 # line
356 line = line[1:]
357 line = line[1:]
357 else:
358 else:
358 command = ' '
359 command = ' '
359
360
360 affects_old = affects_new = False
361 affects_old = affects_new = False
361
362
362 # ignore those if we don't expect them
363 # ignore those if we don't expect them
363 if command in '#@':
364 if command in '#@':
364 continue
365 continue
365 elif command == '+':
366 elif command == '+':
366 affects_new = True
367 affects_new = True
367 action = 'add'
368 action = 'add'
368 stats[0] += 1
369 stats[0] += 1
369 elif command == '-':
370 elif command == '-':
370 affects_old = True
371 affects_old = True
371 action = 'del'
372 action = 'del'
372 stats[1] += 1
373 stats[1] += 1
373 else:
374 else:
374 affects_old = affects_new = True
375 affects_old = affects_new = True
375 action = 'unmod'
376 action = 'unmod'
376
377
377 if line != self._newline_marker:
378 if line != self._newline_marker:
378 old_line += affects_old
379 old_line += affects_old
379 new_line += affects_new
380 new_line += affects_new
380 lines.append({
381 lines.append({
381 'old_lineno': affects_old and old_line or '',
382 'old_lineno': affects_old and old_line or '',
382 'new_lineno': affects_new and new_line or '',
383 'new_lineno': affects_new and new_line or '',
383 'action': action,
384 'action': action,
384 'line': line
385 'line': line
385 })
386 })
386
387
387 line = lineiter.next()
388 line = lineiter.next()
388 if line == self._newline_marker:
389 if line == self._newline_marker:
389 # we need to append to lines, since this is not
390 # we need to append to lines, since this is not
390 # counted in the line specs of diff
391 # counted in the line specs of diff
391 lines.append({
392 lines.append({
392 'old_lineno': '...',
393 'old_lineno': '...',
393 'new_lineno': '...',
394 'new_lineno': '...',
394 'action': 'context',
395 'action': 'context',
395 'line': line
396 'line': line
396 })
397 })
397
398
398 except StopIteration:
399 except StopIteration:
399 pass
400 pass
400
401
401 sorter = lambda info: {'A': 0, 'M': 1, 'D': 2}.get(info['operation'])
402 sorter = lambda info: {'A': 0, 'M': 1, 'D': 2}.get(info['operation'])
402 if inline_diff is False:
403 if inline_diff is False:
403 return sorted(files, key=sorter)
404 return sorted(files, key=sorter)
404
405
405 # highlight inline changes
406 # highlight inline changes
406 for diff_data in files:
407 for diff_data in files:
407 for chunk in diff_data['chunks']:
408 for chunk in diff_data['chunks']:
408 lineiter = iter(chunk)
409 lineiter = iter(chunk)
409 try:
410 try:
410 while 1:
411 while 1:
411 line = lineiter.next()
412 line = lineiter.next()
412 if line['action'] not in ['unmod', 'context']:
413 if line['action'] not in ['unmod', 'context']:
413 nextline = lineiter.next()
414 nextline = lineiter.next()
414 if nextline['action'] in ['unmod', 'context'] or \
415 if nextline['action'] in ['unmod', 'context'] or \
415 nextline['action'] == line['action']:
416 nextline['action'] == line['action']:
416 continue
417 continue
417 self.differ(line, nextline)
418 self.differ(line, nextline)
418 except StopIteration:
419 except StopIteration:
419 pass
420 pass
420
421
421 return sorted(files, key=sorter)
422 return sorted(files, key=sorter)
422
423
423 def prepare(self, inline_diff=True):
424 def prepare(self, inline_diff=True):
424 """
425 """
425 Prepare the passed udiff for HTML rendering. It'll return a list
426 Prepare the passed udiff for HTML rendering. It'll return a list
426 of dicts
427 of dicts
427 """
428 """
428 return self._parse_udiff(inline_diff=inline_diff)
429 return self._parse_udiff(inline_diff=inline_diff)
429
430
430 def _safe_id(self, idstring):
431 def _safe_id(self, idstring):
431 """Make a string safe for including in an id attribute.
432 """Make a string safe for including in an id attribute.
432
433
433 The HTML spec says that id attributes 'must begin with
434 The HTML spec says that id attributes 'must begin with
434 a letter ([A-Za-z]) and may be followed by any number
435 a letter ([A-Za-z]) and may be followed by any number
435 of letters, digits ([0-9]), hyphens ("-"), underscores
436 of letters, digits ([0-9]), hyphens ("-"), underscores
436 ("_"), colons (":"), and periods (".")'. These regexps
437 ("_"), colons (":"), and periods (".")'. These regexps
437 are slightly over-zealous, in that they remove colons
438 are slightly over-zealous, in that they remove colons
438 and periods unnecessarily.
439 and periods unnecessarily.
439
440
440 Whitespace is transformed into underscores, and then
441 Whitespace is transformed into underscores, and then
441 anything which is not a hyphen or a character that
442 anything which is not a hyphen or a character that
442 matches \w (alphanumerics and underscore) is removed.
443 matches \w (alphanumerics and underscore) is removed.
443
444
444 """
445 """
445 # Transform all whitespace to underscore
446 # Transform all whitespace to underscore
446 idstring = re.sub(r'\s', "_", '%s' % idstring)
447 idstring = re.sub(r'\s', "_", '%s' % idstring)
447 # Remove everything that is not a hyphen or a member of \w
448 # Remove everything that is not a hyphen or a member of \w
448 idstring = re.sub(r'(?!-)\W', "", idstring).lower()
449 idstring = re.sub(r'(?!-)\W', "", idstring).lower()
449 return idstring
450 return idstring
450
451
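For illustration, a minimal standalone sketch of the same id-sanitising transform; the function name safe_id and the sample path are invented for this example:

import re

def safe_id(idstring):
    # whitespace -> underscores, then drop anything that is neither '-' nor \w
    idstring = re.sub(r'\s', "_", '%s' % idstring)
    return re.sub(r'(?!-)\W', "", idstring).lower()

safe_id("docs/My File.txt")   # -> 'docsmy_filetxt', usable as an HTML anchor id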
451 def raw_diff(self):
452 def raw_diff(self):
452 """
453 """
453 Returns raw string as udiff
454 Returns raw string as udiff
454 """
455 """
455 udiff_copy = self.copy_iterator()
456 udiff_copy = self.copy_iterator()
456 if self.__format == 'gitdiff':
457 if self.__format == 'gitdiff':
457 udiff_copy = self._parse_gitdiff(udiff_copy)
458 udiff_copy = self._parse_gitdiff(udiff_copy)
458 return u''.join(udiff_copy)
459 return u''.join(udiff_copy)
459
460
460 def as_html(self, table_class='code-difftable', line_class='line',
461 def as_html(self, table_class='code-difftable', line_class='line',
461 new_lineno_class='lineno old', old_lineno_class='lineno new',
462 new_lineno_class='lineno old', old_lineno_class='lineno new',
462 code_class='code', enable_comments=False, diff_lines=None):
463 code_class='code', enable_comments=False, diff_lines=None):
463 """
464 """
464 Return given diff as html table with customized css classes
465 Return given diff as html table with customized css classes
465 """
466 """
466 def _link_to_if(condition, label, url):
467 def _link_to_if(condition, label, url):
467 """
468 """
468 Generates a link if the condition is met, or just the label if not.
469 Generates a link if the condition is met, or just the label if not.
469 """
470 """
470
471
471 if condition:
472 if condition:
472 return '''<a href="%(url)s">%(label)s</a>''' % {
473 return '''<a href="%(url)s">%(label)s</a>''' % {
473 'url': url,
474 'url': url,
474 'label': label
475 'label': label
475 }
476 }
476 else:
477 else:
477 return label
478 return label
478 if diff_lines is None:
479 if diff_lines is None:
479 diff_lines = self.prepare()
480 diff_lines = self.prepare()
480 _html_empty = True
481 _html_empty = True
481 _html = []
482 _html = []
482 _html.append('''<table class="%(table_class)s">\n''' % {
483 _html.append('''<table class="%(table_class)s">\n''' % {
483 'table_class': table_class
484 'table_class': table_class
484 })
485 })
485 for diff in diff_lines:
486 for diff in diff_lines:
486 for line in diff['chunks']:
487 for line in diff['chunks']:
487 _html_empty = False
488 _html_empty = False
488 for change in line:
489 for change in line:
489 _html.append('''<tr class="%(lc)s %(action)s">\n''' % {
490 _html.append('''<tr class="%(lc)s %(action)s">\n''' % {
490 'lc': line_class,
491 'lc': line_class,
491 'action': change['action']
492 'action': change['action']
492 })
493 })
493 anchor_old_id = ''
494 anchor_old_id = ''
494 anchor_new_id = ''
495 anchor_new_id = ''
495 anchor_old = "%(filename)s_o%(oldline_no)s" % {
496 anchor_old = "%(filename)s_o%(oldline_no)s" % {
496 'filename': self._safe_id(diff['filename']),
497 'filename': self._safe_id(diff['filename']),
497 'oldline_no': change['old_lineno']
498 'oldline_no': change['old_lineno']
498 }
499 }
499 anchor_new = "%(filename)s_n%(oldline_no)s" % {
500 anchor_new = "%(filename)s_n%(oldline_no)s" % {
500 'filename': self._safe_id(diff['filename']),
501 'filename': self._safe_id(diff['filename']),
501 'oldline_no': change['new_lineno']
502 'oldline_no': change['new_lineno']
502 }
503 }
503 cond_old = (change['old_lineno'] != '...' and
504 cond_old = (change['old_lineno'] != '...' and
504 change['old_lineno'])
505 change['old_lineno'])
505 cond_new = (change['new_lineno'] != '...' and
506 cond_new = (change['new_lineno'] != '...' and
506 change['new_lineno'])
507 change['new_lineno'])
507 if cond_old:
508 if cond_old:
508 anchor_old_id = 'id="%s"' % anchor_old
509 anchor_old_id = 'id="%s"' % anchor_old
509 if cond_new:
510 if cond_new:
510 anchor_new_id = 'id="%s"' % anchor_new
511 anchor_new_id = 'id="%s"' % anchor_new
511 ###########################################################
512 ###########################################################
512 # OLD LINE NUMBER
513 # OLD LINE NUMBER
513 ###########################################################
514 ###########################################################
514 _html.append('''\t<td %(a_id)s class="%(olc)s">''' % {
515 _html.append('''\t<td %(a_id)s class="%(olc)s">''' % {
515 'a_id': anchor_old_id,
516 'a_id': anchor_old_id,
516 'olc': old_lineno_class
517 'olc': old_lineno_class
517 })
518 })
518
519
519 _html.append('''%(link)s''' % {
520 _html.append('''%(link)s''' % {
520 'link': _link_to_if(True, change['old_lineno'],
521 'link': _link_to_if(True, change['old_lineno'],
521 '#%s' % anchor_old)
522 '#%s' % anchor_old)
522 })
523 })
523 _html.append('''</td>\n''')
524 _html.append('''</td>\n''')
524 ###########################################################
525 ###########################################################
525 # NEW LINE NUMBER
526 # NEW LINE NUMBER
526 ###########################################################
527 ###########################################################
527
528
528 _html.append('''\t<td %(a_id)s class="%(nlc)s">''' % {
529 _html.append('''\t<td %(a_id)s class="%(nlc)s">''' % {
529 'a_id': anchor_new_id,
530 'a_id': anchor_new_id,
530 'nlc': new_lineno_class
531 'nlc': new_lineno_class
531 })
532 })
532
533
533 _html.append('''%(link)s''' % {
534 _html.append('''%(link)s''' % {
534 'link': _link_to_if(True, change['new_lineno'],
535 'link': _link_to_if(True, change['new_lineno'],
535 '#%s' % anchor_new)
536 '#%s' % anchor_new)
536 })
537 })
537 _html.append('''</td>\n''')
538 _html.append('''</td>\n''')
538 ###########################################################
539 ###########################################################
539 # CODE
540 # CODE
540 ###########################################################
541 ###########################################################
541 comments = '' if enable_comments else 'no-comment'
542 comments = '' if enable_comments else 'no-comment'
542 _html.append('''\t<td class="%(cc)s %(inc)s">''' % {
543 _html.append('''\t<td class="%(cc)s %(inc)s">''' % {
543 'cc': code_class,
544 'cc': code_class,
544 'inc': comments
545 'inc': comments
545 })
546 })
546 _html.append('''\n\t\t<pre>%(code)s</pre>\n''' % {
547 _html.append('''\n\t\t<pre>%(code)s</pre>\n''' % {
547 'code': change['line']
548 'code': change['line']
548 })
549 })
549 _html.append('''\t</td>''')
550 _html.append('''\t</td>''')
550 _html.append('''\n</tr>\n''')
551 _html.append('''\n</tr>\n''')
551 _html.append('''</table>''')
552 _html.append('''</table>''')
552 if _html_empty:
553 if _html_empty:
553 return None
554 return None
554 return ''.join(_html)
555 return ''.join(_html)
555
556
556 def stat(self):
557 def stat(self):
557 """
558 """
558 Returns a tuple of added and removed lines for this instance
559 Returns a tuple of added and removed lines for this instance
559 """
560 """
560 return self.adds, self.removes
561 return self.adds, self.removes
561
562
562
563
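A hedged sketch of how the prepared diff structure above might be consumed; diff_processor stands for a hypothetical, already-constructed instance of this class (its constructor sits outside this hunk), built from a raw unified diff:

prepared = diff_processor.prepare(inline_diff=True)   # list of per-file dicts
for f in prepared:
    # each entry carries 'filename', 'operation' ('A'/'M'/'D') and 'chunks'
    for chunk in f['chunks']:
        for change in chunk:
            # every change has 'action', 'old_lineno', 'new_lineno' and 'line'
            print(change['action'], change['old_lineno'], change['new_lineno'])
added, removed = diff_processor.stat()                     # line counts
html_table = diff_processor.as_html(enable_comments=True)  # None if the diff is empty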
563 class InMemoryBundleRepo(bundlerepository):
564 class InMemoryBundleRepo(bundlerepository):
564 def __init__(self, ui, path, bundlestream):
565 def __init__(self, ui, path, bundlestream):
565 self._tempparent = None
566 self._tempparent = None
566 localrepo.localrepository.__init__(self, ui, path)
567 localrepo.localrepository.__init__(self, ui, path)
567 self.ui.setconfig('phases', 'publish', False)
568 self.ui.setconfig('phases', 'publish', False)
568
569
569 self.bundle = bundlestream
570 self.bundle = bundlestream
570
571
571 # dict with the mapping 'filename' -> position in the bundle
572 # dict with the mapping 'filename' -> position in the bundle
572 self.bundlefilespos = {}
573 self.bundlefilespos = {}
573
574
574
575
575 def differ(org_repo, org_ref, other_repo, other_ref, discovery_data=None):
576 def differ(org_repo, org_ref, other_repo, other_ref, discovery_data=None):
576 """
577 """
577 General differ between branches, bookmarks or separate but related
578 General differ between branches, bookmarks or separate but related
578 repositories
579 repositories
579
580
580 :param org_repo:
581 :param org_repo:
581 :type org_repo:
582 :type org_repo:
582 :param org_ref:
583 :param org_ref:
583 :type org_ref:
584 :type org_ref:
584 :param other_repo:
585 :param other_repo:
585 :type other_repo:
586 :type other_repo:
586 :param other_ref:
587 :param other_ref:
587 :type other_ref:
588 :type other_ref:
588 """
589 """
589
590
590 bundlerepo = None
591 bundlerepo = None
591 ignore_whitespace = False
592 ignore_whitespace = False
592 context = 3
593 context = 3
593 org_repo = org_repo.scm_instance._repo
594 org_repo = org_repo.scm_instance._repo
594 other_repo = other_repo.scm_instance._repo
595 other_repo = other_repo.scm_instance._repo
595 opts = diffopts(git=True, ignorews=ignore_whitespace, context=context)
596 opts = diffopts(git=True, ignorews=ignore_whitespace, context=context)
596 org_ref = org_ref[1]
597 org_ref = org_ref[1]
597 other_ref = other_ref[1]
598 other_ref = other_ref[1]
598
599
599 if org_repo != other_repo:
600 if org_repo != other_repo:
600
601
601 common, incoming, rheads = discovery_data
602 common, incoming, rheads = discovery_data
602
603 other_repo_peer = localrepo.locallegacypeer(other_repo.local())
603 # create a bundle (uncompressed if other repo is not local)
604 # create a bundle (uncompressed if other repo is not local)
604 if other_repo.capable('getbundle') and incoming:
605 if other_repo_peer.capable('getbundle') and incoming:
605 # disable repo hooks here since it's just bundle !
606 # disable repo hooks here since it's just bundle !
606 # patch and reset hooks section of UI config to not run any
607 # patch and reset hooks section of UI config to not run any
607 # hooks on fetching archives with subrepos
608 # hooks on fetching archives with subrepos
608 for k, _ in other_repo.ui.configitems('hooks'):
609 for k, _ in other_repo.ui.configitems('hooks'):
609 other_repo.ui.setconfig('hooks', k, None)
610 other_repo.ui.setconfig('hooks', k, None)
610
611
611 unbundle = other_repo.getbundle('incoming', common=common,
612 unbundle = other_repo.getbundle('incoming', common=common,
612 heads=rheads)
613 heads=rheads)
613
614
614 buf = BytesIO()
615 buf = BytesIO()
615 while True:
616 while True:
616 chunk = unbundle._stream.read(1024 * 4)
617 chunk = unbundle._stream.read(1024 * 4)
617 if not chunk:
618 if not chunk:
618 break
619 break
619 buf.write(chunk)
620 buf.write(chunk)
620
621
621 buf.seek(0)
622 buf.seek(0)
622 # replace chunked _stream with data that can do tell() and seek()
623 # replace chunked _stream with data that can do tell() and seek()
623 unbundle._stream = buf
624 unbundle._stream = buf
624
625
625 ui = make_ui('db')
626 ui = make_ui('db')
626 bundlerepo = InMemoryBundleRepo(ui, path=org_repo.root,
627 bundlerepo = InMemoryBundleRepo(ui, path=org_repo.root,
627 bundlestream=unbundle)
628 bundlestream=unbundle)
628
629
629 return ''.join(patch.diff(bundlerepo or org_repo,
630 return ''.join(patch.diff(bundlerepo or org_repo,
630 node1=org_repo[org_ref].node(),
631 node1=org_repo[org_ref].node(),
631 node2=other_repo[other_ref].node(),
632 node2=other_repo[other_ref].node(),
632 opts=opts))
633 opts=opts))
633 else:
634 else:
634 return ''.join(patch.diff(org_repo, node1=org_ref, node2=other_ref,
635 return ''.join(patch.diff(org_repo, node1=org_ref, node2=other_ref,
635 opts=opts))
636 opts=opts))
@@ -1,703 +1,659 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """
2 """
3 rhodecode.lib.utils
3 rhodecode.lib.utils
4 ~~~~~~~~~~~~~~~~~~~
4 ~~~~~~~~~~~~~~~~~~~
5
5
6 Utilities library for RhodeCode
6 Utilities library for RhodeCode
7
7
8 :created_on: Apr 18, 2010
8 :created_on: Apr 18, 2010
9 :author: marcink
9 :author: marcink
10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 :license: GPLv3, see COPYING for more details.
11 :license: GPLv3, see COPYING for more details.
12 """
12 """
13 # This program is free software: you can redistribute it and/or modify
13 # This program is free software: you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation, either version 3 of the License, or
15 # the Free Software Foundation, either version 3 of the License, or
16 # (at your option) any later version.
16 # (at your option) any later version.
17 #
17 #
18 # This program is distributed in the hope that it will be useful,
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
21 # GNU General Public License for more details.
22 #
22 #
23 # You should have received a copy of the GNU General Public License
23 # You should have received a copy of the GNU General Public License
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25
25
26 import os
26 import os
27 import re
27 import re
28 import logging
28 import logging
29 import datetime
29 import datetime
30 import traceback
30 import traceback
31 import paste
31 import paste
32 import beaker
32 import beaker
33 import tarfile
33 import tarfile
34 import shutil
34 import shutil
35 from os.path import abspath
35 from os.path import abspath
36 from os.path import dirname as dn, join as jn
36 from os.path import dirname as dn, join as jn
37
37
38 from paste.script.command import Command, BadCommand
38 from paste.script.command import Command, BadCommand
39
39
40 from mercurial import ui, config
40 from mercurial import ui, config
41
41
42 from webhelpers.text import collapse, remove_formatting, strip_tags
42 from webhelpers.text import collapse, remove_formatting, strip_tags
43
43
44 from rhodecode.lib.vcs import get_backend
44 from rhodecode.lib.vcs import get_backend
45 from rhodecode.lib.vcs.backends.base import BaseChangeset
45 from rhodecode.lib.vcs.backends.base import BaseChangeset
46 from rhodecode.lib.vcs.utils.lazy import LazyProperty
46 from rhodecode.lib.vcs.utils.lazy import LazyProperty
47 from rhodecode.lib.vcs.utils.helpers import get_scm
47 from rhodecode.lib.vcs.utils.helpers import get_scm
48 from rhodecode.lib.vcs.exceptions import VCSError
48 from rhodecode.lib.vcs.exceptions import VCSError
49
49
50 from rhodecode.lib.caching_query import FromCache
50 from rhodecode.lib.caching_query import FromCache
51
51
52 from rhodecode.model import meta
52 from rhodecode.model import meta
53 from rhodecode.model.db import Repository, User, RhodeCodeUi, \
53 from rhodecode.model.db import Repository, User, RhodeCodeUi, \
54 UserLog, RepoGroup, RhodeCodeSetting, CacheInvalidation
54 UserLog, RepoGroup, RhodeCodeSetting, CacheInvalidation
55 from rhodecode.model.meta import Session
55 from rhodecode.model.meta import Session
56 from rhodecode.model.repos_group import ReposGroupModel
56 from rhodecode.model.repos_group import ReposGroupModel
57 from rhodecode.lib.utils2 import safe_str, safe_unicode
57 from rhodecode.lib.utils2 import safe_str, safe_unicode
58 from rhodecode.lib.vcs.utils.fakemod import create_module
58 from rhodecode.lib.vcs.utils.fakemod import create_module
59
59
60 log = logging.getLogger(__name__)
60 log = logging.getLogger(__name__)
61
61
62 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
62 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
63
63
64
64
65 def recursive_replace(str_, replace=' '):
65 def recursive_replace(str_, replace=' '):
66 """
66 """
67 Recursive replace of given sign to just one instance
67 Recursive replace of given sign to just one instance
68
68
69 :param str_: given string
69 :param str_: given string
70 :param replace: char to find and replace multiple instances
70 :param replace: char to find and replace multiple instances
71
71
72 Examples::
72 Examples::
73 >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
73 >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
74 'Mighty-Mighty-Bo-sstones'
74 'Mighty-Mighty-Bo-sstones'
75 """
75 """
76
76
77 if str_.find(replace * 2) == -1:
77 if str_.find(replace * 2) == -1:
78 return str_
78 return str_
79 else:
79 else:
80 str_ = str_.replace(replace * 2, replace)
80 str_ = str_.replace(replace * 2, replace)
81 return recursive_replace(str_, replace)
81 return recursive_replace(str_, replace)
82
82
83
83
84 def repo_name_slug(value):
84 def repo_name_slug(value):
85 """
85 """
86 Return slug of name of repository
86 Return slug of name of repository
87 This function is called on each creation/modification
87 This function is called on each creation/modification
88 of repository to prevent bad names in repo
88 of repository to prevent bad names in repo
89 """
89 """
90
90
91 slug = remove_formatting(value)
91 slug = remove_formatting(value)
92 slug = strip_tags(slug)
92 slug = strip_tags(slug)
93
93
94 for c in """=[]\;'"<>,/~!@#$%^&*()+{}|: """:
94 for c in """=[]\;'"<>,/~!@#$%^&*()+{}|: """:
95 slug = slug.replace(c, '-')
95 slug = slug.replace(c, '-')
96 slug = recursive_replace(slug, '-')
96 slug = recursive_replace(slug, '-')
97 slug = collapse(slug, '-')
97 slug = collapse(slug, '-')
98 return slug
98 return slug
99
99
100
100
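A hedged usage sketch of the slug pipeline above; the inputs are invented, and remove_formatting/strip_tags are effectively no-ops for plain strings like these:

repo_name_slug('my repos/web client')   # -> 'my-repos-web-client'
repo_name_slug('a   b')                 # runs of '-' collapse to one: 'a-b'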
101 def get_repo_slug(request):
101 def get_repo_slug(request):
102 _repo = request.environ['pylons.routes_dict'].get('repo_name')
102 _repo = request.environ['pylons.routes_dict'].get('repo_name')
103 if _repo:
103 if _repo:
104 _repo = _repo.rstrip('/')
104 _repo = _repo.rstrip('/')
105 return _repo
105 return _repo
106
106
107
107
108 def get_repos_group_slug(request):
108 def get_repos_group_slug(request):
109 _group = request.environ['pylons.routes_dict'].get('group_name')
109 _group = request.environ['pylons.routes_dict'].get('group_name')
110 if _group:
110 if _group:
111 _group = _group.rstrip('/')
111 _group = _group.rstrip('/')
112 return _group
112 return _group
113
113
114
114
115 def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
115 def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
116 """
116 """
117 Action logger for various actions made by users
117 Action logger for various actions made by users
118
118
119 :param user: user that made this action, can be a unique username string or
119 :param user: user that made this action, can be a unique username string or
120 object containing user_id attribute
120 object containing user_id attribute
121 :param action: action to log, should be one of the predefined unique actions for
121 :param action: action to log, should be one of the predefined unique actions for
122 easy translations
122 easy translations
123 :param repo: string name of repository or object containing repo_id,
123 :param repo: string name of repository or object containing repo_id,
124 that action was made on
124 that action was made on
125 :param ipaddr: optional ip address from which the action was made
125 :param ipaddr: optional ip address from which the action was made
126 :param sa: optional sqlalchemy session
126 :param sa: optional sqlalchemy session
127
127
128 """
128 """
129
129
130 if not sa:
130 if not sa:
131 sa = meta.Session()
131 sa = meta.Session()
132
132
133 try:
133 try:
134 if hasattr(user, 'user_id'):
134 if hasattr(user, 'user_id'):
135 user_obj = user
135 user_obj = user
136 elif isinstance(user, basestring):
136 elif isinstance(user, basestring):
137 user_obj = User.get_by_username(user)
137 user_obj = User.get_by_username(user)
138 else:
138 else:
139 raise Exception('You have to provide user object or username')
139 raise Exception('You have to provide user object or username')
140
140
141 if hasattr(repo, 'repo_id'):
141 if hasattr(repo, 'repo_id'):
142 repo_obj = Repository.get(repo.repo_id)
142 repo_obj = Repository.get(repo.repo_id)
143 repo_name = repo_obj.repo_name
143 repo_name = repo_obj.repo_name
144 elif isinstance(repo, basestring):
144 elif isinstance(repo, basestring):
145 repo_name = repo.lstrip('/')
145 repo_name = repo.lstrip('/')
146 repo_obj = Repository.get_by_repo_name(repo_name)
146 repo_obj = Repository.get_by_repo_name(repo_name)
147 else:
147 else:
148 repo_obj = None
148 repo_obj = None
149 repo_name = ''
149 repo_name = ''
150
150
151 user_log = UserLog()
151 user_log = UserLog()
152 user_log.user_id = user_obj.user_id
152 user_log.user_id = user_obj.user_id
153 user_log.action = safe_unicode(action)
153 user_log.action = safe_unicode(action)
154
154
155 user_log.repository = repo_obj
155 user_log.repository = repo_obj
156 user_log.repository_name = repo_name
156 user_log.repository_name = repo_name
157
157
158 user_log.action_date = datetime.datetime.now()
158 user_log.action_date = datetime.datetime.now()
159 user_log.user_ip = ipaddr
159 user_log.user_ip = ipaddr
160 sa.add(user_log)
160 sa.add(user_log)
161
161
162 log.info(
162 log.info(
163 'Adding user %s, action %s on %s' % (user_obj, action,
163 'Adding user %s, action %s on %s' % (user_obj, action,
164 safe_unicode(repo))
164 safe_unicode(repo))
165 )
165 )
166 if commit:
166 if commit:
167 sa.commit()
167 sa.commit()
168 except:
168 except:
169 log.error(traceback.format_exc())
169 log.error(traceback.format_exc())
170 raise
170 raise
171
171
172
172
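A hedged call sketch for the logger above; the username, action string and repository name are example values only:

action_logger(user='marcin', action='push', repo='rhodecode',
              ipaddr='127.0.0.1', commit=True)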
173 def get_repos(path, recursive=False):
173 def get_repos(path, recursive=False):
174 """
174 """
175 Scans given path for repos and returns (name, (type, path)) tuples
175 Scans given path for repos and returns (name, (type, path)) tuples
176
176
177 :param path: path to scan for repositories
177 :param path: path to scan for repositories
178 :param recursive: recursive search and return names with subdirs in front
178 :param recursive: recursive search and return names with subdirs in front
179 """
179 """
180
180
181 # remove ending slash for better results
181 # remove ending slash for better results
182 path = path.rstrip(os.sep)
182 path = path.rstrip(os.sep)
183
183
184 def _get_repos(p):
184 def _get_repos(p):
185 if not os.access(p, os.W_OK):
185 if not os.access(p, os.W_OK):
186 return
186 return
187 for dirpath in os.listdir(p):
187 for dirpath in os.listdir(p):
188 if os.path.isfile(os.path.join(p, dirpath)):
188 if os.path.isfile(os.path.join(p, dirpath)):
189 continue
189 continue
190 cur_path = os.path.join(p, dirpath)
190 cur_path = os.path.join(p, dirpath)
191 try:
191 try:
192 scm_info = get_scm(cur_path)
192 scm_info = get_scm(cur_path)
193 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
193 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
194 except VCSError:
194 except VCSError:
195 if not recursive:
195 if not recursive:
196 continue
196 continue
197 #check if this dir contains other repos for recursive scan
197 #check if this dir contains other repos for recursive scan
198 rec_path = os.path.join(p, dirpath)
198 rec_path = os.path.join(p, dirpath)
199 if os.path.isdir(rec_path):
199 if os.path.isdir(rec_path):
200 for inner_scm in _get_repos(rec_path):
200 for inner_scm in _get_repos(rec_path):
201 yield inner_scm
201 yield inner_scm
202
202
203 return _get_repos(path)
203 return _get_repos(path)
204
204
205
205
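A hedged sketch of consuming the generator above; the scan path is an example, and each yielded item pairs the relative repo name with the (type, path) tuple returned by get_scm:

for name, (scm_type, scm_path) in get_repos('/srv/repositories', recursive=True):
    print(scm_type, name)   # e.g. 'hg nested/group/my-repo'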
206 def is_valid_repo(repo_name, base_path):
206 def is_valid_repo(repo_name, base_path):
207 """
207 """
208 Returns True if given path is a valid repository, False otherwise
208 Returns True if given path is a valid repository, False otherwise
209
209
210 :param repo_name:
210 :param repo_name:
211 :param base_path:
211 :param base_path:
212
212
213 :return True: if given path is a valid repository
213 :return True: if given path is a valid repository
214 """
214 """
215 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
215 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
216
216
217 try:
217 try:
218 get_scm(full_path)
218 get_scm(full_path)
219 return True
219 return True
220 except VCSError:
220 except VCSError:
221 return False
221 return False
222
222
223
223
224 def is_valid_repos_group(repos_group_name, base_path):
224 def is_valid_repos_group(repos_group_name, base_path):
225 """
225 """
226 Returns True if given path is a repos group, False otherwise
226 Returns True if given path is a repos group, False otherwise
227
227
228 :param repo_name:
228 :param repo_name:
229 :param base_path:
229 :param base_path:
230 """
230 """
231 full_path = os.path.join(safe_str(base_path), safe_str(repos_group_name))
231 full_path = os.path.join(safe_str(base_path), safe_str(repos_group_name))
232
232
233 # check if it's not a repo
233 # check if it's not a repo
234 if is_valid_repo(repos_group_name, base_path):
234 if is_valid_repo(repos_group_name, base_path):
235 return False
235 return False
236
236
237 try:
237 try:
238 # we need to check bare git repos at higher level
238 # we need to check bare git repos at higher level
239 # since we might match branches/hooks/info/objects or possible
239 # since we might match branches/hooks/info/objects or possible
240 # other things inside bare git repo
240 # other things inside bare git repo
241 get_scm(os.path.dirname(full_path))
241 get_scm(os.path.dirname(full_path))
242 return False
242 return False
243 except VCSError:
243 except VCSError:
244 pass
244 pass
245
245
246 # check if it's a valid path
246 # check if it's a valid path
247 if os.path.isdir(full_path):
247 if os.path.isdir(full_path):
248 return True
248 return True
249
249
250 return False
250 return False
251
251
252
252
253 def ask_ok(prompt, retries=4, complaint='Yes or no, please!'):
253 def ask_ok(prompt, retries=4, complaint='Yes or no, please!'):
254 while True:
254 while True:
255 ok = raw_input(prompt)
255 ok = raw_input(prompt)
256 if ok in ('y', 'ye', 'yes'):
256 if ok in ('y', 'ye', 'yes'):
257 return True
257 return True
258 if ok in ('n', 'no', 'nop', 'nope'):
258 if ok in ('n', 'no', 'nop', 'nope'):
259 return False
259 return False
260 retries = retries - 1
260 retries = retries - 1
261 if retries < 0:
261 if retries < 0:
262 raise IOError
262 raise IOError
263 print complaint
263 print complaint
264
264
265 #propagated from mercurial documentation
265 #propagated from mercurial documentation
266 ui_sections = ['alias', 'auth',
266 ui_sections = ['alias', 'auth',
267 'decode/encode', 'defaults',
267 'decode/encode', 'defaults',
268 'diff', 'email',
268 'diff', 'email',
269 'extensions', 'format',
269 'extensions', 'format',
270 'merge-patterns', 'merge-tools',
270 'merge-patterns', 'merge-tools',
271 'hooks', 'http_proxy',
271 'hooks', 'http_proxy',
272 'smtp', 'patch',
272 'smtp', 'patch',
273 'paths', 'profiling',
273 'paths', 'profiling',
274 'server', 'trusted',
274 'server', 'trusted',
275 'ui', 'web', ]
275 'ui', 'web', ]
276
276
277
277
278 def make_ui(read_from='file', path=None, checkpaths=True):
278 def make_ui(read_from='file', path=None, checkpaths=True):
279 """
279 """
280 A function that will read python rc files or database
280 A function that will read python rc files or database
281 and make a mercurial ui object from the read options
281 and make a mercurial ui object from the read options
282
282
283 :param path: path to mercurial config file
283 :param path: path to mercurial config file
284 :param checkpaths: check the path
284 :param checkpaths: check the path
285 :param read_from: read from 'file' or 'db'
285 :param read_from: read from 'file' or 'db'
286 """
286 """
287
287
288 baseui = ui.ui()
288 baseui = ui.ui()
289
289
290 # clean the baseui object
290 # clean the baseui object
291 baseui._ocfg = config.config()
291 baseui._ocfg = config.config()
292 baseui._ucfg = config.config()
292 baseui._ucfg = config.config()
293 baseui._tcfg = config.config()
293 baseui._tcfg = config.config()
294
294
295 if read_from == 'file':
295 if read_from == 'file':
296 if not os.path.isfile(path):
296 if not os.path.isfile(path):
297 log.debug('hgrc file is not present at %s skipping...' % path)
297 log.debug('hgrc file is not present at %s skipping...' % path)
298 return False
298 return False
299 log.debug('reading hgrc from %s' % path)
299 log.debug('reading hgrc from %s' % path)
300 cfg = config.config()
300 cfg = config.config()
301 cfg.read(path)
301 cfg.read(path)
302 for section in ui_sections:
302 for section in ui_sections:
303 for k, v in cfg.items(section):
303 for k, v in cfg.items(section):
304 log.debug('settings ui from file[%s]%s:%s' % (section, k, v))
304 log.debug('settings ui from file[%s]%s:%s' % (section, k, v))
305 baseui.setconfig(section, k, v)
305 baseui.setconfig(section, k, v)
306
306
307 elif read_from == 'db':
307 elif read_from == 'db':
308 sa = meta.Session()
308 sa = meta.Session()
309 ret = sa.query(RhodeCodeUi)\
309 ret = sa.query(RhodeCodeUi)\
310 .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
310 .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
311 .all()
311 .all()
312
312
313 hg_ui = ret
313 hg_ui = ret
314 for ui_ in hg_ui:
314 for ui_ in hg_ui:
315 if ui_.ui_active and ui_.ui_key != 'push_ssl':
315 if ui_.ui_active and ui_.ui_key != 'push_ssl':
316 log.debug('settings ui from db[%s]%s:%s', ui_.ui_section,
316 log.debug('settings ui from db[%s]%s:%s', ui_.ui_section,
317 ui_.ui_key, ui_.ui_value)
317 ui_.ui_key, ui_.ui_value)
318 baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)
318 baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)
319
319
320 meta.Session.remove()
320 meta.Session.remove()
321 return baseui
321 return baseui
322
322
323
323
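Hedged usage sketches for both read modes; the hgrc path is an example, and the 'db' mode assumes a configured SQLAlchemy session:

# read ui settings from an on-disk hgrc (returns False if the file is missing)
baseui = make_ui(read_from='file', path='/srv/repos/myrepo/.hg/hgrc')

# or build it from the RhodeCodeUi rows stored in the database
baseui = make_ui(read_from='db')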
324 def set_rhodecode_config(config):
324 def set_rhodecode_config(config):
325 """
325 """
326 Updates pylons config with new settings from database
326 Updates pylons config with new settings from database
327
327
328 :param config:
328 :param config:
329 """
329 """
330 hgsettings = RhodeCodeSetting.get_app_settings()
330 hgsettings = RhodeCodeSetting.get_app_settings()
331
331
332 for k, v in hgsettings.items():
332 for k, v in hgsettings.items():
333 config[k] = v
333 config[k] = v
334
334
335
335
336 def invalidate_cache(cache_key, *args):
336 def invalidate_cache(cache_key, *args):
337 """
337 """
338 Puts cache invalidation task into db for
338 Puts cache invalidation task into db for
339 further global cache invalidation
339 further global cache invalidation
340 """
340 """
341
341
342 from rhodecode.model.scm import ScmModel
342 from rhodecode.model.scm import ScmModel
343
343
344 if cache_key.startswith('get_repo_cached_'):
344 if cache_key.startswith('get_repo_cached_'):
345 name = cache_key.split('get_repo_cached_')[-1]
345 name = cache_key.split('get_repo_cached_')[-1]
346 ScmModel().mark_for_invalidation(name)
346 ScmModel().mark_for_invalidation(name)
347
347
348
348
349 class EmptyChangeset(BaseChangeset):
350 """
351 A dummy empty changeset. It's possible to pass a hash when creating
352 an EmptyChangeset
353 """
354
355 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
356 alias=None):
357 self._empty_cs = cs
358 self.revision = -1
359 self.message = ''
360 self.author = ''
361 self.date = ''
362 self.repository = repo
363 self.requested_revision = requested_revision
364 self.alias = alias
365
366 @LazyProperty
367 def raw_id(self):
368 """
369 Returns raw string identifying this changeset, useful for web
370 representation.
371 """
372
373 return self._empty_cs
374
375 @LazyProperty
376 def branch(self):
377 return get_backend(self.alias).DEFAULT_BRANCH_NAME
378
379 @LazyProperty
380 def short_id(self):
381 return self.raw_id[:12]
382
383 def get_file_changeset(self, path):
384 return self
385
386 def get_file_content(self, path):
387 return u''
388
389 def get_file_size(self, path):
390 return 0
391
392
393 def map_groups(path):
349 def map_groups(path):
394 """
350 """
395 Given a full path to a repository, create all nested groups that this
351 Given a full path to a repository, create all nested groups that this
396 repo is inside. This function creates parent-child relationships between
352 repo is inside. This function creates parent-child relationships between
397 groups and creates default perms for all new groups.
353 groups and creates default perms for all new groups.
398
354
399 :param paths: full path to repository
355 :param paths: full path to repository
400 """
356 """
401 sa = meta.Session()
357 sa = meta.Session()
402 groups = path.split(Repository.url_sep())
358 groups = path.split(Repository.url_sep())
403 parent = None
359 parent = None
404 group = None
360 group = None
405
361
406 # last element is repo in nested groups structure
362 # last element is repo in nested groups structure
407 groups = groups[:-1]
363 groups = groups[:-1]
408 rgm = ReposGroupModel(sa)
364 rgm = ReposGroupModel(sa)
409 for lvl, group_name in enumerate(groups):
365 for lvl, group_name in enumerate(groups):
410 group_name = '/'.join(groups[:lvl] + [group_name])
366 group_name = '/'.join(groups[:lvl] + [group_name])
411 group = RepoGroup.get_by_group_name(group_name)
367 group = RepoGroup.get_by_group_name(group_name)
412 desc = '%s group' % group_name
368 desc = '%s group' % group_name
413
369
414 # skip folders that are now removed repos
370 # skip folders that are now removed repos
415 if REMOVED_REPO_PAT.match(group_name):
371 if REMOVED_REPO_PAT.match(group_name):
416 break
372 break
417
373
418 if group is None:
374 if group is None:
419 log.debug('creating group level: %s group_name: %s' % (lvl,
375 log.debug('creating group level: %s group_name: %s' % (lvl,
420 group_name))
376 group_name))
421 group = RepoGroup(group_name, parent)
377 group = RepoGroup(group_name, parent)
422 group.group_description = desc
378 group.group_description = desc
423 sa.add(group)
379 sa.add(group)
424 rgm._create_default_perms(group)
380 rgm._create_default_perms(group)
425 sa.flush()
381 sa.flush()
426 parent = group
382 parent = group
427 return group
383 return group
428
384
429
385
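The group-name derivation above can be sketched standalone, without a db session; nested_group_names is a hypothetical helper that only mirrors the '/'.join(groups[:lvl] + [group_name]) step:

def nested_group_names(repo_path, sep='/'):
    groups = repo_path.split(sep)[:-1]   # last element is the repo itself
    return [sep.join(groups[:lvl + 1]) for lvl in range(len(groups))]

nested_group_names('projects/web/rhodecode')   # -> ['projects', 'projects/web']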
430 def repo2db_mapper(initial_repo_list, remove_obsolete=False,
386 def repo2db_mapper(initial_repo_list, remove_obsolete=False,
431 install_git_hook=False):
387 install_git_hook=False):
432 """
388 """
433 maps all repos given in initial_repo_list; non-existing repositories
389 maps all repos given in initial_repo_list; non-existing repositories
434 are created. If remove_obsolete is True it also checks for db entries
390 are created. If remove_obsolete is True it also checks for db entries
435 that are not in initial_repo_list and removes them.
391 that are not in initial_repo_list and removes them.
436
392
437 :param initial_repo_list: list of repositories found by scanning methods
393 :param initial_repo_list: list of repositories found by scanning methods
438 :param remove_obsolete: check for obsolete entries in database
394 :param remove_obsolete: check for obsolete entries in database
439 :param install_git_hook: if this is True, also check and install githook
395 :param install_git_hook: if this is True, also check and install githook
440 for a repo if missing
396 for a repo if missing
441 """
397 """
442 from rhodecode.model.repo import RepoModel
398 from rhodecode.model.repo import RepoModel
443 from rhodecode.model.scm import ScmModel
399 from rhodecode.model.scm import ScmModel
444 sa = meta.Session()
400 sa = meta.Session()
445 rm = RepoModel()
401 rm = RepoModel()
446 user = sa.query(User).filter(User.admin == True).first()
402 user = sa.query(User).filter(User.admin == True).first()
447 if user is None:
403 if user is None:
448 raise Exception('Missing administrative account !')
404 raise Exception('Missing administrative account !')
449 added = []
405 added = []
450
406
451 for name, repo in initial_repo_list.items():
407 for name, repo in initial_repo_list.items():
452 group = map_groups(name)
408 group = map_groups(name)
453 db_repo = rm.get_by_repo_name(name)
409 db_repo = rm.get_by_repo_name(name)
454 # found repo that is on filesystem not in RhodeCode database
410 # found repo that is on filesystem not in RhodeCode database
455 if not db_repo:
411 if not db_repo:
456 log.info('repository %s not found creating now' % name)
412 log.info('repository %s not found creating now' % name)
457 added.append(name)
413 added.append(name)
458 desc = (repo.description
414 desc = (repo.description
459 if repo.description != 'unknown'
415 if repo.description != 'unknown'
460 else '%s repository' % name)
416 else '%s repository' % name)
461 new_repo = rm.create_repo(
417 new_repo = rm.create_repo(
462 repo_name=name,
418 repo_name=name,
463 repo_type=repo.alias,
419 repo_type=repo.alias,
464 description=desc,
420 description=desc,
465 repos_group=getattr(group, 'group_id', None),
421 repos_group=getattr(group, 'group_id', None),
466 owner=user,
422 owner=user,
467 just_db=True
423 just_db=True
468 )
424 )
469 # we added that repo just now, and make sure it has githook
425 # we added that repo just now, and make sure it has githook
470 # installed
426 # installed
471 if new_repo.repo_type == 'git':
427 if new_repo.repo_type == 'git':
472 ScmModel().install_git_hook(new_repo.scm_instance)
428 ScmModel().install_git_hook(new_repo.scm_instance)
473 elif install_git_hook:
429 elif install_git_hook:
474 if db_repo.repo_type == 'git':
430 if db_repo.repo_type == 'git':
475 ScmModel().install_git_hook(db_repo.scm_instance)
431 ScmModel().install_git_hook(db_repo.scm_instance)
476 sa.commit()
432 sa.commit()
477 removed = []
433 removed = []
478 if remove_obsolete:
434 if remove_obsolete:
479 # remove from database those repositories that are not in the filesystem
435 # remove from database those repositories that are not in the filesystem
480 for repo in sa.query(Repository).all():
436 for repo in sa.query(Repository).all():
481 if repo.repo_name not in initial_repo_list.keys():
437 if repo.repo_name not in initial_repo_list.keys():
482 log.debug("Removing non existing repository found in db `%s`" %
438 log.debug("Removing non existing repository found in db `%s`" %
483 repo.repo_name)
439 repo.repo_name)
484 try:
440 try:
485 sa.delete(repo)
441 sa.delete(repo)
486 sa.commit()
442 sa.commit()
487 removed.append(repo.repo_name)
443 removed.append(repo.repo_name)
488 except:
444 except:
489 #don't hold further removals on error
445 #don't hold further removals on error
490 log.error(traceback.format_exc())
446 log.error(traceback.format_exc())
491 sa.rollback()
447 sa.rollback()
492
448
493 # clear cache keys
449 # clear cache keys
494 log.debug("Clearing cache keys now...")
450 log.debug("Clearing cache keys now...")
495 CacheInvalidation.clear_cache()
451 CacheInvalidation.clear_cache()
496 sa.commit()
452 sa.commit()
497 return added, removed
453 return added, removed
498
454
499
455
500 # set cache regions for beaker so celery can utilise it
456 # set cache regions for beaker so celery can utilise it
501 def add_cache(settings):
457 def add_cache(settings):
502 cache_settings = {'regions': None}
458 cache_settings = {'regions': None}
503 for key in settings.keys():
459 for key in settings.keys():
504 for prefix in ['beaker.cache.', 'cache.']:
460 for prefix in ['beaker.cache.', 'cache.']:
505 if key.startswith(prefix):
461 if key.startswith(prefix):
506 name = key.split(prefix)[1].strip()
462 name = key.split(prefix)[1].strip()
507 cache_settings[name] = settings[key].strip()
463 cache_settings[name] = settings[key].strip()
508 if cache_settings['regions']:
464 if cache_settings['regions']:
509 for region in cache_settings['regions'].split(','):
465 for region in cache_settings['regions'].split(','):
510 region = region.strip()
466 region = region.strip()
511 region_settings = {}
467 region_settings = {}
512 for key, value in cache_settings.items():
468 for key, value in cache_settings.items():
513 if key.startswith(region):
469 if key.startswith(region):
514 region_settings[key.split('.')[1]] = value
470 region_settings[key.split('.')[1]] = value
515 region_settings['expire'] = int(region_settings.get('expire',
471 region_settings['expire'] = int(region_settings.get('expire',
516 60))
472 60))
517 region_settings.setdefault('lock_dir',
473 region_settings.setdefault('lock_dir',
518 cache_settings.get('lock_dir'))
474 cache_settings.get('lock_dir'))
519 region_settings.setdefault('data_dir',
475 region_settings.setdefault('data_dir',
520 cache_settings.get('data_dir'))
476 cache_settings.get('data_dir'))
521
477
522 if 'type' not in region_settings:
478 if 'type' not in region_settings:
523 region_settings['type'] = cache_settings.get('type',
479 region_settings['type'] = cache_settings.get('type',
524 'memory')
480 'memory')
525 beaker.cache.cache_regions[region] = region_settings
481 beaker.cache.cache_regions[region] = region_settings
526
482
527
483
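A hedged example of the settings dict this expects; the keys follow the usual beaker.cache.* ini naming and the values are invented:

settings = {
    'beaker.cache.regions': 'short_term, long_term',
    'beaker.cache.short_term.type': 'memory',
    'beaker.cache.short_term.expire': '60',
    'beaker.cache.long_term.type': 'file',
    'beaker.cache.long_term.expire': '3600',
    'beaker.cache.lock_dir': '/tmp/cache/lock',
    'beaker.cache.data_dir': '/tmp/cache/data',
}
add_cache(settings)
# beaker.cache.cache_regions['short_term'] now holds e.g.
# {'type': 'memory', 'expire': 60, 'lock_dir': '/tmp/cache/lock', ...}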
528 def load_rcextensions(root_path):
484 def load_rcextensions(root_path):
529 import rhodecode
485 import rhodecode
530 from rhodecode.config import conf
486 from rhodecode.config import conf
531
487
532 path = os.path.join(root_path, 'rcextensions', '__init__.py')
488 path = os.path.join(root_path, 'rcextensions', '__init__.py')
533 if os.path.isfile(path):
489 if os.path.isfile(path):
534 rcext = create_module('rc', path)
490 rcext = create_module('rc', path)
535 EXT = rhodecode.EXTENSIONS = rcext
491 EXT = rhodecode.EXTENSIONS = rcext
536 log.debug('Found rcextensions now loading %s...' % rcext)
492 log.debug('Found rcextensions now loading %s...' % rcext)
537
493
538 # Additional mappings that are not present in the pygments lexers
494 # Additional mappings that are not present in the pygments lexers
539 conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
495 conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
540
496
541 #OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)
497 #OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)
542
498
543 if getattr(EXT, 'INDEX_EXTENSIONS', []) != []:
499 if getattr(EXT, 'INDEX_EXTENSIONS', []) != []:
544 log.debug('settings custom INDEX_EXTENSIONS')
500 log.debug('settings custom INDEX_EXTENSIONS')
545 conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])
501 conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])
546
502
547 #ADDITIONAL MAPPINGS
503 #ADDITIONAL MAPPINGS
548 log.debug('adding extra into INDEX_EXTENSIONS')
504 log.debug('adding extra into INDEX_EXTENSIONS')
549 conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))
505 conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))
550
506
551
507
552 #==============================================================================
508 #==============================================================================
553 # TEST FUNCTIONS AND CREATORS
509 # TEST FUNCTIONS AND CREATORS
554 #==============================================================================
510 #==============================================================================
555 def create_test_index(repo_location, config, full_index):
511 def create_test_index(repo_location, config, full_index):
556 """
512 """
557 Makes default test index
513 Makes default test index
558
514
559 :param config: test config
515 :param config: test config
560 :param full_index:
516 :param full_index:
561 """
517 """
562
518
563 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
519 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
564 from rhodecode.lib.pidlock import DaemonLock, LockHeld
520 from rhodecode.lib.pidlock import DaemonLock, LockHeld
565
521
566 repo_location = repo_location
522 repo_location = repo_location
567
523
568 index_location = os.path.join(config['app_conf']['index_dir'])
524 index_location = os.path.join(config['app_conf']['index_dir'])
569 if not os.path.exists(index_location):
525 if not os.path.exists(index_location):
570 os.makedirs(index_location)
526 os.makedirs(index_location)
571
527
572 try:
528 try:
573 l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
529 l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
574 WhooshIndexingDaemon(index_location=index_location,
530 WhooshIndexingDaemon(index_location=index_location,
575 repo_location=repo_location)\
531 repo_location=repo_location)\
576 .run(full_index=full_index)
532 .run(full_index=full_index)
577 l.release()
533 l.release()
578 except LockHeld:
534 except LockHeld:
579 pass
535 pass
580
536
581
537
582 def create_test_env(repos_test_path, config):
538 def create_test_env(repos_test_path, config):
583 """
539 """
584 Makes a fresh database and
540 Makes a fresh database and
585 installs test repositories into tmp dir
541 installs test repositories into tmp dir
586 """
542 """
587 from rhodecode.lib.db_manage import DbManage
543 from rhodecode.lib.db_manage import DbManage
588 from rhodecode.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH
544 from rhodecode.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH
589
545
590 # PART ONE create db
546 # PART ONE create db
591 dbconf = config['sqlalchemy.db1.url']
547 dbconf = config['sqlalchemy.db1.url']
592 log.debug('making test db %s' % dbconf)
548 log.debug('making test db %s' % dbconf)
593
549
594 # create test dir if it doesn't exist
550 # create test dir if it doesn't exist
595 if not os.path.isdir(repos_test_path):
551 if not os.path.isdir(repos_test_path):
596 log.debug('Creating testdir %s' % repos_test_path)
552 log.debug('Creating testdir %s' % repos_test_path)
597 os.makedirs(repos_test_path)
553 os.makedirs(repos_test_path)
598
554
599 dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
555 dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
600 tests=True)
556 tests=True)
601 dbmanage.create_tables(override=True)
557 dbmanage.create_tables(override=True)
602 dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
558 dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
603 dbmanage.create_default_user()
559 dbmanage.create_default_user()
604 dbmanage.admin_prompt()
560 dbmanage.admin_prompt()
605 dbmanage.create_permissions()
561 dbmanage.create_permissions()
606 dbmanage.populate_default_permissions()
562 dbmanage.populate_default_permissions()
607 Session().commit()
563 Session().commit()
608 # PART TWO make test repo
564 # PART TWO make test repo
609 log.debug('making test vcs repositories')
565 log.debug('making test vcs repositories')
610
566
611 idx_path = config['app_conf']['index_dir']
567 idx_path = config['app_conf']['index_dir']
612 data_path = config['app_conf']['cache_dir']
568 data_path = config['app_conf']['cache_dir']
613
569
614 #clean index and data
570 #clean index and data
615 if idx_path and os.path.exists(idx_path):
571 if idx_path and os.path.exists(idx_path):
616 log.debug('remove %s' % idx_path)
572 log.debug('remove %s' % idx_path)
617 shutil.rmtree(idx_path)
573 shutil.rmtree(idx_path)
618
574
619 if data_path and os.path.exists(data_path):
575 if data_path and os.path.exists(data_path):
620 log.debug('remove %s' % data_path)
576 log.debug('remove %s' % data_path)
621 shutil.rmtree(data_path)
577 shutil.rmtree(data_path)
622
578
623 #CREATE DEFAULT TEST REPOS
579 #CREATE DEFAULT TEST REPOS
624 cur_dir = dn(dn(abspath(__file__)))
580 cur_dir = dn(dn(abspath(__file__)))
625 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
581 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
626 tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
582 tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
627 tar.close()
583 tar.close()
628
584
629 cur_dir = dn(dn(abspath(__file__)))
585 cur_dir = dn(dn(abspath(__file__)))
630 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_git.tar.gz"))
586 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_git.tar.gz"))
631 tar.extractall(jn(TESTS_TMP_PATH, GIT_REPO))
587 tar.extractall(jn(TESTS_TMP_PATH, GIT_REPO))
632 tar.close()
588 tar.close()
633
589
634 #LOAD VCS test stuff
590 #LOAD VCS test stuff
635 from rhodecode.tests.vcs import setup_package
591 from rhodecode.tests.vcs import setup_package
636 setup_package()
592 setup_package()
637
593
638
594
639 #==============================================================================
595 #==============================================================================
640 # PASTER COMMANDS
596 # PASTER COMMANDS
641 #==============================================================================
597 #==============================================================================
642 class BasePasterCommand(Command):
598 class BasePasterCommand(Command):
643 """
599 """
644 Abstract Base Class for paster commands.
600 Abstract Base Class for paster commands.
645
601
646 The celery commands are somewhat aggressive about loading
602 The celery commands are somewhat aggressive about loading
647 celery.conf, and since our module sets the `CELERY_LOADER`
603 celery.conf, and since our module sets the `CELERY_LOADER`
648 environment variable to our loader, we have to bootstrap a bit and
604 environment variable to our loader, we have to bootstrap a bit and
649 make sure we've had a chance to load the pylons config off of the
605 make sure we've had a chance to load the pylons config off of the
650 command line, otherwise everything fails.
606 command line, otherwise everything fails.
651 """
607 """
652 min_args = 1
608 min_args = 1
653 min_args_error = "Please provide a paster config file as an argument."
609 min_args_error = "Please provide a paster config file as an argument."
654 takes_config_file = 1
610 takes_config_file = 1
655 requires_config_file = True
611 requires_config_file = True
656
612
657 def notify_msg(self, msg, log=False):
613 def notify_msg(self, msg, log=False):
658 """Make a notification to user, additionally if logger is passed
614 """Make a notification to user, additionally if logger is passed
659 it logs this action using given logger
615 it logs this action using given logger
660
616
661 :param msg: message that will be printed to user
617 :param msg: message that will be printed to user
662 :param log: logging instance, to use to additionally log this message
618 :param log: logging instance, to use to additionally log this message
663
619
664 """
620 """
665 if log and isinstance(log, logging):
621 if log and isinstance(log, logging):
666 log(msg)
622 log(msg)
667
623
668 def run(self, args):
624 def run(self, args):
669 """
625 """
670 Overrides Command.run
626 Overrides Command.run
671
627
672 Checks for a config file argument and loads it.
628 Checks for a config file argument and loads it.
673 """
629 """
674 if len(args) < self.min_args:
630 if len(args) < self.min_args:
675 raise BadCommand(
631 raise BadCommand(
676 self.min_args_error % {'min_args': self.min_args,
632 self.min_args_error % {'min_args': self.min_args,
677 'actual_args': len(args)})
633 'actual_args': len(args)})
678
634
679 # Decrement because we're going to lob off the first argument.
635 # Decrement because we're going to lob off the first argument.
680 # @@ This is hacky
636 # @@ This is hacky
681 self.min_args -= 1
637 self.min_args -= 1
682 self.bootstrap_config(args[0])
638 self.bootstrap_config(args[0])
683 self.update_parser()
639 self.update_parser()
684 return super(BasePasterCommand, self).run(args[1:])
640 return super(BasePasterCommand, self).run(args[1:])
685
641
686 def update_parser(self):
642 def update_parser(self):
687 """
643 """
688 Abstract method. Allows for the class's parser to be updated
644 Abstract method. Allows for the class's parser to be updated
689 before the superclass's `run` method is called. Necessary to
645 before the superclass's `run` method is called. Necessary to
690 allow options/arguments to be passed through to the underlying
646 allow options/arguments to be passed through to the underlying
691 celery command.
647 celery command.
692 """
648 """
693 raise NotImplementedError("Abstract Method.")
649 raise NotImplementedError("Abstract Method.")
694
650
695 def bootstrap_config(self, conf):
651 def bootstrap_config(self, conf):
696 """
652 """
697 Loads the pylons configuration.
653 Loads the pylons configuration.
698 """
654 """
699 from pylons import config as pylonsconfig
655 from pylons import config as pylonsconfig
700
656
701 self.path_to_ini_file = os.path.realpath(conf)
657 self.path_to_ini_file = os.path.realpath(conf)
702 conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
658 conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
703 pylonsconfig.init_app(conf.global_conf, conf.local_conf)
659 pylonsconfig.init_app(conf.global_conf, conf.local_conf)
@@ -1,451 +1,451 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """
2 """
3 rhodecode.lib.utils
3 rhodecode.lib.utils
4 ~~~~~~~~~~~~~~~~~~~
4 ~~~~~~~~~~~~~~~~~~~
5
5
6 Some simple helper functions
6 Some simple helper functions
7
7
8 :created_on: Jan 5, 2011
8 :created_on: Jan 5, 2011
9 :author: marcink
9 :author: marcink
10 :copyright: (C) 2011-2012 Marcin Kuzminski <marcin@python-works.com>
10 :copyright: (C) 2011-2012 Marcin Kuzminski <marcin@python-works.com>
11 :license: GPLv3, see COPYING for more details.
11 :license: GPLv3, see COPYING for more details.
12 """
12 """
13 # This program is free software: you can redistribute it and/or modify
13 # This program is free software: you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation, either version 3 of the License, or
15 # the Free Software Foundation, either version 3 of the License, or
16 # (at your option) any later version.
16 # (at your option) any later version.
17 #
17 #
18 # This program is distributed in the hope that it will be useful,
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
21 # GNU General Public License for more details.
22 #
22 #
23 # You should have received a copy of the GNU General Public License
23 # You should have received a copy of the GNU General Public License
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25
25
26 import re
26 import re
27 from datetime import datetime
27 from datetime import datetime
28 from pylons.i18n.translation import _, ungettext
28 from pylons.i18n.translation import _, ungettext
29 from rhodecode.lib.vcs.utils.lazy import LazyProperty
29 from rhodecode.lib.vcs.utils.lazy import LazyProperty
30
30
31
31
32 def __get_lem():
32 def __get_lem():
33 """
33 """
34 Get language extension map based on what's inside pygments lexers
34 Get language extension map based on what's inside pygments lexers
35 """
35 """
36 from pygments import lexers
36 from pygments import lexers
37 from string import lower
37 from string import lower
38 from collections import defaultdict
38 from collections import defaultdict
39
39
40 d = defaultdict(lambda: [])
40 d = defaultdict(lambda: [])
41
41
42 def __clean(s):
42 def __clean(s):
43 s = s.lstrip('*')
43 s = s.lstrip('*')
44 s = s.lstrip('.')
44 s = s.lstrip('.')
45
45
46 if s.find('[') != -1:
46 if s.find('[') != -1:
47 exts = []
47 exts = []
48 start, stop = s.find('['), s.find(']')
48 start, stop = s.find('['), s.find(']')
49
49
50 for suffix in s[start + 1:stop]:
50 for suffix in s[start + 1:stop]:
51 exts.append(s[:s.find('[')] + suffix)
51 exts.append(s[:s.find('[')] + suffix)
52 return map(lower, exts)
52 return map(lower, exts)
53 else:
53 else:
54 return map(lower, [s])
54 return map(lower, [s])
55
55
56 for lx, t in sorted(lexers.LEXERS.items()):
56 for lx, t in sorted(lexers.LEXERS.items()):
57 m = map(__clean, t[-2])
57 m = map(__clean, t[-2])
58 if m:
58 if m:
59 m = reduce(lambda x, y: x + y, m)
59 m = reduce(lambda x, y: x + y, m)
60 for ext in m:
60 for ext in m:
61 desc = lx.replace('Lexer', '')
61 desc = lx.replace('Lexer', '')
62 d[ext].append(desc)
62 d[ext].append(desc)
63
63
64 return dict(d)
64 return dict(d)
65
65
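The per-pattern cleanup inside __get_lem can be illustrated with a standalone sketch; clean_pattern is a hypothetical twin of the nested __clean helper:

def clean_pattern(s):
    s = s.lstrip('*').lstrip('.')
    if '[' in s:
        start, stop = s.find('['), s.find(']')
        return [(s[:start] + suffix).lower() for suffix in s[start + 1:stop]]
    return [s.lower()]

clean_pattern('*.py')         # -> ['py']
clean_pattern('*.php[345]')   # -> ['php3', 'php4', 'php5']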
66 def str2bool(_str):
66 def str2bool(_str):
67 """
67 """
68 returns True/False value from the given string; it tries to translate the
68 returns True/False value from the given string; it tries to translate the
69 string into boolean
69 string into boolean
70
70
71 :param _str: string value to translate into boolean
71 :param _str: string value to translate into boolean
72 :rtype: boolean
72 :rtype: boolean
73 :returns: boolean from given string
73 :returns: boolean from given string
74 """
74 """
75 if _str is None:
75 if _str is None:
76 return False
76 return False
77 if _str in (True, False):
77 if _str in (True, False):
78 return _str
78 return _str
79 _str = str(_str).strip().lower()
79 _str = str(_str).strip().lower()
80 return _str in ('t', 'true', 'y', 'yes', 'on', '1')
80 return _str in ('t', 'true', 'y', 'yes', 'on', '1')
81
81
82
82
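A few illustrative calls showing the accepted truthy values (sample inputs are illustrative):

assert str2bool('Yes') is True       # case-insensitive, whitespace is stripped
assert str2bool('0') is False
assert str2bool(None) is False       # None short-circuits to False
assert str2bool(True) is True        # real booleans pass straight through
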
83 def convert_line_endings(line, mode):
83 def convert_line_endings(line, mode):
84 """
84 """
85 Converts a given line's line ending according to the given mode
85 Converts a given line's line ending according to the given mode
86
86
87 Available modes are::
87 Available modes are::
88 0 - Unix
88 0 - Unix
89 1 - Mac
89 1 - Mac
90 2 - DOS
90 2 - DOS
91
91
92 :param line: given line to convert
92 :param line: given line to convert
93 :param mode: mode to convert to
93 :param mode: mode to convert to
94 :rtype: str
94 :rtype: str
95 :return: converted line according to mode
95 :return: converted line according to mode
96 """
96 """
97 from string import replace
97 from string import replace
98
98
99 if mode == 0:
99 if mode == 0:
100 line = replace(line, '\r\n', '\n')
100 line = replace(line, '\r\n', '\n')
101 line = replace(line, '\r', '\n')
101 line = replace(line, '\r', '\n')
102 elif mode == 1:
102 elif mode == 1:
103 line = replace(line, '\r\n', '\r')
103 line = replace(line, '\r\n', '\r')
104 line = replace(line, '\n', '\r')
104 line = replace(line, '\n', '\r')
105 elif mode == 2:
105 elif mode == 2:
106 line = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", line)
106 line = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", line)
107 return line
107 return line
108
108
109
109
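For example (mode 0 = Unix, 1 = Mac, 2 = DOS; sample strings are illustrative):

assert convert_line_endings('a\r\nb\r\n', 0) == 'a\nb\n'    # DOS -> Unix
assert convert_line_endings('a\nb\n', 2) == 'a\r\nb\r\n'    # Unix -> DOS
assert convert_line_endings('a\nb\n', 1) == 'a\rb\r'        # Unix -> Mac
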
110 def detect_mode(line, default):
110 def detect_mode(line, default):
111 """
111 """
112 Detects the line break for a given line; if the line break couldn't be found,
112 Detects the line break for a given line; if the line break couldn't be found,
113 the given default value is returned
113 the given default value is returned
114
114
115 :param line: str line
115 :param line: str line
116 :param default: default
116 :param default: default
117 :rtype: int
117 :rtype: int
118 :return: value of line end, one of 0 - Unix, 1 - Mac, 2 - DOS
118 :return: value of line end, one of 0 - Unix, 1 - Mac, 2 - DOS
119 """
119 """
120 if line.endswith('\r\n'):
120 if line.endswith('\r\n'):
121 return 2
121 return 2
122 elif line.endswith('\n'):
122 elif line.endswith('\n'):
123 return 0
123 return 0
124 elif line.endswith('\r'):
124 elif line.endswith('\r'):
125 return 1
125 return 1
126 else:
126 else:
127 return default
127 return default
128
128
129
129
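For example:

assert detect_mode('foo\r\n', 0) == 2    # DOS line ending
assert detect_mode('foo\n', 2) == 0      # Unix line ending
assert detect_mode('foo\r', 0) == 1      # Mac line ending
assert detect_mode('foo', 1) == 1        # no line ending -> default
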
130 def generate_api_key(username, salt=None):
130 def generate_api_key(username, salt=None):
131 """
131 """
132 Generates a unique API key for the given username; if salt is not given
132 Generates a unique API key for the given username; if salt is not given
133 it'll be generated from a random string
133 it'll be generated from a random string
134
134
135 :param username: username as string
135 :param username: username as string
136 :param salt: salt hashed together with the username to generate the key
136 :param salt: salt hashed together with the username to generate the key
137 :rtype: str
137 :rtype: str
138 :returns: sha1 hash from username+salt
138 :returns: sha1 hash from username+salt
139 """
139 """
140 from tempfile import _RandomNameSequence
140 from tempfile import _RandomNameSequence
141 import hashlib
141 import hashlib
142
142
143 if salt is None:
143 if salt is None:
144 salt = _RandomNameSequence().next()
144 salt = _RandomNameSequence().next()
145
145
146 return hashlib.sha1(username + salt).hexdigest()
146 return hashlib.sha1(username + salt).hexdigest()
147
147
148
148
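Usage sketch; with an explicit salt the key is deterministic (the username and salt below are illustrative):

key = generate_api_key('marcink', salt='abc')
assert key == generate_api_key('marcink', salt='abc')   # same inputs, same key
assert len(key) == 40                                    # hex sha1 digest
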
149 def safe_unicode(str_, from_encoding=None):
149 def safe_unicode(str_, from_encoding=None):
150 """
150 """
151 safe unicode function. Does a few tricks to turn str_ into unicode
151 safe unicode function. Does a few tricks to turn str_ into unicode
152
152
153 In case of a UnicodeDecodeError we try to return it with the encoding detected
153 In case of a UnicodeDecodeError we try to return it with the encoding detected
154 by the chardet library; if that fails, fall back to unicode with errors replaced
154 by the chardet library; if that fails, fall back to unicode with errors replaced
155
155
156 :param str_: string to decode
156 :param str_: string to decode
157 :rtype: unicode
157 :rtype: unicode
158 :returns: unicode object
158 :returns: unicode object
159 """
159 """
160 if isinstance(str_, unicode):
160 if isinstance(str_, unicode):
161 return str_
161 return str_
162
162
163 if not from_encoding:
163 if not from_encoding:
164 import rhodecode
164 import rhodecode
165 DEFAULT_ENCODING = rhodecode.CONFIG.get('default_encoding','utf8')
165 DEFAULT_ENCODING = rhodecode.CONFIG.get('default_encoding','utf8')
166 from_encoding = DEFAULT_ENCODING
166 from_encoding = DEFAULT_ENCODING
167
167
168 try:
168 try:
169 return unicode(str_)
169 return unicode(str_)
170 except UnicodeDecodeError:
170 except UnicodeDecodeError:
171 pass
171 pass
172
172
173 try:
173 try:
174 return unicode(str_, from_encoding)
174 return unicode(str_, from_encoding)
175 except UnicodeDecodeError:
175 except UnicodeDecodeError:
176 pass
176 pass
177
177
178 try:
178 try:
179 import chardet
179 import chardet
180 encoding = chardet.detect(str_)['encoding']
180 encoding = chardet.detect(str_)['encoding']
181 if encoding is None:
181 if encoding is None:
182 raise Exception()
182 raise Exception()
183 return str_.decode(encoding)
183 return str_.decode(encoding)
184 except (ImportError, UnicodeDecodeError, Exception):
184 except (ImportError, UnicodeDecodeError, Exception):
185 return unicode(str_, from_encoding, 'replace')
185 return unicode(str_, from_encoding, 'replace')
186
186
187
187
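A couple of illustrative calls (Python 2 semantics; the byte string below is latin-1 encoded):

assert safe_unicode(u'foo') == u'foo'                  # unicode passes through
assert safe_unicode('foo') == u'foo'                   # plain ascii str decodes
assert safe_unicode('f\xf3o', from_encoding='latin-1') == u'f\xf3o'
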
188 def safe_str(unicode_, to_encoding=None):
188 def safe_str(unicode_, to_encoding=None):
189 """
189 """
190 safe str function. Does a few tricks to turn unicode_ into a string
190 safe str function. Does a few tricks to turn unicode_ into a string
191
191
192 In case of a UnicodeEncodeError we try to return it with the encoding detected
192 In case of a UnicodeEncodeError we try to return it with the encoding detected
193 by the chardet library; if that fails, fall back to a string with errors replaced
193 by the chardet library; if that fails, fall back to a string with errors replaced
194
194
195 :param unicode_: unicode to encode
195 :param unicode_: unicode to encode
196 :rtype: str
196 :rtype: str
197 :returns: str object
197 :returns: str object
198 """
198 """
199
199
200 # if it's not basestr cast to str
200 # if it's not basestr cast to str
201 if not isinstance(unicode_, basestring):
201 if not isinstance(unicode_, basestring):
202 return str(unicode_)
202 return str(unicode_)
203
203
204 if isinstance(unicode_, str):
204 if isinstance(unicode_, str):
205 return unicode_
205 return unicode_
206
206
207 if not to_encoding:
207 if not to_encoding:
208 import rhodecode
208 import rhodecode
209 DEFAULT_ENCODING = rhodecode.CONFIG.get('default_encoding','utf8')
209 DEFAULT_ENCODING = rhodecode.CONFIG.get('default_encoding','utf8')
210 to_encoding = DEFAULT_ENCODING
210 to_encoding = DEFAULT_ENCODING
211
211
212 try:
212 try:
213 return unicode_.encode(to_encoding)
213 return unicode_.encode(to_encoding)
214 except UnicodeEncodeError:
214 except UnicodeEncodeError:
215 pass
215 pass
216
216
217 try:
217 try:
218 import chardet
218 import chardet
219 encoding = chardet.detect(unicode_)['encoding']
219 encoding = chardet.detect(unicode_)['encoding']
220 if encoding is None:
220 if encoding is None:
221 raise UnicodeEncodeError()
221 raise UnicodeEncodeError()
222
222
223 return unicode_.encode(encoding)
223 return unicode_.encode(encoding)
224 except (ImportError, UnicodeEncodeError):
224 except (ImportError, UnicodeEncodeError):
225 return unicode_.encode(to_encoding, 'replace')
225 return unicode_.encode(to_encoding, 'replace')
226
226
227 return safe_str
227 return safe_str
228
228
229
229
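And the mirror-image helper in action (sample values are illustrative):

assert safe_str(u'foo') == 'foo'                       # unicode encodes to str
assert safe_str(123) == '123'                          # non-strings are str()-ed
assert safe_str(u'f\xf3o', to_encoding='latin-1') == 'f\xf3o'
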
230 def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
230 def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
231 """
231 """
232 Custom engine_from_config function that makes sure we use NullPool for
232 Custom engine_from_config function that makes sure we use NullPool for
233 file-based sqlite databases. This prevents errors on sqlite. This only
233 file-based sqlite databases. This prevents errors on sqlite. This only
234 applies to sqlalchemy versions < 0.7.0
234 applies to sqlalchemy versions < 0.7.0
235
235
236 """
236 """
237 import sqlalchemy
237 import sqlalchemy
238 from sqlalchemy import engine_from_config as efc
238 from sqlalchemy import engine_from_config as efc
239 import logging
239 import logging
240
240
241 if int(sqlalchemy.__version__.split('.')[1]) < 7:
241 if int(sqlalchemy.__version__.split('.')[1]) < 7:
242
242
243 # This solution should work for sqlalchemy < 0.7.0, and should use
243 # This solution should work for sqlalchemy < 0.7.0, and should use
244 # proxy=TimerProxy() for execution time profiling
244 # proxy=TimerProxy() for execution time profiling
245
245
246 from sqlalchemy.pool import NullPool
246 from sqlalchemy.pool import NullPool
247 url = configuration[prefix + 'url']
247 url = configuration[prefix + 'url']
248
248
249 if url.startswith('sqlite'):
249 if url.startswith('sqlite'):
250 kwargs.update({'poolclass': NullPool})
250 kwargs.update({'poolclass': NullPool})
251 return efc(configuration, prefix, **kwargs)
251 return efc(configuration, prefix, **kwargs)
252 else:
252 else:
253 import time
253 import time
254 from sqlalchemy import event
254 from sqlalchemy import event
255 from sqlalchemy.engine import Engine
255 from sqlalchemy.engine import Engine
256
256
257 log = logging.getLogger('sqlalchemy.engine')
257 log = logging.getLogger('sqlalchemy.engine')
258 BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = xrange(30, 38)
258 BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = xrange(30, 38)
259 engine = efc(configuration, prefix, **kwargs)
259 engine = efc(configuration, prefix, **kwargs)
260
260
261 def color_sql(sql):
261 def color_sql(sql):
262 COLOR_SEQ = "\033[1;%dm"
262 COLOR_SEQ = "\033[1;%dm"
263 COLOR_SQL = YELLOW
263 COLOR_SQL = YELLOW
264 normal = '\x1b[0m'
264 normal = '\x1b[0m'
265 return ''.join([COLOR_SEQ % COLOR_SQL, sql, normal])
265 return ''.join([COLOR_SEQ % COLOR_SQL, sql, normal])
266
266
267 if configuration['debug']:
267 if configuration['debug']:
268 #attach events only for debug configuration
268 #attach events only for debug configuration
269
269
270 def before_cursor_execute(conn, cursor, statement,
270 def before_cursor_execute(conn, cursor, statement,
271 parameters, context, executemany):
271 parameters, context, executemany):
272 context._query_start_time = time.time()
272 context._query_start_time = time.time()
273 log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
273 log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
274
274
275
275
276 def after_cursor_execute(conn, cursor, statement,
276 def after_cursor_execute(conn, cursor, statement,
277 parameters, context, executemany):
277 parameters, context, executemany):
278 total = time.time() - context._query_start_time
278 total = time.time() - context._query_start_time
279 log.info(color_sql("<<<<< TOTAL TIME: %f <<<<<" % total))
279 log.info(color_sql("<<<<< TOTAL TIME: %f <<<<<" % total))
280
280
281 event.listen(engine, "before_cursor_execute",
281 event.listen(engine, "before_cursor_execute",
282 before_cursor_execute)
282 before_cursor_execute)
283 event.listen(engine, "after_cursor_execute",
283 event.listen(engine, "after_cursor_execute",
284 after_cursor_execute)
284 after_cursor_execute)
285
285
286 return engine
286 return engine
287
287
288
288
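A minimal sketch of calling it with a paste-style configuration dict; the sqlite path is illustrative, a real RhodeCode setup passes the parsed .ini config:

config = {
    'sqlalchemy.url': 'sqlite:///rhodecode.db',   # illustrative database path
    'debug': False,                               # skips the query-timing events
}
engine = engine_from_config(config, prefix='sqlalchemy.')
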
289 def age(prevdate):
289 def age(prevdate):
290 """
290 """
291 turns a datetime into an age string.
291 turns a datetime into an age string.
292
292
293 :param prevdate: datetime object
293 :param prevdate: datetime object
294 :rtype: unicode
294 :rtype: unicode
295 :returns: unicode words describing age
295 :returns: unicode words describing age
296 """
296 """
297
297
298 order = ['year', 'month', 'day', 'hour', 'minute', 'second']
298 order = ['year', 'month', 'day', 'hour', 'minute', 'second']
299 deltas = {}
299 deltas = {}
300
300
301 # Get date parts deltas
301 # Get date parts deltas
302 now = datetime.now()
302 now = datetime.now()
303 for part in order:
303 for part in order:
304 deltas[part] = getattr(now, part) - getattr(prevdate, part)
304 deltas[part] = getattr(now, part) - getattr(prevdate, part)
305
305
306 # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
306 # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
307 # not 1 hour, -59 minutes and -59 seconds)
307 # not 1 hour, -59 minutes and -59 seconds)
308
308
309 for num, length in [(5, 60), (4, 60), (3, 24)]: # seconds, minutes, hours
309 for num, length in [(5, 60), (4, 60), (3, 24)]: # seconds, minutes, hours
310 part = order[num]
310 part = order[num]
311 carry_part = order[num - 1]
311 carry_part = order[num - 1]
312
312
313 if deltas[part] < 0:
313 if deltas[part] < 0:
314 deltas[part] += length
314 deltas[part] += length
315 deltas[carry_part] -= 1
315 deltas[carry_part] -= 1
316
316
317 # Same thing for days except that the increment depends on the (variable)
317 # Same thing for days except that the increment depends on the (variable)
318 # number of days in the month
318 # number of days in the month
319 month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
319 month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
320 if deltas['day'] < 0:
320 if deltas['day'] < 0:
321 if prevdate.month == 2 and (prevdate.year % 4 == 0 and
321 if prevdate.month == 2 and (prevdate.year % 4 == 0 and
322 (prevdate.year % 100 != 0 or prevdate.year % 400 == 0)):
322 (prevdate.year % 100 != 0 or prevdate.year % 400 == 0)):
323 deltas['day'] += 29
323 deltas['day'] += 29
324 else:
324 else:
325 deltas['day'] += month_lengths[prevdate.month - 1]
325 deltas['day'] += month_lengths[prevdate.month - 1]
326
326
327 deltas['month'] -= 1
327 deltas['month'] -= 1
328
328
329 if deltas['month'] < 0:
329 if deltas['month'] < 0:
330 deltas['month'] += 12
330 deltas['month'] += 12
331 deltas['year'] -= 1
331 deltas['year'] -= 1
332
332
333 # Format the result
333 # Format the result
334 fmt_funcs = {
334 fmt_funcs = {
335 'year': lambda d: ungettext(u'%d year', '%d years', d) % d,
335 'year': lambda d: ungettext(u'%d year', '%d years', d) % d,
336 'month': lambda d: ungettext(u'%d month', '%d months', d) % d,
336 'month': lambda d: ungettext(u'%d month', '%d months', d) % d,
337 'day': lambda d: ungettext(u'%d day', '%d days', d) % d,
337 'day': lambda d: ungettext(u'%d day', '%d days', d) % d,
338 'hour': lambda d: ungettext(u'%d hour', '%d hours', d) % d,
338 'hour': lambda d: ungettext(u'%d hour', '%d hours', d) % d,
339 'minute': lambda d: ungettext(u'%d minute', '%d minutes', d) % d,
339 'minute': lambda d: ungettext(u'%d minute', '%d minutes', d) % d,
340 'second': lambda d: ungettext(u'%d second', '%d seconds', d) % d,
340 'second': lambda d: ungettext(u'%d second', '%d seconds', d) % d,
341 }
341 }
342
342
343 for i, part in enumerate(order):
343 for i, part in enumerate(order):
344 value = deltas[part]
344 value = deltas[part]
345 if value == 0:
345 if value == 0:
346 continue
346 continue
347
347
348 if i < 5:
348 if i < 5:
349 sub_part = order[i + 1]
349 sub_part = order[i + 1]
350 sub_value = deltas[sub_part]
350 sub_value = deltas[sub_part]
351 else:
351 else:
352 sub_value = 0
352 sub_value = 0
353
353
354 if sub_value == 0:
354 if sub_value == 0:
355 return _(u'%s ago') % fmt_funcs[part](value)
355 return _(u'%s ago') % fmt_funcs[part](value)
356
356
357 return _(u'%s and %s ago') % (fmt_funcs[part](value),
357 return _(u'%s and %s ago') % (fmt_funcs[part](value),
358 fmt_funcs[sub_part](sub_value))
358 fmt_funcs[sub_part](sub_value))
359
359
360 return _(u'just now')
360 return _(u'just now')
361
361
362
362
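For example, inside a configured RhodeCode/Pylons environment (where the translator is registered) one would get roughly:

from datetime import timedelta
age(datetime.now() - timedelta(minutes=90))   # -> u'1 hour and 30 minutes ago'
age(datetime.now() - timedelta(seconds=5))    # -> u'5 seconds ago'
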
363 def uri_filter(uri):
363 def uri_filter(uri):
364 """
364 """
365 Removes user:password from given url string
365 Removes user:password from given url string
366
366
367 :param uri:
367 :param uri:
368 :rtype: unicode
368 :rtype: unicode
369 :returns: filtered list of strings
369 :returns: filtered list of strings
370 """
370 """
371 if not uri:
371 if not uri:
372 return ''
372 return ''
373
373
374 proto = ''
374 proto = ''
375
375
376 for pat in ('https://', 'http://'):
376 for pat in ('https://', 'http://'):
377 if uri.startswith(pat):
377 if uri.startswith(pat):
378 uri = uri[len(pat):]
378 uri = uri[len(pat):]
379 proto = pat
379 proto = pat
380 break
380 break
381
381
382 # remove passwords and username
382 # remove passwords and username
383 uri = uri[uri.find('@') + 1:]
383 uri = uri[uri.find('@') + 1:]
384
384
385 # get the port
385 # get the port
386 cred_pos = uri.find(':')
386 cred_pos = uri.find(':')
387 if cred_pos == -1:
387 if cred_pos == -1:
388 host, port = uri, None
388 host, port = uri, None
389 else:
389 else:
390 host, port = uri[:cred_pos], uri[cred_pos + 1:]
390 host, port = uri[:cred_pos], uri[cred_pos + 1:]
391
391
392 return filter(None, [proto, host, port])
392 return filter(None, [proto, host, port])
393
393
394
394
395 def credentials_filter(uri):
395 def credentials_filter(uri):
396 """
396 """
397 Returns a url with removed credentials
397 Returns a url with removed credentials
398
398
399 :param uri:
399 :param uri:
400 """
400 """
401
401
402 uri = uri_filter(uri)
402 uri = uri_filter(uri)
403 #check if we have port
403 #check if we have port
404 if len(uri) > 2 and uri[2]:
404 if len(uri) > 2 and uri[2]:
405 uri[2] = ':' + uri[2]
405 uri[2] = ':' + uri[2]
406
406
407 return ''.join(uri)
407 return ''.join(uri)
408
408
409
409
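For example (the URLs are illustrative):

assert credentials_filter('https://user:secret@example.com:8080/repo') == \
    'https://example.com:8080/repo'
assert credentials_filter('http://example.com/repo') == 'http://example.com/repo'
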
410 def get_changeset_safe(repo, rev):
410 def get_changeset_safe(repo, rev):
411 """
411 """
412 Safe version of get_changeset; if the changeset doesn't exist for a
412 Safe version of get_changeset; if the changeset doesn't exist for a
413 repo it returns a Dummy one instead
413 repo it returns a Dummy one instead
414
414
415 :param repo:
415 :param repo:
416 :param rev:
416 :param rev:
417 """
417 """
418 from rhodecode.lib.vcs.backends.base import BaseRepository
418 from rhodecode.lib.vcs.backends.base import BaseRepository
419 from rhodecode.lib.vcs.exceptions import RepositoryError
419 from rhodecode.lib.vcs.exceptions import RepositoryError
420 from rhodecode.lib.vcs.backends.base import EmptyChangeset
420 if not isinstance(repo, BaseRepository):
421 if not isinstance(repo, BaseRepository):
421 raise Exception('You must pass a Repository '
422 raise Exception('You must pass a Repository '
422 'object as first argument got %s' % type(repo))
423 'object as first argument got %s' % type(repo))
423
424
424 try:
425 try:
425 cs = repo.get_changeset(rev)
426 cs = repo.get_changeset(rev)
426 except RepositoryError:
427 except RepositoryError:
427 from rhodecode.lib.utils import EmptyChangeset
428 cs = EmptyChangeset(requested_revision=rev)
428 cs = EmptyChangeset(requested_revision=rev)
429 return cs
429 return cs
430
430
431
431
432 MENTIONS_REGEX = r'(?:^@|\s@)([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)(?:\s{1})'
432 MENTIONS_REGEX = r'(?:^@|\s@)([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)(?:\s{1})'
433
433
434
434
435 def extract_mentioned_users(s):
435 def extract_mentioned_users(s):
436 """
436 """
437 Returns unique usernames from given string s that have @mention
437 Returns unique usernames from given string s that have @mention
438
438
439 :param s: string to get mentions
439 :param s: string to get mentions
440 """
440 """
441 usrs = set()
441 usrs = set()
442 for username in re.findall(MENTIONS_REGEX, s):
442 for username in re.findall(MENTIONS_REGEX, s):
443 usrs.add(username)
443 usrs.add(username)
444
444
445 return sorted(list(usrs), key=lambda k: k.lower())
445 return sorted(list(usrs), key=lambda k: k.lower())
446
446
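For example (note the regex requires trailing whitespace after each mention; the text is illustrative):

text = 'please review this @marcin and @lukasz thanks! cc @marcin '
assert extract_mentioned_users(text) == ['lukasz', 'marcin']
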
447 class AttributeDict(dict):
447 class AttributeDict(dict):
448 def __getattr__(self, attr):
448 def __getattr__(self, attr):
449 return self.get(attr, None)
449 return self.get(attr, None)
450 __setattr__ = dict.__setitem__
450 __setattr__ = dict.__setitem__
451 __delattr__ = dict.__delitem__
451 __delattr__ = dict.__delitem__
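
A tiny usage sketch:

opts = AttributeDict(default_encoding='utf8')
assert opts.default_encoding == 'utf8'    # attribute access reads the dict
assert opts.missing is None               # unknown keys return None, not AttributeError
opts.extra = 1                            # attribute assignment writes into the dict
assert opts['extra'] == 1
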
@@ -1,15 +1,17 b''
1 """
1 """
2 Mercurial libs compatibility
2 Mercurial libs compatibility
3 """
3 """
4
4
5 from mercurial import archival, merge as hg_merge, patch, ui
5 from mercurial import archival, merge as hg_merge, patch, ui
6 from mercurial.commands import clone, nullid, pull
6 from mercurial.commands import clone, nullid, pull
7 from mercurial.context import memctx, memfilectx
7 from mercurial.context import memctx, memfilectx
8 from mercurial.error import RepoError, RepoLookupError, Abort
8 from mercurial.error import RepoError, RepoLookupError, Abort
9 from mercurial.hgweb.common import get_contact
9 from mercurial.hgweb.common import get_contact
10 from mercurial.localrepo import localrepository
10 from mercurial.localrepo import localrepository
11 from mercurial.match import match
11 from mercurial.match import match
12 from mercurial.mdiff import diffopts
12 from mercurial.mdiff import diffopts
13 from mercurial.node import hex
13 from mercurial.node import hex
14 from mercurial.encoding import tolocal
14 from mercurial.encoding import tolocal
15 from mercurial import discovery
15 from mercurial import discovery
16 from mercurial import localrepo
17 from mercurial import scmutil No newline at end of file
@@ -1,244 +1,245 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """
2 """
3 rhodecode.model.pull_request
3 rhodecode.model.pull_request
4 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
4 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
5
5
6 pull request model for RhodeCode
6 pull request model for RhodeCode
7
7
8 :created_on: Jun 6, 2012
8 :created_on: Jun 6, 2012
9 :author: marcink
9 :author: marcink
10 :copyright: (C) 2012-2012 Marcin Kuzminski <marcin@python-works.com>
10 :copyright: (C) 2012-2012 Marcin Kuzminski <marcin@python-works.com>
11 :license: GPLv3, see COPYING for more details.
11 :license: GPLv3, see COPYING for more details.
12 """
12 """
13 # This program is free software: you can redistribute it and/or modify
13 # This program is free software: you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation, either version 3 of the License, or
15 # the Free Software Foundation, either version 3 of the License, or
16 # (at your option) any later version.
16 # (at your option) any later version.
17 #
17 #
18 # This program is distributed in the hope that it will be useful,
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
21 # GNU General Public License for more details.
22 #
22 #
23 # You should have received a copy of the GNU General Public License
23 # You should have received a copy of the GNU General Public License
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25
25
26 import logging
26 import logging
27 import binascii
27 import binascii
28 import datetime
28 import datetime
29
29
30 from pylons.i18n.translation import _
30 from pylons.i18n.translation import _
31
31
32 from rhodecode.model.meta import Session
32 from rhodecode.model.meta import Session
33 from rhodecode.lib import helpers as h
33 from rhodecode.lib import helpers as h
34 from rhodecode.model import BaseModel
34 from rhodecode.model import BaseModel
35 from rhodecode.model.db import PullRequest, PullRequestReviewers, Notification
35 from rhodecode.model.db import PullRequest, PullRequestReviewers, Notification
36 from rhodecode.model.notification import NotificationModel
36 from rhodecode.model.notification import NotificationModel
37 from rhodecode.lib.utils2 import safe_unicode
37 from rhodecode.lib.utils2 import safe_unicode
38
38
39 from rhodecode.lib.vcs.utils.hgcompat import discovery
39 from rhodecode.lib.vcs.utils.hgcompat import discovery, localrepo, scmutil
40
40
41 log = logging.getLogger(__name__)
41 log = logging.getLogger(__name__)
42
42
43
43
44 class PullRequestModel(BaseModel):
44 class PullRequestModel(BaseModel):
45
45
46 cls = PullRequest
46 cls = PullRequest
47
47
48 def __get_pull_request(self, pull_request):
48 def __get_pull_request(self, pull_request):
49 return self._get_instance(PullRequest, pull_request)
49 return self._get_instance(PullRequest, pull_request)
50
50
51 def get_all(self, repo):
51 def get_all(self, repo):
52 repo = self._get_repo(repo)
52 repo = self._get_repo(repo)
53 return PullRequest.query().filter(PullRequest.other_repo == repo).all()
53 return PullRequest.query().filter(PullRequest.other_repo == repo).all()
54
54
55 def create(self, created_by, org_repo, org_ref, other_repo,
55 def create(self, created_by, org_repo, org_ref, other_repo,
56 other_ref, revisions, reviewers, title, description=None):
56 other_ref, revisions, reviewers, title, description=None):
57
57
58 created_by_user = self._get_user(created_by)
58 created_by_user = self._get_user(created_by)
59 org_repo = self._get_repo(org_repo)
59 org_repo = self._get_repo(org_repo)
60 other_repo = self._get_repo(other_repo)
60 other_repo = self._get_repo(other_repo)
61
61
62 new = PullRequest()
62 new = PullRequest()
63 new.org_repo = org_repo
63 new.org_repo = org_repo
64 new.org_ref = org_ref
64 new.org_ref = org_ref
65 new.other_repo = other_repo
65 new.other_repo = other_repo
66 new.other_ref = other_ref
66 new.other_ref = other_ref
67 new.revisions = revisions
67 new.revisions = revisions
68 new.title = title
68 new.title = title
69 new.description = description
69 new.description = description
70 new.author = created_by_user
70 new.author = created_by_user
71 self.sa.add(new)
71 self.sa.add(new)
72 Session().flush()
72 Session().flush()
73 #members
73 #members
74 for member in reviewers:
74 for member in reviewers:
75 _usr = self._get_user(member)
75 _usr = self._get_user(member)
76 reviewer = PullRequestReviewers(_usr, new)
76 reviewer = PullRequestReviewers(_usr, new)
77 self.sa.add(reviewer)
77 self.sa.add(reviewer)
78
78
79 #notification to reviewers
79 #notification to reviewers
80 notif = NotificationModel()
80 notif = NotificationModel()
81
81
82 subject = safe_unicode(
82 subject = safe_unicode(
83 h.link_to(
83 h.link_to(
84 _('%(user)s wants you to review pull request #%(pr_id)s') % \
84 _('%(user)s wants you to review pull request #%(pr_id)s') % \
85 {'user': created_by_user.username,
85 {'user': created_by_user.username,
86 'pr_id': new.pull_request_id},
86 'pr_id': new.pull_request_id},
87 h.url('pullrequest_show', repo_name=other_repo.repo_name,
87 h.url('pullrequest_show', repo_name=other_repo.repo_name,
88 pull_request_id=new.pull_request_id,
88 pull_request_id=new.pull_request_id,
89 qualified=True,
89 qualified=True,
90 )
90 )
91 )
91 )
92 )
92 )
93 body = description
93 body = description
94 notif.create(created_by=created_by_user, subject=subject, body=body,
94 notif.create(created_by=created_by_user, subject=subject, body=body,
95 recipients=reviewers,
95 recipients=reviewers,
96 type_=Notification.TYPE_PULL_REQUEST,)
96 type_=Notification.TYPE_PULL_REQUEST,)
97
97
98 return new
98 return new
99
99
100 def update_reviewers(self, pull_request, reviewers_ids):
100 def update_reviewers(self, pull_request, reviewers_ids):
101 reviewers_ids = set(reviewers_ids)
101 reviewers_ids = set(reviewers_ids)
102 pull_request = self.__get_pull_request(pull_request)
102 pull_request = self.__get_pull_request(pull_request)
103 current_reviewers = PullRequestReviewers.query()\
103 current_reviewers = PullRequestReviewers.query()\
104 .filter(PullRequestReviewers.pull_request==
104 .filter(PullRequestReviewers.pull_request==
105 pull_request)\
105 pull_request)\
106 .all()
106 .all()
107 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
107 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
108
108
109 to_add = reviewers_ids.difference(current_reviewers_ids)
109 to_add = reviewers_ids.difference(current_reviewers_ids)
110 to_remove = current_reviewers_ids.difference(reviewers_ids)
110 to_remove = current_reviewers_ids.difference(reviewers_ids)
111
111
112 log.debug("Adding %s reviewers" % to_add)
112 log.debug("Adding %s reviewers" % to_add)
113 log.debug("Removing %s reviewers" % to_remove)
113 log.debug("Removing %s reviewers" % to_remove)
114
114
115 for uid in to_add:
115 for uid in to_add:
116 _usr = self._get_user(uid)
116 _usr = self._get_user(uid)
117 reviewer = PullRequestReviewers(_usr, pull_request)
117 reviewer = PullRequestReviewers(_usr, pull_request)
118 self.sa.add(reviewer)
118 self.sa.add(reviewer)
119
119
120 for uid in to_remove:
120 for uid in to_remove:
121 reviewer = PullRequestReviewers.query()\
121 reviewer = PullRequestReviewers.query()\
122 .filter(PullRequestReviewers.user_id==uid,
122 .filter(PullRequestReviewers.user_id==uid,
123 PullRequestReviewers.pull_request==pull_request)\
123 PullRequestReviewers.pull_request==pull_request)\
124 .scalar()
124 .scalar()
125 if reviewer:
125 if reviewer:
126 self.sa.delete(reviewer)
126 self.sa.delete(reviewer)
127
127
128 def close_pull_request(self, pull_request):
128 def close_pull_request(self, pull_request):
129 pull_request = self.__get_pull_request(pull_request)
129 pull_request = self.__get_pull_request(pull_request)
130 pull_request.status = PullRequest.STATUS_CLOSED
130 pull_request.status = PullRequest.STATUS_CLOSED
131 pull_request.updated_on = datetime.datetime.now()
131 pull_request.updated_on = datetime.datetime.now()
132 self.sa.add(pull_request)
132 self.sa.add(pull_request)
133
133
134 def _get_changesets(self, org_repo, org_ref, other_repo, other_ref,
134 def _get_changesets(self, org_repo, org_ref, other_repo, other_ref,
135 discovery_data):
135 discovery_data):
136 """
136 """
137 Returns a list of changesets that are incoming from org_repo@org_ref
137 Returns a list of changesets that are incoming from org_repo@org_ref
138 to other_repo@other_ref
138 to other_repo@other_ref
139
139
140 :param org_repo:
140 :param org_repo:
141 :type org_repo:
141 :type org_repo:
142 :param org_ref:
142 :param org_ref:
143 :type org_ref:
143 :type org_ref:
144 :param other_repo:
144 :param other_repo:
145 :type other_repo:
145 :type other_repo:
146 :param other_ref:
146 :param other_ref:
147 :type other_ref:
147 :type other_ref:
148 :param tmp:
148 :param tmp:
149 :type tmp:
149 :type tmp:
150 """
150 """
151 changesets = []
151 changesets = []
152 #case two independent repos
152 #case two independent repos
153 if org_repo != other_repo:
154 common, incoming, rheads = discovery_data
153 common, incoming, rheads = discovery_data
155
154 if org_repo != other_repo and incoming:
156 if not incoming:
157 revs = []
158 else:
159 revs = org_repo._repo.changelog.findmissing(common, rheads)
155 revs = org_repo._repo.changelog.findmissing(common, rheads)
160
156
161 for cs in reversed(map(binascii.hexlify, revs)):
157 for cs in reversed(map(binascii.hexlify, revs)):
162 changesets.append(org_repo.get_changeset(cs))
158 changesets.append(org_repo.get_changeset(cs))
163 else:
159 else:
164 _revset_predicates = {
160 _revset_predicates = {
165 'branch': 'branch',
161 'branch': 'branch',
166 'book': 'bookmark',
162 'book': 'bookmark',
167 'tag': 'tag',
163 'tag': 'tag',
168 'rev': 'id',
164 'rev': 'id',
169 }
165 }
170
166
171 revs = [
167 revs = [
172 "ancestors(%s('%s')) and not ancestors(%s('%s'))" % (
168 "ancestors(%s('%s')) and not ancestors(%s('%s'))" % (
173 _revset_predicates[org_ref[0]], org_ref[1],
169 _revset_predicates[org_ref[0]], org_ref[1],
174 _revset_predicates[other_ref[0]], other_ref[1]
170 _revset_predicates[other_ref[0]], other_ref[1]
175 )
171 )
176 ]
172 ]
177
173
178 from mercurial import scmutil
179 out = scmutil.revrange(org_repo._repo, revs)
174 out = scmutil.revrange(org_repo._repo, revs)
180 for cs in reversed(out):
175 for cs in reversed(out):
181 changesets.append(org_repo.get_changeset(cs))
176 changesets.append(org_repo.get_changeset(cs))
182
177
183 return changesets
178 return changesets
184
179
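For the same-repo case above, the generated revset looks roughly like this (the branch names are illustrative):

# comparing org_ref ('branch', 'feature') against other_ref ('branch', 'default'):
#   ancestors(branch('feature')) and not ancestors(branch('default'))
# scmutil.revrange() then resolves this to the revisions unique to 'feature',
# which are appended in reversed order as changeset objects
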
185 def _get_discovery(self, org_repo, org_ref, other_repo, other_ref):
180 def _get_discovery(self, org_repo, org_ref, other_repo, other_ref):
186 """
181 """
187 Gets mercurial discovery data used to calculate the difference between
182 Gets mercurial discovery data used to calculate the difference between
188 repos and refs
183 repos and refs
189
184
190 :param org_repo:
185 :param org_repo:
191 :type org_repo:
186 :type org_repo:
192 :param org_ref:
187 :param org_ref:
193 :type org_ref:
188 :type org_ref:
194 :param other_repo:
189 :param other_repo:
195 :type other_repo:
190 :type other_repo:
196 :param other_ref:
191 :param other_ref:
197 :type other_ref:
192 :type other_ref:
198 """
193 """
199
194
200 other = org_repo._repo
195 _org_repo = org_repo._repo
201 repo = other_repo._repo
196 org_rev_type, org_rev = org_ref
202 tip = other[org_ref[1]]
197
198 _other_repo = other_repo._repo
199 other_rev_type, other_rev = other_ref
200
203 log.debug('Doing discovery for %s@%s vs %s@%s' % (
201 log.debug('Doing discovery for %s@%s vs %s@%s' % (
204 org_repo, org_ref, other_repo, other_ref)
202 org_repo, org_ref, other_repo, other_ref)
205 )
203 )
206 log.debug('Filter heads are %s[%s]' % (tip, org_ref[1]))
204 #log.debug('Filter heads are %s[%s]' % ('', org_ref[1]))
205 org_peer = localrepo.locallegacypeer(_org_repo.local())
207 tmp = discovery.findcommonincoming(
206 tmp = discovery.findcommonincoming(
208 repo=repo, # other_repo we check for incoming
207 repo=_other_repo, # other_repo we check for incoming
209 remote=other, # org_repo source for incoming
208 remote=org_peer, # org_repo source for incoming
210 heads=[tip.node()],
209 heads=[_other_repo[other_rev].node(),
210 _org_repo[org_rev].node()],
211 force=False
211 force=False
212 )
212 )
213 return tmp
213 return tmp
214
214
215 def get_compare_data(self, org_repo, org_ref, other_repo, other_ref):
215 def get_compare_data(self, org_repo, org_ref, other_repo, other_ref):
216 """
216 """
217 Returns a tuple of incoming changesets and the discovery data cache
217 Returns a tuple of incoming changesets and the discovery data cache
218
218
219 :param org_repo:
219 :param org_repo:
220 :type org_repo:
220 :type org_repo:
221 :param org_ref:
221 :param org_ref:
222 :type org_ref:
222 :type org_ref:
223 :param other_repo:
223 :param other_repo:
224 :type other_repo:
224 :type other_repo:
225 :param other_ref:
225 :param other_ref:
226 :type other_ref:
226 :type other_ref:
227 """
227 """
228
228
229 if len(org_ref) != 2 or not isinstance(org_ref, (list, tuple)):
229 if len(org_ref) != 2 or not isinstance(org_ref, (list, tuple)):
230 raise Exception('org_ref must be a two element list/tuple')
230 raise Exception('org_ref must be a two element list/tuple')
231
231
232 if len(other_ref) != 2 or not isinstance(org_ref, (list, tuple)):
232 if len(other_ref) != 2 or not isinstance(org_ref, (list, tuple)):
233 raise Exception('other_ref must be a two element list/tuple')
233 raise Exception('other_ref must be a two element list/tuple')
234
234
235 discovery_data = self._get_discovery(org_repo.scm_instance,
235 discovery_data = self._get_discovery(org_repo.scm_instance,
236 org_ref,
236 org_ref,
237 other_repo.scm_instance,
237 other_repo.scm_instance,
238 other_ref)
238 other_ref)
239 cs_ranges = self._get_changesets(org_repo.scm_instance,
239 cs_ranges = self._get_changesets(org_repo.scm_instance,
240 org_ref,
240 org_ref,
241 other_repo.scm_instance,
241 other_repo.scm_instance,
242 other_ref,
242 other_ref,
243 discovery_data)
243 discovery_data)
244
244 return cs_ranges, discovery_data
245 return cs_ranges, discovery_data
@@ -1,595 +1,604 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """
2 """
3 rhodecode.model.scm
3 rhodecode.model.scm
4 ~~~~~~~~~~~~~~~~~~~
4 ~~~~~~~~~~~~~~~~~~~
5
5
6 Scm model for RhodeCode
6 Scm model for RhodeCode
7
7
8 :created_on: Apr 9, 2010
8 :created_on: Apr 9, 2010
9 :author: marcink
9 :author: marcink
10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 :license: GPLv3, see COPYING for more details.
11 :license: GPLv3, see COPYING for more details.
12 """
12 """
13 # This program is free software: you can redistribute it and/or modify
13 # This program is free software: you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation, either version 3 of the License, or
15 # the Free Software Foundation, either version 3 of the License, or
16 # (at your option) any later version.
16 # (at your option) any later version.
17 #
17 #
18 # This program is distributed in the hope that it will be useful,
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
21 # GNU General Public License for more details.
22 #
22 #
23 # You should have received a copy of the GNU General Public License
23 # You should have received a copy of the GNU General Public License
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 from __future__ import with_statement
25 from __future__ import with_statement
26 import os
26 import os
27 import re
27 import re
28 import time
28 import time
29 import traceback
29 import traceback
30 import logging
30 import logging
31 import cStringIO
31 import cStringIO
32 import pkg_resources
32 import pkg_resources
33 from os.path import dirname as dn, join as jn
33 from os.path import dirname as dn, join as jn
34
34
35 from sqlalchemy import func
35 from sqlalchemy import func
36 from pylons.i18n.translation import _
36 from pylons.i18n.translation import _
37
37
38 import rhodecode
38 import rhodecode
39 from rhodecode.lib.vcs import get_backend
39 from rhodecode.lib.vcs import get_backend
40 from rhodecode.lib.vcs.exceptions import RepositoryError
40 from rhodecode.lib.vcs.exceptions import RepositoryError
41 from rhodecode.lib.vcs.utils.lazy import LazyProperty
41 from rhodecode.lib.vcs.utils.lazy import LazyProperty
42 from rhodecode.lib.vcs.nodes import FileNode
42 from rhodecode.lib.vcs.nodes import FileNode
43 from rhodecode.lib.vcs.backends.base import EmptyChangeset
43
44
44 from rhodecode import BACKENDS
45 from rhodecode import BACKENDS
45 from rhodecode.lib import helpers as h
46 from rhodecode.lib import helpers as h
46 from rhodecode.lib.utils2 import safe_str, safe_unicode
47 from rhodecode.lib.utils2 import safe_str, safe_unicode
47 from rhodecode.lib.auth import HasRepoPermissionAny, HasReposGroupPermissionAny
48 from rhodecode.lib.auth import HasRepoPermissionAny, HasReposGroupPermissionAny
48 from rhodecode.lib.utils import get_repos as get_filesystem_repos, make_ui, \
49 from rhodecode.lib.utils import get_repos as get_filesystem_repos, make_ui, \
49 action_logger, EmptyChangeset, REMOVED_REPO_PAT
50 action_logger, REMOVED_REPO_PAT
50 from rhodecode.model import BaseModel
51 from rhodecode.model import BaseModel
51 from rhodecode.model.db import Repository, RhodeCodeUi, CacheInvalidation, \
52 from rhodecode.model.db import Repository, RhodeCodeUi, CacheInvalidation, \
52 UserFollowing, UserLog, User, RepoGroup, PullRequest
53 UserFollowing, UserLog, User, RepoGroup, PullRequest
53
54
54 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
55
56
56
57
57 class UserTemp(object):
58 class UserTemp(object):
58 def __init__(self, user_id):
59 def __init__(self, user_id):
59 self.user_id = user_id
60 self.user_id = user_id
60
61
61 def __repr__(self):
62 def __repr__(self):
62 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
63 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
63
64
64
65
65 class RepoTemp(object):
66 class RepoTemp(object):
66 def __init__(self, repo_id):
67 def __init__(self, repo_id):
67 self.repo_id = repo_id
68 self.repo_id = repo_id
68
69
69 def __repr__(self):
70 def __repr__(self):
70 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
71 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
71
72
72
73
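These are lightweight stand-ins used only for action logging, e.g.:

print(repr(UserTemp(4)))    # -> <UserTemp('id:4')>
print(repr(RepoTemp(17)))   # -> <RepoTemp('id:17')>
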
73 class CachedRepoList(object):
74 class CachedRepoList(object):
74 """
75 """
75 Cached repo list; uses an in-memory cache after initialization, which is
76 Cached repo list; uses an in-memory cache after initialization, which is
76 super fast
77 super fast
77 """
78 """
78
79
79 def __init__(self, db_repo_list, repos_path, order_by=None):
80 def __init__(self, db_repo_list, repos_path, order_by=None):
80 self.db_repo_list = db_repo_list
81 self.db_repo_list = db_repo_list
81 self.repos_path = repos_path
82 self.repos_path = repos_path
82 self.order_by = order_by
83 self.order_by = order_by
83 self.reversed = (order_by or '').startswith('-')
84 self.reversed = (order_by or '').startswith('-')
84
85
85 def __len__(self):
86 def __len__(self):
86 return len(self.db_repo_list)
87 return len(self.db_repo_list)
87
88
88 def __repr__(self):
89 def __repr__(self):
89 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
90 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
90
91
91 def __iter__(self):
92 def __iter__(self):
92 # pre-populated cache_map to save executing select statements
93 # pre-populated cache_map to save executing select statements
93 # for each repo
94 # for each repo
94 cache_map = CacheInvalidation.get_cache_map()
95 cache_map = CacheInvalidation.get_cache_map()
95
96
96 for dbr in self.db_repo_list:
97 for dbr in self.db_repo_list:
97 scmr = dbr.scm_instance_cached(cache_map)
98 scmr = dbr.scm_instance_cached(cache_map)
98 # check permission at this level
99 # check permission at this level
99 if not HasRepoPermissionAny(
100 if not HasRepoPermissionAny(
100 'repository.read', 'repository.write', 'repository.admin'
101 'repository.read', 'repository.write', 'repository.admin'
101 )(dbr.repo_name, 'get repo check'):
102 )(dbr.repo_name, 'get repo check'):
102 continue
103 continue
103
104
104 if scmr is None:
105 if scmr is None:
105 log.error(
106 log.error(
106 '%s this repository is present in database but it '
107 '%s this repository is present in database but it '
107 'cannot be created as an scm instance' % dbr.repo_name
108 'cannot be created as an scm instance' % dbr.repo_name
108 )
109 )
109 continue
110 continue
110
111
111 last_change = scmr.last_change
112 last_change = scmr.last_change
112 tip = h.get_changeset_safe(scmr, 'tip')
113 tip = h.get_changeset_safe(scmr, 'tip')
113
114
114 tmp_d = {}
115 tmp_d = {}
115 tmp_d['name'] = dbr.repo_name
116 tmp_d['name'] = dbr.repo_name
116 tmp_d['name_sort'] = tmp_d['name'].lower()
117 tmp_d['name_sort'] = tmp_d['name'].lower()
117 tmp_d['description'] = dbr.description
118 tmp_d['description'] = dbr.description
118 tmp_d['description_sort'] = tmp_d['description'].lower()
119 tmp_d['description_sort'] = tmp_d['description'].lower()
119 tmp_d['last_change'] = last_change
120 tmp_d['last_change'] = last_change
120 tmp_d['last_change_sort'] = time.mktime(last_change.timetuple())
121 tmp_d['last_change_sort'] = time.mktime(last_change.timetuple())
121 tmp_d['tip'] = tip.raw_id
122 tmp_d['tip'] = tip.raw_id
122 tmp_d['tip_sort'] = tip.revision
123 tmp_d['tip_sort'] = tip.revision
123 tmp_d['rev'] = tip.revision
124 tmp_d['rev'] = tip.revision
124 tmp_d['contact'] = dbr.user.full_contact
125 tmp_d['contact'] = dbr.user.full_contact
125 tmp_d['contact_sort'] = tmp_d['contact']
126 tmp_d['contact_sort'] = tmp_d['contact']
126 tmp_d['owner_sort'] = tmp_d['contact']
127 tmp_d['owner_sort'] = tmp_d['contact']
127 tmp_d['repo_archives'] = list(scmr._get_archives())
128 tmp_d['repo_archives'] = list(scmr._get_archives())
128 tmp_d['last_msg'] = tip.message
129 tmp_d['last_msg'] = tip.message
129 tmp_d['author'] = tip.author
130 tmp_d['author'] = tip.author
130 tmp_d['dbrepo'] = dbr.get_dict()
131 tmp_d['dbrepo'] = dbr.get_dict()
131 tmp_d['dbrepo_fork'] = dbr.fork.get_dict() if dbr.fork else {}
132 tmp_d['dbrepo_fork'] = dbr.fork.get_dict() if dbr.fork else {}
132 yield tmp_d
133 yield tmp_d
133
134
134
135
135 class SimpleCachedRepoList(CachedRepoList):
136 class SimpleCachedRepoList(CachedRepoList):
136 """
137 """
137 Lighter version of CachedRepoList without the scm initialisation
138 Lighter version of CachedRepoList without the scm initialisation
138 """
139 """
139
140
140 def __iter__(self):
141 def __iter__(self):
141 for dbr in self.db_repo_list:
142 for dbr in self.db_repo_list:
142 # check permission at this level
143 # check permission at this level
143 if not HasRepoPermissionAny(
144 if not HasRepoPermissionAny(
144 'repository.read', 'repository.write', 'repository.admin'
145 'repository.read', 'repository.write', 'repository.admin'
145 )(dbr.repo_name, 'get repo check'):
146 )(dbr.repo_name, 'get repo check'):
146 continue
147 continue
147
148
148 tmp_d = {}
149 tmp_d = {}
149 tmp_d['name'] = dbr.repo_name
150 tmp_d['name'] = dbr.repo_name
150 tmp_d['name_sort'] = tmp_d['name'].lower()
151 tmp_d['name_sort'] = tmp_d['name'].lower()
151 tmp_d['description'] = dbr.description
152 tmp_d['description'] = dbr.description
152 tmp_d['description_sort'] = tmp_d['description'].lower()
153 tmp_d['description_sort'] = tmp_d['description'].lower()
153 tmp_d['dbrepo'] = dbr.get_dict()
154 tmp_d['dbrepo'] = dbr.get_dict()
154 tmp_d['dbrepo_fork'] = dbr.fork.get_dict() if dbr.fork else {}
155 tmp_d['dbrepo_fork'] = dbr.fork.get_dict() if dbr.fork else {}
155 yield tmp_d
156 yield tmp_d
156
157
157
158
158 class GroupList(object):
159 class GroupList(object):
159
160
160 def __init__(self, db_repo_group_list):
161 def __init__(self, db_repo_group_list):
161 self.db_repo_group_list = db_repo_group_list
162 self.db_repo_group_list = db_repo_group_list
162
163
163 def __len__(self):
164 def __len__(self):
164 return len(self.db_repo_group_list)
165 return len(self.db_repo_group_list)
165
166
166 def __repr__(self):
167 def __repr__(self):
167 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
168 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
168
169
169 def __iter__(self):
170 def __iter__(self):
170 for dbgr in self.db_repo_group_list:
171 for dbgr in self.db_repo_group_list:
171 # check permission at this level
172 # check permission at this level
172 if not HasReposGroupPermissionAny(
173 if not HasReposGroupPermissionAny(
173 'group.read', 'group.write', 'group.admin'
174 'group.read', 'group.write', 'group.admin'
174 )(dbgr.group_name, 'get group repo check'):
175 )(dbgr.group_name, 'get group repo check'):
175 continue
176 continue
176
177
177 yield dbgr
178 yield dbgr
178
179
179
180
180 class ScmModel(BaseModel):
181 class ScmModel(BaseModel):
181 """
182 """
182 Generic Scm Model
183 Generic Scm Model
183 """
184 """
184
185
185 def __get_repo(self, instance):
186 def __get_repo(self, instance):
186 cls = Repository
187 cls = Repository
187 if isinstance(instance, cls):
188 if isinstance(instance, cls):
188 return instance
189 return instance
189 elif isinstance(instance, int) or safe_str(instance).isdigit():
190 elif isinstance(instance, int) or safe_str(instance).isdigit():
190 return cls.get(instance)
191 return cls.get(instance)
191 elif isinstance(instance, basestring):
192 elif isinstance(instance, basestring):
192 return cls.get_by_repo_name(instance)
193 return cls.get_by_repo_name(instance)
193 elif instance:
194 elif instance:
194 raise Exception('given object must be int, basestr or Instance'
195 raise Exception('given object must be int, basestr or Instance'
195 ' of %s got %s' % (type(cls), type(instance)))
196 ' of %s got %s' % (type(cls), type(instance)))
196
197
197 @LazyProperty
198 @LazyProperty
198 def repos_path(self):
199 def repos_path(self):
199 """
200 """
200 Gets the repositories root path from the database
201 Gets the repositories root path from the database
201 """
202 """
202
203
203 q = self.sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one()
204 q = self.sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one()
204
205
205 return q.ui_value
206 return q.ui_value
206
207
207 def repo_scan(self, repos_path=None):
208 def repo_scan(self, repos_path=None):
208 """
209 """
209 Listing of repositories in given path. This path should not be a
210 Listing of repositories in given path. This path should not be a
210 repository itself. Return a dictionary of repository objects
211 repository itself. Return a dictionary of repository objects
211
212
212 :param repos_path: path to directory containing repositories
213 :param repos_path: path to directory containing repositories
213 """
214 """
214
215
215 if repos_path is None:
216 if repos_path is None:
216 repos_path = self.repos_path
217 repos_path = self.repos_path
217
218
218 log.info('scanning for repositories in %s' % repos_path)
219 log.info('scanning for repositories in %s' % repos_path)
219
220
220 baseui = make_ui('db')
221 baseui = make_ui('db')
221 repos = {}
222 repos = {}
222
223
223 for name, path in get_filesystem_repos(repos_path, recursive=True):
224 for name, path in get_filesystem_repos(repos_path, recursive=True):
224 # skip removed repos
225 # skip removed repos
225 if REMOVED_REPO_PAT.match(name):
226 if REMOVED_REPO_PAT.match(name):
226 continue
227 continue
227
228
228 # name needs to be decomposed and put back together using the /
229 # name needs to be decomposed and put back together using the /
229 # since this is the internal storage separator for rhodecode
230 # since this is the internal storage separator for rhodecode
230 name = Repository.url_sep().join(name.split(os.sep))
231 name = Repository.url_sep().join(name.split(os.sep))
231
232
232 try:
233 try:
233 if name in repos:
234 if name in repos:
234 raise RepositoryError('Duplicate repository name %s '
235 raise RepositoryError('Duplicate repository name %s '
235 'found in %s' % (name, path))
236 'found in %s' % (name, path))
236 else:
237 else:
237
238
238 klass = get_backend(path[0])
239 klass = get_backend(path[0])
239
240
240 if path[0] == 'hg' and path[0] in BACKENDS.keys():
241 if path[0] == 'hg' and path[0] in BACKENDS.keys():
241 repos[name] = klass(safe_str(path[1]), baseui=baseui)
242 repos[name] = klass(safe_str(path[1]), baseui=baseui)
242
243
243 if path[0] == 'git' and path[0] in BACKENDS.keys():
244 if path[0] == 'git' and path[0] in BACKENDS.keys():
244 repos[name] = klass(path[1])
245 repos[name] = klass(path[1])
245 except OSError:
246 except OSError:
246 continue
247 continue
247
248
248 return repos
249 return repos
249
250
250 def get_repos(self, all_repos=None, sort_key=None, simple=False):
251 def get_repos(self, all_repos=None, sort_key=None, simple=False):
251 """
252 """
252 Get all repos from the db and for each repo create its
253 Get all repos from the db and for each repo create its
253 backend instance and fill that backend with information from the database
254 backend instance and fill that backend with information from the database
254
255
255 :param all_repos: list of repository names as strings
256 :param all_repos: list of repository names as strings
256 pass a specific list of repositories, useful for filtering
257 pass a specific list of repositories, useful for filtering
257
258
258 :param sort_key: initial sorting of repos
259 :param sort_key: initial sorting of repos
259 :param simple: use SimpleCachedRepoList - one without the SCM info
260 :param simple: use SimpleCachedRepoList - one without the SCM info
260 """
261 """
261 if all_repos is None:
262 if all_repos is None:
262 all_repos = self.sa.query(Repository)\
263 all_repos = self.sa.query(Repository)\
263 .filter(Repository.group_id == None)\
264 .filter(Repository.group_id == None)\
264 .order_by(func.lower(Repository.repo_name)).all()
265 .order_by(func.lower(Repository.repo_name)).all()
265 if simple:
266 if simple:
266 repo_iter = SimpleCachedRepoList(all_repos,
267 repo_iter = SimpleCachedRepoList(all_repos,
267 repos_path=self.repos_path,
268 repos_path=self.repos_path,
268 order_by=sort_key)
269 order_by=sort_key)
269 else:
270 else:
270 repo_iter = CachedRepoList(all_repos,
271 repo_iter = CachedRepoList(all_repos,
271 repos_path=self.repos_path,
272 repos_path=self.repos_path,
272 order_by=sort_key)
273 order_by=sort_key)
273
274
274 return repo_iter
275 return repo_iter
275
276
276 def get_repos_groups(self, all_groups=None):
277 def get_repos_groups(self, all_groups=None):
277 if all_groups is None:
278 if all_groups is None:
278 all_groups = RepoGroup.query()\
279 all_groups = RepoGroup.query()\
279 .filter(RepoGroup.group_parent_id == None).all()
280 .filter(RepoGroup.group_parent_id == None).all()
280 group_iter = GroupList(all_groups)
281 group_iter = GroupList(all_groups)
281
282
282 return group_iter
283 return group_iter
283
284
284 def mark_for_invalidation(self, repo_name):
285 def mark_for_invalidation(self, repo_name):
285 """
286 """
286 Puts a cache invalidation task into the db for
287 Puts a cache invalidation task into the db for
287 further global cache invalidation
288 further global cache invalidation
288
289
289 :param repo_name: the repo for which invalidation should take place
290 :param repo_name: the repo for which invalidation should take place
290 """
291 """
291 CacheInvalidation.set_invalidate(repo_name)
292 CacheInvalidation.set_invalidate(repo_name)
292
293
    def toggle_following_repo(self, follow_repo_id, user_id):

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repo_id == follow_repo_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                self.sa.delete(f)
                action_logger(UserTemp(user_id),
                              'stopped_following_repo',
                              RepoTemp(follow_repo_id))
                return
            except:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_repo_id = follow_repo_id
            self.sa.add(f)

            action_logger(UserTemp(user_id),
                          'started_following_repo',
                          RepoTemp(follow_repo_id))
        except:
            log.error(traceback.format_exc())
            raise

    def toggle_following_user(self, follow_user_id, user_id):
        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user_id == follow_user_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                self.sa.delete(f)
                return
            except:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_user_id = follow_user_id
            self.sa.add(f)
        except:
            log.error(traceback.format_exc())
            raise

    def is_following_repo(self, repo_name, user_id, cache=False):
        r = self.sa.query(Repository)\
            .filter(Repository.repo_name == repo_name).scalar()

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == r)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None

    def is_following_user(self, username, user_id, cache=False):
        u = User.get_by_username(username)

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user == u)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None

    def get_followers(self, repo):
        repo = self._get_repo(repo)

        return self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == repo).count()

    def get_forks(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(Repository)\
            .filter(Repository.fork == repo).count()

    def get_pull_requests(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(PullRequest)\
            .filter(PullRequest.other_repo == repo).count()

    def mark_as_fork(self, repo, fork, user):
        repo = self.__get_repo(repo)
        fork = self.__get_repo(fork)
        if fork and repo.repo_id == fork.repo_id:
            raise Exception("Cannot set repository as fork of itself")
        repo.fork = fork
        self.sa.add(repo)
        return repo

    def pull_changes(self, repo, username):
        dbrepo = self.__get_repo(repo)
        clone_uri = dbrepo.clone_uri
        if not clone_uri:
            raise Exception("This repository doesn't have a clone uri")

        repo = dbrepo.scm_instance
        try:
            extras = {
                'ip': '',
                'username': username,
                'action': 'push_remote',
                'repository': dbrepo.repo_name,
                'scm': repo.alias,
            }
            Repository.inject_ui(repo, extras=extras)

            if repo.alias == 'git':
                repo.fetch(clone_uri)
            else:
                repo.pull(clone_uri)
            self.mark_for_invalidation(dbrepo.repo_name)
        except:
            log.error(traceback.format_exc())
            raise

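    # Usage sketch (illustrative; 'my-repo' and 'admin' are hypothetical
    # values): pulling from the remote stored in the repo's clone_uri is
    # triggered with
    #
    #     ScmModel().pull_changes('my-repo', username='admin')
    #
    # Under git this fetches from clone_uri, under hg it pulls, and the repo's
    # cache is then marked for invalidation.
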
    def commit_change(self, repo, repo_name, cs, user, author, message,
                      content, f_path):
        """
        Commits a change to a single file and returns the new tip changeset

        :param repo: SCM instance
        """

        if repo.alias == 'hg':
            from rhodecode.lib.vcs.backends.hg import \
                MercurialInMemoryChangeset as IMC
        elif repo.alias == 'git':
            from rhodecode.lib.vcs.backends.git import \
                GitInMemoryChangeset as IMC

        # decoding here ensures we have properly encoded values;
        # in any other case this will raise exceptions and deny the commit
        content = safe_str(content)
        path = safe_str(f_path)
        # message and author need to be unicode
        # the proper backend should then translate that into the required type
        message = safe_unicode(message)
        author = safe_unicode(author)
        m = IMC(repo)
        m.change(FileNode(path, content))
        tip = m.commit(message=message,
                       author=author,
                       parents=[cs], branch=cs.branch)

        action = 'push_local:%s' % tip.raw_id
        action_logger(user, action, repo_name)
        self.mark_for_invalidation(repo_name)
        return tip

    def create_node(self, repo, repo_name, cs, user, author, message, content,
                    f_path):
        if repo.alias == 'hg':
            from rhodecode.lib.vcs.backends.hg import MercurialInMemoryChangeset as IMC
        elif repo.alias == 'git':
            from rhodecode.lib.vcs.backends.git import GitInMemoryChangeset as IMC
        # decoding here ensures we have properly encoded values;
        # in any other case this will raise exceptions and deny the commit

        if isinstance(content, (basestring,)):
            content = safe_str(content)
        elif isinstance(content, (file, cStringIO.OutputType,)):
            content = content.read()
        else:
            raise Exception('Content is of unrecognized type %s' % (
                type(content)
            ))

        message = safe_unicode(message)
        author = safe_unicode(author)
        path = safe_str(f_path)
        m = IMC(repo)

        if isinstance(cs, EmptyChangeset):
            # EmptyChangeset means we're editing an empty (no commits) repository
            parents = None
        else:
            parents = [cs]

        m.add(FileNode(path, content=content))
        tip = m.commit(message=message,
                       author=author,
                       parents=parents, branch=cs.branch)

        action = 'push_local:%s' % tip.raw_id
        action_logger(user, action, repo_name)
        self.mark_for_invalidation(repo_name)
        return tip

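    # Usage sketch (illustrative values; db_repo/user are hypothetical
    # stand-ins): creating the first file in an empty repository goes through
    # create_node() with an EmptyChangeset as parent, while subsequent edits go
    # through commit_change(), e.g.
    #
    #     tip = ScmModel().create_node(
    #         repo=db_repo.scm_instance, repo_name=db_repo.repo_name,
    #         cs=EmptyChangeset(alias='hg'), user=user, author=user,
    #         message='commit1', content='line1', f_path='file1')
    #
    # Both return the new tip changeset and mark the repo's cache for
    # invalidation; test_compare_extra_commits in the test file below exercises
    # exactly this flow.
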
    def get_nodes(self, repo_name, revision, root_path='/', flat=True):
        """
        Recursively walks the given root dir and returns all paths found there,
        based on the repository walk function

        :param repo_name: name of repository
        :param revision: revision for which to list nodes
        :param root_path: root path to list
        :param flat: if True return flat lists of paths, otherwise lists of
            dicts with a name/type description
        """
        _files = list()
        _dirs = list()
        try:
            _repo = self.__get_repo(repo_name)
            changeset = _repo.scm_instance.get_changeset(revision)
            root_path = root_path.lstrip('/')
            for topnode, dirs, files in changeset.walk(root_path):
                for f in files:
                    _files.append(f.path if flat else {"name": f.path,
                                                       "type": "file"})
                for d in dirs:
                    _dirs.append(d.path if flat else {"name": d.path,
                                                      "type": "dir"})
        except RepositoryError:
            log.debug(traceback.format_exc())
            raise

        return _dirs, _files

    def get_unread_journal(self):
        return self.sa.query(UserLog).count()

    def get_repo_landing_revs(self, repo=None):
        """
        Generates select options with tags, branches and bookmarks (the latter
        for hg only), grouped by type

        :param repo:
        :type repo:
        """

        hist_l = []
        choices = []
        repo = self.__get_repo(repo)
        hist_l.append(['tip', _('latest tip')])
        choices.append('tip')
        if not repo:
            return choices, hist_l

        repo = repo.scm_instance

        branches_group = ([(k, k) for k, v in
                           repo.branches.iteritems()], _("Branches"))
        hist_l.append(branches_group)
        choices.extend([x[0] for x in branches_group[0]])

        if repo.alias == 'hg':
            bookmarks_group = ([(k, k) for k, v in
                                repo.bookmarks.iteritems()], _("Bookmarks"))
            hist_l.append(bookmarks_group)
            choices.extend([x[0] for x in bookmarks_group[0]])

        tags_group = ([(k, k) for k, v in
                       repo.tags.iteritems()], _("Tags"))
        hist_l.append(tags_group)
        choices.extend([x[0] for x in tags_group[0]])

        return choices, hist_l

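    # Illustrative shape of the return value (a sketch derived from the code
    # above, not output captured from a real repository):
    #
    #     choices ~ ['tip', <branch names>, <bookmark names (hg)>, <tag names>]
    #     hist_l  ~ [['tip', 'latest tip'],
    #                ([(branch, branch), ...], 'Branches'),
    #                ([(bookmark, bookmark), ...], 'Bookmarks'),  # hg only
    #                ([(tag, tag), ...], 'Tags')]
    #
    # choices is the flat list of accepted values, hist_l the grouped options
    # used to build the landing revision select widget.
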
    def install_git_hook(self, repo, force_create=False):
        """
        Creates a rhodecode hook inside a git repository

        :param repo: Instance of VCS repo
        :param force_create: Create even if same name hook exists
        """

        loc = jn(repo.path, 'hooks')
        if not repo.bare:
            loc = jn(repo.path, '.git', 'hooks')
        if not os.path.isdir(loc):
            os.makedirs(loc)

        tmpl = pkg_resources.resource_string(
            'rhodecode', jn('config', 'post_receive_tmpl.py')
        )

        _hook_file = jn(loc, 'post-receive')
        _rhodecode_hook = False
        log.debug('Installing git hook in repo %s' % repo)
        if os.path.exists(_hook_file):
            # let's take a look at this hook, maybe it's rhodecode ?
            log.debug('hook exists, checking if it is from rhodecode')
            _HOOK_VER_PAT = re.compile(r'^RC_HOOK_VER')
            with open(_hook_file, 'rb') as f:
                data = f.read()
                matches = re.compile(r'(?:%s)\s*=\s*(.*)'
                                     % 'RC_HOOK_VER').search(data)
                if matches:
                    try:
                        ver = matches.groups()[0]
                        log.debug('got %s it is rhodecode' % (ver))
                        _rhodecode_hook = True
                    except:
                        log.error(traceback.format_exc())

        if _rhodecode_hook or force_create:
            log.debug('writing hook file !')
            with open(_hook_file, 'wb') as f:
                tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
                f.write(tmpl)
            os.chmod(_hook_file, 0755)
        else:
            log.debug('skipping writing hook file')
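
    # Usage sketch (illustrative; db_repo is a hypothetical database
    # Repository instance): the hook is installed against the low-level VCS
    # repository object, e.g.
    #
    #     ScmModel().install_git_hook(db_repo.scm_instance, force_create=True)
    #
    # For bare repositories the hook lands in <repo>/hooks/post-receive, for
    # non-bare ones in <repo>/.git/hooks/post-receive. An existing hook is only
    # overwritten when it carries an RC_HOOK_VER marker or force_create is set.
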
@@ -1,52 +1,189 @@
from rhodecode.tests import *
from rhodecode.model.repo import RepoModel
from rhodecode.model.meta import Session
from rhodecode.model.db import Repository
from rhodecode.model.scm import ScmModel
from rhodecode.lib.vcs.backends.base import EmptyChangeset


class TestCompareController(TestController):

    def test_index_tag(self):
        self.log_user()
        tag1 = '0.1.3'
        tag2 = '0.1.2'
        response = self.app.get(url(controller='compare', action='index',
                                    repo_name=HG_REPO,
                                    org_ref_type="tag",
                                    org_ref=tag1,
                                    other_ref_type="tag",
                                    other_ref=tag2,
                                    ))
        response.mustcontain('%s@%s -> %s@%s' % (HG_REPO, tag1, HG_REPO, tag2))
        ## outgoing changesets between tags
        response.mustcontain('''<a href="/%s/changeset/17544fbfcd33ffb439e2b728b5d526b1ef30bfcf">r120:17544fbfcd33</a>''' % HG_REPO)
        response.mustcontain('''<a href="/%s/changeset/36e0fc9d2808c5022a24f49d6658330383ed8666">r119:36e0fc9d2808</a>''' % HG_REPO)
        response.mustcontain('''<a href="/%s/changeset/bb1a3ab98cc45cb934a77dcabf87a5a598b59e97">r118:bb1a3ab98cc4</a>''' % HG_REPO)
        response.mustcontain('''<a href="/%s/changeset/41fda979f02fda216374bf8edac4e83f69e7581c">r117:41fda979f02f</a>''' % HG_REPO)
        response.mustcontain('''<a href="/%s/changeset/9749bfbfc0d2eba208d7947de266303b67c87cda">r116:9749bfbfc0d2</a>''' % HG_REPO)
        response.mustcontain('''<a href="/%s/changeset/70d4cef8a37657ee4cf5aabb3bd9f68879769816">r115:70d4cef8a376</a>''' % HG_REPO)
        response.mustcontain('''<a href="/%s/changeset/c5ddebc06eaaba3010c2d66ea6ec9d074eb0f678">r112:c5ddebc06eaa</a>''' % HG_REPO)

        ## files diff
        response.mustcontain('''<div class="node"><a href="/%s/compare/tag@%s...tag@%s#C--1c5cf9e91c12">docs/api/utils/index.rst</a></div>''' % (HG_REPO, tag1, tag2))
        response.mustcontain('''<div class="node"><a href="/%s/compare/tag@%s...tag@%s#C--e3305437df55">test_and_report.sh</a></div>''' % (HG_REPO, tag1, tag2))
        response.mustcontain('''<div class="node"><a href="/%s/compare/tag@%s...tag@%s#C--c8e92ef85cd1">.hgignore</a></div>''' % (HG_REPO, tag1, tag2))
        response.mustcontain('''<div class="node"><a href="/%s/compare/tag@%s...tag@%s#C--6e08b694d687">.hgtags</a></div>''' % (HG_REPO, tag1, tag2))
        response.mustcontain('''<div class="node"><a href="/%s/compare/tag@%s...tag@%s#C--2c14b00f3393">docs/api/index.rst</a></div>''' % (HG_REPO, tag1, tag2))
        response.mustcontain('''<div class="node"><a href="/%s/compare/tag@%s...tag@%s#C--430ccbc82bdf">vcs/__init__.py</a></div>''' % (HG_REPO, tag1, tag2))
        response.mustcontain('''<div class="node"><a href="/%s/compare/tag@%s...tag@%s#C--9c390eb52cd6">vcs/backends/hg.py</a></div>''' % (HG_REPO, tag1, tag2))
        response.mustcontain('''<div class="node"><a href="/%s/compare/tag@%s...tag@%s#C--ebb592c595c0">vcs/utils/__init__.py</a></div>''' % (HG_REPO, tag1, tag2))
        response.mustcontain('''<div class="node"><a href="/%s/compare/tag@%s...tag@%s#C--7abc741b5052">vcs/utils/annotate.py</a></div>''' % (HG_REPO, tag1, tag2))
        response.mustcontain('''<div class="node"><a href="/%s/compare/tag@%s...tag@%s#C--2ef0ef106c56">vcs/utils/diffs.py</a></div>''' % (HG_REPO, tag1, tag2))
        response.mustcontain('''<div class="node"><a href="/%s/compare/tag@%s...tag@%s#C--3150cb87d4b7">vcs/utils/lazy.py</a></div>''' % (HG_REPO, tag1, tag2))

    def test_index_branch(self):
        self.log_user()
        response = self.app.get(url(controller='compare', action='index',
                                    repo_name=HG_REPO,
                                    org_ref_type="branch",
                                    org_ref='default',
                                    other_ref_type="branch",
                                    other_ref='default',
                                    ))

        response.mustcontain('%s@default -> %s@default' % (HG_REPO, HG_REPO))
        # branches are equal
        response.mustcontain('<tr><td>No changesets</td></tr>')

    def test_compare_revisions(self):
        self.log_user()
        rev1 = '3d8f361e72ab'
        rev2 = 'b986218ba1c9'
        response = self.app.get(url(controller='compare', action='index',
                                    repo_name=HG_REPO,
                                    org_ref_type="rev",
                                    org_ref=rev1,
                                    other_ref_type="rev",
                                    other_ref=rev2,
                                    ))
        response.mustcontain('%s@%s -> %s@%s' % (HG_REPO, rev1, HG_REPO, rev2))
        ## outgoing changesets between those revisions
        response.mustcontain("""<a href="/%s/changeset/3d8f361e72ab303da48d799ff1ac40d5ac37c67e">r1:%s</a>""" % (HG_REPO, rev1))

        ## files
        response.mustcontain("""<a href="/%s/compare/rev@%s...rev@%s#C--c8e92ef85cd1">.hgignore</a>""" % (HG_REPO, rev1, rev2))

    def test_compare_remote_repos(self):
        self.log_user()

        form_data = dict(
            repo_name=HG_FORK,
            repo_name_full=HG_FORK,
            repo_group=None,
            repo_type='hg',
            description='',
            private=False,
            copy_permissions=False,
            landing_rev='tip',
            update_after_clone=False,
            fork_parent_id=Repository.get_by_repo_name(HG_REPO),
        )
        RepoModel().create_fork(form_data, cur_user=TEST_USER_ADMIN_LOGIN)

        Session().commit()

        rev1 = '7d4bc8ec6be5'
        rev2 = '56349e29c2af'

        response = self.app.get(url(controller='compare', action='index',
                                    repo_name=HG_REPO,
                                    org_ref_type="rev",
                                    org_ref=rev1,
                                    other_ref_type="rev",
                                    other_ref=rev2,
                                    repo=HG_FORK
                                    ))

        try:
            response.mustcontain('%s@%s -> %s@%s' % (HG_REPO, rev1, HG_FORK, rev2))
            ## outgoing changesets between those revisions

            response.mustcontain("""<a href="/%s/changeset/7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7">r6:%s</a>""" % (HG_REPO, rev1))
            response.mustcontain("""<a href="/%s/changeset/6fff84722075f1607a30f436523403845f84cd9e">r5:6fff84722075</a>""" % (HG_REPO))
            response.mustcontain("""<a href="/%s/changeset/2dda4e345facb0ccff1a191052dd1606dba6781d">r4:2dda4e345fac</a>""" % (HG_REPO))

            ## files
            response.mustcontain("""<a href="/%s/compare/rev@%s...rev@%s#C--9c390eb52cd6">vcs/backends/hg.py</a>""" % (HG_REPO, rev1, rev2))
            response.mustcontain("""<a href="/%s/compare/rev@%s...rev@%s#C--41b41c1f2796">vcs/backends/__init__.py</a>""" % (HG_REPO, rev1, rev2))
            response.mustcontain("""<a href="/%s/compare/rev@%s...rev@%s#C--2f574d260608">vcs/backends/base.py</a>""" % (HG_REPO, rev1, rev2))
        finally:
            RepoModel().delete(HG_FORK)

    def test_compare_extra_commits(self):
        self.log_user()

        repo1 = RepoModel().create_repo(repo_name='one', repo_type='hg',
                                        description='diff-test',
                                        owner=TEST_USER_ADMIN_LOGIN)

        repo2 = RepoModel().create_repo(repo_name='one-fork', repo_type='hg',
                                        description='diff-test',
                                        owner=TEST_USER_ADMIN_LOGIN)

        Session().commit()
        r1_id = repo1.repo_id
        r1_name = repo1.repo_name
        r2_id = repo2.repo_id
        r2_name = repo2.repo_name

        # commit something!
        cs0 = ScmModel().create_node(
            repo=repo1.scm_instance, repo_name=r1_name,
            cs=EmptyChangeset(alias='hg'), user=TEST_USER_ADMIN_LOGIN,
            author=TEST_USER_ADMIN_LOGIN,
            message='commit1',
            content='line1',
            f_path='file1'
        )

        cs0_prim = ScmModel().create_node(
            repo=repo2.scm_instance, repo_name=r2_name,
            cs=EmptyChangeset(alias='hg'), user=TEST_USER_ADMIN_LOGIN,
            author=TEST_USER_ADMIN_LOGIN,
            message='commit1',
            content='line1',
            f_path='file1'
        )

        cs1 = ScmModel().commit_change(
            repo=repo2.scm_instance, repo_name=r2_name,
            cs=cs0_prim, user=TEST_USER_ADMIN_LOGIN, author=TEST_USER_ADMIN_LOGIN,
            message='commit2',
            content='line1\nline2',
            f_path='file1'
        )

        rev1 = 'default'
        rev2 = 'default'
        response = self.app.get(url(controller='compare', action='index',
                                    repo_name=r2_name,
                                    org_ref_type="branch",
                                    org_ref=rev1,
                                    other_ref_type="branch",
                                    other_ref=rev2,
                                    repo=r1_name
                                    ))

        try:
            response.mustcontain('%s@%s -> %s@%s' % (r2_name, rev1, r1_name, rev2))

            response.mustcontain("""<div class="message">commit2</div>""")
            response.mustcontain("""<a href="/%s/changeset/%s">r1:%s</a>""" % (r2_name, cs1.raw_id, cs1.short_id))
            ## files
            response.mustcontain("""<a href="/%s/compare/branch@%s...branch@%s#C--826e8142e6ba">file1</a>""" % (r2_name, rev1, rev2))

        finally:
            RepoModel().delete(r1_id)
            RepoModel().delete(r2_id)