Show More
@@ -1,559 +1,556 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """ |
|
3 | 3 | rhodecode.controllers.pullrequests |
|
4 | 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
5 | 5 | |
|
6 | 6 | pull requests controller for rhodecode for initializing pull requests |
|
7 | 7 | |
|
8 | 8 | :created_on: May 7, 2012 |
|
9 | 9 | :author: marcink |
|
10 | 10 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> |
|
11 | 11 | :license: GPLv3, see COPYING for more details. |
|
12 | 12 | """ |
|
13 | 13 | # This program is free software: you can redistribute it and/or modify |
|
14 | 14 | # it under the terms of the GNU General Public License as published by |
|
15 | 15 | # the Free Software Foundation, either version 3 of the License, or |
|
16 | 16 | # (at your option) any later version. |
|
17 | 17 | # |
|
18 | 18 | # This program is distributed in the hope that it will be useful, |
|
19 | 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
20 | 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
21 | 21 | # GNU General Public License for more details. |
|
22 | 22 | # |
|
23 | 23 | # You should have received a copy of the GNU General Public License |
|
24 | 24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
25 | 25 | import logging |
|
26 | 26 | import traceback |
|
27 | 27 | import formencode |
|
28 | 28 | |
|
29 | 29 | from webob.exc import HTTPNotFound, HTTPForbidden |
|
30 | 30 | from collections import defaultdict |
|
31 | 31 | from itertools import groupby |
|
32 | 32 | |
|
33 |
from pylons import request |
|
|
34 |
from pylons.controllers.util import |
|
|
33 | from pylons import request, tmpl_context as c, url | |
|
34 | from pylons.controllers.util import redirect | |
|
35 | 35 | from pylons.i18n.translation import _ |
|
36 | 36 | |
|
37 | 37 | from rhodecode.lib.compat import json |
|
38 | 38 | from rhodecode.lib.base import BaseRepoController, render |
|
39 | 39 | from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator,\ |
|
40 | 40 | NotAnonymous |
|
41 | 41 | from rhodecode.lib.helpers import Page |
|
42 | 42 | from rhodecode.lib import helpers as h |
|
43 | 43 | from rhodecode.lib import diffs |
|
44 | 44 | from rhodecode.lib.utils import action_logger, jsonify |
|
45 | 45 | from rhodecode.lib.vcs.utils import safe_str |
|
46 | 46 | from rhodecode.lib.vcs.exceptions import EmptyRepositoryError |
|
47 | from rhodecode.lib.vcs.backends.base import EmptyChangeset | |
|
48 | 47 | from rhodecode.lib.diffs import LimitedDiffContainer |
|
49 |
from rhodecode.model.db import |
|
|
50 | ChangesetComment | |
|
48 | from rhodecode.model.db import PullRequest, ChangesetStatus, ChangesetComment | |
|
51 | 49 | from rhodecode.model.pull_request import PullRequestModel |
|
52 | 50 | from rhodecode.model.meta import Session |
|
53 | 51 | from rhodecode.model.repo import RepoModel |
|
54 | 52 | from rhodecode.model.comment import ChangesetCommentsModel |
|
55 | 53 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
56 | 54 | from rhodecode.model.forms import PullRequestForm |
|
57 | from mercurial import scmutil | |
|
58 | 55 | from rhodecode.lib.utils2 import safe_int |
|
59 | 56 | |
|
60 | 57 | log = logging.getLogger(__name__) |
|
61 | 58 | |
|
62 | 59 | |
|
63 | 60 | class PullrequestsController(BaseRepoController): |
|
64 | 61 | |
|
65 | 62 | def __before__(self): |
|
66 | 63 | super(PullrequestsController, self).__before__() |
|
67 | 64 | repo_model = RepoModel() |
|
68 | 65 | c.users_array = repo_model.get_users_js() |
|
69 | 66 | c.users_groups_array = repo_model.get_users_groups_js() |
|
70 | 67 | |
|
71 | 68 | def _get_repo_refs(self, repo, rev=None, branch=None, branch_rev=None): |
|
72 | 69 | """return a structure with repo's interesting changesets, suitable for |
|
73 | 70 | the selectors in pullrequest.html |
|
74 | 71 | |
|
75 | 72 | rev: a revision that must be in the list somehow and selected by default |
|
76 | 73 | branch: a branch that must be in the list and selected by default - even if closed |
|
77 | 74 | branch_rev: a revision of which peers should be preferred and available.""" |
|
78 | 75 | # list named branches that has been merged to this named branch - it should probably merge back |
|
79 | 76 | peers = [] |
|
80 | 77 | |
|
81 | 78 | if rev: |
|
82 | 79 | rev = safe_str(rev) |
|
83 | 80 | |
|
84 | 81 | if branch: |
|
85 | 82 | branch = safe_str(branch) |
|
86 | 83 | |
|
87 | 84 | if branch_rev: |
|
88 | 85 | branch_rev = safe_str(branch_rev) |
|
89 | 86 | # not restricting to merge() would also get branch point and be better |
|
90 | 87 | # (especially because it would get the branch point) ... but is currently too expensive |
|
91 | 88 | otherbranches = {} |
|
92 | 89 | for i in repo._repo.revs( |
|
93 | 90 | "sort(parents(branch(id(%s)) and merge()) - branch(id(%s)))", |
|
94 | 91 | branch_rev, branch_rev): |
|
95 | 92 | cs = repo.get_changeset(i) |
|
96 | 93 | otherbranches[cs.branch] = cs.raw_id |
|
97 | 94 | for abranch, node in otherbranches.iteritems(): |
|
98 | 95 | selected = 'branch:%s:%s' % (abranch, node) |
|
99 | 96 | peers.append((selected, abranch)) |
|
100 | 97 | |
|
101 | 98 | selected = None |
|
102 | 99 | |
|
103 | 100 | branches = [] |
|
104 | 101 | for abranch, branchrev in repo.branches.iteritems(): |
|
105 | 102 | n = 'branch:%s:%s' % (abranch, branchrev) |
|
106 | 103 | branches.append((n, abranch)) |
|
107 | 104 | if rev == branchrev: |
|
108 | 105 | selected = n |
|
109 | 106 | if branch == abranch: |
|
110 | 107 | selected = n |
|
111 | 108 | branch = None |
|
112 | 109 | if branch: # branch not in list - it is probably closed |
|
113 | 110 | revs = repo._repo.revs('max(branch(%s))', branch) |
|
114 | 111 | if revs: |
|
115 | 112 | cs = repo.get_changeset(revs[0]) |
|
116 | 113 | selected = 'branch:%s:%s' % (branch, cs.raw_id) |
|
117 | 114 | branches.append((selected, branch)) |
|
118 | 115 | |
|
119 | 116 | bookmarks = [] |
|
120 | 117 | for bookmark, bookmarkrev in repo.bookmarks.iteritems(): |
|
121 | 118 | n = 'book:%s:%s' % (bookmark, bookmarkrev) |
|
122 | 119 | bookmarks.append((n, bookmark)) |
|
123 | 120 | if rev == bookmarkrev: |
|
124 | 121 | selected = n |
|
125 | 122 | |
|
126 | 123 | tags = [] |
|
127 | 124 | for tag, tagrev in repo.tags.iteritems(): |
|
128 | 125 | n = 'tag:%s:%s' % (tag, tagrev) |
|
129 | 126 | tags.append((n, tag)) |
|
130 | 127 | if rev == tagrev and tag != 'tip': # tip is not a real tag - and its branch is better |
|
131 | 128 | selected = n |
|
132 | 129 | |
|
133 | 130 | # prio 1: rev was selected as existing entry above |
|
134 | 131 | |
|
135 | 132 | # prio 2: create special entry for rev; rev _must_ be used |
|
136 | 133 | specials = [] |
|
137 | 134 | if rev and selected is None: |
|
138 | 135 | selected = 'rev:%s:%s' % (rev, rev) |
|
139 | 136 | specials = [(selected, '%s: %s' % (_("Changeset"), rev[:12]))] |
|
140 | 137 | |
|
141 | 138 | # prio 3: most recent peer branch |
|
142 | 139 | if peers and not selected: |
|
143 | 140 | selected = peers[0][0][0] |
|
144 | 141 | |
|
145 | 142 | # prio 4: tip revision |
|
146 | 143 | if not selected: |
|
147 | 144 | selected = 'tag:tip:%s' % repo.tags['tip'] |
|
148 | 145 | |
|
149 | 146 | groups = [(specials, _("Special")), |
|
150 | 147 | (peers, _("Peer branches")), |
|
151 | 148 | (bookmarks, _("Bookmarks")), |
|
152 | 149 | (branches, _("Branches")), |
|
153 | 150 | (tags, _("Tags")), |
|
154 | 151 | ] |
|
155 | 152 | return [g for g in groups if g[0]], selected |
|
156 | 153 | |
|
157 | 154 | def _get_is_allowed_change_status(self, pull_request): |
|
158 | 155 | owner = self.rhodecode_user.user_id == pull_request.user_id |
|
159 | 156 | reviewer = self.rhodecode_user.user_id in [x.user_id for x in |
|
160 | 157 | pull_request.reviewers] |
|
161 | 158 | return (self.rhodecode_user.admin or owner or reviewer) |
|
162 | 159 | |
|
163 | 160 | def _load_compare_data(self, pull_request, enable_comments=True): |
|
164 | 161 | """ |
|
165 | 162 | Load context data needed for generating compare diff |
|
166 | 163 | |
|
167 | 164 | :param pull_request: |
|
168 | 165 | """ |
|
169 | 166 | org_repo = pull_request.org_repo |
|
170 | 167 | (org_ref_type, |
|
171 | 168 | org_ref_name, |
|
172 | 169 | org_ref_rev) = pull_request.org_ref.split(':') |
|
173 | 170 | |
|
174 | 171 | other_repo = org_repo |
|
175 | 172 | (other_ref_type, |
|
176 | 173 | other_ref_name, |
|
177 | 174 | other_ref_rev) = pull_request.other_ref.split(':') |
|
178 | 175 | |
|
179 | 176 | # despite opening revisions for bookmarks/branches/tags, we always |
|
180 | 177 | # convert this to rev to prevent changes after bookmark or branch change |
|
181 | 178 | org_ref = ('rev', org_ref_rev) |
|
182 | 179 | other_ref = ('rev', other_ref_rev) |
|
183 | 180 | |
|
184 | 181 | c.org_repo = org_repo |
|
185 | 182 | c.other_repo = other_repo |
|
186 | 183 | |
|
187 | 184 | c.fulldiff = fulldiff = request.GET.get('fulldiff') |
|
188 | 185 | |
|
189 | 186 | c.cs_ranges = [org_repo.get_changeset(x) for x in pull_request.revisions] |
|
190 | 187 | |
|
191 | 188 | c.statuses = org_repo.statuses([x.raw_id for x in c.cs_ranges]) |
|
192 | 189 | |
|
193 | 190 | c.org_ref = org_ref[1] |
|
194 | 191 | c.org_ref_type = org_ref[0] |
|
195 | 192 | c.other_ref = other_ref[1] |
|
196 | 193 | c.other_ref_type = other_ref[0] |
|
197 | 194 | |
|
198 | 195 | diff_limit = self.cut_off_limit if not fulldiff else None |
|
199 | 196 | |
|
200 | 197 | # we swap org/other ref since we run a simple diff on one repo |
|
201 | 198 | log.debug('running diff between %s and %s in %s' |
|
202 | 199 | % (other_ref, org_ref, org_repo.scm_instance.path)) |
|
203 | 200 | txtdiff = org_repo.scm_instance.get_diff(rev1=safe_str(other_ref[1]), rev2=safe_str(org_ref[1])) |
|
204 | 201 | |
|
205 | 202 | diff_processor = diffs.DiffProcessor(txtdiff or '', format='gitdiff', |
|
206 | 203 | diff_limit=diff_limit) |
|
207 | 204 | _parsed = diff_processor.prepare() |
|
208 | 205 | |
|
209 | 206 | c.limited_diff = False |
|
210 | 207 | if isinstance(_parsed, LimitedDiffContainer): |
|
211 | 208 | c.limited_diff = True |
|
212 | 209 | |
|
213 | 210 | c.files = [] |
|
214 | 211 | c.changes = {} |
|
215 | 212 | c.lines_added = 0 |
|
216 | 213 | c.lines_deleted = 0 |
|
217 | 214 | |
|
218 | 215 | for f in _parsed: |
|
219 | 216 | st = f['stats'] |
|
220 | 217 | c.lines_added += st['added'] |
|
221 | 218 | c.lines_deleted += st['deleted'] |
|
222 | 219 | fid = h.FID('', f['filename']) |
|
223 | 220 | c.files.append([fid, f['operation'], f['filename'], f['stats']]) |
|
224 | 221 | htmldiff = diff_processor.as_html(enable_comments=enable_comments, |
|
225 | 222 | parsed_lines=[f]) |
|
226 | 223 | c.changes[fid] = [f['operation'], f['filename'], htmldiff] |
|
227 | 224 | |
|
228 | 225 | @LoginRequired() |
|
229 | 226 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
230 | 227 | 'repository.admin') |
|
231 | 228 | def show_all(self, repo_name): |
|
232 | 229 | c.pull_requests = PullRequestModel().get_all(repo_name) |
|
233 | 230 | c.repo_name = repo_name |
|
234 | 231 | p = safe_int(request.GET.get('page', 1), 1) |
|
235 | 232 | |
|
236 | 233 | c.pullrequests_pager = Page(c.pull_requests, page=p, items_per_page=10) |
|
237 | 234 | |
|
238 | 235 | c.pullrequest_data = render('/pullrequests/pullrequest_data.html') |
|
239 | 236 | |
|
240 | 237 | if request.environ.get('HTTP_X_PARTIAL_XHR'): |
|
241 | 238 | return c.pullrequest_data |
|
242 | 239 | |
|
243 | 240 | return render('/pullrequests/pullrequest_show_all.html') |
|
244 | 241 | |
|
245 | 242 | @LoginRequired() |
|
246 | 243 | @NotAnonymous() |
|
247 | 244 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
248 | 245 | 'repository.admin') |
|
249 | 246 | def index(self): |
|
250 | 247 | org_repo = c.rhodecode_db_repo |
|
251 | 248 | |
|
252 | 249 | if org_repo.scm_instance.alias != 'hg': |
|
253 | 250 | log.error('Review not available for GIT REPOS') |
|
254 | 251 | raise HTTPNotFound |
|
255 | 252 | |
|
256 | 253 | try: |
|
257 | 254 | org_repo.scm_instance.get_changeset() |
|
258 | 255 | except EmptyRepositoryError, e: |
|
259 | 256 | h.flash(h.literal(_('There are no changesets yet')), |
|
260 | 257 | category='warning') |
|
261 | 258 | redirect(url('summary_home', repo_name=org_repo.repo_name)) |
|
262 | 259 | |
|
263 | 260 | org_rev = request.GET.get('rev_end') |
|
264 | 261 | # rev_start is not directly useful - its parent could however be used |
|
265 | 262 | # as default for other and thus give a simple compare view |
|
266 | 263 | #other_rev = request.POST.get('rev_start') |
|
267 | 264 | branch = request.GET.get('branch') |
|
268 | 265 | |
|
269 | 266 | c.org_repos = [] |
|
270 | 267 | c.org_repos.append((org_repo.repo_name, org_repo.repo_name)) |
|
271 | 268 | c.default_org_repo = org_repo.repo_name |
|
272 | 269 | c.org_refs, c.default_org_ref = self._get_repo_refs(org_repo.scm_instance, rev=org_rev, branch=branch) |
|
273 | 270 | |
|
274 | 271 | c.other_repos = [] |
|
275 | 272 | other_repos_info = {} |
|
276 | 273 | |
|
277 | 274 | def add_other_repo(repo, branch_rev=None): |
|
278 | 275 | if repo.repo_name in other_repos_info: # shouldn't happen |
|
279 | 276 | return |
|
280 | 277 | c.other_repos.append((repo.repo_name, repo.repo_name)) |
|
281 | 278 | other_refs, selected_other_ref = self._get_repo_refs(repo.scm_instance, branch_rev=branch_rev) |
|
282 | 279 | other_repos_info[repo.repo_name] = { |
|
283 | 280 | 'user': dict(user_id=repo.user.user_id, |
|
284 | 281 | username=repo.user.username, |
|
285 | 282 | firstname=repo.user.firstname, |
|
286 | 283 | lastname=repo.user.lastname, |
|
287 | 284 | gravatar_link=h.gravatar_url(repo.user.email, 14)), |
|
288 | 285 | 'description': repo.description.split('\n', 1)[0], |
|
289 | 286 | 'revs': h.select('other_ref', selected_other_ref, other_refs, class_='refs') |
|
290 | 287 | } |
|
291 | 288 | |
|
292 | 289 | # add org repo to other so we can open pull request against peer branches on itself |
|
293 | 290 | add_other_repo(org_repo, branch_rev=org_rev) |
|
294 | 291 | c.default_other_repo = org_repo.repo_name |
|
295 | 292 | |
|
296 | 293 | # gather forks and add to this list ... even though it is rare to |
|
297 | 294 | # request forks to pull from their parent |
|
298 | 295 | for fork in org_repo.forks: |
|
299 | 296 | add_other_repo(fork) |
|
300 | 297 | |
|
301 | 298 | # add parents of this fork also, but only if it's not empty |
|
302 | 299 | if org_repo.parent and org_repo.parent.scm_instance.revisions: |
|
303 | 300 | add_other_repo(org_repo.parent) |
|
304 | 301 | c.default_other_repo = org_repo.parent.repo_name |
|
305 | 302 | |
|
306 | 303 | c.default_other_repo_info = other_repos_info[c.default_other_repo] |
|
307 | 304 | c.other_repos_info = json.dumps(other_repos_info) |
|
308 | 305 | |
|
309 | 306 | return render('/pullrequests/pullrequest.html') |
|
310 | 307 | |
|
311 | 308 | @LoginRequired() |
|
312 | 309 | @NotAnonymous() |
|
313 | 310 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
314 | 311 | 'repository.admin') |
|
315 | 312 | def create(self, repo_name): |
|
316 | 313 | repo = RepoModel()._get_repo(repo_name) |
|
317 | 314 | try: |
|
318 | 315 | _form = PullRequestForm(repo.repo_id)().to_python(request.POST) |
|
319 | 316 | except formencode.Invalid, errors: |
|
320 | 317 | log.error(traceback.format_exc()) |
|
321 | 318 | if errors.error_dict.get('revisions'): |
|
322 | 319 | msg = 'Revisions: %s' % errors.error_dict['revisions'] |
|
323 | 320 | elif errors.error_dict.get('pullrequest_title'): |
|
324 | 321 | msg = _('Pull request requires a title with min. 3 chars') |
|
325 | 322 | else: |
|
326 | 323 | msg = _('Error creating pull request') |
|
327 | 324 | |
|
328 | 325 | h.flash(msg, 'error') |
|
329 | 326 | return redirect(url('pullrequest_home', repo_name=repo_name)) |
|
330 | 327 | |
|
331 | 328 | org_repo = _form['org_repo'] |
|
332 | 329 | org_ref = 'rev:merge:%s' % _form['merge_rev'] |
|
333 | 330 | other_repo = _form['other_repo'] |
|
334 | 331 | other_ref = 'rev:ancestor:%s' % _form['ancestor_rev'] |
|
335 | 332 | revisions = [x for x in reversed(_form['revisions'])] |
|
336 | 333 | reviewers = _form['review_members'] |
|
337 | 334 | |
|
338 | 335 | title = _form['pullrequest_title'] |
|
339 | 336 | description = _form['pullrequest_desc'] |
|
340 | 337 | try: |
|
341 | 338 | pull_request = PullRequestModel().create( |
|
342 | 339 | self.rhodecode_user.user_id, org_repo, org_ref, other_repo, |
|
343 | 340 | other_ref, revisions, reviewers, title, description |
|
344 | 341 | ) |
|
345 | 342 | Session().commit() |
|
346 | 343 | h.flash(_('Successfully opened new pull request'), |
|
347 | 344 | category='success') |
|
348 | 345 | except Exception: |
|
349 | 346 | h.flash(_('Error occurred during sending pull request'), |
|
350 | 347 | category='error') |
|
351 | 348 | log.error(traceback.format_exc()) |
|
352 | 349 | return redirect(url('pullrequest_home', repo_name=repo_name)) |
|
353 | 350 | |
|
354 | 351 | return redirect(url('pullrequest_show', repo_name=other_repo, |
|
355 | 352 | pull_request_id=pull_request.pull_request_id)) |
|
356 | 353 | |
|
357 | 354 | @LoginRequired() |
|
358 | 355 | @NotAnonymous() |
|
359 | 356 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
360 | 357 | 'repository.admin') |
|
361 | 358 | @jsonify |
|
362 | 359 | def update(self, repo_name, pull_request_id): |
|
363 | 360 | pull_request = PullRequest.get_or_404(pull_request_id) |
|
364 | 361 | if pull_request.is_closed(): |
|
365 | 362 | raise HTTPForbidden() |
|
366 | 363 | #only owner or admin can update it |
|
367 | 364 | owner = pull_request.author.user_id == c.rhodecode_user.user_id |
|
368 | 365 | if h.HasPermissionAny('hg.admin', 'repository.admin')() or owner: |
|
369 | 366 | reviewers_ids = map(int, filter(lambda v: v not in [None, ''], |
|
370 | 367 | request.POST.get('reviewers_ids', '').split(','))) |
|
371 | 368 | |
|
372 | 369 | PullRequestModel().update_reviewers(pull_request_id, reviewers_ids) |
|
373 | 370 | Session().commit() |
|
374 | 371 | return True |
|
375 | 372 | raise HTTPForbidden() |
|
376 | 373 | |
|
377 | 374 | @LoginRequired() |
|
378 | 375 | @NotAnonymous() |
|
379 | 376 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
380 | 377 | 'repository.admin') |
|
381 | 378 | @jsonify |
|
382 | 379 | def delete(self, repo_name, pull_request_id): |
|
383 | 380 | pull_request = PullRequest.get_or_404(pull_request_id) |
|
384 | 381 | #only owner can delete it ! |
|
385 | 382 | if pull_request.author.user_id == c.rhodecode_user.user_id: |
|
386 | 383 | PullRequestModel().delete(pull_request) |
|
387 | 384 | Session().commit() |
|
388 | 385 | h.flash(_('Successfully deleted pull request'), |
|
389 | 386 | category='success') |
|
390 | 387 | return redirect(url('admin_settings_my_account', anchor='pullrequests')) |
|
391 | 388 | raise HTTPForbidden() |
|
392 | 389 | |
|
393 | 390 | @LoginRequired() |
|
394 | 391 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
395 | 392 | 'repository.admin') |
|
396 | 393 | def show(self, repo_name, pull_request_id): |
|
397 | 394 | repo_model = RepoModel() |
|
398 | 395 | c.users_array = repo_model.get_users_js() |
|
399 | 396 | c.users_groups_array = repo_model.get_users_groups_js() |
|
400 | 397 | c.pull_request = PullRequest.get_or_404(pull_request_id) |
|
401 | 398 | c.allowed_to_change_status = self._get_is_allowed_change_status(c.pull_request) |
|
402 | 399 | cc_model = ChangesetCommentsModel() |
|
403 | 400 | cs_model = ChangesetStatusModel() |
|
404 | 401 | _cs_statuses = cs_model.get_statuses(c.pull_request.org_repo, |
|
405 | 402 | pull_request=c.pull_request, |
|
406 | 403 | with_revisions=True) |
|
407 | 404 | |
|
408 | 405 | cs_statuses = defaultdict(list) |
|
409 | 406 | for st in _cs_statuses: |
|
410 | 407 | cs_statuses[st.author.username] += [st] |
|
411 | 408 | |
|
412 | 409 | c.pull_request_reviewers = [] |
|
413 | 410 | c.pull_request_pending_reviewers = [] |
|
414 | 411 | for o in c.pull_request.reviewers: |
|
415 | 412 | st = cs_statuses.get(o.user.username, None) |
|
416 | 413 | if st: |
|
417 | 414 | sorter = lambda k: k.version |
|
418 | 415 | st = [(x, list(y)[0]) |
|
419 | 416 | for x, y in (groupby(sorted(st, key=sorter), sorter))] |
|
420 | 417 | else: |
|
421 | 418 | c.pull_request_pending_reviewers.append(o.user) |
|
422 | 419 | c.pull_request_reviewers.append([o.user, st]) |
|
423 | 420 | |
|
424 | 421 | # pull_requests repo_name we opened it against |
|
425 | 422 | # ie. other_repo must match |
|
426 | 423 | if repo_name != c.pull_request.other_repo.repo_name: |
|
427 | 424 | raise HTTPNotFound |
|
428 | 425 | |
|
429 | 426 | # load compare data into template context |
|
430 | 427 | enable_comments = not c.pull_request.is_closed() |
|
431 | 428 | self._load_compare_data(c.pull_request, enable_comments=enable_comments) |
|
432 | 429 | |
|
433 | 430 | # inline comments |
|
434 | 431 | c.inline_cnt = 0 |
|
435 | 432 | c.inline_comments = cc_model.get_inline_comments( |
|
436 | 433 | c.rhodecode_db_repo.repo_id, |
|
437 | 434 | pull_request=pull_request_id) |
|
438 | 435 | # count inline comments |
|
439 | 436 | for __, lines in c.inline_comments: |
|
440 | 437 | for comments in lines.values(): |
|
441 | 438 | c.inline_cnt += len(comments) |
|
442 | 439 | # comments |
|
443 | 440 | c.comments = cc_model.get_comments(c.rhodecode_db_repo.repo_id, |
|
444 | 441 | pull_request=pull_request_id) |
|
445 | 442 | |
|
446 | 443 | try: |
|
447 | 444 | cur_status = c.statuses[c.pull_request.revisions[0]][0] |
|
448 | 445 | except Exception: |
|
449 | 446 | log.error(traceback.format_exc()) |
|
450 | 447 | cur_status = 'undefined' |
|
451 | 448 | if c.pull_request.is_closed() and 0: |
|
452 | 449 | c.current_changeset_status = cur_status |
|
453 | 450 | else: |
|
454 | 451 | # changeset(pull-request) status calulation based on reviewers |
|
455 | 452 | c.current_changeset_status = cs_model.calculate_status( |
|
456 | 453 | c.pull_request_reviewers, |
|
457 | 454 | ) |
|
458 | 455 | c.changeset_statuses = ChangesetStatus.STATUSES |
|
459 | 456 | |
|
460 | 457 | c.as_form = False |
|
461 | 458 | c.ancestor = None # there is one - but right here we don't know which |
|
462 | 459 | return render('/pullrequests/pullrequest_show.html') |
|
463 | 460 | |
|
464 | 461 | @LoginRequired() |
|
465 | 462 | @NotAnonymous() |
|
466 | 463 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
467 | 464 | 'repository.admin') |
|
468 | 465 | @jsonify |
|
469 | 466 | def comment(self, repo_name, pull_request_id): |
|
470 | 467 | pull_request = PullRequest.get_or_404(pull_request_id) |
|
471 | 468 | if pull_request.is_closed(): |
|
472 | 469 | raise HTTPForbidden() |
|
473 | 470 | |
|
474 | 471 | status = request.POST.get('changeset_status') |
|
475 | 472 | change_status = request.POST.get('change_changeset_status') |
|
476 | 473 | text = request.POST.get('text') |
|
477 | 474 | close_pr = request.POST.get('save_close') |
|
478 | 475 | |
|
479 | 476 | allowed_to_change_status = self._get_is_allowed_change_status(pull_request) |
|
480 | 477 | if status and change_status and allowed_to_change_status: |
|
481 | 478 | _def = (_('Status change -> %s') |
|
482 | 479 | % ChangesetStatus.get_status_lbl(status)) |
|
483 | 480 | if close_pr: |
|
484 | 481 | _def = _('Closing with') + ' ' + _def |
|
485 | 482 | text = text or _def |
|
486 | 483 | comm = ChangesetCommentsModel().create( |
|
487 | 484 | text=text, |
|
488 | 485 | repo=c.rhodecode_db_repo.repo_id, |
|
489 | 486 | user=c.rhodecode_user.user_id, |
|
490 | 487 | pull_request=pull_request_id, |
|
491 | 488 | f_path=request.POST.get('f_path'), |
|
492 | 489 | line_no=request.POST.get('line'), |
|
493 | 490 | status_change=(ChangesetStatus.get_status_lbl(status) |
|
494 | 491 | if status and change_status |
|
495 | 492 | and allowed_to_change_status else None), |
|
496 | 493 | closing_pr=close_pr |
|
497 | 494 | ) |
|
498 | 495 | |
|
499 | 496 | action_logger(self.rhodecode_user, |
|
500 | 497 | 'user_commented_pull_request:%s' % pull_request_id, |
|
501 | 498 | c.rhodecode_db_repo, self.ip_addr, self.sa) |
|
502 | 499 | |
|
503 | 500 | if allowed_to_change_status: |
|
504 | 501 | # get status if set ! |
|
505 | 502 | if status and change_status: |
|
506 | 503 | ChangesetStatusModel().set_status( |
|
507 | 504 | c.rhodecode_db_repo.repo_id, |
|
508 | 505 | status, |
|
509 | 506 | c.rhodecode_user.user_id, |
|
510 | 507 | comm, |
|
511 | 508 | pull_request=pull_request_id |
|
512 | 509 | ) |
|
513 | 510 | |
|
514 | 511 | if close_pr: |
|
515 | 512 | if status in ['rejected', 'approved']: |
|
516 | 513 | PullRequestModel().close_pull_request(pull_request_id) |
|
517 | 514 | action_logger(self.rhodecode_user, |
|
518 | 515 | 'user_closed_pull_request:%s' % pull_request_id, |
|
519 | 516 | c.rhodecode_db_repo, self.ip_addr, self.sa) |
|
520 | 517 | else: |
|
521 | 518 | h.flash(_('Closing pull request on other statuses than ' |
|
522 | 519 | 'rejected or approved forbidden'), |
|
523 | 520 | category='warning') |
|
524 | 521 | |
|
525 | 522 | Session().commit() |
|
526 | 523 | |
|
527 | 524 | if not request.environ.get('HTTP_X_PARTIAL_XHR'): |
|
528 | 525 | return redirect(h.url('pullrequest_show', repo_name=repo_name, |
|
529 | 526 | pull_request_id=pull_request_id)) |
|
530 | 527 | |
|
531 | 528 | data = { |
|
532 | 529 | 'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))), |
|
533 | 530 | } |
|
534 | 531 | if comm: |
|
535 | 532 | c.co = comm |
|
536 | 533 | data.update(comm.get_dict()) |
|
537 | 534 | data.update({'rendered_text': |
|
538 | 535 | render('changeset/changeset_comment_block.html')}) |
|
539 | 536 | |
|
540 | 537 | return data |
|
541 | 538 | |
|
542 | 539 | @LoginRequired() |
|
543 | 540 | @NotAnonymous() |
|
544 | 541 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
545 | 542 | 'repository.admin') |
|
546 | 543 | @jsonify |
|
547 | 544 | def delete_comment(self, repo_name, comment_id): |
|
548 | 545 | co = ChangesetComment.get(comment_id) |
|
549 | 546 | if co.pull_request.is_closed(): |
|
550 | 547 | #don't allow deleting comments on closed pull request |
|
551 | 548 | raise HTTPForbidden() |
|
552 | 549 | |
|
553 | 550 | owner = co.author.user_id == c.rhodecode_user.user_id |
|
554 | 551 | if h.HasPermissionAny('hg.admin', 'repository.admin')() or owner: |
|
555 | 552 | ChangesetCommentsModel().delete(comment=co) |
|
556 | 553 | Session().commit() |
|
557 | 554 | return True |
|
558 | 555 | else: |
|
559 | 556 | raise HTTPForbidden() |
@@ -1,387 +1,385 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """ |
|
3 | 3 | rhodecode.lib.hooks |
|
4 | 4 | ~~~~~~~~~~~~~~~~~~~ |
|
5 | 5 | |
|
6 | 6 | Hooks runned by rhodecode |
|
7 | 7 | |
|
8 | 8 | :created_on: Aug 6, 2010 |
|
9 | 9 | :author: marcink |
|
10 | 10 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> |
|
11 | 11 | :license: GPLv3, see COPYING for more details. |
|
12 | 12 | """ |
|
13 | 13 | # This program is free software: you can redistribute it and/or modify |
|
14 | 14 | # it under the terms of the GNU General Public License as published by |
|
15 | 15 | # the Free Software Foundation, either version 3 of the License, or |
|
16 | 16 | # (at your option) any later version. |
|
17 | 17 | # |
|
18 | 18 | # This program is distributed in the hope that it will be useful, |
|
19 | 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
20 | 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
21 | 21 | # GNU General Public License for more details. |
|
22 | 22 | # |
|
23 | 23 | # You should have received a copy of the GNU General Public License |
|
24 | 24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
25 | 25 | import os |
|
26 | 26 | import sys |
|
27 | 27 | import time |
|
28 | 28 | import binascii |
|
29 | 29 | import traceback |
|
30 | 30 | from inspect import isfunction |
|
31 | 31 | |
|
32 |
from |
|
|
33 | from mercurial.node import nullrev | |
|
34 | ||
|
32 | from rhodecode.lib.vcs.utils.hgcompat import nullrev, revrange | |
|
35 | 33 | from rhodecode.lib import helpers as h |
|
36 | 34 | from rhodecode.lib.utils import action_logger |
|
37 | 35 | from rhodecode.lib.vcs.backends.base import EmptyChangeset |
|
38 | 36 | from rhodecode.lib.compat import json |
|
39 | 37 | from rhodecode.lib.exceptions import HTTPLockedRC |
|
40 | 38 | from rhodecode.lib.utils2 import safe_str, _extract_extras |
|
41 | 39 | from rhodecode.model.db import Repository, User |
|
42 | 40 | |
|
43 | 41 | |
|
44 | 42 | def _get_scm_size(alias, root_path): |
|
45 | 43 | |
|
46 | 44 | if not alias.startswith('.'): |
|
47 | 45 | alias += '.' |
|
48 | 46 | |
|
49 | 47 | size_scm, size_root = 0, 0 |
|
50 | 48 | for path, dirs, files in os.walk(safe_str(root_path)): |
|
51 | 49 | if path.find(alias) != -1: |
|
52 | 50 | for f in files: |
|
53 | 51 | try: |
|
54 | 52 | size_scm += os.path.getsize(os.path.join(path, f)) |
|
55 | 53 | except OSError: |
|
56 | 54 | pass |
|
57 | 55 | else: |
|
58 | 56 | for f in files: |
|
59 | 57 | try: |
|
60 | 58 | size_root += os.path.getsize(os.path.join(path, f)) |
|
61 | 59 | except OSError: |
|
62 | 60 | pass |
|
63 | 61 | |
|
64 | 62 | size_scm_f = h.format_byte_size(size_scm) |
|
65 | 63 | size_root_f = h.format_byte_size(size_root) |
|
66 | 64 | size_total_f = h.format_byte_size(size_root + size_scm) |
|
67 | 65 | |
|
68 | 66 | return size_scm_f, size_root_f, size_total_f |
|
69 | 67 | |
|
70 | 68 | |
|
71 | 69 | def repo_size(ui, repo, hooktype=None, **kwargs): |
|
72 | 70 | """ |
|
73 | 71 | Presents size of repository after push |
|
74 | 72 | |
|
75 | 73 | :param ui: |
|
76 | 74 | :param repo: |
|
77 | 75 | :param hooktype: |
|
78 | 76 | """ |
|
79 | 77 | |
|
80 | 78 | size_hg_f, size_root_f, size_total_f = _get_scm_size('.hg', repo.root) |
|
81 | 79 | |
|
82 | 80 | last_cs = repo[len(repo) - 1] |
|
83 | 81 | |
|
84 | 82 | msg = ('Repository size .hg:%s repo:%s total:%s\n' |
|
85 | 83 | 'Last revision is now r%s:%s\n') % ( |
|
86 | 84 | size_hg_f, size_root_f, size_total_f, last_cs.rev(), last_cs.hex()[:12] |
|
87 | 85 | ) |
|
88 | 86 | |
|
89 | 87 | sys.stdout.write(msg) |
|
90 | 88 | |
|
91 | 89 | |
|
92 | 90 | def pre_push(ui, repo, **kwargs): |
|
93 | 91 | # pre push function, currently used to ban pushing when |
|
94 | 92 | # repository is locked |
|
95 | 93 | ex = _extract_extras() |
|
96 | 94 | |
|
97 | 95 | usr = User.get_by_username(ex.username) |
|
98 | 96 | if ex.locked_by[0] and usr.user_id != int(ex.locked_by[0]): |
|
99 | 97 | locked_by = User.get(ex.locked_by[0]).username |
|
100 | 98 | # this exception is interpreted in git/hg middlewares and based |
|
101 | 99 | # on that proper return code is server to client |
|
102 | 100 | _http_ret = HTTPLockedRC(ex.repository, locked_by) |
|
103 | 101 | if str(_http_ret.code).startswith('2'): |
|
104 | 102 | #2xx Codes don't raise exceptions |
|
105 | 103 | sys.stdout.write(_http_ret.title) |
|
106 | 104 | else: |
|
107 | 105 | raise _http_ret |
|
108 | 106 | |
|
109 | 107 | |
|
110 | 108 | def pre_pull(ui, repo, **kwargs): |
|
111 | 109 | # pre push function, currently used to ban pushing when |
|
112 | 110 | # repository is locked |
|
113 | 111 | ex = _extract_extras() |
|
114 | 112 | if ex.locked_by[0]: |
|
115 | 113 | locked_by = User.get(ex.locked_by[0]).username |
|
116 | 114 | # this exception is interpreted in git/hg middlewares and based |
|
117 | 115 | # on that proper return code is server to client |
|
118 | 116 | _http_ret = HTTPLockedRC(ex.repository, locked_by) |
|
119 | 117 | if str(_http_ret.code).startswith('2'): |
|
120 | 118 | #2xx Codes don't raise exceptions |
|
121 | 119 | sys.stdout.write(_http_ret.title) |
|
122 | 120 | else: |
|
123 | 121 | raise _http_ret |
|
124 | 122 | |
|
125 | 123 | |
|
126 | 124 | def log_pull_action(ui, repo, **kwargs): |
|
127 | 125 | """ |
|
128 | 126 | Logs user last pull action |
|
129 | 127 | |
|
130 | 128 | :param ui: |
|
131 | 129 | :param repo: |
|
132 | 130 | """ |
|
133 | 131 | ex = _extract_extras() |
|
134 | 132 | |
|
135 | 133 | user = User.get_by_username(ex.username) |
|
136 | 134 | action = 'pull' |
|
137 | 135 | action_logger(user, action, ex.repository, ex.ip, commit=True) |
|
138 | 136 | # extension hook call |
|
139 | 137 | from rhodecode import EXTENSIONS |
|
140 | 138 | callback = getattr(EXTENSIONS, 'PULL_HOOK', None) |
|
141 | 139 | if isfunction(callback): |
|
142 | 140 | kw = {} |
|
143 | 141 | kw.update(ex) |
|
144 | 142 | callback(**kw) |
|
145 | 143 | |
|
146 | 144 | if ex.make_lock is not None and ex.make_lock: |
|
147 | 145 | Repository.lock(Repository.get_by_repo_name(ex.repository), user.user_id) |
|
148 | 146 | #msg = 'Made lock on repo `%s`' % repository |
|
149 | 147 | #sys.stdout.write(msg) |
|
150 | 148 | |
|
151 | 149 | if ex.locked_by[0]: |
|
152 | 150 | locked_by = User.get(ex.locked_by[0]).username |
|
153 | 151 | _http_ret = HTTPLockedRC(ex.repository, locked_by) |
|
154 | 152 | if str(_http_ret.code).startswith('2'): |
|
155 | 153 | #2xx Codes don't raise exceptions |
|
156 | 154 | sys.stdout.write(_http_ret.title) |
|
157 | 155 | return 0 |
|
158 | 156 | |
|
159 | 157 | |
|
160 | 158 | def log_push_action(ui, repo, **kwargs): |
|
161 | 159 | """ |
|
162 | 160 | Maps user last push action to new changeset id, from mercurial |
|
163 | 161 | |
|
164 | 162 | :param ui: |
|
165 | 163 | :param repo: repo object containing the `ui` object |
|
166 | 164 | """ |
|
167 | 165 | |
|
168 | 166 | ex = _extract_extras() |
|
169 | 167 | |
|
170 | 168 | action = ex.action + ':%s' |
|
171 | 169 | |
|
172 | 170 | if ex.scm == 'hg': |
|
173 | 171 | node = kwargs['node'] |
|
174 | 172 | |
|
175 | 173 | def get_revs(repo, rev_opt): |
|
176 | 174 | if rev_opt: |
|
177 | 175 | revs = revrange(repo, rev_opt) |
|
178 | 176 | |
|
179 | 177 | if len(revs) == 0: |
|
180 | 178 | return (nullrev, nullrev) |
|
181 | 179 | return (max(revs), min(revs)) |
|
182 | 180 | else: |
|
183 | 181 | return (len(repo) - 1, 0) |
|
184 | 182 | |
|
185 | 183 | stop, start = get_revs(repo, [node + ':']) |
|
186 | 184 | h = binascii.hexlify |
|
187 | 185 | revs = [h(repo[r].node()) for r in xrange(start, stop + 1)] |
|
188 | 186 | elif ex.scm == 'git': |
|
189 | 187 | revs = kwargs.get('_git_revs', []) |
|
190 | 188 | if '_git_revs' in kwargs: |
|
191 | 189 | kwargs.pop('_git_revs') |
|
192 | 190 | |
|
193 | 191 | action = action % ','.join(revs) |
|
194 | 192 | |
|
195 | 193 | action_logger(ex.username, action, ex.repository, ex.ip, commit=True) |
|
196 | 194 | |
|
197 | 195 | # extension hook call |
|
198 | 196 | from rhodecode import EXTENSIONS |
|
199 | 197 | callback = getattr(EXTENSIONS, 'PUSH_HOOK', None) |
|
200 | 198 | if isfunction(callback): |
|
201 | 199 | kw = {'pushed_revs': revs} |
|
202 | 200 | kw.update(ex) |
|
203 | 201 | callback(**kw) |
|
204 | 202 | |
|
205 | 203 | if ex.make_lock is not None and not ex.make_lock: |
|
206 | 204 | Repository.unlock(Repository.get_by_repo_name(ex.repository)) |
|
207 | 205 | msg = 'Released lock on repo `%s`\n' % ex.repository |
|
208 | 206 | sys.stdout.write(msg) |
|
209 | 207 | |
|
210 | 208 | if ex.locked_by[0]: |
|
211 | 209 | locked_by = User.get(ex.locked_by[0]).username |
|
212 | 210 | _http_ret = HTTPLockedRC(ex.repository, locked_by) |
|
213 | 211 | if str(_http_ret.code).startswith('2'): |
|
214 | 212 | #2xx Codes don't raise exceptions |
|
215 | 213 | sys.stdout.write(_http_ret.title) |
|
216 | 214 | |
|
217 | 215 | return 0 |
|
218 | 216 | |
|
219 | 217 | |
|
220 | 218 | def log_create_repository(repository_dict, created_by, **kwargs): |
|
221 | 219 | """ |
|
222 | 220 | Post create repository Hook. This is a dummy function for admins to re-use |
|
223 | 221 | if needed. It's taken from rhodecode-extensions module and executed |
|
224 | 222 | if present |
|
225 | 223 | |
|
226 | 224 | :param repository: dict dump of repository object |
|
227 | 225 | :param created_by: username who created repository |
|
228 | 226 | |
|
229 | 227 | available keys of repository_dict: |
|
230 | 228 | |
|
231 | 229 | 'repo_type', |
|
232 | 230 | 'description', |
|
233 | 231 | 'private', |
|
234 | 232 | 'created_on', |
|
235 | 233 | 'enable_downloads', |
|
236 | 234 | 'repo_id', |
|
237 | 235 | 'user_id', |
|
238 | 236 | 'enable_statistics', |
|
239 | 237 | 'clone_uri', |
|
240 | 238 | 'fork_id', |
|
241 | 239 | 'group_id', |
|
242 | 240 | 'repo_name' |
|
243 | 241 | |
|
244 | 242 | """ |
|
245 | 243 | from rhodecode import EXTENSIONS |
|
246 | 244 | callback = getattr(EXTENSIONS, 'CREATE_REPO_HOOK', None) |
|
247 | 245 | if isfunction(callback): |
|
248 | 246 | kw = {} |
|
249 | 247 | kw.update(repository_dict) |
|
250 | 248 | kw.update({'created_by': created_by}) |
|
251 | 249 | kw.update(kwargs) |
|
252 | 250 | return callback(**kw) |
|
253 | 251 | |
|
254 | 252 | return 0 |
|
255 | 253 | |
|
256 | 254 | |
|
257 | 255 | def log_delete_repository(repository_dict, deleted_by, **kwargs): |
|
258 | 256 | """ |
|
259 | 257 | Post delete repository Hook. This is a dummy function for admins to re-use |
|
260 | 258 | if needed. It's taken from rhodecode-extensions module and executed |
|
261 | 259 | if present |
|
262 | 260 | |
|
263 | 261 | :param repository: dict dump of repository object |
|
264 | 262 | :param deleted_by: username who deleted the repository |
|
265 | 263 | |
|
266 | 264 | available keys of repository_dict: |
|
267 | 265 | |
|
268 | 266 | 'repo_type', |
|
269 | 267 | 'description', |
|
270 | 268 | 'private', |
|
271 | 269 | 'created_on', |
|
272 | 270 | 'enable_downloads', |
|
273 | 271 | 'repo_id', |
|
274 | 272 | 'user_id', |
|
275 | 273 | 'enable_statistics', |
|
276 | 274 | 'clone_uri', |
|
277 | 275 | 'fork_id', |
|
278 | 276 | 'group_id', |
|
279 | 277 | 'repo_name' |
|
280 | 278 | |
|
281 | 279 | """ |
|
282 | 280 | from rhodecode import EXTENSIONS |
|
283 | 281 | callback = getattr(EXTENSIONS, 'DELETE_REPO_HOOK', None) |
|
284 | 282 | if isfunction(callback): |
|
285 | 283 | kw = {} |
|
286 | 284 | kw.update(repository_dict) |
|
287 | 285 | kw.update({'deleted_by': deleted_by, |
|
288 | 286 | 'deleted_on': time.time()}) |
|
289 | 287 | kw.update(kwargs) |
|
290 | 288 | return callback(**kw) |
|
291 | 289 | |
|
292 | 290 | return 0 |
|
293 | 291 | |
|
294 | 292 | |
|
295 | 293 | handle_git_pre_receive = (lambda repo_path, revs, env: |
|
296 | 294 | handle_git_receive(repo_path, revs, env, hook_type='pre')) |
|
297 | 295 | handle_git_post_receive = (lambda repo_path, revs, env: |
|
298 | 296 | handle_git_receive(repo_path, revs, env, hook_type='post')) |
|
299 | 297 | |
|
300 | 298 | |
|
301 | 299 | def handle_git_receive(repo_path, revs, env, hook_type='post'): |
|
302 | 300 | """ |
|
303 | 301 | A really hacky method that is runned by git post-receive hook and logs |
|
304 | 302 | an push action together with pushed revisions. It's executed by subprocess |
|
305 | 303 | thus needs all info to be able to create a on the fly pylons enviroment, |
|
306 | 304 | connect to database and run the logging code. Hacky as sh*t but works. |
|
307 | 305 | |
|
308 | 306 | :param repo_path: |
|
309 | 307 | :param revs: |
|
310 | 308 | :param env: |
|
311 | 309 | """ |
|
312 | 310 | from paste.deploy import appconfig |
|
313 | 311 | from sqlalchemy import engine_from_config |
|
314 | 312 | from rhodecode.config.environment import load_environment |
|
315 | 313 | from rhodecode.model import init_model |
|
316 | 314 | from rhodecode.model.db import RhodeCodeUi |
|
317 | 315 | from rhodecode.lib.utils import make_ui |
|
318 | 316 | extras = _extract_extras(env) |
|
319 | 317 | |
|
320 | 318 | path, ini_name = os.path.split(extras['config']) |
|
321 | 319 | conf = appconfig('config:%s' % ini_name, relative_to=path) |
|
322 | 320 | load_environment(conf.global_conf, conf.local_conf) |
|
323 | 321 | |
|
324 | 322 | engine = engine_from_config(conf, 'sqlalchemy.db1.') |
|
325 | 323 | init_model(engine) |
|
326 | 324 | |
|
327 | 325 | baseui = make_ui('db') |
|
328 | 326 | # fix if it's not a bare repo |
|
329 | 327 | if repo_path.endswith(os.sep + '.git'): |
|
330 | 328 | repo_path = repo_path[:-5] |
|
331 | 329 | |
|
332 | 330 | repo = Repository.get_by_full_path(repo_path) |
|
333 | 331 | if not repo: |
|
334 | 332 | raise OSError('Repository %s not found in database' |
|
335 | 333 | % (safe_str(repo_path))) |
|
336 | 334 | |
|
337 | 335 | _hooks = dict(baseui.configitems('hooks')) or {} |
|
338 | 336 | |
|
339 | 337 | if hook_type == 'pre': |
|
340 | 338 | repo = repo.scm_instance |
|
341 | 339 | else: |
|
342 | 340 | #post push shouldn't use the cached instance never |
|
343 | 341 | repo = repo.scm_instance_no_cache() |
|
344 | 342 | |
|
345 | 343 | if hook_type == 'pre': |
|
346 | 344 | pre_push(baseui, repo) |
|
347 | 345 | |
|
348 | 346 | # if push hook is enabled via web interface |
|
349 | 347 | elif hook_type == 'post' and _hooks.get(RhodeCodeUi.HOOK_PUSH): |
|
350 | 348 | |
|
351 | 349 | rev_data = [] |
|
352 | 350 | for l in revs: |
|
353 | 351 | old_rev, new_rev, ref = l.split(' ') |
|
354 | 352 | _ref_data = ref.split('/') |
|
355 | 353 | if _ref_data[1] in ['tags', 'heads']: |
|
356 | 354 | rev_data.append({'old_rev': old_rev, |
|
357 | 355 | 'new_rev': new_rev, |
|
358 | 356 | 'ref': ref, |
|
359 | 357 | 'type': _ref_data[1], |
|
360 | 358 | 'name': _ref_data[2].strip()}) |
|
361 | 359 | |
|
362 | 360 | git_revs = [] |
|
363 | 361 | for push_ref in rev_data: |
|
364 | 362 | _type = push_ref['type'] |
|
365 | 363 | if _type == 'heads': |
|
366 | 364 | if push_ref['old_rev'] == EmptyChangeset().raw_id: |
|
367 | 365 | cmd = "for-each-ref --format='%(refname)' 'refs/heads/*'" |
|
368 | 366 | heads = repo.run_git_command(cmd)[0] |
|
369 | 367 | heads = heads.replace(push_ref['ref'], '') |
|
370 | 368 | heads = ' '.join(map(lambda c: c.strip('\n').strip(), |
|
371 | 369 | heads.splitlines())) |
|
372 | 370 | cmd = (('log %(new_rev)s' % push_ref) + |
|
373 | 371 | ' --reverse --pretty=format:"%H" --not ' + heads) |
|
374 | 372 | git_revs += repo.run_git_command(cmd)[0].splitlines() |
|
375 | 373 | |
|
376 | 374 | elif push_ref['new_rev'] == EmptyChangeset().raw_id: |
|
377 | 375 | #delete branch case |
|
378 | 376 | git_revs += ['delete_branch=>%s' % push_ref['name']] |
|
379 | 377 | else: |
|
380 | 378 | cmd = (('log %(old_rev)s..%(new_rev)s' % push_ref) + |
|
381 | 379 | ' --reverse --pretty=format:"%H"') |
|
382 | 380 | git_revs += repo.run_git_command(cmd)[0].splitlines() |
|
383 | 381 | |
|
384 | 382 | elif _type == 'tags': |
|
385 | 383 | git_revs += ['tag=>%s' % push_ref['name']] |
|
386 | 384 | |
|
387 | 385 | log_push_action(baseui, repo, _git_revs=git_revs) |
@@ -1,286 +1,285 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """ |
|
3 | 3 | rhodecode.lib.middleware.simplehg |
|
4 | 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
5 | 5 | |
|
6 | 6 | SimpleHG middleware for handling mercurial protocol request |
|
7 | 7 | (push/clone etc.). It's implemented with basic auth function |
|
8 | 8 | |
|
9 | 9 | :created_on: Apr 28, 2010 |
|
10 | 10 | :author: marcink |
|
11 | 11 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> |
|
12 | 12 | :license: GPLv3, see COPYING for more details. |
|
13 | 13 | """ |
|
14 | 14 | # This program is free software: you can redistribute it and/or modify |
|
15 | 15 | # it under the terms of the GNU General Public License as published by |
|
16 | 16 | # the Free Software Foundation, either version 3 of the License, or |
|
17 | 17 | # (at your option) any later version. |
|
18 | 18 | # |
|
19 | 19 | # This program is distributed in the hope that it will be useful, |
|
20 | 20 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
21 | 21 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
22 | 22 | # GNU General Public License for more details. |
|
23 | 23 | # |
|
24 | 24 | # You should have received a copy of the GNU General Public License |
|
25 | 25 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
26 | 26 | |
|
27 | 27 | import os |
|
28 | 28 | import logging |
|
29 | 29 | import traceback |
|
30 | 30 | |
|
31 | from mercurial.error import RepoError | |
|
32 | from mercurial.hgweb import hgweb_mod | |
|
33 | 31 | |
|
34 | 32 | from paste.httpheaders import REMOTE_USER, AUTH_TYPE |
|
35 | 33 | from webob.exc import HTTPNotFound, HTTPForbidden, HTTPInternalServerError, \ |
|
36 | 34 | HTTPBadRequest, HTTPNotAcceptable |
|
37 | 35 | |
|
38 | 36 | from rhodecode.lib.utils2 import safe_str, fix_PATH, get_server_url,\ |
|
39 | 37 | _set_extras |
|
40 | 38 | from rhodecode.lib.base import BaseVCSController |
|
41 | 39 | from rhodecode.lib.auth import get_container_username |
|
42 | 40 | from rhodecode.lib.utils import make_ui, is_valid_repo, ui_sections |
|
43 | 41 | from rhodecode.lib.compat import json |
|
42 | from rhodecode.lib.vcs.utils.hgcompat import RepoError, hgweb_mod | |
|
44 | 43 | from rhodecode.model.db import User |
|
45 | 44 | from rhodecode.lib.exceptions import HTTPLockedRC |
|
46 | 45 | |
|
47 | 46 | |
|
48 | 47 | log = logging.getLogger(__name__) |
|
49 | 48 | |
|
50 | 49 | |
|
51 | 50 | def is_mercurial(environ): |
|
52 | 51 | """ |
|
53 | 52 | Returns True if request's target is mercurial server - header |
|
54 | 53 | ``HTTP_ACCEPT`` of such request would start with ``application/mercurial``. |
|
55 | 54 | """ |
|
56 | 55 | http_accept = environ.get('HTTP_ACCEPT') |
|
57 | 56 | path_info = environ['PATH_INFO'] |
|
58 | 57 | if http_accept and http_accept.startswith('application/mercurial'): |
|
59 | 58 | ishg_path = True |
|
60 | 59 | else: |
|
61 | 60 | ishg_path = False |
|
62 | 61 | |
|
63 | 62 | log.debug('pathinfo: %s detected as HG %s' % ( |
|
64 | 63 | path_info, ishg_path) |
|
65 | 64 | ) |
|
66 | 65 | return ishg_path |
|
67 | 66 | |
|
68 | 67 | |
|
69 | 68 | class SimpleHg(BaseVCSController): |
|
70 | 69 | |
|
71 | 70 | def _handle_request(self, environ, start_response): |
|
72 | 71 | if not is_mercurial(environ): |
|
73 | 72 | return self.application(environ, start_response) |
|
74 | 73 | if not self._check_ssl(environ, start_response): |
|
75 | 74 | return HTTPNotAcceptable('SSL REQUIRED !')(environ, start_response) |
|
76 | 75 | |
|
77 | 76 | ip_addr = self._get_ip_addr(environ) |
|
78 | 77 | username = None |
|
79 | 78 | # skip passing error to error controller |
|
80 | 79 | environ['pylons.status_code_redirect'] = True |
|
81 | 80 | |
|
82 | 81 | #====================================================================== |
|
83 | 82 | # EXTRACT REPOSITORY NAME FROM ENV |
|
84 | 83 | #====================================================================== |
|
85 | 84 | try: |
|
86 | 85 | repo_name = environ['REPO_NAME'] = self.__get_repository(environ) |
|
87 | 86 | log.debug('Extracted repo name is %s' % repo_name) |
|
88 | 87 | except Exception: |
|
89 | 88 | return HTTPInternalServerError()(environ, start_response) |
|
90 | 89 | |
|
91 | 90 | # quick check if that dir exists... |
|
92 | 91 | if not is_valid_repo(repo_name, self.basepath, 'hg'): |
|
93 | 92 | return HTTPNotFound()(environ, start_response) |
|
94 | 93 | |
|
95 | 94 | #====================================================================== |
|
96 | 95 | # GET ACTION PULL or PUSH |
|
97 | 96 | #====================================================================== |
|
98 | 97 | action = self.__get_action(environ) |
|
99 | 98 | |
|
100 | 99 | #====================================================================== |
|
101 | 100 | # CHECK ANONYMOUS PERMISSION |
|
102 | 101 | #====================================================================== |
|
103 | 102 | if action in ['pull', 'push']: |
|
104 | 103 | anonymous_user = self.__get_user('default') |
|
105 | 104 | username = anonymous_user.username |
|
106 | 105 | anonymous_perm = self._check_permission(action, anonymous_user, |
|
107 | 106 | repo_name, ip_addr) |
|
108 | 107 | |
|
109 | 108 | if not anonymous_perm or not anonymous_user.active: |
|
110 | 109 | if not anonymous_perm: |
|
111 | 110 | log.debug('Not enough credentials to access this ' |
|
112 | 111 | 'repository as anonymous user') |
|
113 | 112 | if not anonymous_user.active: |
|
114 | 113 | log.debug('Anonymous access is disabled, running ' |
|
115 | 114 | 'authentication') |
|
116 | 115 | #============================================================== |
|
117 | 116 | # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE |
|
118 | 117 | # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS |
|
119 | 118 | #============================================================== |
|
120 | 119 | |
|
121 | 120 | # Attempting to retrieve username from the container |
|
122 | 121 | username = get_container_username(environ, self.config) |
|
123 | 122 | |
|
124 | 123 | # If not authenticated by the container, running basic auth |
|
125 | 124 | if not username: |
|
126 | 125 | self.authenticate.realm = \ |
|
127 | 126 | safe_str(self.config['rhodecode_realm']) |
|
128 | 127 | result = self.authenticate(environ) |
|
129 | 128 | if isinstance(result, str): |
|
130 | 129 | AUTH_TYPE.update(environ, 'basic') |
|
131 | 130 | REMOTE_USER.update(environ, result) |
|
132 | 131 | username = result |
|
133 | 132 | else: |
|
134 | 133 | return result.wsgi_application(environ, start_response) |
|
135 | 134 | |
|
136 | 135 | #============================================================== |
|
137 | 136 | # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME |
|
138 | 137 | #============================================================== |
|
139 | 138 | try: |
|
140 | 139 | user = self.__get_user(username) |
|
141 | 140 | if user is None or not user.active: |
|
142 | 141 | return HTTPForbidden()(environ, start_response) |
|
143 | 142 | username = user.username |
|
144 | 143 | except Exception: |
|
145 | 144 | log.error(traceback.format_exc()) |
|
146 | 145 | return HTTPInternalServerError()(environ, start_response) |
|
147 | 146 | |
|
148 | 147 | #check permissions for this repository |
|
149 | 148 | perm = self._check_permission(action, user, repo_name, ip_addr) |
|
150 | 149 | if not perm: |
|
151 | 150 | return HTTPForbidden()(environ, start_response) |
|
152 | 151 | |
|
153 | 152 | # extras are injected into mercurial UI object and later available |
|
154 | 153 | # in hg hooks executed by rhodecode |
|
155 | 154 | from rhodecode import CONFIG |
|
156 | 155 | server_url = get_server_url(environ) |
|
157 | 156 | extras = { |
|
158 | 157 | 'ip': ip_addr, |
|
159 | 158 | 'username': username, |
|
160 | 159 | 'action': action, |
|
161 | 160 | 'repository': repo_name, |
|
162 | 161 | 'scm': 'hg', |
|
163 | 162 | 'config': CONFIG['__file__'], |
|
164 | 163 | 'server_url': server_url, |
|
165 | 164 | 'make_lock': None, |
|
166 | 165 | 'locked_by': [None, None] |
|
167 | 166 | } |
|
168 | 167 | #====================================================================== |
|
169 | 168 | # MERCURIAL REQUEST HANDLING |
|
170 | 169 | #====================================================================== |
|
171 | 170 | str_repo_name = safe_str(repo_name) |
|
172 | 171 | repo_path = os.path.join(safe_str(self.basepath), str_repo_name) |
|
173 | 172 | log.debug('Repository path is %s' % repo_path) |
|
174 | 173 | |
|
175 | 174 | # CHECK LOCKING only if it's not ANONYMOUS USER |
|
176 | 175 | if username != User.DEFAULT_USER: |
|
177 | 176 | log.debug('Checking locking on repository') |
|
178 | 177 | (make_lock, |
|
179 | 178 | locked, |
|
180 | 179 | locked_by) = self._check_locking_state( |
|
181 | 180 | environ=environ, action=action, |
|
182 | 181 | repo=repo_name, user_id=user.user_id |
|
183 | 182 | ) |
|
184 | 183 | # store the make_lock for later evaluation in hooks |
|
185 | 184 | extras.update({'make_lock': make_lock, |
|
186 | 185 | 'locked_by': locked_by}) |
|
187 | 186 | |
|
188 | 187 | fix_PATH() |
|
189 | 188 | log.debug('HOOKS extras is %s' % extras) |
|
190 | 189 | baseui = make_ui('db') |
|
191 | 190 | self.__inject_extras(repo_path, baseui, extras) |
|
192 | 191 | |
|
193 | 192 | try: |
|
194 | 193 | log.info('%s action on HG repo "%s" by "%s" from %s' % |
|
195 | 194 | (action, str_repo_name, safe_str(username), ip_addr)) |
|
196 | 195 | app = self.__make_app(repo_path, baseui, extras) |
|
197 | 196 | return app(environ, start_response) |
|
198 | 197 | except RepoError, e: |
|
199 | 198 | if str(e).find('not found') != -1: |
|
200 | 199 | return HTTPNotFound()(environ, start_response) |
|
201 | 200 | except HTTPLockedRC, e: |
|
202 | 201 | _code = CONFIG.get('lock_ret_code') |
|
203 | 202 | log.debug('Repository LOCKED ret code %s!' % (_code)) |
|
204 | 203 | return e(environ, start_response) |
|
205 | 204 | except Exception: |
|
206 | 205 | log.error(traceback.format_exc()) |
|
207 | 206 | return HTTPInternalServerError()(environ, start_response) |
|
208 | 207 | finally: |
|
209 | 208 | # invalidate cache on push |
|
210 | 209 | if action == 'push': |
|
211 | 210 | self._invalidate_cache(repo_name) |
|
212 | 211 | |
|
213 | 212 | def __make_app(self, repo_name, baseui, extras): |
|
214 | 213 | """ |
|
215 | 214 | Make an wsgi application using hgweb, and inject generated baseui |
|
216 | 215 | instance, additionally inject some extras into ui object |
|
217 | 216 | """ |
|
218 | 217 | return hgweb_mod.hgweb(repo_name, name=repo_name, baseui=baseui) |
|
219 | 218 | |
|
220 | 219 | def __get_repository(self, environ): |
|
221 | 220 | """ |
|
222 | 221 | Get's repository name out of PATH_INFO header |
|
223 | 222 | |
|
224 | 223 | :param environ: environ where PATH_INFO is stored |
|
225 | 224 | """ |
|
226 | 225 | try: |
|
227 | 226 | environ['PATH_INFO'] = self._get_by_id(environ['PATH_INFO']) |
|
228 | 227 | repo_name = '/'.join(environ['PATH_INFO'].split('/')[1:]) |
|
229 | 228 | if repo_name.endswith('/'): |
|
230 | 229 | repo_name = repo_name.rstrip('/') |
|
231 | 230 | except Exception: |
|
232 | 231 | log.error(traceback.format_exc()) |
|
233 | 232 | raise |
|
234 | 233 | |
|
235 | 234 | return repo_name |
|
236 | 235 | |
|
237 | 236 | def __get_user(self, username): |
|
238 | 237 | return User.get_by_username(username) |
|
239 | 238 | |
|
240 | 239 | def __get_action(self, environ): |
|
241 | 240 | """ |
|
242 | 241 | Maps mercurial request commands into a clone,pull or push command. |
|
243 | 242 | This should always return a valid command string |
|
244 | 243 | |
|
245 | 244 | :param environ: |
|
246 | 245 | """ |
|
247 | 246 | mapping = {'changegroup': 'pull', |
|
248 | 247 | 'changegroupsubset': 'pull', |
|
249 | 248 | 'stream_out': 'pull', |
|
250 | 249 | 'listkeys': 'pull', |
|
251 | 250 | 'unbundle': 'push', |
|
252 | 251 | 'pushkey': 'push', } |
|
253 | 252 | for qry in environ['QUERY_STRING'].split('&'): |
|
254 | 253 | if qry.startswith('cmd'): |
|
255 | 254 | cmd = qry.split('=')[-1] |
|
256 | 255 | if cmd in mapping: |
|
257 | 256 | return mapping[cmd] |
|
258 | 257 | |
|
259 | 258 | return 'pull' |
|
260 | 259 | |
|
261 | 260 | raise Exception('Unable to detect pull/push action !!' |
|
262 | 261 | 'Are you using non standard command or client ?') |
|
263 | 262 | |
|
264 | 263 | def __inject_extras(self, repo_path, baseui, extras={}): |
|
265 | 264 | """ |
|
266 | 265 | Injects some extra params into baseui instance |
|
267 | 266 | |
|
268 | 267 | also overwrites global settings with those takes from local hgrc file |
|
269 | 268 | |
|
270 | 269 | :param baseui: baseui instance |
|
271 | 270 | :param extras: dict with extra params to put into baseui |
|
272 | 271 | """ |
|
273 | 272 | |
|
274 | 273 | hgrc = os.path.join(repo_path, '.hg', 'hgrc') |
|
275 | 274 | |
|
276 | 275 | # make our hgweb quiet so it doesn't print output |
|
277 | 276 | baseui.setconfig('ui', 'quiet', 'true') |
|
278 | 277 | |
|
279 | 278 | repoui = make_ui('file', hgrc, False) |
|
280 | 279 | |
|
281 | 280 | if repoui: |
|
282 | 281 | #overwrite our ui instance with the section from hgrc file |
|
283 | 282 | for section in ui_sections: |
|
284 | 283 | for k, v in repoui.configitems(section): |
|
285 | 284 | baseui.setconfig(section, k, v) |
|
286 | 285 | _set_extras(extras) |
@@ -1,816 +1,815 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """ |
|
3 | 3 | rhodecode.lib.utils |
|
4 | 4 | ~~~~~~~~~~~~~~~~~~~ |
|
5 | 5 | |
|
6 | 6 | Utilities library for RhodeCode |
|
7 | 7 | |
|
8 | 8 | :created_on: Apr 18, 2010 |
|
9 | 9 | :author: marcink |
|
10 | 10 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> |
|
11 | 11 | :license: GPLv3, see COPYING for more details. |
|
12 | 12 | """ |
|
13 | 13 | # This program is free software: you can redistribute it and/or modify |
|
14 | 14 | # it under the terms of the GNU General Public License as published by |
|
15 | 15 | # the Free Software Foundation, either version 3 of the License, or |
|
16 | 16 | # (at your option) any later version. |
|
17 | 17 | # |
|
18 | 18 | # This program is distributed in the hope that it will be useful, |
|
19 | 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
20 | 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
21 | 21 | # GNU General Public License for more details. |
|
22 | 22 | # |
|
23 | 23 | # You should have received a copy of the GNU General Public License |
|
24 | 24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
25 | 25 | |
|
26 | 26 | import os |
|
27 | 27 | import re |
|
28 | 28 | import logging |
|
29 | 29 | import datetime |
|
30 | 30 | import traceback |
|
31 | 31 | import paste |
|
32 | 32 | import beaker |
|
33 | 33 | import tarfile |
|
34 | 34 | import shutil |
|
35 | 35 | import decorator |
|
36 | 36 | import warnings |
|
37 | 37 | from os.path import abspath |
|
38 | 38 | from os.path import dirname as dn, join as jn |
|
39 | 39 | |
|
40 | 40 | from paste.script.command import Command, BadCommand |
|
41 | 41 | |
|
42 | from mercurial import ui, config | |
|
43 | ||
|
44 | 42 | from webhelpers.text import collapse, remove_formatting, strip_tags |
|
45 | 43 | |
|
46 | 44 | from rhodecode.lib.vcs import get_backend |
|
47 | 45 | from rhodecode.lib.vcs.backends.base import BaseChangeset |
|
48 | 46 | from rhodecode.lib.vcs.utils.lazy import LazyProperty |
|
47 | from rhodecode.lib.vcs.utils.hgcompat import ui, config | |
|
49 | 48 | from rhodecode.lib.vcs.utils.helpers import get_scm |
|
50 | 49 | from rhodecode.lib.vcs.exceptions import VCSError |
|
51 | 50 | |
|
52 | 51 | from rhodecode.lib.caching_query import FromCache |
|
53 | 52 | |
|
54 | 53 | from rhodecode.model import meta |
|
55 | 54 | from rhodecode.model.db import Repository, User, RhodeCodeUi, \ |
|
56 | 55 | UserLog, RepoGroup, RhodeCodeSetting, CacheInvalidation, UserGroup |
|
57 | 56 | from rhodecode.model.meta import Session |
|
58 | 57 | from rhodecode.model.repos_group import ReposGroupModel |
|
59 | 58 | from rhodecode.lib.utils2 import safe_str, safe_unicode |
|
60 | 59 | from rhodecode.lib.vcs.utils.fakemod import create_module |
|
61 | 60 | from rhodecode.model.users_group import UserGroupModel |
|
62 | 61 | |
|
63 | 62 | log = logging.getLogger(__name__) |
|
64 | 63 | |
|
65 | 64 | REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*') |
|
66 | 65 | |
|
67 | 66 | |
|
def recursive_replace(str_, replace=' '):
    """
    Collapse every run of *replace* characters in *str_* to a single one.

    :param str_: input string
    :param replace: character whose repeated runs get squashed

    Examples::
        >>> recursive_replace("Mighty---Mighty-Bo--sstones", '-')
        'Mighty-Mighty-Bo-sstones'
    """
    doubled = replace * 2
    # iterative form of the original tail recursion: keep squashing pairs
    # until no doubled occurrence remains
    while doubled in str_:
        str_ = str_.replace(doubled, replace)
    return str_
|
85 | 84 | |
|
86 | 85 | |
|
def repo_name_slug(value):
    """
    Build a slug from a repository name.

    Called on every repository creation/modification to keep bad
    characters out of repo names.
    """
    bad_chars = """`?=[]\;'"<>,/~!@#$%^&*()+{}|: """
    # strip markup first, then replace each forbidden char with a dash
    slug = strip_tags(remove_formatting(value))
    for ch in bad_chars:
        slug = slug.replace(ch, '-')
    # squash dash runs and collapse whitespace
    slug = recursive_replace(slug, '-')
    return collapse(slug, '-')
|
102 | 101 | |
|
103 | 102 | |
|
104 | 103 | #============================================================================== |
|
105 | 104 | # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS |
|
106 | 105 | #============================================================================== |
|
def get_repo_slug(request):
    """Extract the repository name from the routing dict, without any
    trailing slash; returns the raw (falsy) value when absent."""
    repo_name = request.environ['pylons.routes_dict'].get('repo_name')
    return repo_name.rstrip('/') if repo_name else repo_name
|
112 | 111 | |
|
113 | 112 | |
|
def get_repos_group_slug(request):
    """Extract the repository-group name from the routing dict, without
    any trailing slash; returns the raw (falsy) value when absent."""
    group_name = request.environ['pylons.routes_dict'].get('group_name')
    return group_name.rstrip('/') if group_name else group_name
|
119 | 118 | |
|
120 | 119 | |
|
def get_user_group_slug(request):
    """
    Resolve the 'id' routing param into a user-group name.

    Returns the group's users_group_name when the id resolves; any lookup
    failure is logged and swallowed, leaving whatever value the lookup
    produced (possibly None).
    """
    group = request.environ['pylons.routes_dict'].get('id')
    try:
        group = UserGroup.get(group)
        if group:
            group = group.users_group_name
    except Exception:
        # slug extraction must never blow up -- catch all failures here
        log.debug(traceback.format_exc())
    return group
|
133 | 132 | |
|
134 | 133 | |
|
def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
    """
    Record an action performed by a user into the UserLog table.

    :param user: acting user -- object with a user_id attr, or username string
    :param action: one of the predefined action strings (for easy translation)
    :param repo: repository the action touched -- object with repo_id, or
        repository name string
    :param ipaddr: optional source ip address of the action
    :param sa: optional sqlalchemy session (a fresh one is used if omitted)
    :param commit: when True, commit the session after inserting the row
    """
    sa = sa or meta.Session()

    try:
        # resolve the acting user into a db object
        if hasattr(user, 'user_id'):
            db_user = User.get(user.user_id)
        elif isinstance(user, basestring):
            db_user = User.get_by_username(user)
        else:
            raise Exception('You have to provide a user object or a username')

        # resolve the repository; plain strings are looked up by name
        if hasattr(repo, 'repo_id'):
            db_repo = Repository.get(repo.repo_id)
            repo_name = db_repo.repo_name
        elif isinstance(repo, basestring):
            repo_name = repo.lstrip('/')
            db_repo = Repository.get_by_repo_name(repo_name)
        else:
            db_repo = None
            repo_name = ''

        entry = UserLog()
        entry.user_id = db_user.user_id
        entry.username = db_user.username
        entry.action = safe_unicode(action)
        entry.repository = db_repo
        entry.repository_name = repo_name
        entry.action_date = datetime.datetime.now()
        entry.user_ip = ipaddr
        sa.add(entry)

        log.info('Logging action:%s on %s by user:%s ip:%s' %
                 (action, safe_unicode(repo), db_user, ipaddr))
        if commit:
            sa.commit()
    except Exception:
        log.error(traceback.format_exc())
        raise
|
190 | 189 | |
|
191 | 190 | |
|
def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
    """
    Scan *path* for repositories and return an iterator of
    (name, (type, path)) tuples.

    :param path: directory to scan for repositories
    :param recursive: also descend into non-repo subdirectories
    :param skip_removed_repos: ignore dirs matching the removed-repo pattern
    """
    # drop the trailing separator for cleaner relative names
    path = path.rstrip(os.sep)
    log.debug('now scanning in %s location recursive:%s...' % (path, recursive))

    def _walk(top):
        if not os.access(top, os.W_OK):
            log.warn('ignoring repo path without write access: %s', top)
            return
        for entry in os.listdir(top):
            if os.path.isfile(os.path.join(top, entry)):
                continue
            full = os.path.join(top, entry)

            # skip repos renamed by the "remove" operation
            if skip_removed_repos and REMOVED_REPO_PAT.match(entry):
                continue

            # skip .<something> dirs
            if entry.startswith('.'):
                continue

            try:
                scm_info = get_scm(full)
                yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
            except VCSError:
                if not recursive:
                    continue
                # not a repo itself -- maybe it contains repos (recursive scan)
                if os.path.isdir(full):
                    for inner in _walk(full):
                        yield inner

    return _walk(path)
|
234 | 233 | |
|
235 | 234 | |
|
def is_valid_repo(repo_name, base_path, scm=None):
    """
    Check whether *repo_name* under *base_path* points at a valid repository.

    :param repo_name: repository name relative to base_path
    :param base_path: repositories root
    :param scm: optional scm alias; when given, the detected scm must
        also match it
    :return: True if the path is a valid repository (of type *scm* when given)
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_name))

    try:
        detected = get_scm(full_path)
    except VCSError:
        return False
    if scm:
        return detected[0] == scm
    return True
|
257 | 256 | |
|
258 | 257 | |
|
def is_valid_repos_group(repos_group_name, base_path, skip_path_check=False):
    """
    Check whether the given path is a repository group (and not a repo).

    :param repos_group_name: group name relative to base_path
    :param base_path: repositories root
    :param skip_path_check: accept the name without requiring the dir to exist
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repos_group_name))

    # a valid repository can never be a group
    if is_valid_repo(repos_group_name, base_path):
        return False

    try:
        # bare git repos need checking one level up, since we might match
        # branches/hooks/info/objects or similar dirs inside a bare repo
        get_scm(os.path.dirname(full_path))
        return False
    except VCSError:
        pass

    # finally require the path to actually exist (unless check is skipped)
    return bool(skip_path_check or os.path.isdir(full_path))
|
286 | 285 | |
|
287 | 286 | |
|
288 | 287 | def ask_ok(prompt, retries=4, complaint='Yes or no please!'): |
|
289 | 288 | while True: |
|
290 | 289 | ok = raw_input(prompt) |
|
291 | 290 | if ok in ('y', 'ye', 'yes'): |
|
292 | 291 | return True |
|
293 | 292 | if ok in ('n', 'no', 'nop', 'nope'): |
|
294 | 293 | return False |
|
295 | 294 | retries = retries - 1 |
|
296 | 295 | if retries < 0: |
|
297 | 296 | raise IOError |
|
298 | 297 | print complaint |
|
299 | 298 | |
|
# Config sections propagated from the mercurial documentation; only these
# sections are copied from hgrc files into a baseui object.
ui_sections = [
    'alias', 'auth', 'decode/encode', 'defaults', 'diff', 'email',
    'extensions', 'format', 'merge-patterns', 'merge-tools', 'hooks',
    'http_proxy', 'smtp', 'patch', 'paths', 'profiling', 'server',
    'trusted', 'ui', 'web',
]
|
311 | 310 | |
|
312 | 311 | |
|
def make_ui(read_from='file', path=None, checkpaths=True, clear_session=True):
    """
    Build a mercurial ui object from options read either from an hgrc
    file or from the RhodeCodeUi table in the database.

    :param read_from: read from 'file' or 'db'
    :param path: path to a mercurial config file (file mode only)
    :param checkpaths: check the path
    :param clear_session: remove the db session after reading (db mode only)
    :return: configured baseui, or False when the hgrc file is missing
    """
    baseui = ui.ui()

    # start from clean config objects
    baseui._ocfg = config.config()
    baseui._ucfg = config.config()
    baseui._tcfg = config.config()

    if read_from == 'file':
        if not os.path.isfile(path):
            log.debug('hgrc file is not present at %s, skipping...' % path)
            return False
        log.debug('reading hgrc from %s' % path)
        cfg = config.config()
        cfg.read(path)
        for section in ui_sections:
            for k, v in cfg.items(section):
                log.debug('settings ui from file: [%s] %s=%s' % (section, k, v))
                baseui.setconfig(safe_str(section), safe_str(k), safe_str(v))

    elif read_from == 'db':
        sa = meta.Session()
        db_rows = sa.query(RhodeCodeUi)\
            .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
            .all()

        for row in db_rows:
            if not row.ui_active:
                continue
            log.debug('settings ui from db: [%s] %s=%s', row.ui_section,
                      row.ui_key, row.ui_value)
            baseui.setconfig(safe_str(row.ui_section), safe_str(row.ui_key),
                             safe_str(row.ui_value))
            if row.ui_key == 'push_ssl':
                # force set push_ssl requirement to False, rhodecode
                # handles that
                baseui.setconfig(safe_str(row.ui_section), safe_str(row.ui_key),
                                 False)
        if clear_session:
            meta.Session.remove()
    return baseui
|
363 | 362 | |
|
364 | 363 | |
|
def set_rhodecode_config(config):
    """
    Copy application settings stored in the database into the pylons
    config mapping.

    :param config: pylons config object, updated in place
    """
    for key, value in RhodeCodeSetting.get_app_settings().items():
        config[key] = value
|
375 | 374 | |
|
376 | 375 | |
|
def set_vcs_config(config):
    """
    Patch the VCS library config with RhodeCode-specific settings.

    :param config: rhodecode.CONFIG
    """
    import rhodecode
    from rhodecode.lib.vcs import conf
    from rhodecode.lib.utils2 import aslist

    # register the two supported backends
    conf.settings.BACKENDS = {
        'hg': 'rhodecode.lib.vcs.backends.hg.MercurialRepository',
        'git': 'rhodecode.lib.vcs.backends.git.GitRepository',
    }
    conf.settings.GIT_EXECUTABLE_PATH = config.get('git_path', 'git')
    conf.settings.GIT_REV_FILTER = config.get('git_rev_filter', '--all').strip()
    conf.settings.DEFAULT_ENCODINGS = aslist(
        config.get('default_encoding', 'utf8'), sep=',')
|
395 | 394 | |
|
396 | 395 | |
|
def map_groups(path):
    """
    Given a full path to a repository, create every nested group that the
    repo lives in, wiring up parent/child relations and default perms for
    each newly created group.

    :param path: full path to repository
    :return: the innermost RepoGroup, or None for a top-level repo
    """
    sa = meta.Session()
    # drop the last path element -- that one is the repo itself
    parts = path.split(Repository.url_sep())[:-1]
    parent = None
    group = None

    rgm = ReposGroupModel(sa)
    owner = User.get_first_admin()
    for lvl, name in enumerate(parts):
        group_name = '/'.join(parts[:lvl] + [name])
        group = RepoGroup.get_by_group_name(group_name)

        # skip folders that are actually removed repos
        if REMOVED_REPO_PAT.match(group_name):
            break

        if group is None:
            log.debug('creating group level: %s group_name: %s'
                      % (lvl, group_name))
            group = RepoGroup(group_name, parent)
            group.group_description = '%s group' % group_name
            group.user = owner
            sa.add(group)
            sa.add(rgm._create_default_perms(group))
            sa.flush()

        parent = group
    return group
|
436 | 435 | |
|
437 | 436 | |
|
def repo2db_mapper(initial_repo_list, remove_obsolete=False,
                   install_git_hook=False):
    """
    Map every repo found by the filesystem scanners into the database.

    Repositories missing from the db get created; with *remove_obsolete*
    set, db entries that no longer exist on disk get removed as well.

    :param initial_repo_list: dict of repositories found by scanning methods
    :param remove_obsolete: drop db entries missing from the filesystem
    :param install_git_hook: also check/install the git hook on existing repos
    :return: (added repo names, removed repo names)
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.scm import ScmModel
    sa = meta.Session()
    rm = RepoModel()
    user = User.get_first_admin()
    added = []

    # creation defaults taken from the application settings
    defs = RhodeCodeSetting.get_default_repo_settings(strip_prefix=True)
    enable_statistics = defs.get('repo_enable_statistics')
    enable_locking = defs.get('repo_enable_locking')
    enable_downloads = defs.get('repo_enable_downloads')
    private = defs.get('repo_private')

    for name, repo in initial_repo_list.items():
        group = map_groups(name)
        db_repo = rm.get_by_repo_name(name)
        if not db_repo:
            # repo exists on the filesystem but not in the database yet
            log.info('repository %s not found, creating now' % name)
            added.append(name)
            desc = (repo.description
                    if repo.description != 'unknown'
                    else '%s repository' % name)

            new_repo = rm.create_repo(
                repo_name=name,
                repo_type=repo.alias,
                description=desc,
                repos_group=getattr(group, 'group_id', None),
                owner=user,
                just_db=True,
                enable_locking=enable_locking,
                enable_downloads=enable_downloads,
                enable_statistics=enable_statistics,
                private=private
            )
            # freshly added git repos always get their hook installed
            if new_repo.repo_type == 'git':
                ScmModel().install_git_hook(new_repo.scm_instance)
            new_repo.update_changeset_cache()
        elif install_git_hook:
            if db_repo.repo_type == 'git':
                ScmModel().install_git_hook(db_repo.scm_instance)

    sa.commit()
    removed = []
    if remove_obsolete:
        # drop database entries whose repositories vanished from disk
        for repo in sa.query(Repository).all():
            if repo.repo_name not in initial_repo_list.keys():
                log.debug("Removing non-existing repository found in db `%s`" %
                          repo.repo_name)
                try:
                    removed.append(repo.repo_name)
                    RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
                    sa.commit()
                except Exception:
                    # don't hold further removals on error
                    log.error(traceback.format_exc())
                    sa.rollback()
    return added, removed
|
513 | 512 | |
|
514 | 513 | |
|
515 | 514 | # set cache regions for beaker so celery can utilise it |
|
516 | 515 | def add_cache(settings): |
|
517 | 516 | cache_settings = {'regions': None} |
|
518 | 517 | for key in settings.keys(): |
|
519 | 518 | for prefix in ['beaker.cache.', 'cache.']: |
|
520 | 519 | if key.startswith(prefix): |
|
521 | 520 | name = key.split(prefix)[1].strip() |
|
522 | 521 | cache_settings[name] = settings[key].strip() |
|
523 | 522 | if cache_settings['regions']: |
|
524 | 523 | for region in cache_settings['regions'].split(','): |
|
525 | 524 | region = region.strip() |
|
526 | 525 | region_settings = {} |
|
527 | 526 | for key, value in cache_settings.items(): |
|
528 | 527 | if key.startswith(region): |
|
529 | 528 | region_settings[key.split('.')[1]] = value |
|
530 | 529 | region_settings['expire'] = int(region_settings.get('expire', |
|
531 | 530 | 60)) |
|
532 | 531 | region_settings.setdefault('lock_dir', |
|
533 | 532 | cache_settings.get('lock_dir')) |
|
534 | 533 | region_settings.setdefault('data_dir', |
|
535 | 534 | cache_settings.get('data_dir')) |
|
536 | 535 | |
|
537 | 536 | if 'type' not in region_settings: |
|
538 | 537 | region_settings['type'] = cache_settings.get('type', |
|
539 | 538 | 'memory') |
|
540 | 539 | beaker.cache.cache_regions[region] = region_settings |
|
541 | 540 | |
|
542 | 541 | |
|
def load_rcextensions(root_path):
    """
    Load the optional rcextensions module found under *root_path* and
    merge its settings into rhodecode's global configuration.
    """
    import rhodecode
    from rhodecode.config import conf

    path = os.path.join(root_path, 'rcextensions', '__init__.py')
    if not os.path.isfile(path):
        return

    rcext = create_module('rc', path)
    EXT = rhodecode.EXTENSIONS = rcext
    log.debug('Found rcextensions now loading %s...' % rcext)

    # additional mappings that are not present in the pygments lexers
    conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))

    # override our extensions from rc-extensions (if present)
    if getattr(EXT, 'INDEX_EXTENSIONS', []):
        log.debug('settings custom INDEX_EXTENSIONS')
        conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])

    # additional mappings
    log.debug('adding extra into INDEX_EXTENSIONS')
    conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))

    # auto-fill any attribute missing from the loaded module with the
    # shipped defaults, so new rcext features keep working after upgrades
    from rhodecode.config import rcextensions
    for k in dir(rcextensions):
        if not k.startswith('_') and not hasattr(EXT, k):
            setattr(EXT, k, getattr(rcextensions, k))
|
573 | 572 | |
|
def get_custom_lexer(extension):
    """
    Return the pygments lexer configured for *extension* in the
    rcextensions module, or None when no custom lexer is defined.
    """
    import rhodecode
    from pygments import lexers

    if not rhodecode.EXTENSIONS:
        return None
    # did we define this extension as some other lexer?
    extra = rhodecode.EXTENSIONS.EXTRA_LEXERS
    if extension in extra:
        return lexers.get_lexer_by_name(extra[extension])
    return None
|
585 | 584 | |
|
586 | 585 | |
|
587 | 586 | #============================================================================== |
|
588 | 587 | # TEST FUNCTIONS AND CREATORS |
|
589 | 588 | #============================================================================== |
|
def create_test_index(repo_location, config, full_index):
    """
    Make the default whoosh test index.

    :param repo_location: path holding the test repositories
    :param config: test config
    :param full_index: force a full (non-incremental) index build
    """
    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
    from rhodecode.lib.pidlock import DaemonLock, LockHeld

    index_location = os.path.join(config['app_conf']['index_dir'])
    if not os.path.exists(index_location):
        os.makedirs(index_location)

    try:
        l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
    except LockHeld:
        # another process is already indexing -- silently skip
        return
    try:
        WhooshIndexingDaemon(index_location=index_location,
                             repo_location=repo_location)\
            .run(full_index=full_index)
    finally:
        # FIX: release the lock even when indexing raises; previously an
        # exception during run() left the lock file behind forever
        l.release()
|
616 | 615 | |
|
def create_test_env(repos_test_path, config):
    """
    Make a fresh database and install the test repositories into the
    tmp dir.
    """
    from rhodecode.lib.db_manage import DbManage
    from rhodecode.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH

    # PART ONE: create the db
    dbconf = config['sqlalchemy.db1.url']
    log.debug('making test db %s' % dbconf)

    # create test dir if it doesn't exist
    if not os.path.isdir(repos_test_path):
        log.debug('Creating testdir %s' % repos_test_path)
        os.makedirs(repos_test_path)

    dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
                        tests=True)
    dbmanage.create_tables(override=True)
    dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
    dbmanage.create_default_user()
    dbmanage.admin_prompt()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()
    Session().commit()

    # PART TWO: make test vcs repositories
    log.debug('making test vcs repositories')

    idx_path = config['app_conf']['index_dir']
    data_path = config['app_conf']['cache_dir']

    # clean index and data dirs
    for cleanup_path in (idx_path, data_path):
        if cleanup_path and os.path.exists(cleanup_path):
            log.debug('remove %s' % cleanup_path)
            shutil.rmtree(cleanup_path)

    # CREATE DEFAULT TEST REPOS from the bundled fixture tarballs.
    # FIX: compute the fixture dir once (it was duplicated) and close the
    # tarballs in a finally block so a failed extraction doesn't leak them
    fixture_dir = jn(dn(dn(abspath(__file__))), 'tests', 'fixtures')
    for tarball, target in (("vcs_test_hg.tar.gz", HG_REPO),
                            ("vcs_test_git.tar.gz", GIT_REPO)):
        tar = tarfile.open(jn(fixture_dir, tarball))
        try:
            tar.extractall(jn(TESTS_TMP_PATH, target))
        finally:
            tar.close()

    # LOAD VCS test stuff
    from rhodecode.tests.vcs import setup_package
    setup_package()
|
672 | 671 | |
|
673 | 672 | |
|
674 | 673 | #============================================================================== |
|
675 | 674 | # PASTER COMMANDS |
|
676 | 675 | #============================================================================== |
|
class BasePasterCommand(Command):
    """
    Abstract Base Class for paster commands.

    The celery commands are somewhat aggressive about loading
    celery.conf, and since our module sets the `CELERY_LOADER`
    environment variable to our loader, we have to bootstrap a bit and
    make sure we've had a chance to load the pylons config off of the
    command line, otherwise everything fails.
    """
    min_args = 1
    min_args_error = "Please provide a paster config file as an argument."
    takes_config_file = 1
    requires_config_file = True

    def notify_msg(self, msg, log=False):
        """Make a notification to user, additionally if logger is passed
        it logs this action using given logger

        :param msg: message that will be printed to user
        :param log: logging.Logger instance to additionally log this message
        """
        # FIX: the original `isinstance(log, logging)` passed the logging
        # *module* as the class argument, raising TypeError for any truthy
        # `log`; and `log(msg)` tried to call a non-callable Logger object.
        if log and isinstance(log, logging.Logger):
            log.info(msg)

    def run(self, args):
        """
        Overrides Command.run

        Checks for a config file argument and loads it.
        """
        if len(args) < self.min_args:
            raise BadCommand(
                self.min_args_error % {'min_args': self.min_args,
                                       'actual_args': len(args)})

        # Decrement because we're going to lob off the first argument.
        # @@ This is hacky
        self.min_args -= 1
        self.bootstrap_config(args[0])
        self.update_parser()
        return super(BasePasterCommand, self).run(args[1:])

    def update_parser(self):
        """
        Abstract method. Allows for the class's parser to be updated
        before the superclass's `run` method is called. Necessary to
        allow options/arguments to be passed through to the underlying
        celery command.
        """
        raise NotImplementedError("Abstract Method.")

    def bootstrap_config(self, conf):
        """
        Loads the pylons configuration.
        """
        from pylons import config as pylonsconfig

        self.path_to_ini_file = os.path.realpath(conf)
        conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
        pylonsconfig.init_app(conf.global_conf, conf.local_conf)

    def _init_session(self):
        """
        Inits SqlAlchemy Session
        """
        logging.config.fileConfig(self.path_to_ini_file)
        from pylons import config
        from rhodecode.model import init_model
        from rhodecode.lib.utils2 import engine_from_config

        # get to remove repos !!
        add_cache(config)
        engine = engine_from_config(config, 'sqlalchemy.db1.')
        init_model(engine)
|
753 | 752 | |
|
754 | 753 | |
|
def check_git_version():
    """
    Detect the git version installed on the system and warn when it is
    too old for RhodeCode to work properly.

    :return: StrictVersion of the detected git ('0.0.0' on failure)
    """
    from rhodecode import BACKENDS
    from rhodecode.lib.vcs.backends.git.repository import GitRepository
    from rhodecode.lib.vcs.conf import settings
    from distutils.version import StrictVersion

    stdout, stderr = GitRepository._run_git_command('--version', _bare=True,
                                                    _safe=True)

    ver = (stdout.split(' ')[-1] or '').strip() or '0.0.0'
    # StrictVersion only understands three version components
    if len(ver.split('.')) > 3:
        ver = '.'.join(ver.split('.')[:3])

    try:
        _ver = StrictVersion(ver)
    except Exception:
        _ver = StrictVersion('0.0.0')
        stderr = traceback.format_exc()

    req_ver = '1.7.4'
    to_old_git = _ver < StrictVersion(req_ver)

    if 'git' in BACKENDS:
        log.debug('GIT executable: "%s" version detected: %s'
                  % (settings.GIT_EXECUTABLE_PATH, stdout))
        if stderr:
            log.warning('Unable to detect git version, org error was: %r' % stderr)
        elif to_old_git:
            log.warning('RhodeCode detected git version %s, which is too old '
                        'for the system to function properly. Make sure '
                        'its version is at least %s' % (ver, req_ver))
    return _ver
|
793 | 792 | |
|
794 | 793 | |
|
@decorator.decorator
def jsonify(func, *args, **kwargs):
    """Action decorator that formats output for JSON

    Given a function that will return content, this decorator will turn
    the result into JSON, with a content-type of 'application/json' and
    output it.

    """
    from pylons.decorators.util import get_pylons
    from rhodecode.lib.compat import json

    pylons = get_pylons(args)
    pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8'
    result = func(*args, **kwargs)
    if isinstance(result, (list, tuple)):
        # top-level JSON arrays can be replayed cross-site in old browsers
        msg = "JSON responses with Array envelopes are susceptible to " \
              "cross-site data leak attacks, see " \
              "http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
        warnings.warn(msg, Warning, 2)
        log.warning(msg)
    log.debug("Returning JSON wrapped action output")
    return json.dumps(result, encoding='utf-8')
@@ -1,47 +1,47 b'' | |||
|
1 | from mercurial import ui, config | |
|
1 | from rhodecode.lib.vcs.utils.hgcompat import ui, config | |
|
2 | 2 | |
|
3 | 3 | |
|
4 | 4 | def make_ui(self, path='hgwebdir.config'): |
|
5 | 5 | """ |
|
6 | 6 | A funcion that will read python rc files and make an ui from read options |
|
7 | 7 | |
|
8 | 8 | :param path: path to mercurial config file |
|
9 | 9 | """ |
|
10 | 10 | #propagated from mercurial documentation |
|
11 | 11 | sections = [ |
|
12 | 12 | 'alias', |
|
13 | 13 | 'auth', |
|
14 | 14 | 'decode/encode', |
|
15 | 15 | 'defaults', |
|
16 | 16 | 'diff', |
|
17 | 17 | 'email', |
|
18 | 18 | 'extensions', |
|
19 | 19 | 'format', |
|
20 | 20 | 'merge-patterns', |
|
21 | 21 | 'merge-tools', |
|
22 | 22 | 'hooks', |
|
23 | 23 | 'http_proxy', |
|
24 | 24 | 'smtp', |
|
25 | 25 | 'patch', |
|
26 | 26 | 'paths', |
|
27 | 27 | 'profiling', |
|
28 | 28 | 'server', |
|
29 | 29 | 'trusted', |
|
30 | 30 | 'ui', |
|
31 | 31 | 'web', |
|
32 | 32 | ] |
|
33 | 33 | |
|
34 | 34 | repos = path |
|
35 | 35 | baseui = ui.ui() |
|
36 | 36 | cfg = config.config() |
|
37 | 37 | cfg.read(repos) |
|
38 | 38 | self.paths = cfg.items('paths') |
|
39 | 39 | self.base_path = self.paths[0][1].replace('*', '') |
|
40 | 40 | self.check_repo_dir(self.paths) |
|
41 | 41 | self.set_statics(cfg) |
|
42 | 42 | |
|
43 | 43 | for section in sections: |
|
44 | 44 | for k, v in cfg.items(section): |
|
45 | 45 | baseui.setconfig(section, k, v) |
|
46 | 46 | |
|
47 | 47 | return baseui |
@@ -1,25 +1,30 b'' | |||
|
1 | 1 | """ |
|
2 | 2 | Mercurial libs compatibility |
|
3 | 3 | """ |
|
4 | 4 | |
|
5 | import mercurial | |
|
5 | 6 | from mercurial import archival, merge as hg_merge, patch, ui |
|
7 | from mercurial import discovery | |
|
8 | from mercurial import localrepo | |
|
9 | from mercurial import scmutil | |
|
10 | from mercurial import config | |
|
6 | 11 | from mercurial.commands import clone, nullid, pull |
|
7 | 12 | from mercurial.context import memctx, memfilectx |
|
8 | 13 | from mercurial.error import RepoError, RepoLookupError, Abort |
|
14 | from mercurial.hgweb import hgweb_mod | |
|
9 | 15 | from mercurial.hgweb.common import get_contact |
|
10 | 16 | from mercurial.localrepo import localrepository |
|
11 | 17 | from mercurial.match import match |
|
12 | 18 | from mercurial.mdiff import diffopts |
|
13 | 19 | from mercurial.node import hex |
|
14 | 20 | from mercurial.encoding import tolocal |
|
15 | from mercurial import discovery | |
|
16 | from mercurial import localrepo | |
|
17 | from mercurial import scmutil | |
|
18 | 21 | from mercurial.discovery import findcommonoutgoing |
|
19 | 22 | from mercurial.hg import peer |
|
20 | ||
|
23 | from mercurial.httppeer import httppeer | |
|
21 | 24 | from mercurial.util import url as hg_url |
|
25 | from mercurial.scmutil import revrange | |
|
26 | from mercurial.node import nullrev | |
|
22 | 27 | |
|
23 | 28 | # those authnadlers are patched for python 2.6.5 bug an |
|
24 | 29 | # infinit looping when given invalid resources |
|
25 | 30 | from mercurial.url import httpbasicauthhandler, httpdigestauthhandler |
@@ -1,827 +1,827 b'' | |||
|
1 | 1 | """ |
|
2 | 2 | Set of generic validators |
|
3 | 3 | """ |
|
4 | 4 | import os |
|
5 | 5 | import re |
|
6 | 6 | import formencode |
|
7 | 7 | import logging |
|
8 | 8 | from collections import defaultdict |
|
9 | 9 | from pylons.i18n.translation import _ |
|
10 | 10 | from webhelpers.pylonslib.secure_form import authentication_token |
|
11 | 11 | |
|
12 | 12 | from formencode.validators import ( |
|
13 | 13 | UnicodeString, OneOf, Int, Number, Regex, Email, Bool, StringBoolean, Set, |
|
14 | 14 | NotEmpty, IPAddress, CIDR, String, FancyValidator |
|
15 | 15 | ) |
|
16 | 16 | from rhodecode.lib.compat import OrderedSet |
|
17 | 17 | from rhodecode.lib import ipaddr |
|
18 | 18 | from rhodecode.lib.utils import repo_name_slug |
|
19 | 19 | from rhodecode.lib.utils2 import safe_int, str2bool |
|
20 | 20 | from rhodecode.model.db import RepoGroup, Repository, UserGroup, User,\ |
|
21 | 21 | ChangesetStatus |
|
22 | 22 | from rhodecode.lib.exceptions import LdapImportError |
|
23 | 23 | from rhodecode.config.routing import ADMIN_PREFIX |
|
24 | 24 | from rhodecode.lib.auth import HasReposGroupPermissionAny, HasPermissionAny |
|
25 | 25 | |
|
26 | 26 | # silence warnings and pylint |
|
27 | 27 | UnicodeString, OneOf, Int, Number, Regex, Email, Bool, StringBoolean, Set, \ |
|
28 | 28 | NotEmpty, IPAddress, CIDR, String, FancyValidator |
|
29 | 29 | |
|
30 | 30 | log = logging.getLogger(__name__) |
|
31 | 31 | |
|
32 | 32 | |
|
33 | 33 | class UniqueList(formencode.FancyValidator): |
|
34 | 34 | """ |
|
35 | 35 | Unique List ! |
|
36 | 36 | """ |
|
37 | 37 | messages = dict( |
|
38 | 38 | empty=_('Value cannot be an empty list'), |
|
39 | 39 | missing_value=_('Value cannot be an empty list'), |
|
40 | 40 | ) |
|
41 | 41 | |
|
42 | 42 | def _to_python(self, value, state): |
|
43 | 43 | if isinstance(value, list): |
|
44 | 44 | return value |
|
45 | 45 | elif isinstance(value, set): |
|
46 | 46 | return list(value) |
|
47 | 47 | elif isinstance(value, tuple): |
|
48 | 48 | return list(value) |
|
49 | 49 | elif value is None: |
|
50 | 50 | return [] |
|
51 | 51 | else: |
|
52 | 52 | return [value] |
|
53 | 53 | |
|
54 | 54 | def empty_value(self, value): |
|
55 | 55 | return [] |
|
56 | 56 | |
|
57 | 57 | |
|
58 | 58 | class StateObj(object): |
|
59 | 59 | """ |
|
60 | 60 | this is needed to translate the messages using _() in validators |
|
61 | 61 | """ |
|
62 | 62 | _ = staticmethod(_) |
|
63 | 63 | |
|
64 | 64 | |
|
65 | 65 | def M(self, key, state=None, **kwargs): |
|
66 | 66 | """ |
|
67 | 67 | returns string from self.message based on given key, |
|
68 | 68 | passed kw params are used to substitute %(named)s params inside |
|
69 | 69 | translated strings |
|
70 | 70 | |
|
71 | 71 | :param msg: |
|
72 | 72 | :param state: |
|
73 | 73 | """ |
|
74 | 74 | if state is None: |
|
75 | 75 | state = StateObj() |
|
76 | 76 | else: |
|
77 | 77 | state._ = staticmethod(_) |
|
78 | 78 | #inject validator into state object |
|
79 | 79 | return self.message(key, state, **kwargs) |
|
80 | 80 | |
|
81 | 81 | |
|
82 | 82 | def ValidUsername(edit=False, old_data={}): |
|
83 | 83 | class _validator(formencode.validators.FancyValidator): |
|
84 | 84 | messages = { |
|
85 | 85 | 'username_exists': _(u'Username "%(username)s" already exists'), |
|
86 | 86 | 'system_invalid_username': |
|
87 | 87 | _(u'Username "%(username)s" is forbidden'), |
|
88 | 88 | 'invalid_username': |
|
89 | 89 | _(u'Username may only contain alphanumeric characters ' |
|
90 | 90 | 'underscores, periods or dashes and must begin with ' |
|
91 | 91 | 'alphanumeric character or underscore') |
|
92 | 92 | } |
|
93 | 93 | |
|
94 | 94 | def validate_python(self, value, state): |
|
95 | 95 | if value in ['default', 'new_user']: |
|
96 | 96 | msg = M(self, 'system_invalid_username', state, username=value) |
|
97 | 97 | raise formencode.Invalid(msg, value, state) |
|
98 | 98 | #check if user is unique |
|
99 | 99 | old_un = None |
|
100 | 100 | if edit: |
|
101 | 101 | old_un = User.get(old_data.get('user_id')).username |
|
102 | 102 | |
|
103 | 103 | if old_un != value or not edit: |
|
104 | 104 | if User.get_by_username(value, case_insensitive=True): |
|
105 | 105 | msg = M(self, 'username_exists', state, username=value) |
|
106 | 106 | raise formencode.Invalid(msg, value, state) |
|
107 | 107 | |
|
108 | 108 | if re.match(r'^[a-zA-Z0-9\_]{1}[a-zA-Z0-9\-\_\.]*$', value) is None: |
|
109 | 109 | msg = M(self, 'invalid_username', state) |
|
110 | 110 | raise formencode.Invalid(msg, value, state) |
|
111 | 111 | return _validator |
|
112 | 112 | |
|
113 | 113 | |
|
114 | 114 | def ValidRepoUser(): |
|
115 | 115 | class _validator(formencode.validators.FancyValidator): |
|
116 | 116 | messages = { |
|
117 | 117 | 'invalid_username': _(u'Username %(username)s is not valid') |
|
118 | 118 | } |
|
119 | 119 | |
|
120 | 120 | def validate_python(self, value, state): |
|
121 | 121 | try: |
|
122 | 122 | User.query().filter(User.active == True)\ |
|
123 | 123 | .filter(User.username == value).one() |
|
124 | 124 | except Exception: |
|
125 | 125 | msg = M(self, 'invalid_username', state, username=value) |
|
126 | 126 | raise formencode.Invalid(msg, value, state, |
|
127 | 127 | error_dict=dict(username=msg) |
|
128 | 128 | ) |
|
129 | 129 | |
|
130 | 130 | return _validator |
|
131 | 131 | |
|
132 | 132 | |
|
133 | 133 | def ValidUserGroup(edit=False, old_data={}): |
|
134 | 134 | class _validator(formencode.validators.FancyValidator): |
|
135 | 135 | messages = { |
|
136 | 136 | 'invalid_group': _(u'Invalid user group name'), |
|
137 | 137 | 'group_exist': _(u'User group "%(usergroup)s" already exists'), |
|
138 | 138 | 'invalid_usergroup_name': |
|
139 | 139 | _(u'user group name may only contain alphanumeric ' |
|
140 | 140 | 'characters underscores, periods or dashes and must begin ' |
|
141 | 141 | 'with alphanumeric character') |
|
142 | 142 | } |
|
143 | 143 | |
|
144 | 144 | def validate_python(self, value, state): |
|
145 | 145 | if value in ['default']: |
|
146 | 146 | msg = M(self, 'invalid_group', state) |
|
147 | 147 | raise formencode.Invalid(msg, value, state, |
|
148 | 148 | error_dict=dict(users_group_name=msg) |
|
149 | 149 | ) |
|
150 | 150 | #check if group is unique |
|
151 | 151 | old_ugname = None |
|
152 | 152 | if edit: |
|
153 | 153 | old_id = old_data.get('users_group_id') |
|
154 | 154 | old_ugname = UserGroup.get(old_id).users_group_name |
|
155 | 155 | |
|
156 | 156 | if old_ugname != value or not edit: |
|
157 | 157 | is_existing_group = UserGroup.get_by_group_name(value, |
|
158 | 158 | case_insensitive=True) |
|
159 | 159 | if is_existing_group: |
|
160 | 160 | msg = M(self, 'group_exist', state, usergroup=value) |
|
161 | 161 | raise formencode.Invalid(msg, value, state, |
|
162 | 162 | error_dict=dict(users_group_name=msg) |
|
163 | 163 | ) |
|
164 | 164 | |
|
165 | 165 | if re.match(r'^[a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+$', value) is None: |
|
166 | 166 | msg = M(self, 'invalid_usergroup_name', state) |
|
167 | 167 | raise formencode.Invalid(msg, value, state, |
|
168 | 168 | error_dict=dict(users_group_name=msg) |
|
169 | 169 | ) |
|
170 | 170 | |
|
171 | 171 | return _validator |
|
172 | 172 | |
|
173 | 173 | |
|
174 | 174 | def ValidReposGroup(edit=False, old_data={}): |
|
175 | 175 | class _validator(formencode.validators.FancyValidator): |
|
176 | 176 | messages = { |
|
177 | 177 | 'group_parent_id': _(u'Cannot assign this group as parent'), |
|
178 | 178 | 'group_exists': _(u'Group "%(group_name)s" already exists'), |
|
179 | 179 | 'repo_exists': |
|
180 | 180 | _(u'Repository with name "%(group_name)s" already exists') |
|
181 | 181 | } |
|
182 | 182 | |
|
183 | 183 | def validate_python(self, value, state): |
|
184 | 184 | # TODO WRITE VALIDATIONS |
|
185 | 185 | group_name = value.get('group_name') |
|
186 | 186 | group_parent_id = value.get('group_parent_id') |
|
187 | 187 | |
|
188 | 188 | # slugify repo group just in case :) |
|
189 | 189 | slug = repo_name_slug(group_name) |
|
190 | 190 | |
|
191 | 191 | # check for parent of self |
|
192 | 192 | parent_of_self = lambda: ( |
|
193 | 193 | old_data['group_id'] == int(group_parent_id) |
|
194 | 194 | if group_parent_id else False |
|
195 | 195 | ) |
|
196 | 196 | if edit and parent_of_self(): |
|
197 | 197 | msg = M(self, 'group_parent_id', state) |
|
198 | 198 | raise formencode.Invalid(msg, value, state, |
|
199 | 199 | error_dict=dict(group_parent_id=msg) |
|
200 | 200 | ) |
|
201 | 201 | |
|
202 | 202 | old_gname = None |
|
203 | 203 | if edit: |
|
204 | 204 | old_gname = RepoGroup.get(old_data.get('group_id')).group_name |
|
205 | 205 | |
|
206 | 206 | if old_gname != group_name or not edit: |
|
207 | 207 | |
|
208 | 208 | # check group |
|
209 | 209 | gr = RepoGroup.query()\ |
|
210 | 210 | .filter(RepoGroup.group_name == slug)\ |
|
211 | 211 | .filter(RepoGroup.group_parent_id == group_parent_id)\ |
|
212 | 212 | .scalar() |
|
213 | 213 | |
|
214 | 214 | if gr: |
|
215 | 215 | msg = M(self, 'group_exists', state, group_name=slug) |
|
216 | 216 | raise formencode.Invalid(msg, value, state, |
|
217 | 217 | error_dict=dict(group_name=msg) |
|
218 | 218 | ) |
|
219 | 219 | |
|
220 | 220 | # check for same repo |
|
221 | 221 | repo = Repository.query()\ |
|
222 | 222 | .filter(Repository.repo_name == slug)\ |
|
223 | 223 | .scalar() |
|
224 | 224 | |
|
225 | 225 | if repo: |
|
226 | 226 | msg = M(self, 'repo_exists', state, group_name=slug) |
|
227 | 227 | raise formencode.Invalid(msg, value, state, |
|
228 | 228 | error_dict=dict(group_name=msg) |
|
229 | 229 | ) |
|
230 | 230 | |
|
231 | 231 | return _validator |
|
232 | 232 | |
|
233 | 233 | |
|
234 | 234 | def ValidPassword(): |
|
235 | 235 | class _validator(formencode.validators.FancyValidator): |
|
236 | 236 | messages = { |
|
237 | 237 | 'invalid_password': |
|
238 | 238 | _(u'Invalid characters (non-ascii) in password') |
|
239 | 239 | } |
|
240 | 240 | |
|
241 | 241 | def validate_python(self, value, state): |
|
242 | 242 | try: |
|
243 | 243 | (value or '').decode('ascii') |
|
244 | 244 | except UnicodeError: |
|
245 | 245 | msg = M(self, 'invalid_password', state) |
|
246 | 246 | raise formencode.Invalid(msg, value, state,) |
|
247 | 247 | return _validator |
|
248 | 248 | |
|
249 | 249 | |
|
250 | 250 | def ValidPasswordsMatch(): |
|
251 | 251 | class _validator(formencode.validators.FancyValidator): |
|
252 | 252 | messages = { |
|
253 | 253 | 'password_mismatch': _(u'Passwords do not match'), |
|
254 | 254 | } |
|
255 | 255 | |
|
256 | 256 | def validate_python(self, value, state): |
|
257 | 257 | |
|
258 | 258 | pass_val = value.get('password') or value.get('new_password') |
|
259 | 259 | if pass_val != value['password_confirmation']: |
|
260 | 260 | msg = M(self, 'password_mismatch', state) |
|
261 | 261 | raise formencode.Invalid(msg, value, state, |
|
262 | 262 | error_dict=dict(password_confirmation=msg) |
|
263 | 263 | ) |
|
264 | 264 | return _validator |
|
265 | 265 | |
|
266 | 266 | |
|
267 | 267 | def ValidAuth(): |
|
268 | 268 | class _validator(formencode.validators.FancyValidator): |
|
269 | 269 | messages = { |
|
270 | 270 | 'invalid_password': _(u'invalid password'), |
|
271 | 271 | 'invalid_username': _(u'invalid user name'), |
|
272 | 272 | 'disabled_account': _(u'Your account is disabled') |
|
273 | 273 | } |
|
274 | 274 | |
|
275 | 275 | def validate_python(self, value, state): |
|
276 | 276 | from rhodecode.lib.auth import authenticate |
|
277 | 277 | |
|
278 | 278 | password = value['password'] |
|
279 | 279 | username = value['username'] |
|
280 | 280 | |
|
281 | 281 | if not authenticate(username, password): |
|
282 | 282 | user = User.get_by_username(username) |
|
283 | 283 | if user and not user.active: |
|
284 | 284 | log.warning('user %s is disabled' % username) |
|
285 | 285 | msg = M(self, 'disabled_account', state) |
|
286 | 286 | raise formencode.Invalid(msg, value, state, |
|
287 | 287 | error_dict=dict(username=msg) |
|
288 | 288 | ) |
|
289 | 289 | else: |
|
290 | 290 | log.warning('user %s failed to authenticate' % username) |
|
291 | 291 | msg = M(self, 'invalid_username', state) |
|
292 | 292 | msg2 = M(self, 'invalid_password', state) |
|
293 | 293 | raise formencode.Invalid(msg, value, state, |
|
294 | 294 | error_dict=dict(username=msg, password=msg2) |
|
295 | 295 | ) |
|
296 | 296 | return _validator |
|
297 | 297 | |
|
298 | 298 | |
|
299 | 299 | def ValidAuthToken(): |
|
300 | 300 | class _validator(formencode.validators.FancyValidator): |
|
301 | 301 | messages = { |
|
302 | 302 | 'invalid_token': _(u'Token mismatch') |
|
303 | 303 | } |
|
304 | 304 | |
|
305 | 305 | def validate_python(self, value, state): |
|
306 | 306 | if value != authentication_token(): |
|
307 | 307 | msg = M(self, 'invalid_token', state) |
|
308 | 308 | raise formencode.Invalid(msg, value, state) |
|
309 | 309 | return _validator |
|
310 | 310 | |
|
311 | 311 | |
|
312 | 312 | def ValidRepoName(edit=False, old_data={}): |
|
313 | 313 | class _validator(formencode.validators.FancyValidator): |
|
314 | 314 | messages = { |
|
315 | 315 | 'invalid_repo_name': |
|
316 | 316 | _(u'Repository name %(repo)s is disallowed'), |
|
317 | 317 | 'repository_exists': |
|
318 | 318 | _(u'Repository named %(repo)s already exists'), |
|
319 | 319 | 'repository_in_group_exists': _(u'Repository "%(repo)s" already ' |
|
320 | 320 | 'exists in group "%(group)s"'), |
|
321 | 321 | 'same_group_exists': _(u'Repository group with name "%(repo)s" ' |
|
322 | 322 | 'already exists') |
|
323 | 323 | } |
|
324 | 324 | |
|
325 | 325 | def _to_python(self, value, state): |
|
326 | 326 | repo_name = repo_name_slug(value.get('repo_name', '')) |
|
327 | 327 | repo_group = value.get('repo_group') |
|
328 | 328 | if repo_group: |
|
329 | 329 | gr = RepoGroup.get(repo_group) |
|
330 | 330 | group_path = gr.full_path |
|
331 | 331 | group_name = gr.group_name |
|
332 | 332 | # value needs to be aware of group name in order to check |
|
333 | 333 | # db key This is an actual just the name to store in the |
|
334 | 334 | # database |
|
335 | 335 | repo_name_full = group_path + RepoGroup.url_sep() + repo_name |
|
336 | 336 | else: |
|
337 | 337 | group_name = group_path = '' |
|
338 | 338 | repo_name_full = repo_name |
|
339 | 339 | |
|
340 | 340 | value['repo_name'] = repo_name |
|
341 | 341 | value['repo_name_full'] = repo_name_full |
|
342 | 342 | value['group_path'] = group_path |
|
343 | 343 | value['group_name'] = group_name |
|
344 | 344 | return value |
|
345 | 345 | |
|
346 | 346 | def validate_python(self, value, state): |
|
347 | 347 | |
|
348 | 348 | repo_name = value.get('repo_name') |
|
349 | 349 | repo_name_full = value.get('repo_name_full') |
|
350 | 350 | group_path = value.get('group_path') |
|
351 | 351 | group_name = value.get('group_name') |
|
352 | 352 | |
|
353 | 353 | if repo_name in [ADMIN_PREFIX, '']: |
|
354 | 354 | msg = M(self, 'invalid_repo_name', state, repo=repo_name) |
|
355 | 355 | raise formencode.Invalid(msg, value, state, |
|
356 | 356 | error_dict=dict(repo_name=msg) |
|
357 | 357 | ) |
|
358 | 358 | |
|
359 | 359 | rename = old_data.get('repo_name') != repo_name_full |
|
360 | 360 | create = not edit |
|
361 | 361 | if rename or create: |
|
362 | 362 | |
|
363 | 363 | if group_path != '': |
|
364 | 364 | if Repository.get_by_repo_name(repo_name_full): |
|
365 | 365 | msg = M(self, 'repository_in_group_exists', state, |
|
366 | 366 | repo=repo_name, group=group_name) |
|
367 | 367 | raise formencode.Invalid(msg, value, state, |
|
368 | 368 | error_dict=dict(repo_name=msg) |
|
369 | 369 | ) |
|
370 | 370 | elif RepoGroup.get_by_group_name(repo_name_full): |
|
371 | 371 | msg = M(self, 'same_group_exists', state, |
|
372 | 372 | repo=repo_name) |
|
373 | 373 | raise formencode.Invalid(msg, value, state, |
|
374 | 374 | error_dict=dict(repo_name=msg) |
|
375 | 375 | ) |
|
376 | 376 | |
|
377 | 377 | elif Repository.get_by_repo_name(repo_name_full): |
|
378 | 378 | msg = M(self, 'repository_exists', state, |
|
379 | 379 | repo=repo_name) |
|
380 | 380 | raise formencode.Invalid(msg, value, state, |
|
381 | 381 | error_dict=dict(repo_name=msg) |
|
382 | 382 | ) |
|
383 | 383 | return value |
|
384 | 384 | return _validator |
|
385 | 385 | |
|
386 | 386 | |
|
387 | 387 | def ValidForkName(*args, **kwargs): |
|
388 | 388 | return ValidRepoName(*args, **kwargs) |
|
389 | 389 | |
|
390 | 390 | |
|
391 | 391 | def SlugifyName(): |
|
392 | 392 | class _validator(formencode.validators.FancyValidator): |
|
393 | 393 | |
|
394 | 394 | def _to_python(self, value, state): |
|
395 | 395 | return repo_name_slug(value) |
|
396 | 396 | |
|
397 | 397 | def validate_python(self, value, state): |
|
398 | 398 | pass |
|
399 | 399 | |
|
400 | 400 | return _validator |
|
401 | 401 | |
|
402 | 402 | |
|
403 | 403 | def ValidCloneUri(): |
|
404 | 404 | from rhodecode.lib.utils import make_ui |
|
405 | 405 | |
|
406 | 406 | def url_handler(repo_type, url, ui=None): |
|
407 | 407 | if repo_type == 'hg': |
|
408 | 408 | from rhodecode.lib.vcs.backends.hg.repository import MercurialRepository |
|
409 |
from |
|
|
409 | from rhodecode.lib.vcs.utils.hgcompat import httppeer | |
|
410 | 410 | if url.startswith('http'): |
|
411 | 411 | ## initially check if it's at least the proper URL |
|
412 | 412 | ## or does it pass basic auth |
|
413 | 413 | MercurialRepository._check_url(url) |
|
414 | 414 | httppeer(ui, url)._capabilities() |
|
415 | 415 | elif url.startswith('svn+http'): |
|
416 | 416 | from hgsubversion.svnrepo import svnremoterepo |
|
417 | 417 | svnremoterepo(ui, url).capabilities |
|
418 | 418 | elif url.startswith('git+http'): |
|
419 | 419 | raise NotImplementedError() |
|
420 | 420 | else: |
|
421 | raise Exception('clone from URI %s not allowed' % (url)) | |
|
421 | raise Exception('clone from URI %s not allowed' % (url,)) | |
|
422 | 422 | |
|
423 | 423 | elif repo_type == 'git': |
|
424 | 424 | from rhodecode.lib.vcs.backends.git.repository import GitRepository |
|
425 | 425 | if url.startswith('http'): |
|
426 | 426 | ## initially check if it's at least the proper URL |
|
427 | 427 | ## or does it pass basic auth |
|
428 | 428 | GitRepository._check_url(url) |
|
429 | 429 | elif url.startswith('svn+http'): |
|
430 | 430 | raise NotImplementedError() |
|
431 | 431 | elif url.startswith('hg+http'): |
|
432 | 432 | raise NotImplementedError() |
|
433 | 433 | else: |
|
434 | 434 | raise Exception('clone from URI %s not allowed' % (url)) |
|
435 | 435 | |
|
436 | 436 | class _validator(formencode.validators.FancyValidator): |
|
437 | 437 | messages = { |
|
438 | 438 | 'clone_uri': _(u'invalid clone url'), |
|
439 | 439 | 'invalid_clone_uri': _(u'Invalid clone url, provide a ' |
|
440 | 440 | 'valid clone http(s)/svn+http(s) url') |
|
441 | 441 | } |
|
442 | 442 | |
|
443 | 443 | def validate_python(self, value, state): |
|
444 | 444 | repo_type = value.get('repo_type') |
|
445 | 445 | url = value.get('clone_uri') |
|
446 | 446 | |
|
447 | 447 | if not url: |
|
448 | 448 | pass |
|
449 | 449 | else: |
|
450 | 450 | try: |
|
451 | 451 | url_handler(repo_type, url, make_ui('db', clear_session=False)) |
|
452 | 452 | except Exception: |
|
453 | 453 | log.exception('Url validation failed') |
|
454 | 454 | msg = M(self, 'clone_uri') |
|
455 | 455 | raise formencode.Invalid(msg, value, state, |
|
456 | 456 | error_dict=dict(clone_uri=msg) |
|
457 | 457 | ) |
|
458 | 458 | return _validator |
|
459 | 459 | |
|
460 | 460 | |
|
461 | 461 | def ValidForkType(old_data={}): |
|
462 | 462 | class _validator(formencode.validators.FancyValidator): |
|
463 | 463 | messages = { |
|
464 | 464 | 'invalid_fork_type': _(u'Fork have to be the same type as parent') |
|
465 | 465 | } |
|
466 | 466 | |
|
467 | 467 | def validate_python(self, value, state): |
|
468 | 468 | if old_data['repo_type'] != value: |
|
469 | 469 | msg = M(self, 'invalid_fork_type', state) |
|
470 | 470 | raise formencode.Invalid(msg, value, state, |
|
471 | 471 | error_dict=dict(repo_type=msg) |
|
472 | 472 | ) |
|
473 | 473 | return _validator |
|
474 | 474 | |
|
475 | 475 | |
|
476 | 476 | def CanWriteGroup(old_data=None): |
|
477 | 477 | class _validator(formencode.validators.FancyValidator): |
|
478 | 478 | messages = { |
|
479 | 479 | 'permission_denied': _(u"You don't have permissions " |
|
480 | 480 | "to create repository in this group"), |
|
481 | 481 | 'permission_denied_root': _(u"no permission to create repository " |
|
482 | 482 | "in root location") |
|
483 | 483 | } |
|
484 | 484 | |
|
485 | 485 | def _to_python(self, value, state): |
|
486 | 486 | #root location |
|
487 | 487 | if value in [-1, "-1"]: |
|
488 | 488 | return None |
|
489 | 489 | return value |
|
490 | 490 | |
|
491 | 491 | def validate_python(self, value, state): |
|
492 | 492 | gr = RepoGroup.get(value) |
|
493 | 493 | gr_name = gr.group_name if gr else None # None means ROOT location |
|
494 | 494 | val = HasReposGroupPermissionAny('group.write', 'group.admin') |
|
495 | 495 | can_create_repos = HasPermissionAny('hg.admin', 'hg.create.repository') |
|
496 | 496 | forbidden = not val(gr_name, 'can write into group validator') |
|
497 | 497 | value_changed = True # old_data['repo_group'].get('group_id') != safe_int(value) |
|
498 | 498 | if value_changed: # do check if we changed the value |
|
499 | 499 | #parent group need to be existing |
|
500 | 500 | if gr and forbidden: |
|
501 | 501 | msg = M(self, 'permission_denied', state) |
|
502 | 502 | raise formencode.Invalid(msg, value, state, |
|
503 | 503 | error_dict=dict(repo_type=msg) |
|
504 | 504 | ) |
|
505 | 505 | ## check if we can write to root location ! |
|
506 | 506 | elif gr is None and not can_create_repos(): |
|
507 | 507 | msg = M(self, 'permission_denied_root', state) |
|
508 | 508 | raise formencode.Invalid(msg, value, state, |
|
509 | 509 | error_dict=dict(repo_type=msg) |
|
510 | 510 | ) |
|
511 | 511 | |
|
512 | 512 | return _validator |
|
513 | 513 | |
|
514 | 514 | |
|
515 | 515 | def CanCreateGroup(can_create_in_root=False): |
|
516 | 516 | class _validator(formencode.validators.FancyValidator): |
|
517 | 517 | messages = { |
|
518 | 518 | 'permission_denied': _(u"You don't have permissions " |
|
519 | 519 | "to create a group in this location") |
|
520 | 520 | } |
|
521 | 521 | |
|
522 | 522 | def to_python(self, value, state): |
|
523 | 523 | #root location |
|
524 | 524 | if value in [-1, "-1"]: |
|
525 | 525 | return None |
|
526 | 526 | return value |
|
527 | 527 | |
|
528 | 528 | def validate_python(self, value, state): |
|
529 | 529 | gr = RepoGroup.get(value) |
|
530 | 530 | gr_name = gr.group_name if gr else None # None means ROOT location |
|
531 | 531 | |
|
532 | 532 | if can_create_in_root and gr is None: |
|
533 | 533 | #we can create in root, we're fine no validations required |
|
534 | 534 | return |
|
535 | 535 | |
|
536 | 536 | forbidden_in_root = gr is None and not can_create_in_root |
|
537 | 537 | val = HasReposGroupPermissionAny('group.admin') |
|
538 | 538 | forbidden = not val(gr_name, 'can create group validator') |
|
539 | 539 | if forbidden_in_root or forbidden: |
|
540 | 540 | msg = M(self, 'permission_denied', state) |
|
541 | 541 | raise formencode.Invalid(msg, value, state, |
|
542 | 542 | error_dict=dict(group_parent_id=msg) |
|
543 | 543 | ) |
|
544 | 544 | |
|
545 | 545 | return _validator |
|
546 | 546 | |
|
547 | 547 | |
|
548 | 548 | def ValidPerms(type_='repo'): |
|
549 | 549 | if type_ == 'repo_group': |
|
550 | 550 | EMPTY_PERM = 'group.none' |
|
551 | 551 | elif type_ == 'repo': |
|
552 | 552 | EMPTY_PERM = 'repository.none' |
|
553 | 553 | elif type_ == 'user_group': |
|
554 | 554 | EMPTY_PERM = 'usergroup.none' |
|
555 | 555 | |
|
556 | 556 | class _validator(formencode.validators.FancyValidator): |
|
557 | 557 | messages = { |
|
558 | 558 | 'perm_new_member_name': |
|
559 | 559 | _(u'This username or user group name is not valid') |
|
560 | 560 | } |
|
561 | 561 | |
|
562 | 562 | def to_python(self, value, state): |
|
563 | 563 | perms_update = OrderedSet() |
|
564 | 564 | perms_new = OrderedSet() |
|
565 | 565 | # build a list of permission to update and new permission to create |
|
566 | 566 | |
|
567 | 567 | #CLEAN OUT ORG VALUE FROM NEW MEMBERS, and group them using |
|
568 | 568 | new_perms_group = defaultdict(dict) |
|
569 | 569 | for k, v in value.copy().iteritems(): |
|
570 | 570 | if k.startswith('perm_new_member'): |
|
571 | 571 | del value[k] |
|
572 | 572 | _type, part = k.split('perm_new_member_') |
|
573 | 573 | args = part.split('_') |
|
574 | 574 | if len(args) == 1: |
|
575 | 575 | new_perms_group[args[0]]['perm'] = v |
|
576 | 576 | elif len(args) == 2: |
|
577 | 577 | _key, pos = args |
|
578 | 578 | new_perms_group[pos][_key] = v |
|
579 | 579 | |
|
580 | 580 | # fill new permissions in order of how they were added |
|
581 | 581 | for k in sorted(map(int, new_perms_group.keys())): |
|
582 | 582 | perm_dict = new_perms_group[str(k)] |
|
583 | 583 | new_member = perm_dict.get('name') |
|
584 | 584 | new_perm = perm_dict.get('perm') |
|
585 | 585 | new_type = perm_dict.get('type') |
|
586 | 586 | if new_member and new_perm and new_type: |
|
587 | 587 | perms_new.add((new_member, new_perm, new_type)) |
|
588 | 588 | |
|
589 | 589 | for k, v in value.iteritems(): |
|
590 | 590 | if k.startswith('u_perm_') or k.startswith('g_perm_'): |
|
591 | 591 | member = k[7:] |
|
592 | 592 | t = {'u': 'user', |
|
593 | 593 | 'g': 'users_group' |
|
594 | 594 | }[k[0]] |
|
595 | 595 | if member == 'default': |
|
596 | 596 | if str2bool(value.get('repo_private')): |
|
597 | 597 | # set none for default when updating to |
|
598 | 598 | # private repo protects agains form manipulation |
|
599 | 599 | v = EMPTY_PERM |
|
600 | 600 | perms_update.add((member, v, t)) |
|
601 | 601 | |
|
602 | 602 | value['perms_updates'] = list(perms_update) |
|
603 | 603 | value['perms_new'] = list(perms_new) |
|
604 | 604 | |
|
605 | 605 | # update permissions |
|
606 | 606 | for k, v, t in perms_new: |
|
607 | 607 | try: |
|
608 | 608 | if t is 'user': |
|
609 | 609 | self.user_db = User.query()\ |
|
610 | 610 | .filter(User.active == True)\ |
|
611 | 611 | .filter(User.username == k).one() |
|
612 | 612 | if t is 'users_group': |
|
613 | 613 | self.user_db = UserGroup.query()\ |
|
614 | 614 | .filter(UserGroup.users_group_active == True)\ |
|
615 | 615 | .filter(UserGroup.users_group_name == k).one() |
|
616 | 616 | |
|
617 | 617 | except Exception: |
|
618 | 618 | log.exception('Updated permission failed') |
|
619 | 619 | msg = M(self, 'perm_new_member_type', state) |
|
620 | 620 | raise formencode.Invalid(msg, value, state, |
|
621 | 621 | error_dict=dict(perm_new_member_name=msg) |
|
622 | 622 | ) |
|
623 | 623 | return value |
|
624 | 624 | return _validator |
|
625 | 625 | |
|
626 | 626 | |
|
627 | 627 | def ValidSettings(): |
|
628 | 628 | class _validator(formencode.validators.FancyValidator): |
|
629 | 629 | def _to_python(self, value, state): |
|
630 | 630 | # settings form for users that are not admin |
|
631 | 631 | # can't edit certain parameters, it's extra backup if they mangle |
|
632 | 632 | # with forms |
|
633 | 633 | |
|
634 | 634 | forbidden_params = [ |
|
635 | 635 | 'user', 'repo_type', 'repo_enable_locking', |
|
636 | 636 | 'repo_enable_downloads', 'repo_enable_statistics' |
|
637 | 637 | ] |
|
638 | 638 | |
|
639 | 639 | for param in forbidden_params: |
|
640 | 640 | if param in value: |
|
641 | 641 | del value[param] |
|
642 | 642 | return value |
|
643 | 643 | |
|
644 | 644 | def validate_python(self, value, state): |
|
645 | 645 | pass |
|
646 | 646 | return _validator |
|
647 | 647 | |
|
648 | 648 | |
|
649 | 649 | def ValidPath(): |
|
650 | 650 | class _validator(formencode.validators.FancyValidator): |
|
651 | 651 | messages = { |
|
652 | 652 | 'invalid_path': _(u'This is not a valid path') |
|
653 | 653 | } |
|
654 | 654 | |
|
655 | 655 | def validate_python(self, value, state): |
|
656 | 656 | if not os.path.isdir(value): |
|
657 | 657 | msg = M(self, 'invalid_path', state) |
|
658 | 658 | raise formencode.Invalid(msg, value, state, |
|
659 | 659 | error_dict=dict(paths_root_path=msg) |
|
660 | 660 | ) |
|
661 | 661 | return _validator |
|
662 | 662 | |
|
663 | 663 | |
|
def UniqSystemEmail(old_data=None):
    """
    Return a validator checking that an e-mail address is not already taken.

    :param old_data: optional dict with the user's current data; when the
        submitted e-mail equals ``old_data['email']`` (case-insensitively)
        the uniqueness lookup is skipped, so a user can re-save his own
        address without tripping the check.
    """
    # BUG FIX: the previous signature used a mutable default (``old_data={}``),
    # which is shared across all calls; ``None`` + local fallback is the safe,
    # backward-compatible idiom.
    old_data = old_data or {}

    class _validator(formencode.validators.FancyValidator):
        messages = {
            'email_taken': _(u'This e-mail address is already taken')
        }

        def _to_python(self, value, state):
            # e-mails are compared case-insensitively; normalize early
            return value.lower()

        def validate_python(self, value, state):
            # only query the DB when the address actually changed
            if (old_data.get('email') or '').lower() != value:
                user = User.get_by_email(value, case_insensitive=True)
                if user:
                    msg = M(self, 'email_taken', state)
                    raise formencode.Invalid(msg, value, state,
                        error_dict=dict(email=msg)
                    )
    return _validator
|
682 | 682 | |
|
683 | 683 | |
|
def ValidSystemEmail():
    class _validator(formencode.validators.FancyValidator):
        """Validator requiring that an account with the e-mail exists."""

        messages = {
            'non_existing_email': _(u'e-mail "%(email)s" does not exist.')
        }

        def _to_python(self, value, state):
            # normalize for the case-insensitive lookup below
            return value.lower()

        def validate_python(self, value, state):
            if User.get_by_email(value, case_insensitive=True) is None:
                msg = M(self, 'non_existing_email', state, email=value)
                raise formencode.Invalid(
                    msg, value, state, error_dict={'email': msg}
                )
    return _validator
|
702 | 702 | |
|
703 | 703 | |
|
def LdapLibValidator():
    class _validator(formencode.validators.FancyValidator):
        """Validator that fails when the python-ldap library is missing."""

        messages = {}

        def validate_python(self, value, state):
            try:
                import ldap
                ldap  # pyflakes silence !
            except ImportError:
                # surface a domain-specific error instead of ImportError
                raise LdapImportError()
    return _validator
|
718 | 718 | |
|
719 | 719 | |
|
def AttrLoginValidator():
    class _validator(formencode.validators.FancyValidator):
        """Validator for the LDAP login attribute (the CN equivalent of a username)."""

        messages = {
            'invalid_cn':
                _(u'The LDAP Login attribute of the CN must be specified - '
                  'this is the name of the attribute that is equivalent '
                  'to "username"')
        }
        # an empty submission gets the same explanatory message
        messages['empty'] = messages['invalid_cn']

    return _validator
|
731 | 731 | |
|
732 | 732 | |
|
def NotReviewedRevisions(repo_id):
    class _validator(formencode.validators.FancyValidator):
        """Reject revisions that are already reviewed or bound to a pull request."""

        messages = {
            'rev_already_reviewed':
                _(u'Revisions %(revs)s are already part of pull request '
                  'or have set status')
        }

        def validate_python(self, value, state):
            # look up any recorded status for the submitted revisions
            # within this repository
            statuses = (ChangesetStatus.query()
                        .filter(ChangesetStatus.revision.in_(value))
                        .filter(ChangesetStatus.repo_id == repo_id)
                        .all())

            offending = []
            for status in statuses:
                if status.pull_request_id:
                    offending.append(('pull_req', status.revision[:12]))
                elif status.status:
                    offending.append(('status', status.revision[:12]))

            if offending:
                revs = ','.join(short for _kind, short in offending)
                msg = M(self, 'rev_already_reviewed', state, revs=revs)
                raise formencode.Invalid(
                    msg, value, state, error_dict={'revisions': revs}
                )
    return _validator
|
764 | 764 | |
|
765 | 765 | |
|
def ValidIp():
    class _validator(CIDR):
        """Validator accepting a single IPv4/IPv6 address or a CIDR range.

        Bare addresses are normalized to host networks (/32 for IPv4,
        /128 for IPv6).
        """

        messages = dict(
            badFormat=_('Please enter a valid IPv4 or IpV6 address'),
            illegalBits=_('The network size (bits) must be within the range'
                          ' of 0-32 (not %(bits)r)')
        )

        def to_python(self, value, state):
            v = super(_validator, self).to_python(value, state)
            v = v.strip()
            net = ipaddr.IPNetwork(address=v)
            # the submitted value carried no explicit mask: append the
            # host mask appropriate for the address family
            if '/' not in value:
                if isinstance(net, ipaddr.IPv4Network):
                    v += '/32'
                elif isinstance(net, ipaddr.IPv6Network):
                    v += '/128'
            return v

        def validate_python(self, value, state):
            try:
                # this raises a ValueError if address is not IpV4 or IpV6
                ipaddr.IPNetwork(address=value.strip())
            except ValueError:
                raise formencode.Invalid(self.message('badFormat', state),
                                         value, state)
    return _validator
|
798 | 798 | |
|
799 | 799 | |
|
def FieldKey():
    class _validator(formencode.validators.FancyValidator):
        """Validator for extra-field key names: letters, digits, '_' and '-' only."""

        messages = dict(
            badFormat=_('Key name can only consist of letters, '
                        'underscore, dash or numbers')
        )

        def validate_python(self, value, state):
            # re.match anchors at the start; '$' anchors the end
            if re.match(r'[a-zA-Z0-9_-]+$', value) is None:
                raise formencode.Invalid(
                    self.message('badFormat', state), value, state
                )
    return _validator
|
812 | 812 | |
|
813 | 813 | |
|
def BasePath():
    class _validator(formencode.validators.FancyValidator):
        """Validator rejecting filenames that contain a directory component."""

        messages = dict(
            badPath=_('Filename cannot be inside a directory')
        )

        def _to_python(self, value, state):
            # value passes through unchanged; only validation applies
            return value

        def validate_python(self, value, state):
            # a bare filename equals its own basename
            if os.path.basename(value) != value:
                raise formencode.Invalid(
                    self.message('badPath', state), value, state
                )
    return _validator
@@ -1,558 +1,558 b'' | |||
|
1 | 1 | from __future__ import with_statement |
|
2 | 2 | |
|
3 | 3 | import os |
|
4 | 4 | from rhodecode.lib.vcs.backends.hg import MercurialRepository, MercurialChangeset |
|
5 | 5 | from rhodecode.lib.vcs.exceptions import RepositoryError, VCSError, NodeDoesNotExistError |
|
6 | 6 | from rhodecode.lib.vcs.nodes import NodeKind, NodeState |
|
7 | 7 | from rhodecode.tests.vcs.conf import PACKAGE_DIR, TEST_HG_REPO, TEST_HG_REPO_CLONE, \ |
|
8 | 8 | TEST_HG_REPO_PULL |
|
9 | 9 | from rhodecode.lib.vcs.utils.compat import unittest |
|
10 | 10 | |
|
11 | 11 | |
|
# Use only clean mercurial's ui
from rhodecode.lib.vcs.utils.hgcompat import mercurial
mercurial.scmutil.rcpath()
# rcpath() populates the private _rcpath cache; trim it to the first
# (system-wide) entry so per-user hgrc files cannot influence the tests.
# NOTE(review): relies on mercurial.scmutil internals — verify against the
# pinned mercurial version.
if mercurial.scmutil._rcpath:
    mercurial.scmutil._rcpath = mercurial.scmutil._rcpath[:1]
|
17 | 17 | |
|
18 | 18 | |
|
class MercurialRepositoryTest(unittest.TestCase):
    """Tests for repository-level operations of the Mercurial backend:
    cloning, pulling, revision listing/ordering, branches, tags and
    basic changeset/node access against the fixture repo TEST_HG_REPO."""

    def __check_for_existing_repo(self):
        # clone target must not exist, otherwise the clone tests would
        # silently operate on stale state
        if os.path.exists(TEST_HG_REPO_CLONE):
            self.fail('Cannot test mercurial clone repo as location %s already '
                      'exists. You should manually remove it first.'
                      % TEST_HG_REPO_CLONE)

    def setUp(self):
        # fresh repository object per test; the on-disk fixture is shared
        self.repo = MercurialRepository(TEST_HG_REPO)

    def test_wrong_repo_path(self):
        wrong_repo_path = '/tmp/errorrepo'
        self.assertRaises(RepositoryError, MercurialRepository, wrong_repo_path)

    def test_unicode_path_repo(self):
        self.assertRaises(VCSError,lambda:MercurialRepository(u'iShouldFail'))

    def test_repo_clone(self):
        self.__check_for_existing_repo()
        repo = MercurialRepository(TEST_HG_REPO)
        repo_clone = MercurialRepository(TEST_HG_REPO_CLONE,
            src_url=TEST_HG_REPO, update_after_clone=True)
        self.assertEqual(len(repo.revisions), len(repo_clone.revisions))
        # Checking hashes of changesets should be enough
        for changeset in repo.get_changesets():
            raw_id = changeset.raw_id
            self.assertEqual(raw_id, repo_clone.get_changeset(raw_id).raw_id)

    def test_repo_clone_with_update(self):
        repo = MercurialRepository(TEST_HG_REPO)
        repo_clone = MercurialRepository(TEST_HG_REPO_CLONE + '_w_update',
            src_url=TEST_HG_REPO, update_after_clone=True)
        self.assertEqual(len(repo.revisions), len(repo_clone.revisions))

        #check if current workdir was updated
        self.assertEqual(os.path.isfile(os.path.join(TEST_HG_REPO_CLONE \
                                        + '_w_update',
                                        'MANIFEST.in')), True,)

    def test_repo_clone_without_update(self):
        repo = MercurialRepository(TEST_HG_REPO)
        repo_clone = MercurialRepository(TEST_HG_REPO_CLONE + '_wo_update',
            src_url=TEST_HG_REPO, update_after_clone=False)
        self.assertEqual(len(repo.revisions), len(repo_clone.revisions))
        # no update -> working directory stays empty
        self.assertEqual(os.path.isfile(os.path.join(TEST_HG_REPO_CLONE \
                                        + '_wo_update',
                                        'MANIFEST.in')), False,)

    def test_pull(self):
        if os.path.exists(TEST_HG_REPO_PULL):
            self.fail('Cannot test mercurial pull command as location %s '
                      'already exists. You should manually remove it first'
                      % TEST_HG_REPO_PULL)
        repo_new = MercurialRepository(TEST_HG_REPO_PULL, create=True)
        self.assertTrue(len(self.repo.revisions) > len(repo_new.revisions))

        # pulling brings the new repo up to the same number of revisions;
        # reopen to refresh cached revision data
        repo_new.pull(self.repo.path)
        repo_new = MercurialRepository(TEST_HG_REPO_PULL)
        self.assertTrue(len(self.repo.revisions) == len(repo_new.revisions))

    def test_revisions(self):
        # there are 21 revisions at bitbucket now
        # so we can assume they would be available from now on
        subset = set(['b986218ba1c9b0d6a259fac9b050b1724ed8e545',
                      '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
                      '6cba7170863a2411822803fa77a0a264f1310b35',
                      '56349e29c2af3ac913b28bde9a2c6154436e615b',
                      '2dda4e345facb0ccff1a191052dd1606dba6781d',
                      '6fff84722075f1607a30f436523403845f84cd9e',
                      '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
                      '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
                      'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
                      'be90031137367893f1c406e0a8683010fd115b79',
                      'db8e58be770518cbb2b1cdfa69146e47cd481481',
                      '84478366594b424af694a6c784cb991a16b87c21',
                      '17f8e105dddb9f339600389c6dc7175d395a535c',
                      '20a662e756499bde3095ffc9bc0643d1def2d0eb',
                      '2e319b85e70a707bba0beff866d9f9de032aa4f9',
                      '786facd2c61deb9cf91e9534735124fb8fc11842',
                      '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
                      'aa6a0de05b7612707db567078e130a6cd114a9a7',
                      'eada5a770da98ab0dd7325e29d00e0714f228d09'
                      ])
        self.assertTrue(subset.issubset(set(self.repo.revisions)))


        # check if we have the proper order of revisions
        org = ['b986218ba1c9b0d6a259fac9b050b1724ed8e545',
               '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
               '6cba7170863a2411822803fa77a0a264f1310b35',
               '56349e29c2af3ac913b28bde9a2c6154436e615b',
               '2dda4e345facb0ccff1a191052dd1606dba6781d',
               '6fff84722075f1607a30f436523403845f84cd9e',
               '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
               '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
               'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
               'be90031137367893f1c406e0a8683010fd115b79',
               'db8e58be770518cbb2b1cdfa69146e47cd481481',
               '84478366594b424af694a6c784cb991a16b87c21',
               '17f8e105dddb9f339600389c6dc7175d395a535c',
               '20a662e756499bde3095ffc9bc0643d1def2d0eb',
               '2e319b85e70a707bba0beff866d9f9de032aa4f9',
               '786facd2c61deb9cf91e9534735124fb8fc11842',
               '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
               'aa6a0de05b7612707db567078e130a6cd114a9a7',
               'eada5a770da98ab0dd7325e29d00e0714f228d09',
               '2c1885c735575ca478bf9e17b0029dca68824458',
               'd9bcd465040bf869799b09ad732c04e0eea99fe9',
               '469e9c847fe1f6f7a697b8b25b4bc5b48780c1a7',
               '4fb8326d78e5120da2c7468dcf7098997be385da',
               '62b4a097164940bd66030c4db51687f3ec035eed',
               '536c1a19428381cfea92ac44985304f6a8049569',
               '965e8ab3c44b070cdaa5bf727ddef0ada980ecc4',
               '9bb326a04ae5d98d437dece54be04f830cf1edd9',
               'f8940bcb890a98c4702319fbe36db75ea309b475',
               'ff5ab059786ebc7411e559a2cc309dfae3625a3b',
               '6b6ad5f82ad5bb6190037671bd254bd4e1f4bf08',
               'ee87846a61c12153b51543bf860e1026c6d3dcba', ]
        self.assertEqual(org, self.repo.revisions[:31])

    def test_iter_slice(self):
        # slicing the repo object must agree with slicing its iterator
        sliced = list(self.repo[:10])
        itered = list(self.repo)[:10]
        self.assertEqual(sliced, itered)

    def test_slicing(self):
        #4 1 5 10 95
        for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
                                 (10, 20, 10), (5, 100, 95)]:
            revs = list(self.repo[sfrom:sto])
            self.assertEqual(len(revs), size)
            self.assertEqual(revs[0], self.repo.get_changeset(sfrom))
            self.assertEqual(revs[-1], self.repo.get_changeset(sto - 1))

    def test_branches(self):
        # TODO: Need more tests here

        #active branches
        self.assertTrue('default' in self.repo.branches)
        self.assertTrue('stable' in self.repo.branches)

        # closed
        self.assertTrue('git' in self.repo._get_branches(closed=True))
        self.assertTrue('web' in self.repo._get_branches(closed=True))

        # every branch head must resolve to a MercurialChangeset
        for name, id in self.repo.branches.items():
            self.assertTrue(isinstance(
                self.repo.get_changeset(id), MercurialChangeset))

    def test_tip_in_tags(self):
        # tip is always a tag
        self.assertIn('tip', self.repo.tags)

    def test_tip_changeset_in_tags(self):
        tip = self.repo.get_changeset()
        self.assertEqual(self.repo.tags['tip'], tip.raw_id)

    def test_initial_changeset(self):
        # revision 0 of the fixture: known message, author and file layout

        init_chset = self.repo.get_changeset(0)
        self.assertEqual(init_chset.message, 'initial import')
        self.assertEqual(init_chset.author,
            'Marcin Kuzminski <marcin@python-blog.com>')
        self.assertEqual(sorted(init_chset._file_paths),
            sorted([
                'vcs/__init__.py',
                'vcs/backends/BaseRepository.py',
                'vcs/backends/__init__.py',
            ])
        )
        self.assertEqual(sorted(init_chset._dir_paths),
            sorted(['', 'vcs', 'vcs/backends']))

        self.assertRaises(NodeDoesNotExistError, init_chset.get_node, path='foobar')

        # directory lookup works with and without a trailing slash
        node = init_chset.get_node('vcs/')
        self.assertTrue(hasattr(node, 'kind'))
        self.assertEqual(node.kind, NodeKind.DIR)

        node = init_chset.get_node('vcs')
        self.assertTrue(hasattr(node, 'kind'))
        self.assertEqual(node.kind, NodeKind.DIR)

        node = init_chset.get_node('vcs/__init__.py')
        self.assertTrue(hasattr(node, 'kind'))
        self.assertEqual(node.kind, NodeKind.FILE)

    def test_not_existing_changeset(self):
        #rawid
        self.assertRaises(RepositoryError, self.repo.get_changeset,
            'abcd' * 10)
        #shortid
        self.assertRaises(RepositoryError, self.repo.get_changeset,
            'erro' * 4)
        #numeric
        self.assertRaises(RepositoryError, self.repo.get_changeset,
            self.repo.count() + 1)


        # Small chance we ever get to this one
        revision = pow(2, 30)
        self.assertRaises(RepositoryError, self.repo.get_changeset, revision)

    def test_changeset10(self):
        # README.rst content at revision 10 is pinned verbatim

        chset10 = self.repo.get_changeset(10)
        README = """===
VCS
===

Various Version Control System management abstraction layer for Python.

Introduction
------------

TODO: To be written...

"""
        node = chset10.get_node('README.rst')
        self.assertEqual(node.kind, NodeKind.FILE)
        self.assertEqual(node.content, README)
|
241 | 241 | |
|
242 | 242 | |
|
class MercurialChangesetTest(unittest.TestCase):
    """Tests for changeset-level behavior of the Mercurial backend:
    equality, node traversal and caching, branches/tags per changeset,
    file sizes/history/annotation, added/changed/removed state and
    unicode guarantees, all against the fixture repo TEST_HG_REPO."""

    def setUp(self):
        self.repo = MercurialRepository(TEST_HG_REPO)

    def _test_equality(self, changeset):
        # a changeset must compare equal to a fresh lookup of its revision
        revision = changeset.revision
        self.assertEqual(changeset, self.repo.get_changeset(revision))

    def test_equality(self):
        # NOTE(review): explicit self.setUp() is redundant — unittest
        # already calls setUp before each test
        self.setUp()
        revs = [0, 10, 20]
        changesets = [self.repo.get_changeset(rev) for rev in revs]
        for changeset in changesets:
            self._test_equality(changeset)

    def test_default_changeset(self):
        # 'tip', no argument, revision=None and the [-1:] slice must all
        # resolve to the same changeset
        tip = self.repo.get_changeset('tip')
        self.assertEqual(tip, self.repo.get_changeset())
        self.assertEqual(tip, self.repo.get_changeset(revision=None))
        self.assertEqual(tip, list(self.repo[-1:])[0])

    def test_root_node(self):
        tip = self.repo.get_changeset('tip')
        self.assertTrue(tip.root is tip.get_node(''))

    def test_lazy_fetch(self):
        """
        Test if changeset's nodes expands and are cached as we walk through
        the revision. This test is somewhat hard to write as order of tests
        is a key here. Written by running command after command in a shell.
        """
        # NOTE(review): redundant self.setUp() call, see test_equality
        self.setUp()
        chset = self.repo.get_changeset(45)
        self.assertTrue(len(chset.nodes) == 0)
        root = chset.root
        self.assertTrue(len(chset.nodes) == 1)
        self.assertTrue(len(root.nodes) == 8)
        # accessing root.nodes updates chset.nodes
        self.assertTrue(len(chset.nodes) == 9)

        docs = root.get_node('docs')
        # we haven't yet accessed anything new as docs dir was already cached
        self.assertTrue(len(chset.nodes) == 9)
        self.assertTrue(len(docs.nodes) == 8)
        # accessing docs.nodes updates chset.nodes
        self.assertTrue(len(chset.nodes) == 17)

        # identity (not just equality) proves the cache returns one object
        self.assertTrue(docs is chset.get_node('docs'))
        self.assertTrue(docs is root.nodes[0])
        self.assertTrue(docs is root.dirs[0])
        self.assertTrue(docs is chset.get_node('docs'))

    def test_nodes_with_changeset(self):
        # NOTE(review): redundant self.setUp() call, see test_equality
        self.setUp()
        chset = self.repo.get_changeset(45)
        root = chset.root
        docs = root.get_node('docs')
        self.assertTrue(docs is chset.get_node('docs'))
        api = docs.get_node('api')
        self.assertTrue(api is chset.get_node('docs/api'))
        index = api.get_node('index.rst')
        self.assertTrue(index is chset.get_node('docs/api/index.rst'))
        # chained get_node calls must hit the same cached objects
        self.assertTrue(index is chset.get_node('docs')\
            .get_node('api')\
            .get_node('index.rst'))

    def test_branch_and_tags(self):
        chset0 = self.repo.get_changeset(0)
        self.assertEqual(chset0.branch, 'default')
        self.assertEqual(chset0.tags, [])

        chset10 = self.repo.get_changeset(10)
        self.assertEqual(chset10.branch, 'default')
        self.assertEqual(chset10.tags, [])

        chset44 = self.repo.get_changeset(44)
        self.assertEqual(chset44.branch, 'web')

        tip = self.repo.get_changeset('tip')
        self.assertTrue('tip' in tip.tags)

    def _test_file_size(self, revision, path, size):
        # helper: assert the byte size of *path* at *revision*
        node = self.repo.get_changeset(revision).get_node(path)
        self.assertTrue(node.is_file())
        self.assertEqual(node.size, size)

    def test_file_size(self):
        to_check = (
            (10, 'setup.py', 1068),
            (20, 'setup.py', 1106),
            (60, 'setup.py', 1074),

            (10, 'vcs/backends/base.py', 2921),
            (20, 'vcs/backends/base.py', 3936),
            (60, 'vcs/backends/base.py', 6189),
        )
        for revision, path, size in to_check:
            self._test_file_size(revision, path, size)

    def test_file_history(self):
        # we can only check if those revisions are present in the history
        # as we cannot update this test every time file is changed
        files = {
            'setup.py': [7, 18, 45, 46, 47, 69, 77],
            'vcs/nodes.py': [7, 8, 24, 26, 30, 45, 47, 49, 56, 57, 58, 59, 60,
                61, 73, 76],
            'vcs/backends/hg.py': [4, 5, 6, 11, 12, 13, 14, 15, 16, 21, 22, 23,
                26, 27, 28, 30, 31, 33, 35, 36, 37, 38, 39, 40, 41, 44, 45, 47,
                48, 49, 53, 54, 55, 58, 60, 61, 67, 68, 69, 70, 73, 77, 78, 79,
                82],
        }
        for path, revs in files.items():
            tip = self.repo.get_changeset(revs[-1])
            node = tip.get_node(path)
            node_revs = [chset.revision for chset in node.history]
            self.assertTrue(set(revs).issubset(set(node_revs)),
                "We assumed that %s is subset of revisions for which file %s "
                "has been changed, and history of that node returned: %s"
                % (revs, path, node_revs))

    def test_file_annotate(self):
        # expected per-line originating changeset revisions, keyed by
        # file path and then by the revision being annotated
        files = {
            'vcs/backends/__init__.py':
                {89: {'lines_no': 31,
                      'changesets': [32, 32, 61, 32, 32, 37, 32, 32, 32, 44,
                                     37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
                                     32, 32, 32, 32, 37, 32, 37, 37, 32,
                                     32, 32]},
                 20: {'lines_no': 1,
                      'changesets': [4]},
                 55: {'lines_no': 31,
                      'changesets': [32, 32, 45, 32, 32, 37, 32, 32, 32, 44,
                                     37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
                                     32, 32, 32, 32, 37, 32, 37, 37, 32,
                                     32, 32]}},
            'vcs/exceptions.py':
                {89: {'lines_no': 18,
                      'changesets': [16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
                                     16, 16, 17, 16, 16, 18, 18, 18]},
                 20: {'lines_no': 18,
                      'changesets': [16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
                                     16, 16, 17, 16, 16, 18, 18, 18]},
                 55: {'lines_no': 18, 'changesets': [16, 16, 16, 16, 16, 16,
                                                     16, 16, 16, 16, 16, 16,
                                                     17, 16, 16, 18, 18, 18]}},
            'MANIFEST.in': {89: {'lines_no': 5,
                                 'changesets': [7, 7, 7, 71, 71]},
                            20: {'lines_no': 3,
                                 'changesets': [7, 7, 7]},
                            55: {'lines_no': 3,
                                 'changesets': [7, 7, 7]}}}

        for fname, revision_dict in files.items():
            for rev, data in revision_dict.items():
                cs = self.repo.get_changeset(rev)
                # annotate rows are (line_no, changeset_id, changeset_getter)
                l1_1 = [x[1] for x in cs.get_file_annotate(fname)]
                l1_2 = [x[2]().raw_id for x in cs.get_file_annotate(fname)]
                self.assertEqual(l1_1, l1_2)
                # NOTE(review): 'l1 = l1_2 = [...]' rebinds l1_2 too;
                # only l1 is actually compared below
                l1 = l1_2 = [x[2]().revision for x in cs.get_file_annotate(fname)]
                l2 = files[fname][rev]['changesets']
                self.assertTrue(l1 == l2 , "The lists of revision for %s@rev%s"
                                "from annotation list should match each other,"
                                "got \n%s \nvs \n%s " % (fname, rev, l1, l2))

    def test_changeset_state(self):
        """
        Tests which files have been added/changed/removed at particular revision
        """

        # rev 46ad32a4f974:
        # hg st --rev 46ad32a4f974
        # changed: 13
        # added: 20
        # removed: 1
        changed = set(['.hgignore'
            , 'README.rst' , 'docs/conf.py' , 'docs/index.rst' , 'setup.py'
            , 'tests/test_hg.py' , 'tests/test_nodes.py' , 'vcs/__init__.py'
            , 'vcs/backends/__init__.py' , 'vcs/backends/base.py'
            , 'vcs/backends/hg.py' , 'vcs/nodes.py' , 'vcs/utils/__init__.py'])

        added = set(['docs/api/backends/hg.rst'
            , 'docs/api/backends/index.rst' , 'docs/api/index.rst'
            , 'docs/api/nodes.rst' , 'docs/api/web/index.rst'
            , 'docs/api/web/simplevcs.rst' , 'docs/installation.rst'
            , 'docs/quickstart.rst' , 'setup.cfg' , 'vcs/utils/baseui_config.py'
            , 'vcs/utils/web.py' , 'vcs/web/__init__.py' , 'vcs/web/exceptions.py'
            , 'vcs/web/simplevcs/__init__.py' , 'vcs/web/simplevcs/exceptions.py'
            , 'vcs/web/simplevcs/middleware.py' , 'vcs/web/simplevcs/models.py'
            , 'vcs/web/simplevcs/settings.py' , 'vcs/web/simplevcs/utils.py'
            , 'vcs/web/simplevcs/views.py'])

        removed = set(['docs/api.rst'])

        chset64 = self.repo.get_changeset('46ad32a4f974')
        self.assertEqual(set((node.path for node in chset64.added)), added)
        self.assertEqual(set((node.path for node in chset64.changed)), changed)
        self.assertEqual(set((node.path for node in chset64.removed)), removed)

        # rev b090f22d27d6:
        # hg st --rev b090f22d27d6
        # changed: 13
        # added: 20
        # removed: 1
        chset88 = self.repo.get_changeset('b090f22d27d6')
        self.assertEqual(set((node.path for node in chset88.added)), set())
        self.assertEqual(set((node.path for node in chset88.changed)),
            set(['.hgignore']))
        self.assertEqual(set((node.path for node in chset88.removed)), set())
        #
        # 85:
        # added: 2 ['vcs/utils/diffs.py', 'vcs/web/simplevcs/views/diffs.py']
        # changed: 4 ['vcs/web/simplevcs/models.py', ...]
        # removed: 1 ['vcs/utils/web.py']
        chset85 = self.repo.get_changeset(85)
        self.assertEqual(set((node.path for node in chset85.added)), set([
            'vcs/utils/diffs.py',
            'vcs/web/simplevcs/views/diffs.py']))
        self.assertEqual(set((node.path for node in chset85.changed)), set([
            'vcs/web/simplevcs/models.py',
            'vcs/web/simplevcs/utils.py',
            'vcs/web/simplevcs/views/__init__.py',
            'vcs/web/simplevcs/views/repository.py',
            ]))
        self.assertEqual(set((node.path for node in chset85.removed)),
            set(['vcs/utils/web.py']))


    def test_files_state(self):
        """
        Tests state of FileNodes.
        """
        # NOTE(review): assertTrue(node.state, NodeState.ADDED) passes the
        # intended expectation as the *message* argument — it only checks
        # truthiness of node.state. The boolean asserts below still pin the
        # actual behavior; consider assertEqual for the state checks.
        chset = self.repo.get_changeset(85)
        node = chset.get_node('vcs/utils/diffs.py')
        self.assertTrue(node.state, NodeState.ADDED)
        self.assertTrue(node.added)
        self.assertFalse(node.changed)
        self.assertFalse(node.not_changed)
        self.assertFalse(node.removed)

        chset = self.repo.get_changeset(88)
        node = chset.get_node('.hgignore')
        self.assertTrue(node.state, NodeState.CHANGED)
        self.assertFalse(node.added)
        self.assertTrue(node.changed)
        self.assertFalse(node.not_changed)
        self.assertFalse(node.removed)

        chset = self.repo.get_changeset(85)
        node = chset.get_node('setup.py')
        self.assertTrue(node.state, NodeState.NOT_CHANGED)
        self.assertFalse(node.added)
        self.assertFalse(node.changed)
        self.assertTrue(node.not_changed)
        self.assertFalse(node.removed)

        # If node has REMOVED state then trying to fetch it would raise
        # ChangesetError exception
        chset = self.repo.get_changeset(2)
        path = 'vcs/backends/BaseRepository.py'
        self.assertRaises(NodeDoesNotExistError, chset.get_node, path)
        # but it would be one of ``removed`` (changeset's attribute)
        self.assertTrue(path in [rf.path for rf in chset.removed])

    def test_commit_message_is_unicode(self):
        for cm in self.repo:
            self.assertEqual(type(cm.message), unicode)

    def test_changeset_author_is_unicode(self):
        for cm in self.repo:
            self.assertEqual(type(cm.author), unicode)

    def test_repo_files_content_is_unicode(self):
        test_changeset = self.repo.get_changeset(100)
        for node in test_changeset.get_node('/'):
            if node.is_file():
                self.assertEqual(type(node.content), unicode)

    def test_wrong_path(self):
        # There is 'setup.py' in the root dir but not there:
        path = 'foo/bar/setup.py'
        self.assertRaises(VCSError, self.repo.get_changeset().get_node, path)


    def test_archival_file(self):
        #TODO:
        pass

    def test_archival_as_generator(self):
        #TODO:
        pass

    def test_archival_wrong_kind(self):
        tip = self.repo.get_changeset()
        self.assertRaises(VCSError, tip.fill_archive, kind='error')

    def test_archival_empty_prefix(self):
        #TODO:
        pass


    def test_author_email(self):
        self.assertEqual('marcin@python-blog.com',
            self.repo.get_changeset('b986218ba1c9').author_email)
        self.assertEqual('lukasz.balcerzak@python-center.pl',
            self.repo.get_changeset('3803844fdbd3').author_email)
        # changeset without an e-mail in its author field yields ''
        self.assertEqual('',
            self.repo.get_changeset('84478366594b').author_email)

    def test_author_username(self):
        self.assertEqual('Marcin Kuzminski',
            self.repo.get_changeset('b986218ba1c9').author_name)
        self.assertEqual('Lukasz Balcerzak',
            self.repo.get_changeset('3803844fdbd3').author_name)
        self.assertEqual('marcink',
            self.repo.get_changeset('84478366594b').author_name)
General Comments 0
You need to be logged in to leave comments.
Login now