Show More
@@ -1,53 +1,54 b'' | |||
|
1 | 1 | .. _upgrade: |
|
2 | 2 | |
|
3 | 3 | Upgrade |
|
4 | 4 | ======= |
|
5 | 5 | |
|
6 | 6 | Upgrading from Cheese Shop |
|
7 | 7 | -------------------------- |
|
8 | 8 | |
|
9 | 9 | .. note:: |
|
10 | 10 | Firstly, it is recommended that you **always** perform a database backup |
|
11 | 11 | before doing an upgrade. |
|
12 | 12 | |
|
13 | 13 | The easiest way to upgrade ``rhodecode`` is to run:: |
|
14 | 14 | |
|
15 | 15 | easy_install -U rhodecode |
|
16 | 16 | |
|
17 | 17 | Or:: |
|
18 | 18 | |
|
19 | 19 | pip install --upgrade rhodecode |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | Then make sure you run the following command from the installation directory:: |
|
23 | 23 | |
|
24 | 24 | paster make-config RhodeCode production.ini |
|
25 | 25 | |
|
26 | 26 | This will display any changes made by the new version of RhodeCode to your |
|
27 | 27 | current configuration. It will try to perform an automerge. It's always better |
|
28 | 28 | to make a backup of your configuration file before hand and recheck the |
|
29 | 29 | content after the automerge. |
|
30 | 30 | |
|
31 | 31 | .. note:: |
|
32 | The next steps only apply to upgrading from non bugfix releases eg. from | |
|
33 | any minor or major releases. Bugfix releases (eg. 1.1.2->1.1.3) will | |
|
34 | not have any database schema changes or whoosh library updates. | |
|
32 | Please always make sure your .ini files are upto date. Often errors are | |
|
33 | caused by missing params added in new versions. | |
|
34 | ||
|
35 | 35 | |
|
36 | 36 | It is also recommended that you rebuild the whoosh index after upgrading since |
|
37 | the new whoosh version could introduce some incompatible index changes. | |
|
37 | the new whoosh version could introduce some incompatible index changes. Please | |
|
38 | Read the changelog to see if there were any changes to whoosh. | |
|
38 | 39 | |
|
39 | 40 | |
|
40 | 41 | The final step is to upgrade the database. To do this simply run:: |
|
41 | 42 | |
|
42 | 43 | paster upgrade-db production.ini |
|
43 | 44 | |
|
44 | 45 | This will upgrade the schema and update some of the defaults in the database, |
|
45 | 46 | and will always recheck the settings of the application, if there are no new |
|
46 | 47 | options that need to be set. |
|
47 | 48 | |
|
48 | 49 | |
|
49 | 50 | .. _virtualenv: http://pypi.python.org/pypi/virtualenv |
|
50 | 51 | .. _python: http://www.python.org/ |
|
51 | 52 | .. _mercurial: http://mercurial.selenic.com/ |
|
52 | 53 | .. _celery: http://celeryproject.org/ |
|
53 | 54 | .. _rabbitmq: http://www.rabbitmq.com/ No newline at end of file |
@@ -1,367 +1,367 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """ |
|
3 | 3 | rhodecode.controllers.changeset |
|
4 | 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
5 | 5 | |
|
6 | 6 | changeset controller for pylons showoing changes beetween |
|
7 | 7 | revisions |
|
8 | 8 | |
|
9 | 9 | :created_on: Apr 25, 2010 |
|
10 | 10 | :author: marcink |
|
11 | 11 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> |
|
12 | 12 | :license: GPLv3, see COPYING for more details. |
|
13 | 13 | """ |
|
14 | 14 | # This program is free software: you can redistribute it and/or modify |
|
15 | 15 | # it under the terms of the GNU General Public License as published by |
|
16 | 16 | # the Free Software Foundation, either version 3 of the License, or |
|
17 | 17 | # (at your option) any later version. |
|
18 | 18 | # |
|
19 | 19 | # This program is distributed in the hope that it will be useful, |
|
20 | 20 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
21 | 21 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
22 | 22 | # GNU General Public License for more details. |
|
23 | 23 | # |
|
24 | 24 | # You should have received a copy of the GNU General Public License |
|
25 | 25 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
26 | 26 | import logging |
|
27 | 27 | import traceback |
|
28 | 28 | from collections import defaultdict |
|
29 | 29 | from webob.exc import HTTPForbidden |
|
30 | 30 | |
|
31 | 31 | from pylons import tmpl_context as c, url, request, response |
|
32 | 32 | from pylons.i18n.translation import _ |
|
33 | 33 | from pylons.controllers.util import redirect |
|
34 | 34 | from pylons.decorators import jsonify |
|
35 | 35 | |
|
36 | 36 | from rhodecode.lib.vcs.exceptions import RepositoryError, ChangesetError, \ |
|
37 | 37 | ChangesetDoesNotExistError |
|
38 | 38 | from rhodecode.lib.vcs.nodes import FileNode |
|
39 | 39 | |
|
40 | 40 | import rhodecode.lib.helpers as h |
|
41 | 41 | from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator |
|
42 | 42 | from rhodecode.lib.base import BaseRepoController, render |
|
43 | 43 | from rhodecode.lib.utils import EmptyChangeset |
|
44 | 44 | from rhodecode.lib.compat import OrderedDict |
|
45 | 45 | from rhodecode.lib import diffs |
|
46 | 46 | from rhodecode.model.db import ChangesetComment |
|
47 | 47 | from rhodecode.model.comment import ChangesetCommentsModel |
|
48 | 48 | from rhodecode.model.meta import Session |
|
49 | 49 | from rhodecode.lib.diffs import wrapped_diff |
|
50 | 50 | |
|
51 | 51 | log = logging.getLogger(__name__) |
|
52 | 52 | |
|
53 | 53 | |
|
54 | 54 | def anchor_url(revision, path): |
|
55 | 55 | fid = h.FID(revision, path) |
|
56 | return h.url.current(anchor=fid, **request.GET) | |
|
56 | return h.url.current(anchor=fid, **dict(request.GET)) | |
|
57 | 57 | |
|
58 | 58 | |
|
59 | 59 | def get_ignore_ws(fid, GET): |
|
60 | 60 | ig_ws_global = request.GET.get('ignorews') |
|
61 | 61 | ig_ws = filter(lambda k: k.startswith('WS'), GET.getall(fid)) |
|
62 | 62 | if ig_ws: |
|
63 | 63 | try: |
|
64 | 64 | return int(ig_ws[0].split(':')[-1]) |
|
65 | 65 | except: |
|
66 | 66 | pass |
|
67 | 67 | return ig_ws_global |
|
68 | 68 | |
|
69 | 69 | |
|
70 | 70 | def _ignorews_url(fileid=None): |
|
71 | 71 | |
|
72 | 72 | params = defaultdict(list) |
|
73 | 73 | lbl = _('show white space') |
|
74 | 74 | ig_ws = get_ignore_ws(fileid, request.GET) |
|
75 | 75 | ln_ctx = get_line_ctx(fileid, request.GET) |
|
76 | 76 | # global option |
|
77 | 77 | if fileid is None: |
|
78 | 78 | if ig_ws is None: |
|
79 | 79 | params['ignorews'] += [1] |
|
80 | 80 | lbl = _('ignore white space') |
|
81 | 81 | ctx_key = 'context' |
|
82 | 82 | ctx_val = ln_ctx |
|
83 | 83 | # per file options |
|
84 | 84 | else: |
|
85 | 85 | if ig_ws is None: |
|
86 | 86 | params[fileid] += ['WS:1'] |
|
87 | 87 | lbl = _('ignore white space') |
|
88 | 88 | |
|
89 | 89 | ctx_key = fileid |
|
90 | 90 | ctx_val = 'C:%s' % ln_ctx |
|
91 | 91 | # if we have passed in ln_ctx pass it along to our params |
|
92 | 92 | if ln_ctx: |
|
93 | 93 | params[ctx_key] += [ctx_val] |
|
94 | 94 | |
|
95 | 95 | params['anchor'] = fileid |
|
96 | 96 | img = h.image(h.url('/images/icons/text_strikethrough.png'), lbl, class_='icon') |
|
97 | 97 | return h.link_to(img, h.url.current(**params), title=lbl, class_='tooltip') |
|
98 | 98 | |
|
99 | 99 | |
|
100 | 100 | def get_line_ctx(fid, GET): |
|
101 | 101 | ln_ctx_global = request.GET.get('context') |
|
102 | 102 | ln_ctx = filter(lambda k: k.startswith('C'), GET.getall(fid)) |
|
103 | 103 | |
|
104 | 104 | if ln_ctx: |
|
105 | 105 | retval = ln_ctx[0].split(':')[-1] |
|
106 | 106 | else: |
|
107 | 107 | retval = ln_ctx_global |
|
108 | 108 | |
|
109 | 109 | try: |
|
110 | 110 | return int(retval) |
|
111 | 111 | except: |
|
112 | 112 | return |
|
113 | 113 | |
|
114 | 114 | |
|
115 | 115 | def _context_url(fileid=None): |
|
116 | 116 | """ |
|
117 | 117 | Generates url for context lines |
|
118 | 118 | |
|
119 | 119 | :param fileid: |
|
120 | 120 | """ |
|
121 | 121 | ig_ws = get_ignore_ws(fileid, request.GET) |
|
122 | 122 | ln_ctx = (get_line_ctx(fileid, request.GET) or 3) * 2 |
|
123 | 123 | |
|
124 | 124 | params = defaultdict(list) |
|
125 | 125 | |
|
126 | 126 | # global option |
|
127 | 127 | if fileid is None: |
|
128 | 128 | if ln_ctx > 0: |
|
129 | 129 | params['context'] += [ln_ctx] |
|
130 | 130 | |
|
131 | 131 | if ig_ws: |
|
132 | 132 | ig_ws_key = 'ignorews' |
|
133 | 133 | ig_ws_val = 1 |
|
134 | 134 | |
|
135 | 135 | # per file option |
|
136 | 136 | else: |
|
137 | 137 | params[fileid] += ['C:%s' % ln_ctx] |
|
138 | 138 | ig_ws_key = fileid |
|
139 | 139 | ig_ws_val = 'WS:%s' % 1 |
|
140 | 140 | |
|
141 | 141 | if ig_ws: |
|
142 | 142 | params[ig_ws_key] += [ig_ws_val] |
|
143 | 143 | |
|
144 | 144 | lbl = _('%s line context') % ln_ctx |
|
145 | 145 | |
|
146 | 146 | params['anchor'] = fileid |
|
147 | 147 | img = h.image(h.url('/images/icons/table_add.png'), lbl, class_='icon') |
|
148 | 148 | return h.link_to(img, h.url.current(**params), title=lbl, class_='tooltip') |
|
149 | 149 | |
|
150 | 150 | |
|
151 | 151 | class ChangesetController(BaseRepoController): |
|
152 | 152 | |
|
153 | 153 | @LoginRequired() |
|
154 | 154 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
155 | 155 | 'repository.admin') |
|
156 | 156 | def __before__(self): |
|
157 | 157 | super(ChangesetController, self).__before__() |
|
158 | 158 | c.affected_files_cut_off = 60 |
|
159 | 159 | |
|
160 | 160 | def index(self, revision): |
|
161 | 161 | |
|
162 | 162 | c.anchor_url = anchor_url |
|
163 | 163 | c.ignorews_url = _ignorews_url |
|
164 | 164 | c.context_url = _context_url |
|
165 | 165 | |
|
166 | 166 | #get ranges of revisions if preset |
|
167 | 167 | rev_range = revision.split('...')[:2] |
|
168 | 168 | enable_comments = True |
|
169 | 169 | try: |
|
170 | 170 | if len(rev_range) == 2: |
|
171 | 171 | enable_comments = False |
|
172 | 172 | rev_start = rev_range[0] |
|
173 | 173 | rev_end = rev_range[1] |
|
174 | 174 | rev_ranges = c.rhodecode_repo.get_changesets(start=rev_start, |
|
175 | 175 | end=rev_end) |
|
176 | 176 | else: |
|
177 | 177 | rev_ranges = [c.rhodecode_repo.get_changeset(revision)] |
|
178 | 178 | |
|
179 | 179 | c.cs_ranges = list(rev_ranges) |
|
180 | 180 | if not c.cs_ranges: |
|
181 | 181 | raise RepositoryError('Changeset range returned empty result') |
|
182 | 182 | |
|
183 | 183 | except (RepositoryError, ChangesetDoesNotExistError, Exception), e: |
|
184 | 184 | log.error(traceback.format_exc()) |
|
185 | 185 | h.flash(str(e), category='warning') |
|
186 | 186 | return redirect(url('home')) |
|
187 | 187 | |
|
188 | 188 | c.changes = OrderedDict() |
|
189 | 189 | |
|
190 | 190 | c.lines_added = 0 # count of lines added |
|
191 | 191 | c.lines_deleted = 0 # count of lines removes |
|
192 | 192 | |
|
193 | 193 | cumulative_diff = 0 |
|
194 | 194 | c.cut_off = False # defines if cut off limit is reached |
|
195 | 195 | |
|
196 | 196 | c.comments = [] |
|
197 | 197 | c.inline_comments = [] |
|
198 | 198 | c.inline_cnt = 0 |
|
199 | 199 | # Iterate over ranges (default changeset view is always one changeset) |
|
200 | 200 | for changeset in c.cs_ranges: |
|
201 | 201 | c.comments.extend(ChangesetCommentsModel()\ |
|
202 | 202 | .get_comments(c.rhodecode_db_repo.repo_id, |
|
203 | 203 | changeset.raw_id)) |
|
204 | 204 | inlines = ChangesetCommentsModel()\ |
|
205 | 205 | .get_inline_comments(c.rhodecode_db_repo.repo_id, |
|
206 | 206 | changeset.raw_id) |
|
207 | 207 | c.inline_comments.extend(inlines) |
|
208 | 208 | c.changes[changeset.raw_id] = [] |
|
209 | 209 | try: |
|
210 | 210 | changeset_parent = changeset.parents[0] |
|
211 | 211 | except IndexError: |
|
212 | 212 | changeset_parent = None |
|
213 | 213 | |
|
214 | 214 | #================================================================== |
|
215 | 215 | # ADDED FILES |
|
216 | 216 | #================================================================== |
|
217 | 217 | for node in changeset.added: |
|
218 | 218 | fid = h.FID(revision, node.path) |
|
219 | 219 | line_context_lcl = get_line_ctx(fid, request.GET) |
|
220 | 220 | ign_whitespace_lcl = get_ignore_ws(fid, request.GET) |
|
221 | 221 | lim = self.cut_off_limit |
|
222 | 222 | if cumulative_diff > self.cut_off_limit: |
|
223 | 223 | lim = -1 |
|
224 | 224 | size, cs1, cs2, diff, st = wrapped_diff(filenode_old=None, |
|
225 | 225 | filenode_new=node, |
|
226 | 226 | cut_off_limit=lim, |
|
227 | 227 | ignore_whitespace=ign_whitespace_lcl, |
|
228 | 228 | line_context=line_context_lcl, |
|
229 | 229 | enable_comments=enable_comments) |
|
230 | 230 | cumulative_diff += size |
|
231 | 231 | c.lines_added += st[0] |
|
232 | 232 | c.lines_deleted += st[1] |
|
233 | 233 | c.changes[changeset.raw_id].append(('added', node, diff, |
|
234 | 234 | cs1, cs2, st)) |
|
235 | 235 | |
|
236 | 236 | #================================================================== |
|
237 | 237 | # CHANGED FILES |
|
238 | 238 | #================================================================== |
|
239 | 239 | for node in changeset.changed: |
|
240 | 240 | try: |
|
241 | 241 | filenode_old = changeset_parent.get_node(node.path) |
|
242 | 242 | except ChangesetError: |
|
243 | 243 | log.warning('Unable to fetch parent node for diff') |
|
244 | 244 | filenode_old = FileNode(node.path, '', EmptyChangeset()) |
|
245 | 245 | |
|
246 | 246 | fid = h.FID(revision, node.path) |
|
247 | 247 | line_context_lcl = get_line_ctx(fid, request.GET) |
|
248 | 248 | ign_whitespace_lcl = get_ignore_ws(fid, request.GET) |
|
249 | 249 | lim = self.cut_off_limit |
|
250 | 250 | if cumulative_diff > self.cut_off_limit: |
|
251 | 251 | lim = -1 |
|
252 | 252 | size, cs1, cs2, diff, st = wrapped_diff(filenode_old=filenode_old, |
|
253 | 253 | filenode_new=node, |
|
254 | 254 | cut_off_limit=lim, |
|
255 | 255 | ignore_whitespace=ign_whitespace_lcl, |
|
256 | 256 | line_context=line_context_lcl, |
|
257 | 257 | enable_comments=enable_comments) |
|
258 | 258 | cumulative_diff += size |
|
259 | 259 | c.lines_added += st[0] |
|
260 | 260 | c.lines_deleted += st[1] |
|
261 | 261 | c.changes[changeset.raw_id].append(('changed', node, diff, |
|
262 | 262 | cs1, cs2, st)) |
|
263 | 263 | |
|
264 | 264 | #================================================================== |
|
265 | 265 | # REMOVED FILES |
|
266 | 266 | #================================================================== |
|
267 | 267 | for node in changeset.removed: |
|
268 | 268 | c.changes[changeset.raw_id].append(('removed', node, None, |
|
269 | 269 | None, None, (0, 0))) |
|
270 | 270 | |
|
271 | 271 | # count inline comments |
|
272 | 272 | for path, lines in c.inline_comments: |
|
273 | 273 | for comments in lines.values(): |
|
274 | 274 | c.inline_cnt += len(comments) |
|
275 | 275 | |
|
276 | 276 | if len(c.cs_ranges) == 1: |
|
277 | 277 | c.changeset = c.cs_ranges[0] |
|
278 | 278 | c.changes = c.changes[c.changeset.raw_id] |
|
279 | 279 | |
|
280 | 280 | return render('changeset/changeset.html') |
|
281 | 281 | else: |
|
282 | 282 | return render('changeset/changeset_range.html') |
|
283 | 283 | |
|
284 | 284 | def raw_changeset(self, revision): |
|
285 | 285 | |
|
286 | 286 | method = request.GET.get('diff', 'show') |
|
287 | 287 | ignore_whitespace = request.GET.get('ignorews') == '1' |
|
288 | 288 | line_context = request.GET.get('context', 3) |
|
289 | 289 | try: |
|
290 | 290 | c.scm_type = c.rhodecode_repo.alias |
|
291 | 291 | c.changeset = c.rhodecode_repo.get_changeset(revision) |
|
292 | 292 | except RepositoryError: |
|
293 | 293 | log.error(traceback.format_exc()) |
|
294 | 294 | return redirect(url('home')) |
|
295 | 295 | else: |
|
296 | 296 | try: |
|
297 | 297 | c.changeset_parent = c.changeset.parents[0] |
|
298 | 298 | except IndexError: |
|
299 | 299 | c.changeset_parent = None |
|
300 | 300 | c.changes = [] |
|
301 | 301 | |
|
302 | 302 | for node in c.changeset.added: |
|
303 | 303 | filenode_old = FileNode(node.path, '') |
|
304 | 304 | if filenode_old.is_binary or node.is_binary: |
|
305 | 305 | diff = _('binary file') + '\n' |
|
306 | 306 | else: |
|
307 | 307 | f_gitdiff = diffs.get_gitdiff(filenode_old, node, |
|
308 | 308 | ignore_whitespace=ignore_whitespace, |
|
309 | 309 | context=line_context) |
|
310 | 310 | diff = diffs.DiffProcessor(f_gitdiff, |
|
311 | 311 | format='gitdiff').raw_diff() |
|
312 | 312 | |
|
313 | 313 | cs1 = None |
|
314 | 314 | cs2 = node.last_changeset.raw_id |
|
315 | 315 | c.changes.append(('added', node, diff, cs1, cs2)) |
|
316 | 316 | |
|
317 | 317 | for node in c.changeset.changed: |
|
318 | 318 | filenode_old = c.changeset_parent.get_node(node.path) |
|
319 | 319 | if filenode_old.is_binary or node.is_binary: |
|
320 | 320 | diff = _('binary file') |
|
321 | 321 | else: |
|
322 | 322 | f_gitdiff = diffs.get_gitdiff(filenode_old, node, |
|
323 | 323 | ignore_whitespace=ignore_whitespace, |
|
324 | 324 | context=line_context) |
|
325 | 325 | diff = diffs.DiffProcessor(f_gitdiff, |
|
326 | 326 | format='gitdiff').raw_diff() |
|
327 | 327 | |
|
328 | 328 | cs1 = filenode_old.last_changeset.raw_id |
|
329 | 329 | cs2 = node.last_changeset.raw_id |
|
330 | 330 | c.changes.append(('changed', node, diff, cs1, cs2)) |
|
331 | 331 | |
|
332 | 332 | response.content_type = 'text/plain' |
|
333 | 333 | |
|
334 | 334 | if method == 'download': |
|
335 | 335 | response.content_disposition = 'attachment; filename=%s.patch' \ |
|
336 | 336 | % revision |
|
337 | 337 | |
|
338 | 338 | c.parent_tmpl = ''.join(['# Parent %s\n' % x.raw_id for x in |
|
339 | 339 | c.changeset.parents]) |
|
340 | 340 | |
|
341 | 341 | c.diffs = '' |
|
342 | 342 | for x in c.changes: |
|
343 | 343 | c.diffs += x[2] |
|
344 | 344 | |
|
345 | 345 | return render('changeset/raw_changeset.html') |
|
346 | 346 | |
|
347 | 347 | def comment(self, repo_name, revision): |
|
348 | 348 | ChangesetCommentsModel().create(text=request.POST.get('text'), |
|
349 | 349 | repo_id=c.rhodecode_db_repo.repo_id, |
|
350 | 350 | user_id=c.rhodecode_user.user_id, |
|
351 | 351 | revision=revision, |
|
352 | 352 | f_path=request.POST.get('f_path'), |
|
353 | 353 | line_no=request.POST.get('line')) |
|
354 | 354 | Session.commit() |
|
355 | 355 | return redirect(h.url('changeset_home', repo_name=repo_name, |
|
356 | 356 | revision=revision)) |
|
357 | 357 | |
|
358 | 358 | @jsonify |
|
359 | 359 | def delete_comment(self, repo_name, comment_id): |
|
360 | 360 | co = ChangesetComment.get(comment_id) |
|
361 | 361 | owner = lambda: co.author.user_id == c.rhodecode_user.user_id |
|
362 | 362 | if h.HasPermissionAny('hg.admin', 'repository.admin')() or owner: |
|
363 | 363 | ChangesetCommentsModel().delete(comment=co) |
|
364 | 364 | Session.commit() |
|
365 | 365 | return True |
|
366 | 366 | else: |
|
367 | 367 | raise HTTPForbidden() |
@@ -1,233 +1,233 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """ |
|
3 | 3 | rhodecode.controllers.summary |
|
4 | 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
5 | 5 | |
|
6 | 6 | Summary controller for Rhodecode |
|
7 | 7 | |
|
8 | 8 | :created_on: Apr 18, 2010 |
|
9 | 9 | :author: marcink |
|
10 | 10 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> |
|
11 | 11 | :license: GPLv3, see COPYING for more details. |
|
12 | 12 | """ |
|
13 | 13 | # This program is free software: you can redistribute it and/or modify |
|
14 | 14 | # it under the terms of the GNU General Public License as published by |
|
15 | 15 | # the Free Software Foundation, either version 3 of the License, or |
|
16 | 16 | # (at your option) any later version. |
|
17 | 17 | # |
|
18 | 18 | # This program is distributed in the hope that it will be useful, |
|
19 | 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
20 | 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
21 | 21 | # GNU General Public License for more details. |
|
22 | 22 | # |
|
23 | 23 | # You should have received a copy of the GNU General Public License |
|
24 | 24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
25 | 25 | |
|
26 | 26 | import traceback |
|
27 | 27 | import calendar |
|
28 | 28 | import logging |
|
29 | 29 | from time import mktime |
|
30 | 30 | from datetime import timedelta, date |
|
31 | from itertools import product | |
|
32 | 31 | from urlparse import urlparse |
|
32 | from rhodecode.lib.compat import product | |
|
33 | 33 | |
|
34 | 34 | from rhodecode.lib.vcs.exceptions import ChangesetError, EmptyRepositoryError, \ |
|
35 | 35 | NodeDoesNotExistError |
|
36 | 36 | |
|
37 | 37 | from pylons import tmpl_context as c, request, url, config |
|
38 | 38 | from pylons.i18n.translation import _ |
|
39 | 39 | |
|
40 | 40 | from beaker.cache import cache_region, region_invalidate |
|
41 | 41 | |
|
42 | 42 | from rhodecode.model.db import Statistics, CacheInvalidation |
|
43 | 43 | from rhodecode.lib import ALL_READMES, ALL_EXTS |
|
44 | 44 | from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator |
|
45 | 45 | from rhodecode.lib.base import BaseRepoController, render |
|
46 | 46 | from rhodecode.lib.utils import EmptyChangeset |
|
47 | 47 | from rhodecode.lib.markup_renderer import MarkupRenderer |
|
48 | 48 | from rhodecode.lib.celerylib import run_task |
|
49 | 49 | from rhodecode.lib.celerylib.tasks import get_commits_stats, \ |
|
50 | 50 | LANGUAGES_EXTENSIONS_MAP |
|
51 | 51 | from rhodecode.lib.helpers import RepoPage |
|
52 | 52 | from rhodecode.lib.compat import json, OrderedDict |
|
53 | 53 | |
|
54 | 54 | log = logging.getLogger(__name__) |
|
55 | 55 | |
|
56 | 56 | README_FILES = [''.join([x[0][0], x[1][0]]) for x in |
|
57 | 57 | sorted(list(product(ALL_READMES, ALL_EXTS)), |
|
58 | 58 | key=lambda y:y[0][1] + y[1][1])] |
|
59 | 59 | |
|
60 | 60 | |
|
61 | 61 | class SummaryController(BaseRepoController): |
|
62 | 62 | |
|
63 | 63 | @LoginRequired() |
|
64 | 64 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
65 | 65 | 'repository.admin') |
|
66 | 66 | def __before__(self): |
|
67 | 67 | super(SummaryController, self).__before__() |
|
68 | 68 | |
|
69 | 69 | def index(self, repo_name): |
|
70 | 70 | c.dbrepo = dbrepo = c.rhodecode_db_repo |
|
71 | 71 | c.following = self.scm_model.is_following_repo(repo_name, |
|
72 | 72 | self.rhodecode_user.user_id) |
|
73 | 73 | |
|
74 | 74 | def url_generator(**kw): |
|
75 | 75 | return url('shortlog_home', repo_name=repo_name, size=10, **kw) |
|
76 | 76 | |
|
77 | 77 | c.repo_changesets = RepoPage(c.rhodecode_repo, page=1, |
|
78 | 78 | items_per_page=10, url=url_generator) |
|
79 | 79 | |
|
80 | 80 | if self.rhodecode_user.username == 'default': |
|
81 | 81 | # for default(anonymous) user we don't need to pass credentials |
|
82 | 82 | username = '' |
|
83 | 83 | password = '' |
|
84 | 84 | else: |
|
85 | 85 | username = str(self.rhodecode_user.username) |
|
86 | 86 | password = '@' |
|
87 | 87 | |
|
88 | 88 | parsed_url = urlparse(url.current(qualified=True)) |
|
89 | 89 | |
|
90 | 90 | default_clone_uri = '{scheme}://{user}{pass}{netloc}{path}' |
|
91 | 91 | |
|
92 | 92 | uri_tmpl = config.get('clone_uri', default_clone_uri) |
|
93 | 93 | uri_tmpl = uri_tmpl.replace('{', '%(').replace('}', ')s') |
|
94 | 94 | |
|
95 | 95 | uri_dict = { |
|
96 | 96 | 'user': username, |
|
97 | 97 | 'pass': password, |
|
98 | 98 | 'scheme': parsed_url.scheme, |
|
99 | 99 | 'netloc': parsed_url.netloc, |
|
100 | 100 | 'path': parsed_url.path |
|
101 | 101 | } |
|
102 | 102 | uri = uri_tmpl % uri_dict |
|
103 | 103 | # generate another clone url by id |
|
104 | 104 | uri_dict.update({'path': '/_%s' % c.dbrepo.repo_id}) |
|
105 | 105 | uri_id = uri_tmpl % uri_dict |
|
106 | 106 | |
|
107 | 107 | c.clone_repo_url = uri |
|
108 | 108 | c.clone_repo_url_id = uri_id |
|
109 | 109 | c.repo_tags = OrderedDict() |
|
110 | 110 | for name, hash in c.rhodecode_repo.tags.items()[:10]: |
|
111 | 111 | try: |
|
112 | 112 | c.repo_tags[name] = c.rhodecode_repo.get_changeset(hash) |
|
113 | 113 | except ChangesetError: |
|
114 | 114 | c.repo_tags[name] = EmptyChangeset(hash) |
|
115 | 115 | |
|
116 | 116 | c.repo_branches = OrderedDict() |
|
117 | 117 | for name, hash in c.rhodecode_repo.branches.items()[:10]: |
|
118 | 118 | try: |
|
119 | 119 | c.repo_branches[name] = c.rhodecode_repo.get_changeset(hash) |
|
120 | 120 | except ChangesetError: |
|
121 | 121 | c.repo_branches[name] = EmptyChangeset(hash) |
|
122 | 122 | |
|
123 | 123 | td = date.today() + timedelta(days=1) |
|
124 | 124 | td_1m = td - timedelta(days=calendar.mdays[td.month]) |
|
125 | 125 | td_1y = td - timedelta(days=365) |
|
126 | 126 | |
|
127 | 127 | ts_min_m = mktime(td_1m.timetuple()) |
|
128 | 128 | ts_min_y = mktime(td_1y.timetuple()) |
|
129 | 129 | ts_max_y = mktime(td.timetuple()) |
|
130 | 130 | |
|
131 | 131 | if dbrepo.enable_statistics: |
|
132 | 132 | c.show_stats = True |
|
133 | 133 | c.no_data_msg = _('No data loaded yet') |
|
134 | 134 | run_task(get_commits_stats, c.dbrepo.repo_name, ts_min_y, ts_max_y) |
|
135 | 135 | else: |
|
136 | 136 | c.show_stats = False |
|
137 | 137 | c.no_data_msg = _('Statistics are disabled for this repository') |
|
138 | 138 | c.ts_min = ts_min_m |
|
139 | 139 | c.ts_max = ts_max_y |
|
140 | 140 | |
|
141 | 141 | stats = self.sa.query(Statistics)\ |
|
142 | 142 | .filter(Statistics.repository == dbrepo)\ |
|
143 | 143 | .scalar() |
|
144 | 144 | |
|
145 | 145 | c.stats_percentage = 0 |
|
146 | 146 | |
|
147 | 147 | if stats and stats.languages: |
|
148 | 148 | c.no_data = False is dbrepo.enable_statistics |
|
149 | 149 | lang_stats_d = json.loads(stats.languages) |
|
150 | 150 | c.commit_data = stats.commit_activity |
|
151 | 151 | c.overview_data = stats.commit_activity_combined |
|
152 | 152 | |
|
153 | 153 | lang_stats = ((x, {"count": y, |
|
154 | 154 | "desc": LANGUAGES_EXTENSIONS_MAP.get(x)}) |
|
155 | 155 | for x, y in lang_stats_d.items()) |
|
156 | 156 | |
|
157 | 157 | c.trending_languages = json.dumps( |
|
158 | 158 | sorted(lang_stats, reverse=True, key=lambda k: k[1])[:10] |
|
159 | 159 | ) |
|
160 | 160 | last_rev = stats.stat_on_revision + 1 |
|
161 | 161 | c.repo_last_rev = c.rhodecode_repo.count()\ |
|
162 | 162 | if c.rhodecode_repo.revisions else 0 |
|
163 | 163 | if last_rev == 0 or c.repo_last_rev == 0: |
|
164 | 164 | pass |
|
165 | 165 | else: |
|
166 | 166 | c.stats_percentage = '%.2f' % ((float((last_rev)) / |
|
167 | 167 | c.repo_last_rev) * 100) |
|
168 | 168 | else: |
|
169 | 169 | c.commit_data = json.dumps({}) |
|
170 | 170 | c.overview_data = json.dumps([[ts_min_y, 0], [ts_max_y, 10]]) |
|
171 | 171 | c.trending_languages = json.dumps({}) |
|
172 | 172 | c.no_data = True |
|
173 | 173 | |
|
174 | 174 | c.enable_downloads = dbrepo.enable_downloads |
|
175 | 175 | if c.enable_downloads: |
|
176 | 176 | c.download_options = self._get_download_links(c.rhodecode_repo) |
|
177 | 177 | |
|
178 | 178 | c.readme_data, c.readme_file = self.__get_readme_data(c.rhodecode_repo) |
|
179 | 179 | return render('summary/summary.html') |
|
180 | 180 | |
|
181 | 181 | def __get_readme_data(self, repo): |
|
182 | 182 | |
|
183 | 183 | @cache_region('long_term') |
|
184 | 184 | def _get_readme_from_cache(key): |
|
185 | 185 | readme_data = None |
|
186 | 186 | readme_file = None |
|
187 | 187 | log.debug('Fetching readme file') |
|
188 | 188 | try: |
|
189 | 189 | cs = repo.get_changeset('tip') |
|
190 | 190 | renderer = MarkupRenderer() |
|
191 | 191 | for f in README_FILES: |
|
192 | 192 | try: |
|
193 | 193 | readme = cs.get_node(f) |
|
194 | 194 | readme_file = f |
|
195 | 195 | readme_data = renderer.render(readme.content, f) |
|
196 | 196 | log.debug('Found readme %s' % readme_file) |
|
197 | 197 | break |
|
198 | 198 | except NodeDoesNotExistError: |
|
199 | 199 | continue |
|
200 | 200 | except ChangesetError: |
|
201 | 201 | pass |
|
202 | 202 | except EmptyRepositoryError: |
|
203 | 203 | pass |
|
204 | 204 | except Exception: |
|
205 | 205 | log.error(traceback.format_exc()) |
|
206 | 206 | |
|
207 | 207 | return readme_data, readme_file |
|
208 | 208 | |
|
209 | 209 | key = repo.name + '_README' |
|
210 | 210 | inv = CacheInvalidation.invalidate(key) |
|
211 | 211 | if inv is not None: |
|
212 | 212 | region_invalidate(_get_readme_from_cache, None, key) |
|
213 | 213 | CacheInvalidation.set_valid(inv.cache_key) |
|
214 | 214 | return _get_readme_from_cache(key) |
|
215 | 215 | |
|
216 | 216 | def _get_download_links(self, repo): |
|
217 | 217 | |
|
218 | 218 | download_l = [] |
|
219 | 219 | |
|
220 | 220 | branches_group = ([], _("Branches")) |
|
221 | 221 | tags_group = ([], _("Tags")) |
|
222 | 222 | |
|
223 | 223 | for name, chs in c.rhodecode_repo.branches.items(): |
|
224 | 224 | #chs = chs.split(':')[-1] |
|
225 | 225 | branches_group[0].append((chs, name),) |
|
226 | 226 | download_l.append(branches_group) |
|
227 | 227 | |
|
228 | 228 | for name, chs in c.rhodecode_repo.tags.items(): |
|
229 | 229 | #chs = chs.split(':')[-1] |
|
230 | 230 | tags_group[0].append((chs, name),) |
|
231 | 231 | download_l.append(tags_group) |
|
232 | 232 | |
|
233 | 233 | return download_l |
@@ -1,381 +1,399 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """ |
|
3 | 3 | rhodecode.lib.compat |
|
4 | 4 | ~~~~~~~~~~~~~~~~~~~~ |
|
5 | 5 | |
|
6 | 6 | Python backward compatibility functions and common libs |
|
7 | 7 | |
|
8 | 8 | |
|
9 | 9 | :created_on: Oct 7, 2011 |
|
10 | 10 | :author: marcink |
|
11 | 11 | :copyright: (C) 2010-2010 Marcin Kuzminski <marcin@python-works.com> |
|
12 | 12 | :license: GPLv3, see COPYING for more details. |
|
13 | 13 | """ |
|
14 | 14 | # This program is free software: you can redistribute it and/or modify |
|
15 | 15 | # it under the terms of the GNU General Public License as published by |
|
16 | 16 | # the Free Software Foundation, either version 3 of the License, or |
|
17 | 17 | # (at your option) any later version. |
|
18 | 18 | # |
|
19 | 19 | # This program is distributed in the hope that it will be useful, |
|
20 | 20 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
21 | 21 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
22 | 22 | # GNU General Public License for more details. |
|
23 | 23 | # |
|
24 | 24 | # You should have received a copy of the GNU General Public License |
|
25 | 25 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
26 | 26 | |
|
27 | 27 | import os |
|
28 | 28 | from rhodecode import __platform__, PLATFORM_WIN |
|
29 | 29 | |
|
#==============================================================================
# json
#==============================================================================
# Prefer the stdlib json module (Python >= 2.6); fall back to the
# API-compatible third-party simplejson package on older interpreters.
try:
    import json
except ImportError:
    import simplejson as json
|
37 | 37 | |
|
38 | 38 | |
|
#==============================================================================
# izip_longest
#==============================================================================
# Use the real itertools.izip_longest where available (Python >= 2.6),
# otherwise fall back to the documented pure-Python equivalent.
try:
    from itertools import izip_longest
except ImportError:
    import itertools

    def izip_longest(*args, **kwds):  # noqa
        """Zip the given iterables, padding shorter ones with ``fillvalue``
        (keyword-only, default None) until the longest is exhausted.
        """
        fillvalue = kwds.get("fillvalue")

        def sentinel(counter=([fillvalue] * (len(args) - 1)).pop):
            # The shared ``counter`` list holds len(args)-1 fillvalues; once
            # every iterable but the last has run dry the pop() raises
            # IndexError, which terminates the izip loop below.
            yield counter()  # yields the fillvalue, or raises IndexError

        fillers = itertools.repeat(fillvalue)
        iters = [itertools.chain(it, sentinel(), fillers)
                 for it in args]
        try:
            for tup in itertools.izip(*iters):
                yield tup
        except IndexError:
            pass
|
61 | 61 | |
|
62 | 62 | |
|
63 | 63 | #============================================================================== |
|
64 | 64 | # OrderedDict |
|
65 | 65 | #============================================================================== |
|
66 | 66 | |
|
67 | 67 | # Python Software Foundation License |
|
68 | 68 | |
|
# XXX: it feels like using the class with "is" and "is not" instead of "==" and
# "!=" should be faster.
class _Nil(object):
    """Sentinel marking "no neighbour" in the odict linked list.

    Every ``_Nil`` instance compares equal to every other ``_Nil``;
    comparison against any other type is delegated to the other operand
    by returning ``NotImplemented``.
    """

    def __repr__(self):
        return "nil"

    def __eq__(self, other):
        if not isinstance(other, _Nil):
            return NotImplemented
        return True

    def __ne__(self, other):
        if not isinstance(other, _Nil):
            return NotImplemented
        return False


# shared module-level sentinel instance
_nil = _Nil()
|
89 | 89 | |
|
90 | 90 | |
|
class _odict(object):
    """Ordered dict data structure, with O(1) complexity for dict operations
    that modify one element.

    Overwriting values doesn't change their original sequential order.
    """
    # Implementation note: each stored dict value is a 3-item list
    # [predecessor_key, value, successor_key], threading a doubly-linked
    # list of keys through the underlying dict.  ``lh``/``lt`` hold the
    # head/tail keys of that list (``_nil`` when empty).

    def _dict_impl(self):
        # Overridden by concrete subclasses to return the backing dict type.
        return None

    def __init__(self, data=(), **kwds):
        """This doesn't accept keyword initialization as normal dicts to avoid
        a trap - inside a function or method the keyword args are accessible
        only as a dict, without a defined order, so their original order is
        lost.
        """
        if kwds:
            raise TypeError("__init__() of ordered dict takes no keyword "
                            "arguments to avoid an ordering trap.")
        self._dict_impl().__init__(self)
        # If you give a normal dict, then the order of elements is undefined
        if hasattr(data, "iteritems"):
            for key, val in data.iteritems():
                self[key] = val
        else:
            for key, val in data:
                self[key] = val

    # Double-linked list header
    def _get_lh(self):
        dict_impl = self._dict_impl()
        # lazily initialise to the empty-list sentinel on first access
        if not hasattr(self, '_lh'):
            dict_impl.__setattr__(self, '_lh', _nil)
        return dict_impl.__getattribute__(self, '_lh')

    def _set_lh(self, val):
        self._dict_impl().__setattr__(self, '_lh', val)

    lh = property(_get_lh, _set_lh)

    # Double-linked list tail
    def _get_lt(self):
        dict_impl = self._dict_impl()
        if not hasattr(self, '_lt'):
            dict_impl.__setattr__(self, '_lt', _nil)
        return dict_impl.__getattribute__(self, '_lt')

    def _set_lt(self, val):
        self._dict_impl().__setattr__(self, '_lt', val)

    lt = property(_get_lt, _set_lt)

    def __getitem__(self, key):
        # slot [1] of the linked-list node holds the actual value
        return self._dict_impl().__getitem__(self, key)[1]

    def __setitem__(self, key, val):
        dict_impl = self._dict_impl()
        try:
            # existing key: replace value in place, ordering untouched
            dict_impl.__getitem__(self, key)[1] = val
        except KeyError:
            # new key: append as the new tail of the ordering list
            new = [dict_impl.__getattribute__(self, 'lt'), val, _nil]
            dict_impl.__setitem__(self, key, new)
            if dict_impl.__getattribute__(self, 'lt') == _nil:
                # first element -- it is also the new head
                dict_impl.__setattr__(self, 'lh', key)
            else:
                dict_impl.__getitem__(
                    self, dict_impl.__getattribute__(self, 'lt'))[2] = key
            dict_impl.__setattr__(self, 'lt', key)

    def __delitem__(self, key):
        # unlink the node from both neighbours before removing it
        dict_impl = self._dict_impl()
        pred, _, succ = self._dict_impl().__getitem__(self, key)
        if pred == _nil:
            dict_impl.__setattr__(self, 'lh', succ)
        else:
            dict_impl.__getitem__(self, pred)[2] = succ
        if succ == _nil:
            dict_impl.__setattr__(self, 'lt', pred)
        else:
            dict_impl.__getitem__(self, succ)[0] = pred
        dict_impl.__delitem__(self, key)

    def __contains__(self, key):
        return key in self.keys()

    def __len__(self):
        return len(self.keys())

    def __str__(self):
        pairs = ("%r: %r" % (k, v) for k, v in self.iteritems())
        return "{%s}" % ", ".join(pairs)

    def __repr__(self):
        if self:
            pairs = ("(%r, %r)" % (k, v) for k, v in self.iteritems())
            return "odict([%s])" % ", ".join(pairs)
        else:
            return "odict()"

    def get(self, k, x=None):
        """Return the value for *k*, or *x* when the key is absent."""
        if k in self:
            return self._dict_impl().__getitem__(self, k)[1]
        else:
            return x

    def __iter__(self):
        # walk the linked list from head to tail, yielding keys in order
        dict_impl = self._dict_impl()
        curr_key = dict_impl.__getattribute__(self, 'lh')
        while curr_key != _nil:
            yield curr_key
            curr_key = dict_impl.__getitem__(self, curr_key)[2]

    iterkeys = __iter__

    def keys(self):
        return list(self.iterkeys())

    def itervalues(self):
        dict_impl = self._dict_impl()
        curr_key = dict_impl.__getattribute__(self, 'lh')
        while curr_key != _nil:
            _, val, curr_key = dict_impl.__getitem__(self, curr_key)
            yield val

    def values(self):
        return list(self.itervalues())

    def iteritems(self):
        dict_impl = self._dict_impl()
        curr_key = dict_impl.__getattribute__(self, 'lh')
        while curr_key != _nil:
            _, val, next_key = dict_impl.__getitem__(self, curr_key)
            yield curr_key, val
            curr_key = next_key

    def items(self):
        return list(self.iteritems())

    def sort(self, cmp=None, key=None, reverse=False):
        """Reorder the dict in place; with no *cmp*/*key* given, items are
        sorted by their (key, value) pairs -- i.e. primarily by key.
        """
        items = [(k, v) for k, v in self.items()]
        if cmp is not None:
            items = sorted(items, cmp=cmp)
        elif key is not None:
            items = sorted(items, key=key)
        else:
            # default: sort by the stored value
            items = sorted(items, key=lambda x: x[1])
        if reverse:
            items.reverse()
        # rebuild the linked list in the new order
        self.clear()
        self.__init__(items)

    def clear(self):
        dict_impl = self._dict_impl()
        dict_impl.clear(self)
        # reset head/tail sentinels of the ordering list
        dict_impl.__setattr__(self, 'lh', _nil)
        dict_impl.__setattr__(self, 'lt', _nil)

    def copy(self):
        return self.__class__(self)

    def update(self, data=(), **kwds):
        """Merge *data* (mapping or iterable of pairs) into this dict."""
        if kwds:
            raise TypeError("update() of ordered dict takes no keyword "
                            "arguments to avoid an ordering trap.")
        if hasattr(data, "iteritems"):
            data = data.iteritems()
        for key, val in data:
            self[key] = val

    def setdefault(self, k, x=None):
        try:
            return self[k]
        except KeyError:
            self[k] = x
            return x

    def pop(self, k, x=_nil):
        """Remove *k* and return its value; return *x* (when given) or
        raise KeyError for a missing key.
        """
        try:
            val = self[k]
            del self[k]
            return val
        except KeyError:
            if x == _nil:
                raise
            return x

    def popitem(self):
        """Remove and return the most recently added (key, value) pair."""
        try:
            dict_impl = self._dict_impl()
            key = dict_impl.__getattribute__(self, 'lt')
            return key, self.pop(key)
        except KeyError:
            raise KeyError("'popitem(): ordered dictionary is empty'")

    def riterkeys(self):
        """To iterate on keys in reversed order.
        """
        dict_impl = self._dict_impl()
        curr_key = dict_impl.__getattribute__(self, 'lt')
        while curr_key != _nil:
            yield curr_key
            # slot [0] is the predecessor key
            curr_key = dict_impl.__getitem__(self, curr_key)[0]

    __reversed__ = riterkeys

    def rkeys(self):
        """List of the keys in reversed order.
        """
        return list(self.riterkeys())

    def ritervalues(self):
        """To iterate on values in reversed order.
        """
        dict_impl = self._dict_impl()
        curr_key = dict_impl.__getattribute__(self, 'lt')
        while curr_key != _nil:
            # unpacking rebinds curr_key to the predecessor (slot [0])
            curr_key, val, _ = dict_impl.__getitem__(self, curr_key)
            yield val

    def rvalues(self):
        """List of the values in reversed order.
        """
        return list(self.ritervalues())

    def riteritems(self):
        """To iterate on (key, value) in reversed order.
        """
        dict_impl = self._dict_impl()
        curr_key = dict_impl.__getattribute__(self, 'lt')
        while curr_key != _nil:
            pred_key, val, _ = dict_impl.__getitem__(self, curr_key)
            yield curr_key, val
            curr_key = pred_key

    def ritems(self):
        """List of the (key, value) in reversed order.
        """
        return list(self.riteritems())

    def firstkey(self):
        """Return the oldest (first inserted) key."""
        if self:
            return self._dict_impl().__getattribute__(self, 'lh')
        else:
            raise KeyError("'firstkey(): ordered dictionary is empty'")

    def lastkey(self):
        """Return the newest (last inserted) key."""
        if self:
            return self._dict_impl().__getattribute__(self, 'lt')
        else:
            raise KeyError("'lastkey(): ordered dictionary is empty'")

    def as_dict(self):
        """Return a plain (unordered) dict with the same items."""
        return self._dict_impl()(self.items())

    def _repr(self):
        """_repr(): low level repr of the whole data contained in the odict.
        Useful for debugging.
        """
        dict_impl = self._dict_impl()
        form = "odict low level repr lh,lt,data: %r, %r, %s"
        return form % (dict_impl.__getattribute__(self, 'lh'),
                       dict_impl.__getattribute__(self, 'lt'),
                       dict_impl.__repr__(self))
|
354 | 354 | |
|
355 | 355 | |
|
class OrderedDict(_odict, dict):
    """Concrete ordered dictionary backed by the builtin ``dict``."""

    def _dict_impl(self):
        # tells _odict which concrete mapping type provides the storage
        return dict
|
360 | 360 | |
|
361 | 361 | |
|
362 | 362 | #============================================================================== |
|
363 | 363 | # OrderedSet |
|
364 | 364 | #============================================================================== |
|
365 | 365 | from sqlalchemy.util import OrderedSet |
|
366 | 366 | |
|
367 | 367 | |
|
#==============================================================================
# kill FUNCTIONS
#==============================================================================
# os.kill does not exist on Windows in this Python version, so emulate it
# with the Win32 API; on every other platform use os.kill directly.
if __platform__ in PLATFORM_WIN:
    import ctypes

    def kill(pid, sig):
        """kill function for Win32"""
        # Open the process with access right 1 (PROCESS_TERMINATE) and
        # terminate it.  NOTE(review): *sig* is ignored on Windows and the
        # process handle is not closed -- confirm this is acceptable here.
        kernel32 = ctypes.windll.kernel32
        handle = kernel32.OpenProcess(1, 0, pid)
        return (0 != kernel32.TerminateProcess(handle, 0))

else:
    kill = os.kill
|
382 | ||
|
383 | ||
|
#==============================================================================
# itertools.product
#==============================================================================

# itertools.product was added in Python 2.6; provide the documented
# pure-Python equivalent for older interpreters.
try:
    from itertools import product
except ImportError:
    def product(*args, **kwds):
        """Cartesian product of the input iterables (optionally repeated
        ``repeat`` times), yielding tuples.
        """
        # product('ABCD', 'xy') --> Ax Ay Bx By Cx Cy Dx Dy
        # product(range(2), repeat=3) --> 000 001 010 011 100 101 110 111
        # NOTE: relies on Python 2 map() returning a list (list * int).
        pools = map(tuple, args) * kwds.get('repeat', 1)
        result = [[]]
        for pool in pools:
            result = [x + [y] for x in result for y in pool]
        for prod in result:
            yield tuple(prod)
@@ -1,622 +1,629 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """ |
|
3 | 3 | rhodecode.lib.utils |
|
4 | 4 | ~~~~~~~~~~~~~~~~~~~ |
|
5 | 5 | |
|
6 | 6 | Utilities library for RhodeCode |
|
7 | 7 | |
|
8 | 8 | :created_on: Apr 18, 2010 |
|
9 | 9 | :author: marcink |
|
10 | 10 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> |
|
11 | 11 | :license: GPLv3, see COPYING for more details. |
|
12 | 12 | """ |
|
13 | 13 | # This program is free software: you can redistribute it and/or modify |
|
14 | 14 | # it under the terms of the GNU General Public License as published by |
|
15 | 15 | # the Free Software Foundation, either version 3 of the License, or |
|
16 | 16 | # (at your option) any later version. |
|
17 | 17 | # |
|
18 | 18 | # This program is distributed in the hope that it will be useful, |
|
19 | 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
20 | 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
21 | 21 | # GNU General Public License for more details. |
|
22 | 22 | # |
|
23 | 23 | # You should have received a copy of the GNU General Public License |
|
24 | 24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
25 | 25 | |
|
26 | 26 | import os |
|
27 | import re | |
|
27 | 28 | import logging |
|
28 | 29 | import datetime |
|
29 | 30 | import traceback |
|
30 | 31 | import paste |
|
31 | 32 | import beaker |
|
32 | 33 | import tarfile |
|
33 | 34 | import shutil |
|
34 | 35 | from os.path import abspath |
|
35 | 36 | from os.path import dirname as dn, join as jn |
|
36 | 37 | |
|
37 | 38 | from paste.script.command import Command, BadCommand |
|
38 | 39 | |
|
39 | 40 | from mercurial import ui, config |
|
40 | 41 | |
|
41 | 42 | from webhelpers.text import collapse, remove_formatting, strip_tags |
|
42 | 43 | |
|
43 | 44 | from rhodecode.lib.vcs import get_backend |
|
44 | 45 | from rhodecode.lib.vcs.backends.base import BaseChangeset |
|
45 | 46 | from rhodecode.lib.vcs.utils.lazy import LazyProperty |
|
46 | 47 | from rhodecode.lib.vcs.utils.helpers import get_scm |
|
47 | 48 | from rhodecode.lib.vcs.exceptions import VCSError |
|
48 | 49 | |
|
49 | 50 | from rhodecode.lib.caching_query import FromCache |
|
50 | 51 | |
|
51 | 52 | from rhodecode.model import meta |
|
52 | 53 | from rhodecode.model.db import Repository, User, RhodeCodeUi, \ |
|
53 | 54 | UserLog, RepoGroup, RhodeCodeSetting, UserRepoGroupToPerm |
|
54 | 55 | from rhodecode.model.meta import Session |
|
55 | 56 | from rhodecode.model.repos_group import ReposGroupModel |
|
56 | 57 | |
|
57 | 58 | log = logging.getLogger(__name__) |
|
58 | 59 | |
|
# Matches directory names RhodeCode assigns to "removed" repositories:
# rm__YYYYMMDD_HHMMSS_ffffff__<original name> (date, time, microseconds).
REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
|
61 | ||
|
59 | 62 | |
|
def recursive_replace(str_, replace=' '):
    """Collapse every run of *replace* characters in *str_* to a single one.

    :param str_: given string
    :param replace: char to find and replace multiple instances

    Examples::
        >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
        'Mighty-Mighty-Bo-sstones'
    """
    doubled = replace * 2
    # keep squashing pairs until no doubled occurrence remains
    while doubled in str_:
        str_ = str_.replace(doubled, replace)
    return str_
|
76 | 79 | |
|
77 | 80 | |
|
def repo_name_slug(value):
    """Return slug of name of repository
    This function is called on each creation/modification
    of repository to prevent bad names in repo
    """

    slug = strip_tags(remove_formatting(value))

    # replace every unwanted character with a dash ...
    for forbidden in """=[]\;'"<>,/~!@#$%^&*()+{}|: """:
        slug = forbidden.join(slug.split(forbidden)).replace(forbidden, '-')
    # ... then squash consecutive dashes down to a single one
    slug = recursive_replace(slug, '-')
    slug = collapse(slug, '-')
    return slug
|
92 | 95 | |
|
93 | 96 | |
|
def get_repo_slug(request):
    """Extract the repository name from the pylons routing dict, stripping
    any trailing slash; falsy values (missing key, empty) pass through.
    """
    name = request.environ['pylons.routes_dict'].get('repo_name')
    return name.rstrip('/') if name else name
|
99 | 102 | |
|
100 | 103 | |
|
def get_repos_group_slug(request):
    """Extract the repository-group name from the pylons routing dict,
    stripping any trailing slash; falsy values pass through unchanged.
    """
    name = request.environ['pylons.routes_dict'].get('group_name')
    return name.rstrip('/') if name else name
|
106 | 109 | |
|
107 | 110 | |
|
def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
    """
    Action logger for various actions made by users

    :param user: user that made this action, can be a unique username string or
        object containing user_id attribute
    :param action: action to log, should be on of predefined unique actions for
        easy translations
    :param repo: string name of repository or object containing repo_id,
        that action was made on
    :param ipaddr: optional ip address from what the action was made
    :param sa: optional sqlalchemy session
    :param commit: if True, commit the session after adding the log entry

    :raises Exception: when *user* or *repo* cannot be resolved; database
        errors are logged and re-raised
    """

    if not sa:
        sa = meta.Session

    try:
        # resolve the acting user from an object or a username string
        if hasattr(user, 'user_id'):
            user_obj = user
        elif isinstance(user, basestring):
            user_obj = User.get_by_username(user)
        else:
            raise Exception('You have to provide user object or username')

        # resolve the repository from an object or its name
        if hasattr(repo, 'repo_id'):
            repo_obj = Repository.get(repo.repo_id)
            repo_name = repo_obj.repo_name
        elif isinstance(repo, basestring):
            repo_name = repo.lstrip('/')
            repo_obj = Repository.get_by_repo_name(repo_name)
        else:
            raise Exception('You have to provide repository to action logger')

        user_log = UserLog()
        user_log.user_id = user_obj.user_id
        user_log.action = action

        user_log.repository_id = repo_obj.repo_id
        user_log.repository_name = repo_name

        user_log.action_date = datetime.datetime.now()
        user_log.user_ip = ipaddr
        sa.add(user_log)

        log.info('Adding user %s, action %s on %s' % (user_obj, action, repo))
        if commit:
            sa.commit()
    except Exception:
        # was a bare ``except:`` which also intercepted SystemExit and
        # KeyboardInterrupt just to log them; narrowed to Exception --
        # the error is still logged and re-raised for the caller
        log.error(traceback.format_exc())
        raise
|
160 | 163 | |
|
161 | 164 | |
|
def get_repos(path, recursive=False):
    """
    Scans given path for repos and return (name,(type,path)) tuple

    :param path: path to scan for repositories
    :param recursive: recursive search and return names with subdirs in front

    :return: a generator of (relative_name, scm_info) tuples
    """

    # remove ending slash for better results
    path = path.rstrip(os.sep)

    def _get_repos(p):
        # NOTE(review): requires *write* access (W_OK) to scan a directory,
        # presumably because RhodeCode must manage these repos -- confirm.
        if not os.access(p, os.W_OK):
            return
        for dirpath in os.listdir(p):
            if os.path.isfile(os.path.join(p, dirpath)):
                continue
            cur_path = os.path.join(p, dirpath)
            try:
                scm_info = get_scm(cur_path)
                # yield the repo name relative to the scanned root path
                yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
            except VCSError:
                # not a repository itself; descend only in recursive mode
                if not recursive:
                    continue
                #check if this dir contains other repos for recursive scan
                rec_path = os.path.join(p, dirpath)
                if os.path.isdir(rec_path):
                    for inner_scm in _get_repos(rec_path):
                        yield inner_scm

    return _get_repos(path)
|
193 | 196 | |
|
194 | 197 | |
|
def is_valid_repo(repo_name, base_path):
    """
    Returns True if given path is a valid repository False otherwise
    :param repo_name:
    :param base_path:

    :return True: if given path is a valid repository
    """
    # a path is a repository exactly when get_scm can detect a backend in it
    try:
        get_scm(os.path.join(base_path, repo_name))
    except VCSError:
        return False
    else:
        return True
|
210 | 213 | |
|
211 | 214 | |
|
def is_valid_repos_group(repos_group_name, base_path):
    """
    Returns True if given path is a repos group False otherwise

    :param repo_name:
    :param base_path:
    """
    # a valid repository is never reported as a group
    if is_valid_repo(repos_group_name, base_path):
        return False

    # any other existing directory counts as a repos group
    return os.path.isdir(os.path.join(base_path, repos_group_name))
|
230 | 233 | |
|
231 | 234 | |
|
def ask_ok(prompt, retries=4, complaint='Yes or no, please!'):
    """Interactively ask a yes/no question on the console.

    :param prompt: text shown to the user
    :param retries: number of invalid answers tolerated before giving up
    :param complaint: message printed after each invalid answer
    :raises IOError: when the user exceeds the allowed retries
    """
    while True:
        ok = raw_input(prompt)
        if ok in ('y', 'ye', 'yes'):
            return True
        if ok in ('n', 'no', 'nop', 'nope'):
            return False
        retries = retries - 1
        if retries < 0:
            raise IOError
        print complaint
|
243 | 246 | |
|
#propagated from mercurial documentation
# hgrc configuration sections that make_ui() copies from a file or the
# database into the mercurial ui object
ui_sections = ['alias', 'auth',
                'decode/encode', 'defaults',
                'diff', 'email',
                'extensions', 'format',
                'merge-patterns', 'merge-tools',
                'hooks', 'http_proxy',
                'smtp', 'patch',
                'paths', 'profiling',
                'server', 'trusted',
                'ui', 'web', ]
|
255 | 258 | |
|
256 | 259 | |
|
def make_ui(read_from='file', path=None, checkpaths=True):
    """A function that will read python rc files or database
    and make an mercurial ui object from read options

    :param path: path to mercurial config file
    :param checkpaths: check the path
        (NOTE(review): currently unused in this function -- confirm)
    :param read_from: read from 'file' or 'db'

    :return: configured mercurial ui object, or False when *path* does not
        exist in 'file' mode
    """

    baseui = ui.ui()

    # clean the baseui object
    baseui._ocfg = config.config()
    baseui._ucfg = config.config()
    baseui._tcfg = config.config()

    if read_from == 'file':
        if not os.path.isfile(path):
            log.debug('hgrc file is not present at %s skipping...' % path)
            # NOTE(review): this branch returns False while all others
            # return a ui object -- callers must handle the False case
            return False
        log.debug('reading hgrc from %s' % path)
        cfg = config.config()
        cfg.read(path)
        # copy only the whitelisted sections (see ui_sections) into baseui
        for section in ui_sections:
            for k, v in cfg.items(section):
                log.debug('settings ui from file[%s]%s:%s' % (section, k, v))
                baseui.setconfig(section, k, v)

    elif read_from == 'db':
        sa = meta.Session
        # cached query of all RhodeCodeUi rows
        ret = sa.query(RhodeCodeUi)\
            .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
            .all()

        hg_ui = ret
        # apply only the rows flagged as active
        for ui_ in hg_ui:
            if ui_.ui_active:
                log.debug('settings ui from db[%s]%s:%s', ui_.ui_section,
                          ui_.ui_key, ui_.ui_value)
                baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)

        # release the scoped session used for the query
        meta.Session.remove()
    return baseui
|
300 | 303 | |
|
301 | 304 | |
|
def set_rhodecode_config(config):
    """
    Updates pylons config with new settings from database

    :param config: pylons config object, mutated in place
    """
    for key, value in RhodeCodeSetting.get_app_settings().items():
        config[key] = value
|
312 | 315 | |
|
313 | 316 | |
|
def invalidate_cache(cache_key, *args):
    """
    Puts cache invalidation task into db for
    further global cache invalidation
    """

    from rhodecode.model.scm import ScmModel

    prefix = 'get_repo_cached_'
    if cache_key.startswith(prefix):
        # everything after the prefix is the repository name
        repo_name = cache_key.split(prefix)[-1]
        ScmModel().mark_for_invalidation(repo_name)
|
325 | 328 | |
|
326 | 329 | |
|
class EmptyChangeset(BaseChangeset):
    """
    An dummy empty changeset. It's possible to pass hash when creating
    an EmptyChangeset
    """

    def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
                 alias=None):
        # cs: the 40-character hash reported as this changeset's raw_id
        self._empty_cs = cs
        # sentinel revision number for "no changeset"
        self.revision = -1
        self.message = ''
        self.author = ''
        self.date = ''
        self.repository = repo
        self.requested_revision = requested_revision
        # backend alias (e.g. scm type) used by the branch property
        self.alias = alias

    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this changeset, useful for web
        representation.
        """

        return self._empty_cs

    @LazyProperty
    def branch(self):
        # default branch name of the backend identified by self.alias
        return get_backend(self.alias).DEFAULT_BRANCH_NAME

    @LazyProperty
    def short_id(self):
        # abbreviated 12-character form of raw_id
        return self.raw_id[:12]

    def get_file_changeset(self, path):
        # every path resolves to this empty changeset itself
        return self

    def get_file_content(self, path):
        # an empty changeset has no file content
        return u''

    def get_file_size(self, path):
        return 0
|
369 | 372 | |
|
370 | 373 | |
|
def map_groups(groups):
    """
    Checks for groups existence, and creates groups structures.
    It returns last group in structure

    :param groups: list of groups structure
    """
    sa = meta.Session

    parent = None
    group = None

    # last element is repo in nested groups structure
    groups = groups[:-1]
    rgm = ReposGroupModel(sa)
    for lvl, group_name in enumerate(groups):
        # full path of the group at this nesting level, e.g. 'a/b'
        group_name = '/'.join(groups[:lvl] + [group_name])
        group = RepoGroup.get_by_group_name(group_name)
        desc = '%s group' % group_name

        # # WTF that doesn't work !?
        # if group is None:
        #     group = rgm.create(group_name, desc, parent, just_db=True)
        #     sa.commit()

        # skip folders that are now removed repos
        # NOTE(review): the pattern is anchored at the start of the joined
        # path, so only a top-level rm__* component matches -- confirm
        # removed repos cannot appear nested inside a group
        if REMOVED_REPO_PAT.match(group_name):
            break

        if group is None:
            log.debug('creating group level: %s group_name: %s' % (lvl, group_name))
            group = RepoGroup(group_name, parent)
            group.group_description = desc
            sa.add(group)
            # give the freshly created group its default permissions
            rgm._create_default_perms(group)
            sa.commit()
        parent = group
    return group
|
405 | 412 | |
|
406 | 413 | |
|
407 | 414 | def repo2db_mapper(initial_repo_list, remove_obsolete=False): |
|
408 | 415 | """ |
|
409 | 416 | maps all repos given in initial_repo_list, non existing repositories |
|
410 | 417 | are created, if remove_obsolete is True it also check for db entries |
|
411 | 418 | that are not in initial_repo_list and removes them. |
|
412 | 419 | |
|
413 | 420 | :param initial_repo_list: list of repositories found by scanning methods |
|
414 | 421 | :param remove_obsolete: check for obsolete entries in database |
|
415 | 422 | """ |
|
416 | 423 | from rhodecode.model.repo import RepoModel |
|
417 | 424 | sa = meta.Session |
|
418 | 425 | rm = RepoModel() |
|
419 | 426 | user = sa.query(User).filter(User.admin == True).first() |
|
420 | 427 | if user is None: |
|
421 | 428 | raise Exception('Missing administrative account !') |
|
422 | 429 | added = [] |
|
423 | 430 | |
|
424 | 431 | for name, repo in initial_repo_list.items(): |
|
425 | 432 | group = map_groups(name.split(Repository.url_sep())) |
|
426 | 433 | if not rm.get_by_repo_name(name, cache=False): |
|
427 | 434 | log.info('repository %s not found creating default' % name) |
|
428 | 435 | added.append(name) |
|
429 | 436 | form_data = { |
|
430 | 437 | 'repo_name': name, |
|
431 | 438 | 'repo_name_full': name, |
|
432 | 439 | 'repo_type': repo.alias, |
|
433 | 440 | 'description': repo.description \ |
|
434 | 441 | if repo.description != 'unknown' else '%s repository' % name, |
|
435 | 442 | 'private': False, |
|
436 | 443 | 'group_id': getattr(group, 'group_id', None) |
|
437 | 444 | } |
|
438 | 445 | rm.create(form_data, user, just_db=True) |
|
439 | 446 | sa.commit() |
|
440 | 447 | removed = [] |
|
441 | 448 | if remove_obsolete: |
|
442 | 449 | #remove from database those repositories that are not in the filesystem |
|
443 | 450 | for repo in sa.query(Repository).all(): |
|
444 | 451 | if repo.repo_name not in initial_repo_list.keys(): |
|
445 | 452 | removed.append(repo.repo_name) |
|
446 | 453 | sa.delete(repo) |
|
447 | 454 | sa.commit() |
|
448 | 455 | |
|
449 | 456 | return added, removed |
|
450 | 457 | |
|
451 | 458 | |
|
452 | 459 | # set cache regions for beaker so celery can utilise it |
|
453 | 460 | def add_cache(settings): |
|
454 | 461 | cache_settings = {'regions': None} |
|
455 | 462 | for key in settings.keys(): |
|
456 | 463 | for prefix in ['beaker.cache.', 'cache.']: |
|
457 | 464 | if key.startswith(prefix): |
|
458 | 465 | name = key.split(prefix)[1].strip() |
|
459 | 466 | cache_settings[name] = settings[key].strip() |
|
460 | 467 | if cache_settings['regions']: |
|
461 | 468 | for region in cache_settings['regions'].split(','): |
|
462 | 469 | region = region.strip() |
|
463 | 470 | region_settings = {} |
|
464 | 471 | for key, value in cache_settings.items(): |
|
465 | 472 | if key.startswith(region): |
|
466 | 473 | region_settings[key.split('.')[1]] = value |
|
467 | 474 | region_settings['expire'] = int(region_settings.get('expire', |
|
468 | 475 | 60)) |
|
469 | 476 | region_settings.setdefault('lock_dir', |
|
470 | 477 | cache_settings.get('lock_dir')) |
|
471 | 478 | region_settings.setdefault('data_dir', |
|
472 | 479 | cache_settings.get('data_dir')) |
|
473 | 480 | |
|
474 | 481 | if 'type' not in region_settings: |
|
475 | 482 | region_settings['type'] = cache_settings.get('type', |
|
476 | 483 | 'memory') |
|
477 | 484 | beaker.cache.cache_regions[region] = region_settings |
|
478 | 485 | |
|
479 | 486 | |
|
480 | 487 | #============================================================================== |
|
481 | 488 | # TEST FUNCTIONS AND CREATORS |
|
482 | 489 | #============================================================================== |
|
483 | 490 | def create_test_index(repo_location, config, full_index): |
|
484 | 491 | """ |
|
485 | 492 | Makes default test index |
|
486 | 493 | |
|
487 | 494 | :param config: test config |
|
488 | 495 | :param full_index: |
|
489 | 496 | """ |
|
490 | 497 | |
|
491 | 498 | from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon |
|
492 | 499 | from rhodecode.lib.pidlock import DaemonLock, LockHeld |
|
493 | 500 | |
|
494 | 501 | repo_location = repo_location |
|
495 | 502 | |
|
496 | 503 | index_location = os.path.join(config['app_conf']['index_dir']) |
|
497 | 504 | if not os.path.exists(index_location): |
|
498 | 505 | os.makedirs(index_location) |
|
499 | 506 | |
|
500 | 507 | try: |
|
501 | 508 | l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock')) |
|
502 | 509 | WhooshIndexingDaemon(index_location=index_location, |
|
503 | 510 | repo_location=repo_location)\ |
|
504 | 511 | .run(full_index=full_index) |
|
505 | 512 | l.release() |
|
506 | 513 | except LockHeld: |
|
507 | 514 | pass |
|
508 | 515 | |
|
509 | 516 | |
|
510 | 517 | def create_test_env(repos_test_path, config): |
|
511 | 518 | """ |
|
512 | 519 | Makes a fresh database and |
|
513 | 520 | install test repository into tmp dir |
|
514 | 521 | """ |
|
515 | 522 | from rhodecode.lib.db_manage import DbManage |
|
516 | 523 | from rhodecode.tests import HG_REPO, TESTS_TMP_PATH |
|
517 | 524 | |
|
518 | 525 | # PART ONE create db |
|
519 | 526 | dbconf = config['sqlalchemy.db1.url'] |
|
520 | 527 | log.debug('making test db %s' % dbconf) |
|
521 | 528 | |
|
522 | 529 | # create test dir if it doesn't exist |
|
523 | 530 | if not os.path.isdir(repos_test_path): |
|
524 | 531 | log.debug('Creating testdir %s' % repos_test_path) |
|
525 | 532 | os.makedirs(repos_test_path) |
|
526 | 533 | |
|
527 | 534 | dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'], |
|
528 | 535 | tests=True) |
|
529 | 536 | dbmanage.create_tables(override=True) |
|
530 | 537 | dbmanage.create_settings(dbmanage.config_prompt(repos_test_path)) |
|
531 | 538 | dbmanage.create_default_user() |
|
532 | 539 | dbmanage.admin_prompt() |
|
533 | 540 | dbmanage.create_permissions() |
|
534 | 541 | dbmanage.populate_default_permissions() |
|
535 | 542 | Session.commit() |
|
536 | 543 | # PART TWO make test repo |
|
537 | 544 | log.debug('making test vcs repositories') |
|
538 | 545 | |
|
539 | 546 | idx_path = config['app_conf']['index_dir'] |
|
540 | 547 | data_path = config['app_conf']['cache_dir'] |
|
541 | 548 | |
|
542 | 549 | #clean index and data |
|
543 | 550 | if idx_path and os.path.exists(idx_path): |
|
544 | 551 | log.debug('remove %s' % idx_path) |
|
545 | 552 | shutil.rmtree(idx_path) |
|
546 | 553 | |
|
547 | 554 | if data_path and os.path.exists(data_path): |
|
548 | 555 | log.debug('remove %s' % data_path) |
|
549 | 556 | shutil.rmtree(data_path) |
|
550 | 557 | |
|
551 | 558 | #CREATE DEFAULT HG REPOSITORY |
|
552 | 559 | cur_dir = dn(dn(abspath(__file__))) |
|
553 | 560 | tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz")) |
|
554 | 561 | tar.extractall(jn(TESTS_TMP_PATH, HG_REPO)) |
|
555 | 562 | tar.close() |
|
556 | 563 | |
|
557 | 564 | |
|
558 | 565 | #============================================================================== |
|
559 | 566 | # PASTER COMMANDS |
|
560 | 567 | #============================================================================== |
|
561 | 568 | class BasePasterCommand(Command): |
|
562 | 569 | """ |
|
563 | 570 | Abstract Base Class for paster commands. |
|
564 | 571 | |
|
565 | 572 | The celery commands are somewhat aggressive about loading |
|
566 | 573 | celery.conf, and since our module sets the `CELERY_LOADER` |
|
567 | 574 | environment variable to our loader, we have to bootstrap a bit and |
|
568 | 575 | make sure we've had a chance to load the pylons config off of the |
|
569 | 576 | command line, otherwise everything fails. |
|
570 | 577 | """ |
|
571 | 578 | min_args = 1 |
|
572 | 579 | min_args_error = "Please provide a paster config file as an argument." |
|
573 | 580 | takes_config_file = 1 |
|
574 | 581 | requires_config_file = True |
|
575 | 582 | |
|
576 | 583 | def notify_msg(self, msg, log=False): |
|
577 | 584 | """Make a notification to user, additionally if logger is passed |
|
578 | 585 | it logs this action using given logger |
|
579 | 586 | |
|
580 | 587 | :param msg: message that will be printed to user |
|
581 | 588 | :param log: logging instance, to use to additionally log this message |
|
582 | 589 | |
|
583 | 590 | """ |
|
584 | 591 | if log and isinstance(log, logging): |
|
585 | 592 | log(msg) |
|
586 | 593 | |
|
587 | 594 | def run(self, args): |
|
588 | 595 | """ |
|
589 | 596 | Overrides Command.run |
|
590 | 597 | |
|
591 | 598 | Checks for a config file argument and loads it. |
|
592 | 599 | """ |
|
593 | 600 | if len(args) < self.min_args: |
|
594 | 601 | raise BadCommand( |
|
595 | 602 | self.min_args_error % {'min_args': self.min_args, |
|
596 | 603 | 'actual_args': len(args)}) |
|
597 | 604 | |
|
598 | 605 | # Decrement because we're going to lob off the first argument. |
|
599 | 606 | # @@ This is hacky |
|
600 | 607 | self.min_args -= 1 |
|
601 | 608 | self.bootstrap_config(args[0]) |
|
602 | 609 | self.update_parser() |
|
603 | 610 | return super(BasePasterCommand, self).run(args[1:]) |
|
604 | 611 | |
|
605 | 612 | def update_parser(self): |
|
606 | 613 | """ |
|
607 | 614 | Abstract method. Allows for the class's parser to be updated |
|
608 | 615 | before the superclass's `run` method is called. Necessary to |
|
609 | 616 | allow options/arguments to be passed through to the underlying |
|
610 | 617 | celery command. |
|
611 | 618 | """ |
|
612 | 619 | raise NotImplementedError("Abstract Method.") |
|
613 | 620 | |
|
614 | 621 | def bootstrap_config(self, conf): |
|
615 | 622 | """ |
|
616 | 623 | Loads the pylons configuration. |
|
617 | 624 | """ |
|
618 | 625 | from pylons import config as pylonsconfig |
|
619 | 626 | |
|
620 | 627 | path_to_ini_file = os.path.realpath(conf) |
|
621 | 628 | conf = paste.deploy.appconfig('config:' + path_to_ini_file) |
|
622 | 629 | pylonsconfig.init_app(conf.global_conf, conf.local_conf) |
@@ -1,773 +1,773 b'' | |||
|
1 | 1 | """ this is forms validation classes |
|
2 | 2 | http://formencode.org/module-formencode.validators.html |
|
3 | 3 | for list off all availible validators |
|
4 | 4 | |
|
5 | 5 | we can create our own validators |
|
6 | 6 | |
|
7 | 7 | The table below outlines the options which can be used in a schema in addition to the validators themselves |
|
8 | 8 | pre_validators [] These validators will be applied before the schema |
|
9 | 9 | chained_validators [] These validators will be applied after the schema |
|
10 | 10 | allow_extra_fields False If True, then it is not an error when keys that aren't associated with a validator are present |
|
11 | 11 | filter_extra_fields False If True, then keys that aren't associated with a validator are removed |
|
12 | 12 | if_key_missing NoDefault If this is given, then any keys that aren't available but are expected will be replaced with this value (and then validated). This does not override a present .if_missing attribute on validators. NoDefault is a special FormEncode class to mean that no default values has been specified and therefore missing keys shouldn't take a default value. |
|
13 | 13 | ignore_key_missing False If True, then missing keys will be missing in the result, if the validator doesn't have .if_missing on it already |
|
14 | 14 | |
|
15 | 15 | |
|
16 | 16 | <name> = formencode.validators.<name of validator> |
|
17 | 17 | <name> must equal form name |
|
18 | 18 | list=[1,2,3,4,5] |
|
19 | 19 | for SELECT use formencode.All(OneOf(list), Int()) |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | import os |
|
23 | 23 | import re |
|
24 | 24 | import logging |
|
25 | 25 | import traceback |
|
26 | 26 | |
|
27 | 27 | import formencode |
|
28 | 28 | from formencode import All |
|
29 | 29 | from formencode.validators import UnicodeString, OneOf, Int, Number, Regex, \ |
|
30 | 30 | Email, Bool, StringBoolean, Set |
|
31 | 31 | |
|
32 | 32 | from pylons.i18n.translation import _ |
|
33 | 33 | from webhelpers.pylonslib.secure_form import authentication_token |
|
34 | 34 | |
|
35 | 35 | from rhodecode.config.routing import ADMIN_PREFIX |
|
36 | 36 | from rhodecode.lib.utils import repo_name_slug |
|
37 | 37 | from rhodecode.lib.auth import authenticate, get_crypt_password |
|
38 | 38 | from rhodecode.lib.exceptions import LdapImportError |
|
39 | 39 | from rhodecode.model.db import User, UsersGroup, RepoGroup, Repository |
|
40 | 40 | from rhodecode import BACKENDS |
|
41 | 41 | |
|
42 | 42 | log = logging.getLogger(__name__) |
|
43 | 43 | |
|
44 | 44 | |
|
45 | 45 | #this is needed to translate the messages using _() in validators |
|
46 | 46 | class State_obj(object): |
|
47 | 47 | _ = staticmethod(_) |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | #============================================================================== |
|
51 | 51 | # VALIDATORS |
|
52 | 52 | #============================================================================== |
|
53 | 53 | class ValidAuthToken(formencode.validators.FancyValidator): |
|
54 | 54 | messages = {'invalid_token': _('Token mismatch')} |
|
55 | 55 | |
|
56 | 56 | def validate_python(self, value, state): |
|
57 | 57 | |
|
58 | 58 | if value != authentication_token(): |
|
59 | 59 | raise formencode.Invalid( |
|
60 | 60 | self.message('invalid_token', |
|
61 | 61 | state, search_number=value), |
|
62 | 62 | value, |
|
63 | 63 | state |
|
64 | 64 | ) |
|
65 | 65 | |
|
66 | 66 | |
|
67 | 67 | def ValidUsername(edit, old_data): |
|
68 | 68 | class _ValidUsername(formencode.validators.FancyValidator): |
|
69 | 69 | |
|
70 | 70 | def validate_python(self, value, state): |
|
71 | 71 | if value in ['default', 'new_user']: |
|
72 | 72 | raise formencode.Invalid(_('Invalid username'), value, state) |
|
73 | 73 | #check if user is unique |
|
74 | 74 | old_un = None |
|
75 | 75 | if edit: |
|
76 | 76 | old_un = User.get(old_data.get('user_id')).username |
|
77 | 77 | |
|
78 | 78 | if old_un != value or not edit: |
|
79 | 79 | if User.get_by_username(value, case_insensitive=True): |
|
80 | 80 | raise formencode.Invalid(_('This username already ' |
|
81 | 81 | 'exists') , value, state) |
|
82 | 82 | |
|
83 | 83 | if re.match(r'^[a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+$', value) is None: |
|
84 | 84 | raise formencode.Invalid( |
|
85 | 85 | _('Username may only contain alphanumeric characters ' |
|
86 | 86 | 'underscores, periods or dashes and must begin with ' |
|
87 | 87 | 'alphanumeric character'), |
|
88 | 88 | value, |
|
89 | 89 | state |
|
90 | 90 | ) |
|
91 | 91 | |
|
92 | 92 | return _ValidUsername |
|
93 | 93 | |
|
94 | 94 | |
|
95 | 95 | def ValidUsersGroup(edit, old_data): |
|
96 | 96 | |
|
97 | 97 | class _ValidUsersGroup(formencode.validators.FancyValidator): |
|
98 | 98 | |
|
99 | 99 | def validate_python(self, value, state): |
|
100 | 100 | if value in ['default']: |
|
101 | 101 | raise formencode.Invalid(_('Invalid group name'), value, state) |
|
102 | 102 | #check if group is unique |
|
103 | 103 | old_ugname = None |
|
104 | 104 | if edit: |
|
105 | 105 | old_ugname = UsersGroup.get( |
|
106 | 106 | old_data.get('users_group_id')).users_group_name |
|
107 | 107 | |
|
108 | 108 | if old_ugname != value or not edit: |
|
109 | 109 | if UsersGroup.get_by_group_name(value, cache=False, |
|
110 | 110 | case_insensitive=True): |
|
111 | 111 | raise formencode.Invalid(_('This users group ' |
|
112 | 112 | 'already exists'), value, |
|
113 | 113 | state) |
|
114 | 114 | |
|
115 | 115 | if re.match(r'^[a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+$', value) is None: |
|
116 | 116 | raise formencode.Invalid( |
|
117 | 117 | _('RepoGroup name may only contain alphanumeric characters ' |
|
118 | 118 | 'underscores, periods or dashes and must begin with ' |
|
119 | 119 | 'alphanumeric character'), |
|
120 | 120 | value, |
|
121 | 121 | state |
|
122 | 122 | ) |
|
123 | 123 | |
|
124 | 124 | return _ValidUsersGroup |
|
125 | 125 | |
|
126 | 126 | |
|
127 | 127 | def ValidReposGroup(edit, old_data): |
|
128 | 128 | class _ValidReposGroup(formencode.validators.FancyValidator): |
|
129 | 129 | |
|
130 | 130 | def validate_python(self, value, state): |
|
131 | 131 | # TODO WRITE VALIDATIONS |
|
132 | 132 | group_name = value.get('group_name') |
|
133 | 133 | group_parent_id = value.get('group_parent_id') |
|
134 | 134 | |
|
135 | 135 | # slugify repo group just in case :) |
|
136 | 136 | slug = repo_name_slug(group_name) |
|
137 | 137 | |
|
138 | 138 | # check for parent of self |
|
139 | 139 | parent_of_self = lambda: ( |
|
140 | 140 | old_data['group_id'] == int(group_parent_id) |
|
141 | 141 | if group_parent_id else False |
|
142 | 142 | ) |
|
143 | 143 | if edit and parent_of_self(): |
|
144 | 144 | e_dict = { |
|
145 | 145 | 'group_parent_id': _('Cannot assign this group as parent') |
|
146 | 146 | } |
|
147 | 147 | raise formencode.Invalid('', value, state, |
|
148 | 148 | error_dict=e_dict) |
|
149 | 149 | |
|
150 | 150 | old_gname = None |
|
151 | 151 | if edit: |
|
152 | 152 | old_gname = RepoGroup.get(old_data.get('group_id')).group_name |
|
153 | 153 | |
|
154 | 154 | if old_gname != group_name or not edit: |
|
155 | 155 | |
|
156 | 156 | # check group |
|
157 | 157 | gr = RepoGroup.query()\ |
|
158 | 158 | .filter(RepoGroup.group_name == slug)\ |
|
159 | 159 | .filter(RepoGroup.group_parent_id == group_parent_id)\ |
|
160 | 160 | .scalar() |
|
161 | 161 | |
|
162 | 162 | if gr: |
|
163 | 163 | e_dict = { |
|
164 | 164 | 'group_name': _('This group already exists') |
|
165 | 165 | } |
|
166 | 166 | raise formencode.Invalid('', value, state, |
|
167 | 167 | error_dict=e_dict) |
|
168 | 168 | |
|
169 | 169 | # check for same repo |
|
170 | 170 | repo = Repository.query()\ |
|
171 | 171 | .filter(Repository.repo_name == slug)\ |
|
172 | 172 | .scalar() |
|
173 | 173 | |
|
174 | 174 | if repo: |
|
175 | 175 | e_dict = { |
|
176 | 176 | 'group_name': _('Repository with this name already exists') |
|
177 | 177 | } |
|
178 | 178 | raise formencode.Invalid('', value, state, |
|
179 | 179 | error_dict=e_dict) |
|
180 | 180 | |
|
181 | 181 | return _ValidReposGroup |
|
182 | 182 | |
|
183 | 183 | |
|
184 | 184 | class ValidPassword(formencode.validators.FancyValidator): |
|
185 | 185 | |
|
186 | 186 | def to_python(self, value, state): |
|
187 | 187 | |
|
188 | 188 | if not value: |
|
189 | 189 | return |
|
190 | 190 | |
|
191 | 191 | if value.get('password'): |
|
192 | 192 | try: |
|
193 | 193 | value['password'] = get_crypt_password(value['password']) |
|
194 | 194 | except UnicodeEncodeError: |
|
195 | 195 | e_dict = {'password': _('Invalid characters in password')} |
|
196 | 196 | raise formencode.Invalid('', value, state, error_dict=e_dict) |
|
197 | 197 | |
|
198 | 198 | if value.get('password_confirmation'): |
|
199 | 199 | try: |
|
200 | 200 | value['password_confirmation'] = \ |
|
201 | 201 | get_crypt_password(value['password_confirmation']) |
|
202 | 202 | except UnicodeEncodeError: |
|
203 | 203 | e_dict = { |
|
204 | 204 | 'password_confirmation': _('Invalid characters in password') |
|
205 | 205 | } |
|
206 | 206 | raise formencode.Invalid('', value, state, error_dict=e_dict) |
|
207 | 207 | |
|
208 | 208 | if value.get('new_password'): |
|
209 | 209 | try: |
|
210 | 210 | value['new_password'] = \ |
|
211 | 211 | get_crypt_password(value['new_password']) |
|
212 | 212 | except UnicodeEncodeError: |
|
213 | 213 | e_dict = {'new_password': _('Invalid characters in password')} |
|
214 | 214 | raise formencode.Invalid('', value, state, error_dict=e_dict) |
|
215 | 215 | |
|
216 | 216 | return value |
|
217 | 217 | |
|
218 | 218 | |
|
219 | 219 | class ValidPasswordsMatch(formencode.validators.FancyValidator): |
|
220 | 220 | |
|
221 | 221 | def validate_python(self, value, state): |
|
222 | 222 | |
|
223 | 223 | pass_val = value.get('password') or value.get('new_password') |
|
224 | 224 | if pass_val != value['password_confirmation']: |
|
225 | 225 | e_dict = {'password_confirmation': |
|
226 | 226 | _('Passwords do not match')} |
|
227 | 227 | raise formencode.Invalid('', value, state, error_dict=e_dict) |
|
228 | 228 | |
|
229 | 229 | |
|
230 | 230 | class ValidAuth(formencode.validators.FancyValidator): |
|
231 | 231 | messages = { |
|
232 | 232 | 'invalid_password':_('invalid password'), |
|
233 | 233 | 'invalid_login':_('invalid user name'), |
|
234 | 234 | 'disabled_account':_('Your account is disabled') |
|
235 | 235 | } |
|
236 | 236 | |
|
237 | 237 | # error mapping |
|
238 | 238 | e_dict = {'username': messages['invalid_login'], |
|
239 | 239 | 'password': messages['invalid_password']} |
|
240 | 240 | e_dict_disable = {'username': messages['disabled_account']} |
|
241 | 241 | |
|
242 | 242 | def validate_python(self, value, state): |
|
243 | 243 | password = value['password'] |
|
244 | 244 | username = value['username'] |
|
245 | 245 | user = User.get_by_username(username) |
|
246 | 246 | |
|
247 | 247 | if authenticate(username, password): |
|
248 | 248 | return value |
|
249 | 249 | else: |
|
250 | 250 | if user and user.active is False: |
|
251 | 251 | log.warning('user %s is disabled' % username) |
|
252 | 252 | raise formencode.Invalid( |
|
253 | 253 | self.message('disabled_account', |
|
254 | 254 | state=State_obj), |
|
255 | 255 | value, state, |
|
256 | 256 | error_dict=self.e_dict_disable |
|
257 | 257 | ) |
|
258 | 258 | else: |
|
259 | 259 | log.warning('user %s failed to authenticate' % username) |
|
260 | 260 | raise formencode.Invalid( |
|
261 | 261 | self.message('invalid_password', |
|
262 | 262 | state=State_obj), value, state, |
|
263 | 263 | error_dict=self.e_dict |
|
264 | 264 | ) |
|
265 | 265 | |
|
266 | 266 | |
|
267 | 267 | class ValidRepoUser(formencode.validators.FancyValidator): |
|
268 | 268 | |
|
269 | 269 | def to_python(self, value, state): |
|
270 | 270 | try: |
|
271 | 271 | User.query().filter(User.active == True)\ |
|
272 | 272 | .filter(User.username == value).one() |
|
273 | 273 | except Exception: |
|
274 | 274 | raise formencode.Invalid(_('This username is not valid'), |
|
275 | 275 | value, state) |
|
276 | 276 | return value |
|
277 | 277 | |
|
278 | 278 | |
|
279 | 279 | def ValidRepoName(edit, old_data): |
|
280 | 280 | class _ValidRepoName(formencode.validators.FancyValidator): |
|
281 | 281 | def to_python(self, value, state): |
|
282 | 282 | |
|
283 | 283 | repo_name = value.get('repo_name') |
|
284 | 284 | |
|
285 | 285 | slug = repo_name_slug(repo_name) |
|
286 | 286 | if slug in [ADMIN_PREFIX, '']: |
|
287 | 287 | e_dict = {'repo_name': _('This repository name is disallowed')} |
|
288 | 288 | raise formencode.Invalid('', value, state, error_dict=e_dict) |
|
289 | 289 | |
|
290 | 290 | if value.get('repo_group'): |
|
291 | 291 | gr = RepoGroup.get(value.get('repo_group')) |
|
292 | 292 | group_path = gr.full_path |
|
293 | 293 | # value needs to be aware of group name in order to check |
|
294 | 294 | # db key This is an actual just the name to store in the |
|
295 | 295 | # database |
|
296 | 296 | repo_name_full = group_path + RepoGroup.url_sep() + repo_name |
|
297 | 297 | |
|
298 | 298 | else: |
|
299 | 299 | group_path = '' |
|
300 | 300 | repo_name_full = repo_name |
|
301 | 301 | |
|
302 | 302 | value['repo_name_full'] = repo_name_full |
|
303 | 303 | rename = old_data.get('repo_name') != repo_name_full |
|
304 | 304 | create = not edit |
|
305 | 305 | if rename or create: |
|
306 | 306 | |
|
307 | 307 | if group_path != '': |
|
308 | 308 | if Repository.get_by_repo_name(repo_name_full): |
|
309 | 309 | e_dict = { |
|
310 | 310 | 'repo_name': _('This repository already exists in ' |
|
311 | 311 | 'a group "%s"') % gr.group_name |
|
312 | 312 | } |
|
313 | 313 | raise formencode.Invalid('', value, state, |
|
314 | 314 | error_dict=e_dict) |
|
315 | 315 | elif RepoGroup.get_by_group_name(repo_name_full): |
|
316 | 316 | e_dict = { |
|
317 | 317 | 'repo_name': _('There is a group with this name ' |
|
318 | 318 | 'already "%s"') % repo_name_full |
|
319 | 319 | } |
|
320 | 320 | raise formencode.Invalid('', value, state, |
|
321 | 321 | error_dict=e_dict) |
|
322 | 322 | |
|
323 | 323 | elif Repository.get_by_repo_name(repo_name_full): |
|
324 | 324 | e_dict = {'repo_name': _('This repository ' |
|
325 | 325 | 'already exists')} |
|
326 | 326 | raise formencode.Invalid('', value, state, |
|
327 | 327 | error_dict=e_dict) |
|
328 | 328 | |
|
329 | 329 | return value |
|
330 | 330 | |
|
331 | 331 | return _ValidRepoName |
|
332 | 332 | |
|
333 | 333 | |
|
334 | 334 | def ValidForkName(*args, **kwargs): |
|
335 | 335 | return ValidRepoName(*args, **kwargs) |
|
336 | 336 | |
|
337 | 337 | |
|
338 | 338 | def SlugifyName(): |
|
339 | 339 | class _SlugifyName(formencode.validators.FancyValidator): |
|
340 | 340 | |
|
341 | 341 | def to_python(self, value, state): |
|
342 | 342 | return repo_name_slug(value) |
|
343 | 343 | |
|
344 | 344 | return _SlugifyName |
|
345 | 345 | |
|
346 | 346 | |
|
347 | 347 | def ValidCloneUri(): |
|
348 | 348 | from rhodecode.lib.utils import make_ui |
|
349 | 349 | |
|
350 | 350 | def url_handler(repo_type, url, proto, ui=None): |
|
351 | 351 | if repo_type == 'hg': |
|
352 | 352 | from mercurial.httprepo import httprepository, httpsrepository |
|
353 | 353 | if proto == 'https': |
|
354 | 354 | httpsrepository(make_ui('db'), url).capabilities |
|
355 | 355 | elif proto == 'http': |
|
356 | 356 | httprepository(make_ui('db'), url).capabilities |
|
357 | 357 | elif repo_type == 'git': |
|
358 | 358 | #TODO: write a git url validator |
|
359 | 359 | pass |
|
360 | 360 | |
|
361 | 361 | class _ValidCloneUri(formencode.validators.FancyValidator): |
|
362 | 362 | |
|
363 | 363 | def to_python(self, value, state): |
|
364 | 364 | |
|
365 | 365 | repo_type = value.get('repo_type') |
|
366 | 366 | url = value.get('clone_uri') |
|
367 | 367 | e_dict = {'clone_uri': _('invalid clone url')} |
|
368 | 368 | |
|
369 | 369 | if not url: |
|
370 | 370 | pass |
|
371 | 371 | elif url.startswith('https'): |
|
372 | 372 | try: |
|
373 | 373 | url_handler(repo_type, url, 'https', make_ui('db')) |
|
374 | 374 | except Exception: |
|
375 | 375 | log.error(traceback.format_exc()) |
|
376 | 376 | raise formencode.Invalid('', value, state, error_dict=e_dict) |
|
377 | 377 | elif url.startswith('http'): |
|
378 | 378 | try: |
|
379 | 379 | url_handler(repo_type, url, 'http', make_ui('db')) |
|
380 | 380 | except Exception: |
|
381 | 381 | log.error(traceback.format_exc()) |
|
382 | 382 | raise formencode.Invalid('', value, state, error_dict=e_dict) |
|
383 | 383 | else: |
|
384 | 384 | e_dict = {'clone_uri': _('Invalid clone url, provide a ' |
|
385 | 385 | 'valid clone http\s url')} |
|
386 | 386 | raise formencode.Invalid('', value, state, error_dict=e_dict) |
|
387 | 387 | |
|
388 | 388 | return value |
|
389 | 389 | |
|
390 | 390 | return _ValidCloneUri |
|
391 | 391 | |
|
392 | 392 | |
|
393 | 393 | def ValidForkType(old_data): |
|
394 | 394 | class _ValidForkType(formencode.validators.FancyValidator): |
|
395 | 395 | |
|
396 | 396 | def to_python(self, value, state): |
|
397 | 397 | if old_data['repo_type'] != value: |
|
398 | 398 | raise formencode.Invalid(_('Fork have to be the same ' |
|
399 | 399 | 'type as original'), value, state) |
|
400 | 400 | |
|
401 | 401 | return value |
|
402 | 402 | return _ValidForkType |
|
403 | 403 | |
|
404 | 404 | |
|
405 | 405 | def ValidPerms(type_='repo'): |
|
406 | 406 | if type_ == 'group': |
|
407 | 407 | EMPTY_PERM = 'group.none' |
|
408 | 408 | elif type_ == 'repo': |
|
409 | 409 | EMPTY_PERM = 'repository.none' |
|
410 | 410 | |
|
411 | 411 | class _ValidPerms(formencode.validators.FancyValidator): |
|
412 | 412 | messages = { |
|
413 | 413 | 'perm_new_member_name': |
|
414 | 414 | _('This username or users group name is not valid') |
|
415 | 415 | } |
|
416 | 416 | |
|
417 | 417 | def to_python(self, value, state): |
|
418 | 418 | perms_update = [] |
|
419 | 419 | perms_new = [] |
|
420 | 420 | # build a list of permission to update and new permission to create |
|
421 | 421 | for k, v in value.items(): |
|
422 | 422 | # means new added member to permissions |
|
423 | 423 | if k.startswith('perm_new_member'): |
|
424 | 424 | new_perm = value.get('perm_new_member', False) |
|
425 | 425 | new_member = value.get('perm_new_member_name', False) |
|
426 | 426 | new_type = value.get('perm_new_member_type') |
|
427 | 427 | |
|
428 | 428 | if new_member and new_perm: |
|
429 | 429 | if (new_member, new_perm, new_type) not in perms_new: |
|
430 | 430 | perms_new.append((new_member, new_perm, new_type)) |
|
431 | 431 | elif k.startswith('u_perm_') or k.startswith('g_perm_'): |
|
432 | 432 | member = k[7:] |
|
433 | 433 | t = {'u': 'user', |
|
434 | 434 | 'g': 'users_group' |
|
435 | 435 | }[k[0]] |
|
436 | 436 | if member == 'default': |
|
437 | 437 | if value.get('private'): |
|
438 | 438 | # set none for default when updating to private repo |
|
439 | 439 | v = EMPTY_PERM |
|
440 | 440 | perms_update.append((member, v, t)) |
|
441 | 441 | |
|
442 | 442 | value['perms_updates'] = perms_update |
|
443 | 443 | value['perms_new'] = perms_new |
|
444 | 444 | |
|
445 | 445 | # update permissions |
|
446 | 446 | for k, v, t in perms_new: |
|
447 | 447 | try: |
|
448 | 448 | if t is 'user': |
|
449 | 449 | self.user_db = User.query()\ |
|
450 | 450 | .filter(User.active == True)\ |
|
451 | 451 | .filter(User.username == k).one() |
|
452 | 452 | if t is 'users_group': |
|
453 | 453 | self.user_db = UsersGroup.query()\ |
|
454 | 454 | .filter(UsersGroup.users_group_active == True)\ |
|
455 | 455 | .filter(UsersGroup.users_group_name == k).one() |
|
456 | 456 | |
|
457 | 457 | except Exception: |
|
458 | 458 | msg = self.message('perm_new_member_name', |
|
459 | 459 | state=State_obj) |
|
460 | 460 | raise formencode.Invalid( |
|
461 | 461 | msg, value, state, error_dict={'perm_new_member_name': msg} |
|
462 | 462 | ) |
|
463 | 463 | return value |
|
464 | 464 | return _ValidPerms |
|
465 | 465 | |
|
466 | 466 | |
|
467 | 467 | class ValidSettings(formencode.validators.FancyValidator): |
|
468 | 468 | |
|
469 | 469 | def to_python(self, value, state): |
|
470 | 470 | # settings form can't edit user |
|
471 | 471 | if 'user' in value: |
|
472 | 472 | del['value']['user'] |
|
473 | 473 | return value |
|
474 | 474 | |
|
475 | 475 | |
|
476 | 476 | class ValidPath(formencode.validators.FancyValidator): |
|
477 | 477 | def to_python(self, value, state): |
|
478 | 478 | |
|
479 | 479 | if not os.path.isdir(value): |
|
480 | 480 | msg = _('This is not a valid path') |
|
481 | 481 | raise formencode.Invalid(msg, value, state, |
|
482 | 482 | error_dict={'paths_root_path': msg}) |
|
483 | 483 | return value |
|
484 | 484 | |
|
485 | 485 | |
|
486 | 486 | def UniqSystemEmail(old_data): |
|
487 | 487 | class _UniqSystemEmail(formencode.validators.FancyValidator): |
|
488 | 488 | def to_python(self, value, state): |
|
489 | 489 | value = value.lower() |
|
490 |
if old_data.get('email' |
|
|
490 | if (old_data.get('email') or '').lower() != value: | |
|
491 | 491 | user = User.get_by_email(value, case_insensitive=True) |
|
492 | 492 | if user: |
|
493 | 493 | raise formencode.Invalid( |
|
494 | 494 | _("This e-mail address is already taken"), value, state |
|
495 | 495 | ) |
|
496 | 496 | return value |
|
497 | 497 | |
|
498 | 498 | return _UniqSystemEmail |
|
499 | 499 | |
|
500 | 500 | |
|
class ValidSystemEmail(formencode.validators.FancyValidator):
    """Validator requiring the e-mail to belong to an existing user."""

    def to_python(self, value, state):
        value = value.lower()
        existing = User.get_by_email(value, case_insensitive=True)
        if existing is None:
            raise formencode.Invalid(
                _("This e-mail address doesn't exist."), value, state
            )
        return value
|
511 | 511 | |
|
512 | 512 | |
|
class LdapLibValidator(formencode.validators.FancyValidator):
    """Validator that fails unless the python-ldap library is importable."""

    def to_python(self, value, state):
        try:
            # import only probed here; the module is not otherwise used
            import ldap
        except ImportError:
            raise LdapImportError
        return value
|
522 | 522 | |
|
523 | 523 | |
|
class AttrLoginValidator(formencode.validators.FancyValidator):
    """Validator requiring a non-empty string for the LDAP login attribute."""

    def to_python(self, value, state):
        is_text = isinstance(value, (str, unicode))
        if not (value and is_text):
            raise formencode.Invalid(
                _("The LDAP Login attribute of the CN must be specified - "
                  "this is the name of the attribute that is equivalent "
                  "to 'username'"), value, state
            )
        return value
|
536 | 536 | |
|
537 | 537 | |
|
#==============================================================================
# FORMS
#==============================================================================
class LoginForm(formencode.Schema):
    """Schema validating the sign-in form (username/password/remember)."""
    allow_extra_fields = True
    filter_extra_fields = True

    username = UnicodeString(
        strip=True,
        min=1,
        not_empty=True,
        messages={
            'empty': _('Please enter a login'),
            'tooShort': _('Enter a value %(min)i characters long or more')}
    )

    password = UnicodeString(
        strip=True,
        min=3,
        not_empty=True,
        messages={
            'empty': _('Please enter a password'),
            'tooShort': _('Enter %(min)i characters or more')}
    )

    remember = StringBoolean(if_missing=False)

    # credential check happens after the individual field validators
    chained_validators = [ValidAuth]
|
565 | 565 | |
|
566 | 566 | |
|
def UserForm(edit=False, old_data={}):
    """Factory building the user create/edit schema.

    :param edit: when True the password fields become optional and an
        ``admin`` flag is exposed; otherwise a password is mandatory
    :param old_data: existing user data handed to the uniqueness validators
    """
    class _UserForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = True
        username = All(UnicodeString(strip=True, min=1, not_empty=True),
                       ValidUsername(edit, old_data))
        if edit:
            new_password = All(UnicodeString(strip=True, min=6,
                                             not_empty=False))
            password_confirmation = All(UnicodeString(strip=True, min=6,
                                                      not_empty=False))
            admin = StringBoolean(if_missing=False)
        else:
            password = All(UnicodeString(strip=True, min=6, not_empty=True))
            password_confirmation = All(UnicodeString(strip=True, min=6,
                                                      not_empty=False))

        active = StringBoolean(if_missing=False)
        name = UnicodeString(strip=True, min=1, not_empty=False)
        lastname = UnicodeString(strip=True, min=1, not_empty=False)
        email = All(Email(not_empty=True), UniqSystemEmail(old_data))

        chained_validators = [ValidPasswordsMatch, ValidPassword]

    return _UserForm
|
591 | 591 | |
|
592 | 592 | |
|
def UsersGroupForm(edit=False, old_data={}, available_members=[]):
    """Factory building the users-group create/edit schema."""
    class _UsersGroupForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = True

        users_group_name = All(UnicodeString(strip=True, min=1,
                                             not_empty=True),
                               ValidUsersGroup(edit, old_data))

        users_group_active = StringBoolean(if_missing=False)

        if edit:
            # membership can only be changed on an existing group
            users_group_members = OneOf(available_members, hideList=False,
                                        testValueList=True,
                                        if_missing=None, not_empty=False)

    return _UsersGroupForm
|
609 | 609 | |
|
610 | 610 | |
|
def ReposGroupForm(edit=False, old_data={}, available_groups=[]):
    """Factory building the repositories-group create/edit schema."""
    class _ReposGroupForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = False

        group_name = All(UnicodeString(strip=True, min=1, not_empty=True),
                         SlugifyName())
        group_description = UnicodeString(strip=True, min=1,
                                          not_empty=True)
        group_parent_id = OneOf(available_groups, hideList=False,
                                testValueList=True,
                                if_missing=None, not_empty=False)

        chained_validators = [ValidReposGroup(edit, old_data),
                              ValidPerms('group')]

    return _ReposGroupForm
|
627 | 627 | |
|
628 | 628 | |
|
def RegisterForm(edit=False, old_data={}):
    """Factory building the self-registration schema."""
    class _RegisterForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = True
        username = All(ValidUsername(edit, old_data),
                       UnicodeString(strip=True, min=1, not_empty=True))
        password = All(UnicodeString(strip=True, min=6, not_empty=True))
        password_confirmation = All(UnicodeString(strip=True, min=6,
                                                  not_empty=True))
        active = StringBoolean(if_missing=False)
        name = UnicodeString(strip=True, min=1, not_empty=False)
        lastname = UnicodeString(strip=True, min=1, not_empty=False)
        email = All(Email(not_empty=True), UniqSystemEmail(old_data))

        chained_validators = [ValidPasswordsMatch, ValidPassword]

    return _RegisterForm
|
645 | 645 | |
|
646 | 646 | |
|
def PasswordResetForm():
    """Factory building the password-reset schema (e-mail must exist)."""
    class _PasswordResetForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = True
        email = All(ValidSystemEmail(), Email(not_empty=True))

    return _PasswordResetForm
|
653 | 653 | |
|
654 | 654 | |
|
def RepoForm(edit=False, old_data={}, supported_backends=BACKENDS.keys(),
             repo_groups=[]):
    """Factory building the repository create/edit schema."""
    class _RepoForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = False
        repo_name = All(UnicodeString(strip=True, min=1, not_empty=True),
                        SlugifyName())
        clone_uri = All(UnicodeString(strip=True, min=1, not_empty=False))
        repo_group = OneOf(repo_groups, hideList=True)
        repo_type = OneOf(supported_backends)
        description = UnicodeString(strip=True, min=1, not_empty=True)
        private = StringBoolean(if_missing=False)
        enable_statistics = StringBoolean(if_missing=False)
        enable_downloads = StringBoolean(if_missing=False)

        if edit:
            # this is repo owner; only editable on an existing repository
            user = All(UnicodeString(not_empty=True), ValidRepoUser)

        chained_validators = [ValidCloneUri()(),
                              ValidRepoName(edit, old_data),
                              ValidPerms()]

    return _RepoForm
|
678 | 678 | |
|
679 | 679 | |
|
def RepoForkForm(edit=False, old_data={}, supported_backends=BACKENDS.keys(),
                 repo_groups=[]):
    """Factory building the repository fork schema."""
    class _RepoForkForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = False
        repo_name = All(UnicodeString(strip=True, min=1, not_empty=True),
                        SlugifyName())
        repo_group = OneOf(repo_groups, hideList=True)
        # fork must use a backend compatible with its parent
        repo_type = All(ValidForkType(old_data), OneOf(supported_backends))
        description = UnicodeString(strip=True, min=1, not_empty=True)
        private = StringBoolean(if_missing=False)
        copy_permissions = StringBoolean(if_missing=False)
        update_after_clone = StringBoolean(if_missing=False)
        fork_parent_id = UnicodeString()
        chained_validators = [ValidForkName(edit, old_data)]

    return _RepoForkForm
|
697 | 697 | |
|
698 | 698 | |
|
def RepoSettingsForm(edit=False, old_data={},
                     supported_backends=BACKENDS.keys(), repo_groups=[]):
    """Factory building the (non-admin) repository settings schema."""
    class _RepoForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = False
        repo_name = All(UnicodeString(strip=True, min=1, not_empty=True),
                        SlugifyName())
        description = UnicodeString(strip=True, min=1, not_empty=True)
        repo_group = OneOf(repo_groups, hideList=True)
        private = StringBoolean(if_missing=False)

        # ValidSettings strips fields (e.g. owner) this form may not change
        chained_validators = [ValidRepoName(edit, old_data), ValidPerms(),
                              ValidSettings]

    return _RepoForm
|
713 | 713 | |
|
714 | 714 | |
|
def ApplicationSettingsForm():
    """Factory building the global application settings schema."""
    class _ApplicationSettingsForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = False
        rhodecode_title = UnicodeString(strip=True, min=1, not_empty=True)
        rhodecode_realm = UnicodeString(strip=True, min=1, not_empty=True)
        rhodecode_ga_code = UnicodeString(strip=True, min=1, not_empty=False)

    return _ApplicationSettingsForm
|
724 | 724 | |
|
725 | 725 | |
|
def ApplicationUiSettingsForm():
    """Factory building the VCS/ui settings schema (paths, hooks, ssl)."""
    class _ApplicationUiSettingsForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = False
        web_push_ssl = OneOf(['true', 'false'], if_missing='false')
        paths_root_path = All(ValidPath(),
                              UnicodeString(strip=True, min=1,
                                            not_empty=True))
        hooks_changegroup_update = OneOf(['True', 'False'],
                                         if_missing=False)
        hooks_changegroup_repo_size = OneOf(['True', 'False'],
                                            if_missing=False)
        hooks_pretxnchangegroup_push_logger = OneOf(['True', 'False'],
                                                    if_missing=False)
        hooks_preoutgoing_pull_logger = OneOf(['True', 'False'],
                                              if_missing=False)

    return _ApplicationUiSettingsForm
|
738 | 738 | |
|
739 | 739 | |
|
def DefaultPermissionsForm(perms_choices, register_choices, create_choices):
    """Factory building the default-permissions schema from choice lists."""
    class _DefaultPermissionsForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = True
        overwrite_default = StringBoolean(if_missing=False)
        anonymous = OneOf(['True', 'False'], if_missing=False)
        default_perm = OneOf(perms_choices)
        default_register = OneOf(register_choices)
        default_create = OneOf(create_choices)

    return _DefaultPermissionsForm
|
751 | 751 | |
|
752 | 752 | |
|
def LdapSettingsForm(tls_reqcert_choices, search_scope_choices,
                     tls_kind_choices):
    """Factory building the LDAP settings schema.

    Fails early (pre_validators) when the python-ldap library is missing.
    """
    class _LdapSettingsForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = True
        pre_validators = [LdapLibValidator]
        ldap_active = StringBoolean(if_missing=False)
        ldap_host = UnicodeString(strip=True)
        ldap_port = Number(strip=True)
        ldap_tls_kind = OneOf(tls_kind_choices)
        ldap_tls_reqcert = OneOf(tls_reqcert_choices)
        ldap_dn_user = UnicodeString(strip=True)
        ldap_dn_pass = UnicodeString(strip=True)
        ldap_base_dn = UnicodeString(strip=True)
        ldap_filter = UnicodeString(strip=True)
        ldap_search_scope = OneOf(search_scope_choices)
        ldap_attr_login = All(AttrLoginValidator, UnicodeString(strip=True))
        ldap_attr_firstname = UnicodeString(strip=True)
        ldap_attr_lastname = UnicodeString(strip=True)
        ldap_attr_email = UnicodeString(strip=True)

    return _LdapSettingsForm
@@ -1,456 +1,459 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """ |
|
3 | 3 | rhodecode.model.scm |
|
4 | 4 | ~~~~~~~~~~~~~~~~~~~ |
|
5 | 5 | |
|
6 | 6 | Scm model for RhodeCode |
|
7 | 7 | |
|
8 | 8 | :created_on: Apr 9, 2010 |
|
9 | 9 | :author: marcink |
|
10 | 10 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> |
|
11 | 11 | :license: GPLv3, see COPYING for more details. |
|
12 | 12 | """ |
|
13 | 13 | # This program is free software: you can redistribute it and/or modify |
|
14 | 14 | # it under the terms of the GNU General Public License as published by |
|
15 | 15 | # the Free Software Foundation, either version 3 of the License, or |
|
16 | 16 | # (at your option) any later version. |
|
17 | 17 | # |
|
18 | 18 | # This program is distributed in the hope that it will be useful, |
|
19 | 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
20 | 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
21 | 21 | # GNU General Public License for more details. |
|
22 | 22 | # |
|
23 | 23 | # You should have received a copy of the GNU General Public License |
|
24 | 24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
25 | 25 | import os |
|
26 | 26 | import time |
|
27 | 27 | import traceback |
|
28 | 28 | import logging |
|
29 | 29 | import cStringIO |
|
30 | 30 | |
|
31 | 31 | from rhodecode.lib.vcs import get_backend |
|
32 | 32 | from rhodecode.lib.vcs.exceptions import RepositoryError |
|
33 | 33 | from rhodecode.lib.vcs.utils.lazy import LazyProperty |
|
34 | 34 | from rhodecode.lib.vcs.nodes import FileNode |
|
35 | 35 | |
|
36 | 36 | from rhodecode import BACKENDS |
|
37 | 37 | from rhodecode.lib import helpers as h |
|
38 | 38 | from rhodecode.lib import safe_str |
|
39 | 39 | from rhodecode.lib.auth import HasRepoPermissionAny, HasReposGroupPermissionAny |
|
40 | 40 | from rhodecode.lib.utils import get_repos as get_filesystem_repos, make_ui, \ |
|
41 | action_logger, EmptyChangeset | |
|
41 | action_logger, EmptyChangeset, REMOVED_REPO_PAT | |
|
42 | 42 | from rhodecode.model import BaseModel |
|
43 | 43 | from rhodecode.model.db import Repository, RhodeCodeUi, CacheInvalidation, \ |
|
44 | 44 | UserFollowing, UserLog, User, RepoGroup |
|
45 | 45 | |
|
46 | 46 | log = logging.getLogger(__name__) |
|
47 | 47 | |
|
48 | 48 | |
|
49 | 49 | class UserTemp(object): |
|
50 | 50 | def __init__(self, user_id): |
|
51 | 51 | self.user_id = user_id |
|
52 | 52 | |
|
53 | 53 | def __repr__(self): |
|
54 | 54 | return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id) |
|
55 | 55 | |
|
56 | 56 | |
|
57 | 57 | class RepoTemp(object): |
|
58 | 58 | def __init__(self, repo_id): |
|
59 | 59 | self.repo_id = repo_id |
|
60 | 60 | |
|
61 | 61 | def __repr__(self): |
|
62 | 62 | return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id) |
|
63 | 63 | |
|
64 | 64 | |
|
65 | 65 | class CachedRepoList(object): |
|
66 | 66 | |
|
67 | 67 | def __init__(self, db_repo_list, repos_path, order_by=None): |
|
68 | 68 | self.db_repo_list = db_repo_list |
|
69 | 69 | self.repos_path = repos_path |
|
70 | 70 | self.order_by = order_by |
|
71 | 71 | self.reversed = (order_by or '').startswith('-') |
|
72 | 72 | |
|
73 | 73 | def __len__(self): |
|
74 | 74 | return len(self.db_repo_list) |
|
75 | 75 | |
|
76 | 76 | def __repr__(self): |
|
77 | 77 | return '<%s (%s)>' % (self.__class__.__name__, self.__len__()) |
|
78 | 78 | |
|
79 | 79 | def __iter__(self): |
|
80 | 80 | for dbr in self.db_repo_list: |
|
81 | 81 | scmr = dbr.scm_instance_cached |
|
82 | 82 | # check permission at this level |
|
83 | 83 | if not HasRepoPermissionAny( |
|
84 | 84 | 'repository.read', 'repository.write', 'repository.admin' |
|
85 | 85 | )(dbr.repo_name, 'get repo check'): |
|
86 | 86 | continue |
|
87 | 87 | |
|
88 | 88 | if scmr is None: |
|
89 | 89 | log.error( |
|
90 | 90 | '%s this repository is present in database but it ' |
|
91 | 91 | 'cannot be created as an scm instance' % dbr.repo_name |
|
92 | 92 | ) |
|
93 | 93 | continue |
|
94 | 94 | |
|
95 | 95 | last_change = scmr.last_change |
|
96 | 96 | tip = h.get_changeset_safe(scmr, 'tip') |
|
97 | 97 | |
|
98 | 98 | tmp_d = {} |
|
99 | 99 | tmp_d['name'] = dbr.repo_name |
|
100 | 100 | tmp_d['name_sort'] = tmp_d['name'].lower() |
|
101 | 101 | tmp_d['description'] = dbr.description |
|
102 | 102 | tmp_d['description_sort'] = tmp_d['description'] |
|
103 | 103 | tmp_d['last_change'] = last_change |
|
104 | 104 | tmp_d['last_change_sort'] = time.mktime(last_change.timetuple()) |
|
105 | 105 | tmp_d['tip'] = tip.raw_id |
|
106 | 106 | tmp_d['tip_sort'] = tip.revision |
|
107 | 107 | tmp_d['rev'] = tip.revision |
|
108 | 108 | tmp_d['contact'] = dbr.user.full_contact |
|
109 | 109 | tmp_d['contact_sort'] = tmp_d['contact'] |
|
110 | 110 | tmp_d['owner_sort'] = tmp_d['contact'] |
|
111 | 111 | tmp_d['repo_archives'] = list(scmr._get_archives()) |
|
112 | 112 | tmp_d['last_msg'] = tip.message |
|
113 | 113 | tmp_d['author'] = tip.author |
|
114 | 114 | tmp_d['dbrepo'] = dbr.get_dict() |
|
115 | 115 | tmp_d['dbrepo_fork'] = dbr.fork.get_dict() if dbr.fork else {} |
|
116 | 116 | yield tmp_d |
|
117 | 117 | |
|
118 | 118 | |
|
119 | 119 | class GroupList(object): |
|
120 | 120 | |
|
121 | 121 | def __init__(self, db_repo_group_list): |
|
122 | 122 | self.db_repo_group_list = db_repo_group_list |
|
123 | 123 | |
|
124 | 124 | def __len__(self): |
|
125 | 125 | return len(self.db_repo_group_list) |
|
126 | 126 | |
|
127 | 127 | def __repr__(self): |
|
128 | 128 | return '<%s (%s)>' % (self.__class__.__name__, self.__len__()) |
|
129 | 129 | |
|
130 | 130 | def __iter__(self): |
|
131 | 131 | for dbgr in self.db_repo_group_list: |
|
132 | 132 | # check permission at this level |
|
133 | 133 | if not HasReposGroupPermissionAny( |
|
134 | 134 | 'group.read', 'group.write', 'group.admin' |
|
135 | 135 | )(dbgr.group_name, 'get group repo check'): |
|
136 | 136 | continue |
|
137 | 137 | |
|
138 | 138 | yield dbgr |
|
139 | 139 | |
|
140 | 140 | |
|
141 | 141 | class ScmModel(BaseModel): |
|
142 | 142 | """ |
|
143 | 143 | Generic Scm Model |
|
144 | 144 | """ |
|
145 | 145 | |
|
146 | 146 | def __get_repo(self, instance): |
|
147 | 147 | cls = Repository |
|
148 | 148 | if isinstance(instance, cls): |
|
149 | 149 | return instance |
|
150 | 150 | elif isinstance(instance, int) or str(instance).isdigit(): |
|
151 | 151 | return cls.get(instance) |
|
152 | 152 | elif isinstance(instance, basestring): |
|
153 | 153 | return cls.get_by_repo_name(instance) |
|
154 | 154 | elif instance: |
|
155 | 155 | raise Exception('given object must be int, basestr or Instance' |
|
156 | 156 | ' of %s got %s' % (type(cls), type(instance))) |
|
157 | 157 | |
|
158 | 158 | @LazyProperty |
|
159 | 159 | def repos_path(self): |
|
160 | 160 | """ |
|
161 | 161 | Get's the repositories root path from database |
|
162 | 162 | """ |
|
163 | 163 | |
|
164 | 164 | q = self.sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one() |
|
165 | 165 | |
|
166 | 166 | return q.ui_value |
|
167 | 167 | |
|
168 | 168 | def repo_scan(self, repos_path=None): |
|
169 | 169 | """ |
|
170 | 170 | Listing of repositories in given path. This path should not be a |
|
171 | 171 | repository itself. Return a dictionary of repository objects |
|
172 | 172 | |
|
173 | 173 | :param repos_path: path to directory containing repositories |
|
174 | 174 | """ |
|
175 | 175 | |
|
176 | 176 | if repos_path is None: |
|
177 | 177 | repos_path = self.repos_path |
|
178 | 178 | |
|
179 | 179 | log.info('scanning for repositories in %s' % repos_path) |
|
180 | 180 | |
|
181 | 181 | baseui = make_ui('db') |
|
182 | 182 | repos = {} |
|
183 | 183 | |
|
184 | 184 | for name, path in get_filesystem_repos(repos_path, recursive=True): |
|
185 | # skip removed repos | |
|
186 | if REMOVED_REPO_PAT.match(name): | |
|
187 | continue | |
|
185 | 188 | |
|
186 | 189 | # name need to be decomposed and put back together using the / |
|
187 | 190 | # since this is internal storage separator for rhodecode |
|
188 | 191 | name = Repository.url_sep().join(name.split(os.sep)) |
|
189 | 192 | |
|
190 | 193 | try: |
|
191 | 194 | if name in repos: |
|
192 | 195 | raise RepositoryError('Duplicate repository name %s ' |
|
193 | 196 | 'found in %s' % (name, path)) |
|
194 | 197 | else: |
|
195 | 198 | |
|
196 | 199 | klass = get_backend(path[0]) |
|
197 | 200 | |
|
198 | 201 | if path[0] == 'hg' and path[0] in BACKENDS.keys(): |
|
199 | 202 | repos[name] = klass(safe_str(path[1]), baseui=baseui) |
|
200 | 203 | |
|
201 | 204 | if path[0] == 'git' and path[0] in BACKENDS.keys(): |
|
202 | 205 | repos[name] = klass(path[1]) |
|
203 | 206 | except OSError: |
|
204 | 207 | continue |
|
205 | 208 | |
|
206 | 209 | return repos |
|
207 | 210 | |
|
208 | 211 | def get_repos(self, all_repos=None, sort_key=None): |
|
209 | 212 | """ |
|
210 | 213 | Get all repos from db and for each repo create it's |
|
211 | 214 | backend instance and fill that backed with information from database |
|
212 | 215 | |
|
213 | 216 | :param all_repos: list of repository names as strings |
|
214 | 217 | give specific repositories list, good for filtering |
|
215 | 218 | """ |
|
216 | 219 | if all_repos is None: |
|
217 | 220 | all_repos = self.sa.query(Repository)\ |
|
218 | 221 | .filter(Repository.group_id == None)\ |
|
219 | 222 | .order_by(Repository.repo_name).all() |
|
220 | 223 | |
|
221 | 224 | repo_iter = CachedRepoList(all_repos, repos_path=self.repos_path, |
|
222 | 225 | order_by=sort_key) |
|
223 | 226 | |
|
224 | 227 | return repo_iter |
|
225 | 228 | |
|
226 | 229 | def get_repos_groups(self, all_groups=None): |
|
227 | 230 | if all_groups is None: |
|
228 | 231 | all_groups = RepoGroup.query()\ |
|
229 | 232 | .filter(RepoGroup.group_parent_id == None).all() |
|
230 | 233 | group_iter = GroupList(all_groups) |
|
231 | 234 | |
|
232 | 235 | return group_iter |
|
233 | 236 | |
|
234 | 237 | def mark_for_invalidation(self, repo_name): |
|
235 | 238 | """Puts cache invalidation task into db for |
|
236 | 239 | further global cache invalidation |
|
237 | 240 | |
|
238 | 241 | :param repo_name: this repo that should invalidation take place |
|
239 | 242 | """ |
|
240 | 243 | CacheInvalidation.set_invalidate(repo_name) |
|
241 | 244 | CacheInvalidation.set_invalidate(repo_name + "_README") |
|
242 | 245 | |
|
243 | 246 | def toggle_following_repo(self, follow_repo_id, user_id): |
|
244 | 247 | |
|
245 | 248 | f = self.sa.query(UserFollowing)\ |
|
246 | 249 | .filter(UserFollowing.follows_repo_id == follow_repo_id)\ |
|
247 | 250 | .filter(UserFollowing.user_id == user_id).scalar() |
|
248 | 251 | |
|
249 | 252 | if f is not None: |
|
250 | 253 | try: |
|
251 | 254 | self.sa.delete(f) |
|
252 | 255 | action_logger(UserTemp(user_id), |
|
253 | 256 | 'stopped_following_repo', |
|
254 | 257 | RepoTemp(follow_repo_id)) |
|
255 | 258 | return |
|
256 | 259 | except: |
|
257 | 260 | log.error(traceback.format_exc()) |
|
258 | 261 | raise |
|
259 | 262 | |
|
260 | 263 | try: |
|
261 | 264 | f = UserFollowing() |
|
262 | 265 | f.user_id = user_id |
|
263 | 266 | f.follows_repo_id = follow_repo_id |
|
264 | 267 | self.sa.add(f) |
|
265 | 268 | |
|
266 | 269 | action_logger(UserTemp(user_id), |
|
267 | 270 | 'started_following_repo', |
|
268 | 271 | RepoTemp(follow_repo_id)) |
|
269 | 272 | except: |
|
270 | 273 | log.error(traceback.format_exc()) |
|
271 | 274 | raise |
|
272 | 275 | |
|
273 | 276 | def toggle_following_user(self, follow_user_id, user_id): |
|
274 | 277 | f = self.sa.query(UserFollowing)\ |
|
275 | 278 | .filter(UserFollowing.follows_user_id == follow_user_id)\ |
|
276 | 279 | .filter(UserFollowing.user_id == user_id).scalar() |
|
277 | 280 | |
|
278 | 281 | if f is not None: |
|
279 | 282 | try: |
|
280 | 283 | self.sa.delete(f) |
|
281 | 284 | return |
|
282 | 285 | except: |
|
283 | 286 | log.error(traceback.format_exc()) |
|
284 | 287 | raise |
|
285 | 288 | |
|
286 | 289 | try: |
|
287 | 290 | f = UserFollowing() |
|
288 | 291 | f.user_id = user_id |
|
289 | 292 | f.follows_user_id = follow_user_id |
|
290 | 293 | self.sa.add(f) |
|
291 | 294 | except: |
|
292 | 295 | log.error(traceback.format_exc()) |
|
293 | 296 | raise |
|
294 | 297 | |
|
295 | 298 | def is_following_repo(self, repo_name, user_id, cache=False): |
|
296 | 299 | r = self.sa.query(Repository)\ |
|
297 | 300 | .filter(Repository.repo_name == repo_name).scalar() |
|
298 | 301 | |
|
299 | 302 | f = self.sa.query(UserFollowing)\ |
|
300 | 303 | .filter(UserFollowing.follows_repository == r)\ |
|
301 | 304 | .filter(UserFollowing.user_id == user_id).scalar() |
|
302 | 305 | |
|
303 | 306 | return f is not None |
|
304 | 307 | |
|
305 | 308 | def is_following_user(self, username, user_id, cache=False): |
|
306 | 309 | u = User.get_by_username(username) |
|
307 | 310 | |
|
308 | 311 | f = self.sa.query(UserFollowing)\ |
|
309 | 312 | .filter(UserFollowing.follows_user == u)\ |
|
310 | 313 | .filter(UserFollowing.user_id == user_id).scalar() |
|
311 | 314 | |
|
312 | 315 | return f is not None |
|
313 | 316 | |
|
314 | 317 | def get_followers(self, repo_id): |
|
315 | 318 | if not isinstance(repo_id, int): |
|
316 | 319 | repo_id = getattr(Repository.get_by_repo_name(repo_id), 'repo_id') |
|
317 | 320 | |
|
318 | 321 | return self.sa.query(UserFollowing)\ |
|
319 | 322 | .filter(UserFollowing.follows_repo_id == repo_id).count() |
|
320 | 323 | |
|
321 | 324 | def get_forks(self, repo_id): |
|
322 | 325 | if not isinstance(repo_id, int): |
|
323 | 326 | repo_id = getattr(Repository.get_by_repo_name(repo_id), 'repo_id') |
|
324 | 327 | |
|
325 | 328 | return self.sa.query(Repository)\ |
|
326 | 329 | .filter(Repository.fork_id == repo_id).count() |
|
327 | 330 | |
|
328 | 331 | def mark_as_fork(self, repo, fork, user): |
|
329 | 332 | repo = self.__get_repo(repo) |
|
330 | 333 | fork = self.__get_repo(fork) |
|
331 | 334 | repo.fork = fork |
|
332 | 335 | self.sa.add(repo) |
|
333 | 336 | return repo |
|
334 | 337 | |
|
335 | 338 | def pull_changes(self, repo_name, username): |
|
336 | 339 | dbrepo = Repository.get_by_repo_name(repo_name) |
|
337 | 340 | clone_uri = dbrepo.clone_uri |
|
338 | 341 | if not clone_uri: |
|
339 | 342 | raise Exception("This repository doesn't have a clone uri") |
|
340 | 343 | |
|
341 | 344 | repo = dbrepo.scm_instance |
|
342 | 345 | try: |
|
343 | 346 | extras = {'ip': '', |
|
344 | 347 | 'username': username, |
|
345 | 348 | 'action': 'push_remote', |
|
346 | 349 | 'repository': repo_name} |
|
347 | 350 | |
|
348 | 351 | #inject ui extra param to log this action via push logger |
|
349 | 352 | for k, v in extras.items(): |
|
350 | 353 | repo._repo.ui.setconfig('rhodecode_extras', k, v) |
|
351 | 354 | |
|
352 | 355 | repo.pull(clone_uri) |
|
353 | 356 | self.mark_for_invalidation(repo_name) |
|
354 | 357 | except: |
|
355 | 358 | log.error(traceback.format_exc()) |
|
356 | 359 | raise |
|
357 | 360 | |
|
358 | 361 | def commit_change(self, repo, repo_name, cs, user, author, message, |
|
359 | 362 | content, f_path): |
|
360 | 363 | |
|
361 | 364 | if repo.alias == 'hg': |
|
362 | 365 | from rhodecode.lib.vcs.backends.hg import MercurialInMemoryChangeset as IMC |
|
363 | 366 | elif repo.alias == 'git': |
|
364 | 367 | from rhodecode.lib.vcs.backends.git import GitInMemoryChangeset as IMC |
|
365 | 368 | |
|
366 | 369 | # decoding here will force that we have proper encoded values |
|
367 | 370 | # in any other case this will throw exceptions and deny commit |
|
368 | 371 | content = safe_str(content) |
|
369 | 372 | message = safe_str(message) |
|
370 | 373 | path = safe_str(f_path) |
|
371 | 374 | author = safe_str(author) |
|
372 | 375 | m = IMC(repo) |
|
373 | 376 | m.change(FileNode(path, content)) |
|
374 | 377 | tip = m.commit(message=message, |
|
375 | 378 | author=author, |
|
376 | 379 | parents=[cs], branch=cs.branch) |
|
377 | 380 | |
|
378 | 381 | new_cs = tip.short_id |
|
379 | 382 | action = 'push_local:%s' % new_cs |
|
380 | 383 | |
|
381 | 384 | action_logger(user, action, repo_name) |
|
382 | 385 | |
|
383 | 386 | self.mark_for_invalidation(repo_name) |
|
384 | 387 | |
|
385 | 388 | def create_node(self, repo, repo_name, cs, user, author, message, content, |
|
386 | 389 | f_path): |
|
387 | 390 | if repo.alias == 'hg': |
|
388 | 391 | from rhodecode.lib.vcs.backends.hg import MercurialInMemoryChangeset as IMC |
|
389 | 392 | elif repo.alias == 'git': |
|
390 | 393 | from rhodecode.lib.vcs.backends.git import GitInMemoryChangeset as IMC |
|
391 | 394 | # decoding here will force that we have proper encoded values |
|
392 | 395 | # in any other case this will throw exceptions and deny commit |
|
393 | 396 | |
|
394 | 397 | if isinstance(content, (basestring,)): |
|
395 | 398 | content = safe_str(content) |
|
396 | 399 | elif isinstance(content, (file, cStringIO.OutputType,)): |
|
397 | 400 | content = content.read() |
|
398 | 401 | else: |
|
399 | 402 | raise Exception('Content is of unrecognized type %s' % ( |
|
400 | 403 | type(content) |
|
401 | 404 | )) |
|
402 | 405 | |
|
403 | 406 | message = safe_str(message) |
|
404 | 407 | path = safe_str(f_path) |
|
405 | 408 | author = safe_str(author) |
|
406 | 409 | m = IMC(repo) |
|
407 | 410 | |
|
408 | 411 | if isinstance(cs, EmptyChangeset): |
|
409 | 412 | # Emptychangeset means we we're editing empty repository |
|
410 | 413 | parents = None |
|
411 | 414 | else: |
|
412 | 415 | parents = [cs] |
|
413 | 416 | |
|
414 | 417 | m.add(FileNode(path, content=content)) |
|
415 | 418 | tip = m.commit(message=message, |
|
416 | 419 | author=author, |
|
417 | 420 | parents=parents, branch=cs.branch) |
|
418 | 421 | new_cs = tip.short_id |
|
419 | 422 | action = 'push_local:%s' % new_cs |
|
420 | 423 | |
|
421 | 424 | action_logger(user, action, repo_name) |
|
422 | 425 | |
|
423 | 426 | self.mark_for_invalidation(repo_name) |
|
424 | 427 | |
|
425 | 428 | def get_nodes(self, repo_name, revision, root_path='/', flat=True): |
|
426 | 429 | """ |
|
427 | 430 | recursive walk in root dir and return a set of all path in that dir |
|
428 | 431 | based on repository walk function |
|
429 | 432 | |
|
430 | 433 | :param repo_name: name of repository |
|
431 | 434 | :param revision: revision for which to list nodes |
|
432 | 435 | :param root_path: root path to list |
|
433 | 436 | :param flat: return as a list, if False returns a dict with decription |
|
434 | 437 | |
|
435 | 438 | """ |
|
436 | 439 | _files = list() |
|
437 | 440 | _dirs = list() |
|
438 | 441 | try: |
|
439 | 442 | _repo = self.__get_repo(repo_name) |
|
440 | 443 | changeset = _repo.scm_instance.get_changeset(revision) |
|
441 | 444 | root_path = root_path.lstrip('/') |
|
442 | 445 | for topnode, dirs, files in changeset.walk(root_path): |
|
443 | 446 | for f in files: |
|
444 | 447 | _files.append(f.path if flat else {"name": f.path, |
|
445 | 448 | "type": "file"}) |
|
446 | 449 | for d in dirs: |
|
447 | 450 | _dirs.append(d.path if flat else {"name": d.path, |
|
448 | 451 | "type": "dir"}) |
|
449 | 452 | except RepositoryError: |
|
450 | 453 | log.debug(traceback.format_exc()) |
|
451 | 454 | raise |
|
452 | 455 | |
|
453 | 456 | return _dirs, _files |
|
454 | 457 | |
|
455 | 458 | def get_unread_journal(self): |
|
456 | 459 | return self.sa.query(UserLog).count() |
@@ -1,124 +1,124 b'' | |||
|
1 | 1 | ## -*- coding: utf-8 -*- |
|
2 | 2 | <%inherit file="/base/base.html"/> |
|
3 | 3 | |
|
4 | 4 | <%def name="title()"> |
|
5 | 5 | ${_('Repositories administration')} - ${c.rhodecode_name} |
|
6 | 6 | </%def> |
|
7 | 7 | |
|
8 | 8 | |
|
9 | 9 | <%def name="breadcrumbs_links()"> |
|
10 | 10 | ${h.link_to(_('Admin'),h.url('admin_home'))} » ${_('Repositories')} |
|
11 | 11 | </%def> |
|
12 | 12 | <%def name="page_nav()"> |
|
13 | 13 | ${self.menu('admin')} |
|
14 | 14 | </%def> |
|
15 | 15 | <%def name="main()"> |
|
16 | 16 | <div class="box"> |
|
17 | 17 | |
|
18 | 18 | <div class="title"> |
|
19 | 19 | ${self.breadcrumbs()} |
|
20 | 20 | <ul class="links"> |
|
21 | 21 | <li> |
|
22 | 22 | <span>${h.link_to(_(u'ADD REPOSITORY'),h.url('new_repo'))}</span> |
|
23 | 23 | </li> |
|
24 | 24 | </ul> |
|
25 | 25 | </div> |
|
26 | 26 | |
|
27 | 27 | <div class="table"> |
|
28 | 28 | <div id='repos_list_wrap' class="yui-skin-sam"> |
|
29 | 29 | <%cnt=0%> |
|
30 | 30 | <%namespace name="dt" file="/_data_table/_dt_elements.html"/> |
|
31 | 31 | |
|
32 | 32 | <table id="repos_list"> |
|
33 | 33 | <thead> |
|
34 | 34 | <tr> |
|
35 | 35 | <th class="left"></th> |
|
36 | 36 | <th class="left">${_('Name')}</th> |
|
37 | 37 | <th class="left">${_('Description')}</th> |
|
38 | 38 | <th class="left">${_('Last change')}</th> |
|
39 | 39 | <th class="left">${_('Tip')}</th> |
|
40 | 40 | <th class="left">${_('Contact')}</th> |
|
41 | 41 | <th class="left">${_('Action')}</th> |
|
42 | 42 | </tr> |
|
43 | 43 | </thead> |
|
44 | 44 | |
|
45 |
%for cnt,repo in enumerate(c.repos_list |
|
|
46 | <tr class="parity${cnt%2}"> | |
|
45 | %for cnt,repo in enumerate(c.repos_list): | |
|
46 | <tr class="parity${(cnt+1)%2}"> | |
|
47 | 47 | <td class="quick_repo_menu"> |
|
48 | 48 | ${dt.quick_menu(repo['name'])} |
|
49 | 49 | </td> |
|
50 | 50 | <td class="reponame"> |
|
51 | 51 | ${dt.repo_name(repo['name'],repo['dbrepo']['repo_type'],repo['dbrepo']['private'],repo['dbrepo_fork'].get('repo_name'))} |
|
52 | 52 | </td> |
|
53 | 53 | ##DESCRIPTION |
|
54 | 54 | <td><span class="tooltip" title="${h.tooltip(repo['description'])}"> |
|
55 | 55 | ${h.truncate(repo['description'],60)}</span> |
|
56 | 56 | </td> |
|
57 | 57 | ##LAST CHANGE |
|
58 | 58 | <td> |
|
59 | 59 | <span class="tooltip" title="${repo['last_change']}">${h.age(repo['last_change'])}</span> |
|
60 | 60 | </td> |
|
61 | 61 | ##LAST REVISION |
|
62 | 62 | <td> |
|
63 | 63 | ${dt.revision(repo['name'],repo['rev'],repo['tip'],repo['author'],repo['last_msg'])} |
|
64 | 64 | </td> |
|
65 | 65 | <td title="${repo['contact']}">${h.person(repo['contact'])}</td> |
|
66 | 66 | <td> |
|
67 | 67 | ${h.form(url('repo', repo_name=repo['name']),method='delete')} |
|
68 | 68 | ${h.submit('remove_%s' % repo['name'],_('delete'),class_="delete_icon action_button",onclick="return confirm('"+_('Confirm to delete this repository: %s') % repo['name']+"');")} |
|
69 | 69 | ${h.end_form()} |
|
70 | 70 | </td> |
|
71 | 71 | </tr> |
|
72 | 72 | %endfor |
|
73 | 73 | </table> |
|
74 | 74 | </div> |
|
75 | 75 | </div> |
|
76 | 76 | </div> |
|
77 | 77 | <script> |
|
78 | 78 | |
|
79 | 79 | // main table sorting |
|
80 | 80 | var myColumnDefs = [ |
|
81 | 81 | {key:"menu",label:"",sortable:false,className:"quick_repo_menu hidden"}, |
|
82 | 82 | {key:"name",label:"${_('Name')}",sortable:true, |
|
83 | 83 | sortOptions: { sortFunction: nameSort }}, |
|
84 | 84 | {key:"desc",label:"${_('Description')}",sortable:true}, |
|
85 | 85 | {key:"last_change",label:"${_('Last Change')}",sortable:true, |
|
86 | 86 | sortOptions: { sortFunction: ageSort }}, |
|
87 | 87 | {key:"tip",label:"${_('Tip')}",sortable:true, |
|
88 | 88 | sortOptions: { sortFunction: revisionSort }}, |
|
89 | 89 | {key:"owner",label:"${_('Owner')}",sortable:true}, |
|
90 | 90 | {key:"action",label:"${_('Action')}",sortable:false}, |
|
91 | 91 | ]; |
|
92 | 92 | |
|
93 | 93 | var myDataSource = new YAHOO.util.DataSource(YUD.get("repos_list")); |
|
94 | 94 | |
|
95 | 95 | myDataSource.responseType = YAHOO.util.DataSource.TYPE_HTMLTABLE; |
|
96 | 96 | |
|
97 | 97 | myDataSource.responseSchema = { |
|
98 | 98 | fields: [ |
|
99 | 99 | {key:"menu"}, |
|
100 | 100 | {key:"name"}, |
|
101 | 101 | {key:"desc"}, |
|
102 | 102 | {key:"last_change"}, |
|
103 | 103 | {key:"tip"}, |
|
104 | 104 | {key:"owner"}, |
|
105 | 105 | {key:"action"}, |
|
106 | 106 | ] |
|
107 | 107 | }; |
|
108 | 108 | |
|
109 | 109 | var myDataTable = new YAHOO.widget.DataTable("repos_list_wrap", myColumnDefs, myDataSource, |
|
110 | 110 | { |
|
111 | 111 | sortedBy:{key:"name",dir:"asc"}, |
|
112 | 112 | MSG_SORTASC:"${_('Click to sort ascending')}", |
|
113 | 113 | MSG_SORTDESC:"${_('Click to sort descending')}", |
|
114 | 114 | MSG_EMPTY:"${_('No records found.')}", |
|
115 | 115 | MSG_ERROR:"${_('Data error.')}", |
|
116 | 116 | MSG_LOADING:"${_('Loading...')}", |
|
117 | 117 | } |
|
118 | 118 | ); |
|
119 | 119 | myDataTable.subscribe('postRenderEvent',function(oArgs) { |
|
120 | 120 | tooltip_activate(); |
|
121 | 121 | quick_repo_menu(); |
|
122 | 122 | }); |
|
123 | 123 | </script> |
|
124 | 124 | </%def> |
@@ -1,197 +1,197 b'' | |||
|
1 | 1 | <%page args="parent" /> |
|
2 | 2 | <div class="box"> |
|
3 | 3 | <!-- box / title --> |
|
4 | 4 | <div class="title"> |
|
5 | 5 | <h5> |
|
6 | 6 | <input class="q_filter_box" id="q_filter" size="15" type="text" name="filter" value="${_('quick filter...')}"/> ${parent.breadcrumbs()} <span id="repo_count">0</span> ${_('repositories')} |
|
7 | 7 | </h5> |
|
8 | 8 | %if c.rhodecode_user.username != 'default': |
|
9 | 9 | %if h.HasPermissionAny('hg.admin','hg.create.repository')(): |
|
10 | 10 | <ul class="links"> |
|
11 | 11 | <li> |
|
12 | 12 | <span>${h.link_to(_('ADD REPOSITORY'),h.url('admin_settings_create_repository'))}</span> |
|
13 | 13 | </li> |
|
14 | 14 | </ul> |
|
15 | 15 | %endif |
|
16 | 16 | %endif |
|
17 | 17 | </div> |
|
18 | 18 | <!-- end box / title --> |
|
19 | 19 | <div class="table"> |
|
20 | 20 | % if c.groups: |
|
21 | 21 | <div id='groups_list_wrap' class="yui-skin-sam"> |
|
22 | 22 | <table id="groups_list"> |
|
23 | 23 | <thead> |
|
24 | 24 | <tr> |
|
25 | 25 | <th class="left"><a href="#">${_('Group name')}</a></th> |
|
26 | 26 | <th class="left"><a href="#">${_('Description')}</a></th> |
|
27 | 27 | ##<th class="left"><a href="#">${_('Number of repositories')}</a></th> |
|
28 | 28 | </tr> |
|
29 | 29 | </thead> |
|
30 | 30 | |
|
31 | 31 | ## REPO GROUPS |
|
32 | 32 | % for gr in c.groups: |
|
33 | 33 | <tr> |
|
34 | 34 | <td> |
|
35 | 35 | <div style="white-space: nowrap"> |
|
36 | 36 | <img class="icon" alt="${_('Repositories group')}" src="${h.url('/images/icons/database_link.png')}"/> |
|
37 | 37 | ${h.link_to(gr.name,url('repos_group_home',group_name=gr.group_name))} |
|
38 | 38 | </div> |
|
39 | 39 | </td> |
|
40 | 40 | <td>${gr.group_description}</td> |
|
41 | 41 | ## this is commented out since for multi nested repos can be HEAVY! |
|
42 | 42 | ## in number of executed queries during traversing uncomment at will |
|
43 | 43 | ##<td><b>${gr.repositories_recursive_count}</b></td> |
|
44 | 44 | </tr> |
|
45 | 45 | % endfor |
|
46 | 46 | |
|
47 | 47 | </table> |
|
48 | 48 | </div> |
|
49 | 49 | <div style="height: 20px"></div> |
|
50 | 50 | % endif |
|
51 | 51 | <div id="welcome" style="display:none;text-align:center"> |
|
52 | 52 | <h1><a href="${h.url('home')}">${c.rhodecode_name} ${c.rhodecode_version}</a></h1> |
|
53 | 53 | </div> |
|
54 | 54 | <div id='repos_list_wrap' class="yui-skin-sam"> |
|
55 | 55 | <%cnt=0%> |
|
56 | 56 | <%namespace name="dt" file="/_data_table/_dt_elements.html"/> |
|
57 | 57 | |
|
58 | 58 | <table id="repos_list"> |
|
59 | 59 | <thead> |
|
60 | 60 | <tr> |
|
61 | 61 | <th class="left"></th> |
|
62 | 62 | <th class="left">${_('Name')}</th> |
|
63 | 63 | <th class="left">${_('Description')}</th> |
|
64 | 64 | <th class="left">${_('Last change')}</th> |
|
65 | 65 | <th class="left">${_('Tip')}</th> |
|
66 | 66 | <th class="left">${_('Owner')}</th> |
|
67 | 67 | <th class="left">${_('RSS')}</th> |
|
68 | 68 | <th class="left">${_('Atom')}</th> |
|
69 | 69 | </tr> |
|
70 | 70 | </thead> |
|
71 | 71 | <tbody> |
|
72 |
%for cnt,repo in enumerate(c.repos_list |
|
|
73 | <tr class="parity${cnt%2}"> | |
|
72 | %for cnt,repo in enumerate(c.repos_list): | |
|
73 | <tr class="parity${(cnt+1)%2}"> | |
|
74 | 74 | ##QUICK MENU |
|
75 | 75 | <td class="quick_repo_menu"> |
|
76 | 76 | ${dt.quick_menu(repo['name'])} |
|
77 | 77 | </td> |
|
78 | 78 | ##REPO NAME AND ICONS |
|
79 | 79 | <td class="reponame"> |
|
80 | 80 | ${dt.repo_name(repo['name'],repo['dbrepo']['repo_type'],repo['dbrepo']['private'],repo['dbrepo_fork'].get('repo_name'))} |
|
81 | 81 | </td> |
|
82 | 82 | ##DESCRIPTION |
|
83 | 83 | <td><span class="tooltip" title="${h.tooltip(repo['description'])}"> |
|
84 | 84 | ${h.truncate(repo['description'],60)}</span> |
|
85 | 85 | </td> |
|
86 | 86 | ##LAST CHANGE DATE |
|
87 | 87 | <td> |
|
88 | 88 | <span class="tooltip" title="${repo['last_change']}">${h.age(repo['last_change'])}</span> |
|
89 | 89 | </td> |
|
90 | 90 | ##LAST REVISION |
|
91 | 91 | <td> |
|
92 | 92 | ${dt.revision(repo['name'],repo['rev'],repo['tip'],repo['author'],repo['last_msg'])} |
|
93 | 93 | </td> |
|
94 | 94 | ## |
|
95 | 95 | <td title="${repo['contact']}">${h.person(repo['contact'])}</td> |
|
96 | 96 | <td> |
|
97 | 97 | %if c.rhodecode_user.username != 'default': |
|
98 | 98 | <a title="${_('Subscribe to %s rss feed')%repo['name']}" class="rss_icon" href="${h.url('rss_feed_home',repo_name=repo['name'],api_key=c.rhodecode_user.api_key)}"></a> |
|
99 | 99 | %else: |
|
100 | 100 | <a title="${_('Subscribe to %s rss feed')%repo['name']}" class="rss_icon" href="${h.url('rss_feed_home',repo_name=repo['name'])}"></a> |
|
101 | 101 | %endif: |
|
102 | 102 | </td> |
|
103 | 103 | <td> |
|
104 | 104 | %if c.rhodecode_user.username != 'default': |
|
105 | 105 | <a title="${_('Subscribe to %s atom feed')%repo['name']}" class="atom_icon" href="${h.url('atom_feed_home',repo_name=repo['name'],api_key=c.rhodecode_user.api_key)}"></a> |
|
106 | 106 | %else: |
|
107 | 107 | <a title="${_('Subscribe to %s atom feed')%repo['name']}" class="atom_icon" href="${h.url('atom_feed_home',repo_name=repo['name'])}"></a> |
|
108 | 108 | %endif: |
|
109 | 109 | </td> |
|
110 | 110 | </tr> |
|
111 | 111 | %endfor |
|
112 | 112 | </tbody> |
|
113 | 113 | </table> |
|
114 | 114 | </div> |
|
115 | 115 | </div> |
|
116 | 116 | </div> |
|
117 | 117 | <script> |
|
118 | YUD.get('repo_count').innerHTML = ${cnt}; | |
|
118 | YUD.get('repo_count').innerHTML = ${cnt+1}; | |
|
119 | 119 | var func = function(node){ |
|
120 | 120 | return node.parentNode.parentNode.parentNode.parentNode; |
|
121 | 121 | } |
|
122 | 122 | |
|
123 | 123 | |
|
124 | 124 | // groups table sorting |
|
125 | 125 | var myColumnDefs = [ |
|
126 | 126 | {key:"name",label:"${_('Group Name')}",sortable:true, |
|
127 | 127 | sortOptions: { sortFunction: groupNameSort }}, |
|
128 | 128 | {key:"desc",label:"${_('Description')}",sortable:true}, |
|
129 | 129 | ]; |
|
130 | 130 | |
|
131 | 131 | var myDataSource = new YAHOO.util.DataSource(YUD.get("groups_list")); |
|
132 | 132 | |
|
133 | 133 | myDataSource.responseType = YAHOO.util.DataSource.TYPE_HTMLTABLE; |
|
134 | 134 | myDataSource.responseSchema = { |
|
135 | 135 | fields: [ |
|
136 | 136 | {key:"name"}, |
|
137 | 137 | {key:"desc"}, |
|
138 | 138 | ] |
|
139 | 139 | }; |
|
140 | 140 | |
|
141 | 141 | var myDataTable = new YAHOO.widget.DataTable("groups_list_wrap", myColumnDefs, myDataSource, |
|
142 | 142 | { |
|
143 | 143 | sortedBy:{key:"name",dir:"asc"}, |
|
144 | 144 | MSG_SORTASC:"${_('Click to sort ascending')}", |
|
145 | 145 | MSG_SORTDESC:"${_('Click to sort descending')}" |
|
146 | 146 | } |
|
147 | 147 | ); |
|
148 | 148 | |
|
149 | 149 | // main table sorting |
|
150 | 150 | var myColumnDefs = [ |
|
151 | 151 | {key:"menu",label:"",sortable:false,className:"quick_repo_menu hidden"}, |
|
152 | 152 | {key:"name",label:"${_('Name')}",sortable:true, |
|
153 | 153 | sortOptions: { sortFunction: nameSort }}, |
|
154 | 154 | {key:"desc",label:"${_('Description')}",sortable:true}, |
|
155 | 155 | {key:"last_change",label:"${_('Last Change')}",sortable:true, |
|
156 | 156 | sortOptions: { sortFunction: ageSort }}, |
|
157 | 157 | {key:"tip",label:"${_('Tip')}",sortable:true, |
|
158 | 158 | sortOptions: { sortFunction: revisionSort }}, |
|
159 | 159 | {key:"owner",label:"${_('Owner')}",sortable:true}, |
|
160 | 160 | {key:"rss",label:"",sortable:false}, |
|
161 | 161 | {key:"atom",label:"",sortable:false}, |
|
162 | 162 | ]; |
|
163 | 163 | |
|
164 | 164 | var myDataSource = new YAHOO.util.DataSource(YUD.get("repos_list")); |
|
165 | 165 | |
|
166 | 166 | myDataSource.responseType = YAHOO.util.DataSource.TYPE_HTMLTABLE; |
|
167 | 167 | |
|
168 | 168 | myDataSource.responseSchema = { |
|
169 | 169 | fields: [ |
|
170 | 170 | {key:"menu"}, |
|
171 | 171 | {key:"name"}, |
|
172 | 172 | {key:"desc"}, |
|
173 | 173 | {key:"last_change"}, |
|
174 | 174 | {key:"tip"}, |
|
175 | 175 | {key:"owner"}, |
|
176 | 176 | {key:"rss"}, |
|
177 | 177 | {key:"atom"}, |
|
178 | 178 | ] |
|
179 | 179 | }; |
|
180 | 180 | |
|
181 | 181 | var myDataTable = new YAHOO.widget.DataTable("repos_list_wrap", myColumnDefs, myDataSource, |
|
182 | 182 | { |
|
183 | 183 | sortedBy:{key:"name",dir:"asc"}, |
|
184 | 184 | MSG_SORTASC:"${_('Click to sort ascending')}", |
|
185 | 185 | MSG_SORTDESC:"${_('Click to sort descending')}", |
|
186 | 186 | MSG_EMPTY:"${_('No records found.')}", |
|
187 | 187 | MSG_ERROR:"${_('Data error.')}", |
|
188 | 188 | MSG_LOADING:"${_('Loading...')}", |
|
189 | 189 | } |
|
190 | 190 | ); |
|
191 | 191 | myDataTable.subscribe('postRenderEvent',function(oArgs) { |
|
192 | 192 | tooltip_activate(); |
|
193 | 193 | quick_repo_menu(); |
|
194 | 194 | q_filter('q_filter',YUQ('div.table tr td a.repo_name'),func); |
|
195 | 195 | }); |
|
196 | 196 | |
|
197 | 197 | </script> |
General Comments 0
You need to be logged in to leave comments.
Login now