##// END OF EJS Templates
Fixed methods for checking if path in routes is a repo...
marcink -
r1505:bb6ba744 beta
parent child Browse files
Show More
@@ -1,423 +1,441 b''
1 1 """
2 2 Routes configuration
3 3
4 4 The more specific and detailed routes should be defined first so they
5 5 may take precedent over the more generic routes. For more information
6 6 refer to the routes manual at http://routes.groovie.org/docs/
7 7 """
8 8 from __future__ import with_statement
9 9 from routes import Mapper
10 from rhodecode.lib.utils import check_repo_fast as cr
10
11 11
12 12 # prefix for non repository related links needs to be prefixed with `/`
13 13 ADMIN_PREFIX = '/_admin'
14 14
15 15
16 16 def make_map(config):
17 17 """Create, configure and return the routes Mapper"""
18 18 rmap = Mapper(directory=config['pylons.paths']['controllers'],
19 19 always_scan=config['debug'])
20 20 rmap.minimization = False
21 21 rmap.explicit = False
22
22
23 from rhodecode.lib.utils import check_repo_fast
24 from rhodecode.lib.utils import check_repos_group_fast
25
23 26 def check_repo(environ, match_dict):
24 27 """
25 28 check for valid repository for proper 404 handling
29
26 30 :param environ:
27 31 :param match_dict:
28 32 """
33
29 34 repo_name = match_dict.get('repo_name')
30 return not cr(repo_name, config['base_path'])
35 return check_repo_fast(repo_name, config['base_path'])
36
37 def check_group(environ, match_dict):
38 """
39 check for valid repositories group for proper 404 handling
40
41 :param environ:
42 :param match_dict:
43 """
44 repos_group_name = match_dict.get('group_name')
45
46 return check_repos_group_fast(repos_group_name, config['base_path'])
31 47
32 48
33 49 def check_int(environ, match_dict):
34 50 return match_dict.get('id').isdigit()
35 51
36
37
38
39 52 # The ErrorController route (handles 404/500 error pages); it should
40 53 # likely stay at the top, ensuring it can always be resolved
41 54 rmap.connect('/error/{action}', controller='error')
42 55 rmap.connect('/error/{action}/{id}', controller='error')
43 56
44 57 #==========================================================================
45 58 # CUSTOM ROUTES HERE
46 59 #==========================================================================
47 60
48 61 #MAIN PAGE
49 62 rmap.connect('home', '/', controller='home', action='index')
50 63 rmap.connect('repo_switcher', '/repos', controller='home',
51 64 action='repo_switcher')
52 65 rmap.connect('bugtracker',
53 66 "http://bitbucket.org/marcinkuzminski/rhodecode/issues",
54 67 _static=True)
55 68 rmap.connect('rhodecode_official', "http://rhodecode.org", _static=True)
56 69
57 70 #ADMIN REPOSITORY REST ROUTES
58 71 with rmap.submapper(path_prefix=ADMIN_PREFIX,
59 72 controller='admin/repos') as m:
60 73 m.connect("repos", "/repos",
61 74 action="create", conditions=dict(method=["POST"]))
62 75 m.connect("repos", "/repos",
63 76 action="index", conditions=dict(method=["GET"]))
64 77 m.connect("formatted_repos", "/repos.{format}",
65 78 action="index",
66 79 conditions=dict(method=["GET"]))
67 80 m.connect("new_repo", "/repos/new",
68 81 action="new", conditions=dict(method=["GET"]))
69 82 m.connect("formatted_new_repo", "/repos/new.{format}",
70 83 action="new", conditions=dict(method=["GET"]))
71 84 m.connect("/repos/{repo_name:.*}",
72 85 action="update", conditions=dict(method=["PUT"],
73 86 function=check_repo))
74 87 m.connect("/repos/{repo_name:.*}",
75 88 action="delete", conditions=dict(method=["DELETE"],
76 89 function=check_repo))
77 90 m.connect("edit_repo", "/repos/{repo_name:.*}/edit",
78 91 action="edit", conditions=dict(method=["GET"],
79 92 function=check_repo))
80 93 m.connect("formatted_edit_repo", "/repos/{repo_name:.*}.{format}/edit",
81 94 action="edit", conditions=dict(method=["GET"],
82 95 function=check_repo))
83 96 m.connect("repo", "/repos/{repo_name:.*}",
84 97 action="show", conditions=dict(method=["GET"],
85 98 function=check_repo))
86 99 m.connect("formatted_repo", "/repos/{repo_name:.*}.{format}",
87 100 action="show", conditions=dict(method=["GET"],
88 101 function=check_repo))
89 102 #ajax delete repo perm user
90 103 m.connect('delete_repo_user', "/repos_delete_user/{repo_name:.*}",
91 104 action="delete_perm_user", conditions=dict(method=["DELETE"],
92 105 function=check_repo))
93 106 #ajax delete repo perm users_group
94 107 m.connect('delete_repo_users_group',
95 108 "/repos_delete_users_group/{repo_name:.*}",
96 109 action="delete_perm_users_group",
97 110 conditions=dict(method=["DELETE"], function=check_repo))
98 111
99 112 #settings actions
100 113 m.connect('repo_stats', "/repos_stats/{repo_name:.*}",
101 114 action="repo_stats", conditions=dict(method=["DELETE"],
102 115 function=check_repo))
103 116 m.connect('repo_cache', "/repos_cache/{repo_name:.*}",
104 117 action="repo_cache", conditions=dict(method=["DELETE"],
105 118 function=check_repo))
106 119 m.connect('repo_public_journal',
107 120 "/repos_public_journal/{repo_name:.*}",
108 121 action="repo_public_journal", conditions=dict(method=["PUT"],
109 122 function=check_repo))
110 123 m.connect('repo_pull', "/repo_pull/{repo_name:.*}",
111 124 action="repo_pull", conditions=dict(method=["PUT"],
112 125 function=check_repo))
113 126
114 127 with rmap.submapper(path_prefix=ADMIN_PREFIX,
115 128 controller='admin/repos_groups') as m:
116 129 m.connect("repos_groups", "/repos_groups",
117 130 action="create", conditions=dict(method=["POST"]))
118 131 m.connect("repos_groups", "/repos_groups",
119 132 action="index", conditions=dict(method=["GET"]))
120 133 m.connect("formatted_repos_groups", "/repos_groups.{format}",
121 134 action="index", conditions=dict(method=["GET"]))
122 135 m.connect("new_repos_group", "/repos_groups/new",
123 136 action="new", conditions=dict(method=["GET"]))
124 137 m.connect("formatted_new_repos_group", "/repos_groups/new.{format}",
125 138 action="new", conditions=dict(method=["GET"]))
126 139 m.connect("update_repos_group", "/repos_groups/{id}",
127 140 action="update", conditions=dict(method=["PUT"],
128 141 function=check_int))
129 142 m.connect("delete_repos_group", "/repos_groups/{id}",
130 143 action="delete", conditions=dict(method=["DELETE"],
131 144 function=check_int))
132 145 m.connect("edit_repos_group", "/repos_groups/{id}/edit",
133 146 action="edit", conditions=dict(method=["GET"],
134 147 function=check_int))
135 148 m.connect("formatted_edit_repos_group",
136 149 "/repos_groups/{id}.{format}/edit",
137 150 action="edit", conditions=dict(method=["GET"],
138 151 function=check_int))
139 152 m.connect("repos_group", "/repos_groups/{id}",
140 153 action="show", conditions=dict(method=["GET"],
141 154 function=check_int))
142 155 m.connect("formatted_repos_group", "/repos_groups/{id}.{format}",
143 156 action="show", conditions=dict(method=["GET"],
144 157 function=check_int))
145 158
146 159 #ADMIN USER REST ROUTES
147 160 with rmap.submapper(path_prefix=ADMIN_PREFIX,
148 161 controller='admin/users') as m:
149 162 m.connect("users", "/users",
150 163 action="create", conditions=dict(method=["POST"]))
151 164 m.connect("users", "/users",
152 165 action="index", conditions=dict(method=["GET"]))
153 166 m.connect("formatted_users", "/users.{format}",
154 167 action="index", conditions=dict(method=["GET"]))
155 168 m.connect("new_user", "/users/new",
156 169 action="new", conditions=dict(method=["GET"]))
157 170 m.connect("formatted_new_user", "/users/new.{format}",
158 171 action="new", conditions=dict(method=["GET"]))
159 172 m.connect("update_user", "/users/{id}",
160 173 action="update", conditions=dict(method=["PUT"]))
161 174 m.connect("delete_user", "/users/{id}",
162 175 action="delete", conditions=dict(method=["DELETE"]))
163 176 m.connect("edit_user", "/users/{id}/edit",
164 177 action="edit", conditions=dict(method=["GET"]))
165 178 m.connect("formatted_edit_user",
166 179 "/users/{id}.{format}/edit",
167 180 action="edit", conditions=dict(method=["GET"]))
168 181 m.connect("user", "/users/{id}",
169 182 action="show", conditions=dict(method=["GET"]))
170 183 m.connect("formatted_user", "/users/{id}.{format}",
171 184 action="show", conditions=dict(method=["GET"]))
172 185
173 186 #EXTRAS USER ROUTES
174 187 m.connect("user_perm", "/users_perm/{id}",
175 188 action="update_perm", conditions=dict(method=["PUT"]))
176 189
177 190 #ADMIN USERS REST ROUTES
178 191 with rmap.submapper(path_prefix=ADMIN_PREFIX,
179 192 controller='admin/users_groups') as m:
180 193 m.connect("users_groups", "/users_groups",
181 194 action="create", conditions=dict(method=["POST"]))
182 195 m.connect("users_groups", "/users_groups",
183 196 action="index", conditions=dict(method=["GET"]))
184 197 m.connect("formatted_users_groups", "/users_groups.{format}",
185 198 action="index", conditions=dict(method=["GET"]))
186 199 m.connect("new_users_group", "/users_groups/new",
187 200 action="new", conditions=dict(method=["GET"]))
188 201 m.connect("formatted_new_users_group", "/users_groups/new.{format}",
189 202 action="new", conditions=dict(method=["GET"]))
190 203 m.connect("update_users_group", "/users_groups/{id}",
191 204 action="update", conditions=dict(method=["PUT"]))
192 205 m.connect("delete_users_group", "/users_groups/{id}",
193 206 action="delete", conditions=dict(method=["DELETE"]))
194 207 m.connect("edit_users_group", "/users_groups/{id}/edit",
195 208 action="edit", conditions=dict(method=["GET"]))
196 209 m.connect("formatted_edit_users_group",
197 210 "/users_groups/{id}.{format}/edit",
198 211 action="edit", conditions=dict(method=["GET"]))
199 212 m.connect("users_group", "/users_groups/{id}",
200 213 action="show", conditions=dict(method=["GET"]))
201 214 m.connect("formatted_users_group", "/users_groups/{id}.{format}",
202 215 action="show", conditions=dict(method=["GET"]))
203 216
204 217 #EXTRAS USER ROUTES
205 218 m.connect("users_group_perm", "/users_groups_perm/{id}",
206 219 action="update_perm", conditions=dict(method=["PUT"]))
207 220
208 221 #ADMIN GROUP REST ROUTES
209 222 rmap.resource('group', 'groups',
210 223 controller='admin/groups', path_prefix=ADMIN_PREFIX)
211 224
212 225 #ADMIN PERMISSIONS REST ROUTES
213 226 rmap.resource('permission', 'permissions',
214 227 controller='admin/permissions', path_prefix=ADMIN_PREFIX)
215 228
216 229 ##ADMIN LDAP SETTINGS
217 230 rmap.connect('ldap_settings', '%s/ldap' % ADMIN_PREFIX,
218 231 controller='admin/ldap_settings', action='ldap_settings',
219 232 conditions=dict(method=["POST"]))
220 233
221 234 rmap.connect('ldap_home', '%s/ldap' % ADMIN_PREFIX,
222 235 controller='admin/ldap_settings')
223 236
224 237 #ADMIN SETTINGS REST ROUTES
225 238 with rmap.submapper(path_prefix=ADMIN_PREFIX,
226 239 controller='admin/settings') as m:
227 240 m.connect("admin_settings", "/settings",
228 241 action="create", conditions=dict(method=["POST"]))
229 242 m.connect("admin_settings", "/settings",
230 243 action="index", conditions=dict(method=["GET"]))
231 244 m.connect("formatted_admin_settings", "/settings.{format}",
232 245 action="index", conditions=dict(method=["GET"]))
233 246 m.connect("admin_new_setting", "/settings/new",
234 247 action="new", conditions=dict(method=["GET"]))
235 248 m.connect("formatted_admin_new_setting", "/settings/new.{format}",
236 249 action="new", conditions=dict(method=["GET"]))
237 250 m.connect("/settings/{setting_id}",
238 251 action="update", conditions=dict(method=["PUT"]))
239 252 m.connect("/settings/{setting_id}",
240 253 action="delete", conditions=dict(method=["DELETE"]))
241 254 m.connect("admin_edit_setting", "/settings/{setting_id}/edit",
242 255 action="edit", conditions=dict(method=["GET"]))
243 256 m.connect("formatted_admin_edit_setting",
244 257 "/settings/{setting_id}.{format}/edit",
245 258 action="edit", conditions=dict(method=["GET"]))
246 259 m.connect("admin_setting", "/settings/{setting_id}",
247 260 action="show", conditions=dict(method=["GET"]))
248 261 m.connect("formatted_admin_setting", "/settings/{setting_id}.{format}",
249 262 action="show", conditions=dict(method=["GET"]))
250 263 m.connect("admin_settings_my_account", "/my_account",
251 264 action="my_account", conditions=dict(method=["GET"]))
252 265 m.connect("admin_settings_my_account_update", "/my_account_update",
253 266 action="my_account_update", conditions=dict(method=["PUT"]))
254 267 m.connect("admin_settings_create_repository", "/create_repository",
255 268 action="create_repository", conditions=dict(method=["GET"]))
256 269
257 270
258 271 #ADMIN MAIN PAGES
259 272 with rmap.submapper(path_prefix=ADMIN_PREFIX,
260 273 controller='admin/admin') as m:
261 274 m.connect('admin_home', '', action='index')
262 275 m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9\. _-]*}',
263 276 action='add_repo')
264 277
265 278 #==========================================================================
266 279 # API V1
267 280 #==========================================================================
268 281 with rmap.submapper(path_prefix=ADMIN_PREFIX,
269 282 controller='api/api') as m:
270 283 m.connect('api', '/api')
271 284
272 285
273 286 #USER JOURNAL
274 287 rmap.connect('journal', '%s/journal' % ADMIN_PREFIX, controller='journal')
275 288
276 289 rmap.connect('public_journal', '%s/public_journal' % ADMIN_PREFIX,
277 290 controller='journal', action="public_journal")
278 291
279 292 rmap.connect('public_journal_rss', '%s/public_journal_rss' % ADMIN_PREFIX,
280 293 controller='journal', action="public_journal_rss")
281 294
282 295 rmap.connect('public_journal_atom',
283 296 '%s/public_journal_atom' % ADMIN_PREFIX, controller='journal',
284 297 action="public_journal_atom")
285 298
286 299 rmap.connect('toggle_following', '%s/toggle_following' % ADMIN_PREFIX,
287 300 controller='journal', action='toggle_following',
288 301 conditions=dict(method=["POST"]))
289 302
290 303 #SEARCH
291 304 rmap.connect('search', '%s/search' % ADMIN_PREFIX, controller='search',)
292 305 rmap.connect('search_repo', '%s/search/{search_repo:.*}' % ADMIN_PREFIX,
293 306 controller='search')
294 307
295 308 #LOGIN/LOGOUT/REGISTER/SIGN IN
296 309 rmap.connect('login_home', '%s/login' % ADMIN_PREFIX, controller='login')
297 310 rmap.connect('logout_home', '%s/logout' % ADMIN_PREFIX, controller='login',
298 311 action='logout')
299 312
300 313 rmap.connect('register', '%s/register' % ADMIN_PREFIX, controller='login',
301 314 action='register')
302 315
303 316 rmap.connect('reset_password', '%s/password_reset' % ADMIN_PREFIX,
304 317 controller='login', action='password_reset')
305 318
306 319 rmap.connect('reset_password_confirmation',
307 320 '%s/password_reset_confirmation' % ADMIN_PREFIX,
308 321 controller='login', action='password_reset_confirmation')
309 322
310 323 #FEEDS
311 324 rmap.connect('rss_feed_home', '/{repo_name:.*}/feed/rss',
312 325 controller='feed', action='rss',
313 326 conditions=dict(function=check_repo))
314 327
315 328 rmap.connect('atom_feed_home', '/{repo_name:.*}/feed/atom',
316 329 controller='feed', action='atom',
317 330 conditions=dict(function=check_repo))
318 331
319 332 #==========================================================================
320 333 # REPOSITORY ROUTES
321 334 #==========================================================================
335 rmap.connect('summary_home', '/{repo_name:.*}',
336 controller='summary',
337 conditions=dict(function=check_repo))
338
339 # rmap.connect('repo_group_home', '/{group_name:.*}',
340 # controller='admin/repos_groups',action="show_by_name",
341 # conditions=dict(function=check_group))
342
322 343 rmap.connect('changeset_home', '/{repo_name:.*}/changeset/{revision}',
323 344 controller='changeset', revision='tip',
324 345 conditions=dict(function=check_repo))
325 346
326 347 rmap.connect('raw_changeset_home',
327 348 '/{repo_name:.*}/raw-changeset/{revision}',
328 349 controller='changeset', action='raw_changeset',
329 350 revision='tip', conditions=dict(function=check_repo))
330 351
331 rmap.connect('summary_home', '/{repo_name:.*}',
332 controller='summary', conditions=dict(function=check_repo))
333
334 352 rmap.connect('summary_home', '/{repo_name:.*}/summary',
335 353 controller='summary', conditions=dict(function=check_repo))
336 354
337 355 rmap.connect('shortlog_home', '/{repo_name:.*}/shortlog',
338 356 controller='shortlog', conditions=dict(function=check_repo))
339 357
340 358 rmap.connect('branches_home', '/{repo_name:.*}/branches',
341 359 controller='branches', conditions=dict(function=check_repo))
342 360
343 361 rmap.connect('tags_home', '/{repo_name:.*}/tags',
344 362 controller='tags', conditions=dict(function=check_repo))
345 363
346 364 rmap.connect('changelog_home', '/{repo_name:.*}/changelog',
347 365 controller='changelog', conditions=dict(function=check_repo))
348 366
349 367 rmap.connect('changelog_details', '/{repo_name:.*}/changelog_details/{cs}',
350 368 controller='changelog', action='changelog_details',
351 369 conditions=dict(function=check_repo))
352 370
353 371 rmap.connect('files_home', '/{repo_name:.*}/files/{revision}/{f_path:.*}',
354 372 controller='files', revision='tip', f_path='',
355 373 conditions=dict(function=check_repo))
356 374
357 375 rmap.connect('files_diff_home', '/{repo_name:.*}/diff/{f_path:.*}',
358 376 controller='files', action='diff', revision='tip', f_path='',
359 377 conditions=dict(function=check_repo))
360 378
361 379 rmap.connect('files_rawfile_home',
362 380 '/{repo_name:.*}/rawfile/{revision}/{f_path:.*}',
363 381 controller='files', action='rawfile', revision='tip',
364 382 f_path='', conditions=dict(function=check_repo))
365 383
366 384 rmap.connect('files_raw_home',
367 385 '/{repo_name:.*}/raw/{revision}/{f_path:.*}',
368 386 controller='files', action='raw', revision='tip', f_path='',
369 387 conditions=dict(function=check_repo))
370 388
371 389 rmap.connect('files_annotate_home',
372 390 '/{repo_name:.*}/annotate/{revision}/{f_path:.*}',
373 391 controller='files', action='annotate', revision='tip',
374 392 f_path='', conditions=dict(function=check_repo))
375 393
376 394 rmap.connect('files_edit_home',
377 395 '/{repo_name:.*}/edit/{revision}/{f_path:.*}',
378 396 controller='files', action='edit', revision='tip',
379 397 f_path='', conditions=dict(function=check_repo))
380 398
381 399 rmap.connect('files_add_home',
382 400 '/{repo_name:.*}/add/{revision}/{f_path:.*}',
383 401 controller='files', action='add', revision='tip',
384 402 f_path='', conditions=dict(function=check_repo))
385 403
386 404 rmap.connect('files_archive_home', '/{repo_name:.*}/archive/{fname}',
387 405 controller='files', action='archivefile',
388 406 conditions=dict(function=check_repo))
389 407
390 408 rmap.connect('files_nodelist_home',
391 409 '/{repo_name:.*}/nodelist/{revision}/{f_path:.*}',
392 410 controller='files', action='nodelist',
393 411 conditions=dict(function=check_repo))
394 412
395 413 rmap.connect('repo_settings_delete', '/{repo_name:.*}/settings',
396 414 controller='settings', action="delete",
397 415 conditions=dict(method=["DELETE"], function=check_repo))
398 416
399 417 rmap.connect('repo_settings_update', '/{repo_name:.*}/settings',
400 418 controller='settings', action="update",
401 419 conditions=dict(method=["PUT"], function=check_repo))
402 420
403 421 rmap.connect('repo_settings_home', '/{repo_name:.*}/settings',
404 422 controller='settings', action='index',
405 423 conditions=dict(function=check_repo))
406 424
407 425 rmap.connect('repo_fork_create_home', '/{repo_name:.*}/fork',
408 426 controller='settings', action='fork_create',
409 427 conditions=dict(function=check_repo, method=["POST"]))
410 428
411 429 rmap.connect('repo_fork_home', '/{repo_name:.*}/fork',
412 430 controller='settings', action='fork',
413 431 conditions=dict(function=check_repo))
414 432
415 433 rmap.connect('repo_followers_home', '/{repo_name:.*}/followers',
416 434 controller='followers', action='followers',
417 435 conditions=dict(function=check_repo))
418 436
419 437 rmap.connect('repo_forks_home', '/{repo_name:.*}/forks',
420 438 controller='forks', action='forks',
421 439 conditions=dict(function=check_repo))
422 440
423 441 return rmap
@@ -1,611 +1,611 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.lib.utils
4 4 ~~~~~~~~~~~~~~~~~~~
5 5
6 6 Utilities library for RhodeCode
7 7
8 8 :created_on: Apr 18, 2010
9 9 :author: marcink
10 10 :copyright: (C) 2009-2011 Marcin Kuzminski <marcin@python-works.com>
11 11 :license: GPLv3, see COPYING for more details.
12 12 """
13 13 # This program is free software: you can redistribute it and/or modify
14 14 # it under the terms of the GNU General Public License as published by
15 15 # the Free Software Foundation, either version 3 of the License, or
16 16 # (at your option) any later version.
17 17 #
18 18 # This program is distributed in the hope that it will be useful,
19 19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 21 # GNU General Public License for more details.
22 22 #
23 23 # You should have received a copy of the GNU General Public License
24 24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 25
26 26 import os
27 27 import logging
28 28 import datetime
29 29 import traceback
30 30 import paste
31 31 import beaker
32 32 from os.path import dirname as dn, join as jn
33 33
34 34 from paste.script.command import Command, BadCommand
35 35
36 from UserDict import DictMixin
37
38 from mercurial import ui, config, hg
39 from mercurial.error import RepoError
36 from mercurial import ui, config
40 37
41 38 from webhelpers.text import collapse, remove_formatting, strip_tags
42 39
40 from vcs import get_backend
43 41 from vcs.backends.base import BaseChangeset
44 42 from vcs.utils.lazy import LazyProperty
45 from vcs import get_backend
43 from vcs.utils.helpers import get_scm
44 from vcs.exceptions import VCSError
46 45
47 46 from rhodecode.model import meta
48 47 from rhodecode.model.caching_query import FromCache
49 48 from rhodecode.model.db import Repository, User, RhodeCodeUi, UserLog, Group, \
50 49 RhodeCodeSettings
51 50 from rhodecode.model.repo import RepoModel
52 from rhodecode.model.user import UserModel
53 51
54 52 log = logging.getLogger(__name__)
55 53
56 54
57 55 def recursive_replace(str, replace=' '):
58 56 """Recursive replace of given sign to just one instance
59 57
60 58 :param str: given string
61 59 :param replace: char to find and replace multiple instances
62 60
63 61 Examples::
64 62 >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
65 63 'Mighty-Mighty-Bo-sstones'
66 64 """
67 65
68 66 if str.find(replace * 2) == -1:
69 67 return str
70 68 else:
71 69 str = str.replace(replace * 2, replace)
72 70 return recursive_replace(str, replace)
73 71
74 72
75 73 def repo_name_slug(value):
76 74 """Return slug of name of repository
77 75 This function is called on each creation/modification
78 76 of repository to prevent bad names in repo
79 77 """
80 78
81 79 slug = remove_formatting(value)
82 80 slug = strip_tags(slug)
83 81
84 82 for c in """=[]\;'"<>,/~!@#$%^&*()+{}|: """:
85 83 slug = slug.replace(c, '-')
86 84 slug = recursive_replace(slug, '-')
87 85 slug = collapse(slug, '-')
88 86 return slug
89 87
90 88
91 89 def get_repo_slug(request):
92 90 return request.environ['pylons.routes_dict'].get('repo_name')
93 91
94 92
95 93 def action_logger(user, action, repo, ipaddr='', sa=None):
96 94 """
97 95 Action logger for various actions made by users
98 96
99 97 :param user: user that made this action, can be a unique username string or
100 98 object containing user_id attribute
101 99 :param action: action to log, should be one of predefined unique actions for
102 100 easy translations
103 101 :param repo: string name of repository or object containing repo_id,
104 102 that action was made on
105 103 :param ipaddr: optional ip address from what the action was made
106 104 :param sa: optional sqlalchemy session
107 105
108 106 """
109 107
110 108 if not sa:
111 109 sa = meta.Session()
112 110
113 111 try:
114 um = UserModel()
115 112 if hasattr(user, 'user_id'):
116 113 user_obj = user
117 114 elif isinstance(user, basestring):
118 user_obj = um.get_by_username(user, cache=False)
115 user_obj = User.by_username(user, cache=False)
119 116 else:
120 117 raise Exception('You have to provide user object or username')
121 118
122 119 rm = RepoModel()
123 120 if hasattr(repo, 'repo_id'):
124 121 repo_obj = rm.get(repo.repo_id, cache=False)
125 122 repo_name = repo_obj.repo_name
126 123 elif isinstance(repo, basestring):
127 124 repo_name = repo.lstrip('/')
128 125 repo_obj = rm.get_by_repo_name(repo_name, cache=False)
129 126 else:
130 127 raise Exception('You have to provide repository to action logger')
131 128
132 129 user_log = UserLog()
133 130 user_log.user_id = user_obj.user_id
134 131 user_log.action = action
135 132
136 133 user_log.repository_id = repo_obj.repo_id
137 134 user_log.repository_name = repo_name
138 135
139 136 user_log.action_date = datetime.datetime.now()
140 137 user_log.user_ip = ipaddr
141 138 sa.add(user_log)
142 139 sa.commit()
143 140
144 141 log.info('Adding user %s, action %s on %s', user_obj, action, repo)
145 142 except:
146 143 log.error(traceback.format_exc())
147 144 sa.rollback()
148 145
149 146
150 147 def get_repos(path, recursive=False):
151 148 """
152 149 Scans given path for repos and return (name,(type,path)) tuple
153 150
154 151 :param path: path to scan for repositories
155 152 :param recursive: recursive search and return names with subdirs in front
156 153 """
157 154 from vcs.utils.helpers import get_scm
158 155 from vcs.exceptions import VCSError
159 156
160 157 if path.endswith(os.sep):
161 158 #remove ending slash for better results
162 159 path = path[:-1]
163 160
164 161 def _get_repos(p):
165 162 if not os.access(p, os.W_OK):
166 163 return
167 164 for dirpath in os.listdir(p):
168 165 if os.path.isfile(os.path.join(p, dirpath)):
169 166 continue
170 167 cur_path = os.path.join(p, dirpath)
171 168 try:
172 169 scm_info = get_scm(cur_path)
173 170 yield scm_info[1].split(path)[-1].lstrip(os.sep), scm_info
174 171 except VCSError:
175 172 if not recursive:
176 173 continue
177 174 #check if this dir contains other repos for recursive scan
178 175 rec_path = os.path.join(p, dirpath)
179 176 if os.path.isdir(rec_path):
180 177 for inner_scm in _get_repos(rec_path):
181 178 yield inner_scm
182 179
183 180 return _get_repos(path)
184 181
185 182
186 183 def check_repo_fast(repo_name, base_path):
187 184 """
188 Check given path for existence of directory
185 Returns True if given path is a valid repository False otherwise
189 186 :param repo_name:
190 187 :param base_path:
191 188
192 :return False: if this directory is present
189 :return True: if given path is a valid repository
193 190 """
194 if os.path.isdir(os.path.join(base_path, repo_name)):
195 return False
196 return True
197
198
199 def check_repo(repo_name, base_path, verify=True):
200
201 repo_path = os.path.join(base_path, repo_name)
202
191 full_path = os.path.join(base_path, repo_name)
192
203 193 try:
204 if not check_repo_fast(repo_name, base_path):
205 return False
206 r = hg.repository(ui.ui(), repo_path)
207 if verify:
208 hg.verify(r)
209 #here we hnow that repo exists it was verified
210 log.info('%s repo is already created', repo_name)
194 get_scm(full_path)
195 return True
196 except VCSError:
211 197 return False
212 except RepoError:
213 #it means that there is no valid repo there...
214 log.info('%s repo is free for creation', repo_name)
198
199 def check_repos_group_fast(repos_group_name, base_path):
200 """
201 Returns True if given path is a repos group False otherwise
202
203 :param repos_group_name:
204 :param base_path:
205 """
206 full_path = os.path.join(base_path, repos_group_name)
207
208 # check if it's not a repo
209 if check_repo_fast(repos_group_name, base_path):
210 return False
211
212 # check if it's a valid path
213 if os.path.isdir(full_path):
215 214 return True
216
217
215
216 return False
217
218 218 def ask_ok(prompt, retries=4, complaint='Yes or no, please!'):
219 219 while True:
220 220 ok = raw_input(prompt)
221 221 if ok in ('y', 'ye', 'yes'):
222 222 return True
223 223 if ok in ('n', 'no', 'nop', 'nope'):
224 224 return False
225 225 retries = retries - 1
226 226 if retries < 0:
227 227 raise IOError
228 228 print complaint
229 229
230 230 #propagated from mercurial documentation
231 231 ui_sections = ['alias', 'auth',
232 232 'decode/encode', 'defaults',
233 233 'diff', 'email',
234 234 'extensions', 'format',
235 235 'merge-patterns', 'merge-tools',
236 236 'hooks', 'http_proxy',
237 237 'smtp', 'patch',
238 238 'paths', 'profiling',
239 239 'server', 'trusted',
240 240 'ui', 'web', ]
241 241
242 242
243 243 def make_ui(read_from='file', path=None, checkpaths=True):
244 244 """A function that will read python rc files or database
245 245 and make an mercurial ui object from read options
246 246
247 247 :param path: path to mercurial config file
248 248 :param checkpaths: check the path
249 249 :param read_from: read from 'file' or 'db'
250 250 """
251 251
252 252 baseui = ui.ui()
253 253
254 254 #clean the baseui object
255 255 baseui._ocfg = config.config()
256 256 baseui._ucfg = config.config()
257 257 baseui._tcfg = config.config()
258 258
259 259 if read_from == 'file':
260 260 if not os.path.isfile(path):
261 261 log.warning('Unable to read config file %s' % path)
262 262 return False
263 263 log.debug('reading hgrc from %s', path)
264 264 cfg = config.config()
265 265 cfg.read(path)
266 266 for section in ui_sections:
267 267 for k, v in cfg.items(section):
268 268 log.debug('settings ui from file[%s]%s:%s', section, k, v)
269 269 baseui.setconfig(section, k, v)
270 270
271 271 elif read_from == 'db':
272 272 sa = meta.Session()
273 273 ret = sa.query(RhodeCodeUi)\
274 274 .options(FromCache("sql_cache_short",
275 275 "get_hg_ui_settings")).all()
276 276
277 277 hg_ui = ret
278 278 for ui_ in hg_ui:
279 279 if ui_.ui_active:
280 280 log.debug('settings ui from db[%s]%s:%s', ui_.ui_section,
281 281 ui_.ui_key, ui_.ui_value)
282 282 baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)
283 283
284 284 meta.Session.remove()
285 285 return baseui
286 286
287 287
288 288 def set_rhodecode_config(config):
289 289 """Updates pylons config with new settings from database
290 290
291 291 :param config:
292 292 """
293 293 hgsettings = RhodeCodeSettings.get_app_settings()
294 294
295 295 for k, v in hgsettings.items():
296 296 config[k] = v
297 297
298 298
299 299 def invalidate_cache(cache_key, *args):
300 300 """Puts cache invalidation task into db for
301 301 further global cache invalidation
302 302 """
303 303
304 304 from rhodecode.model.scm import ScmModel
305 305
306 306 if cache_key.startswith('get_repo_cached_'):
307 307 name = cache_key.split('get_repo_cached_')[-1]
308 308 ScmModel().mark_for_invalidation(name)
309 309
310 310
311 311 class EmptyChangeset(BaseChangeset):
312 312 """
313 313 A dummy empty changeset. It's possible to pass hash when creating
314 314 an EmptyChangeset
315 315 """
316 316
317 317 def __init__(self, cs='0' * 40, repo=None, requested_revision=None, alias=None):
318 318 self._empty_cs = cs
319 319 self.revision = -1
320 320 self.message = ''
321 321 self.author = ''
322 322 self.date = ''
323 323 self.repository = repo
324 324 self.requested_revision = requested_revision
325 325 self.alias = alias
326 326
327 327 @LazyProperty
328 328 def raw_id(self):
329 329 """Returns raw string identifying this changeset, useful for web
330 330 representation.
331 331 """
332 332
333 333 return self._empty_cs
334 334
335 335 @LazyProperty
336 336 def branch(self):
337 337 return get_backend(self.alias).DEFAULT_BRANCH_NAME
338 338
339 339 @LazyProperty
340 340 def short_id(self):
341 341 return self.raw_id[:12]
342 342
343 343 def get_file_changeset(self, path):
344 344 return self
345 345
346 346 def get_file_content(self, path):
347 347 return u''
348 348
349 349 def get_file_size(self, path):
350 350 return 0
351 351
352 352
def map_groups(groups):
    """Checks for groups existence, and creates groups structures.
    It returns last group in structure

    :param groups: list of path components; the last element is the
        repository name itself and is therefore skipped
    :returns: the deepest :class:`Group` of the structure, or ``None``
        when ``groups`` contains only the repository name
    """
    sa = meta.Session()

    parent = None
    group = None
    # only the leading components are group names; groups[-1] is the repo
    for lvl, group_name in enumerate(groups[:-1]):
        # NOTE(review): lookup is by group_name only, not scoped by parent;
        # two groups with the same name under different parents would
        # resolve to the same row here -- confirm names are globally unique
        group = sa.query(Group).filter(Group.group_name == group_name).scalar()

        if group is None:
            group = Group(group_name, parent)
            sa.add(group)
            sa.commit()

        parent = group

    return group
374 374
375 375
def repo2db_mapper(initial_repo_list, remove_obsolete=False):
    """maps all repos given in initial_repo_list, non existing repositories
    are created, if remove_obsolete is True it also check for db entries
    that are not in initial_repo_list and removes them.

    :param initial_repo_list: dict of repositories found by scanning methods
        (mapping repo name -> vcs repo instance)
    :param remove_obsolete: check for obsolete entries in database
    :returns: tuple ``(added, removed)`` of repository-name lists
    """

    sa = meta.Session()
    rm = RepoModel()
    # any admin user is good enough to own auto-created repositories
    user = sa.query(User).filter(User.admin == True).first()
    added = []
    for name, repo in initial_repo_list.items():
        # make sure the on-disk group structure exists in the database
        group = map_groups(name.split(os.sep))
        if not rm.get_by_repo_name(name, cache=False):
            log.info('repository %s not found creating default', name)
            added.append(name)
            form_data = {
                'repo_name': name,
                'repo_name_full': name,
                'repo_type': repo.alias,
                # 'unknown' is the backend's placeholder description
                'description': repo.description \
                    if repo.description != 'unknown' else \
                    '%s repository' % name,
                'private': False,
                'group_id': getattr(group, 'group_id', None)
            }
            # just_db: the repo already exists on the filesystem
            rm.create(form_data, user, just_db=True)

    removed = []
    if remove_obsolete:
        #remove from database those repositories that are not in the filesystem
        for repo in sa.query(Repository).all():
            if repo.repo_name not in initial_repo_list.keys():
                removed.append(repo.repo_name)
                sa.delete(repo)
                sa.commit()

    return added, removed
416 416
#set cache regions for beaker so celery can utilise it
def add_cache(settings):
    """Configure beaker cache regions from a paste/ini ``settings`` dict.

    Collects every ``beaker.cache.*`` / ``cache.*`` option, then for each
    region named in the ``regions`` option assembles that region's settings
    (defaulting ``expire``, ``lock_dir``, ``data_dir`` and ``type``) and
    registers it in ``beaker.cache.cache_regions``.  Does nothing when no
    ``regions`` option is configured.

    :param settings: dict of configuration options with string values
    """
    cache_settings = {'regions': None}
    for key in settings.keys():
        for prefix in ['beaker.cache.', 'cache.']:
            if key.startswith(prefix):
                # slice the prefix off instead of str.split(), which would
                # misbehave if the key happened to contain the prefix twice
                name = key[len(prefix):].strip()
                cache_settings[name] = settings[key].strip()
    if cache_settings['regions']:
        for region in cache_settings['regions'].split(','):
            region = region.strip()
            region_settings = {}
            for key, value in cache_settings.items():
                # match 'region.option' exactly so a region named 'short'
                # does not swallow the options of a region 'short_term'
                if key.startswith(region + '.'):
                    region_settings[key[len(region) + 1:]] = value
            region_settings['expire'] = int(region_settings.get('expire',
                                                                60))
            region_settings.setdefault('lock_dir',
                                       cache_settings.get('lock_dir'))
            region_settings.setdefault('data_dir',
                                       cache_settings.get('data_dir'))

            if 'type' not in region_settings:
                region_settings['type'] = cache_settings.get('type',
                                                             'memory')
            beaker.cache.cache_regions[region] = region_settings
443 443
444 444
def get_current_revision():
    """Returns tuple of (number, id) from repository containing this package
    or None if repository could not be found.

    Works by opening the rhodecode source tree itself with vcs; any import
    or repository problem is logged at debug level and swallowed, since a
    missing revision is not fatal.
    """

    try:
        from vcs import get_repo
        from vcs.utils.helpers import get_scm
        from vcs.exceptions import RepositoryError, VCSError
        # package root: two directories above this module
        repopath = os.path.join(os.path.dirname(__file__), '..', '..')
        scm = get_scm(repopath)[0]
        repo = get_repo(path=repopath, alias=scm)
        tip = repo.get_changeset()
        return (tip.revision, tip.short_id)
    except (ImportError, RepositoryError, VCSError), err:
        logging.debug("Cannot retrieve rhodecode's revision. Original error "
                      "was: %s" % err)
        return None
463 463
464 464
465 465 #==============================================================================
466 466 # TEST FUNCTIONS AND CREATORS
467 467 #==============================================================================
def create_test_index(repo_location, config, full_index):
    """
    Makes default test index

    :param repo_location: path to the repositories the index is built from
    :param config: test config
    :param full_index: rebuild the whole index from scratch when True,
        otherwise only incrementally update it
    """

    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
    from rhodecode.lib.pidlock import DaemonLock, LockHeld

    index_location = os.path.join(config['app_conf']['index_dir'])
    if not os.path.exists(index_location):
        os.makedirs(index_location)

    try:
        # only one indexing run at a time; a held lock means another
        # process is already indexing, which is fine for tests
        l = DaemonLock(file=jn(dn(index_location), 'make_index.lock'))
        WhooshIndexingDaemon(index_location=index_location,
                             repo_location=repo_location)\
            .run(full_index=full_index)
        l.release()
    except LockHeld:
        pass
493 493
494 494
def create_test_env(repos_test_path, config):
    """Makes a fresh database and
    install test repository into tmp dir

    :param repos_test_path: directory test repositories are extracted to
    :param config: test config (provides db url, index/cache dirs)
    """
    from rhodecode.lib.db_manage import DbManage
    from rhodecode.tests import HG_REPO, GIT_REPO, NEW_HG_REPO, NEW_GIT_REPO, \
        HG_FORK, GIT_FORK, TESTS_TMP_PATH
    import tarfile
    import shutil
    from os.path import abspath

    # PART ONE create db
    dbconf = config['sqlalchemy.db1.url']
    log.debug('making test db %s', dbconf)

    # create test dir if it doesn't exist
    if not os.path.isdir(repos_test_path):
        log.debug('Creating testdir %s' % repos_test_path)
        os.makedirs(repos_test_path)

    dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
                        tests=True)
    # override=True: drop and recreate everything for a clean slate
    dbmanage.create_tables(override=True)
    dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
    dbmanage.create_default_user()
    dbmanage.admin_prompt()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()

    # PART TWO make test repo
    log.debug('making test vcs repositories')

    idx_path = config['app_conf']['index_dir']
    data_path = config['app_conf']['cache_dir']

    #clean index and data
    if idx_path and os.path.exists(idx_path):
        log.debug('remove %s' % idx_path)
        shutil.rmtree(idx_path)

    if data_path and os.path.exists(data_path):
        log.debug('remove %s' % data_path)
        shutil.rmtree(data_path)

    #CREATE DEFAULT HG REPOSITORY
    # the tarball ships with the source tree, two dirs above this module
    cur_dir = dn(dn(abspath(__file__)))
    tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
    tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
    tar.close()
544 544
545 545
546 546 #==============================================================================
547 547 # PASTER COMMANDS
548 548 #==============================================================================
class BasePasterCommand(Command):
    """
    Abstract Base Class for paster commands.

    The celery commands are somewhat aggressive about loading
    celery.conf, and since our module sets the `CELERY_LOADER`
    environment variable to our loader, we have to bootstrap a bit and
    make sure we've had a chance to load the pylons config off of the
    command line, otherwise everything fails.
    """
    min_args = 1
    min_args_error = "Please provide a paster config file as an argument."
    takes_config_file = 1
    requires_config_file = True

    def notify_msg(self, msg, log=False):
        """Make a notification to user, additionally if logger is passed
        it logs this action using given logger

        :param msg: message that will be logged
        :param log: a logging callable (e.g. ``logger.info``) used to
            record this message; falsy to skip logging
        """
        # the original check was ``isinstance(log, logging)`` which always
        # raises TypeError (``logging`` is a module, not a class); accept
        # any callable instead, matching the ``log(msg)`` call below
        if log and callable(log):
            log(msg)

    def run(self, args):
        """
        Overrides Command.run

        Checks for a config file argument and loads it.
        """
        if len(args) < self.min_args:
            raise BadCommand(
                self.min_args_error % {'min_args': self.min_args,
                                       'actual_args': len(args)})

        # Decrement because we're going to lob off the first argument.
        # @@ This is hacky
        self.min_args -= 1
        self.bootstrap_config(args[0])
        self.update_parser()
        return super(BasePasterCommand, self).run(args[1:])

    def update_parser(self):
        """
        Abstract method. Allows for the class's parser to be updated
        before the superclass's `run` method is called. Necessary to
        allow options/arguments to be passed through to the underlying
        celery command.
        """
        raise NotImplementedError("Abstract Method.")

    def bootstrap_config(self, conf):
        """
        Loads the pylons configuration.

        :param conf: path to a paste ini file
        """
        from pylons import config as pylonsconfig

        path_to_ini_file = os.path.realpath(conf)
        conf = paste.deploy.appconfig('config:' + path_to_ini_file)
        pylonsconfig.init_app(conf.global_conf, conf.local_conf)
611 611
@@ -1,358 +1,358 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.model.repo
4 4 ~~~~~~~~~~~~~~~~~~~~
5 5
6 6 Repository model for rhodecode
7 7
8 8 :created_on: Jun 5, 2010
9 9 :author: marcink
10 10 :copyright: (C) 2009-2011 Marcin Kuzminski <marcin@python-works.com>
11 11 :license: GPLv3, see COPYING for more details.
12 12 """
13 13 # This program is free software: you can redistribute it and/or modify
14 14 # it under the terms of the GNU General Public License as published by
15 15 # the Free Software Foundation, either version 3 of the License, or
16 16 # (at your option) any later version.
17 17 #
18 18 # This program is distributed in the hope that it will be useful,
19 19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 21 # GNU General Public License for more details.
22 22 #
23 23 # You should have received a copy of the GNU General Public License
24 24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 25 import os
26 26 import shutil
27 27 import logging
28 28 import traceback
29 29 from datetime import datetime
30 30
31 31 from sqlalchemy.orm import joinedload, make_transient
32 32
33 33 from vcs.utils.lazy import LazyProperty
34 34 from vcs.backends import get_backend
35 35
36 36 from rhodecode.lib import safe_str
37 37
38 38 from rhodecode.model import BaseModel
39 39 from rhodecode.model.caching_query import FromCache
40 40 from rhodecode.model.db import Repository, RepoToPerm, User, Permission, \
41 41 Statistics, UsersGroup, UsersGroupRepoToPerm, RhodeCodeUi, Group
42 42 from rhodecode.model.user import UserModel
43 43
44 44 log = logging.getLogger(__name__)
45 45
46 46
class RepoModel(BaseModel):
    """Repository model: CRUD on repository database records, permission
    assignment, and the matching filesystem operations (create, rename
    and soft-delete of the actual repository directories)."""

    @LazyProperty
    def repos_path(self):
        """Get's the repositories root path from database
        """

        q = self.sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one()
        return q.ui_value

    def get(self, repo_id, cache=False):
        """Fetch a repository by numeric id (or None if missing).

        :param repo_id: database id of the repository
        :param cache: use the short sql cache region when True
        """
        repo = self.sa.query(Repository)\
            .filter(Repository.repo_id == repo_id)

        if cache:
            repo = repo.options(FromCache("sql_cache_short",
                                          "get_repo_%s" % repo_id))
        return repo.scalar()

    def get_by_repo_name(self, repo_name, cache=False):
        """Fetch a repository by full name, including any group path
        (or None if missing).

        :param repo_name: full repository name
        :param cache: use the short sql cache region when True
        """
        repo = self.sa.query(Repository)\
            .filter(Repository.repo_name == repo_name)

        if cache:
            repo = repo.options(FromCache("sql_cache_short",
                                          "get_repo_%s" % repo_name))
        return repo.scalar()

    def get_users_js(self):
        """Return all active users rendered as a javascript array literal,
        used to feed client-side autocomplete widgets."""

        users = self.sa.query(User).filter(User.active == True).all()
        u_tmpl = '''{id:%s, fname:"%s", lname:"%s", nname:"%s"},'''
        users_array = '[%s]' % '\n'.join([u_tmpl % (u.user_id, u.name,
                                                    u.lastname, u.username)
                                          for u in users])
        return users_array

    def get_users_groups_js(self):
        """Return all active users groups rendered as a javascript array
        literal, used to feed client-side autocomplete widgets."""
        users_groups = self.sa.query(UsersGroup)\
            .filter(UsersGroup.users_group_active == True).all()

        g_tmpl = '''{id:%s, grname:"%s",grmembers:"%s"},'''

        users_groups_array = '[%s]' % '\n'.join([g_tmpl % \
                                        (gr.users_group_id, gr.users_group_name,
                                         len(gr.members))
                                        for gr in users_groups])
        return users_groups_array

    def update(self, repo_name, form_data):
        """Update a repository and its permissions from ``form_data``;
        the whole transaction is rolled back and re-raised on any error.

        :param repo_name: current full name of the repository
        :param form_data: dict of new attribute values plus the
            ``perms_updates`` / ``perms_new`` permission lists, each a
            sequence of ``(member, perm, member_type)`` tuples where
            ``member_type`` is 'user' or a users-group
        """
        try:
            cur_repo = self.get_by_repo_name(repo_name, cache=False)

            #update permissions
            for member, perm, member_type in form_data['perms_updates']:
                if member_type == 'user':
                    r2p = self.sa.query(RepoToPerm)\
                            .filter(RepoToPerm.user == User.by_username(member))\
                            .filter(RepoToPerm.repository == cur_repo)\
                            .one()

                    r2p.permission = self.sa.query(Permission)\
                            .filter(Permission.permission_name ==
                                    perm).scalar()
                    self.sa.add(r2p)
                else:
                    g2p = self.sa.query(UsersGroupRepoToPerm)\
                            .filter(UsersGroupRepoToPerm.users_group ==
                                    UsersGroup.get_by_group_name(member))\
                            .filter(UsersGroupRepoToPerm.repository ==
                                    cur_repo).one()

                    g2p.permission = self.sa.query(Permission)\
                            .filter(Permission.permission_name ==
                                    perm).scalar()
                    self.sa.add(g2p)

            #set new permissions
            for member, perm, member_type in form_data['perms_new']:
                if member_type == 'user':
                    r2p = RepoToPerm()
                    r2p.repository = cur_repo
                    r2p.user = User.by_username(member)

                    r2p.permission = self.sa.query(Permission)\
                            .filter(Permission.
                                    permission_name == perm)\
                            .scalar()
                    self.sa.add(r2p)
                else:
                    g2p = UsersGroupRepoToPerm()
                    g2p.repository = cur_repo
                    g2p.users_group = UsersGroup.get_by_group_name(member)
                    g2p.permission = self.sa.query(Permission)\
                            .filter(Permission.
                                    permission_name == perm)\
                            .scalar()
                    self.sa.add(g2p)

            #update current repo
            for k, v in form_data.items():
                if k == 'user':
                    cur_repo.user = User.by_username(v)
                elif k == 'repo_name':
                    # the stored name is always the full (group-prefixed) one
                    cur_repo.repo_name = form_data['repo_name_full']
                elif k == 'repo_group':
                    cur_repo.group_id = v

                else:
                    setattr(cur_repo, k, v)

            self.sa.add(cur_repo)

            if repo_name != form_data['repo_name_full']:
                # rename repository on the filesystem as well
                self.__rename_repo(old=repo_name,
                                   new=form_data['repo_name_full'])

            self.sa.commit()
        except:
            log.error(traceback.format_exc())
            self.sa.rollback()
            raise

    def create(self, form_data, cur_user, just_db=False, fork=False):
        """Create a new repository (or a fork of an existing one).

        :param form_data: repository attributes; for forks must contain
            ``fork_name`` (new name) and ``repo_name`` (original)
        :param cur_user: user that becomes owner and follower of the repo
        :param just_db: only create the database record, skip filesystem
        :param fork: create a fork instead of a plain repository
        """

        try:
            if fork:
                repo_name = form_data['fork_name']
                org_name = form_data['repo_name']
                org_full_name = org_name

            else:
                org_name = repo_name = form_data['repo_name']
                repo_name_full = form_data['repo_name_full']

            new_repo = Repository()
            new_repo.enable_statistics = False
            for k, v in form_data.items():
                if k == 'repo_name':
                    if fork:
                        v = repo_name
                    else:
                        v = repo_name_full
                if k == 'repo_group':
                    # stored on the model as group_id
                    k = 'group_id'

                setattr(new_repo, k, v)

            if fork:
                parent_repo = self.sa.query(Repository)\
                        .filter(Repository.repo_name == org_full_name).one()
                new_repo.fork = parent_repo

            new_repo.user_id = cur_user.user_id
            self.sa.add(new_repo)

            #create default permission
            # inherit the 'default' user's repository.* permission if set
            repo_to_perm = RepoToPerm()
            default = 'repository.read'
            for p in UserModel(self.sa).get_by_username('default',
                                                        cache=False).user_perms:
                if p.permission.permission_name.startswith('repository.'):
                    default = p.permission.permission_name
                    break

            # private repositories are invisible to the default user
            default_perm = 'repository.none' if form_data['private'] else default

            repo_to_perm.permission_id = self.sa.query(Permission)\
                    .filter(Permission.permission_name == default_perm)\
                    .one().permission_id

            repo_to_perm.repository = new_repo
            repo_to_perm.user_id = UserModel(self.sa)\
                .get_by_username('default', cache=False).user_id

            self.sa.add(repo_to_perm)

            if not just_db:
                self.__create_repo(repo_name, form_data['repo_type'],
                                   form_data['repo_group'],
                                   form_data['clone_uri'])

            self.sa.commit()

            #now automatically start following this repository as owner
            from rhodecode.model.scm import ScmModel
            ScmModel(self.sa).toggle_following_repo(new_repo.repo_id,
                                                    cur_user.user_id)

        except:
            log.error(traceback.format_exc())
            self.sa.rollback()
            raise

    def create_fork(self, form_data, cur_user):
        """Schedule fork creation as an (optionally async) celery task."""
        from rhodecode.lib.celerylib import tasks, run_task
        run_task(tasks.create_repo_fork, form_data, cur_user)

    def delete(self, repo):
        """Delete a repository from the database and soft-delete it on
        the filesystem (see :meth:`__delete_repo`).

        :param repo: :class:`Repository` instance to remove
        """
        try:
            self.sa.delete(repo)
            self.__delete_repo(repo)
            self.sa.commit()
        except:
            log.error(traceback.format_exc())
            self.sa.rollback()
            raise

    def delete_perm_user(self, form_data, repo_name):
        """Remove a single user's permission entry on a repository.

        :param form_data: dict containing ``user_id``
        :param repo_name: full repository name
        """
        try:
            self.sa.query(RepoToPerm)\
                .filter(RepoToPerm.repository \
                        == self.get_by_repo_name(repo_name))\
                .filter(RepoToPerm.user_id == form_data['user_id']).delete()
            self.sa.commit()
        except:
            log.error(traceback.format_exc())
            self.sa.rollback()
            raise

    def delete_perm_users_group(self, form_data, repo_name):
        """Remove a single users-group permission entry on a repository.

        :param form_data: dict containing ``users_group_id``
        :param repo_name: full repository name
        """
        try:
            self.sa.query(UsersGroupRepoToPerm)\
                .filter(UsersGroupRepoToPerm.repository \
                        == self.get_by_repo_name(repo_name))\
                .filter(UsersGroupRepoToPerm.users_group_id \
                        == form_data['users_group_id']).delete()
            self.sa.commit()
        except:
            log.error(traceback.format_exc())
            self.sa.rollback()
            raise

    def delete_stats(self, repo_name):
        """Remove the cached statistics rows of a repository.

        :param repo_name: full repository name
        """
        try:
            self.sa.query(Statistics)\
                .filter(Statistics.repository == \
                        self.get_by_repo_name(repo_name)).delete()
            self.sa.commit()
        except:
            log.error(traceback.format_exc())
            self.sa.rollback()
            raise

    def __create_repo(self, repo_name, alias, new_parent_id, clone_uri=False):
        """
        makes repository on filesystem. It's group aware means it'll create
        a repository within a group, and alter the paths accordingly of
        group location

        :param repo_name:
        :param alias: scm backend alias ('hg', 'git', ...)
        :param new_parent_id: id of the repositories group to place the
            new repository in, or falsy for the root
        :param clone_uri: optional uri to clone the initial content from
        """
        from rhodecode.lib.utils import check_repo_fast

        if new_parent_id:
            paths = Group.get(new_parent_id).full_path.split(Group.url_sep())
            new_parent_path = os.sep.join(paths)
        else:
            new_parent_path = ''

        # safe_str each component since os.path.join chokes on unicode mixes
        repo_path = os.path.join(*map(lambda x:safe_str(x),
                                [self.repos_path, new_parent_path, repo_name]))

        # check_repo_fast returns False when no repository exists at the
        # path yet -- only then is it safe to create one
        if check_repo_fast(repo_path, self.repos_path) is False:
            log.info('creating repo %s in %s @ %s', repo_name, repo_path,
                     clone_uri)
            backend = get_backend(alias)

            backend(repo_path, create=True, src_url=clone_uri)


    def __rename_repo(self, old, new):
        """
        renames repository on filesystem

        :param old: old name
        :param new: new name
        """
        log.info('renaming repo from %s to %s', old, new)

        old_path = os.path.join(self.repos_path, old)
        new_path = os.path.join(self.repos_path, new)
        if os.path.isdir(new_path):
            raise Exception('Was trying to rename to already existing dir %s' \
                            % new_path)
        shutil.move(old_path, new_path)

    def __delete_repo(self, repo):
        """
        removes repo from filesystem, the removal is acctually made by
        added rm__ prefix into dir, and rename internat .hg/.git dirs so this
        repository is no longer valid for rhodecode, can be undeleted later on
        by reverting the renames on this repository

        :param repo: repo object
        """
        rm_path = os.path.join(self.repos_path, repo.repo_name)
        log.info("Removing %s", rm_path)
        #disable hg/git
        alias = repo.repo_type
        shutil.move(os.path.join(rm_path, '.%s' % alias),
                    os.path.join(rm_path, 'rm__.%s' % alias))
        #disable repo
        shutil.move(rm_path, os.path.join(self.repos_path, 'rm__%s__%s' \
                                          % (datetime.today()\
                                             .strftime('%Y%m%d_%H%M%S_%f'),
                                             repo.repo_name)))
General Comments 0
You need to be logged in to leave comments. Login now