Fixed bug where the log_delete hook didn't properly store...
marcink - r4012:55dbc440 default
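What changed, in short: action_logger no longer reads the client IP from pylons' tmpl_context; it now asks the get_current_rhodecode_user() helper imported from rhodecode.lib.utils2. A minimal sketch of the before/after, taken from the diff below:

    # before
    from pylons import tmpl_context
    if not ipaddr and hasattr(tmpl_context, 'rhodecode_user'):
        ipaddr = tmpl_context.rhodecode_user.ip_addr

    # after
    if not ipaddr:
        ipaddr = getattr(get_current_rhodecode_user(), 'ip_addr', '')

The likely intent is that calls made from hooks such as log_delete, which run outside a template context, still record a usable IP address.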
@@ -1,822 +1,821 b''
# -*- coding: utf-8 -*-
"""
    rhodecode.lib.utils
    ~~~~~~~~~~~~~~~~~~~

    Utilities library for RhodeCode

    :created_on: Apr 18, 2010
    :author: marcink
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
    :license: GPLv3, see COPYING for more details.
"""
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import os
import re
import logging
import datetime
import traceback
import paste
import beaker
import tarfile
import shutil
import decorator
import warnings
from os.path import abspath
from os.path import dirname as dn, join as jn

from paste.script.command import Command, BadCommand

from webhelpers.text import collapse, remove_formatting, strip_tags

from rhodecode.lib.vcs import get_backend
from rhodecode.lib.vcs.backends.base import BaseChangeset
from rhodecode.lib.vcs.utils.lazy import LazyProperty
from rhodecode.lib.vcs.utils.hgcompat import ui, config
from rhodecode.lib.vcs.utils.helpers import get_scm
from rhodecode.lib.vcs.exceptions import VCSError

from rhodecode.lib.caching_query import FromCache

from rhodecode.model import meta
from rhodecode.model.db import Repository, User, RhodeCodeUi, \
    UserLog, RepoGroup, RhodeCodeSetting, CacheInvalidation, UserGroup
from rhodecode.model.meta import Session
from rhodecode.model.repos_group import ReposGroupModel
-from rhodecode.lib.utils2 import safe_str, safe_unicode
+from rhodecode.lib.utils2 import safe_str, safe_unicode, get_current_rhodecode_user
from rhodecode.lib.vcs.utils.fakemod import create_module
from rhodecode.model.users_group import UserGroupModel

log = logging.getLogger(__name__)

REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
66
66
def recursive_replace(str_, replace=' '):
    """
    Recursive replace of given sign to just one instance

    :param str_: given string
    :param replace: char to find and replace multiple instances

    Examples::
        >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
        'Mighty-Mighty-Bo-sstones'
    """

    if str_.find(replace * 2) == -1:
        return str_
    else:
        str_ = str_.replace(replace * 2, replace)
        return recursive_replace(str_, replace)


def repo_name_slug(value):
    """
    Return slug of name of repository
    This function is called on each creation/modification
    of repository to prevent bad names in repo
    """

    slug = remove_formatting(value)
    slug = strip_tags(slug)

    for c in """`?=[]\;'"<>,/~!@#$%^&*()+{}|: """:
        slug = slug.replace(c, '-')
    slug = recursive_replace(slug, '-')
    slug = collapse(slug, '-')
    return slug
102
102
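For reference, a rough illustration of what repo_name_slug produces (the exact result also depends on webhelpers' remove_formatting/collapse helpers, so treat this as approximate):

    repo_name_slug('my repo name')   # -> 'my-repo-name'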
#==============================================================================
# PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
#==============================================================================
def get_repo_slug(request):
    _repo = request.environ['pylons.routes_dict'].get('repo_name')
    if _repo:
        _repo = _repo.rstrip('/')
    return _repo


def get_repos_group_slug(request):
    _group = request.environ['pylons.routes_dict'].get('group_name')
    if _group:
        _group = _group.rstrip('/')
    return _group


def get_user_group_slug(request):
    _group = request.environ['pylons.routes_dict'].get('id')
    try:
        _group = UserGroup.get(_group)
        if _group:
            _group = _group.users_group_name
    except Exception:
        log.debug(traceback.format_exc())
        #catch all failures here
        pass

    return _group
133
133
def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
    """
    Action logger for various actions made by users

    :param user: user that made this action, can be a unique username string or
        object containing user_id attribute
    :param action: action to log, should be on of predefined unique actions for
        easy translations
    :param repo: string name of repository or object containing repo_id,
        that action was made on
    :param ipaddr: optional ip address from what the action was made
    :param sa: optional sqlalchemy session

    """

    if not sa:
        sa = meta.Session()
    # if we don't get explicit IP address try to get one from registered user
    # in tmpl context var
-    from pylons import tmpl_context
-    if not ipaddr and hasattr(tmpl_context, 'rhodecode_user'):
-        ipaddr = tmpl_context.rhodecode_user.ip_addr
+    if not ipaddr:
+        ipaddr = getattr(get_current_rhodecode_user(), 'ip_addr', '')

    try:
        if hasattr(user, 'user_id'):
            user_obj = User.get(user.user_id)
        elif isinstance(user, basestring):
            user_obj = User.get_by_username(user)
        else:
            raise Exception('You have to provide a user object or a username')

        if hasattr(repo, 'repo_id'):
            repo_obj = Repository.get(repo.repo_id)
            repo_name = repo_obj.repo_name
        elif isinstance(repo, basestring):
            repo_name = repo.lstrip('/')
            repo_obj = Repository.get_by_repo_name(repo_name)
        else:
            repo_obj = None
            repo_name = ''

        user_log = UserLog()
        user_log.user_id = user_obj.user_id
        user_log.username = user_obj.username
        user_log.action = safe_unicode(action)

        user_log.repository = repo_obj
        user_log.repository_name = repo_name

        user_log.action_date = datetime.datetime.now()
        user_log.user_ip = ipaddr
        sa.add(user_log)

        log.info('Logging action:%s on %s by user:%s ip:%s' %
                 (action, safe_unicode(repo), user_obj, ipaddr))
        if commit:
            sa.commit()
    except Exception:
        log.error(traceback.format_exc())
        raise
195
194
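An illustrative call, based on the signature and docstring above (the action and repository names are made-up values):

    action_logger(user='admin', action='user_deleted_repo',
                  repo='some/repository', commit=True)

With this change, when no explicit ipaddr is passed the logged IP falls back to the ip_addr of the current RhodeCode user, or '' if none is available.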
def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
    """
    Scans given path for repos and return (name,(type,path)) tuple

    :param path: path to scan for repositories
    :param recursive: recursive search and return names with subdirs in front
    """

    # remove ending slash for better results
    path = path.rstrip(os.sep)
    log.debug('now scanning in %s location recursive:%s...' % (path, recursive))

    def _get_repos(p):
        if not os.access(p, os.R_OK) or not os.access(p, os.X_OK):
            log.warn('ignoring repo path without access: %s' % (p,))
            return
        if not os.access(p, os.W_OK):
            log.warn('repo path without write access: %s' % (p,))
        for dirpath in os.listdir(p):
            if os.path.isfile(os.path.join(p, dirpath)):
                continue
            cur_path = os.path.join(p, dirpath)

            # skip removed repos
            if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
                continue

            #skip .<somethin> dirs
            if dirpath.startswith('.'):
                continue

            try:
                scm_info = get_scm(cur_path)
                yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
            except VCSError:
                if not recursive:
                    continue
                #check if this dir containts other repos for recursive scan
                rec_path = os.path.join(p, dirpath)
                if os.path.isdir(rec_path):
                    for inner_scm in _get_repos(rec_path):
                        yield inner_scm

    return _get_repos(path)
241
240
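A sketch of how the scanner is typically consumed (illustrative; the path is a made-up value). Each yielded item is the repo name relative to the scanned path plus the (type, path) tuple returned by get_scm:

    for name, (scm_type, repo_path) in get_filesystem_repos('/srv/repos',
                                                            recursive=True):
        print name, scm_type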
def is_valid_repo(repo_name, base_path, scm=None):
    """
    Returns True if given path is a valid repository False otherwise.
    If scm param is given also compare if given scm is the same as expected
    from scm parameter

    :param repo_name:
    :param base_path:
    :param scm:

    :return True: if given path is a valid repository
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_name))

    try:
        scm_ = get_scm(full_path)
        if scm:
            return scm_[0] == scm
        return True
    except VCSError:
        return False


def is_valid_repos_group(repos_group_name, base_path, skip_path_check=False):
    """
    Returns True if given path is a repository group False otherwise

    :param repo_name:
    :param base_path:
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repos_group_name))

    # check if it's not a repo
    if is_valid_repo(repos_group_name, base_path):
        return False

    try:
        # we need to check bare git repos at higher level
        # since we might match branches/hooks/info/objects or possible
        # other things inside bare git repo
        get_scm(os.path.dirname(full_path))
        return False
    except VCSError:
        pass

    # check if it's a valid path
    if skip_path_check or os.path.isdir(full_path):
        return True

    return False


def ask_ok(prompt, retries=4, complaint='Yes or no please!'):
    while True:
        ok = raw_input(prompt)
        if ok in ('y', 'ye', 'yes'):
            return True
        if ok in ('n', 'no', 'nop', 'nope'):
            return False
        retries = retries - 1
        if retries < 0:
            raise IOError
        print complaint

#propagated from mercurial documentation
ui_sections = ['alias', 'auth',
               'decode/encode', 'defaults',
               'diff', 'email',
               'extensions', 'format',
               'merge-patterns', 'merge-tools',
               'hooks', 'http_proxy',
               'smtp', 'patch',
               'paths', 'profiling',
               'server', 'trusted',
               'ui', 'web', ]
318
317
def make_ui(read_from='file', path=None, checkpaths=True, clear_session=True):
    """
    A function that will read python rc files or database
    and make an mercurial ui object from read options

    :param path: path to mercurial config file
    :param checkpaths: check the path
    :param read_from: read from 'file' or 'db'
    """

    baseui = ui.ui()

    # clean the baseui object
    baseui._ocfg = config.config()
    baseui._ucfg = config.config()
    baseui._tcfg = config.config()

    if read_from == 'file':
        if not os.path.isfile(path):
            log.debug('hgrc file is not present at %s, skipping...' % path)
            return False
        log.debug('reading hgrc from %s' % path)
        cfg = config.config()
        cfg.read(path)
        for section in ui_sections:
            for k, v in cfg.items(section):
                log.debug('settings ui from file: [%s] %s=%s' % (section, k, v))
                baseui.setconfig(safe_str(section), safe_str(k), safe_str(v))

    elif read_from == 'db':
        sa = meta.Session()
        ret = sa.query(RhodeCodeUi)\
            .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
            .all()

        hg_ui = ret
        for ui_ in hg_ui:
            if ui_.ui_active:
                log.debug('settings ui from db: [%s] %s=%s', ui_.ui_section,
                          ui_.ui_key, ui_.ui_value)
                baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
                                 safe_str(ui_.ui_value))
            if ui_.ui_key == 'push_ssl':
                # force set push_ssl requirement to False, rhodecode
                # handles that
                baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
                                 False)
        if clear_session:
            meta.Session.remove()
    return baseui
370
369
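Illustrative usage (not part of this changeset): make_ui('db') builds the ui object from the RhodeCodeUi table, while make_ui('file', path='/etc/rhodecode/hgrc') would read a local hgrc; the file path here is a made-up example.

    baseui = make_ui('db', clear_session=False)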
def set_rhodecode_config(config):
    """
    Updates pylons config with new settings from database

    :param config:
    """
    hgsettings = RhodeCodeSetting.get_app_settings()

    for k, v in hgsettings.items():
        config[k] = v


def set_vcs_config(config):
    """
    Patch VCS config with some RhodeCode specific stuff

    :param config: rhodecode.CONFIG
    """
    import rhodecode
    from rhodecode.lib.vcs import conf
    from rhodecode.lib.utils2 import aslist
    conf.settings.BACKENDS = {
        'hg': 'rhodecode.lib.vcs.backends.hg.MercurialRepository',
        'git': 'rhodecode.lib.vcs.backends.git.GitRepository',
    }

    conf.settings.GIT_EXECUTABLE_PATH = config.get('git_path', 'git')
    conf.settings.GIT_REV_FILTER = config.get('git_rev_filter', '--all').strip()
    conf.settings.DEFAULT_ENCODINGS = aslist(config.get('default_encoding',
                                                        'utf8'), sep=',')


def map_groups(path):
    """
    Given a full path to a repository, create all nested groups that this
    repo is inside. This function creates parent-child relationships between
    groups and creates default perms for all new groups.

    :param paths: full path to repository
    """
    sa = meta.Session()
    groups = path.split(Repository.url_sep())
    parent = None
    group = None

    # last element is repo in nested groups structure
    groups = groups[:-1]
    rgm = ReposGroupModel(sa)
    owner = User.get_first_admin()
    for lvl, group_name in enumerate(groups):
        group_name = '/'.join(groups[:lvl] + [group_name])
        group = RepoGroup.get_by_group_name(group_name)
        desc = '%s group' % group_name

        # skip folders that are now removed repos
        if REMOVED_REPO_PAT.match(group_name):
            break

        if group is None:
            log.debug('creating group level: %s group_name: %s'
                      % (lvl, group_name))
            group = RepoGroup(group_name, parent)
            group.group_description = desc
            group.user = owner
            sa.add(group)
            perm_obj = rgm._create_default_perms(group)
            sa.add(perm_obj)
            sa.flush()

        parent = group
    return group
443
442
def repo2db_mapper(initial_repo_list, remove_obsolete=False,
                   install_git_hook=False):
    """
    maps all repos given in initial_repo_list, non existing repositories
    are created, if remove_obsolete is True it also check for db entries
    that are not in initial_repo_list and removes them.

    :param initial_repo_list: list of repositories found by scanning methods
    :param remove_obsolete: check for obsolete entries in database
    :param install_git_hook: if this is True, also check and install githook
        for a repo if missing
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.scm import ScmModel
    sa = meta.Session()
    rm = RepoModel()
    user = User.get_first_admin()
    added = []

    ##creation defaults
    defs = RhodeCodeSetting.get_default_repo_settings(strip_prefix=True)
    enable_statistics = defs.get('repo_enable_statistics')
    enable_locking = defs.get('repo_enable_locking')
    enable_downloads = defs.get('repo_enable_downloads')
    private = defs.get('repo_private')

    for name, repo in initial_repo_list.items():
        group = map_groups(name)
        db_repo = rm.get_by_repo_name(name)
        # found repo that is on filesystem not in RhodeCode database
        if not db_repo:
            log.info('repository %s not found, creating now' % name)
            added.append(name)
            desc = (repo.description
                    if repo.description != 'unknown'
                    else '%s repository' % name)

            new_repo = rm.create_repo(
                repo_name=name,
                repo_type=repo.alias,
                description=desc,
                repos_group=getattr(group, 'group_id', None),
                owner=user,
                just_db=True,
                enable_locking=enable_locking,
                enable_downloads=enable_downloads,
                enable_statistics=enable_statistics,
                private=private
            )
            # we added that repo just now, and make sure it has githook
            # installed
            if new_repo.repo_type == 'git':
                ScmModel().install_git_hook(new_repo.scm_instance)
            new_repo.update_changeset_cache()
        elif install_git_hook:
            if db_repo.repo_type == 'git':
                ScmModel().install_git_hook(db_repo.scm_instance)

    sa.commit()
    removed = []
    if remove_obsolete:
        # remove from database those repositories that are not in the filesystem
        for repo in sa.query(Repository).all():
            if repo.repo_name not in initial_repo_list.keys():
                log.debug("Removing non-existing repository found in db `%s`" %
                          repo.repo_name)
                try:
                    removed.append(repo.repo_name)
                    RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
                    sa.commit()
                except Exception:
                    #don't hold further removals on error
                    log.error(traceback.format_exc())
                    sa.rollback()
    return added, removed
520
519
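An illustrative invocation (not from this changeset). initial_repo_list is assumed to be a mapping of repository name to a vcs repository object, e.g. the result of a filesystem scan:

    added, removed = repo2db_mapper(initial_repo_list,
                                    remove_obsolete=True,
                                    install_git_hook=True)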
# set cache regions for beaker so celery can utilise it
def add_cache(settings):
    cache_settings = {'regions': None}
    for key in settings.keys():
        for prefix in ['beaker.cache.', 'cache.']:
            if key.startswith(prefix):
                name = key.split(prefix)[1].strip()
                cache_settings[name] = settings[key].strip()
    if cache_settings['regions']:
        for region in cache_settings['regions'].split(','):
            region = region.strip()
            region_settings = {}
            for key, value in cache_settings.items():
                if key.startswith(region):
                    region_settings[key.split('.')[1]] = value
            region_settings['expire'] = int(region_settings.get('expire',
                                                                60))
            region_settings.setdefault('lock_dir',
                                       cache_settings.get('lock_dir'))
            region_settings.setdefault('data_dir',
                                       cache_settings.get('data_dir'))

            if 'type' not in region_settings:
                region_settings['type'] = cache_settings.get('type',
                                                             'memory')
            beaker.cache.cache_regions[region] = region_settings
548
547
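For reference (illustrative values), add_cache turns flat ini-style beaker settings into cache regions:

    settings = {
        'beaker.cache.regions': 'sql_cache_short',
        'beaker.cache.sql_cache_short.type': 'memory',
        'beaker.cache.sql_cache_short.expire': '10',
    }
    add_cache(settings)
    # beaker.cache.cache_regions['sql_cache_short'] is now
    # {'type': 'memory', 'expire': 10, 'lock_dir': None, 'data_dir': None}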
def load_rcextensions(root_path):
    import rhodecode
    from rhodecode.config import conf

    path = os.path.join(root_path, 'rcextensions', '__init__.py')
    if os.path.isfile(path):
        rcext = create_module('rc', path)
        EXT = rhodecode.EXTENSIONS = rcext
        log.debug('Found rcextensions now loading %s...' % rcext)

        # Additional mappings that are not present in the pygments lexers
        conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))

        #OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)

        if getattr(EXT, 'INDEX_EXTENSIONS', []):
            log.debug('settings custom INDEX_EXTENSIONS')
            conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])

        #ADDITIONAL MAPPINGS
        log.debug('adding extra into INDEX_EXTENSIONS')
        conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))

        # auto check if the module is not missing any data, set to default if is
        # this will help autoupdate new feature of rcext module
        from rhodecode.config import rcextensions
        for k in dir(rcextensions):
            if not k.startswith('_') and not hasattr(EXT, k):
                setattr(EXT, k, getattr(rcextensions, k))


def get_custom_lexer(extension):
    """
    returns a custom lexer if it's defined in rcextensions module, or None
    if there's no custom lexer defined
    """
    import rhodecode
    from pygments import lexers
    #check if we didn't define this extension as other lexer
    if rhodecode.EXTENSIONS and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
        _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
        return lexers.get_lexer_by_name(_lexer_name)


#==============================================================================
# TEST FUNCTIONS AND CREATORS
#==============================================================================
def create_test_index(repo_location, config, full_index):
    """
    Makes default test index

    :param config: test config
    :param full_index:
    """

    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
    from rhodecode.lib.pidlock import DaemonLock, LockHeld

    repo_location = repo_location

    index_location = os.path.join(config['app_conf']['index_dir'])
    if not os.path.exists(index_location):
        os.makedirs(index_location)

    try:
        l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
        WhooshIndexingDaemon(index_location=index_location,
                             repo_location=repo_location)\
            .run(full_index=full_index)
        l.release()
    except LockHeld:
        pass
622
621
def create_test_env(repos_test_path, config):
    """
    Makes a fresh database and
    install test repository into tmp dir
    """
    from rhodecode.lib.db_manage import DbManage
    from rhodecode.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH

    # PART ONE create db
    dbconf = config['sqlalchemy.db1.url']
    log.debug('making test db %s' % dbconf)

    # create test dir if it doesn't exist
    if not os.path.isdir(repos_test_path):
        log.debug('Creating testdir %s' % repos_test_path)
        os.makedirs(repos_test_path)

    dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
                        tests=True)
    dbmanage.create_tables(override=True)
    dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
    dbmanage.create_default_user()
    dbmanage.admin_prompt()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()
    Session().commit()
    # PART TWO make test repo
    log.debug('making test vcs repositories')

    idx_path = config['app_conf']['index_dir']
    data_path = config['app_conf']['cache_dir']

    #clean index and data
    if idx_path and os.path.exists(idx_path):
        log.debug('remove %s' % idx_path)
        shutil.rmtree(idx_path)

    if data_path and os.path.exists(data_path):
        log.debug('remove %s' % data_path)
        shutil.rmtree(data_path)

    #CREATE DEFAULT TEST REPOS
    cur_dir = dn(dn(abspath(__file__)))
    tar = tarfile.open(jn(cur_dir, 'tests', 'fixtures', "vcs_test_hg.tar.gz"))
    tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
    tar.close()

    cur_dir = dn(dn(abspath(__file__)))
    tar = tarfile.open(jn(cur_dir, 'tests', 'fixtures', "vcs_test_git.tar.gz"))
    tar.extractall(jn(TESTS_TMP_PATH, GIT_REPO))
    tar.close()

    #LOAD VCS test stuff
    from rhodecode.tests.vcs import setup_package
    setup_package()


#==============================================================================
# PASTER COMMANDS
#==============================================================================
class BasePasterCommand(Command):
    """
    Abstract Base Class for paster commands.

    The celery commands are somewhat aggressive about loading
    celery.conf, and since our module sets the `CELERY_LOADER`
    environment variable to our loader, we have to bootstrap a bit and
    make sure we've had a chance to load the pylons config off of the
    command line, otherwise everything fails.
    """
    min_args = 1
    min_args_error = "Please provide a paster config file as an argument."
    takes_config_file = 1
    requires_config_file = True

    def notify_msg(self, msg, log=False):
        """Make a notification to user, additionally if logger is passed
        it logs this action using given logger

        :param msg: message that will be printed to user
        :param log: logging instance, to use to additionally log this message

        """
        if log and isinstance(log, logging):
            log(msg)

    def run(self, args):
        """
        Overrides Command.run

        Checks for a config file argument and loads it.
        """
        if len(args) < self.min_args:
            raise BadCommand(
                self.min_args_error % {'min_args': self.min_args,
                                       'actual_args': len(args)})

        # Decrement because we're going to lob off the first argument.
        # @@ This is hacky
        self.min_args -= 1
        self.bootstrap_config(args[0])
        self.update_parser()
        return super(BasePasterCommand, self).run(args[1:])

    def update_parser(self):
        """
        Abstract method. Allows for the class's parser to be updated
        before the superclass's `run` method is called. Necessary to
        allow options/arguments to be passed through to the underlying
        celery command.
        """
        raise NotImplementedError("Abstract Method.")

    def bootstrap_config(self, conf):
        """
        Loads the pylons configuration.
        """
        from pylons import config as pylonsconfig

        self.path_to_ini_file = os.path.realpath(conf)
        conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
        pylonsconfig.init_app(conf.global_conf, conf.local_conf)

    def _init_session(self):
        """
        Inits SqlAlchemy Session
        """
        logging.config.fileConfig(self.path_to_ini_file)
        from pylons import config
        from rhodecode.model import init_model
        from rhodecode.lib.utils2 import engine_from_config

        #get to remove repos !!
        add_cache(config)
        engine = engine_from_config(config, 'sqlalchemy.db1.')
        init_model(engine)


def check_git_version():
    """
    Checks what version of git is installed in system, and issues a warning
    if it's too old for RhodeCode to properly work.
    """
    from rhodecode import BACKENDS
    from rhodecode.lib.vcs.backends.git.repository import GitRepository
    from rhodecode.lib.vcs.conf import settings
    from distutils.version import StrictVersion

    stdout, stderr = GitRepository._run_git_command('--version', _bare=True,
                                                    _safe=True)

    ver = (stdout.split(' ')[-1] or '').strip() or '0.0.0'
    if len(ver.split('.')) > 3:
        #StrictVersion needs to be only 3 element type
        ver = '.'.join(ver.split('.')[:3])
    try:
        _ver = StrictVersion(ver)
    except Exception:
        _ver = StrictVersion('0.0.0')
        stderr = traceback.format_exc()

    req_ver = '1.7.4'
    to_old_git = False
    if _ver < StrictVersion(req_ver):
        to_old_git = True

    if 'git' in BACKENDS:
        log.debug('GIT executable: "%s" version detected: %s'
                  % (settings.GIT_EXECUTABLE_PATH, stdout))
        if stderr:
            log.warning('Unable to detect git version, org error was: %r' % stderr)
        elif to_old_git:
            log.warning('RhodeCode detected git version %s, which is too old '
                        'for the system to function properly. Make sure '
                        'its version is at least %s' % (ver, req_ver))
    return _ver


@decorator.decorator
def jsonify(func, *args, **kwargs):
    """Action decorator that formats output for JSON

    Given a function that will return content, this decorator will turn
    the result into JSON, with a content-type of 'application/json' and
    output it.

    """
    from pylons.decorators.util import get_pylons
    from rhodecode.lib.compat import json
    pylons = get_pylons(args)
    pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8'
    data = func(*args, **kwargs)
    if isinstance(data, (list, tuple)):
        msg = "JSON responses with Array envelopes are susceptible to " \
              "cross-site data leak attacks, see " \
              "http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
        warnings.warn(msg, Warning, 2)
        log.warning(msg)
    log.debug("Returning JSON wrapped action output")
    return json.dumps(data, encoding='utf-8')
@@ -1,644 +1,655 b''
# -*- coding: utf-8 -*-
"""
    rhodecode.lib.utils
    ~~~~~~~~~~~~~~~~~~~

    Some simple helper functions

    :created_on: Jan 5, 2011
    :author: marcink
    :copyright: (C) 2011-2012 Marcin Kuzminski <marcin@python-works.com>
    :license: GPLv3, see COPYING for more details.
"""
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import os
import re
import sys
import time
import uuid
import datetime
import traceback
import webob

from pylons.i18n.translation import _, ungettext
from rhodecode.lib.vcs.utils.lazy import LazyProperty
from rhodecode.lib.compat import json


def __get_lem():
    """
    Get language extension map based on what's inside pygments lexers
    """
    from pygments import lexers
    from string import lower
    from collections import defaultdict

    d = defaultdict(lambda: [])

    def __clean(s):
        s = s.lstrip('*')
        s = s.lstrip('.')

        if s.find('[') != -1:
            exts = []
            start, stop = s.find('['), s.find(']')

            for suffix in s[start + 1:stop]:
                exts.append(s[:s.find('[')] + suffix)
            return map(lower, exts)
        else:
            return map(lower, [s])

    for lx, t in sorted(lexers.LEXERS.items()):
        m = map(__clean, t[-2])
        if m:
            m = reduce(lambda x, y: x + y, m)
        for ext in m:
            desc = lx.replace('Lexer', '')
            d[ext].append(desc)

    return dict(d)
74
74
75 def str2bool(_str):
75 def str2bool(_str):
76 """
76 """
77 Returns a True/False value from the given string; it tries to translate the
77 Returns a True/False value from the given string; it tries to translate the
78 string into a boolean
78 string into a boolean
79
79
80 :param _str: string value to translate into boolean
80 :param _str: string value to translate into boolean
81 :rtype: boolean
81 :rtype: boolean
82 :returns: boolean from given string
82 :returns: boolean from given string
83 """
83 """
84 if _str is None:
84 if _str is None:
85 return False
85 return False
86 if _str in (True, False):
86 if _str in (True, False):
87 return _str
87 return _str
88 _str = str(_str).strip().lower()
88 _str = str(_str).strip().lower()
89 return _str in ('t', 'true', 'y', 'yes', 'on', '1')
89 return _str in ('t', 'true', 'y', 'yes', 'on', '1')
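# Illustrative usage sketch (not part of the original diff; assumes the
# Python 2 semantics of str2bool above):
#   >>> str2bool('Yes'), str2bool('0'), str2bool(None), str2bool(True)
#   (True, False, False, True)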
90
90
91
91
92 def aslist(obj, sep=None, strip=True):
92 def aslist(obj, sep=None, strip=True):
93 """
93 """
94 Returns the given string split by sep as a list
94 Returns the given string split by sep as a list
95
95
96 :param obj:
96 :param obj:
97 :param sep:
97 :param sep:
98 :param strip:
98 :param strip:
99 """
99 """
100 if isinstance(obj, (basestring)):
100 if isinstance(obj, (basestring)):
101 lst = obj.split(sep)
101 lst = obj.split(sep)
102 if strip:
102 if strip:
103 lst = [v.strip() for v in lst]
103 lst = [v.strip() for v in lst]
104 return lst
104 return lst
105 elif isinstance(obj, (list, tuple)):
105 elif isinstance(obj, (list, tuple)):
106 return obj
106 return obj
107 elif obj is None:
107 elif obj is None:
108 return []
108 return []
109 else:
109 else:
110 return [obj]
110 return [obj]
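# Illustrative usage sketch (hypothetical values, based on the aslist
# behaviour defined above):
#   >>> aslist('utf8, latin1 ', sep=',')
#   ['utf8', 'latin1']
#   >>> aslist(None), aslist(('a', 'b')), aslist(42)
#   ([], ('a', 'b'), [42])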
111
111
112
112
113 def convert_line_endings(line, mode):
113 def convert_line_endings(line, mode):
114 """
114 """
115 Converts the line endings of a given line according to the given mode
115 Converts the line endings of a given line according to the given mode
116
116
117 Available modes are::
117 Available modes are::
118 0 - Unix
118 0 - Unix
119 1 - Mac
119 1 - Mac
120 2 - DOS
120 2 - DOS
121
121
122 :param line: given line to convert
122 :param line: given line to convert
123 :param mode: mode to convert to
123 :param mode: mode to convert to
124 :rtype: str
124 :rtype: str
125 :return: converted line according to mode
125 :return: converted line according to mode
126 """
126 """
127 from string import replace
127 from string import replace
128
128
129 if mode == 0:
129 if mode == 0:
130 line = replace(line, '\r\n', '\n')
130 line = replace(line, '\r\n', '\n')
131 line = replace(line, '\r', '\n')
131 line = replace(line, '\r', '\n')
132 elif mode == 1:
132 elif mode == 1:
133 line = replace(line, '\r\n', '\r')
133 line = replace(line, '\r\n', '\r')
134 line = replace(line, '\n', '\r')
134 line = replace(line, '\n', '\r')
135 elif mode == 2:
135 elif mode == 2:
136 line = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", line)
136 line = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", line)
137 return line
137 return line
138
138
139
139
140 def detect_mode(line, default):
140 def detect_mode(line, default):
141 """
141 """
142 Detects the line-break style of a given line; if the line break couldn't be
142 Detects the line-break style of a given line; if the line break couldn't be
143 found, the given default value is returned
143 found, the given default value is returned
144
144
145 :param line: str line
145 :param line: str line
146 :param default: default
146 :param default: default
147 :rtype: int
147 :rtype: int
148 :return: line-end value, one of 0 - Unix, 1 - Mac, 2 - DOS
148 :return: line-end value, one of 0 - Unix, 1 - Mac, 2 - DOS
149 """
149 """
150 if line.endswith('\r\n'):
150 if line.endswith('\r\n'):
151 return 2
151 return 2
152 elif line.endswith('\n'):
152 elif line.endswith('\n'):
153 return 0
153 return 0
154 elif line.endswith('\r'):
154 elif line.endswith('\r'):
155 return 1
155 return 1
156 else:
156 else:
157 return default
157 return default
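# Illustrative usage sketch for the two helpers above (not part of the
# original diff):
#   >>> detect_mode('foo\r\n', default=0)
#   2
#   >>> convert_line_endings('foo\r\nbar\r', mode=0)
#   'foo\nbar\n'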
158
158
159
159
160 def generate_api_key(username, salt=None):
160 def generate_api_key(username, salt=None):
161 """
161 """
162 Generates a unique API key for the given username; if salt is not given
162 Generates a unique API key for the given username; if salt is not given
163 it'll be generated from a random string
163 it'll be generated from a random string
164
164
165 :param username: username as string
165 :param username: username as string
166 :param salt: salt used to generate the key
166 :param salt: salt used to generate the key
167 :rtype: str
167 :rtype: str
168 :returns: sha1 hash from username+salt
168 :returns: sha1 hash from username+salt
169 """
169 """
170 from tempfile import _RandomNameSequence
170 from tempfile import _RandomNameSequence
171 import hashlib
171 import hashlib
172
172
173 if salt is None:
173 if salt is None:
174 salt = _RandomNameSequence().next()
174 salt = _RandomNameSequence().next()
175
175
176 return hashlib.sha1(username + salt).hexdigest()
176 return hashlib.sha1(username + salt).hexdigest()
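# Illustrative sketch (hypothetical username and salt; the key is simply
# the sha1 hex digest of username + salt, per the function above):
#   >>> import hashlib
#   >>> generate_api_key('john', salt='s3cr3t') == hashlib.sha1('john' + 's3cr3t').hexdigest()
#   True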
177
177
178
178
179 def safe_int(val, default=None):
179 def safe_int(val, default=None):
180 """
180 """
181 Returns int() of val; if val is not convertible to int, the given default
181 Returns int() of val; if val is not convertible to int, the given default
182 is used instead
182 is used instead
183
183
184 :param val:
184 :param val:
185 :param default:
185 :param default:
186 """
186 """
187
187
188 try:
188 try:
189 val = int(val)
189 val = int(val)
190 except (ValueError, TypeError):
190 except (ValueError, TypeError):
191 val = default
191 val = default
192
192
193 return val
193 return val
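# Illustrative usage sketch (not part of the original diff):
#   >>> safe_int('42'), safe_int('42px'), safe_int('42px', default=0), safe_int(None, -1)
#   (42, None, 0, -1)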
194
194
195
195
196 def safe_unicode(str_, from_encoding=None):
196 def safe_unicode(str_, from_encoding=None):
197 """
197 """
198 Safe unicode function. Does a few tricks to turn str_ into unicode
198 Safe unicode function. Does a few tricks to turn str_ into unicode
199
199
200 In case of a UnicodeDecodeError we try to return it with the encoding detected
200 In case of a UnicodeDecodeError we try to return it with the encoding detected
201 by the chardet library; if that fails, fall back to unicode with errors replaced
201 by the chardet library; if that fails, fall back to unicode with errors replaced
202
202
203 :param str_: string to decode
203 :param str_: string to decode
204 :rtype: unicode
204 :rtype: unicode
205 :returns: unicode object
205 :returns: unicode object
206 """
206 """
207 if isinstance(str_, unicode):
207 if isinstance(str_, unicode):
208 return str_
208 return str_
209
209
210 if not from_encoding:
210 if not from_encoding:
211 import rhodecode
211 import rhodecode
212 DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
212 DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
213 'utf8'), sep=',')
213 'utf8'), sep=',')
214 from_encoding = DEFAULT_ENCODINGS
214 from_encoding = DEFAULT_ENCODINGS
215
215
216 if not isinstance(from_encoding, (list, tuple)):
216 if not isinstance(from_encoding, (list, tuple)):
217 from_encoding = [from_encoding]
217 from_encoding = [from_encoding]
218
218
219 try:
219 try:
220 return unicode(str_)
220 return unicode(str_)
221 except UnicodeDecodeError:
221 except UnicodeDecodeError:
222 pass
222 pass
223
223
224 for enc in from_encoding:
224 for enc in from_encoding:
225 try:
225 try:
226 return unicode(str_, enc)
226 return unicode(str_, enc)
227 except UnicodeDecodeError:
227 except UnicodeDecodeError:
228 pass
228 pass
229
229
230 try:
230 try:
231 import chardet
231 import chardet
232 encoding = chardet.detect(str_)['encoding']
232 encoding = chardet.detect(str_)['encoding']
233 if encoding is None:
233 if encoding is None:
234 raise Exception()
234 raise Exception()
235 return str_.decode(encoding)
235 return str_.decode(encoding)
236 except (ImportError, UnicodeDecodeError, Exception):
236 except (ImportError, UnicodeDecodeError, Exception):
237 return unicode(str_, from_encoding[0], 'replace')
237 return unicode(str_, from_encoding[0], 'replace')
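# Illustrative sketch (assumes Python 2, where str is a byte string;
# '\xc5\xbc' is the UTF-8 encoding of u'\u017c'):
#   >>> safe_unicode(u'abc')
#   u'abc'
#   >>> safe_unicode('\xc5\xbc', from_encoding='utf8')
#   u'\u017c'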
238
238
239
239
240 def safe_str(unicode_, to_encoding=None):
240 def safe_str(unicode_, to_encoding=None):
241 """
241 """
242 Safe str function. Does a few tricks to turn unicode_ into a string
242 Safe str function. Does a few tricks to turn unicode_ into a string
243
243
244 In case of a UnicodeEncodeError we try to return it with the encoding detected
244 In case of a UnicodeEncodeError we try to return it with the encoding detected
245 by the chardet library; if that fails, fall back to a string with errors replaced
245 by the chardet library; if that fails, fall back to a string with errors replaced
246
246
247 :param unicode_: unicode to encode
247 :param unicode_: unicode to encode
248 :rtype: str
248 :rtype: str
249 :returns: str object
249 :returns: str object
250 """
250 """
251
251
252 # if it's not a basestring, cast to str
252 # if it's not a basestring, cast to str
253 if not isinstance(unicode_, basestring):
253 if not isinstance(unicode_, basestring):
254 return str(unicode_)
254 return str(unicode_)
255
255
256 if isinstance(unicode_, str):
256 if isinstance(unicode_, str):
257 return unicode_
257 return unicode_
258
258
259 if not to_encoding:
259 if not to_encoding:
260 import rhodecode
260 import rhodecode
261 DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
261 DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
262 'utf8'), sep=',')
262 'utf8'), sep=',')
263 to_encoding = DEFAULT_ENCODINGS
263 to_encoding = DEFAULT_ENCODINGS
264
264
265 if not isinstance(to_encoding, (list, tuple)):
265 if not isinstance(to_encoding, (list, tuple)):
266 to_encoding = [to_encoding]
266 to_encoding = [to_encoding]
267
267
268 for enc in to_encoding:
268 for enc in to_encoding:
269 try:
269 try:
270 return unicode_.encode(enc)
270 return unicode_.encode(enc)
271 except UnicodeEncodeError:
271 except UnicodeEncodeError:
272 pass
272 pass
273
273
274 try:
274 try:
275 import chardet
275 import chardet
276 encoding = chardet.detect(unicode_)['encoding']
276 encoding = chardet.detect(unicode_)['encoding']
277 if encoding is None:
277 if encoding is None:
278 raise Exception()  # no usable encoding detected, use the fallback below
278 raise Exception()  # no usable encoding detected, use the fallback below
279
279
280 return unicode_.encode(encoding)
280 return unicode_.encode(encoding)
281 except (ImportError, UnicodeEncodeError, Exception):
281 except (ImportError, UnicodeEncodeError, Exception):
282 return unicode_.encode(to_encoding[0], 'replace')
282 return unicode_.encode(to_encoding[0], 'replace')
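# Illustrative sketch (Python 2; the reverse of safe_unicode above):
#   >>> safe_str(u'\u017c', to_encoding='utf8')
#   '\xc5\xbc'
#   >>> safe_str(12345)
#   '12345'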
283
283
284
284
285 def remove_suffix(s, suffix):
285 def remove_suffix(s, suffix):
286 if s.endswith(suffix):
286 if s.endswith(suffix):
287 s = s[:-1 * len(suffix)]
287 s = s[:-1 * len(suffix)]
288 return s
288 return s
289
289
290
290
291 def remove_prefix(s, prefix):
291 def remove_prefix(s, prefix):
292 if s.startswith(prefix):
292 if s.startswith(prefix):
293 s = s[len(prefix):]
293 s = s[len(prefix):]
294 return s
294 return s
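# Illustrative usage sketch for the two helpers above (hypothetical values):
#   >>> remove_suffix('vcs-repo.git', '.git')
#   'vcs-repo'
#   >>> remove_prefix('repo_description', 'repo_')
#   'description'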
295
295
296
296
297 def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
297 def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
298 """
298 """
299 Custom engine_from_config function that makes sure we use NullPool for
299 Custom engine_from_config function that makes sure we use NullPool for
300 file-based sqlite databases. This prevents errors on sqlite. This only
300 file-based sqlite databases. This prevents errors on sqlite. This only
301 applies to sqlalchemy versions < 0.7.0
301 applies to sqlalchemy versions < 0.7.0
302
302
303 """
303 """
304 import sqlalchemy
304 import sqlalchemy
305 from sqlalchemy import engine_from_config as efc
305 from sqlalchemy import engine_from_config as efc
306 import logging
306 import logging
307
307
308 if int(sqlalchemy.__version__.split('.')[1]) < 7:
308 if int(sqlalchemy.__version__.split('.')[1]) < 7:
309
309
310 # This solution should work for sqlalchemy < 0.7.0, and should use
310 # This solution should work for sqlalchemy < 0.7.0, and should use
311 # proxy=TimerProxy() for execution time profiling
311 # proxy=TimerProxy() for execution time profiling
312
312
313 from sqlalchemy.pool import NullPool
313 from sqlalchemy.pool import NullPool
314 url = configuration[prefix + 'url']
314 url = configuration[prefix + 'url']
315
315
316 if url.startswith('sqlite'):
316 if url.startswith('sqlite'):
317 kwargs.update({'poolclass': NullPool})
317 kwargs.update({'poolclass': NullPool})
318 return efc(configuration, prefix, **kwargs)
318 return efc(configuration, prefix, **kwargs)
319 else:
319 else:
320 import time
320 import time
321 from sqlalchemy import event
321 from sqlalchemy import event
322 from sqlalchemy.engine import Engine
322 from sqlalchemy.engine import Engine
323
323
324 log = logging.getLogger('sqlalchemy.engine')
324 log = logging.getLogger('sqlalchemy.engine')
325 BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = xrange(30, 38)
325 BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = xrange(30, 38)
326 engine = efc(configuration, prefix, **kwargs)
326 engine = efc(configuration, prefix, **kwargs)
327
327
328 def color_sql(sql):
328 def color_sql(sql):
329 COLOR_SEQ = "\033[1;%dm"
329 COLOR_SEQ = "\033[1;%dm"
330 COLOR_SQL = YELLOW
330 COLOR_SQL = YELLOW
331 normal = '\x1b[0m'
331 normal = '\x1b[0m'
332 return ''.join([COLOR_SEQ % COLOR_SQL, sql, normal])
332 return ''.join([COLOR_SEQ % COLOR_SQL, sql, normal])
333
333
334 if configuration['debug']:
334 if configuration['debug']:
335 #attach events only for debug configuration
335 #attach events only for debug configuration
336
336
337 def before_cursor_execute(conn, cursor, statement,
337 def before_cursor_execute(conn, cursor, statement,
338 parameters, context, executemany):
338 parameters, context, executemany):
339 context._query_start_time = time.time()
339 context._query_start_time = time.time()
340 log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
340 log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
341
341
342 def after_cursor_execute(conn, cursor, statement,
342 def after_cursor_execute(conn, cursor, statement,
343 parameters, context, executemany):
343 parameters, context, executemany):
344 total = time.time() - context._query_start_time
344 total = time.time() - context._query_start_time
345 log.info(color_sql("<<<<< TOTAL TIME: %f <<<<<" % total))
345 log.info(color_sql("<<<<< TOTAL TIME: %f <<<<<" % total))
346
346
347 event.listen(engine, "before_cursor_execute",
347 event.listen(engine, "before_cursor_execute",
348 before_cursor_execute)
348 before_cursor_execute)
349 event.listen(engine, "after_cursor_execute",
349 event.listen(engine, "after_cursor_execute",
350 after_cursor_execute)
350 after_cursor_execute)
351
351
352 return engine
352 return engine
353
353
354
354
355 def age(prevdate, show_short_version=False, now=None):
355 def age(prevdate, show_short_version=False, now=None):
356 """
356 """
357 turns a datetime into an age string.
357 turns a datetime into an age string.
358 If show_short_version is True, it will generate a less accurate but shorter string,
358 If show_short_version is True, it will generate a less accurate but shorter string,
359 for example: 2 days ago, instead of 2 days and 23 hours ago.
359 for example: 2 days ago, instead of 2 days and 23 hours ago.
360
360
361 :param prevdate: datetime object
361 :param prevdate: datetime object
362 :param show_short_version: if it should approximate the date and return a shorter string
362 :param show_short_version: if it should approximate the date and return a shorter string
363 :rtype: unicode
363 :rtype: unicode
364 :returns: unicode words describing age
364 :returns: unicode words describing age
365 """
365 """
366 now = now or datetime.datetime.now()
366 now = now or datetime.datetime.now()
367 order = ['year', 'month', 'day', 'hour', 'minute', 'second']
367 order = ['year', 'month', 'day', 'hour', 'minute', 'second']
368 deltas = {}
368 deltas = {}
369 future = False
369 future = False
370
370
371 if prevdate > now:
371 if prevdate > now:
372 now, prevdate = prevdate, now
372 now, prevdate = prevdate, now
373 future = True
373 future = True
374 if future:
374 if future:
375 prevdate = prevdate.replace(microsecond=0)
375 prevdate = prevdate.replace(microsecond=0)
376 # Get date parts deltas
376 # Get date parts deltas
377 from dateutil import relativedelta
377 from dateutil import relativedelta
378 for part in order:
378 for part in order:
379 d = relativedelta.relativedelta(now, prevdate)
379 d = relativedelta.relativedelta(now, prevdate)
380 deltas[part] = getattr(d, part + 's')
380 deltas[part] = getattr(d, part + 's')
381
381
382 # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
382 # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
383 # not 1 hour, -59 minutes and -59 seconds)
383 # not 1 hour, -59 minutes and -59 seconds)
384 for num, length in [(5, 60), (4, 60), (3, 24)]: # seconds, minutes, hours
384 for num, length in [(5, 60), (4, 60), (3, 24)]: # seconds, minutes, hours
385 part = order[num]
385 part = order[num]
386 carry_part = order[num - 1]
386 carry_part = order[num - 1]
387
387
388 if deltas[part] < 0:
388 if deltas[part] < 0:
389 deltas[part] += length
389 deltas[part] += length
390 deltas[carry_part] -= 1
390 deltas[carry_part] -= 1
391
391
392 # Same thing for days except that the increment depends on the (variable)
392 # Same thing for days except that the increment depends on the (variable)
393 # number of days in the month
393 # number of days in the month
394 month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
394 month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
395 if deltas['day'] < 0:
395 if deltas['day'] < 0:
396 if prevdate.month == 2 and (prevdate.year % 4 == 0 and
396 if prevdate.month == 2 and (prevdate.year % 4 == 0 and
397 (prevdate.year % 100 != 0 or prevdate.year % 400 == 0)):
397 (prevdate.year % 100 != 0 or prevdate.year % 400 == 0)):
398 deltas['day'] += 29
398 deltas['day'] += 29
399 else:
399 else:
400 deltas['day'] += month_lengths[prevdate.month - 1]
400 deltas['day'] += month_lengths[prevdate.month - 1]
401
401
402 deltas['month'] -= 1
402 deltas['month'] -= 1
403
403
404 if deltas['month'] < 0:
404 if deltas['month'] < 0:
405 deltas['month'] += 12
405 deltas['month'] += 12
406 deltas['year'] -= 1
406 deltas['year'] -= 1
407
407
408 # Format the result
408 # Format the result
409 fmt_funcs = {
409 fmt_funcs = {
410 'year': lambda d: ungettext(u'%d year', '%d years', d) % d,
410 'year': lambda d: ungettext(u'%d year', '%d years', d) % d,
411 'month': lambda d: ungettext(u'%d month', '%d months', d) % d,
411 'month': lambda d: ungettext(u'%d month', '%d months', d) % d,
412 'day': lambda d: ungettext(u'%d day', '%d days', d) % d,
412 'day': lambda d: ungettext(u'%d day', '%d days', d) % d,
413 'hour': lambda d: ungettext(u'%d hour', '%d hours', d) % d,
413 'hour': lambda d: ungettext(u'%d hour', '%d hours', d) % d,
414 'minute': lambda d: ungettext(u'%d minute', '%d minutes', d) % d,
414 'minute': lambda d: ungettext(u'%d minute', '%d minutes', d) % d,
415 'second': lambda d: ungettext(u'%d second', '%d seconds', d) % d,
415 'second': lambda d: ungettext(u'%d second', '%d seconds', d) % d,
416 }
416 }
417
417
418 for i, part in enumerate(order):
418 for i, part in enumerate(order):
419 value = deltas[part]
419 value = deltas[part]
420 if value == 0:
420 if value == 0:
421 continue
421 continue
422
422
423 if i < 5:
423 if i < 5:
424 sub_part = order[i + 1]
424 sub_part = order[i + 1]
425 sub_value = deltas[sub_part]
425 sub_value = deltas[sub_part]
426 else:
426 else:
427 sub_value = 0
427 sub_value = 0
428
428
429 if sub_value == 0 or show_short_version:
429 if sub_value == 0 or show_short_version:
430 if future:
430 if future:
431 return _(u'in %s') % fmt_funcs[part](value)
431 return _(u'in %s') % fmt_funcs[part](value)
432 else:
432 else:
433 return _(u'%s ago') % fmt_funcs[part](value)
433 return _(u'%s ago') % fmt_funcs[part](value)
434 if future:
434 if future:
435 return _(u'in %s and %s') % (fmt_funcs[part](value),
435 return _(u'in %s and %s') % (fmt_funcs[part](value),
436 fmt_funcs[sub_part](sub_value))
436 fmt_funcs[sub_part](sub_value))
437 else:
437 else:
438 return _(u'%s and %s ago') % (fmt_funcs[part](value),
438 return _(u'%s and %s ago') % (fmt_funcs[part](value),
439 fmt_funcs[sub_part](sub_value))
439 fmt_funcs[sub_part](sub_value))
440
440
441 return _(u'just now')
441 return _(u'just now')
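# Illustrative sketch (hypothetical dates; rendering the strings requires a
# configured pylons i18n environment, since age() uses _ and ungettext):
#   >>> import datetime
#   >>> d = datetime.datetime.now() - datetime.timedelta(days=2, hours=23)
#   >>> age(d)                           # u'2 days and 23 hours ago'
#   >>> age(d, show_short_version=True)  # u'2 days ago'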
442
442
443
443
444 def uri_filter(uri):
444 def uri_filter(uri):
445 """
445 """
446 Removes user:password from given url string
446 Removes user:password from given url string
447
447
448 :param uri:
448 :param uri:
449 :rtype: unicode
449 :rtype: unicode
450 :returns: filtered list of strings
450 :returns: filtered list of strings
451 """
451 """
452 if not uri:
452 if not uri:
453 return ''
453 return ''
454
454
455 proto = ''
455 proto = ''
456
456
457 for pat in ('https://', 'http://'):
457 for pat in ('https://', 'http://'):
458 if uri.startswith(pat):
458 if uri.startswith(pat):
459 uri = uri[len(pat):]
459 uri = uri[len(pat):]
460 proto = pat
460 proto = pat
461 break
461 break
462
462
463 # remove passwords and username
463 # remove passwords and username
464 uri = uri[uri.find('@') + 1:]
464 uri = uri[uri.find('@') + 1:]
465
465
466 # get the port
466 # get the port
467 cred_pos = uri.find(':')
467 cred_pos = uri.find(':')
468 if cred_pos == -1:
468 if cred_pos == -1:
469 host, port = uri, None
469 host, port = uri, None
470 else:
470 else:
471 host, port = uri[:cred_pos], uri[cred_pos + 1:]
471 host, port = uri[:cred_pos], uri[cred_pos + 1:]
472
472
473 return filter(None, [proto, host, port])
473 return filter(None, [proto, host, port])
474
474
475
475
476 def credentials_filter(uri):
476 def credentials_filter(uri):
477 """
477 """
478 Returns a url with removed credentials
478 Returns a url with removed credentials
479
479
480 :param uri:
480 :param uri:
481 """
481 """
482
482
483 uri = uri_filter(uri)
483 uri = uri_filter(uri)
484 #check if we have port
484 #check if we have port
485 if len(uri) > 2 and uri[2]:
485 if len(uri) > 2 and uri[2]:
486 uri[2] = ':' + uri[2]
486 uri[2] = ':' + uri[2]
487
487
488 return ''.join(uri)
488 return ''.join(uri)
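# Illustrative sketch (hypothetical URL, using the two helpers above):
#   >>> credentials_filter('http://user:secret@example.com:8080/my-repo')
#   'http://example.com:8080/my-repo'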
489
489
490
490
491 def get_changeset_safe(repo, rev):
491 def get_changeset_safe(repo, rev):
492 """
492 """
493 Safe version of get_changeset; if the changeset doesn't exist for a
493 Safe version of get_changeset; if the changeset doesn't exist for a
494 repo it returns a Dummy one instead
494 repo it returns a Dummy one instead
495
495
496 :param repo:
496 :param repo:
497 :param rev:
497 :param rev:
498 """
498 """
499 from rhodecode.lib.vcs.backends.base import BaseRepository
499 from rhodecode.lib.vcs.backends.base import BaseRepository
500 from rhodecode.lib.vcs.exceptions import RepositoryError
500 from rhodecode.lib.vcs.exceptions import RepositoryError
501 from rhodecode.lib.vcs.backends.base import EmptyChangeset
501 from rhodecode.lib.vcs.backends.base import EmptyChangeset
502 if not isinstance(repo, BaseRepository):
502 if not isinstance(repo, BaseRepository):
503 raise Exception('You must pass a Repository '
503 raise Exception('You must pass a Repository '
504 'object as first argument, got %s' % type(repo))
504 'object as first argument, got %s' % type(repo))
505
505
506 try:
506 try:
507 cs = repo.get_changeset(rev)
507 cs = repo.get_changeset(rev)
508 except RepositoryError:
508 except RepositoryError:
509 cs = EmptyChangeset(requested_revision=rev)
509 cs = EmptyChangeset(requested_revision=rev)
510 return cs
510 return cs
511
511
512
512
513 def datetime_to_time(dt):
513 def datetime_to_time(dt):
514 if dt:
514 if dt:
515 return time.mktime(dt.timetuple())
515 return time.mktime(dt.timetuple())
516
516
517
517
518 def time_to_datetime(tm):
518 def time_to_datetime(tm):
519 if tm:
519 if tm:
520 if isinstance(tm, basestring):
520 if isinstance(tm, basestring):
521 try:
521 try:
522 tm = float(tm)
522 tm = float(tm)
523 except ValueError:
523 except ValueError:
524 return
524 return
525 return datetime.datetime.fromtimestamp(tm)
525 return datetime.datetime.fromtimestamp(tm)
526
526
527 MENTIONS_REGEX = r'(?:^@|\s@)([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)(?:\s{1})'
527 MENTIONS_REGEX = r'(?:^@|\s@)([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)(?:\s{1})'
528
528
529
529
530 def extract_mentioned_users(s):
530 def extract_mentioned_users(s):
531 """
531 """
532 Returns unique usernames that are @mentioned in the given string s
532 Returns unique usernames that are @mentioned in the given string s
533
533
534 :param s: string to get mentions
534 :param s: string to get mentions
535 """
535 """
536 usrs = set()
536 usrs = set()
537 for username in re.findall(MENTIONS_REGEX, s):
537 for username in re.findall(MENTIONS_REGEX, s):
538 usrs.add(username)
538 usrs.add(username)
539
539
540 return sorted(list(usrs), key=lambda k: k.lower())
540 return sorted(list(usrs), key=lambda k: k.lower())
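# Illustrative sketch (hypothetical usernames; note that MENTIONS_REGEX
# requires whitespace after each @mention):
#   >>> extract_mentioned_users('hi @john and @adam_k please review @john ')
#   ['adam_k', 'john']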
541
541
542
542
543 class AttributeDict(dict):
543 class AttributeDict(dict):
544 def __getattr__(self, attr):
544 def __getattr__(self, attr):
545 return self.get(attr, None)
545 return self.get(attr, None)
546 __setattr__ = dict.__setitem__
546 __setattr__ = dict.__setitem__
547 __delattr__ = dict.__delitem__
547 __delattr__ = dict.__delitem__
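# Illustrative sketch: AttributeDict exposes dict keys as attributes and
# returns None for missing ones instead of raising AttributeError:
#   >>> d = AttributeDict(action='push')
#   >>> d.action, d['action'], d.missing
#   ('push', 'push', None)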
548
548
549
549
550 def fix_PATH(os_=None):
550 def fix_PATH(os_=None):
551 """
551 """
552 Get the directory of the currently active python executable and prepend it to the
552 Get the directory of the currently active python executable and prepend it to the
553 PATH variable, to fix issues with subprocess calls and different python versions
553 PATH variable, to fix issues with subprocess calls and different python versions
554 """
554 """
555 if os_ is None:
555 if os_ is None:
556 import os
556 import os
557 else:
557 else:
558 os = os_
558 os = os_
559
559
560 cur_path = os.path.split(sys.executable)[0]
560 cur_path = os.path.split(sys.executable)[0]
561 if not os.environ['PATH'].startswith(cur_path):
561 if not os.environ['PATH'].startswith(cur_path):
562 os.environ['PATH'] = '%s:%s' % (cur_path, os.environ['PATH'])
562 os.environ['PATH'] = '%s:%s' % (cur_path, os.environ['PATH'])
563
563
564
564
565 def obfuscate_url_pw(engine):
565 def obfuscate_url_pw(engine):
566 _url = engine or ''
566 _url = engine or ''
567 from sqlalchemy.engine import url as sa_url
567 from sqlalchemy.engine import url as sa_url
568 try:
568 try:
569 _url = sa_url.make_url(engine)
569 _url = sa_url.make_url(engine)
570 if _url.password:
570 if _url.password:
571 _url.password = 'XXXXX'
571 _url.password = 'XXXXX'
572 except Exception:
572 except Exception:
573 pass
573 pass
574 return str(_url)
574 return str(_url)
575
575
576
576
577 def get_server_url(environ):
577 def get_server_url(environ):
578 req = webob.Request(environ)
578 req = webob.Request(environ)
579 return req.host_url + req.script_name
579 return req.host_url + req.script_name
580
580
581
581
582 def _extract_extras(env=None):
582 def _extract_extras(env=None):
583 """
583 """
584 Extracts the rc extras data from os.environ and wraps it into an
584 Extracts the rc extras data from os.environ and wraps it into an
585 AttributeDict object
585 AttributeDict object
586 """
586 """
587 if not env:
587 if not env:
588 env = os.environ
588 env = os.environ
589
589
590 try:
590 try:
591 rc_extras = json.loads(env['RC_SCM_DATA'])
591 rc_extras = json.loads(env['RC_SCM_DATA'])
592 except Exception:
592 except Exception:
593 print os.environ
593 print os.environ
594 print >> sys.stderr, traceback.format_exc()
594 print >> sys.stderr, traceback.format_exc()
595 rc_extras = {}
595 rc_extras = {}
596
596
597 try:
597 try:
598 for k in ['username', 'repository', 'locked_by', 'scm', 'make_lock',
598 for k in ['username', 'repository', 'locked_by', 'scm', 'make_lock',
599 'action', 'ip']:
599 'action', 'ip']:
600 rc_extras[k]
600 rc_extras[k]
601 except KeyError, e:
601 except KeyError, e:
602 raise Exception('Missing key %s in os.environ %s' % (e, rc_extras))
602 raise Exception('Missing key %s in os.environ %s' % (e, rc_extras))
603
603
604 return AttributeDict(rc_extras)
604 return AttributeDict(rc_extras)
605
605
606
606
607 def _set_extras(extras):
607 def _set_extras(extras):
608 os.environ['RC_SCM_DATA'] = json.dumps(extras)
608 os.environ['RC_SCM_DATA'] = json.dumps(extras)
609
609
610
610
611 def unique_id(hexlen=32):
611 def unique_id(hexlen=32):
612 alphabet = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz"
612 alphabet = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz"
613 return suuid(truncate_to=hexlen, alphabet=alphabet)
613 return suuid(truncate_to=hexlen, alphabet=alphabet)
614
614
615
615
616 def suuid(url=None, truncate_to=22, alphabet=None):
616 def suuid(url=None, truncate_to=22, alphabet=None):
617 """
617 """
618 Generate and return a short URL safe UUID.
618 Generate and return a short URL safe UUID.
619
619
620 If the url parameter is provided, set the namespace to the provided
620 If the url parameter is provided, set the namespace to the provided
621 URL and generate a UUID.
621 URL and generate a UUID.
622
622
623 :param url: url to get the uuid for
623 :param url: url to get the uuid for
624 :param truncate_to: truncate the basic 22-character UUID to a shorter version
624 :param truncate_to: truncate the basic 22-character UUID to a shorter version
625
625
626 The IDs won't be universally unique any longer, but the probability of
626 The IDs won't be universally unique any longer, but the probability of
627 a collision will still be very low.
627 a collision will still be very low.
628 """
628 """
629 # Define our alphabet.
629 # Define our alphabet.
630 _ALPHABET = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ"
630 _ALPHABET = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ"
631
631
632 # If no URL is given, generate a random UUID.
632 # If no URL is given, generate a random UUID.
633 if url is None:
633 if url is None:
634 unique_id = uuid.uuid4().int
634 unique_id = uuid.uuid4().int
635 else:
635 else:
636 unique_id = uuid.uuid3(uuid.NAMESPACE_URL, url).int
636 unique_id = uuid.uuid3(uuid.NAMESPACE_URL, url).int
637
637
638 alphabet_length = len(_ALPHABET)
638 alphabet_length = len(_ALPHABET)
639 output = []
639 output = []
640 while unique_id > 0:
640 while unique_id > 0:
641 digit = unique_id % alphabet_length
641 digit = unique_id % alphabet_length
642 output.append(_ALPHABET[digit])
642 output.append(_ALPHABET[digit])
643 unique_id = int(unique_id / alphabet_length)
643 unique_id = int(unique_id / alphabet_length)
644 return "".join(output)[:truncate_to]
644 return "".join(output)[:truncate_to]
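# Illustrative sketch: with a url the id is derived from uuid3 and is
# therefore stable; without one it is random (hypothetical URL below):
#   >>> suuid(url='http://example.com') == suuid(url='http://example.com')
#   True
#   >>> len(unique_id()) <= 32
#   True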
645
646 def get_current_rhodecode_user():
647 """
648 Gets the rhodecode user from the threadlocal tmpl_context variable if it's
649 defined, else returns None.
650 """
651 from pylons import tmpl_context
652 if hasattr(tmpl_context, 'rhodecode_user'):
653 return tmpl_context.rhodecode_user
654
655 return None
@@ -1,755 +1,757 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """
2 """
3 rhodecode.model.repo
3 rhodecode.model.repo
4 ~~~~~~~~~~~~~~~~~~~~
4 ~~~~~~~~~~~~~~~~~~~~
5
5
6 Repository model for rhodecode
6 Repository model for rhodecode
7
7
8 :created_on: Jun 5, 2010
8 :created_on: Jun 5, 2010
9 :author: marcink
9 :author: marcink
10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 :license: GPLv3, see COPYING for more details.
11 :license: GPLv3, see COPYING for more details.
12 """
12 """
13 # This program is free software: you can redistribute it and/or modify
13 # This program is free software: you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation, either version 3 of the License, or
15 # the Free Software Foundation, either version 3 of the License, or
16 # (at your option) any later version.
16 # (at your option) any later version.
17 #
17 #
18 # This program is distributed in the hope that it will be useful,
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
21 # GNU General Public License for more details.
22 #
22 #
23 # You should have received a copy of the GNU General Public License
23 # You should have received a copy of the GNU General Public License
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 from __future__ import with_statement
25 from __future__ import with_statement
26 import os
26 import os
27 import shutil
27 import shutil
28 import logging
28 import logging
29 import traceback
29 import traceback
30 from datetime import datetime
30 from datetime import datetime
31
31
32 from rhodecode.lib.vcs.backends import get_backend
32 from rhodecode.lib.vcs.backends import get_backend
33 from rhodecode.lib.compat import json
33 from rhodecode.lib.compat import json
34 from rhodecode.lib.utils2 import LazyProperty, safe_str, safe_unicode,\
34 from rhodecode.lib.utils2 import LazyProperty, safe_str, safe_unicode,\
35 remove_prefix, obfuscate_url_pw
35 remove_prefix, obfuscate_url_pw, get_current_rhodecode_user
36 from rhodecode.lib.caching_query import FromCache
36 from rhodecode.lib.caching_query import FromCache
37 from rhodecode.lib.hooks import log_create_repository, log_delete_repository
37 from rhodecode.lib.hooks import log_create_repository, log_delete_repository
38
38
39 from rhodecode.model import BaseModel
39 from rhodecode.model import BaseModel
40 from rhodecode.model.db import Repository, UserRepoToPerm, User, Permission, \
40 from rhodecode.model.db import Repository, UserRepoToPerm, User, Permission, \
41 Statistics, UserGroup, UserGroupRepoToPerm, RhodeCodeUi, RepoGroup,\
41 Statistics, UserGroup, UserGroupRepoToPerm, RhodeCodeUi, RepoGroup,\
42 RhodeCodeSetting, RepositoryField
42 RhodeCodeSetting, RepositoryField
43 from rhodecode.lib import helpers as h
43 from rhodecode.lib import helpers as h
44 from rhodecode.lib.auth import HasRepoPermissionAny, HasUserGroupPermissionAny
44 from rhodecode.lib.auth import HasRepoPermissionAny, HasUserGroupPermissionAny
45 from rhodecode.lib.exceptions import AttachedForksError
45 from rhodecode.lib.exceptions import AttachedForksError
46 from rhodecode.model.scm import UserGroupList
46 from rhodecode.model.scm import UserGroupList
47
47
48 log = logging.getLogger(__name__)
48 log = logging.getLogger(__name__)
49
49
50
50
51 class RepoModel(BaseModel):
51 class RepoModel(BaseModel):
52
52
53 cls = Repository
53 cls = Repository
54 URL_SEPARATOR = Repository.url_sep()
54 URL_SEPARATOR = Repository.url_sep()
55
55
56 def _get_user_group(self, users_group):
56 def _get_user_group(self, users_group):
57 return self._get_instance(UserGroup, users_group,
57 return self._get_instance(UserGroup, users_group,
58 callback=UserGroup.get_by_group_name)
58 callback=UserGroup.get_by_group_name)
59
59
60 def _get_repo_group(self, repos_group):
60 def _get_repo_group(self, repos_group):
61 return self._get_instance(RepoGroup, repos_group,
61 return self._get_instance(RepoGroup, repos_group,
62 callback=RepoGroup.get_by_group_name)
62 callback=RepoGroup.get_by_group_name)
63
63
64 def _create_default_perms(self, repository, private):
64 def _create_default_perms(self, repository, private):
65 # create default permission
65 # create default permission
66 default = 'repository.read'
66 default = 'repository.read'
67 def_user = User.get_default_user()
67 def_user = User.get_default_user()
68 for p in def_user.user_perms:
68 for p in def_user.user_perms:
69 if p.permission.permission_name.startswith('repository.'):
69 if p.permission.permission_name.startswith('repository.'):
70 default = p.permission.permission_name
70 default = p.permission.permission_name
71 break
71 break
72
72
73 default_perm = 'repository.none' if private else default
73 default_perm = 'repository.none' if private else default
74
74
75 repo_to_perm = UserRepoToPerm()
75 repo_to_perm = UserRepoToPerm()
76 repo_to_perm.permission = Permission.get_by_key(default_perm)
76 repo_to_perm.permission = Permission.get_by_key(default_perm)
77
77
78 repo_to_perm.repository = repository
78 repo_to_perm.repository = repository
79 repo_to_perm.user_id = def_user.user_id
79 repo_to_perm.user_id = def_user.user_id
80
80
81 return repo_to_perm
81 return repo_to_perm
82
82
83 @LazyProperty
83 @LazyProperty
84 def repos_path(self):
84 def repos_path(self):
85 """
85 """
86 Gets the repositories root path from the database
86 Gets the repositories root path from the database
87 """
87 """
88
88
89 q = self.sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one()
89 q = self.sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one()
90 return q.ui_value
90 return q.ui_value
91
91
92 def get(self, repo_id, cache=False):
92 def get(self, repo_id, cache=False):
93 repo = self.sa.query(Repository)\
93 repo = self.sa.query(Repository)\
94 .filter(Repository.repo_id == repo_id)
94 .filter(Repository.repo_id == repo_id)
95
95
96 if cache:
96 if cache:
97 repo = repo.options(FromCache("sql_cache_short",
97 repo = repo.options(FromCache("sql_cache_short",
98 "get_repo_%s" % repo_id))
98 "get_repo_%s" % repo_id))
99 return repo.scalar()
99 return repo.scalar()
100
100
101 def get_repo(self, repository):
101 def get_repo(self, repository):
102 return self._get_repo(repository)
102 return self._get_repo(repository)
103
103
104 def get_by_repo_name(self, repo_name, cache=False):
104 def get_by_repo_name(self, repo_name, cache=False):
105 repo = self.sa.query(Repository)\
105 repo = self.sa.query(Repository)\
106 .filter(Repository.repo_name == repo_name)
106 .filter(Repository.repo_name == repo_name)
107
107
108 if cache:
108 if cache:
109 repo = repo.options(FromCache("sql_cache_short",
109 repo = repo.options(FromCache("sql_cache_short",
110 "get_repo_%s" % repo_name))
110 "get_repo_%s" % repo_name))
111 return repo.scalar()
111 return repo.scalar()
112
112
113 def get_all_user_repos(self, user):
113 def get_all_user_repos(self, user):
114 """
114 """
115 Gets all repositories that the user has at least read access to
115 Gets all repositories that the user has at least read access to
116
116
117 :param user:
117 :param user:
118 """
118 """
119 from rhodecode.lib.auth import AuthUser
119 from rhodecode.lib.auth import AuthUser
120 user = self._get_user(user)
120 user = self._get_user(user)
121 repos = AuthUser(user_id=user.user_id).permissions['repositories']
121 repos = AuthUser(user_id=user.user_id).permissions['repositories']
122 access_check = lambda r: r[1] in ['repository.read',
122 access_check = lambda r: r[1] in ['repository.read',
123 'repository.write',
123 'repository.write',
124 'repository.admin']
124 'repository.admin']
125 repos = [x[0] for x in filter(access_check, repos.items())]
125 repos = [x[0] for x in filter(access_check, repos.items())]
126 return Repository.query().filter(Repository.repo_name.in_(repos))
126 return Repository.query().filter(Repository.repo_name.in_(repos))
127
127
128 def get_users_js(self):
128 def get_users_js(self):
129 users = self.sa.query(User).filter(User.active == True).all()
129 users = self.sa.query(User).filter(User.active == True).all()
130 return json.dumps([
130 return json.dumps([
131 {
131 {
132 'id': u.user_id,
132 'id': u.user_id,
133 'fname': u.name,
133 'fname': u.name,
134 'lname': u.lastname,
134 'lname': u.lastname,
135 'nname': u.username,
135 'nname': u.username,
136 'gravatar_lnk': h.gravatar_url(u.email, 14)
136 'gravatar_lnk': h.gravatar_url(u.email, 14)
137 } for u in users]
137 } for u in users]
138 )
138 )
139
139
140 def get_users_groups_js(self):
140 def get_users_groups_js(self):
141 users_groups = self.sa.query(UserGroup)\
141 users_groups = self.sa.query(UserGroup)\
142 .filter(UserGroup.users_group_active == True).all()
142 .filter(UserGroup.users_group_active == True).all()
143 users_groups = UserGroupList(users_groups, perm_set=['usergroup.read',
143 users_groups = UserGroupList(users_groups, perm_set=['usergroup.read',
144 'usergroup.write',
144 'usergroup.write',
145 'usergroup.admin'])
145 'usergroup.admin'])
146 return json.dumps([
146 return json.dumps([
147 {
147 {
148 'id': gr.users_group_id,
148 'id': gr.users_group_id,
149 'grname': gr.users_group_name,
149 'grname': gr.users_group_name,
150 'grmembers': len(gr.members),
150 'grmembers': len(gr.members),
151 } for gr in users_groups]
151 } for gr in users_groups]
152 )
152 )
153
153
154 @classmethod
154 @classmethod
155 def _render_datatable(cls, tmpl, *args, **kwargs):
155 def _render_datatable(cls, tmpl, *args, **kwargs):
156 import rhodecode
156 import rhodecode
157 from pylons import tmpl_context as c
157 from pylons import tmpl_context as c
158 from pylons.i18n.translation import _
158 from pylons.i18n.translation import _
159
159
160 _tmpl_lookup = rhodecode.CONFIG['pylons.app_globals'].mako_lookup
160 _tmpl_lookup = rhodecode.CONFIG['pylons.app_globals'].mako_lookup
161 template = _tmpl_lookup.get_template('data_table/_dt_elements.html')
161 template = _tmpl_lookup.get_template('data_table/_dt_elements.html')
162
162
163 tmpl = template.get_def(tmpl)
163 tmpl = template.get_def(tmpl)
164 kwargs.update(dict(_=_, h=h, c=c))
164 kwargs.update(dict(_=_, h=h, c=c))
165 return tmpl.render(*args, **kwargs)
165 return tmpl.render(*args, **kwargs)
166
166
167 @classmethod
167 @classmethod
168 def update_repoinfo(cls, repositories=None):
168 def update_repoinfo(cls, repositories=None):
169 if not repositories:
169 if not repositories:
170 repositories = Repository.getAll()
170 repositories = Repository.getAll()
171 for repo in repositories:
171 for repo in repositories:
172 repo.update_changeset_cache()
172 repo.update_changeset_cache()
173
173
174 def get_repos_as_dict(self, repos_list=None, admin=False, perm_check=True,
174 def get_repos_as_dict(self, repos_list=None, admin=False, perm_check=True,
175 super_user_actions=False):
175 super_user_actions=False):
176 _render = self._render_datatable
176 _render = self._render_datatable
177 from pylons import tmpl_context as c
177 from pylons import tmpl_context as c
178
178
179 def quick_menu(repo_name):
179 def quick_menu(repo_name):
180 return _render('quick_menu', repo_name)
180 return _render('quick_menu', repo_name)
181
181
182 def repo_lnk(name, rtype, private, fork_of):
182 def repo_lnk(name, rtype, private, fork_of):
183 return _render('repo_name', name, rtype, private, fork_of,
183 return _render('repo_name', name, rtype, private, fork_of,
184 short_name=not admin, admin=False)
184 short_name=not admin, admin=False)
185
185
186 def last_change(last_change):
186 def last_change(last_change):
187 return _render("last_change", last_change)
187 return _render("last_change", last_change)
188
188
189 def rss_lnk(repo_name):
189 def rss_lnk(repo_name):
190 return _render("rss", repo_name)
190 return _render("rss", repo_name)
191
191
192 def atom_lnk(repo_name):
192 def atom_lnk(repo_name):
193 return _render("atom", repo_name)
193 return _render("atom", repo_name)
194
194
195 def last_rev(repo_name, cs_cache):
195 def last_rev(repo_name, cs_cache):
196 return _render('revision', repo_name, cs_cache.get('revision'),
196 return _render('revision', repo_name, cs_cache.get('revision'),
197 cs_cache.get('raw_id'), cs_cache.get('author'),
197 cs_cache.get('raw_id'), cs_cache.get('author'),
198 cs_cache.get('message'))
198 cs_cache.get('message'))
199
199
200 def desc(desc):
200 def desc(desc):
201 if c.visual.stylify_metatags:
201 if c.visual.stylify_metatags:
202 return h.urlify_text(h.desc_stylize(h.truncate(desc, 60)))
202 return h.urlify_text(h.desc_stylize(h.truncate(desc, 60)))
203 else:
203 else:
204 return h.urlify_text(h.truncate(desc, 60))
204 return h.urlify_text(h.truncate(desc, 60))
205
205
206 def repo_actions(repo_name):
206 def repo_actions(repo_name):
207 return _render('repo_actions', repo_name, super_user_actions)
207 return _render('repo_actions', repo_name, super_user_actions)
208
208
209 def owner_actions(user_id, username):
209 def owner_actions(user_id, username):
210 return _render('user_name', user_id, username)
210 return _render('user_name', user_id, username)
211
211
212 repos_data = []
212 repos_data = []
213 for repo in repos_list:
213 for repo in repos_list:
214 if perm_check:
214 if perm_check:
215 # check permission at this level
215 # check permission at this level
216 if not HasRepoPermissionAny(
216 if not HasRepoPermissionAny(
217 'repository.read', 'repository.write', 'repository.admin'
217 'repository.read', 'repository.write', 'repository.admin'
218 )(repo.repo_name, 'get_repos_as_dict check'):
218 )(repo.repo_name, 'get_repos_as_dict check'):
219 continue
219 continue
220 cs_cache = repo.changeset_cache
220 cs_cache = repo.changeset_cache
221 row = {
221 row = {
222 "menu": quick_menu(repo.repo_name),
222 "menu": quick_menu(repo.repo_name),
223 "raw_name": repo.repo_name.lower(),
223 "raw_name": repo.repo_name.lower(),
224 "name": repo_lnk(repo.repo_name, repo.repo_type,
224 "name": repo_lnk(repo.repo_name, repo.repo_type,
225 repo.private, repo.fork),
225 repo.private, repo.fork),
226 "last_change": last_change(repo.last_db_change),
226 "last_change": last_change(repo.last_db_change),
227 "last_changeset": last_rev(repo.repo_name, cs_cache),
227 "last_changeset": last_rev(repo.repo_name, cs_cache),
228 "raw_tip": cs_cache.get('revision'),
228 "raw_tip": cs_cache.get('revision'),
229 "desc": desc(repo.description),
229 "desc": desc(repo.description),
230 "owner": h.person(repo.user.username),
230 "owner": h.person(repo.user.username),
231 "rss": rss_lnk(repo.repo_name),
231 "rss": rss_lnk(repo.repo_name),
232 "atom": atom_lnk(repo.repo_name),
232 "atom": atom_lnk(repo.repo_name),
233
233
234 }
234 }
235 if admin:
235 if admin:
236 row.update({
236 row.update({
237 "action": repo_actions(repo.repo_name),
237 "action": repo_actions(repo.repo_name),
238 "owner": owner_actions(repo.user.user_id,
238 "owner": owner_actions(repo.user.user_id,
239 h.person(repo.user.username))
239 h.person(repo.user.username))
240 })
240 })
241 repos_data.append(row)
241 repos_data.append(row)
242
242
243 return {
243 return {
244 "totalRecords": len(repos_list),
244 "totalRecords": len(repos_list),
245 "startIndex": 0,
245 "startIndex": 0,
246 "sort": "name",
246 "sort": "name",
247 "dir": "asc",
247 "dir": "asc",
248 "records": repos_data
248 "records": repos_data
249 }
249 }
250
250
251 def _get_defaults(self, repo_name):
251 def _get_defaults(self, repo_name):
252 """
252 """
253 Gets information about a repository, and returns a dict for
253 Gets information about a repository, and returns a dict for
254 usage in forms
254 usage in forms
255
255
256 :param repo_name:
256 :param repo_name:
257 """
257 """
258
258
259 repo_info = Repository.get_by_repo_name(repo_name)
259 repo_info = Repository.get_by_repo_name(repo_name)
260
260
261 if repo_info is None:
261 if repo_info is None:
262 return None
262 return None
263
263
264 defaults = repo_info.get_dict()
264 defaults = repo_info.get_dict()
265 group, repo_name, repo_name_full = repo_info.groups_and_repo
265 group, repo_name, repo_name_full = repo_info.groups_and_repo
266 defaults['repo_name'] = repo_name
266 defaults['repo_name'] = repo_name
267 defaults['repo_group'] = getattr(group[-1] if group else None,
267 defaults['repo_group'] = getattr(group[-1] if group else None,
268 'group_id', None)
268 'group_id', None)
269
269
270 for strip, k in [(0, 'repo_type'), (1, 'repo_enable_downloads'),
270 for strip, k in [(0, 'repo_type'), (1, 'repo_enable_downloads'),
271 (1, 'repo_description'), (1, 'repo_enable_locking'),
271 (1, 'repo_description'), (1, 'repo_enable_locking'),
272 (1, 'repo_landing_rev'), (0, 'clone_uri'),
272 (1, 'repo_landing_rev'), (0, 'clone_uri'),
273 (1, 'repo_private'), (1, 'repo_enable_statistics')]:
273 (1, 'repo_private'), (1, 'repo_enable_statistics')]:
274 attr = k
274 attr = k
275 if strip:
275 if strip:
276 attr = remove_prefix(k, 'repo_')
276 attr = remove_prefix(k, 'repo_')
277
277
278 defaults[k] = defaults[attr]
278 defaults[k] = defaults[attr]
279
279
280 # fill owner
280 # fill owner
281 if repo_info.user:
281 if repo_info.user:
282 defaults.update({'user': repo_info.user.username})
282 defaults.update({'user': repo_info.user.username})
283 else:
283 else:
284 replacement_user = User.query().filter(User.admin ==
284 replacement_user = User.query().filter(User.admin ==
285 True).first().username
285 True).first().username
286 defaults.update({'user': replacement_user})
286 defaults.update({'user': replacement_user})
287
287
288 # fill repository users
288 # fill repository users
289 for p in repo_info.repo_to_perm:
289 for p in repo_info.repo_to_perm:
290 defaults.update({'u_perm_%s' % p.user.username:
290 defaults.update({'u_perm_%s' % p.user.username:
291 p.permission.permission_name})
291 p.permission.permission_name})
292
292
293 # fill repository groups
293 # fill repository groups
294 for p in repo_info.users_group_to_perm:
294 for p in repo_info.users_group_to_perm:
295 defaults.update({'g_perm_%s' % p.users_group.users_group_name:
295 defaults.update({'g_perm_%s' % p.users_group.users_group_name:
296 p.permission.permission_name})
296 p.permission.permission_name})
297
297
298 return defaults
298 return defaults
299
299
300 def update(self, org_repo_name, **kwargs):
300 def update(self, org_repo_name, **kwargs):
301 try:
301 try:
302 cur_repo = self.get_by_repo_name(org_repo_name, cache=False)
302 cur_repo = self.get_by_repo_name(org_repo_name, cache=False)
303
303
304 if 'user' in kwargs:
304 if 'user' in kwargs:
305 cur_repo.user = User.get_by_username(kwargs['user'])
305 cur_repo.user = User.get_by_username(kwargs['user'])
306
306
307 if 'repo_group' in kwargs:
307 if 'repo_group' in kwargs:
308 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
308 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
309
309
310 for strip, k in [(1, 'repo_enable_downloads'),
310 for strip, k in [(1, 'repo_enable_downloads'),
311 (1, 'repo_description'), (1, 'repo_enable_locking'),
311 (1, 'repo_description'), (1, 'repo_enable_locking'),
312 (1, 'repo_landing_rev'), (0, 'clone_uri'),
312 (1, 'repo_landing_rev'), (0, 'clone_uri'),
313 (1, 'repo_private'), (1, 'repo_enable_statistics')]:
313 (1, 'repo_private'), (1, 'repo_enable_statistics')]:
314 if k in kwargs:
314 if k in kwargs:
315 val = kwargs[k]
315 val = kwargs[k]
316 if strip:
316 if strip:
317 k = remove_prefix(k, 'repo_')
317 k = remove_prefix(k, 'repo_')
318 setattr(cur_repo, k, val)
318 setattr(cur_repo, k, val)
319
319
320 new_name = cur_repo.get_new_name(kwargs['repo_name'])
320 new_name = cur_repo.get_new_name(kwargs['repo_name'])
321 cur_repo.repo_name = new_name
321 cur_repo.repo_name = new_name
322 #if private flag is set, reset default permission to NONE
322 #if private flag is set, reset default permission to NONE
323
323
324 if kwargs.get('repo_private'):
324 if kwargs.get('repo_private'):
325 EMPTY_PERM = 'repository.none'
325 EMPTY_PERM = 'repository.none'
326 RepoModel().grant_user_permission(
326 RepoModel().grant_user_permission(
327 repo=cur_repo, user='default', perm=EMPTY_PERM
327 repo=cur_repo, user='default', perm=EMPTY_PERM
328 )
328 )
329 #handle extra fields
329 #handle extra fields
330 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs):
330 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs):
331 k = RepositoryField.un_prefix_key(field)
331 k = RepositoryField.un_prefix_key(field)
332 ex_field = RepositoryField.get_by_key_name(key=k, repo=cur_repo)
332 ex_field = RepositoryField.get_by_key_name(key=k, repo=cur_repo)
333 if ex_field:
333 if ex_field:
334 ex_field.field_value = kwargs[field]
334 ex_field.field_value = kwargs[field]
335 self.sa.add(ex_field)
335 self.sa.add(ex_field)
336 self.sa.add(cur_repo)
336 self.sa.add(cur_repo)
337
337
338 if org_repo_name != new_name:
338 if org_repo_name != new_name:
339 # rename repository
339 # rename repository
340 self.__rename_repo(old=org_repo_name, new=new_name)
340 self.__rename_repo(old=org_repo_name, new=new_name)
341
341
342 return cur_repo
342 return cur_repo
343 except Exception:
343 except Exception:
344 log.error(traceback.format_exc())
344 log.error(traceback.format_exc())
345 raise
345 raise
346
346
347 def create_repo(self, repo_name, repo_type, description, owner,
347 def create_repo(self, repo_name, repo_type, description, owner,
348 private=False, clone_uri=None, repos_group=None,
348 private=False, clone_uri=None, repos_group=None,
349 landing_rev='tip', just_db=False, fork_of=None,
349 landing_rev='tip', just_db=False, fork_of=None,
350 copy_fork_permissions=False, enable_statistics=False,
350 copy_fork_permissions=False, enable_statistics=False,
351 enable_locking=False, enable_downloads=False):
351 enable_locking=False, enable_downloads=False):
352 """
352 """
353 Create repository
353 Create repository
354
354
355 """
355 """
356 from rhodecode.model.scm import ScmModel
356 from rhodecode.model.scm import ScmModel
357
357
        owner = self._get_user(owner)
        fork_of = self._get_repo(fork_of)
        repos_group = self._get_repo_group(repos_group)
        try:

            # repo_name is just the name of the repository, while
            # repo_name_full is the fully qualified name that combines
            # the group path with the name
            repo_name_full = repo_name
            repo_name = repo_name.split(self.URL_SEPARATOR)[-1]

            new_repo = Repository()
            new_repo.enable_statistics = False
            new_repo.repo_name = repo_name_full
            new_repo.repo_type = repo_type
            new_repo.user = owner
            new_repo.group = repos_group
            new_repo.description = description or repo_name
            new_repo.private = private
            new_repo.clone_uri = clone_uri
            new_repo.landing_rev = landing_rev

            new_repo.enable_statistics = enable_statistics
            new_repo.enable_locking = enable_locking
            new_repo.enable_downloads = enable_downloads

            if repos_group:
                new_repo.enable_locking = repos_group.enable_locking

            if fork_of:
                parent_repo = fork_of
                new_repo.fork = parent_repo

            self.sa.add(new_repo)

            if fork_of:
                if copy_fork_permissions:
                    repo = fork_of
                    user_perms = UserRepoToPerm.query()\
                        .filter(UserRepoToPerm.repository == repo).all()
                    group_perms = UserGroupRepoToPerm.query()\
                        .filter(UserGroupRepoToPerm.repository == repo).all()

                    for perm in user_perms:
                        UserRepoToPerm.create(perm.user, new_repo,
                                              perm.permission)

                    for perm in group_perms:
                        UserGroupRepoToPerm.create(perm.users_group, new_repo,
                                                   perm.permission)
                else:
                    perm_obj = self._create_default_perms(new_repo, private)
                    self.sa.add(perm_obj)
            else:
                perm_obj = self._create_default_perms(new_repo, private)
                self.sa.add(perm_obj)

            if not just_db:
                self.__create_repo(repo_name, repo_type,
                                   repos_group,
                                   clone_uri)
                log_create_repository(new_repo.get_dict(),
                                      created_by=owner.username)

            # now automatically start following this repository as owner
            ScmModel(self.sa).toggle_following_repo(new_repo.repo_id,
                                                    owner.user_id)
            return new_repo
        except Exception:
            log.error(traceback.format_exc())
            raise

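    # Illustrative usage sketch added for clarity (not part of the original
    # changeset). The keyword names mirror the parameters referenced in the
    # method body above; the values are only examples, 'admin' is assumed to
    # be an existing user, and the model class is assumed to be RepoModel:
    #
    #   repo = RepoModel().create_repo(
    #       repo_name='mygroup/myrepo', repo_type='hg',
    #       description='demo repository', owner='admin', private=False,
    #       clone_uri=None, repos_group=None, landing_rev='tip',
    #       just_db=False, fork_of=None, copy_fork_permissions=False,
    #       enable_statistics=False, enable_locking=False,
    #       enable_downloads=False)
    #
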
    def create(self, form_data, cur_user, just_db=False, fork=None):
        """
        Backward compatibility function, just a wrapper on top of create_repo

        :param form_data:
        :param cur_user:
        :param just_db:
        :param fork:
        """
        owner = cur_user
        repo_name = form_data['repo_name_full']
        repo_type = form_data['repo_type']
        description = form_data['repo_description']
        private = form_data['repo_private']
        clone_uri = form_data.get('clone_uri')
        repos_group = form_data['repo_group']
        landing_rev = form_data['repo_landing_rev']
        copy_fork_permissions = form_data.get('copy_permissions')
        fork_of = form_data.get('fork_parent_id')

        ## repo creation defaults, private and repo_type are filled in form
        defs = RhodeCodeSetting.get_default_repo_settings(strip_prefix=True)
        enable_statistics = defs.get('repo_enable_statistics')
        enable_locking = defs.get('repo_enable_locking')
        enable_downloads = defs.get('repo_enable_downloads')

        return self.create_repo(
            repo_name, repo_type, description, owner, private, clone_uri,
            repos_group, landing_rev, just_db, fork_of, copy_fork_permissions,
            enable_statistics, enable_locking, enable_downloads
        )

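    # Illustrative sketch (not part of the original changeset): the keys read
    # above are exactly what create() expects in form_data; the values below
    # are only examples:
    #
    #   form_data = {
    #       'repo_name_full': 'mygroup/myrepo',
    #       'repo_type': 'hg',
    #       'repo_description': 'demo repository',
    #       'repo_private': False,
    #       'clone_uri': None,
    #       'repo_group': None,
    #       'repo_landing_rev': 'tip',
    #       'copy_permissions': False,
    #       'fork_parent_id': None,
    #   }
    #
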
    def _update_permissions(self, repo, perms_new=None, perms_updates=None,
                            check_perms=True):
        if not perms_new:
            perms_new = []
        if not perms_updates:
            perms_updates = []

        # update permissions
        for member, perm, member_type in perms_updates:
            if member_type == 'user':
                # this updates existing one
                self.grant_user_permission(
                    repo=repo, user=member, perm=perm
                )
            else:
                # check if we have permissions to alter this usergroup
                req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
                if not check_perms or HasUserGroupPermissionAny(*req_perms)(member):
                    self.grant_users_group_permission(
                        repo=repo, group_name=member, perm=perm
                    )
        # set new permissions
        for member, perm, member_type in perms_new:
            if member_type == 'user':
                self.grant_user_permission(
                    repo=repo, user=member, perm=perm
                )
            else:
                # check if we have permissions to alter this usergroup
                req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
                if not check_perms or HasUserGroupPermissionAny(*req_perms)(member):
                    self.grant_users_group_permission(
                        repo=repo, group_name=member, perm=perm
                    )

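    # Illustrative sketch (not part of the original changeset): both
    # perms_updates and perms_new are iterables of (member, perm, member_type)
    # tuples; member_type == 'user' grants a user permission, anything else is
    # handled as a user group. The member_type value 'users_group' and the
    # permission names below are examples:
    #
    #   perms_updates = [('john', 'repository.write', 'user')]
    #   perms_new = [('developers', 'repository.read', 'users_group')]
    #
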
    def create_fork(self, form_data, cur_user):
        """
        Simple wrapper that runs the celery task for fork creation

        :param form_data:
        :param cur_user:
        """
        from rhodecode.lib.celerylib import tasks, run_task
        run_task(tasks.create_repo_fork, form_data, cur_user)

    def delete(self, repo, forks=None, fs_remove=True, cur_user=None):
        """
        Delete given repository, the forks parameter defines what to do with
        attached forks. Throws AttachedForksError if the deleted repo has
        attached forks.

        :param repo:
        :param forks: str 'delete' or 'detach'
        :param fs_remove: remove (archive) repo from filesystem
        :param cur_user: user performing the delete, stored in the deletion
            log; defaults to the current RhodeCode user
        """
        if not cur_user:
            cur_user = getattr(get_current_rhodecode_user(), 'username', '?')
        repo = self._get_repo(repo)
        if repo:
            if forks == 'detach':
                for r in repo.forks:
                    r.fork = None
                    self.sa.add(r)
            elif forks == 'delete':
                for r in repo.forks:
                    self.delete(r, forks='delete')
            elif [f for f in repo.forks]:
                raise AttachedForksError()

            old_repo_dict = repo.get_dict()
            owner = repo.user
            try:
                self.sa.delete(repo)
                if fs_remove:
                    self.__delete_repo(repo)
                else:
                    log.debug('skipping removal from filesystem')
                log_delete_repository(old_repo_dict,
                                      deleted_by=cur_user)
            except Exception:
                log.error(traceback.format_exc())
                raise

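    # Illustrative sketch (not part of the original changeset), assuming the
    # model class is RepoModel and 'mygroup/myrepo' is an existing repository:
    #
    #   # detach any forks and archive the repo on the filesystem
    #   RepoModel().delete('mygroup/myrepo', forks='detach', cur_user='admin')
    #
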
    def grant_user_permission(self, repo, user, perm):
        """
        Grant permission for user on given repository, or update existing one
        if found

        :param repo: Instance of Repository, repository_id, or repository name
        :param user: Instance of User, user_id or username
        :param perm: Instance of Permission, or permission_name
        """
        user = self._get_user(user)
        repo = self._get_repo(repo)
        permission = self._get_perm(perm)

        # check if we have that permission already
        obj = self.sa.query(UserRepoToPerm)\
            .filter(UserRepoToPerm.user == user)\
            .filter(UserRepoToPerm.repository == repo)\
            .scalar()
        if obj is None:
            # create new !
            obj = UserRepoToPerm()
        obj.repository = repo
        obj.user = user
        obj.permission = permission
        self.sa.add(obj)
        log.debug('Granted perm %s to %s on %s' % (perm, user, repo))

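    # Illustrative sketch (not part of the original changeset): per the
    # docstrings, repo, user and perm may be passed as database objects, ids
    # or names. The class name RepoModel and the permission name below are
    # assumptions:
    #
    #   model = RepoModel()
    #   model.grant_user_permission('mygroup/myrepo', 'john', 'repository.write')
    #   model.revoke_user_permission('mygroup/myrepo', 'john')
    #
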
    def revoke_user_permission(self, repo, user):
        """
        Revoke permission for user on given repository

        :param repo: Instance of Repository, repository_id, or repository name
        :param user: Instance of User, user_id or username
        """

        user = self._get_user(user)
        repo = self._get_repo(repo)

        obj = self.sa.query(UserRepoToPerm)\
            .filter(UserRepoToPerm.repository == repo)\
            .filter(UserRepoToPerm.user == user)\
            .scalar()
        if obj:
            self.sa.delete(obj)
            log.debug('Revoked perm on %s on %s' % (repo, user))

    def grant_users_group_permission(self, repo, group_name, perm):
        """
        Grant permission for user group on given repository, or update
        existing one if found

        :param repo: Instance of Repository, repository_id, or repository name
        :param group_name: Instance of UserGroup, users_group_id,
            or user group name
        :param perm: Instance of Permission, or permission_name
        """
        repo = self._get_repo(repo)
        group_name = self._get_user_group(group_name)
        permission = self._get_perm(perm)

        # check if we have that permission already
        obj = self.sa.query(UserGroupRepoToPerm)\
            .filter(UserGroupRepoToPerm.users_group == group_name)\
            .filter(UserGroupRepoToPerm.repository == repo)\
            .scalar()

        if obj is None:
            # create new
            obj = UserGroupRepoToPerm()

        obj.repository = repo
        obj.users_group = group_name
        obj.permission = permission
        self.sa.add(obj)
        log.debug('Granted perm %s to %s on %s' % (perm, group_name, repo))

    def revoke_users_group_permission(self, repo, group_name):
        """
        Revoke permission for user group on given repository

        :param repo: Instance of Repository, repository_id, or repository name
        :param group_name: Instance of UserGroup, users_group_id,
            or user group name
        """
        repo = self._get_repo(repo)
        group_name = self._get_user_group(group_name)

        obj = self.sa.query(UserGroupRepoToPerm)\
            .filter(UserGroupRepoToPerm.repository == repo)\
            .filter(UserGroupRepoToPerm.users_group == group_name)\
            .scalar()
        if obj:
            self.sa.delete(obj)
            log.debug('Revoked perm to %s on %s' % (repo, group_name))

    def delete_stats(self, repo_name):
        """
        removes stats for given repo

        :param repo_name:
        """
        repo = self._get_repo(repo_name)
        try:
            obj = self.sa.query(Statistics)\
                .filter(Statistics.repository == repo).scalar()
            if obj:
                self.sa.delete(obj)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def _create_repo(self, repo_name, alias, parent, clone_uri=False,
                     repo_store_location=None):
        return self.__create_repo(repo_name, alias, parent, clone_uri,
                                  repo_store_location)

    def __create_repo(self, repo_name, alias, parent, clone_uri=False,
                      repo_store_location=None):
        """
        Makes a repository on the filesystem. It is group aware, meaning it
        will create the repository within its group and alter the path
        according to the group location.

        :param repo_name:
        :param alias:
        :param parent:
        :param clone_uri:
        :param repo_store_location:
        """
        from rhodecode.lib.utils import is_valid_repo, is_valid_repos_group
        from rhodecode.model.scm import ScmModel

        if parent:
            new_parent_path = os.sep.join(parent.full_path_splitted)
        else:
            new_parent_path = ''
        if repo_store_location:
            _paths = [repo_store_location]
        else:
            _paths = [self.repos_path, new_parent_path, repo_name]
        # we need to make it str for mercurial
        repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))

        # check if this path is not a repository
        if is_valid_repo(repo_path, self.repos_path):
            raise Exception('This path %s is a valid repository' % repo_path)

        # check if this path is a group
        if is_valid_repos_group(repo_path, self.repos_path):
            raise Exception('This path %s is a valid group' % repo_path)

        log.info('creating repo %s in %s @ %s' % (
            repo_name, safe_unicode(repo_path),
            obfuscate_url_pw(clone_uri)
            )
        )
        backend = get_backend(alias)
        if alias == 'hg':
            repo = backend(repo_path, create=True, src_url=clone_uri)
        elif alias == 'git':
            repo = backend(repo_path, create=True, src_url=clone_uri, bare=True)
            # add rhodecode hook into this repo
            ScmModel().install_git_hook(repo=repo)
        else:
            raise Exception('Undefined alias %s' % alias)
        return repo

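    # Illustrative note (not part of the original changeset): with e.g.
    # repos_path='/srv/repos', a parent group at 'web/projects' and
    # repo_name='docs', the resulting repo_path is
    # '/srv/repos/web/projects/docs'.
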
    def __rename_repo(self, old, new):
        """
        renames repository on filesystem

        :param old: old name
        :param new: new name
        """
        log.info('renaming repo from %s to %s' % (old, new))

        old_path = os.path.join(self.repos_path, old)
        new_path = os.path.join(self.repos_path, new)
        if os.path.isdir(new_path):
            raise Exception(
                'Was trying to rename to already existing dir %s' % new_path
            )
        shutil.move(old_path, new_path)

    def __delete_repo(self, repo):
        """
        Removes the repo from the filesystem. The removal is actually done by
        adding an rm__ prefix to the repository directory and renaming the
        internal .hg/.git dir, so the repository is no longer valid for
        rhodecode and can be undeleted later by reverting the renames.

        :param repo: repo object
        """
        rm_path = os.path.join(self.repos_path, repo.repo_name)
        log.info("Removing %s" % (rm_path))
        # disable hg/git internal dir so it doesn't get detected as a repo
        alias = repo.repo_type

        bare = getattr(repo.scm_instance, 'bare', False)

        if not bare:
            # skip this for bare git repos
            shutil.move(os.path.join(rm_path, '.%s' % alias),
                        os.path.join(rm_path, 'rm__.%s' % alias))
        # disable repo
        _now = datetime.now()
        _ms = str(_now.microsecond).rjust(6, '0')
        _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
                             repo.just_name)
        if repo.group:
            args = repo.group.full_path_splitted + [_d]
            _d = os.path.join(*args)
        shutil.move(rm_path, os.path.join(self.repos_path, _d))
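
    # Illustrative note (not part of the original changeset): for a repository
    # named 'docs' removed at 2013-01-01 12:00:00.000123, the archived
    # directory name would be 'rm__20130101_120000_000123__docs', placed
    # inside the repository's group directory if it has one.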