Fix typos.
Andrew Shadura
r2899:c76aa8b0 beta
@@ -1,709 +1,709 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """
2 """
3 rhodecode.lib.utils
3 rhodecode.lib.utils
4 ~~~~~~~~~~~~~~~~~~~
4 ~~~~~~~~~~~~~~~~~~~
5
5
6 Utilities library for RhodeCode
6 Utilities library for RhodeCode
7
7
8 :created_on: Apr 18, 2010
8 :created_on: Apr 18, 2010
9 :author: marcink
9 :author: marcink
10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 :license: GPLv3, see COPYING for more details.
11 :license: GPLv3, see COPYING for more details.
12 """
12 """
13 # This program is free software: you can redistribute it and/or modify
13 # This program is free software: you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation, either version 3 of the License, or
15 # the Free Software Foundation, either version 3 of the License, or
16 # (at your option) any later version.
16 # (at your option) any later version.
17 #
17 #
18 # This program is distributed in the hope that it will be useful,
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
21 # GNU General Public License for more details.
22 #
22 #
23 # You should have received a copy of the GNU General Public License
23 # You should have received a copy of the GNU General Public License
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25
25
26 import os
26 import os
27 import re
27 import re
28 import logging
28 import logging
29 import datetime
29 import datetime
30 import traceback
30 import traceback
31 import paste
31 import paste
32 import beaker
32 import beaker
33 import tarfile
33 import tarfile
34 import shutil
34 import shutil
35 from os.path import abspath
35 from os.path import abspath
36 from os.path import dirname as dn, join as jn
36 from os.path import dirname as dn, join as jn
37
37
38 from paste.script.command import Command, BadCommand
38 from paste.script.command import Command, BadCommand
39
39
40 from mercurial import ui, config
40 from mercurial import ui, config
41
41
42 from webhelpers.text import collapse, remove_formatting, strip_tags
42 from webhelpers.text import collapse, remove_formatting, strip_tags
43
43
44 from rhodecode.lib.vcs import get_backend
44 from rhodecode.lib.vcs import get_backend
45 from rhodecode.lib.vcs.backends.base import BaseChangeset
45 from rhodecode.lib.vcs.backends.base import BaseChangeset
46 from rhodecode.lib.vcs.utils.lazy import LazyProperty
46 from rhodecode.lib.vcs.utils.lazy import LazyProperty
47 from rhodecode.lib.vcs.utils.helpers import get_scm
47 from rhodecode.lib.vcs.utils.helpers import get_scm
48 from rhodecode.lib.vcs.exceptions import VCSError
48 from rhodecode.lib.vcs.exceptions import VCSError
49
49
50 from rhodecode.lib.caching_query import FromCache
50 from rhodecode.lib.caching_query import FromCache
51
51
52 from rhodecode.model import meta
52 from rhodecode.model import meta
53 from rhodecode.model.db import Repository, User, RhodeCodeUi, \
53 from rhodecode.model.db import Repository, User, RhodeCodeUi, \
54 UserLog, RepoGroup, RhodeCodeSetting, CacheInvalidation
54 UserLog, RepoGroup, RhodeCodeSetting, CacheInvalidation
55 from rhodecode.model.meta import Session
55 from rhodecode.model.meta import Session
56 from rhodecode.model.repos_group import ReposGroupModel
56 from rhodecode.model.repos_group import ReposGroupModel
57 from rhodecode.lib.utils2 import safe_str, safe_unicode
57 from rhodecode.lib.utils2 import safe_str, safe_unicode
58 from rhodecode.lib.vcs.utils.fakemod import create_module
58 from rhodecode.lib.vcs.utils.fakemod import create_module
59
59
60 log = logging.getLogger(__name__)
60 log = logging.getLogger(__name__)
61
61
62 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
62 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
63
63
64
64
65 def recursive_replace(str_, replace=' '):
65 def recursive_replace(str_, replace=' '):
66 """
66 """
67 Recursively replace repeated occurrences of the given character with a single instance
67 Recursively replace repeated occurrences of the given character with a single instance
68
68
69 :param str_: given string
69 :param str_: given string
70 :param replace: char whose repeated occurrences should be collapsed into one
70 :param replace: char whose repeated occurrences should be collapsed into one
71
71
72 Examples::
72 Examples::
73 >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
73 >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
74 'Mighty-Mighty-Bo-sstones'
74 'Mighty-Mighty-Bo-sstones'
75 """
75 """
76
76
77 if str_.find(replace * 2) == -1:
77 if str_.find(replace * 2) == -1:
78 return str_
78 return str_
79 else:
79 else:
80 str_ = str_.replace(replace * 2, replace)
80 str_ = str_.replace(replace * 2, replace)
81 return recursive_replace(str_, replace)
81 return recursive_replace(str_, replace)
82
82
83
83
84 def repo_name_slug(value):
84 def repo_name_slug(value):
85 """
85 """
86 Return a slug of the repository name.
86 Return a slug of the repository name.
87 This function is called on each creation/modification
87 This function is called on each creation/modification
88 of a repository to prevent bad names in repos
88 of a repository to prevent bad names in repos
89 """
89 """
90
90
91 slug = remove_formatting(value)
91 slug = remove_formatting(value)
92 slug = strip_tags(slug)
92 slug = strip_tags(slug)
93
93
94 for c in """`?=[]\;'"<>,/~!@#$%^&*()+{}|: """:
94 for c in """`?=[]\;'"<>,/~!@#$%^&*()+{}|: """:
95 slug = slug.replace(c, '-')
95 slug = slug.replace(c, '-')
96 slug = recursive_replace(slug, '-')
96 slug = recursive_replace(slug, '-')
97 slug = collapse(slug, '-')
97 slug = collapse(slug, '-')
98 return slug
98 return slug
99
99
100
100
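# Illustrative usage sketch of repo_name_slug() above; the exact output shown
# is an assumption about how the helpers normalize a name, not verified
# against this revision.
from rhodecode.lib.utils import repo_name_slug

print repo_name_slug(u'My repo!! (test)')  # expected to be roughly u'My-repo-test'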
101 def get_repo_slug(request):
101 def get_repo_slug(request):
102 _repo = request.environ['pylons.routes_dict'].get('repo_name')
102 _repo = request.environ['pylons.routes_dict'].get('repo_name')
103 if _repo:
103 if _repo:
104 _repo = _repo.rstrip('/')
104 _repo = _repo.rstrip('/')
105 return _repo
105 return _repo
106
106
107
107
108 def get_repos_group_slug(request):
108 def get_repos_group_slug(request):
109 _group = request.environ['pylons.routes_dict'].get('group_name')
109 _group = request.environ['pylons.routes_dict'].get('group_name')
110 if _group:
110 if _group:
111 _group = _group.rstrip('/')
111 _group = _group.rstrip('/')
112 return _group
112 return _group
113
113
114
114
115 def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
115 def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
116 """
116 """
117 Action logger for various actions made by users
117 Action logger for various actions made by users
118
118
119 :param user: user that made this action; can be a unique username string or
119 :param user: user that made this action; can be a unique username string or
120 an object containing a user_id attribute
120 an object containing a user_id attribute
121 :param action: action to log; should be one of the predefined unique actions, for
121 :param action: action to log; should be one of the predefined unique actions, for
122 easy translation
122 easy translation
123 :param repo: string name of repository or object containing repo_id,
123 :param repo: string name of repository or object containing repo_id,
124 that action was made on
124 that action was made on
125 :param ipaddr: optional ip address from which the action was made
125 :param ipaddr: optional ip address from which the action was made
126 :param sa: optional sqlalchemy session
126 :param sa: optional sqlalchemy session
127
127
128 """
128 """
129
129
130 if not sa:
130 if not sa:
131 sa = meta.Session()
131 sa = meta.Session()
132
132
133 try:
133 try:
134 if hasattr(user, 'user_id'):
134 if hasattr(user, 'user_id'):
135 user_obj = user
135 user_obj = user
136 elif isinstance(user, basestring):
136 elif isinstance(user, basestring):
137 user_obj = User.get_by_username(user)
137 user_obj = User.get_by_username(user)
138 else:
138 else:
139 raise Exception('You have to provide user object or username')
139 raise Exception('You have to provide a user object or a username')
140
140
141 if hasattr(repo, 'repo_id'):
141 if hasattr(repo, 'repo_id'):
142 repo_obj = Repository.get(repo.repo_id)
142 repo_obj = Repository.get(repo.repo_id)
143 repo_name = repo_obj.repo_name
143 repo_name = repo_obj.repo_name
144 elif isinstance(repo, basestring):
144 elif isinstance(repo, basestring):
145 repo_name = repo.lstrip('/')
145 repo_name = repo.lstrip('/')
146 repo_obj = Repository.get_by_repo_name(repo_name)
146 repo_obj = Repository.get_by_repo_name(repo_name)
147 else:
147 else:
148 repo_obj = None
148 repo_obj = None
149 repo_name = ''
149 repo_name = ''
150
150
151 user_log = UserLog()
151 user_log = UserLog()
152 user_log.user_id = user_obj.user_id
152 user_log.user_id = user_obj.user_id
153 user_log.action = safe_unicode(action)
153 user_log.action = safe_unicode(action)
154
154
155 user_log.repository = repo_obj
155 user_log.repository = repo_obj
156 user_log.repository_name = repo_name
156 user_log.repository_name = repo_name
157
157
158 user_log.action_date = datetime.datetime.now()
158 user_log.action_date = datetime.datetime.now()
159 user_log.user_ip = ipaddr
159 user_log.user_ip = ipaddr
160 sa.add(user_log)
160 sa.add(user_log)
161
161
162 log.info(
162 log.info(
163 'Adding user %s, action %s on %s' % (user_obj, action,
163 'Adding user %s, action %s on %s' % (user_obj, action,
164 safe_unicode(repo))
164 safe_unicode(repo))
165 )
165 )
166 if commit:
166 if commit:
167 sa.commit()
167 sa.commit()
168 except:
168 except:
169 log.error(traceback.format_exc())
169 log.error(traceback.format_exc())
170 raise
170 raise
171
171
172
172
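# Illustrative sketch of a typical action_logger() call; the action string and
# repository name below are assumptions chosen only for the example.
from rhodecode.lib.utils import action_logger

action_logger(user='admin', action='user_created_repo',
              repo='some-repo', ipaddr='127.0.0.1', commit=True)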
173 def get_repos(path, recursive=False):
173 def get_repos(path, recursive=False):
174 """
174 """
175 Scans the given path for repos and returns (name, (type, path)) tuples
175 Scans the given path for repos and returns (name, (type, path)) tuples
176
176
177 :param path: path to scan for repositories
177 :param path: path to scan for repositories
178 :param recursive: recursive search and return names with subdirs in front
178 :param recursive: recursive search and return names with subdirs in front
179 """
179 """
180
180
181 # remove ending slash for better results
181 # remove ending slash for better results
182 path = path.rstrip(os.sep)
182 path = path.rstrip(os.sep)
183
183
184 def _get_repos(p):
184 def _get_repos(p):
185 if not os.access(p, os.W_OK):
185 if not os.access(p, os.W_OK):
186 return
186 return
187 for dirpath in os.listdir(p):
187 for dirpath in os.listdir(p):
188 if os.path.isfile(os.path.join(p, dirpath)):
188 if os.path.isfile(os.path.join(p, dirpath)):
189 continue
189 continue
190 cur_path = os.path.join(p, dirpath)
190 cur_path = os.path.join(p, dirpath)
191 try:
191 try:
192 scm_info = get_scm(cur_path)
192 scm_info = get_scm(cur_path)
193 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
193 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
194 except VCSError:
194 except VCSError:
195 if not recursive:
195 if not recursive:
196 continue
196 continue
197 #check if this dir contains other repos for recursive scan
197 #check if this dir contains other repos for recursive scan
198 rec_path = os.path.join(p, dirpath)
198 rec_path = os.path.join(p, dirpath)
199 if os.path.isdir(rec_path):
199 if os.path.isdir(rec_path):
200 for inner_scm in _get_repos(rec_path):
200 for inner_scm in _get_repos(rec_path):
201 yield inner_scm
201 yield inner_scm
202
202
203 return _get_repos(path)
203 return _get_repos(path)
204
204
205
205
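# Illustrative sketch: get_repos() above yields (relative_name, (scm, path))
# pairs lazily; the scanned path used here is an assumption.
from rhodecode.lib.utils import get_repos

for name, (scm, repo_path) in get_repos('/srv/repositories', recursive=True):
    print name, scm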
206 def is_valid_repo(repo_name, base_path, scm=None):
206 def is_valid_repo(repo_name, base_path, scm=None):
207 """
207 """
208 Returns True if the given path is a valid repository, False otherwise.
208 Returns True if the given path is a valid repository, False otherwise.
209 If the scm param is given, also check whether the detected scm matches
209 If the scm param is given, also check whether the detected scm matches
210 the expected scm parameter
210 the expected scm parameter
211
211
212 :param repo_name:
212 :param repo_name:
213 :param base_path:
213 :param base_path:
214 :param scm:
214 :param scm:
215
215
216 :return True: if given path is a valid repository
216 :return True: if given path is a valid repository
217 """
217 """
218 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
218 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
219
219
220 try:
220 try:
221 scm_ = get_scm(full_path)
221 scm_ = get_scm(full_path)
222 if scm:
222 if scm:
223 return scm_[0] == scm
223 return scm_[0] == scm
224 return True
224 return True
225 except VCSError:
225 except VCSError:
226 return False
226 return False
227
227
228
228
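# Illustrative sketch of calling is_valid_repo() above; the base path and
# repository name are assumptions for the example.
from rhodecode.lib.utils import is_valid_repo

if is_valid_repo('myrepo', '/srv/repositories', scm='hg'):
    print 'myrepo is a mercurial repository'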
229 def is_valid_repos_group(repos_group_name, base_path):
229 def is_valid_repos_group(repos_group_name, base_path):
230 """
230 """
231 Returns True if the given path is a repos group, False otherwise
231 Returns True if the given path is a repos group, False otherwise
232
232
233 :param repos_group_name:
233 :param repos_group_name:
234 :param base_path:
234 :param base_path:
235 """
235 """
236 full_path = os.path.join(safe_str(base_path), safe_str(repos_group_name))
236 full_path = os.path.join(safe_str(base_path), safe_str(repos_group_name))
237
237
238 # check if it's not a repo
238 # check if it's not a repo
239 if is_valid_repo(repos_group_name, base_path):
239 if is_valid_repo(repos_group_name, base_path):
240 return False
240 return False
241
241
242 try:
242 try:
243 # we need to check bare git repos at higher level
243 # we need to check bare git repos at higher level
244 # since we might match branches/hooks/info/objects or possible
244 # since we might match branches/hooks/info/objects or possible
245 # other things inside bare git repo
245 # other things inside bare git repo
246 get_scm(os.path.dirname(full_path))
246 get_scm(os.path.dirname(full_path))
247 return False
247 return False
248 except VCSError:
248 except VCSError:
249 pass
249 pass
250
250
251 # check if it's a valid path
251 # check if it's a valid path
252 if os.path.isdir(full_path):
252 if os.path.isdir(full_path):
253 return True
253 return True
254
254
255 return False
255 return False
256
256
257
257
258 def ask_ok(prompt, retries=4, complaint='Yes or no, please!'):
258 def ask_ok(prompt, retries=4, complaint='Yes or no please!'):
259 while True:
259 while True:
260 ok = raw_input(prompt)
260 ok = raw_input(prompt)
261 if ok in ('y', 'ye', 'yes'):
261 if ok in ('y', 'ye', 'yes'):
262 return True
262 return True
263 if ok in ('n', 'no', 'nop', 'nope'):
263 if ok in ('n', 'no', 'nop', 'nope'):
264 return False
264 return False
265 retries = retries - 1
265 retries = retries - 1
266 if retries < 0:
266 if retries < 0:
267 raise IOError
267 raise IOError
268 print complaint
268 print complaint
269
269
270 #propagated from mercurial documentation
270 #propagated from mercurial documentation
271 ui_sections = ['alias', 'auth',
271 ui_sections = ['alias', 'auth',
272 'decode/encode', 'defaults',
272 'decode/encode', 'defaults',
273 'diff', 'email',
273 'diff', 'email',
274 'extensions', 'format',
274 'extensions', 'format',
275 'merge-patterns', 'merge-tools',
275 'merge-patterns', 'merge-tools',
276 'hooks', 'http_proxy',
276 'hooks', 'http_proxy',
277 'smtp', 'patch',
277 'smtp', 'patch',
278 'paths', 'profiling',
278 'paths', 'profiling',
279 'server', 'trusted',
279 'server', 'trusted',
280 'ui', 'web', ]
280 'ui', 'web', ]
281
281
282
282
283 def make_ui(read_from='file', path=None, checkpaths=True, clear_session=True):
283 def make_ui(read_from='file', path=None, checkpaths=True, clear_session=True):
284 """
284 """
285 A function that will read python rc files or the database
285 A function that will read python rc files or the database
286 and make a mercurial ui object from the read options
286 and make a mercurial ui object from the read options
287
287
288 :param path: path to mercurial config file
288 :param path: path to mercurial config file
289 :param checkpaths: check the path
289 :param checkpaths: check the path
290 :param read_from: read from 'file' or 'db'
290 :param read_from: read from 'file' or 'db'
291 """
291 """
292
292
293 baseui = ui.ui()
293 baseui = ui.ui()
294
294
295 # clean the baseui object
295 # clean the baseui object
296 baseui._ocfg = config.config()
296 baseui._ocfg = config.config()
297 baseui._ucfg = config.config()
297 baseui._ucfg = config.config()
298 baseui._tcfg = config.config()
298 baseui._tcfg = config.config()
299
299
300 if read_from == 'file':
300 if read_from == 'file':
301 if not os.path.isfile(path):
301 if not os.path.isfile(path):
302 log.debug('hgrc file is not present at %s skipping...' % path)
302 log.debug('hgrc file is not present at %s, skipping...' % path)
303 return False
303 return False
304 log.debug('reading hgrc from %s' % path)
304 log.debug('reading hgrc from %s' % path)
305 cfg = config.config()
305 cfg = config.config()
306 cfg.read(path)
306 cfg.read(path)
307 for section in ui_sections:
307 for section in ui_sections:
308 for k, v in cfg.items(section):
308 for k, v in cfg.items(section):
309 log.debug('settings ui from file[%s]%s:%s' % (section, k, v))
309 log.debug('settings ui from file[%s]%s:%s' % (section, k, v))
310 baseui.setconfig(section, k, v)
310 baseui.setconfig(section, k, v)
311
311
312 elif read_from == 'db':
312 elif read_from == 'db':
313 sa = meta.Session()
313 sa = meta.Session()
314 ret = sa.query(RhodeCodeUi)\
314 ret = sa.query(RhodeCodeUi)\
315 .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
315 .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
316 .all()
316 .all()
317
317
318 hg_ui = ret
318 hg_ui = ret
319 for ui_ in hg_ui:
319 for ui_ in hg_ui:
320 if ui_.ui_active:
320 if ui_.ui_active:
321 log.debug('settings ui from db[%s]%s:%s', ui_.ui_section,
321 log.debug('settings ui from db[%s]%s:%s', ui_.ui_section,
322 ui_.ui_key, ui_.ui_value)
322 ui_.ui_key, ui_.ui_value)
323 baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)
323 baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)
324 if ui_.ui_key == 'push_ssl':
324 if ui_.ui_key == 'push_ssl':
325 # force set push_ssl requirement to False, rhodecode
325 # force set push_ssl requirement to False, rhodecode
326 # handles that
326 # handles that
327 baseui.setconfig(ui_.ui_section, ui_.ui_key, False)
327 baseui.setconfig(ui_.ui_section, ui_.ui_key, False)
328 if clear_session:
328 if clear_session:
329 meta.Session.remove()
329 meta.Session.remove()
330 return baseui
330 return baseui
331
331
332
332
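# Illustrative sketch: building a mercurial ui object from the database-backed
# settings with make_ui() above; whether the session should be cleared depends
# on the caller, so the arguments here are assumptions about typical use.
from rhodecode.lib.utils import make_ui

baseui = make_ui(read_from='db', clear_session=False)
if baseui:
    push_ssl = baseui.config('web', 'push_ssl')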
333 def set_rhodecode_config(config):
333 def set_rhodecode_config(config):
334 """
334 """
335 Updates pylons config with new settings from database
335 Updates pylons config with new settings from database
336
336
337 :param config:
337 :param config:
338 """
338 """
339 hgsettings = RhodeCodeSetting.get_app_settings()
339 hgsettings = RhodeCodeSetting.get_app_settings()
340
340
341 for k, v in hgsettings.items():
341 for k, v in hgsettings.items():
342 config[k] = v
342 config[k] = v
343
343
344
344
345 def invalidate_cache(cache_key, *args):
345 def invalidate_cache(cache_key, *args):
346 """
346 """
347 Puts cache invalidation task into db for
347 Puts cache invalidation task into db for
348 further global cache invalidation
348 further global cache invalidation
349 """
349 """
350
350
351 from rhodecode.model.scm import ScmModel
351 from rhodecode.model.scm import ScmModel
352
352
353 if cache_key.startswith('get_repo_cached_'):
353 if cache_key.startswith('get_repo_cached_'):
354 name = cache_key.split('get_repo_cached_')[-1]
354 name = cache_key.split('get_repo_cached_')[-1]
355 ScmModel().mark_for_invalidation(name)
355 ScmModel().mark_for_invalidation(name)
356
356
357
357
358 def map_groups(path):
358 def map_groups(path):
359 """
359 """
360 Given a full path to a repository, create all nested groups that this
360 Given a full path to a repository, create all nested groups that this
361 repo is inside. This function creates parent-child relationships between
361 repo is inside. This function creates parent-child relationships between
362 groups and creates default perms for all new groups.
362 groups and creates default perms for all new groups.
363
363
364 :param path: full path to repository
364 :param path: full path to repository
365 """
365 """
366 sa = meta.Session()
366 sa = meta.Session()
367 groups = path.split(Repository.url_sep())
367 groups = path.split(Repository.url_sep())
368 parent = None
368 parent = None
369 group = None
369 group = None
370
370
371 # last element is repo in nested groups structure
371 # last element is repo in nested groups structure
372 groups = groups[:-1]
372 groups = groups[:-1]
373 rgm = ReposGroupModel(sa)
373 rgm = ReposGroupModel(sa)
374 for lvl, group_name in enumerate(groups):
374 for lvl, group_name in enumerate(groups):
375 group_name = '/'.join(groups[:lvl] + [group_name])
375 group_name = '/'.join(groups[:lvl] + [group_name])
376 group = RepoGroup.get_by_group_name(group_name)
376 group = RepoGroup.get_by_group_name(group_name)
377 desc = '%s group' % group_name
377 desc = '%s group' % group_name
378
378
379 # skip folders that are now removed repos
379 # skip folders that are now removed repos
380 if REMOVED_REPO_PAT.match(group_name):
380 if REMOVED_REPO_PAT.match(group_name):
381 break
381 break
382
382
383 if group is None:
383 if group is None:
384 log.debug('creating group level: %s group_name: %s' % (lvl,
384 log.debug('creating group level: %s group_name: %s' % (lvl,
385 group_name))
385 group_name))
386 group = RepoGroup(group_name, parent)
386 group = RepoGroup(group_name, parent)
387 group.group_description = desc
387 group.group_description = desc
388 sa.add(group)
388 sa.add(group)
389 rgm._create_default_perms(group)
389 rgm._create_default_perms(group)
390 sa.flush()
390 sa.flush()
391 parent = group
391 parent = group
392 return group
392 return group
393
393
394
394
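# Illustrative sketch: for a nested repository path, map_groups() above creates
# the intermediate groups and returns the innermost one; the path used is an
# assumption.
from rhodecode.lib.utils import map_groups

group = map_groups('projects/backend/myrepo')  # creates 'projects' and 'projects/backend'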
395 def repo2db_mapper(initial_repo_list, remove_obsolete=False,
395 def repo2db_mapper(initial_repo_list, remove_obsolete=False,
396 install_git_hook=False):
396 install_git_hook=False):
397 """
397 """
398 maps all repos given in initial_repo_list; non-existing repositories
398 maps all repos given in initial_repo_list; non-existing repositories
399 are created. If remove_obsolete is True it also checks for db entries
399 are created. If remove_obsolete is True it also checks for db entries
400 that are not in initial_repo_list and removes them.
400 that are not in initial_repo_list and removes them.
401
401
402 :param initial_repo_list: list of repositories found by scanning methods
402 :param initial_repo_list: list of repositories found by scanning methods
403 :param remove_obsolete: check for obsolete entries in database
403 :param remove_obsolete: check for obsolete entries in database
404 :param install_git_hook: if this is True, also check and install githook
404 :param install_git_hook: if this is True, also check and install githook
405 for a repo if missing
405 for a repo if missing
406 """
406 """
407 from rhodecode.model.repo import RepoModel
407 from rhodecode.model.repo import RepoModel
408 from rhodecode.model.scm import ScmModel
408 from rhodecode.model.scm import ScmModel
409 sa = meta.Session()
409 sa = meta.Session()
410 rm = RepoModel()
410 rm = RepoModel()
411 user = sa.query(User).filter(User.admin == True).first()
411 user = sa.query(User).filter(User.admin == True).first()
412 if user is None:
412 if user is None:
413 raise Exception('Missing administrative account !')
413 raise Exception('Missing administrative account!')
414 added = []
414 added = []
415
415
416 # # clear cache keys
416 # # clear cache keys
417 # log.debug("Clearing cache keys now...")
417 # log.debug("Clearing cache keys now...")
418 # CacheInvalidation.clear_cache()
418 # CacheInvalidation.clear_cache()
419 # sa.commit()
419 # sa.commit()
420
420
421 for name, repo in initial_repo_list.items():
421 for name, repo in initial_repo_list.items():
422 group = map_groups(name)
422 group = map_groups(name)
423 db_repo = rm.get_by_repo_name(name)
423 db_repo = rm.get_by_repo_name(name)
424 # found repo that is on filesystem not in RhodeCode database
424 # found repo that is on filesystem not in RhodeCode database
425 if not db_repo:
425 if not db_repo:
426 log.info('repository %s not found creating now' % name)
426 log.info('repository %s not found, creating now' % name)
427 added.append(name)
427 added.append(name)
428 desc = (repo.description
428 desc = (repo.description
429 if repo.description != 'unknown'
429 if repo.description != 'unknown'
430 else '%s repository' % name)
430 else '%s repository' % name)
431 new_repo = rm.create_repo(
431 new_repo = rm.create_repo(
432 repo_name=name,
432 repo_name=name,
433 repo_type=repo.alias,
433 repo_type=repo.alias,
434 description=desc,
434 description=desc,
435 repos_group=getattr(group, 'group_id', None),
435 repos_group=getattr(group, 'group_id', None),
436 owner=user,
436 owner=user,
437 just_db=True
437 just_db=True
438 )
438 )
439 # we added that repo just now, and make sure it has githook
439 # we added that repo just now, and make sure it has githook
440 # installed
440 # installed
441 if new_repo.repo_type == 'git':
441 if new_repo.repo_type == 'git':
442 ScmModel().install_git_hook(new_repo.scm_instance)
442 ScmModel().install_git_hook(new_repo.scm_instance)
443 elif install_git_hook:
443 elif install_git_hook:
444 if db_repo.repo_type == 'git':
444 if db_repo.repo_type == 'git':
445 ScmModel().install_git_hook(db_repo.scm_instance)
445 ScmModel().install_git_hook(db_repo.scm_instance)
446 # during startup, install all cache keys for all repositories in the
446 # during startup, install all cache keys for all repositories in the
447 # system; this will register all repos and multiple instances
447 # system; this will register all repos and multiple instances
448 key, _prefix, _org_key = CacheInvalidation._get_key(name)
448 key, _prefix, _org_key = CacheInvalidation._get_key(name)
449 log.debug("Creating cache key for %s instance_id:`%s`" % (name, _prefix))
449 log.debug("Creating a cache key for %s instance_id:`%s`" % (name, _prefix))
450 CacheInvalidation._get_or_create_key(key, _prefix, _org_key, commit=False)
450 CacheInvalidation._get_or_create_key(key, _prefix, _org_key, commit=False)
451 sa.commit()
451 sa.commit()
452 removed = []
452 removed = []
453 if remove_obsolete:
453 if remove_obsolete:
454 # remove from database those repositories that are not in the filesystem
454 # remove from database those repositories that are not in the filesystem
455 for repo in sa.query(Repository).all():
455 for repo in sa.query(Repository).all():
456 if repo.repo_name not in initial_repo_list.keys():
456 if repo.repo_name not in initial_repo_list.keys():
457 log.debug("Removing non existing repository found in db `%s`" %
457 log.debug("Removing non-existing repository found in db `%s`" %
458 repo.repo_name)
458 repo.repo_name)
459 try:
459 try:
460 sa.delete(repo)
460 sa.delete(repo)
461 sa.commit()
461 sa.commit()
462 removed.append(repo.repo_name)
462 removed.append(repo.repo_name)
463 except:
463 except:
464 #don't hold further removals on error
464 #don't hold further removals on error
465 log.error(traceback.format_exc())
465 log.error(traceback.format_exc())
466 sa.rollback()
466 sa.rollback()
467
467
468 return added, removed
468 return added, removed
469
469
470
470
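# Illustrative sketch of the usual scan-and-map sequence, assuming that
# ScmModel().repo_scan() returns the {name: repo} mapping repo2db_mapper()
# expects; the repositories root is an assumption.
from rhodecode.model.scm import ScmModel
from rhodecode.lib.utils import repo2db_mapper

initial = ScmModel().repo_scan('/srv/repositories')
added, removed = repo2db_mapper(initial, remove_obsolete=False)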
471 # set cache regions for beaker so celery can utilise it
471 # set cache regions for beaker so celery can utilise it
472 def add_cache(settings):
472 def add_cache(settings):
473 cache_settings = {'regions': None}
473 cache_settings = {'regions': None}
474 for key in settings.keys():
474 for key in settings.keys():
475 for prefix in ['beaker.cache.', 'cache.']:
475 for prefix in ['beaker.cache.', 'cache.']:
476 if key.startswith(prefix):
476 if key.startswith(prefix):
477 name = key.split(prefix)[1].strip()
477 name = key.split(prefix)[1].strip()
478 cache_settings[name] = settings[key].strip()
478 cache_settings[name] = settings[key].strip()
479 if cache_settings['regions']:
479 if cache_settings['regions']:
480 for region in cache_settings['regions'].split(','):
480 for region in cache_settings['regions'].split(','):
481 region = region.strip()
481 region = region.strip()
482 region_settings = {}
482 region_settings = {}
483 for key, value in cache_settings.items():
483 for key, value in cache_settings.items():
484 if key.startswith(region):
484 if key.startswith(region):
485 region_settings[key.split('.')[1]] = value
485 region_settings[key.split('.')[1]] = value
486 region_settings['expire'] = int(region_settings.get('expire',
486 region_settings['expire'] = int(region_settings.get('expire',
487 60))
487 60))
488 region_settings.setdefault('lock_dir',
488 region_settings.setdefault('lock_dir',
489 cache_settings.get('lock_dir'))
489 cache_settings.get('lock_dir'))
490 region_settings.setdefault('data_dir',
490 region_settings.setdefault('data_dir',
491 cache_settings.get('data_dir'))
491 cache_settings.get('data_dir'))
492
492
493 if 'type' not in region_settings:
493 if 'type' not in region_settings:
494 region_settings['type'] = cache_settings.get('type',
494 region_settings['type'] = cache_settings.get('type',
495 'memory')
495 'memory')
496 beaker.cache.cache_regions[region] = region_settings
496 beaker.cache.cache_regions[region] = region_settings
497
497
498
498
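# Illustrative sketch of the kind of ini-derived settings dict that add_cache()
# above consumes; the region names and values are assumptions modelled on a
# typical RhodeCode ini file.
from rhodecode.lib.utils import add_cache

settings = {
    'beaker.cache.regions': 'sql_cache_short, sql_cache_long',
    'beaker.cache.sql_cache_short.expire': '30',
    'beaker.cache.sql_cache_short.type': 'memory',
    'beaker.cache.sql_cache_long.expire': '3600',
    'beaker.cache.data_dir': '/tmp/cache/data',
    'beaker.cache.lock_dir': '/tmp/cache/lock',
}
add_cache(settings)  # populates beaker.cache.cache_regions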
499 def load_rcextensions(root_path):
499 def load_rcextensions(root_path):
500 import rhodecode
500 import rhodecode
501 from rhodecode.config import conf
501 from rhodecode.config import conf
502
502
503 path = os.path.join(root_path, 'rcextensions', '__init__.py')
503 path = os.path.join(root_path, 'rcextensions', '__init__.py')
504 if os.path.isfile(path):
504 if os.path.isfile(path):
505 rcext = create_module('rc', path)
505 rcext = create_module('rc', path)
506 EXT = rhodecode.EXTENSIONS = rcext
506 EXT = rhodecode.EXTENSIONS = rcext
507 log.debug('Found rcextensions now loading %s...' % rcext)
507 log.debug('Found rcextensions now loading %s...' % rcext)
508
508
509 # Additional mappings that are not present in the pygments lexers
509 # Additional mappings that are not present in the pygments lexers
510 conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
510 conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
511
511
512 #OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)
512 #OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)
513
513
514 if getattr(EXT, 'INDEX_EXTENSIONS', []) != []:
514 if getattr(EXT, 'INDEX_EXTENSIONS', []) != []:
515 log.debug('settings custom INDEX_EXTENSIONS')
515 log.debug('settings custom INDEX_EXTENSIONS')
516 conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])
516 conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])
517
517
518 #ADDITIONAL MAPPINGS
518 #ADDITIONAL MAPPINGS
519 log.debug('adding extra into INDEX_EXTENSIONS')
519 log.debug('adding extra into INDEX_EXTENSIONS')
520 conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))
520 conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))
521
521
522
522
523 #==============================================================================
523 #==============================================================================
524 # TEST FUNCTIONS AND CREATORS
524 # TEST FUNCTIONS AND CREATORS
525 #==============================================================================
525 #==============================================================================
526 def create_test_index(repo_location, config, full_index):
526 def create_test_index(repo_location, config, full_index):
527 """
527 """
528 Makes default test index
528 Makes default test index
529
529
530 :param config: test config
530 :param config: test config
531 :param full_index:
531 :param full_index:
532 """
532 """
533
533
534 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
534 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
535 from rhodecode.lib.pidlock import DaemonLock, LockHeld
535 from rhodecode.lib.pidlock import DaemonLock, LockHeld
536
536
537 repo_location = repo_location
537 repo_location = repo_location
538
538
539 index_location = os.path.join(config['app_conf']['index_dir'])
539 index_location = os.path.join(config['app_conf']['index_dir'])
540 if not os.path.exists(index_location):
540 if not os.path.exists(index_location):
541 os.makedirs(index_location)
541 os.makedirs(index_location)
542
542
543 try:
543 try:
544 l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
544 l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
545 WhooshIndexingDaemon(index_location=index_location,
545 WhooshIndexingDaemon(index_location=index_location,
546 repo_location=repo_location)\
546 repo_location=repo_location)\
547 .run(full_index=full_index)
547 .run(full_index=full_index)
548 l.release()
548 l.release()
549 except LockHeld:
549 except LockHeld:
550 pass
550 pass
551
551
552
552
553 def create_test_env(repos_test_path, config):
553 def create_test_env(repos_test_path, config):
554 """
554 """
555 Makes a fresh database and
555 Makes a fresh database and
556 installs test repositories into a tmp dir
556 installs test repositories into a tmp dir
557 """
557 """
558 from rhodecode.lib.db_manage import DbManage
558 from rhodecode.lib.db_manage import DbManage
559 from rhodecode.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH
559 from rhodecode.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH
560
560
561 # PART ONE create db
561 # PART ONE create db
562 dbconf = config['sqlalchemy.db1.url']
562 dbconf = config['sqlalchemy.db1.url']
563 log.debug('making test db %s' % dbconf)
563 log.debug('making test db %s' % dbconf)
564
564
565 # create test dir if it doesn't exist
565 # create test dir if it doesn't exist
566 if not os.path.isdir(repos_test_path):
566 if not os.path.isdir(repos_test_path):
567 log.debug('Creating testdir %s' % repos_test_path)
567 log.debug('Creating testdir %s' % repos_test_path)
568 os.makedirs(repos_test_path)
568 os.makedirs(repos_test_path)
569
569
570 dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
570 dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
571 tests=True)
571 tests=True)
572 dbmanage.create_tables(override=True)
572 dbmanage.create_tables(override=True)
573 dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
573 dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
574 dbmanage.create_default_user()
574 dbmanage.create_default_user()
575 dbmanage.admin_prompt()
575 dbmanage.admin_prompt()
576 dbmanage.create_permissions()
576 dbmanage.create_permissions()
577 dbmanage.populate_default_permissions()
577 dbmanage.populate_default_permissions()
578 Session().commit()
578 Session().commit()
579 # PART TWO make test repo
579 # PART TWO make test repo
580 log.debug('making test vcs repositories')
580 log.debug('making test vcs repositories')
581
581
582 idx_path = config['app_conf']['index_dir']
582 idx_path = config['app_conf']['index_dir']
583 data_path = config['app_conf']['cache_dir']
583 data_path = config['app_conf']['cache_dir']
584
584
585 #clean index and data
585 #clean index and data
586 if idx_path and os.path.exists(idx_path):
586 if idx_path and os.path.exists(idx_path):
587 log.debug('remove %s' % idx_path)
587 log.debug('remove %s' % idx_path)
588 shutil.rmtree(idx_path)
588 shutil.rmtree(idx_path)
589
589
590 if data_path and os.path.exists(data_path):
590 if data_path and os.path.exists(data_path):
591 log.debug('remove %s' % data_path)
591 log.debug('remove %s' % data_path)
592 shutil.rmtree(data_path)
592 shutil.rmtree(data_path)
593
593
594 #CREATE DEFAULT TEST REPOS
594 #CREATE DEFAULT TEST REPOS
595 cur_dir = dn(dn(abspath(__file__)))
595 cur_dir = dn(dn(abspath(__file__)))
596 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
596 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
597 tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
597 tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
598 tar.close()
598 tar.close()
599
599
600 cur_dir = dn(dn(abspath(__file__)))
600 cur_dir = dn(dn(abspath(__file__)))
601 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_git.tar.gz"))
601 tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_git.tar.gz"))
602 tar.extractall(jn(TESTS_TMP_PATH, GIT_REPO))
602 tar.extractall(jn(TESTS_TMP_PATH, GIT_REPO))
603 tar.close()
603 tar.close()
604
604
605 #LOAD VCS test stuff
605 #LOAD VCS test stuff
606 from rhodecode.tests.vcs import setup_package
606 from rhodecode.tests.vcs import setup_package
607 setup_package()
607 setup_package()
608
608
609
609
610 #==============================================================================
610 #==============================================================================
611 # PASTER COMMANDS
611 # PASTER COMMANDS
612 #==============================================================================
612 #==============================================================================
613 class BasePasterCommand(Command):
613 class BasePasterCommand(Command):
614 """
614 """
615 Abstract Base Class for paster commands.
615 Abstract Base Class for paster commands.
616
616
617 The celery commands are somewhat aggressive about loading
617 The celery commands are somewhat aggressive about loading
618 celery.conf, and since our module sets the `CELERY_LOADER`
618 celery.conf, and since our module sets the `CELERY_LOADER`
619 environment variable to our loader, we have to bootstrap a bit and
619 environment variable to our loader, we have to bootstrap a bit and
620 make sure we've had a chance to load the pylons config off of the
620 make sure we've had a chance to load the pylons config off of the
621 command line, otherwise everything fails.
621 command line, otherwise everything fails.
622 """
622 """
623 min_args = 1
623 min_args = 1
624 min_args_error = "Please provide a paster config file as an argument."
624 min_args_error = "Please provide a paster config file as an argument."
625 takes_config_file = 1
625 takes_config_file = 1
626 requires_config_file = True
626 requires_config_file = True
627
627
628 def notify_msg(self, msg, log=False):
628 def notify_msg(self, msg, log=False):
629 """Make a notification to user, additionally if logger is passed
629 """Make a notification to user, additionally if logger is passed
630 it logs this action using given logger
630 it logs this action using given logger
631
631
632 :param msg: message that will be printed to user
632 :param msg: message that will be printed to user
633 :param log: logging instance, to use to additionally log this message
633 :param log: logging instance, to use to additionally log this message
634
634
635 """
635 """
636 if log and isinstance(log, logging):
636 if log and isinstance(log, logging):
637 log(msg)
637 log(msg)
638
638
639 def run(self, args):
639 def run(self, args):
640 """
640 """
641 Overrides Command.run
641 Overrides Command.run
642
642
643 Checks for a config file argument and loads it.
643 Checks for a config file argument and loads it.
644 """
644 """
645 if len(args) < self.min_args:
645 if len(args) < self.min_args:
646 raise BadCommand(
646 raise BadCommand(
647 self.min_args_error % {'min_args': self.min_args,
647 self.min_args_error % {'min_args': self.min_args,
648 'actual_args': len(args)})
648 'actual_args': len(args)})
649
649
650 # Decrement because we're going to lob off the first argument.
650 # Decrement because we're going to lob off the first argument.
651 # @@ This is hacky
651 # @@ This is hacky
652 self.min_args -= 1
652 self.min_args -= 1
653 self.bootstrap_config(args[0])
653 self.bootstrap_config(args[0])
654 self.update_parser()
654 self.update_parser()
655 return super(BasePasterCommand, self).run(args[1:])
655 return super(BasePasterCommand, self).run(args[1:])
656
656
657 def update_parser(self):
657 def update_parser(self):
658 """
658 """
659 Abstract method. Allows for the class's parser to be updated
659 Abstract method. Allows for the class's parser to be updated
660 before the superclass's `run` method is called. Necessary to
660 before the superclass's `run` method is called. Necessary to
661 allow options/arguments to be passed through to the underlying
661 allow options/arguments to be passed through to the underlying
662 celery command.
662 celery command.
663 """
663 """
664 raise NotImplementedError("Abstract Method.")
664 raise NotImplementedError("Abstract Method.")
665
665
666 def bootstrap_config(self, conf):
666 def bootstrap_config(self, conf):
667 """
667 """
668 Loads the pylons configuration.
668 Loads the pylons configuration.
669 """
669 """
670 from pylons import config as pylonsconfig
670 from pylons import config as pylonsconfig
671
671
672 self.path_to_ini_file = os.path.realpath(conf)
672 self.path_to_ini_file = os.path.realpath(conf)
673 conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
673 conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
674 pylonsconfig.init_app(conf.global_conf, conf.local_conf)
674 pylonsconfig.init_app(conf.global_conf, conf.local_conf)
675
675
676
676
677 def check_git_version():
677 def check_git_version():
678 """
678 """
679 Checks what version of git is installed on the system, and issues a warning
679 Checks what version of git is installed on the system, and issues a warning
680 if it's to old for RhodeCode to properly work.
680 if it's too old for RhodeCode to properly work.
681 """
681 """
682 import subprocess
682 import subprocess
683 from distutils.version import StrictVersion
683 from distutils.version import StrictVersion
684 from rhodecode import BACKENDS
684 from rhodecode import BACKENDS
685
685
686 p = subprocess.Popen('git --version', shell=True,
686 p = subprocess.Popen('git --version', shell=True,
687 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
687 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
688 stdout, stderr = p.communicate()
688 stdout, stderr = p.communicate()
689 ver = (stdout.split(' ')[-1] or '').strip() or '0.0.0'
689 ver = (stdout.split(' ')[-1] or '').strip() or '0.0.0'
690 try:
690 try:
691 _ver = StrictVersion(ver)
691 _ver = StrictVersion(ver)
692 except:
692 except:
693 _ver = StrictVersion('0.0.0')
693 _ver = StrictVersion('0.0.0')
694 stderr = traceback.format_exc()
694 stderr = traceback.format_exc()
695
695
696 req_ver = '1.7.4'
696 req_ver = '1.7.4'
697 to_old_git = False
697 to_old_git = False
698 if _ver <= StrictVersion(req_ver):
698 if _ver <= StrictVersion(req_ver):
699 to_old_git = True
699 to_old_git = True
700
700
701 if 'git' in BACKENDS:
701 if 'git' in BACKENDS:
702 log.debug('GIT version detected: %s' % stdout)
702 log.debug('GIT version detected: %s' % stdout)
703 if stderr:
703 if stderr:
704 log.warning('Unable to detect git version, original error was: %r' % stderr)
704 log.warning('Unable to detect git version, original error was: %r' % stderr)
705 elif to_old_git:
705 elif to_old_git:
706 log.warning('RhodeCode detected git version %s, which is to old '
706 log.warning('RhodeCode detected git version %s, which is too old '
707 'for the system to function properly make sure '
707 'for the system to function properly. Make sure '
708 'it is at least in version %s' % (ver, req_ver))
708 'it is at least in version %s' % (ver, req_ver))
709 return _ver
\ No newline at end of file
709 return _ver