repo scan should skip directories starting with '.'...
marcink -
r3228:ba2e2514 beta
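As a quick illustration (not part of the commit itself), here is a minimal standalone sketch of the filtering rule this change adds to the repository scan; the helper name and directory layout are hypothetical:

import os
import re

# same pattern utils.py uses to recognize repos scheduled for removal
REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')

def dirs_to_scan(root, skip_removed_repos=True):
    """Yield directory names under ``root`` that the repo scan should visit."""
    for dirpath in os.listdir(root):
        if os.path.isfile(os.path.join(root, dirpath)):
            continue
        # repos renamed to rm__<timestamp>__<name> are pending removal
        if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
            continue
        # directories starting with '.' (e.g. .ssh, .cache) are now skipped
        if dirpath.startswith('.'):
            continue
        yield dirpath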
@@ -1,754 +1,767 b''
# -*- coding: utf-8 -*-
"""
    rhodecode.lib.utils
    ~~~~~~~~~~~~~~~~~~~

    Utilities library for RhodeCode

    :created_on: Apr 18, 2010
    :author: marcink
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
    :license: GPLv3, see COPYING for more details.
"""
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

import os
import re
import logging
import datetime
import traceback
import paste
import beaker
import tarfile
import shutil
import decorator
import warnings
from os.path import abspath
from os.path import dirname as dn, join as jn

from paste.script.command import Command, BadCommand

from mercurial import ui, config

from webhelpers.text import collapse, remove_formatting, strip_tags

from rhodecode.lib.vcs import get_backend
from rhodecode.lib.vcs.backends.base import BaseChangeset
from rhodecode.lib.vcs.utils.lazy import LazyProperty
from rhodecode.lib.vcs.utils.helpers import get_scm
from rhodecode.lib.vcs.exceptions import VCSError

from rhodecode.lib.caching_query import FromCache

from rhodecode.model import meta
from rhodecode.model.db import Repository, User, RhodeCodeUi, \
    UserLog, RepoGroup, RhodeCodeSetting, CacheInvalidation
from rhodecode.model.meta import Session
from rhodecode.model.repos_group import ReposGroupModel
from rhodecode.lib.utils2 import safe_str, safe_unicode
from rhodecode.lib.vcs.utils.fakemod import create_module

log = logging.getLogger(__name__)

REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')


def recursive_replace(str_, replace=' '):
    """
    Recursively collapse repeated occurrences of the given character
    into a single instance

    :param str_: given string
    :param replace: char to find and replace multiple instances of

    Examples::
        >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
        'Mighty-Mighty-Bo-sstones'
    """

    if str_.find(replace * 2) == -1:
        return str_
    else:
        str_ = str_.replace(replace * 2, replace)
        return recursive_replace(str_, replace)


def repo_name_slug(value):
    """
    Return a slug (URL-safe name) for the given repository name.
    This function is called on each repository creation/modification
    to prevent bad names from entering the system.
    """

    slug = remove_formatting(value)
    slug = strip_tags(slug)

    for c in """`?=[]\;'"<>,/~!@#$%^&*()+{}|: """:
        slug = slug.replace(c, '-')
    slug = recursive_replace(slug, '-')
    slug = collapse(slug, '-')
    return slug


def get_repo_slug(request):
    _repo = request.environ['pylons.routes_dict'].get('repo_name')
    if _repo:
        _repo = _repo.rstrip('/')
    return _repo


def get_repos_group_slug(request):
    _group = request.environ['pylons.routes_dict'].get('group_name')
    if _group:
        _group = _group.rstrip('/')
    return _group


def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
    """
    Action logger for various actions made by users

    :param user: user that made this action, can be a unique username string or
        an object containing a user_id attribute
    :param action: action to log, should be one of the predefined unique actions
        for easy translations
    :param repo: string name of repository or object containing repo_id,
        that the action was made on
    :param ipaddr: optional IP address from which the action was made
    :param sa: optional sqlalchemy session

    """

    if not sa:
        sa = meta.Session()

    try:
        if hasattr(user, 'user_id'):
            user_obj = User.get(user.user_id)
        elif isinstance(user, basestring):
            user_obj = User.get_by_username(user)
        else:
            raise Exception('You have to provide a user object or a username')

        if hasattr(repo, 'repo_id'):
            repo_obj = Repository.get(repo.repo_id)
            repo_name = repo_obj.repo_name
        elif isinstance(repo, basestring):
            repo_name = repo.lstrip('/')
            repo_obj = Repository.get_by_repo_name(repo_name)
        else:
            repo_obj = None
            repo_name = ''

        user_log = UserLog()
        user_log.user_id = user_obj.user_id
        user_log.username = user_obj.username
        user_log.action = safe_unicode(action)

        user_log.repository = repo_obj
        user_log.repository_name = repo_name

        user_log.action_date = datetime.datetime.now()
        user_log.user_ip = ipaddr
        sa.add(user_log)

        log.info('Logging action %s on %s by %s' %
                 (action, safe_unicode(repo), user_obj))
        if commit:
            sa.commit()
    except:
        log.error(traceback.format_exc())
        raise


-def get_repos(path, recursive=False):
+def get_repos(path, recursive=False, skip_removed_repos=True):
    """
    Scans given path for repos and returns (name, (type, path)) tuples

    :param path: path to scan for repositories
    :param recursive: recursive search and return names with subdirs in front
    """

    # remove ending slash for better results
    path = path.rstrip(os.sep)
-    log.debug('now scanning in %s location recursive:%s...' % (path, recursive))

    def _get_repos(p):
        if not os.access(p, os.W_OK):
            return
        for dirpath in os.listdir(p):
            if os.path.isfile(os.path.join(p, dirpath)):
                continue
            cur_path = os.path.join(p, dirpath)
+
+            # skip removed repos
+            if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
+                continue
+
+            # skip .<something> dirs
+            if dirpath.startswith('.'):
+                continue
+
            try:
                scm_info = get_scm(cur_path)
                yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
            except VCSError:
                if not recursive:
                    continue
                # check if this dir contains other repos for recursive scan
                rec_path = os.path.join(p, dirpath)
                if os.path.isdir(rec_path):
                    for inner_scm in _get_repos(rec_path):
                        yield inner_scm

    return _get_repos(path)

+# alias for backward compat
+get_filesystem_repos = get_repos


def is_valid_repo(repo_name, base_path, scm=None):
    """
    Returns True if given path is a valid repository, False otherwise.
    If the scm param is given, also check that the detected scm matches
    the expected one.

    :param repo_name:
    :param base_path:
    :param scm:

    :return True: if given path is a valid repository
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_name))

    try:
        scm_ = get_scm(full_path)
        if scm:
            return scm_[0] == scm
        return True
    except VCSError:
        return False


def is_valid_repos_group(repos_group_name, base_path):
    """
    Returns True if given path is a repos group, False otherwise

    :param repos_group_name:
    :param base_path:
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repos_group_name))

    # check if it's not a repo
    if is_valid_repo(repos_group_name, base_path):
        return False

    try:
        # we need to check bare git repos at higher level
        # since we might match branches/hooks/info/objects or possible
        # other things inside bare git repo
        get_scm(os.path.dirname(full_path))
        return False
    except VCSError:
        pass

    # check if it's a valid path
    if os.path.isdir(full_path):
        return True

    return False


def ask_ok(prompt, retries=4, complaint='Yes or no please!'):
    while True:
        ok = raw_input(prompt)
        if ok in ('y', 'ye', 'yes'):
            return True
        if ok in ('n', 'no', 'nop', 'nope'):
            return False
        retries = retries - 1
        if retries < 0:
            raise IOError
        print complaint

# propagated from mercurial documentation
ui_sections = ['alias', 'auth',
               'decode/encode', 'defaults',
               'diff', 'email',
               'extensions', 'format',
               'merge-patterns', 'merge-tools',
               'hooks', 'http_proxy',
               'smtp', 'patch',
               'paths', 'profiling',
               'server', 'trusted',
               'ui', 'web', ]


def make_ui(read_from='file', path=None, checkpaths=True, clear_session=True):
    """
    A function that will read python rc files or the database
    and make a mercurial ui object from the read options

    :param path: path to mercurial config file
    :param checkpaths: check the path
    :param read_from: read from 'file' or 'db'
    """

    baseui = ui.ui()

    # clean the baseui object
    baseui._ocfg = config.config()
    baseui._ucfg = config.config()
    baseui._tcfg = config.config()

    if read_from == 'file':
        if not os.path.isfile(path):
            log.debug('hgrc file is not present at %s, skipping...' % path)
            return False
        log.debug('reading hgrc from %s' % path)
        cfg = config.config()
        cfg.read(path)
        for section in ui_sections:
            for k, v in cfg.items(section):
                log.debug('settings ui from file: [%s] %s=%s' % (section, k, v))
                baseui.setconfig(safe_str(section), safe_str(k), safe_str(v))

    elif read_from == 'db':
        sa = meta.Session()
        ret = sa.query(RhodeCodeUi)\
            .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
            .all()

        hg_ui = ret
        for ui_ in hg_ui:
            if ui_.ui_active:
                log.debug('settings ui from db: [%s] %s=%s', ui_.ui_section,
                          ui_.ui_key, ui_.ui_value)
                baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
                                 safe_str(ui_.ui_value))
                if ui_.ui_key == 'push_ssl':
                    # force set push_ssl requirement to False, rhodecode
                    # handles that
                    baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
                                     False)
        if clear_session:
            meta.Session.remove()
    return baseui


def set_rhodecode_config(config):
    """
    Updates pylons config with new settings from database

    :param config:
    """
    hgsettings = RhodeCodeSetting.get_app_settings()

    for k, v in hgsettings.items():
        config[k] = v


def invalidate_cache(cache_key, *args):
    """
    Puts cache invalidation task into db for
    further global cache invalidation
    """

    from rhodecode.model.scm import ScmModel

    if cache_key.startswith('get_repo_cached_'):
        name = cache_key.split('get_repo_cached_')[-1]
        ScmModel().mark_for_invalidation(name)


def map_groups(path):
    """
    Given a full path to a repository, create all nested groups that this
    repo is inside. This function creates parent-child relationships between
    groups and creates default perms for all new groups.

    :param path: full path to repository
    """
    sa = meta.Session()
    groups = path.split(Repository.url_sep())
    parent = None
    group = None

    # last element is repo in nested groups structure
    groups = groups[:-1]
    rgm = ReposGroupModel(sa)
    for lvl, group_name in enumerate(groups):
        group_name = '/'.join(groups[:lvl] + [group_name])
        group = RepoGroup.get_by_group_name(group_name)
        desc = '%s group' % group_name

        # skip folders that are now removed repos
        if REMOVED_REPO_PAT.match(group_name):
            break

        if group is None:
            log.debug('creating group level: %s group_name: %s' % (lvl,
                                                                   group_name))
            group = RepoGroup(group_name, parent)
            group.group_description = desc
            sa.add(group)
            rgm._create_default_perms(group)
            sa.flush()
        parent = group
    return group


def repo2db_mapper(initial_repo_list, remove_obsolete=False,
                   install_git_hook=False):
    """
    Maps all repos given in initial_repo_list; non existing repositories
    are created. If remove_obsolete is True it also checks for db entries
    that are not in initial_repo_list and removes them.

    :param initial_repo_list: list of repositories found by scanning methods
    :param remove_obsolete: check for obsolete entries in database
    :param install_git_hook: if this is True, also check and install githook
        for a repo if missing
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.scm import ScmModel
    sa = meta.Session()
    rm = RepoModel()
    user = sa.query(User).filter(User.admin == True).first()
    if user is None:
        raise Exception('Missing administrative account!')
    added = []

    # # clear cache keys
    # log.debug("Clearing cache keys now...")
    # CacheInvalidation.clear_cache()
    # sa.commit()

    ## creation defaults
    defs = RhodeCodeSetting.get_default_repo_settings(strip_prefix=True)
    enable_statistics = defs.get('repo_enable_statistics')
    enable_locking = defs.get('repo_enable_locking')
    enable_downloads = defs.get('repo_enable_downloads')
    private = defs.get('repo_private')

    for name, repo in initial_repo_list.items():
        group = map_groups(name)
        db_repo = rm.get_by_repo_name(name)
        # found repo that is on filesystem not in RhodeCode database
        if not db_repo:
            log.info('repository %s not found, creating now' % name)
            added.append(name)
            desc = (repo.description
                    if repo.description != 'unknown'
                    else '%s repository' % name)

            new_repo = rm.create_repo(
                repo_name=name,
                repo_type=repo.alias,
                description=desc,
                repos_group=getattr(group, 'group_id', None),
                owner=user,
                just_db=True,
                enable_locking=enable_locking,
                enable_downloads=enable_downloads,
                enable_statistics=enable_statistics,
                private=private
            )
            # we added that repo just now, and make sure it has githook
            # installed
            if new_repo.repo_type == 'git':
                ScmModel().install_git_hook(new_repo.scm_instance)
            new_repo.update_changeset_cache()
        elif install_git_hook:
            if db_repo.repo_type == 'git':
                ScmModel().install_git_hook(db_repo.scm_instance)
        # during startup install all cache keys for all repositories in the
        # system, this will register all repos and multiple instances
        key, _prefix, _org_key = CacheInvalidation._get_key(name)
        CacheInvalidation.invalidate(name)
        log.debug("Creating a cache key for %s, instance_id %s"
                  % (name, _prefix or 'unknown'))

    sa.commit()
    removed = []
    if remove_obsolete:
        # remove from database those repositories that are not in the filesystem
        for repo in sa.query(Repository).all():
            if repo.repo_name not in initial_repo_list.keys():
                log.debug("Removing non-existing repository found in db `%s`" %
                          repo.repo_name)
                try:
                    sa.delete(repo)
                    sa.commit()
                    removed.append(repo.repo_name)
                except:
                    # don't hold further removals on error
                    log.error(traceback.format_exc())
                    sa.rollback()

    return added, removed


# set cache regions for beaker so celery can utilise it
def add_cache(settings):
    cache_settings = {'regions': None}
    for key in settings.keys():
        for prefix in ['beaker.cache.', 'cache.']:
            if key.startswith(prefix):
                name = key.split(prefix)[1].strip()
                cache_settings[name] = settings[key].strip()
    if cache_settings['regions']:
        for region in cache_settings['regions'].split(','):
            region = region.strip()
            region_settings = {}
            for key, value in cache_settings.items():
                if key.startswith(region):
                    region_settings[key.split('.')[1]] = value
            region_settings['expire'] = int(region_settings.get('expire',
                                                                60))
            region_settings.setdefault('lock_dir',
                                       cache_settings.get('lock_dir'))
            region_settings.setdefault('data_dir',
                                       cache_settings.get('data_dir'))

            if 'type' not in region_settings:
                region_settings['type'] = cache_settings.get('type',
                                                             'memory')
            beaker.cache.cache_regions[region] = region_settings


def load_rcextensions(root_path):
    import rhodecode
    from rhodecode.config import conf

    path = os.path.join(root_path, 'rcextensions', '__init__.py')
    if os.path.isfile(path):
        rcext = create_module('rc', path)
        EXT = rhodecode.EXTENSIONS = rcext
        log.debug('Found rcextensions now loading %s...' % rcext)

        # Additional mappings that are not present in the pygments lexers
        conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))

        # OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)

        if getattr(EXT, 'INDEX_EXTENSIONS', []) != []:
            log.debug('settings custom INDEX_EXTENSIONS')
            conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])

        # ADDITIONAL MAPPINGS
        log.debug('adding extra into INDEX_EXTENSIONS')
        conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))


#==============================================================================
# TEST FUNCTIONS AND CREATORS
#==============================================================================
def create_test_index(repo_location, config, full_index):
    """
    Makes default test index

    :param config: test config
    :param full_index:
    """

    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
    from rhodecode.lib.pidlock import DaemonLock, LockHeld

    repo_location = repo_location

    index_location = os.path.join(config['app_conf']['index_dir'])
    if not os.path.exists(index_location):
        os.makedirs(index_location)

    try:
        l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
        WhooshIndexingDaemon(index_location=index_location,
                             repo_location=repo_location)\
            .run(full_index=full_index)
        l.release()
    except LockHeld:
        pass


def create_test_env(repos_test_path, config):
    """
    Makes a fresh database and
    install test repository into tmp dir
    """
    from rhodecode.lib.db_manage import DbManage
    from rhodecode.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH

    # PART ONE create db
    dbconf = config['sqlalchemy.db1.url']
    log.debug('making test db %s' % dbconf)

    # create test dir if it doesn't exist
    if not os.path.isdir(repos_test_path):
        log.debug('Creating testdir %s' % repos_test_path)
        os.makedirs(repos_test_path)

    dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
                        tests=True)
    dbmanage.create_tables(override=True)
    dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
    dbmanage.create_default_user()
    dbmanage.admin_prompt()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()
    Session().commit()
    # PART TWO make test repo
    log.debug('making test vcs repositories')

    idx_path = config['app_conf']['index_dir']
    data_path = config['app_conf']['cache_dir']

    # clean index and data
    if idx_path and os.path.exists(idx_path):
        log.debug('remove %s' % idx_path)
        shutil.rmtree(idx_path)

    if data_path and os.path.exists(data_path):
        log.debug('remove %s' % data_path)
        shutil.rmtree(data_path)

    # CREATE DEFAULT TEST REPOS
    cur_dir = dn(dn(abspath(__file__)))
    tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
    tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
    tar.close()

    cur_dir = dn(dn(abspath(__file__)))
    tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_git.tar.gz"))
    tar.extractall(jn(TESTS_TMP_PATH, GIT_REPO))
    tar.close()

    # LOAD VCS test stuff
    from rhodecode.tests.vcs import setup_package
    setup_package()


#==============================================================================
# PASTER COMMANDS
#==============================================================================
class BasePasterCommand(Command):
    """
    Abstract Base Class for paster commands.

    The celery commands are somewhat aggressive about loading
    celery.conf, and since our module sets the `CELERY_LOADER`
    environment variable to our loader, we have to bootstrap a bit and
    make sure we've had a chance to load the pylons config off of the
    command line, otherwise everything fails.
    """
    min_args = 1
    min_args_error = "Please provide a paster config file as an argument."
    takes_config_file = 1
    requires_config_file = True

    def notify_msg(self, msg, log=False):
        """Make a notification to user, additionally if logger is passed
        it logs this action using given logger

        :param msg: message that will be printed to user
        :param log: logging instance, to use to additionally log this message

        """
        if log and isinstance(log, logging):
            log(msg)

    def run(self, args):
        """
        Overrides Command.run

        Checks for a config file argument and loads it.
        """
        if len(args) < self.min_args:
            raise BadCommand(
                self.min_args_error % {'min_args': self.min_args,
                                       'actual_args': len(args)})

        # Decrement because we're going to lob off the first argument.
        # @@ This is hacky
        self.min_args -= 1
        self.bootstrap_config(args[0])
        self.update_parser()
        return super(BasePasterCommand, self).run(args[1:])

    def update_parser(self):
        """
        Abstract method. Allows for the class's parser to be updated
        before the superclass's `run` method is called. Necessary to
        allow options/arguments to be passed through to the underlying
        celery command.
        """
        raise NotImplementedError("Abstract Method.")

    def bootstrap_config(self, conf):
        """
        Loads the pylons configuration.
        """
        from pylons import config as pylonsconfig

        self.path_to_ini_file = os.path.realpath(conf)
        conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
        pylonsconfig.init_app(conf.global_conf, conf.local_conf)


def check_git_version():
    """
    Checks what version of git is installed on the system, and issues a warning
    if it's too old for RhodeCode to work properly.
    """
    import subprocess
    from distutils.version import StrictVersion
    from rhodecode import BACKENDS

    p = subprocess.Popen('git --version', shell=True,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = p.communicate()
    ver = (stdout.split(' ')[-1] or '').strip() or '0.0.0'
    if len(ver.split('.')) > 3:
        # StrictVersion needs to be only 3 element type
        ver = '.'.join(ver.split('.')[:3])
    try:
        _ver = StrictVersion(ver)
    except:
        _ver = StrictVersion('0.0.0')
        stderr = traceback.format_exc()

    req_ver = '1.7.4'
    to_old_git = False
    if _ver < StrictVersion(req_ver):
        to_old_git = True

    if 'git' in BACKENDS:
        log.debug('GIT version detected: %s' % stdout)
        if stderr:
            log.warning('Unable to detect git version, original error was: %r' % stderr)
        elif to_old_git:
            log.warning('RhodeCode detected git version %s, which is too old '
                        'for the system to function properly. Make sure '
                        'its version is at least %s' % (ver, req_ver))
    return _ver


@decorator.decorator
def jsonify(func, *args, **kwargs):
    """Action decorator that formats output for JSON

    Given a function that will return content, this decorator will turn
    the result into JSON, with a content-type of 'application/json' and
    output it.

    """
    from pylons.decorators.util import get_pylons
    from rhodecode.lib.ext_json import json
    pylons = get_pylons(args)
    pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8'
    data = func(*args, **kwargs)
    if isinstance(data, (list, tuple)):
        msg = "JSON responses with Array envelopes are susceptible to " \
              "cross-site data leak attacks, see " \
              "http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
        warnings.warn(msg, Warning, 2)
        log.warning(msg)
    log.debug("Returning JSON wrapped action output")
    return json.dumps(data, encoding='utf-8')
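For reference, a brief usage sketch of the scanner defined above; the repositories root below is a hypothetical example value and error handling is omitted:

from rhodecode.lib.utils import get_filesystem_repos

repos_path = '/srv/repos'  # hypothetical repositories root

# yields (relative_name, (scm_alias, full_path)) tuples; dot-directories and
# rm__<timestamp>__ entries are skipped by default (skip_removed_repos=True)
for name, (alias, full_path) in get_filesystem_repos(repos_path, recursive=True):
    print name, alias, full_path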
@@ -1,240 +1,241 b''
1 """
1 """
2 Utitlites aimed to help achieve mostly basic tasks.
2 Utitlites aimed to help achieve mostly basic tasks.
3 """
3 """
4 from __future__ import division
4 from __future__ import division
5
5
6 import re
6 import re
7 import time
7 import time
8 import datetime
8 import datetime
9 import os.path
9 import os.path
10 from subprocess import Popen, PIPE
10 from subprocess import Popen, PIPE
11 from rhodecode.lib.vcs.exceptions import VCSError
11 from rhodecode.lib.vcs.exceptions import VCSError
12 from rhodecode.lib.vcs.exceptions import RepositoryError
12 from rhodecode.lib.vcs.exceptions import RepositoryError
13 from rhodecode.lib.vcs.utils.paths import abspath
13 from rhodecode.lib.vcs.utils.paths import abspath
14
14
15 ALIASES = ['hg', 'git']
15 ALIASES = ['hg', 'git']
16
16
17
17
18 def get_scm(path, search_up=False, explicit_alias=None):
18 def get_scm(path, search_up=False, explicit_alias=None):
19 """
19 """
20 Returns one of alias from ``ALIASES`` (in order of precedence same as
20 Returns one of alias from ``ALIASES`` (in order of precedence same as
21 shortcuts given in ``ALIASES``) and top working dir path for the given
21 shortcuts given in ``ALIASES``) and top working dir path for the given
22 argument. If no scm-specific directory is found or more than one scm is
22 argument. If no scm-specific directory is found or more than one scm is
23 found at that directory, ``VCSError`` is raised.
23 found at that directory, ``VCSError`` is raised.
24
24
25 :param search_up: if set to ``True``, this function would try to
25 :param search_up: if set to ``True``, this function would try to
26 move up to parent directory every time no scm is recognized for the
26 move up to parent directory every time no scm is recognized for the
27 currently checked path. Default: ``False``.
27 currently checked path. Default: ``False``.
28 :param explicit_alias: can be one of available backend aliases, when given
28 :param explicit_alias: can be one of available backend aliases, when given
29 it will return given explicit alias in repositories under more than one
29 it will return given explicit alias in repositories under more than one
30 version control, if explicit_alias is different than found it will raise
30 version control, if explicit_alias is different than found it will raise
31 VCSError
31 VCSError
32 """
32 """
33 if not os.path.isdir(path):
33 if not os.path.isdir(path):
34 raise VCSError("Given path %s is not a directory" % path)
34 raise VCSError("Given path %s is not a directory" % path)
35
35
36 def get_scms(path):
36 def get_scms(path):
37 return [(scm, path) for scm in get_scms_for_path(path)]
37 return [(scm, path) for scm in get_scms_for_path(path)]
38
38
39 found_scms = get_scms(path)
39 found_scms = get_scms(path)
40 while not found_scms and search_up:
40 while not found_scms and search_up:
41 newpath = abspath(path, '..')
41 newpath = abspath(path, '..')
42 if newpath == path:
42 if newpath == path:
43 break
43 break
44 path = newpath
44 path = newpath
45 found_scms = get_scms(path)
45 found_scms = get_scms(path)
46
46
47 if len(found_scms) > 1:
47 if len(found_scms) > 1:
48 for scm in found_scms:
48 for scm in found_scms:
49 if scm[0] == explicit_alias:
49 if scm[0] == explicit_alias:
50 return scm
50 return scm
51 raise VCSError('More than one [%s] scm found at given path %s'
51 raise VCSError('More than one [%s] scm found at given path %s'
52 % (','.join((x[0] for x in found_scms)), path))
52 % (','.join((x[0] for x in found_scms)), path))
53
53
54 if len(found_scms) is 0:
54 if len(found_scms) is 0:
55 raise VCSError('No scm found at given path %s' % path)
55 raise VCSError('No scm found at given path %s' % path)
56
56
57 return found_scms[0]
57 return found_scms[0]
58
58
59
59
60 def get_scms_for_path(path):
60 def get_scms_for_path(path):
61 """
61 """
62 Returns all scm's found at the given path. If no scm is recognized
62 Returns all scm's found at the given path. If no scm is recognized
63 - empty list is returned.
63 - empty list is returned.
64
64
65 :param path: path to directory which should be checked. May be callable.
65 :param path: path to directory which should be checked. May be callable.
66
66
67 :raises VCSError: if given ``path`` is not a directory
67 :raises VCSError: if given ``path`` is not a directory
68 """
68 """
69 from rhodecode.lib.vcs.backends import get_backend
69 from rhodecode.lib.vcs.backends import get_backend
70 if hasattr(path, '__call__'):
70 if hasattr(path, '__call__'):
71 path = path()
71 path = path()
72 if not os.path.isdir(path):
72 if not os.path.isdir(path):
73 raise VCSError("Given path %r is not a directory" % path)
73 raise VCSError("Given path %r is not a directory" % path)
74
74
75 result = []
75 result = []
76 for key in ALIASES:
76 for key in ALIASES:
77 dirname = os.path.join(path, '.' + key)
77 dirname = os.path.join(path, '.' + key)
78 if os.path.isdir(dirname):
78 if os.path.isdir(dirname):
79 result.append(key)
79 result.append(key)
80 continue
80 continue
81 dirname = os.path.join(path, 'rm__.' + key)
81 dirname = os.path.join(path, 'rm__.' + key)
82 if os.path.isdir(dirname):
82 if os.path.isdir(dirname):
83 return [None]
83 return result
84 # We still need to check if it's not bare repository as
84 # We still need to check if it's not bare repository as
85 # bare repos don't have working directories
85 # bare repos don't have working directories
86 try:
86 try:
87 get_backend(key)(path)
87 get_backend(key)(path)
88 result.append(key)
88 result.append(key)
89 continue
89 continue
90 except RepositoryError:
90 except RepositoryError:
91 # Wrong backend
91 # Wrong backend
92 pass
92 pass
93 except VCSError:
93 except VCSError:
94 # No backend at all
94 # No backend at all
95 pass
95 pass
96 return result
96 return result
97
97
98
98
99 def run_command(cmd, *args):
99 def run_command(cmd, *args):
100 """
100 """
101 Runs command on the system with given ``args``.
101 Runs command on the system with given ``args``.
102 """
102 """
103 command = ' '.join((cmd, args))
103 command = ' '.join((cmd, args))
104 p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE)
104 p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE)
105 stdout, stderr = p.communicate()
105 stdout, stderr = p.communicate()
106 return p.retcode, stdout, stderr
106 return p.retcode, stdout, stderr
107
107
108
108
109 def get_highlighted_code(name, code, type='terminal'):
109 def get_highlighted_code(name, code, type='terminal'):
110 """
110 """
111 If pygments are available on the system
111 If pygments are available on the system
112 then returned output is colored. Otherwise
112 then returned output is colored. Otherwise
113 unchanged content is returned.
113 unchanged content is returned.
114 """
114 """
115 import logging
115 import logging
116 try:
116 try:
117 import pygments
117 import pygments
118 pygments
118 pygments
119 except ImportError:
119 except ImportError:
120 return code
120 return code
121 from pygments import highlight
121 from pygments import highlight
122 from pygments.lexers import guess_lexer_for_filename, ClassNotFound
122 from pygments.lexers import guess_lexer_for_filename, ClassNotFound
123 from pygments.formatters import TerminalFormatter
123 from pygments.formatters import TerminalFormatter
124
124
125 try:
125 try:
126 lexer = guess_lexer_for_filename(name, code)
126 lexer = guess_lexer_for_filename(name, code)
127 formatter = TerminalFormatter()
127 formatter = TerminalFormatter()
128 content = highlight(code, lexer, formatter)
128 content = highlight(code, lexer, formatter)
129 except ClassNotFound:
129 except ClassNotFound:
130 logging.debug("Couldn't guess Lexer, will not use pygments.")
130 logging.debug("Couldn't guess Lexer, will not use pygments.")
131 content = code
131 content = code
132 return content
132 return content
133
133
134
134 def parse_changesets(text):
135 def parse_changesets(text):
135 """
136 """
136 Returns dictionary with *start*, *main* and *end* ids.
137 Returns dictionary with *start*, *main* and *end* ids.
137
138
138 Examples::
139 Examples::
139
140
140 >>> parse_changesets('aaabbb')
141 >>> parse_changesets('aaabbb')
141 {'start': None, 'main': 'aaabbb', 'end': None}
142 {'start': None, 'main': 'aaabbb', 'end': None}
142 >>> parse_changesets('aaabbb..cccddd')
143 >>> parse_changesets('aaabbb..cccddd')
143 {'start': 'aaabbb', 'main': None, 'end': 'cccddd'}
144 {'start': 'aaabbb', 'main': None, 'end': 'cccddd'}
144
145
145 """
146 """
146 text = text.strip()
147 text = text.strip()
147 CID_RE = r'[a-zA-Z0-9]+'
148 CID_RE = r'[a-zA-Z0-9]+'
148 if not '..' in text:
149 if not '..' in text:
149 m = re.match(r'^(?P<cid>%s)$' % CID_RE, text)
150 m = re.match(r'^(?P<cid>%s)$' % CID_RE, text)
150 if m:
151 if m:
151 return {
152 return {
152 'start': None,
153 'start': None,
153 'main': text,
154 'main': text,
154 'end': None,
155 'end': None,
155 }
156 }
156 else:
157 else:
157 RE = r'^(?P<start>%s)?\.{2,3}(?P<end>%s)?$' % (CID_RE, CID_RE)
158 RE = r'^(?P<start>%s)?\.{2,3}(?P<end>%s)?$' % (CID_RE, CID_RE)
158 m = re.match(RE, text)
159 m = re.match(RE, text)
159 if m:
160 if m:
160 result = m.groupdict()
161 result = m.groupdict()
161 result['main'] = None
162 result['main'] = None
162 return result
163 return result
163 raise ValueError("IDs not recognized")
164 raise ValueError("IDs not recognized")
164
165
165 def parse_datetime(text):
166 def parse_datetime(text):
166 """
167 """
167 Parses given text and returns ``datetime.datetime`` instance or raises
168 Parses given text and returns ``datetime.datetime`` instance or raises
168 ``ValueError``.
169 ``ValueError``.
169
170
170 :param text: string of desired date/datetime or something more verbose,
171 :param text: string of desired date/datetime or something more verbose,
171 like *yesterday*, *2weeks 3days*, etc.
172 like *yesterday*, *2weeks 3days*, etc.
172 """
173 """
173
174
174 text = text.strip().lower()
175 text = text.strip().lower()
175
176
176 INPUT_FORMATS = (
177 INPUT_FORMATS = (
177 '%Y-%m-%d %H:%M:%S',
178 '%Y-%m-%d %H:%M:%S',
178 '%Y-%m-%d %H:%M',
179 '%Y-%m-%d %H:%M',
179 '%Y-%m-%d',
180 '%Y-%m-%d',
180 '%m/%d/%Y %H:%M:%S',
181 '%m/%d/%Y %H:%M:%S',
181 '%m/%d/%Y %H:%M',
182 '%m/%d/%Y %H:%M',
182 '%m/%d/%Y',
183 '%m/%d/%Y',
183 '%m/%d/%y %H:%M:%S',
184 '%m/%d/%y %H:%M:%S',
184 '%m/%d/%y %H:%M',
185 '%m/%d/%y %H:%M',
185 '%m/%d/%y',
186 '%m/%d/%y',
186 )
187 )
187 for format in INPUT_FORMATS:
188 for format in INPUT_FORMATS:
188 try:
189 try:
189 return datetime.datetime(*time.strptime(text, format)[:6])
190 return datetime.datetime(*time.strptime(text, format)[:6])
190 except ValueError:
191 except ValueError:
191 pass
192 pass
192
193
193 # Try descriptive texts
194 # Try descriptive texts
194 if text == 'tomorrow':
195 if text == 'tomorrow':
195 future = datetime.datetime.now() + datetime.timedelta(days=1)
196 future = datetime.datetime.now() + datetime.timedelta(days=1)
196 args = future.timetuple()[:3] + (23, 59, 59)
197 args = future.timetuple()[:3] + (23, 59, 59)
197 return datetime.datetime(*args)
198 return datetime.datetime(*args)
198 elif text == 'today':
199 elif text == 'today':
199 return datetime.datetime(*datetime.datetime.today().timetuple()[:3])
200 return datetime.datetime(*datetime.datetime.today().timetuple()[:3])
200 elif text == 'now':
201 elif text == 'now':
201 return datetime.datetime.now()
202 return datetime.datetime.now()
202 elif text == 'yesterday':
203 elif text == 'yesterday':
203 past = datetime.datetime.now() - datetime.timedelta(days=1)
204 past = datetime.datetime.now() - datetime.timedelta(days=1)
204 return datetime.datetime(*past.timetuple()[:3])
205 return datetime.datetime(*past.timetuple()[:3])
205 else:
206 else:
206 days = 0
207 days = 0
207 matched = re.match(
208 matched = re.match(
208 r'^((?P<weeks>\d+) ?w(eeks?)?)? ?((?P<days>\d+) ?d(ays?)?)?$', text)
209 r'^((?P<weeks>\d+) ?w(eeks?)?)? ?((?P<days>\d+) ?d(ays?)?)?$', text)
209 if matched:
210 if matched:
210 groupdict = matched.groupdict()
211 groupdict = matched.groupdict()
211 if groupdict['days']:
212 if groupdict['days']:
212 days += int(matched.groupdict()['days'])
213 days += int(matched.groupdict()['days'])
213 if groupdict['weeks']:
214 if groupdict['weeks']:
214 days += int(matched.groupdict()['weeks']) * 7
215 days += int(matched.groupdict()['weeks']) * 7
215 past = datetime.datetime.now() - datetime.timedelta(days=days)
216 past = datetime.datetime.now() - datetime.timedelta(days=days)
216 return datetime.datetime(*past.timetuple()[:3])
217 return datetime.datetime(*past.timetuple()[:3])
217
218
218 raise ValueError('Wrong date: "%s"' % text)
219 raise ValueError('Wrong date: "%s"' % text)
219
220
220
221
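# Hedged usage sketch, not part of the changeset: it assumes the time module
# is imported at the top of this file (time.strptime is used above). Both an
# explicit date and a verbose relative value are accepted.
def _example_parse_datetime():  # hypothetical helper, for illustration only
    exact = parse_datetime('2012-11-15 10:30:00')
    relative = parse_datetime('2weeks 3days')  # 17 days back, at midnight
    return isinstance(exact, datetime.datetime) and \
        isinstance(relative, datetime.datetime)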
221 def get_dict_for_attrs(obj, attrs):
222 def get_dict_for_attrs(obj, attrs):
222 """
223 """
223 Returns a dictionary with each of the given attributes taken from ``obj``.
224 Returns a dictionary with each of the given attributes taken from ``obj``.
224 """
225 """
225 data = {}
226 data = {}
226 for attr in attrs:
227 for attr in attrs:
227 data[attr] = getattr(obj, attr)
228 data[attr] = getattr(obj, attr)
228 return data
229 return data
229
230
230
231
231 def get_total_seconds(timedelta):
232 def get_total_seconds(timedelta):
232 """
233 """
233 Backported for Python 2.5.
234 Backported for Python 2.5.
234
235
235 See http://docs.python.org/library/datetime.html.
236 See http://docs.python.org/library/datetime.html.
236 """
237 """
237 return ((timedelta.microseconds + (
238 return ((timedelta.microseconds + (
238 timedelta.seconds +
239 timedelta.seconds +
239 timedelta.days * 24 * 60 * 60
240 timedelta.days * 24 * 60 * 60
240 ) * 10**6) / 10**6)
241 ) * 10**6) / 10**6)
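# Hedged sanity check, not part of the changeset: on Python >= 2.7 the result
# matches timedelta.total_seconds() for whole-second deltas.
def _example_get_total_seconds():  # hypothetical helper, for illustration only
    delta = datetime.timedelta(days=1, seconds=30)
    return get_total_seconds(delta) == 24 * 60 * 60 + 30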
@@ -1,635 +1,631 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """
2 """
3 rhodecode.model.scm
3 rhodecode.model.scm
4 ~~~~~~~~~~~~~~~~~~~
4 ~~~~~~~~~~~~~~~~~~~
5
5
6 Scm model for RhodeCode
6 Scm model for RhodeCode
7
7
8 :created_on: Apr 9, 2010
8 :created_on: Apr 9, 2010
9 :author: marcink
9 :author: marcink
10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 :license: GPLv3, see COPYING for more details.
11 :license: GPLv3, see COPYING for more details.
12 """
12 """
13 # This program is free software: you can redistribute it and/or modify
13 # This program is free software: you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation, either version 3 of the License, or
15 # the Free Software Foundation, either version 3 of the License, or
16 # (at your option) any later version.
16 # (at your option) any later version.
17 #
17 #
18 # This program is distributed in the hope that it will be useful,
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
21 # GNU General Public License for more details.
22 #
22 #
23 # You should have received a copy of the GNU General Public License
23 # You should have received a copy of the GNU General Public License
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 from __future__ import with_statement
25 from __future__ import with_statement
26 import os
26 import os
27 import re
27 import re
28 import time
28 import time
29 import traceback
29 import traceback
30 import logging
30 import logging
31 import cStringIO
31 import cStringIO
32 import pkg_resources
32 import pkg_resources
33 from os.path import dirname as dn, join as jn
33 from os.path import dirname as dn, join as jn
34
34
35 from sqlalchemy import func
35 from sqlalchemy import func
36 from pylons.i18n.translation import _
36 from pylons.i18n.translation import _
37
37
38 import rhodecode
38 import rhodecode
39 from rhodecode.lib.vcs import get_backend
39 from rhodecode.lib.vcs import get_backend
40 from rhodecode.lib.vcs.exceptions import RepositoryError
40 from rhodecode.lib.vcs.exceptions import RepositoryError
41 from rhodecode.lib.vcs.utils.lazy import LazyProperty
41 from rhodecode.lib.vcs.utils.lazy import LazyProperty
42 from rhodecode.lib.vcs.nodes import FileNode
42 from rhodecode.lib.vcs.nodes import FileNode
43 from rhodecode.lib.vcs.backends.base import EmptyChangeset
43 from rhodecode.lib.vcs.backends.base import EmptyChangeset
44
44
45 from rhodecode import BACKENDS
45 from rhodecode import BACKENDS
46 from rhodecode.lib import helpers as h
46 from rhodecode.lib import helpers as h
47 from rhodecode.lib.utils2 import safe_str, safe_unicode
47 from rhodecode.lib.utils2 import safe_str, safe_unicode
48 from rhodecode.lib.auth import HasRepoPermissionAny, HasReposGroupPermissionAny
48 from rhodecode.lib.auth import HasRepoPermissionAny, HasReposGroupPermissionAny
49 from rhodecode.lib.utils import get_repos as get_filesystem_repos, make_ui, \
49 from rhodecode.lib.utils import get_filesystem_repos, make_ui, \
50 action_logger, REMOVED_REPO_PAT
50 action_logger, REMOVED_REPO_PAT
51 from rhodecode.model import BaseModel
51 from rhodecode.model import BaseModel
52 from rhodecode.model.db import Repository, RhodeCodeUi, CacheInvalidation, \
52 from rhodecode.model.db import Repository, RhodeCodeUi, CacheInvalidation, \
53 UserFollowing, UserLog, User, RepoGroup, PullRequest
53 UserFollowing, UserLog, User, RepoGroup, PullRequest
54
54
55 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
56
56
57
57
58 class UserTemp(object):
58 class UserTemp(object):
59 def __init__(self, user_id):
59 def __init__(self, user_id):
60 self.user_id = user_id
60 self.user_id = user_id
61
61
62 def __repr__(self):
62 def __repr__(self):
63 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
63 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
64
64
65
65
66 class RepoTemp(object):
66 class RepoTemp(object):
67 def __init__(self, repo_id):
67 def __init__(self, repo_id):
68 self.repo_id = repo_id
68 self.repo_id = repo_id
69
69
70 def __repr__(self):
70 def __repr__(self):
71 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
71 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
72
72
73
73
74 class CachedRepoList(object):
74 class CachedRepoList(object):
75 """
75 """
76 Cached repo list; uses an in-memory cache after initialization, which
76 Cached repo list; uses an in-memory cache after initialization, which
77 makes repeated access very fast
77 makes repeated access very fast
78 """
78 """
79
79
80 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
80 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
81 self.db_repo_list = db_repo_list
81 self.db_repo_list = db_repo_list
82 self.repos_path = repos_path
82 self.repos_path = repos_path
83 self.order_by = order_by
83 self.order_by = order_by
84 self.reversed = (order_by or '').startswith('-')
84 self.reversed = (order_by or '').startswith('-')
85 if not perm_set:
85 if not perm_set:
86 perm_set = ['repository.read', 'repository.write',
86 perm_set = ['repository.read', 'repository.write',
87 'repository.admin']
87 'repository.admin']
88 self.perm_set = perm_set
88 self.perm_set = perm_set
89
89
90 def __len__(self):
90 def __len__(self):
91 return len(self.db_repo_list)
91 return len(self.db_repo_list)
92
92
93 def __repr__(self):
93 def __repr__(self):
94 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
94 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
95
95
96 def __iter__(self):
96 def __iter__(self):
97 # pre-populated cache_map to save executing select statements
97 # pre-populated cache_map to save executing select statements
98 # for each repo
98 # for each repo
99 cache_map = CacheInvalidation.get_cache_map()
99 cache_map = CacheInvalidation.get_cache_map()
100
100
101 for dbr in self.db_repo_list:
101 for dbr in self.db_repo_list:
102 scmr = dbr.scm_instance_cached(cache_map)
102 scmr = dbr.scm_instance_cached(cache_map)
103 # check permission at this level
103 # check permission at this level
104 if not HasRepoPermissionAny(
104 if not HasRepoPermissionAny(
105 *self.perm_set
105 *self.perm_set
106 )(dbr.repo_name, 'get repo check'):
106 )(dbr.repo_name, 'get repo check'):
107 continue
107 continue
108
108
109 if scmr is None:
109 if scmr is None:
110 log.error(
110 log.error(
111 '%s this repository is present in database but it '
111 '%s this repository is present in database but it '
112 'cannot be created as an scm instance' % dbr.repo_name
112 'cannot be created as an scm instance' % dbr.repo_name
113 )
113 )
114 continue
114 continue
115
115
116 last_change = scmr.last_change
116 last_change = scmr.last_change
117 tip = h.get_changeset_safe(scmr, 'tip')
117 tip = h.get_changeset_safe(scmr, 'tip')
118
118
119 tmp_d = {}
119 tmp_d = {}
120 tmp_d['name'] = dbr.repo_name
120 tmp_d['name'] = dbr.repo_name
121 tmp_d['name_sort'] = tmp_d['name'].lower()
121 tmp_d['name_sort'] = tmp_d['name'].lower()
122 tmp_d['raw_name'] = tmp_d['name'].lower()
122 tmp_d['raw_name'] = tmp_d['name'].lower()
123 tmp_d['description'] = dbr.description
123 tmp_d['description'] = dbr.description
124 tmp_d['description_sort'] = tmp_d['description'].lower()
124 tmp_d['description_sort'] = tmp_d['description'].lower()
125 tmp_d['last_change'] = last_change
125 tmp_d['last_change'] = last_change
126 tmp_d['last_change_sort'] = time.mktime(last_change.timetuple())
126 tmp_d['last_change_sort'] = time.mktime(last_change.timetuple())
127 tmp_d['tip'] = tip.raw_id
127 tmp_d['tip'] = tip.raw_id
128 tmp_d['tip_sort'] = tip.revision
128 tmp_d['tip_sort'] = tip.revision
129 tmp_d['rev'] = tip.revision
129 tmp_d['rev'] = tip.revision
130 tmp_d['contact'] = dbr.user.full_contact
130 tmp_d['contact'] = dbr.user.full_contact
131 tmp_d['contact_sort'] = tmp_d['contact']
131 tmp_d['contact_sort'] = tmp_d['contact']
132 tmp_d['owner_sort'] = tmp_d['contact']
132 tmp_d['owner_sort'] = tmp_d['contact']
133 tmp_d['repo_archives'] = list(scmr._get_archives())
133 tmp_d['repo_archives'] = list(scmr._get_archives())
134 tmp_d['last_msg'] = tip.message
134 tmp_d['last_msg'] = tip.message
135 tmp_d['author'] = tip.author
135 tmp_d['author'] = tip.author
136 tmp_d['dbrepo'] = dbr.get_dict()
136 tmp_d['dbrepo'] = dbr.get_dict()
137 tmp_d['dbrepo_fork'] = dbr.fork.get_dict() if dbr.fork else {}
137 tmp_d['dbrepo_fork'] = dbr.fork.get_dict() if dbr.fork else {}
138 yield tmp_d
138 yield tmp_d
139
139
140
140
141 class SimpleCachedRepoList(CachedRepoList):
141 class SimpleCachedRepoList(CachedRepoList):
142 """
142 """
143 Lighter version of CachedRepoList without the scm initialisation
143 Lighter version of CachedRepoList without the scm initialisation
144 """
144 """
145
145
146 def __iter__(self):
146 def __iter__(self):
147 for dbr in self.db_repo_list:
147 for dbr in self.db_repo_list:
148 # check permission at this level
148 # check permission at this level
149 if not HasRepoPermissionAny(
149 if not HasRepoPermissionAny(
150 *self.perm_set
150 *self.perm_set
151 )(dbr.repo_name, 'get repo check'):
151 )(dbr.repo_name, 'get repo check'):
152 continue
152 continue
153
153
154 tmp_d = {}
154 tmp_d = {}
155 tmp_d['name'] = dbr.repo_name
155 tmp_d['name'] = dbr.repo_name
156 tmp_d['name_sort'] = tmp_d['name'].lower()
156 tmp_d['name_sort'] = tmp_d['name'].lower()
157 tmp_d['raw_name'] = tmp_d['name'].lower()
157 tmp_d['raw_name'] = tmp_d['name'].lower()
158 tmp_d['description'] = dbr.description
158 tmp_d['description'] = dbr.description
159 tmp_d['description_sort'] = tmp_d['description'].lower()
159 tmp_d['description_sort'] = tmp_d['description'].lower()
160 tmp_d['dbrepo'] = dbr.get_dict()
160 tmp_d['dbrepo'] = dbr.get_dict()
161 tmp_d['dbrepo_fork'] = dbr.fork.get_dict() if dbr.fork else {}
161 tmp_d['dbrepo_fork'] = dbr.fork.get_dict() if dbr.fork else {}
162 yield tmp_d
162 yield tmp_d
163
163
164
164
165 class GroupList(object):
165 class GroupList(object):
166
166
167 def __init__(self, db_repo_group_list, perm_set=None):
167 def __init__(self, db_repo_group_list, perm_set=None):
168 """
168 """
169 Creates an iterator from the given list of group objects, additionally
169 Creates an iterator from the given list of group objects, additionally
170 checking permissions for them against the perm_set var
170 checking permissions for them against the perm_set var
171
171
172 :param db_repo_group_list:
172 :param db_repo_group_list:
173 :param perm_set: list of permissions to check
173 :param perm_set: list of permissions to check
174 """
174 """
175 self.db_repo_group_list = db_repo_group_list
175 self.db_repo_group_list = db_repo_group_list
176 if not perm_set:
176 if not perm_set:
177 perm_set = ['group.read', 'group.write', 'group.admin']
177 perm_set = ['group.read', 'group.write', 'group.admin']
178 self.perm_set = perm_set
178 self.perm_set = perm_set
179
179
180 def __len__(self):
180 def __len__(self):
181 return len(self.db_repo_group_list)
181 return len(self.db_repo_group_list)
182
182
183 def __repr__(self):
183 def __repr__(self):
184 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
184 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
185
185
186 def __iter__(self):
186 def __iter__(self):
187 for dbgr in self.db_repo_group_list:
187 for dbgr in self.db_repo_group_list:
188 # check permission at this level
188 # check permission at this level
189 if not HasReposGroupPermissionAny(
189 if not HasReposGroupPermissionAny(
190 *self.perm_set
190 *self.perm_set
191 )(dbgr.group_name, 'get group repo check'):
191 )(dbgr.group_name, 'get group repo check'):
192 continue
192 continue
193
193
194 yield dbgr
194 yield dbgr
195
195
196
196
197 class ScmModel(BaseModel):
197 class ScmModel(BaseModel):
198 """
198 """
199 Generic Scm Model
199 Generic Scm Model
200 """
200 """
201
201
202 def __get_repo(self, instance):
202 def __get_repo(self, instance):
203 cls = Repository
203 cls = Repository
204 if isinstance(instance, cls):
204 if isinstance(instance, cls):
205 return instance
205 return instance
206 elif isinstance(instance, int) or safe_str(instance).isdigit():
206 elif isinstance(instance, int) or safe_str(instance).isdigit():
207 return cls.get(instance)
207 return cls.get(instance)
208 elif isinstance(instance, basestring):
208 elif isinstance(instance, basestring):
209 return cls.get_by_repo_name(instance)
209 return cls.get_by_repo_name(instance)
210 elif instance:
210 elif instance:
211 raise Exception('given object must be int, basestr or Instance'
211 raise Exception('given object must be int, basestr or Instance'
212 ' of %s got %s' % (type(cls), type(instance)))
212 ' of %s got %s' % (type(cls), type(instance)))
213
213
214 @LazyProperty
214 @LazyProperty
215 def repos_path(self):
215 def repos_path(self):
216 """
216 """
217 Gets the repositories root path from the database
217 Gets the repositories root path from the database
218 """
218 """
219
219
220 q = self.sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one()
220 q = self.sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one()
221
221
222 return q.ui_value
222 return q.ui_value
223
223
224 def repo_scan(self, repos_path=None):
224 def repo_scan(self, repos_path=None):
225 """
225 """
226 Listing of repositories in the given path. This path should not be a
226 Listing of repositories in the given path. This path should not be a
227 repository itself. Returns a dictionary of repository objects
227 repository itself. Returns a dictionary of repository objects
228
228
229 :param repos_path: path to directory containing repositories
229 :param repos_path: path to directory containing repositories
230 """
230 """
231
231
232 if repos_path is None:
232 if repos_path is None:
233 repos_path = self.repos_path
233 repos_path = self.repos_path
234
234
235 log.info('scanning for repositories in %s' % repos_path)
235 log.info('scanning for repositories in %s' % repos_path)
236
236
237 baseui = make_ui('db')
237 baseui = make_ui('db')
238 repos = {}
238 repos = {}
239
239
240 for name, path in get_filesystem_repos(repos_path, recursive=True):
240 for name, path in get_filesystem_repos(repos_path, recursive=True):
241 # skip removed repos
242 if REMOVED_REPO_PAT.match(name) or path[0] is None:
243 continue
244
245 # name needs to be decomposed and put back together using the /
241 # name needs to be decomposed and put back together using the /
246 # since this is the internal storage separator for rhodecode
242 # since this is the internal storage separator for rhodecode
247 name = Repository.normalize_repo_name(name)
243 name = Repository.normalize_repo_name(name)
248
244
249 try:
245 try:
250 if name in repos:
246 if name in repos:
251 raise RepositoryError('Duplicate repository name %s '
247 raise RepositoryError('Duplicate repository name %s '
252 'found in %s' % (name, path))
248 'found in %s' % (name, path))
253 else:
249 else:
254
250
255 klass = get_backend(path[0])
251 klass = get_backend(path[0])
256
252
257 if path[0] == 'hg' and path[0] in BACKENDS.keys():
253 if path[0] == 'hg' and path[0] in BACKENDS.keys():
258 repos[name] = klass(safe_str(path[1]), baseui=baseui)
254 repos[name] = klass(safe_str(path[1]), baseui=baseui)
259
255
260 if path[0] == 'git' and path[0] in BACKENDS.keys():
256 if path[0] == 'git' and path[0] in BACKENDS.keys():
261 repos[name] = klass(path[1])
257 repos[name] = klass(path[1])
262 except OSError:
258 except OSError:
263 continue
259 continue
264
260 log.debug('found %s paths with repositories' % (len(repos)))
265 return repos
261 return repos
266
262
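# Hedged usage sketch, not part of the changeset: external callers would do
# ScmModel().repo_scan(...); the path below is only an example value.
def _example_repo_scan(self):  # hypothetical helper, for illustration only
    found = self.repo_scan('/srv/repositories')  # example path
    return sorted(found.keys())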
267 def get_repos(self, all_repos=None, sort_key=None, simple=False):
263 def get_repos(self, all_repos=None, sort_key=None, simple=False):
268 """
264 """
269 Get all repos from the db and for each repo create its
265 Get all repos from the db and for each repo create its
270 backend instance and fill that backend with information from the database
266 backend instance and fill that backend with information from the database
271
267
272 :param all_repos: list of repository names as strings
268 :param all_repos: list of repository names as strings
273 give a specific list of repositories, useful for filtering
269 give a specific list of repositories, useful for filtering
274
270
275 :param sort_key: initial sorting of repos
271 :param sort_key: initial sorting of repos
276 :param simple: use SimpleCachedList - one without the SCM info
272 :param simple: use SimpleCachedList - one without the SCM info
277 """
273 """
278 if all_repos is None:
274 if all_repos is None:
279 all_repos = self.sa.query(Repository)\
275 all_repos = self.sa.query(Repository)\
280 .filter(Repository.group_id == None)\
276 .filter(Repository.group_id == None)\
281 .order_by(func.lower(Repository.repo_name)).all()
277 .order_by(func.lower(Repository.repo_name)).all()
282 if simple:
278 if simple:
283 repo_iter = SimpleCachedRepoList(all_repos,
279 repo_iter = SimpleCachedRepoList(all_repos,
284 repos_path=self.repos_path,
280 repos_path=self.repos_path,
285 order_by=sort_key)
281 order_by=sort_key)
286 else:
282 else:
287 repo_iter = CachedRepoList(all_repos,
283 repo_iter = CachedRepoList(all_repos,
288 repos_path=self.repos_path,
284 repos_path=self.repos_path,
289 order_by=sort_key)
285 order_by=sort_key)
290
286
291 return repo_iter
287 return repo_iter
292
288
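# Hedged usage sketch, not part of the changeset: iterates the lightweight
# repo list; each item is one of the dicts yielded by SimpleCachedRepoList.
def _example_get_repos(self):  # hypothetical helper, for illustration only
    return [entry['name'] for entry in self.get_repos(simple=True)]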
293 def get_repos_groups(self, all_groups=None):
289 def get_repos_groups(self, all_groups=None):
294 if all_groups is None:
290 if all_groups is None:
295 all_groups = RepoGroup.query()\
291 all_groups = RepoGroup.query()\
296 .filter(RepoGroup.group_parent_id == None).all()
292 .filter(RepoGroup.group_parent_id == None).all()
297 group_iter = GroupList(all_groups)
293 group_iter = GroupList(all_groups)
298
294
299 return group_iter
295 return group_iter
300
296
301 def mark_for_invalidation(self, repo_name):
297 def mark_for_invalidation(self, repo_name):
302 """
298 """
303 Puts cache invalidation task into db for
299 Puts cache invalidation task into db for
304 further global cache invalidation
300 further global cache invalidation
305
301
306 :param repo_name: the repo for which cache invalidation should take place
302 :param repo_name: the repo for which cache invalidation should take place
307 """
303 """
308 CacheInvalidation.set_invalidate(repo_name=repo_name)
304 CacheInvalidation.set_invalidate(repo_name=repo_name)
309 repo = Repository.get_by_repo_name(repo_name)
305 repo = Repository.get_by_repo_name(repo_name)
310 if repo:
306 if repo:
311 repo.update_changeset_cache()
307 repo.update_changeset_cache()
312
308
313 def toggle_following_repo(self, follow_repo_id, user_id):
309 def toggle_following_repo(self, follow_repo_id, user_id):
314
310
315 f = self.sa.query(UserFollowing)\
311 f = self.sa.query(UserFollowing)\
316 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
312 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
317 .filter(UserFollowing.user_id == user_id).scalar()
313 .filter(UserFollowing.user_id == user_id).scalar()
318
314
319 if f is not None:
315 if f is not None:
320 try:
316 try:
321 self.sa.delete(f)
317 self.sa.delete(f)
322 action_logger(UserTemp(user_id),
318 action_logger(UserTemp(user_id),
323 'stopped_following_repo',
319 'stopped_following_repo',
324 RepoTemp(follow_repo_id))
320 RepoTemp(follow_repo_id))
325 return
321 return
326 except:
322 except:
327 log.error(traceback.format_exc())
323 log.error(traceback.format_exc())
328 raise
324 raise
329
325
330 try:
326 try:
331 f = UserFollowing()
327 f = UserFollowing()
332 f.user_id = user_id
328 f.user_id = user_id
333 f.follows_repo_id = follow_repo_id
329 f.follows_repo_id = follow_repo_id
334 self.sa.add(f)
330 self.sa.add(f)
335
331
336 action_logger(UserTemp(user_id),
332 action_logger(UserTemp(user_id),
337 'started_following_repo',
333 'started_following_repo',
338 RepoTemp(follow_repo_id))
334 RepoTemp(follow_repo_id))
339 except:
335 except:
340 log.error(traceback.format_exc())
336 log.error(traceback.format_exc())
341 raise
337 raise
342
338
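# Hedged usage sketch, not part of the changeset: calling the toggle twice
# with the same ids first starts and then stops following the repository.
def _example_toggle_following(self, repo_id, user_id):  # hypothetical ids
    self.toggle_following_repo(repo_id, user_id)  # start following
    self.toggle_following_repo(repo_id, user_id)  # stop following again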
343 def toggle_following_user(self, follow_user_id, user_id):
339 def toggle_following_user(self, follow_user_id, user_id):
344 f = self.sa.query(UserFollowing)\
340 f = self.sa.query(UserFollowing)\
345 .filter(UserFollowing.follows_user_id == follow_user_id)\
341 .filter(UserFollowing.follows_user_id == follow_user_id)\
346 .filter(UserFollowing.user_id == user_id).scalar()
342 .filter(UserFollowing.user_id == user_id).scalar()
347
343
348 if f is not None:
344 if f is not None:
349 try:
345 try:
350 self.sa.delete(f)
346 self.sa.delete(f)
351 return
347 return
352 except:
348 except:
353 log.error(traceback.format_exc())
349 log.error(traceback.format_exc())
354 raise
350 raise
355
351
356 try:
352 try:
357 f = UserFollowing()
353 f = UserFollowing()
358 f.user_id = user_id
354 f.user_id = user_id
359 f.follows_user_id = follow_user_id
355 f.follows_user_id = follow_user_id
360 self.sa.add(f)
356 self.sa.add(f)
361 except:
357 except:
362 log.error(traceback.format_exc())
358 log.error(traceback.format_exc())
363 raise
359 raise
364
360
365 def is_following_repo(self, repo_name, user_id, cache=False):
361 def is_following_repo(self, repo_name, user_id, cache=False):
366 r = self.sa.query(Repository)\
362 r = self.sa.query(Repository)\
367 .filter(Repository.repo_name == repo_name).scalar()
363 .filter(Repository.repo_name == repo_name).scalar()
368
364
369 f = self.sa.query(UserFollowing)\
365 f = self.sa.query(UserFollowing)\
370 .filter(UserFollowing.follows_repository == r)\
366 .filter(UserFollowing.follows_repository == r)\
371 .filter(UserFollowing.user_id == user_id).scalar()
367 .filter(UserFollowing.user_id == user_id).scalar()
372
368
373 return f is not None
369 return f is not None
374
370
375 def is_following_user(self, username, user_id, cache=False):
371 def is_following_user(self, username, user_id, cache=False):
376 u = User.get_by_username(username)
372 u = User.get_by_username(username)
377
373
378 f = self.sa.query(UserFollowing)\
374 f = self.sa.query(UserFollowing)\
379 .filter(UserFollowing.follows_user == u)\
375 .filter(UserFollowing.follows_user == u)\
380 .filter(UserFollowing.user_id == user_id).scalar()
376 .filter(UserFollowing.user_id == user_id).scalar()
381
377
382 return f is not None
378 return f is not None
383
379
384 def get_followers(self, repo):
380 def get_followers(self, repo):
385 repo = self._get_repo(repo)
381 repo = self._get_repo(repo)
386
382
387 return self.sa.query(UserFollowing)\
383 return self.sa.query(UserFollowing)\
388 .filter(UserFollowing.follows_repository == repo).count()
384 .filter(UserFollowing.follows_repository == repo).count()
389
385
390 def get_forks(self, repo):
386 def get_forks(self, repo):
391 repo = self._get_repo(repo)
387 repo = self._get_repo(repo)
392 return self.sa.query(Repository)\
388 return self.sa.query(Repository)\
393 .filter(Repository.fork == repo).count()
389 .filter(Repository.fork == repo).count()
394
390
395 def get_pull_requests(self, repo):
391 def get_pull_requests(self, repo):
396 repo = self._get_repo(repo)
392 repo = self._get_repo(repo)
397 return self.sa.query(PullRequest)\
393 return self.sa.query(PullRequest)\
398 .filter(PullRequest.other_repo == repo).count()
394 .filter(PullRequest.other_repo == repo).count()
399
395
400 def mark_as_fork(self, repo, fork, user):
396 def mark_as_fork(self, repo, fork, user):
401 repo = self.__get_repo(repo)
397 repo = self.__get_repo(repo)
402 fork = self.__get_repo(fork)
398 fork = self.__get_repo(fork)
403 if fork and repo.repo_id == fork.repo_id:
399 if fork and repo.repo_id == fork.repo_id:
404 raise Exception("Cannot set repository as fork of itself")
400 raise Exception("Cannot set repository as fork of itself")
405 repo.fork = fork
401 repo.fork = fork
406 self.sa.add(repo)
402 self.sa.add(repo)
407 return repo
403 return repo
408
404
409 def pull_changes(self, repo, username):
405 def pull_changes(self, repo, username):
410 dbrepo = self.__get_repo(repo)
406 dbrepo = self.__get_repo(repo)
411 clone_uri = dbrepo.clone_uri
407 clone_uri = dbrepo.clone_uri
412 if not clone_uri:
408 if not clone_uri:
413 raise Exception("This repository doesn't have a clone uri")
409 raise Exception("This repository doesn't have a clone uri")
414
410
415 repo = dbrepo.scm_instance
411 repo = dbrepo.scm_instance
416 from rhodecode import CONFIG
412 from rhodecode import CONFIG
417 try:
413 try:
418 extras = {
414 extras = {
419 'ip': '',
415 'ip': '',
420 'username': username,
416 'username': username,
421 'action': 'push_remote',
417 'action': 'push_remote',
422 'repository': dbrepo.repo_name,
418 'repository': dbrepo.repo_name,
423 'scm': repo.alias,
419 'scm': repo.alias,
424 'config': CONFIG['__file__'],
420 'config': CONFIG['__file__'],
425 'make_lock': None,
421 'make_lock': None,
426 'locked_by': [None, None]
422 'locked_by': [None, None]
427 }
423 }
428
424
429 Repository.inject_ui(repo, extras=extras)
425 Repository.inject_ui(repo, extras=extras)
430
426
431 if repo.alias == 'git':
427 if repo.alias == 'git':
432 repo.fetch(clone_uri)
428 repo.fetch(clone_uri)
433 else:
429 else:
434 repo.pull(clone_uri)
430 repo.pull(clone_uri)
435 self.mark_for_invalidation(dbrepo.repo_name)
431 self.mark_for_invalidation(dbrepo.repo_name)
436 except:
432 except:
437 log.error(traceback.format_exc())
433 log.error(traceback.format_exc())
438 raise
434 raise
439
435
440 def commit_change(self, repo, repo_name, cs, user, author, message,
436 def commit_change(self, repo, repo_name, cs, user, author, message,
441 content, f_path):
437 content, f_path):
442 """
438 """
443 Commits changes
439 Commits changes
444
440
445 :param repo: SCM instance
441 :param repo: SCM instance
446
442
447 """
443 """
448
444
449 if repo.alias == 'hg':
445 if repo.alias == 'hg':
450 from rhodecode.lib.vcs.backends.hg import \
446 from rhodecode.lib.vcs.backends.hg import \
451 MercurialInMemoryChangeset as IMC
447 MercurialInMemoryChangeset as IMC
452 elif repo.alias == 'git':
448 elif repo.alias == 'git':
453 from rhodecode.lib.vcs.backends.git import \
449 from rhodecode.lib.vcs.backends.git import \
454 GitInMemoryChangeset as IMC
450 GitInMemoryChangeset as IMC
455
451
456 # decoding here ensures that we have properly encoded values;
452 # decoding here ensures that we have properly encoded values;
457 # in any other case this will raise exceptions and deny the commit
453 # in any other case this will raise exceptions and deny the commit
458 content = safe_str(content)
454 content = safe_str(content)
459 path = safe_str(f_path)
455 path = safe_str(f_path)
460 # message and author need to be unicode;
456 # message and author need to be unicode;
461 # the proper backend should then translate that into the required type
457 # the proper backend should then translate that into the required type
462 message = safe_unicode(message)
458 message = safe_unicode(message)
463 author = safe_unicode(author)
459 author = safe_unicode(author)
464 m = IMC(repo)
460 m = IMC(repo)
465 m.change(FileNode(path, content))
461 m.change(FileNode(path, content))
466 tip = m.commit(message=message,
462 tip = m.commit(message=message,
467 author=author,
463 author=author,
468 parents=[cs], branch=cs.branch)
464 parents=[cs], branch=cs.branch)
469
465
470 action = 'push_local:%s' % tip.raw_id
466 action = 'push_local:%s' % tip.raw_id
471 action_logger(user, action, repo_name)
467 action_logger(user, action, repo_name)
472 self.mark_for_invalidation(repo_name)
468 self.mark_for_invalidation(repo_name)
473 return tip
469 return tip
474
470
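# Hedged usage sketch, not part of the changeset: commits a new revision of an
# existing file through the in-memory changeset machinery; the author, message,
# file path and content below are example values only.
def _example_commit_change(self, db_repo, user):  # hypothetical arguments
    scm_repo = db_repo.scm_instance
    tip = scm_repo.get_changeset()  # current tip becomes the parent
    return self.commit_change(
        repo=scm_repo, repo_name=db_repo.repo_name, cs=tip, user=user,
        author=u'Example Author <author@example.com>',
        message=u'update README', content='new content\n', f_path='README.rst')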
475 def create_node(self, repo, repo_name, cs, user, author, message, content,
471 def create_node(self, repo, repo_name, cs, user, author, message, content,
476 f_path):
472 f_path):
477 if repo.alias == 'hg':
473 if repo.alias == 'hg':
478 from rhodecode.lib.vcs.backends.hg import MercurialInMemoryChangeset as IMC
474 from rhodecode.lib.vcs.backends.hg import MercurialInMemoryChangeset as IMC
479 elif repo.alias == 'git':
475 elif repo.alias == 'git':
480 from rhodecode.lib.vcs.backends.git import GitInMemoryChangeset as IMC
476 from rhodecode.lib.vcs.backends.git import GitInMemoryChangeset as IMC
481 # decoding here ensures that we have properly encoded values;
477 # decoding here ensures that we have properly encoded values;
482 # in any other case this will raise exceptions and deny the commit
478 # in any other case this will raise exceptions and deny the commit
483
479
484 if isinstance(content, (basestring,)):
480 if isinstance(content, (basestring,)):
485 content = safe_str(content)
481 content = safe_str(content)
486 elif isinstance(content, (file, cStringIO.OutputType,)):
482 elif isinstance(content, (file, cStringIO.OutputType,)):
487 content = content.read()
483 content = content.read()
488 else:
484 else:
489 raise Exception('Content is of unrecognized type %s' % (
485 raise Exception('Content is of unrecognized type %s' % (
490 type(content)
486 type(content)
491 ))
487 ))
492
488
493 message = safe_unicode(message)
489 message = safe_unicode(message)
494 author = safe_unicode(author)
490 author = safe_unicode(author)
495 path = safe_str(f_path)
491 path = safe_str(f_path)
496 m = IMC(repo)
492 m = IMC(repo)
497
493
498 if isinstance(cs, EmptyChangeset):
494 if isinstance(cs, EmptyChangeset):
499 # EmptyChangeset means we're editing an empty repository
495 # EmptyChangeset means we're editing an empty repository
500 parents = None
496 parents = None
501 else:
497 else:
502 parents = [cs]
498 parents = [cs]
503
499
504 m.add(FileNode(path, content=content))
500 m.add(FileNode(path, content=content))
505 tip = m.commit(message=message,
501 tip = m.commit(message=message,
506 author=author,
502 author=author,
507 parents=parents, branch=cs.branch)
503 parents=parents, branch=cs.branch)
508
504
509 action = 'push_local:%s' % tip.raw_id
505 action = 'push_local:%s' % tip.raw_id
510 action_logger(user, action, repo_name)
506 action_logger(user, action, repo_name)
511 self.mark_for_invalidation(repo_name)
507 self.mark_for_invalidation(repo_name)
512 return tip
508 return tip
513
509
514 def get_nodes(self, repo_name, revision, root_path='/', flat=True):
510 def get_nodes(self, repo_name, revision, root_path='/', flat=True):
515 """
511 """
516 recursively walk the root dir and return a set of all paths in that dir,
512 recursively walk the root dir and return a set of all paths in that dir,
517 based on the repository walk function
513 based on the repository walk function
518
514
519 :param repo_name: name of repository
515 :param repo_name: name of repository
520 :param revision: revision for which to list nodes
516 :param revision: revision for which to list nodes
521 :param root_path: root path to list
517 :param root_path: root path to list
522 :param flat: return as a list, if False returns a dict with description
518 :param flat: return as a list, if False returns a dict with description
523
519
524 """
520 """
525 _files = list()
521 _files = list()
526 _dirs = list()
522 _dirs = list()
527 try:
523 try:
528 _repo = self.__get_repo(repo_name)
524 _repo = self.__get_repo(repo_name)
529 changeset = _repo.scm_instance.get_changeset(revision)
525 changeset = _repo.scm_instance.get_changeset(revision)
530 root_path = root_path.lstrip('/')
526 root_path = root_path.lstrip('/')
531 for topnode, dirs, files in changeset.walk(root_path):
527 for topnode, dirs, files in changeset.walk(root_path):
532 for f in files:
528 for f in files:
533 _files.append(f.path if flat else {"name": f.path,
529 _files.append(f.path if flat else {"name": f.path,
534 "type": "file"})
530 "type": "file"})
535 for d in dirs:
531 for d in dirs:
536 _dirs.append(d.path if flat else {"name": d.path,
532 _dirs.append(d.path if flat else {"name": d.path,
537 "type": "dir"})
533 "type": "dir"})
538 except RepositoryError:
534 except RepositoryError:
539 log.debug(traceback.format_exc())
535 log.debug(traceback.format_exc())
540 raise
536 raise
541
537
542 return _dirs, _files
538 return _dirs, _files
543
539
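# Hedged usage sketch, not part of the changeset: lists everything under the
# docs/ directory of a hypothetical repository at its tip revision.
def _example_get_nodes(self):  # hypothetical repo name and path
    dirs, files = self.get_nodes('example-repo', 'tip',
                                 root_path='docs/', flat=True)
    return dirs, files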
544 def get_unread_journal(self):
540 def get_unread_journal(self):
545 return self.sa.query(UserLog).count()
541 return self.sa.query(UserLog).count()
546
542
547 def get_repo_landing_revs(self, repo=None):
543 def get_repo_landing_revs(self, repo=None):
548 """
544 """
549 Generates select options with tags, branches and bookmarks (bookmarks for
545 Generates select options with tags, branches and bookmarks (bookmarks for
550 hg only), grouped by type
546 hg only), grouped by type
551
547
552 :param repo:
548 :param repo:
553 :type repo:
549 :type repo:
554 """
550 """
555
551
556 hist_l = []
552 hist_l = []
557 choices = []
553 choices = []
558 repo = self.__get_repo(repo)
554 repo = self.__get_repo(repo)
559 hist_l.append(['tip', _('latest tip')])
555 hist_l.append(['tip', _('latest tip')])
560 choices.append('tip')
556 choices.append('tip')
561 if not repo:
557 if not repo:
562 return choices, hist_l
558 return choices, hist_l
563
559
564 repo = repo.scm_instance
560 repo = repo.scm_instance
565
561
566 branches_group = ([(k, k) for k, v in
562 branches_group = ([(k, k) for k, v in
567 repo.branches.iteritems()], _("Branches"))
563 repo.branches.iteritems()], _("Branches"))
568 hist_l.append(branches_group)
564 hist_l.append(branches_group)
569 choices.extend([x[0] for x in branches_group[0]])
565 choices.extend([x[0] for x in branches_group[0]])
570
566
571 if repo.alias == 'hg':
567 if repo.alias == 'hg':
572 bookmarks_group = ([(k, k) for k, v in
568 bookmarks_group = ([(k, k) for k, v in
573 repo.bookmarks.iteritems()], _("Bookmarks"))
569 repo.bookmarks.iteritems()], _("Bookmarks"))
574 hist_l.append(bookmarks_group)
570 hist_l.append(bookmarks_group)
575 choices.extend([x[0] for x in bookmarks_group[0]])
571 choices.extend([x[0] for x in bookmarks_group[0]])
576
572
577 tags_group = ([(k, k) for k, v in
573 tags_group = ([(k, k) for k, v in
578 repo.tags.iteritems()], _("Tags"))
574 repo.tags.iteritems()], _("Tags"))
579 hist_l.append(tags_group)
575 hist_l.append(tags_group)
580 choices.extend([x[0] for x in tags_group[0]])
576 choices.extend([x[0] for x in tags_group[0]])
581
577
582 return choices, hist_l
578 return choices, hist_l
583
579
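# Hedged usage sketch, not part of the changeset: the first returned list is
# flat (valid landing revision values), the second is grouped for a select box.
def _example_landing_revs(self, db_repo):  # hypothetical repository object
    choices, grouped = self.get_repo_landing_revs(db_repo)
    return 'tip' in choices and len(grouped) >= 1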
584 def install_git_hook(self, repo, force_create=False):
580 def install_git_hook(self, repo, force_create=False):
585 """
581 """
586 Creates a rhodecode hook inside a git repository
582 Creates a rhodecode hook inside a git repository
587
583
588 :param repo: Instance of VCS repo
584 :param repo: Instance of VCS repo
589 :param force_create: Create even if same name hook exists
585 :param force_create: Create even if same name hook exists
590 """
586 """
591
587
592 loc = jn(repo.path, 'hooks')
588 loc = jn(repo.path, 'hooks')
593 if not repo.bare:
589 if not repo.bare:
594 loc = jn(repo.path, '.git', 'hooks')
590 loc = jn(repo.path, '.git', 'hooks')
595 if not os.path.isdir(loc):
591 if not os.path.isdir(loc):
596 os.makedirs(loc)
592 os.makedirs(loc)
597
593
598 tmpl_post = pkg_resources.resource_string(
594 tmpl_post = pkg_resources.resource_string(
599 'rhodecode', jn('config', 'post_receive_tmpl.py')
595 'rhodecode', jn('config', 'post_receive_tmpl.py')
600 )
596 )
601 tmpl_pre = pkg_resources.resource_string(
597 tmpl_pre = pkg_resources.resource_string(
602 'rhodecode', jn('config', 'pre_receive_tmpl.py')
598 'rhodecode', jn('config', 'pre_receive_tmpl.py')
603 )
599 )
604
600
605 for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
601 for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
606 _hook_file = jn(loc, '%s-receive' % h_type)
602 _hook_file = jn(loc, '%s-receive' % h_type)
607 _rhodecode_hook = False
603 _rhodecode_hook = False
608 log.debug('Installing git hook in repo %s' % repo)
604 log.debug('Installing git hook in repo %s' % repo)
609 if os.path.exists(_hook_file):
605 if os.path.exists(_hook_file):
610 # let's take a look at this hook, maybe it's rhodecode ?
606 # let's take a look at this hook, maybe it's rhodecode ?
611 log.debug('hook exists, checking if it is from rhodecode')
607 log.debug('hook exists, checking if it is from rhodecode')
612 _HOOK_VER_PAT = re.compile(r'^RC_HOOK_VER')
608 _HOOK_VER_PAT = re.compile(r'^RC_HOOK_VER')
613 with open(_hook_file, 'rb') as f:
609 with open(_hook_file, 'rb') as f:
614 data = f.read()
610 data = f.read()
615 matches = re.compile(r'(?:%s)\s*=\s*(.*)'
611 matches = re.compile(r'(?:%s)\s*=\s*(.*)'
616 % 'RC_HOOK_VER').search(data)
612 % 'RC_HOOK_VER').search(data)
617 if matches:
613 if matches:
618 try:
614 try:
619 ver = matches.groups()[0]
615 ver = matches.groups()[0]
620 log.debug('got %s it is rhodecode' % (ver))
616 log.debug('got %s it is rhodecode' % (ver))
621 _rhodecode_hook = True
617 _rhodecode_hook = True
622 except:
618 except:
623 log.error(traceback.format_exc())
619 log.error(traceback.format_exc())
624 else:
620 else:
625 # there is no hook in this dir, so we want to create one
621 # there is no hook in this dir, so we want to create one
626 _rhodecode_hook = True
622 _rhodecode_hook = True
627
623
628 if _rhodecode_hook or force_create:
624 if _rhodecode_hook or force_create:
629 log.debug('writing %s hook file !' % h_type)
625 log.debug('writing %s hook file !' % h_type)
630 with open(_hook_file, 'wb') as f:
626 with open(_hook_file, 'wb') as f:
631 tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
627 tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
632 f.write(tmpl)
628 f.write(tmpl)
633 os.chmod(_hook_file, 0755)
629 os.chmod(_hook_file, 0755)
634 else:
630 else:
635 log.debug('skipping writing hook file')
631 log.debug('skipping writing hook file')
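# Hedged usage sketch, not part of the changeset: (re)installs the RhodeCode
# pre/post receive hooks for a git repository; force_create=True would also
# overwrite hooks that were not written by RhodeCode.
def _example_install_git_hook(self, db_repo):  # hypothetical repository object
    vcs_repo = db_repo.scm_instance
    if vcs_repo.alias == 'git':
        self.install_git_hook(vcs_repo, force_create=False)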