fixed python2.5 compat and repo mapper issue
Author: marcink
Changeset: r2620:cd207411 (beta)
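This changeset touches two files. In rhodecode/lib/utils.py, repo2db_mapper reused the loop variable repo (the repository object found by the filesystem scan) to also hold the result of the database lookup; when the lookup returned None for a repository not yet in the database, the later repo.description and repo.alias accesses no longer pointed at the scanned repository. The fix gives the database record its own name, db_repo. In rhodecode/model/scm.py the changeset adds from __future__ import with_statement so the module keeps importing on Python 2.5. A minimal, hypothetical sketch of the shadowing pattern (not the actual mapper code; lookup stands in for rm.get_by_repo_name):

    # Hypothetical sketch of the "repo mapper issue" fixed below.
    def lookup(name):
        return None  # repository not present in the database yet

    scanned = {'vcs_test_hg': object()}  # name -> repository object from disk

    for name, repo in scanned.items():
        # buggy version rebound the same name, losing the scanned object:
        #   repo = lookup(name)
        # fixed version keeps the database record under its own name:
        db_repo = lookup(name)
        if not db_repo:
            scanned_repo = repo  # still the object found on disk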
--- a/rhodecode/lib/utils.py
+++ b/rhodecode/lib/utils.py
@@ -1,697 +1,697 @@
# -*- coding: utf-8 -*-
"""
    rhodecode.lib.utils
    ~~~~~~~~~~~~~~~~~~~

    Utilities library for RhodeCode

    :created_on: Apr 18, 2010
    :author: marcink
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
    :license: GPLv3, see COPYING for more details.
"""
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import os
import re
import logging
import datetime
import traceback
import paste
import beaker
import tarfile
import shutil
from os.path import abspath
from os.path import dirname as dn, join as jn

from paste.script.command import Command, BadCommand

from mercurial import ui, config

from webhelpers.text import collapse, remove_formatting, strip_tags

from rhodecode.lib.vcs import get_backend
from rhodecode.lib.vcs.backends.base import BaseChangeset
from rhodecode.lib.vcs.utils.lazy import LazyProperty
from rhodecode.lib.vcs.utils.helpers import get_scm
from rhodecode.lib.vcs.exceptions import VCSError

from rhodecode.lib.caching_query import FromCache

from rhodecode.model import meta
from rhodecode.model.db import Repository, User, RhodeCodeUi, \
    UserLog, RepoGroup, RhodeCodeSetting, CacheInvalidation
from rhodecode.model.meta import Session
from rhodecode.model.repos_group import ReposGroupModel
from rhodecode.lib.utils2 import safe_str, safe_unicode
from rhodecode.lib.vcs.utils.fakemod import create_module

log = logging.getLogger(__name__)

REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')


def recursive_replace(str_, replace=' '):
    """
    Recursive replace of given sign to just one instance

    :param str_: given string
    :param replace: char to find and replace multiple instances

    Examples::
    >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
    'Mighty-Mighty-Bo-sstones'
    """

    if str_.find(replace * 2) == -1:
        return str_
    else:
        str_ = str_.replace(replace * 2, replace)
        return recursive_replace(str_, replace)


def repo_name_slug(value):
    """
    Return slug of name of repository
    This function is called on each creation/modification
    of repository to prevent bad names in repo
    """

    slug = remove_formatting(value)
    slug = strip_tags(slug)

    for c in """=[]\;'"<>,/~!@#$%^&*()+{}|: """:
        slug = slug.replace(c, '-')
    slug = recursive_replace(slug, '-')
    slug = collapse(slug, '-')
    return slug


def get_repo_slug(request):
    _repo = request.environ['pylons.routes_dict'].get('repo_name')
    if _repo:
        _repo = _repo.rstrip('/')
    return _repo


def get_repos_group_slug(request):
    _group = request.environ['pylons.routes_dict'].get('group_name')
    if _group:
        _group = _group.rstrip('/')
    return _group


def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
    """
    Action logger for various actions made by users

    :param user: user that made this action, can be a unique username string or
        object containing user_id attribute
    :param action: action to log, should be on of predefined unique actions for
        easy translations
    :param repo: string name of repository or object containing repo_id,
        that action was made on
    :param ipaddr: optional ip address from what the action was made
    :param sa: optional sqlalchemy session

    """

    if not sa:
        sa = meta.Session()

    try:
        if hasattr(user, 'user_id'):
            user_obj = user
        elif isinstance(user, basestring):
            user_obj = User.get_by_username(user)
        else:
            raise Exception('You have to provide user object or username')

        if hasattr(repo, 'repo_id'):
            repo_obj = Repository.get(repo.repo_id)
            repo_name = repo_obj.repo_name
        elif isinstance(repo, basestring):
            repo_name = repo.lstrip('/')
            repo_obj = Repository.get_by_repo_name(repo_name)
        else:
            repo_obj = None
            repo_name = ''

        user_log = UserLog()
        user_log.user_id = user_obj.user_id
        user_log.action = safe_unicode(action)

        user_log.repository = repo_obj
        user_log.repository_name = repo_name

        user_log.action_date = datetime.datetime.now()
        user_log.user_ip = ipaddr
        sa.add(user_log)

        log.info(
            'Adding user %s, action %s on %s' % (user_obj, action,
                                                 safe_unicode(repo))
        )
        if commit:
            sa.commit()
    except:
        log.error(traceback.format_exc())
        raise


def get_repos(path, recursive=False):
    """
    Scans given path for repos and return (name,(type,path)) tuple

    :param path: path to scan for repositories
    :param recursive: recursive search and return names with subdirs in front
    """

    # remove ending slash for better results
    path = path.rstrip(os.sep)

    def _get_repos(p):
        if not os.access(p, os.W_OK):
            return
        for dirpath in os.listdir(p):
            if os.path.isfile(os.path.join(p, dirpath)):
                continue
            cur_path = os.path.join(p, dirpath)
            try:
                scm_info = get_scm(cur_path)
                yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
            except VCSError:
                if not recursive:
                    continue
                #check if this dir containts other repos for recursive scan
                rec_path = os.path.join(p, dirpath)
                if os.path.isdir(rec_path):
                    for inner_scm in _get_repos(rec_path):
                        yield inner_scm

    return _get_repos(path)


def is_valid_repo(repo_name, base_path):
    """
    Returns True if given path is a valid repository False otherwise

    :param repo_name:
    :param base_path:

    :return True: if given path is a valid repository
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_name))

    try:
        get_scm(full_path)
        return True
    except VCSError:
        return False


def is_valid_repos_group(repos_group_name, base_path):
    """
    Returns True if given path is a repos group False otherwise

    :param repo_name:
    :param base_path:
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repos_group_name))

    # check if it's not a repo
    if is_valid_repo(repos_group_name, base_path):
        return False

    try:
        # we need to check bare git repos at higher level
        # since we might match branches/hooks/info/objects or possible
        # other things inside bare git repo
        get_scm(os.path.dirname(full_path))
        return False
    except VCSError:
        pass

    # check if it's a valid path
    if os.path.isdir(full_path):
        return True

    return False


def ask_ok(prompt, retries=4, complaint='Yes or no, please!'):
    while True:
        ok = raw_input(prompt)
        if ok in ('y', 'ye', 'yes'):
            return True
        if ok in ('n', 'no', 'nop', 'nope'):
            return False
        retries = retries - 1
        if retries < 0:
            raise IOError
        print complaint

#propagated from mercurial documentation
ui_sections = ['alias', 'auth',
               'decode/encode', 'defaults',
               'diff', 'email',
               'extensions', 'format',
               'merge-patterns', 'merge-tools',
               'hooks', 'http_proxy',
               'smtp', 'patch',
               'paths', 'profiling',
               'server', 'trusted',
               'ui', 'web', ]


def make_ui(read_from='file', path=None, checkpaths=True):
    """
    A function that will read python rc files or database
    and make an mercurial ui object from read options

    :param path: path to mercurial config file
    :param checkpaths: check the path
    :param read_from: read from 'file' or 'db'
    """

    baseui = ui.ui()

    # clean the baseui object
    baseui._ocfg = config.config()
    baseui._ucfg = config.config()
    baseui._tcfg = config.config()

    if read_from == 'file':
        if not os.path.isfile(path):
            log.debug('hgrc file is not present at %s skipping...' % path)
            return False
        log.debug('reading hgrc from %s' % path)
        cfg = config.config()
        cfg.read(path)
        for section in ui_sections:
            for k, v in cfg.items(section):
                log.debug('settings ui from file[%s]%s:%s' % (section, k, v))
                baseui.setconfig(section, k, v)

    elif read_from == 'db':
        sa = meta.Session()
        ret = sa.query(RhodeCodeUi)\
            .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
            .all()

        hg_ui = ret
        for ui_ in hg_ui:
            if ui_.ui_active:
                log.debug('settings ui from db[%s]%s:%s', ui_.ui_section,
                          ui_.ui_key, ui_.ui_value)
                baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)

        meta.Session.remove()
    return baseui


def set_rhodecode_config(config):
    """
    Updates pylons config with new settings from database

    :param config:
    """
    hgsettings = RhodeCodeSetting.get_app_settings()

    for k, v in hgsettings.items():
        config[k] = v


def invalidate_cache(cache_key, *args):
    """
    Puts cache invalidation task into db for
    further global cache invalidation
    """

    from rhodecode.model.scm import ScmModel

    if cache_key.startswith('get_repo_cached_'):
        name = cache_key.split('get_repo_cached_')[-1]
        ScmModel().mark_for_invalidation(name)


class EmptyChangeset(BaseChangeset):
    """
    An dummy empty changeset. It's possible to pass hash when creating
    an EmptyChangeset
    """

    def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
                 alias=None):
        self._empty_cs = cs
        self.revision = -1
        self.message = ''
        self.author = ''
        self.date = ''
        self.repository = repo
        self.requested_revision = requested_revision
        self.alias = alias

    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this changeset, useful for web
        representation.
        """

        return self._empty_cs

    @LazyProperty
    def branch(self):
        return get_backend(self.alias).DEFAULT_BRANCH_NAME

    @LazyProperty
    def short_id(self):
        return self.raw_id[:12]

    def get_file_changeset(self, path):
        return self

    def get_file_content(self, path):
        return u''

    def get_file_size(self, path):
        return 0


def map_groups(path):
    """
    Given a full path to a repository, create all nested groups that this
    repo is inside. This function creates parent-child relationships between
    groups and creates default perms for all new groups.

    :param paths: full path to repository
    """
    sa = meta.Session()
    groups = path.split(Repository.url_sep())
    parent = None
    group = None

    # last element is repo in nested groups structure
    groups = groups[:-1]
    rgm = ReposGroupModel(sa)
    for lvl, group_name in enumerate(groups):
        group_name = '/'.join(groups[:lvl] + [group_name])
        group = RepoGroup.get_by_group_name(group_name)
        desc = '%s group' % group_name

        # skip folders that are now removed repos
        if REMOVED_REPO_PAT.match(group_name):
            break

        if group is None:
            log.debug('creating group level: %s group_name: %s' % (lvl,
                                                                   group_name))
            group = RepoGroup(group_name, parent)
            group.group_description = desc
            sa.add(group)
            rgm._create_default_perms(group)
            sa.flush()
        parent = group
    return group


def repo2db_mapper(initial_repo_list, remove_obsolete=False,
                   install_git_hook=False):
    """
    maps all repos given in initial_repo_list, non existing repositories
    are created, if remove_obsolete is True it also check for db entries
    that are not in initial_repo_list and removes them.

    :param initial_repo_list: list of repositories found by scanning methods
    :param remove_obsolete: check for obsolete entries in database
    :param install_git_hook: if this is True, also check and install githook
        for a repo if missing
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.scm import ScmModel
    sa = meta.Session()
    rm = RepoModel()
    user = sa.query(User).filter(User.admin == True).first()
    if user is None:
        raise Exception('Missing administrative account !')
    added = []

    for name, repo in initial_repo_list.items():
        group = map_groups(name)
-        repo = rm.get_by_repo_name(name)
-        if not repo:
+        db_repo = rm.get_by_repo_name(name)
+        if not db_repo:
            log.info('repository %s not found creating now' % name)
            added.append(name)
            desc = (repo.description
                    if repo.description != 'unknown'
                    else '%s repository' % name)
            rm.create_repo(
                repo_name=name,
                repo_type=repo.alias,
                description=desc,
                repos_group=getattr(group, 'group_id', None),
                owner=user,
                just_db=True
            )
        elif install_git_hook:
-            if repo.repo_type == 'git':
-                ScmModel().install_git_hook(repo.scm_instance)
+            if db_repo.repo_type == 'git':
+                ScmModel().install_git_hook(db_repo.scm_instance)
    sa.commit()
    removed = []
    if remove_obsolete:
        # remove from database those repositories that are not in the filesystem
        for repo in sa.query(Repository).all():
            if repo.repo_name not in initial_repo_list.keys():
                log.debug("Removing non existing repository found in db %s" %
                          repo.repo_name)
                try:
                    sa.delete(repo)
                    sa.commit()
                    removed.append(repo.repo_name)
                except:
                    #don't hold further removals on error
                    log.error(traceback.format_exc())

    # clear cache keys
    log.debug("Clearing cache keys now...")
    CacheInvalidation.clear_cache()
    sa.commit()
    return added, removed


# set cache regions for beaker so celery can utilise it
def add_cache(settings):
    cache_settings = {'regions': None}
    for key in settings.keys():
        for prefix in ['beaker.cache.', 'cache.']:
            if key.startswith(prefix):
                name = key.split(prefix)[1].strip()
                cache_settings[name] = settings[key].strip()
    if cache_settings['regions']:
        for region in cache_settings['regions'].split(','):
            region = region.strip()
            region_settings = {}
            for key, value in cache_settings.items():
                if key.startswith(region):
                    region_settings[key.split('.')[1]] = value
            region_settings['expire'] = int(region_settings.get('expire',
                                                                60))
            region_settings.setdefault('lock_dir',
                                       cache_settings.get('lock_dir'))
            region_settings.setdefault('data_dir',
                                       cache_settings.get('data_dir'))

            if 'type' not in region_settings:
                region_settings['type'] = cache_settings.get('type',
                                                             'memory')
            beaker.cache.cache_regions[region] = region_settings


def load_rcextensions(root_path):
    import rhodecode
    from rhodecode.config import conf

    path = os.path.join(root_path, 'rcextensions', '__init__.py')
    if os.path.isfile(path):
        rcext = create_module('rc', path)
        EXT = rhodecode.EXTENSIONS = rcext
        log.debug('Found rcextensions now loading %s...' % rcext)

        # Additional mappings that are not present in the pygments lexers
        conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))

        #OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)

        if getattr(EXT, 'INDEX_EXTENSIONS', []) != []:
            log.debug('settings custom INDEX_EXTENSIONS')
            conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])

        #ADDITIONAL MAPPINGS
        log.debug('adding extra into INDEX_EXTENSIONS')
        conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))


#==============================================================================
# TEST FUNCTIONS AND CREATORS
#==============================================================================
def create_test_index(repo_location, config, full_index):
    """
    Makes default test index

    :param config: test config
    :param full_index:
    """

    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
    from rhodecode.lib.pidlock import DaemonLock, LockHeld

    repo_location = repo_location

    index_location = os.path.join(config['app_conf']['index_dir'])
    if not os.path.exists(index_location):
        os.makedirs(index_location)

    try:
        l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
        WhooshIndexingDaemon(index_location=index_location,
                             repo_location=repo_location)\
            .run(full_index=full_index)
        l.release()
    except LockHeld:
        pass


def create_test_env(repos_test_path, config):
    """
    Makes a fresh database and
    install test repository into tmp dir
    """
    from rhodecode.lib.db_manage import DbManage
    from rhodecode.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH

    # PART ONE create db
    dbconf = config['sqlalchemy.db1.url']
    log.debug('making test db %s' % dbconf)

    # create test dir if it doesn't exist
    if not os.path.isdir(repos_test_path):
        log.debug('Creating testdir %s' % repos_test_path)
        os.makedirs(repos_test_path)

    dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
                        tests=True)
    dbmanage.create_tables(override=True)
    dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
    dbmanage.create_default_user()
    dbmanage.admin_prompt()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()
    Session().commit()
    # PART TWO make test repo
    log.debug('making test vcs repositories')

    idx_path = config['app_conf']['index_dir']
    data_path = config['app_conf']['cache_dir']

    #clean index and data
    if idx_path and os.path.exists(idx_path):
        log.debug('remove %s' % idx_path)
        shutil.rmtree(idx_path)

    if data_path and os.path.exists(data_path):
        log.debug('remove %s' % data_path)
        shutil.rmtree(data_path)

    #CREATE DEFAULT TEST REPOS
    cur_dir = dn(dn(abspath(__file__)))
    tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
    tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
    tar.close()

    cur_dir = dn(dn(abspath(__file__)))
    tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_git.tar.gz"))
    tar.extractall(jn(TESTS_TMP_PATH, GIT_REPO))
    tar.close()

    #LOAD VCS test stuff
    from rhodecode.tests.vcs import setup_package
    setup_package()


#==============================================================================
# PASTER COMMANDS
#==============================================================================
class BasePasterCommand(Command):
    """
    Abstract Base Class for paster commands.

    The celery commands are somewhat aggressive about loading
    celery.conf, and since our module sets the `CELERY_LOADER`
    environment variable to our loader, we have to bootstrap a bit and
    make sure we've had a chance to load the pylons config off of the
    command line, otherwise everything fails.
    """
    min_args = 1
    min_args_error = "Please provide a paster config file as an argument."
    takes_config_file = 1
    requires_config_file = True

    def notify_msg(self, msg, log=False):
        """Make a notification to user, additionally if logger is passed
        it logs this action using given logger

        :param msg: message that will be printed to user
        :param log: logging instance, to use to additionally log this message

        """
        if log and isinstance(log, logging):
            log(msg)

    def run(self, args):
        """
        Overrides Command.run

        Checks for a config file argument and loads it.
        """
        if len(args) < self.min_args:
            raise BadCommand(
                self.min_args_error % {'min_args': self.min_args,
                                       'actual_args': len(args)})

        # Decrement because we're going to lob off the first argument.
        # @@ This is hacky
        self.min_args -= 1
        self.bootstrap_config(args[0])
        self.update_parser()
        return super(BasePasterCommand, self).run(args[1:])

    def update_parser(self):
        """
        Abstract method. Allows for the class's parser to be updated
        before the superclass's `run` method is called. Necessary to
        allow options/arguments to be passed through to the underlying
        celery command.
        """
        raise NotImplementedError("Abstract Method.")

    def bootstrap_config(self, conf):
        """
        Loads the pylons configuration.
        """
        from pylons import config as pylonsconfig

        self.path_to_ini_file = os.path.realpath(conf)
        conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
        pylonsconfig.init_app(conf.global_conf, conf.local_conf)
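The second file, rhodecode/model/scm.py, only gains the __future__ import at the top of the module: on Python 2.5 the with statement is not enabled by default (it became a regular keyword in Python 2.6), so a module that uses with has to declare the import first or it fails to compile on 2.5, presumably the case for code further down in this module, outside the excerpt shown here. A small illustration, assuming a helper that reads a file with the with statement:

    from __future__ import with_statement  # no-op on 2.6+, required on Python 2.5

    def read_first_line(path):
        # the file is closed even if readline() raises
        with open(path) as f:
            return f.readline()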
--- a/rhodecode/model/scm.py
+++ b/rhodecode/model/scm.py
@@ -1,597 +1,598 @@
# -*- coding: utf-8 -*-
"""
    rhodecode.model.scm
    ~~~~~~~~~~~~~~~~~~~

    Scm model for RhodeCode

    :created_on: Apr 9, 2010
    :author: marcink
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
    :license: GPLv3, see COPYING for more details.
"""
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
+from __future__ import with_statement
import os
import re
import time
import traceback
import logging
import cStringIO
import pkg_resources
from os.path import dirname as dn, join as jn

from sqlalchemy import func
from pylons.i18n.translation import _

import rhodecode
from rhodecode.lib.vcs import get_backend
from rhodecode.lib.vcs.exceptions import RepositoryError
from rhodecode.lib.vcs.utils.lazy import LazyProperty
from rhodecode.lib.vcs.nodes import FileNode

from rhodecode import BACKENDS
from rhodecode.lib import helpers as h
from rhodecode.lib.utils2 import safe_str, safe_unicode
from rhodecode.lib.auth import HasRepoPermissionAny, HasReposGroupPermissionAny
from rhodecode.lib.utils import get_repos as get_filesystem_repos, make_ui, \
    action_logger, EmptyChangeset, REMOVED_REPO_PAT
from rhodecode.model import BaseModel
from rhodecode.model.db import Repository, RhodeCodeUi, CacheInvalidation, \
    UserFollowing, UserLog, User, RepoGroup, PullRequest

log = logging.getLogger(__name__)


class UserTemp(object):
    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)


class RepoTemp(object):
    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)


class CachedRepoList(object):
    """
    Cached repo list, uses in-memory cache after initialization, that is
    super fast
    """

    def __init__(self, db_repo_list, repos_path, order_by=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        self.reversed = (order_by or '').startswith('-')

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())

    def __iter__(self):
        # pre-propagated cache_map to save executing select statements
        # for each repo
        cache_map = CacheInvalidation.get_cache_map()

        for dbr in self.db_repo_list:
            scmr = dbr.scm_instance_cached(cache_map)
            # check permission at this level
            if not HasRepoPermissionAny(
                'repository.read', 'repository.write', 'repository.admin'
            )(dbr.repo_name, 'get repo check'):
                continue

            if scmr is None:
                log.error(
                    '%s this repository is present in database but it '
                    'cannot be created as an scm instance' % dbr.repo_name
                )
                continue

            last_change = scmr.last_change
            tip = h.get_changeset_safe(scmr, 'tip')

            tmp_d = {}
            tmp_d['name'] = dbr.repo_name
            tmp_d['name_sort'] = tmp_d['name'].lower()
            tmp_d['description'] = dbr.description
            tmp_d['description_sort'] = tmp_d['description'].lower()
            tmp_d['last_change'] = last_change
            tmp_d['last_change_sort'] = time.mktime(last_change.timetuple())
            tmp_d['tip'] = tip.raw_id
            tmp_d['tip_sort'] = tip.revision
            tmp_d['rev'] = tip.revision
            tmp_d['contact'] = dbr.user.full_contact
            tmp_d['contact_sort'] = tmp_d['contact']
            tmp_d['owner_sort'] = tmp_d['contact']
            tmp_d['repo_archives'] = list(scmr._get_archives())
            tmp_d['last_msg'] = tip.message
            tmp_d['author'] = tip.author
            tmp_d['dbrepo'] = dbr.get_dict()
            tmp_d['dbrepo_fork'] = dbr.fork.get_dict() if dbr.fork else {}
            yield tmp_d


class SimpleCachedRepoList(CachedRepoList):
    """
    Lighter version of CachedRepoList without the scm initialisation
    """

    def __iter__(self):
        for dbr in self.db_repo_list:
            # check permission at this level
            if not HasRepoPermissionAny(
                'repository.read', 'repository.write', 'repository.admin'
            )(dbr.repo_name, 'get repo check'):
                continue

            tmp_d = {}
            tmp_d['name'] = dbr.repo_name
            tmp_d['name_sort'] = tmp_d['name'].lower()
            tmp_d['description'] = dbr.description
            tmp_d['description_sort'] = tmp_d['description'].lower()
            tmp_d['dbrepo'] = dbr.get_dict()
            tmp_d['dbrepo_fork'] = dbr.fork.get_dict() if dbr.fork else {}
            yield tmp_d


class GroupList(object):

    def __init__(self, db_repo_group_list):
        self.db_repo_group_list = db_repo_group_list

    def __len__(self):
        return len(self.db_repo_group_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())

    def __iter__(self):
        for dbgr in self.db_repo_group_list:
            # check permission at this level
            if not HasReposGroupPermissionAny(
                'group.read', 'group.write', 'group.admin'
            )(dbgr.group_name, 'get group repo check'):
                continue

            yield dbgr

178
179
179 class ScmModel(BaseModel):
180 class ScmModel(BaseModel):
180 """
181 """
181 Generic Scm Model
182 Generic Scm Model
182 """
183 """
183
184
184 def __get_repo(self, instance):
185 def __get_repo(self, instance):
185 cls = Repository
186 cls = Repository
186 if isinstance(instance, cls):
187 if isinstance(instance, cls):
187 return instance
188 return instance
188 elif isinstance(instance, int) or str(instance).isdigit():
189 elif isinstance(instance, int) or str(instance).isdigit():
189 return cls.get(instance)
190 return cls.get(instance)
190 elif isinstance(instance, basestring):
191 elif isinstance(instance, basestring):
191 return cls.get_by_repo_name(instance)
192 return cls.get_by_repo_name(instance)
192 elif instance:
193 elif instance:
193 raise Exception('given object must be int, basestr or Instance'
194 raise Exception('given object must be int, basestr or Instance'
194 ' of %s got %s' % (type(cls), type(instance)))
195 ' of %s got %s' % (type(cls), type(instance)))
195
196
196 @LazyProperty
197 @LazyProperty
197 def repos_path(self):
198 def repos_path(self):
198 """
199 """
199 Get's the repositories root path from database
200 Get's the repositories root path from database
200 """
201 """
201
202
202 q = self.sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one()
203 q = self.sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one()
203
204
204 return q.ui_value
205 return q.ui_value
205
206
    def repo_scan(self, repos_path=None):
        """
        Listing of repositories in the given path. This path should not be a
        repository itself. Returns a dictionary of repository objects

        :param repos_path: path to directory containing repositories
        """

        if repos_path is None:
            repos_path = self.repos_path

        log.info('scanning for repositories in %s' % repos_path)

        baseui = make_ui('db')
        repos = {}

        for name, path in get_filesystem_repos(repos_path, recursive=True):
            # skip removed repos
            if REMOVED_REPO_PAT.match(name):
                continue

            # name needs to be decomposed and put back together using the /
            # since this is the internal storage separator for rhodecode
            name = Repository.url_sep().join(name.split(os.sep))

            try:
                if name in repos:
                    raise RepositoryError('Duplicate repository name %s '
                                          'found in %s' % (name, path))
                else:

                    klass = get_backend(path[0])

                    if path[0] == 'hg' and path[0] in BACKENDS.keys():
                        repos[name] = klass(safe_str(path[1]), baseui=baseui)

                    if path[0] == 'git' and path[0] in BACKENDS.keys():
                        repos[name] = klass(path[1])
            except OSError:
                continue

        return repos

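    # A rough usage sketch for repo_scan (hypothetical: the `scm` instance of
    # this model class and the '/srv/repositories' path are illustrative only):
    #
    #   scanned = scm.repo_scan('/srv/repositories')
    #   for repo_name, backend in scanned.items():
    #       print repo_name, backend.alias    # alias is e.g. 'hg' or 'git'
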
    def get_repos(self, all_repos=None, sort_key=None, simple=False):
        """
        Get all repos from the db and for each repo create its
        backend instance and fill that backend with information from database

        :param all_repos: list of repository names as strings;
            give a specific repositories list, good for filtering

        :param sort_key: initial sorting of repos
        :param simple: use SimpleCachedRepoList - one without the SCM info
        """
        if all_repos is None:
            all_repos = self.sa.query(Repository)\
                .filter(Repository.group_id == None)\
                .order_by(func.lower(Repository.repo_name)).all()
        if simple:
            repo_iter = SimpleCachedRepoList(all_repos,
                                             repos_path=self.repos_path,
                                             order_by=sort_key)
        else:
            repo_iter = CachedRepoList(all_repos,
                                       repos_path=self.repos_path,
                                       order_by=sort_key)

        return repo_iter

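    # NOTE: when all_repos is not supplied, only top-level repositories are
    # queried (Repository.group_id == None); repositories nested inside a
    # repository group are not part of the returned iterator.
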
    def get_repos_groups(self, all_groups=None):
        if all_groups is None:
            all_groups = RepoGroup.query()\
                .filter(RepoGroup.group_parent_id == None).all()
        group_iter = GroupList(all_groups)

        return group_iter

    def mark_for_invalidation(self, repo_name):
        """
        Puts a cache invalidation task into the db for
        further global cache invalidation

        :param repo_name: name of the repository for which invalidation
            should take place
        """
        CacheInvalidation.set_invalidate(repo_name)

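    # NOTE: this helper only records the invalidation through
    # CacheInvalidation.set_invalidate; it does not rebuild any cached data
    # itself.
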
    def toggle_following_repo(self, follow_repo_id, user_id):

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repo_id == follow_repo_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                self.sa.delete(f)
                action_logger(UserTemp(user_id),
                              'stopped_following_repo',
                              RepoTemp(follow_repo_id))
                return
            except:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_repo_id = follow_repo_id
            self.sa.add(f)

            action_logger(UserTemp(user_id),
                          'started_following_repo',
                          RepoTemp(follow_repo_id))
        except:
            log.error(traceback.format_exc())
            raise

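    # NOTE: the toggle_following_* helpers act as true toggles: an existing
    # UserFollowing row is deleted (unfollow), otherwise a new one is created
    # (follow). Only the repository variant writes journal entries via
    # action_logger.
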
    def toggle_following_user(self, follow_user_id, user_id):
        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user_id == follow_user_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                self.sa.delete(f)
                return
            except:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_user_id = follow_user_id
            self.sa.add(f)
        except:
            log.error(traceback.format_exc())
            raise

    def is_following_repo(self, repo_name, user_id, cache=False):
        r = self.sa.query(Repository)\
            .filter(Repository.repo_name == repo_name).scalar()

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == r)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None

    def is_following_user(self, username, user_id, cache=False):
        u = User.get_by_username(username)

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user == u)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None

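    # NOTE: the `cache` keyword of is_following_repo and is_following_user is
    # accepted but unused; both helpers always query the database directly.
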
    def get_followers(self, repo):
        repo = self._get_repo(repo)

        return self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == repo).count()

    def get_forks(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(Repository)\
            .filter(Repository.fork == repo).count()

    def get_pull_requests(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(PullRequest)\
            .filter(PullRequest.other_repo == repo).count()

    def mark_as_fork(self, repo, fork, user):
        repo = self.__get_repo(repo)
        fork = self.__get_repo(fork)
        repo.fork = fork
        self.sa.add(repo)
        return repo

    def pull_changes(self, repo, username):
        dbrepo = self.__get_repo(repo)
        clone_uri = dbrepo.clone_uri
        if not clone_uri:
            raise Exception("This repository doesn't have a clone uri")

        repo = dbrepo.scm_instance
        try:
            extras = {
                'ip': '',
                'username': username,
                'action': 'push_remote',
                'repository': dbrepo.repo_name,
                'scm': repo.alias,
            }
            Repository.inject_ui(repo, extras=extras)

            if repo.alias == 'git':
                repo.fetch(clone_uri)
            else:
                repo.pull(clone_uri)
            self.mark_for_invalidation(dbrepo.repo_name)
        except:
            log.error(traceback.format_exc())
            raise

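    # NOTE: pull_changes pulls from the clone_uri stored on the database record;
    # git backends use fetch(), anything else (mercurial) uses pull(). The
    # injected extras attribute the resulting 'push_remote' action to the given
    # username, and the repository cache is invalidated afterwards.
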
    def commit_change(self, repo, repo_name, cs, user, author, message,
                      content, f_path):

        if repo.alias == 'hg':
            from rhodecode.lib.vcs.backends.hg import \
                MercurialInMemoryChangeset as IMC
        elif repo.alias == 'git':
            from rhodecode.lib.vcs.backends.git import \
                GitInMemoryChangeset as IMC

        # converting here ensures we have properly encoded values;
        # in any other case this will throw exceptions and deny the commit
        content = safe_str(content)
        path = safe_str(f_path)
        # message and author need to be unicode;
        # the proper backend should then translate that into the required type
        message = safe_unicode(message)
        author = safe_unicode(author)
        m = IMC(repo)
        m.change(FileNode(path, content))
        tip = m.commit(message=message,
                       author=author,
                       parents=[cs], branch=cs.branch)

        new_cs = tip.short_id
        action = 'push_local:%s' % new_cs

        action_logger(user, action, repo_name)

        self.mark_for_invalidation(repo_name)

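    # A rough usage sketch for commit_change (hypothetical: `scm`, `db_repo` and
    # `request_user` are illustrative names, not defined in this module):
    #
    #   vcs_repo = db_repo.scm_instance
    #   tip = vcs_repo.get_changeset()
    #   scm.commit_change(vcs_repo, db_repo.repo_name, tip, request_user,
    #                     author=u'John Doe <john@example.com>',
    #                     message=u'edited README', content='new text',
    #                     f_path='README.rst')
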
    def create_node(self, repo, repo_name, cs, user, author, message, content,
                    f_path):
        if repo.alias == 'hg':
            from rhodecode.lib.vcs.backends.hg import MercurialInMemoryChangeset as IMC
        elif repo.alias == 'git':
            from rhodecode.lib.vcs.backends.git import GitInMemoryChangeset as IMC
        # converting here ensures we have properly encoded values;
        # in any other case this will throw exceptions and deny the commit

        if isinstance(content, (basestring,)):
            content = safe_str(content)
        elif isinstance(content, (file, cStringIO.OutputType,)):
            content = content.read()
        else:
            raise Exception('Content is of unrecognized type %s' % (
                type(content)
            ))

        message = safe_unicode(message)
        author = safe_unicode(author)
        path = safe_str(f_path)
        m = IMC(repo)

        if isinstance(cs, EmptyChangeset):
            # EmptyChangeset means we're editing an empty repository
            parents = None
        else:
            parents = [cs]

        m.add(FileNode(path, content=content))
        tip = m.commit(message=message,
                       author=author,
                       parents=parents, branch=cs.branch)
        new_cs = tip.short_id
        action = 'push_local:%s' % new_cs

        action_logger(user, action, repo_name)

        self.mark_for_invalidation(repo_name)

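    # NOTE: in both commit_change and create_node the in-memory changeset class
    # IMC is only bound for the 'hg' and 'git' aliases; any other backend alias
    # would reach IMC(repo) with the name undefined and raise NameError.
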
    def get_nodes(self, repo_name, revision, root_path='/', flat=True):
        """
        Recursive walk in the root dir, returning all paths found in that dir,
        based on the repository walk function

        :param repo_name: name of repository
        :param revision: revision for which to list nodes
        :param root_path: root path to list
        :param flat: return plain paths as a list; if False, returns dicts with
            a name and type description

        """
        _files = list()
        _dirs = list()
        try:
            _repo = self.__get_repo(repo_name)
            changeset = _repo.scm_instance.get_changeset(revision)
            root_path = root_path.lstrip('/')
            for topnode, dirs, files in changeset.walk(root_path):
                for f in files:
                    _files.append(f.path if flat else {"name": f.path,
                                                       "type": "file"})
                for d in dirs:
                    _dirs.append(d.path if flat else {"name": d.path,
                                                      "type": "dir"})
        except RepositoryError:
            log.debug(traceback.format_exc())
            raise

        return _dirs, _files

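    # NOTE: get_nodes returns a (dirs, files) tuple. With flat=True both are
    # plain path lists; with flat=False each entry is a dict such as
    # {"name": "docs/index.rst", "type": "file"} (the path here is hypothetical).
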
    def get_unread_journal(self):
        return self.sa.query(UserLog).count()

    def get_repo_landing_revs(self, repo=None):
        """
        Generates select options with tags, branches and bookmarks (for hg only),
        grouped by type

        :param repo: repository name, id or Repository instance
        """

        hist_l = []
        choices = []
        repo = self.__get_repo(repo)
        hist_l.append(['tip', _('latest tip')])
        choices.append('tip')
        if not repo:
            return choices, hist_l

        repo = repo.scm_instance

        branches_group = ([(k, k) for k, v in
                           repo.branches.iteritems()], _("Branches"))
        hist_l.append(branches_group)
        choices.extend([x[0] for x in branches_group[0]])

        if repo.alias == 'hg':
            bookmarks_group = ([(k, k) for k, v in
                                repo.bookmarks.iteritems()], _("Bookmarks"))
            hist_l.append(bookmarks_group)
            choices.extend([x[0] for x in bookmarks_group[0]])

        tags_group = ([(k, k) for k, v in
                       repo.tags.iteritems()], _("Tags"))
        hist_l.append(tags_group)
        choices.extend([x[0] for x in tags_group[0]])

        return choices, hist_l

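    # A rough sketch of the returned structure (hypothetical hg repository with
    # one branch, no bookmarks and one tag; labels come from the translation
    # helper), roughly:
    #
    #   choices == ['tip', 'default', 'v0.1']
    #   hist_l  == [['tip', u'latest tip'],
    #               ([('default', 'default')], u'Branches'),
    #               ([], u'Bookmarks'),
    #               ([('v0.1', 'v0.1')], u'Tags')]
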
    def install_git_hook(self, repo, force_create=False):
        """
        Creates a rhodecode hook inside a git repository

        :param repo: Instance of VCS repo
        :param force_create: Create even if same name hook exists
        """

        loc = jn(repo.path, 'hooks')
        if not repo.bare:
            loc = jn(repo.path, '.git', 'hooks')
        if not os.path.isdir(loc):
            os.makedirs(loc)

        tmpl = pkg_resources.resource_string(
            'rhodecode', jn('config', 'post_receive_tmpl.py')
        )

        _hook_file = jn(loc, 'post-receive')
        _rhodecode_hook = False
        log.debug('Installing git hook in repo %s' % repo)
        if os.path.exists(_hook_file):
            # let's take a look at this hook, maybe it's rhodecode?
            log.debug('hook exists, checking if it is from rhodecode')
            _HOOK_VER_PAT = re.compile(r'^RC_HOOK_VER')
            with open(_hook_file, 'rb') as f:
                data = f.read()
                matches = re.compile(r'(?:%s)\s*=\s*(.*)'
                                     % 'RC_HOOK_VER').search(data)
                if matches:
                    try:
                        ver = matches.groups()[0]
                        log.debug('got %s it is rhodecode' % (ver))
                        _rhodecode_hook = True
                    except:
                        log.error(traceback.format_exc())

        if _rhodecode_hook or force_create:
            log.debug('writing hook file !')
            with open(_hook_file, 'wb') as f:
                tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
                f.write(tmpl)
            os.chmod(_hook_file, 0755)
        else:
            log.debug('skipping writing hook file')
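    # NOTE: the hook file is only (re)written when the existing post-receive
    # script is recognized as a rhodecode hook (via its RC_HOOK_VER marker) or
    # when force_create is passed; a pre-existing custom hook is left untouched,
    # and a missing hook is likewise only created with force_create=True.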