Remove wrong/unnecessary/unfixable comment(s)
Bradley M. Kuhn
r4175:e9f6b533 kallithea-2.2.5-r...
@@ -1,476 +1,468 b''
1 1 # -*- coding: utf-8 -*-
2 2 # This program is free software: you can redistribute it and/or modify
3 3 # it under the terms of the GNU General Public License as published by
4 4 # the Free Software Foundation, either version 3 of the License, or
5 5 # (at your option) any later version.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 """
15 15 rhodecode.lib.hooks
16 16 ~~~~~~~~~~~~~~~~~~~
17 17
18 18 Hooks run by rhodecode
19 19
20 20 :created_on: Aug 6, 2010
21 21 :author: marcink
22 22 :copyright: (c) 2013 RhodeCode GmbH.
23 23 :license: GPLv3, see LICENSE for more details.
24 24 """
25 25
26 26 import os
27 27 import sys
28 28 import time
29 29 import binascii
30 30
31 31 from rhodecode.lib.vcs.utils.hgcompat import nullrev, revrange
32 32 from rhodecode.lib import helpers as h
33 33 from rhodecode.lib.utils import action_logger
34 34 from rhodecode.lib.vcs.backends.base import EmptyChangeset
35 35 from rhodecode.lib.exceptions import HTTPLockedRC, UserCreationError
36 36 from rhodecode.lib.utils2 import safe_str, _extract_extras
37 37 from rhodecode.model.db import Repository, User
38 38
39 39
40 40 def _get_scm_size(alias, root_path):
41 41
42 42 if not alias.startswith('.'):
43 43 alias += '.'
44 44
45 45 size_scm, size_root = 0, 0
46 46 for path, dirs, files in os.walk(safe_str(root_path)):
47 47 if path.find(alias) != -1:
48 48 for f in files:
49 49 try:
50 50 size_scm += os.path.getsize(os.path.join(path, f))
51 51 except OSError:
52 52 pass
53 53 else:
54 54 for f in files:
55 55 try:
56 56 size_root += os.path.getsize(os.path.join(path, f))
57 57 except OSError:
58 58 pass
59 59
60 60 size_scm_f = h.format_byte_size(size_scm)
61 61 size_root_f = h.format_byte_size(size_root)
62 62 size_total_f = h.format_byte_size(size_root + size_scm)
63 63
64 64 return size_scm_f, size_root_f, size_total_f
65 65
66 66
67 67 def repo_size(ui, repo, hooktype=None, **kwargs):
68 68 """
69 69 Presents size of repository after push
70 70
71 71 :param ui:
72 72 :param repo:
73 73 :param hooktype:
74 74 """
75 75
76 76 size_hg_f, size_root_f, size_total_f = _get_scm_size('.hg', repo.root)
77 77
78 78 last_cs = repo[len(repo) - 1]
79 79
80 80 msg = ('Repository size .hg:%s repo:%s total:%s\n'
81 81 'Last revision is now r%s:%s\n') % (
82 82 size_hg_f, size_root_f, size_total_f, last_cs.rev(), last_cs.hex()[:12]
83 83 )
84 84
85 85 sys.stdout.write(msg)
86 86
87 87
88 88 def pre_push(ui, repo, **kwargs):
89 89 # pre push function, currently used to ban pushing when
90 90 # repository is locked
91 91 ex = _extract_extras()
92 92
93 93 usr = User.get_by_username(ex.username)
94 94 if ex.locked_by[0] and usr.user_id != int(ex.locked_by[0]):
95 95 locked_by = User.get(ex.locked_by[0]).username
96 96 # this exception is interpreted in git/hg middlewares and based
97 97 # on that proper return code is served to client
98 98 _http_ret = HTTPLockedRC(ex.repository, locked_by)
99 99 if str(_http_ret.code).startswith('2'):
100 100 #2xx Codes don't raise exceptions
101 101 sys.stdout.write(_http_ret.title)
102 102 else:
103 103 raise _http_ret
104 104
105 105
106 106 def pre_pull(ui, repo, **kwargs):
107 107 # pre pull function, currently used to ban pulling when
108 108 # repository is locked
109 109 ex = _extract_extras()
110 110 if ex.locked_by[0]:
111 111 locked_by = User.get(ex.locked_by[0]).username
112 112 # this exception is interpreted in git/hg middlewares and based
113 113 # on that proper return code is served to client
114 114 _http_ret = HTTPLockedRC(ex.repository, locked_by)
115 115 if str(_http_ret.code).startswith('2'):
116 116 #2xx Codes don't raise exceptions
117 117 sys.stdout.write(_http_ret.title)
118 118 else:
119 119 raise _http_ret
120 120
121 121
122 122 def log_pull_action(ui, repo, **kwargs):
123 123 """
124 124 Logs user last pull action
125 125
126 126 :param ui:
127 127 :param repo:
128 128 """
129 129 ex = _extract_extras()
130 130
131 131 user = User.get_by_username(ex.username)
132 132 action = 'pull'
133 133 action_logger(user, action, ex.repository, ex.ip, commit=True)
134 134 # extension hook call
135 135 from rhodecode import EXTENSIONS
136 136 callback = getattr(EXTENSIONS, 'PULL_HOOK', None)
137 137 if callable(callback):
138 138 kw = {}
139 139 kw.update(ex)
140 140 callback(**kw)
141 141
142 142 if ex.make_lock is not None and ex.make_lock:
143 143 Repository.lock(Repository.get_by_repo_name(ex.repository), user.user_id)
144 144 #msg = 'Made lock on repo `%s`' % repository
145 145 #sys.stdout.write(msg)
146 146
147 147 if ex.locked_by[0]:
148 148 locked_by = User.get(ex.locked_by[0]).username
149 149 _http_ret = HTTPLockedRC(ex.repository, locked_by)
150 150 if str(_http_ret.code).startswith('2'):
151 151 #2xx Codes don't raise exceptions
152 152 sys.stdout.write(_http_ret.title)
153 153 return 0
154 154
155 155
156 156 def log_push_action(ui, repo, **kwargs):
157 157 """
158 158 Maps user last push action to new changeset id, from mercurial
159 159
160 160 :param ui:
161 161 :param repo: repo object containing the `ui` object
162 162 """
163 163
164 164 ex = _extract_extras()
165 165
166 166 action_tmpl = ex.action + ':%s'
167 167 revs = []
168 168 if ex.scm == 'hg':
169 169 node = kwargs['node']
170 170
171 171 def get_revs(repo, rev_opt):
172 172 if rev_opt:
173 173 revs = revrange(repo, rev_opt)
174 174
175 175 if len(revs) == 0:
176 176 return (nullrev, nullrev)
177 177 return max(revs), min(revs)
178 178 else:
179 179 return len(repo) - 1, 0
180 180
181 181 stop, start = get_revs(repo, [node + ':'])
182 182 _h = binascii.hexlify
183 183 revs = [_h(repo[r].node()) for r in xrange(start, stop + 1)]
184 184 elif ex.scm == 'git':
185 185 revs = kwargs.get('_git_revs', [])
186 186 if '_git_revs' in kwargs:
187 187 kwargs.pop('_git_revs')
188 188
189 189 action = action_tmpl % ','.join(revs)
190 190 action_logger(ex.username, action, ex.repository, ex.ip, commit=True)
191 191
192 192 # extension hook call
193 193 from rhodecode import EXTENSIONS
194 194 callback = getattr(EXTENSIONS, 'PUSH_HOOK', None)
195 195 if callable(callback):
196 196 kw = {'pushed_revs': revs}
197 197 kw.update(ex)
198 198 callback(**kw)
199 199
200 200 if ex.make_lock is not None and not ex.make_lock:
201 201 Repository.unlock(Repository.get_by_repo_name(ex.repository))
202 202 msg = 'Released lock on repo `%s`\n' % ex.repository
203 203 sys.stdout.write(msg)
204 204
205 205 if ex.locked_by[0]:
206 206 locked_by = User.get(ex.locked_by[0]).username
207 207 _http_ret = HTTPLockedRC(ex.repository, locked_by)
208 208 if str(_http_ret.code).startswith('2'):
209 209 #2xx Codes don't raise exceptions
210 210 sys.stdout.write(_http_ret.title)
211 211
212 212 return 0
213 213
214 214
215 215 def log_create_repository(repository_dict, created_by, **kwargs):
216 216 """
217 Post create repository Hook. This is a dummy function for admins to re-use
218 if needed. It's taken from rhodecode-extensions module and executed
219 if present
217 Post create repository Hook.
220 218
221 219 :param repository: dict dump of repository object
222 220 :param created_by: username who created repository
223 221
224 222 available keys of repository_dict:
225 223
226 224 'repo_type',
227 225 'description',
228 226 'private',
229 227 'created_on',
230 228 'enable_downloads',
231 229 'repo_id',
232 230 'user_id',
233 231 'enable_statistics',
234 232 'clone_uri',
235 233 'fork_id',
236 234 'group_id',
237 235 'repo_name'
238 236
239 237 """
240 238 from rhodecode import EXTENSIONS
241 239 callback = getattr(EXTENSIONS, 'CREATE_REPO_HOOK', None)
242 240 if callable(callback):
243 241 kw = {}
244 242 kw.update(repository_dict)
245 243 kw.update({'created_by': created_by})
246 244 kw.update(kwargs)
247 245 return callback(**kw)
248 246
249 247 return 0
250 248
251 249
252 250 def check_allowed_create_user(user_dict, created_by, **kwargs):
253 251 # pre create hooks
254 252 from rhodecode import EXTENSIONS
255 253 callback = getattr(EXTENSIONS, 'PRE_CREATE_USER_HOOK', None)
256 254 if callable(callback):
257 255 allowed, reason = callback(created_by=created_by, **user_dict)
258 256 if not allowed:
259 257 raise UserCreationError(reason)
260 258
261 259
262 260 def log_create_user(user_dict, created_by, **kwargs):
263 261 """
264 Post create user Hook. This is a dummy function for admins to re-use
265 if needed. It's taken from rhodecode-extensions module and executed
266 if present
262 Post create user Hook.
267 263
268 264 :param user_dict: dict dump of user object
269 265
270 266 available keys for user_dict:
271 267
272 268 'username',
273 269 'full_name_or_username',
274 270 'full_contact',
275 271 'user_id',
276 272 'name',
277 273 'firstname',
278 274 'short_contact',
279 275 'admin',
280 276 'lastname',
281 277 'ip_addresses',
282 278 'ldap_dn',
283 279 'email',
284 280 'api_key',
285 281 'last_login',
286 282 'full_name',
287 283 'active',
288 284 'password',
289 285 'emails',
290 286 'inherit_default_permissions'
291 287
292 288 """
293 289 from rhodecode import EXTENSIONS
294 290 callback = getattr(EXTENSIONS, 'CREATE_USER_HOOK', None)
295 291 if callable(callback):
296 292 return callback(created_by=created_by, **user_dict)
297 293
298 294 return 0
299 295
300 296
301 297 def log_delete_repository(repository_dict, deleted_by, **kwargs):
302 298 """
303 Post delete repository Hook. This is a dummy function for admins to re-use
304 if needed. It's taken from rhodecode-extensions module and executed
305 if present
299 Post delete repository Hook.
306 300
307 301 :param repository: dict dump of repository object
308 302 :param deleted_by: username who deleted the repository
309 303
310 304 available keys of repository_dict:
311 305
312 306 'repo_type',
313 307 'description',
314 308 'private',
315 309 'created_on',
316 310 'enable_downloads',
317 311 'repo_id',
318 312 'user_id',
319 313 'enable_statistics',
320 314 'clone_uri',
321 315 'fork_id',
322 316 'group_id',
323 317 'repo_name'
324 318
325 319 """
326 320 from rhodecode import EXTENSIONS
327 321 callback = getattr(EXTENSIONS, 'DELETE_REPO_HOOK', None)
328 322 if callable(callback):
329 323 kw = {}
330 324 kw.update(repository_dict)
331 325 kw.update({'deleted_by': deleted_by,
332 326 'deleted_on': time.time()})
333 327 kw.update(kwargs)
334 328 return callback(**kw)
335 329
336 330 return 0
337 331
338 332
339 333 def log_delete_user(user_dict, deleted_by, **kwargs):
340 334 """
341 Post delete user Hook. This is a dummy function for admins to re-use
342 if needed. It's taken from rhodecode-extensions module and executed
343 if present
335 Post delete user Hook.
344 336
345 337 :param user_dict: dict dump of user object
346 338
347 339 available keys for user_dict:
348 340
349 341 'username',
350 342 'full_name_or_username',
351 343 'full_contact',
352 344 'user_id',
353 345 'name',
354 346 'firstname',
355 347 'short_contact',
356 348 'admin',
357 349 'lastname',
358 350 'ip_addresses',
359 351 'ldap_dn',
360 352 'email',
361 353 'api_key',
362 354 'last_login',
363 355 'full_name',
364 356 'active',
365 357 'password',
366 358 'emails',
367 359 'inherit_default_permissions'
368 360
369 361 """
370 362 from rhodecode import EXTENSIONS
371 363 callback = getattr(EXTENSIONS, 'DELETE_USER_HOOK', None)
372 364 if callable(callback):
373 365 return callback(deleted_by=deleted_by, **user_dict)
374 366
375 367 return 0
376 368
377 369
378 370 handle_git_pre_receive = (lambda repo_path, revs, env:
379 371 handle_git_receive(repo_path, revs, env, hook_type='pre'))
380 372 handle_git_post_receive = (lambda repo_path, revs, env:
381 373 handle_git_receive(repo_path, revs, env, hook_type='post'))
382 374
383 375
384 376 def handle_git_receive(repo_path, revs, env, hook_type='post'):
385 377 """
386 378 A really hacky method that is run by the git post-receive hook and logs
387 379 a push action together with pushed revisions. It's executed by subprocess,
388 380 thus it needs all info to be able to create an on-the-fly pylons environment,
389 381 connect to the database and run the logging code. Hacky as sh*t but works.
390 382
391 383 :param repo_path:
392 384 :param revs:
393 385 :param env:
394 386 """
395 387 from paste.deploy import appconfig
396 388 from sqlalchemy import engine_from_config
397 389 from rhodecode.config.environment import load_environment
398 390 from rhodecode.model import init_model
399 391 from rhodecode.model.db import RhodeCodeUi
400 392 from rhodecode.lib.utils import make_ui
401 393 extras = _extract_extras(env)
402 394
403 395 path, ini_name = os.path.split(extras['config'])
404 396 conf = appconfig('config:%s' % ini_name, relative_to=path)
405 397 load_environment(conf.global_conf, conf.local_conf, test_env=False,
406 398 test_index=False)
407 399
408 400 engine = engine_from_config(conf, 'sqlalchemy.db1.')
409 401 init_model(engine)
410 402
411 403 baseui = make_ui('db')
412 404 # fix if it's not a bare repo
413 405 if repo_path.endswith(os.sep + '.git'):
414 406 repo_path = repo_path[:-5]
415 407
416 408 repo = Repository.get_by_full_path(repo_path)
417 409 if not repo:
418 410 raise OSError('Repository %s not found in database'
419 411 % (safe_str(repo_path)))
420 412
421 413 _hooks = dict(baseui.configitems('hooks')) or {}
422 414
423 415 if hook_type == 'pre':
424 416 repo = repo.scm_instance
425 417 else:
426 418 # post push should never use the cached instance
427 419 repo = repo.scm_instance_no_cache()
428 420
429 421 if hook_type == 'pre':
430 422 pre_push(baseui, repo)
431 423
432 424 # if push hook is enabled via web interface
433 425 elif hook_type == 'post' and _hooks.get(RhodeCodeUi.HOOK_PUSH):
434 426 rev_data = []
435 427 for l in revs:
436 428 old_rev, new_rev, ref = l.split(' ')
437 429 _ref_data = ref.split('/')
438 430 if _ref_data[1] in ['tags', 'heads']:
439 431 rev_data.append({'old_rev': old_rev,
440 432 'new_rev': new_rev,
441 433 'ref': ref,
442 434 'type': _ref_data[1],
443 435 'name': _ref_data[2].strip()})
444 436
445 437 git_revs = []
446 438
447 439 for push_ref in rev_data:
448 440 _type = push_ref['type']
449 441 if _type == 'heads':
450 442 if push_ref['old_rev'] == EmptyChangeset().raw_id:
451 443 # update the symbolic ref if we push new repo
452 444 if repo.is_empty():
453 445 repo._repo.refs.set_symbolic_ref('HEAD',
454 446 'refs/heads/%s' % push_ref['name'])
455 447
456 448 cmd = "for-each-ref --format='%(refname)' 'refs/heads/*'"
457 449 heads = repo.run_git_command(cmd)[0]
458 450 heads = heads.replace(push_ref['ref'], '')
459 451 heads = ' '.join(map(lambda c: c.strip('\n').strip(),
460 452 heads.splitlines()))
461 453 cmd = (('log %(new_rev)s' % push_ref) +
462 454 ' --reverse --pretty=format:"%H" --not ' + heads)
463 455 git_revs += repo.run_git_command(cmd)[0].splitlines()
464 456
465 457 elif push_ref['new_rev'] == EmptyChangeset().raw_id:
466 458 #delete branch case
467 459 git_revs += ['delete_branch=>%s' % push_ref['name']]
468 460 else:
469 461 cmd = (('log %(old_rev)s..%(new_rev)s' % push_ref) +
470 462 ' --reverse --pretty=format:"%H"')
471 463 git_revs += repo.run_git_command(cmd)[0].splitlines()
472 464
473 465 elif _type == 'tags':
474 466 git_revs += ['tag=>%s' % push_ref['name']]
475 467
476 468 log_push_action(baseui, repo, _git_revs=git_revs)
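The PUSH_HOOK and PULL_HOOK callbacks looked up on the EXTENSIONS module above receive the extracted extras merged into keyword arguments. A minimal sketch of such a callback, as it could be defined in an rcextensions module (only keys actually referenced in this file are used; any other key would be an assumption):

import sys

def PUSH_HOOK(**kwargs):
    # log_push_action() adds 'pushed_revs'; the remaining keyword
    # arguments come from the extracted extras dict
    repository = kwargs.get('repository')
    username = kwargs.get('username')
    pushed_revs = kwargs.get('pushed_revs', [])
    sys.stdout.write('%s pushed %s revision(s) to %s\n'
                     % (username, len(pushed_revs), repository))
    return 0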
@@ -1,195 +1,195 b''
1 1 # -*- coding: utf-8 -*-
2 2 # This program is free software: you can redistribute it and/or modify
3 3 # it under the terms of the GNU General Public License as published by
4 4 # the Free Software Foundation, either version 3 of the License, or
5 5 # (at your option) any later version.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 """
15 15 rhodecode.lib.indexers.__init__
16 16 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
17 17
18 18 Whoosh indexing module for RhodeCode
19 19
20 20 :created_on: Aug 17, 2010
21 21 :author: marcink
22 22 :copyright: (c) 2013 RhodeCode GmbH.
23 23 :license: GPLv3, see LICENSE for more details.
24 24 """
25 25
26 26 import os
27 27 import sys
28 28 import logging
29 29 from os.path import dirname as dn, join as jn
30 30
31 #to get the rhodecode import
31 # Add location of top level folder to sys.path
32 32 sys.path.append(dn(dn(dn(os.path.realpath(__file__)))))
33 33
34 34 from whoosh.analysis import RegexTokenizer, LowercaseFilter, StopFilter
35 35 from whoosh.fields import TEXT, ID, STORED, NUMERIC, BOOLEAN, Schema, FieldType, DATETIME
36 36 from whoosh.formats import Characters
37 37 from whoosh.highlight import highlight as whoosh_highlight, HtmlFormatter, ContextFragmenter
38 38 from rhodecode.lib.utils2 import LazyProperty
39 39
40 40 log = logging.getLogger(__name__)
41 41
42 42 # CUSTOM ANALYZER wordsplit + lowercase filter
43 43 ANALYZER = RegexTokenizer(expression=r"\w+") | LowercaseFilter()
44 44
45 45 #INDEX SCHEMA DEFINITION
46 46 SCHEMA = Schema(
47 47 fileid=ID(unique=True),
48 48 owner=TEXT(),
49 49 repository=TEXT(stored=True),
50 50 path=TEXT(stored=True),
51 51 content=FieldType(format=Characters(), analyzer=ANALYZER,
52 52 scorable=True, stored=True),
53 53 modtime=STORED(),
54 54 extension=TEXT(stored=True)
55 55 )
56 56
57 57 IDX_NAME = 'HG_INDEX'
58 58 FORMATTER = HtmlFormatter('span', between='\n<span class="break">...</span>\n')
59 59 FRAGMENTER = ContextFragmenter(200)
60 60
61 61 CHGSETS_SCHEMA = Schema(
62 62 raw_id=ID(unique=True, stored=True),
63 63 date=NUMERIC(stored=True),
64 64 last=BOOLEAN(),
65 65 owner=TEXT(),
66 66 repository=ID(unique=True, stored=True),
67 67 author=TEXT(stored=True),
68 68 message=FieldType(format=Characters(), analyzer=ANALYZER,
69 69 scorable=True, stored=True),
70 70 parents=TEXT(),
71 71 added=TEXT(),
72 72 removed=TEXT(),
73 73 changed=TEXT(),
74 74 )
75 75
76 76 CHGSET_IDX_NAME = 'CHGSET_INDEX'
77 77
78 78 # used only to generate queries in journal
79 79 JOURNAL_SCHEMA = Schema(
80 80 username=TEXT(),
81 81 date=DATETIME(),
82 82 action=TEXT(),
83 83 repository=TEXT(),
84 84 ip=TEXT(),
85 85 )
86 86
87 87
88 88 class WhooshResultWrapper(object):
89 89 def __init__(self, search_type, searcher, matcher, highlight_items,
90 90 repo_location):
91 91 self.search_type = search_type
92 92 self.searcher = searcher
93 93 self.matcher = matcher
94 94 self.highlight_items = highlight_items
95 95 self.fragment_size = 200
96 96 self.repo_location = repo_location
97 97
98 98 @LazyProperty
99 99 def doc_ids(self):
100 100 docs_id = []
101 101 while self.matcher.is_active():
102 102 docnum = self.matcher.id()
103 103 chunks = [offsets for offsets in self.get_chunks()]
104 104 docs_id.append([docnum, chunks])
105 105 self.matcher.next()
106 106 return docs_id
107 107
108 108 def __str__(self):
109 109 return '<%s at %s>' % (self.__class__.__name__, len(self.doc_ids))
110 110
111 111 def __repr__(self):
112 112 return self.__str__()
113 113
114 114 def __len__(self):
115 115 return len(self.doc_ids)
116 116
117 117 def __iter__(self):
118 118 """
119 119 Allows iteration over results, and lazily generates content
120 120
121 121 *Requires* implementation of ``__getitem__`` method.
122 122 """
123 123 for docid in self.doc_ids:
124 124 yield self.get_full_content(docid)
125 125
126 126 def __getitem__(self, key):
127 127 """
128 128 Slicing of resultWrapper
129 129 """
130 130 i, j = key.start, key.stop
131 131
132 132 slices = []
133 133 for docid in self.doc_ids[i:j]:
134 134 slices.append(self.get_full_content(docid))
135 135 return slices
136 136
137 137 def get_full_content(self, docid):
138 138 res = self.searcher.stored_fields(docid[0])
139 139 log.debug('result: %s' % res)
140 140 if self.search_type == 'content':
141 141 full_repo_path = jn(self.repo_location, res['repository'])
142 142 f_path = res['path'].split(full_repo_path)[-1]
143 143 f_path = f_path.lstrip(os.sep)
144 144 content_short = self.get_short_content(res, docid[1])
145 145 res.update({'content_short': content_short,
146 146 'content_short_hl': self.highlight(content_short),
147 147 'f_path': f_path
148 148 })
149 149 elif self.search_type == 'path':
150 150 full_repo_path = jn(self.repo_location, res['repository'])
151 151 f_path = res['path'].split(full_repo_path)[-1]
152 152 f_path = f_path.lstrip(os.sep)
153 153 res.update({'f_path': f_path})
154 154 elif self.search_type == 'message':
155 155 res.update({'message_hl': self.highlight(res['message'])})
156 156
157 157 log.debug('result: %s' % res)
158 158
159 159 return res
160 160
161 161 def get_short_content(self, res, chunks):
162 162
163 163 return ''.join([res['content'][chunk[0]:chunk[1]] for chunk in chunks])
164 164
165 165 def get_chunks(self):
166 166 """
167 167 Smart function that chunks the content without overlapping
168 168 chunks, so it doesn't highlight the same close occurrences
169 169 twice.
170 170 """
171 171 memory = [(0, 0)]
172 172 if self.matcher.supports('positions'):
173 173 for span in self.matcher.spans():
174 174 start = span.startchar or 0
175 175 end = span.endchar or 0
176 176 start_offseted = max(0, start - self.fragment_size)
177 177 end_offseted = end + self.fragment_size
178 178
179 179 if start_offseted < memory[-1][1]:
180 180 start_offseted = memory[-1][1]
181 181 memory.append((start_offseted, end_offseted,))
182 182 yield (start_offseted, end_offseted,)
183 183
184 184 def highlight(self, content, top=5):
185 185 if self.search_type not in ['content', 'message']:
186 186 return ''
187 187 hl = whoosh_highlight(
188 188 text=content,
189 189 terms=self.highlight_items,
190 190 analyzer=ANALYZER,
191 191 fragmenter=FRAGMENTER,
192 192 formatter=FORMATTER,
193 193 top=top
194 194 )
195 195 return hl
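A minimal sketch of querying the file index built from SCHEMA above using plain Whoosh calls; index_location is an assumed path to the directory the indexing daemon writes to:

from whoosh.index import open_dir
from whoosh.qparser import QueryParser

idx = open_dir(index_location, indexname=IDX_NAME)
with idx.searcher() as searcher:
    # 'content' is the analyzed field; 'repository' and 'path' are stored
    query = QueryParser('content', schema=SCHEMA).parse(u'some term')
    for hit in searcher.search(query):
        print hit['repository'], hit['path']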
@@ -1,445 +1,445 b''
1 1 # -*- coding: utf-8 -*-
2 2 # This program is free software: you can redistribute it and/or modify
3 3 # it under the terms of the GNU General Public License as published by
4 4 # the Free Software Foundation, either version 3 of the License, or
5 5 # (at your option) any later version.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 """
15 15 rhodecode.lib.indexers.daemon
16 16 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
17 17
18 18 A daemon will read from task table and run tasks
19 19
20 20 :created_on: Jan 26, 2010
21 21 :author: marcink
22 22 :copyright: (c) 2013 RhodeCode GmbH.
23 23 :license: GPLv3, see LICENSE for more details.
24 24 """
25 25
26 26 from __future__ import with_statement
27 27
28 28 import os
29 29 import sys
30 30 import logging
31 31 import traceback
32 32
33 33 from shutil import rmtree
34 34 from time import mktime
35 35
36 36 from os.path import dirname as dn
37 37 from os.path import join as jn
38 38
39 #to get the rhodecode import
39 # Add location of top level folder to sys.path
40 40 project_path = dn(dn(dn(dn(os.path.realpath(__file__)))))
41 41 sys.path.append(project_path)
42 42
43 43 from rhodecode.config.conf import INDEX_EXTENSIONS
44 44 from rhodecode.model.scm import ScmModel
45 45 from rhodecode.model.db import Repository
46 46 from rhodecode.lib.utils2 import safe_unicode, safe_str
47 47 from rhodecode.lib.indexers import SCHEMA, IDX_NAME, CHGSETS_SCHEMA, \
48 48 CHGSET_IDX_NAME
49 49
50 50 from rhodecode.lib.vcs.exceptions import ChangesetError, RepositoryError, \
51 51 NodeDoesNotExistError
52 52
53 53 from whoosh.index import create_in, open_dir, exists_in
54 54 from whoosh.query import *
55 55 from whoosh.qparser import QueryParser
56 56
57 57 log = logging.getLogger('whoosh_indexer')
58 58
59 59
60 60 class WhooshIndexingDaemon(object):
61 61 """
62 62 Daemon for atomic indexing jobs
63 63 """
64 64
65 65 def __init__(self, indexname=IDX_NAME, index_location=None,
66 66 repo_location=None, sa=None, repo_list=None,
67 67 repo_update_list=None):
68 68 self.indexname = indexname
69 69
70 70 self.index_location = index_location
71 71 if not index_location:
72 72 raise Exception('You have to provide index location')
73 73
74 74 self.repo_location = repo_location
75 75 if not repo_location:
76 76 raise Exception('You have to provide repositories location')
77 77
78 78 self.repo_paths = ScmModel(sa).repo_scan(self.repo_location)
79 79
80 80 #filter repo list
81 81 if repo_list:
82 82 #Fix non-ascii repo names to unicode
83 83 repo_list = map(safe_unicode, repo_list)
84 84 self.filtered_repo_paths = {}
85 85 for repo_name, repo in self.repo_paths.items():
86 86 if repo_name in repo_list:
87 87 self.filtered_repo_paths[repo_name] = repo
88 88
89 89 self.repo_paths = self.filtered_repo_paths
90 90
91 91 #filter update repo list
92 92 self.filtered_repo_update_paths = {}
93 93 if repo_update_list:
94 94 self.filtered_repo_update_paths = {}
95 95 for repo_name, repo in self.repo_paths.items():
96 96 if repo_name in repo_update_list:
97 97 self.filtered_repo_update_paths[repo_name] = repo
98 98 self.repo_paths = self.filtered_repo_update_paths
99 99
100 100 self.initial = True
101 101 if not os.path.isdir(self.index_location):
102 102 os.makedirs(self.index_location)
103 103 log.info('Cannot run incremental index since it does not '
104 104 'yet exist - running full build')
105 105 elif not exists_in(self.index_location, IDX_NAME):
106 106 log.info('Running full index build as the file content '
107 107 'index does not exist')
108 108 elif not exists_in(self.index_location, CHGSET_IDX_NAME):
109 109 log.info('Running full index build as the changeset '
110 110 'index does not exist')
111 111 else:
112 112 self.initial = False
113 113
114 114 def _get_index_revision(self, repo):
115 115 db_repo = Repository.get_by_repo_name(repo.name_unicode)
116 116 landing_rev = 'tip'
117 117 if db_repo:
118 118 _rev_type, _rev = db_repo.landing_rev
119 119 landing_rev = _rev
120 120 return landing_rev
121 121
122 122 def _get_index_changeset(self, repo, index_rev=None):
123 123 if not index_rev:
124 124 index_rev = self._get_index_revision(repo)
125 125 cs = repo.get_changeset(index_rev)
126 126 return cs
127 127
128 128 def get_paths(self, repo):
129 129 """
130 130 Recursive walk in the root dir; returns a set of all paths in that dir,
131 131 based on the repository walk function
132 132 """
133 133 index_paths_ = set()
134 134 try:
135 135 cs = self._get_index_changeset(repo)
136 136 for _topnode, _dirs, files in cs.walk('/'):
137 137 for f in files:
138 138 index_paths_.add(jn(safe_str(repo.path), safe_str(f.path)))
139 139
140 140 except RepositoryError:
141 141 log.debug(traceback.format_exc())
142 142 pass
143 143 return index_paths_
144 144
145 145 def get_node(self, repo, path, index_rev=None):
146 146 """
147 147 Gets a filenode based on the given full path. It operates on strings for
148 148 hg/git compatibility.
149 149
150 150 :param repo: scm repo instance
151 151 :param path: full path including root location
152 152 :return: FileNode
153 153 """
154 154 root_path = safe_str(repo.path)+'/'
155 155 parts = safe_str(path).partition(root_path)
156 156 cs = self._get_index_changeset(repo, index_rev=index_rev)
157 157 node = cs.get_node(parts[-1])
158 158 return node
159 159
160 160 def get_node_mtime(self, node):
161 161 return mktime(node.last_changeset.date.timetuple())
162 162
163 163 def add_doc(self, writer, path, repo, repo_name, index_rev=None):
164 164 """
165 165 Adds a doc to the writer; this function itself fetches data from
166 166 the instance of the vcs backend
167 167 """
168 168
169 169 node = self.get_node(repo, path, index_rev)
170 170 indexed = indexed_w_content = 0
171 171 # we just index the content of chosen files, and skip binary files
172 172 if node.extension in INDEX_EXTENSIONS and not node.is_binary:
173 173 u_content = node.content
174 174 if not isinstance(u_content, unicode):
175 175 log.warning(' >> %s Could not get this content as unicode '
176 176 'replacing with empty content' % path)
177 177 u_content = u''
178 178 else:
179 179 log.debug(' >> %s [WITH CONTENT]' % path)
180 180 indexed_w_content += 1
181 181
182 182 else:
183 183 log.debug(' >> %s' % path)
184 184 # just index the file name without its content
185 185 u_content = u''
186 186 indexed += 1
187 187
188 188 p = safe_unicode(path)
189 189 writer.add_document(
190 190 fileid=p,
191 191 owner=unicode(repo.contact),
192 192 repository=safe_unicode(repo_name),
193 193 path=p,
194 194 content=u_content,
195 195 modtime=self.get_node_mtime(node),
196 196 extension=node.extension
197 197 )
198 198 return indexed, indexed_w_content
199 199
200 200 def index_changesets(self, writer, repo_name, repo, start_rev=None):
201 201 """
202 202 Add all changesets in the vcs repo starting at start_rev
203 203 to the index writer
204 204
205 205 :param writer: the whoosh index writer to add to
206 206 :param repo_name: name of the repository from whence the
207 207 changeset originates including the repository group
208 208 :param repo: the vcs repository instance to index changesets for,
209 209 the presumption is the repo has changesets to index
210 210 :param start_rev=None: the full sha id to start indexing from
211 211 if start_rev is None then index from the first changeset in
212 212 the repo
213 213 """
214 214
215 215 if start_rev is None:
216 216 start_rev = repo[0].raw_id
217 217
218 218 log.debug('indexing changesets in %s starting at rev: %s' %
219 219 (repo_name, start_rev))
220 220
221 221 indexed = 0
222 222 cs_iter = repo.get_changesets(start=start_rev)
223 223 total = len(cs_iter)
224 224 for cs in cs_iter:
225 225 log.debug(' >> %s/%s' % (cs, total))
226 226 writer.add_document(
227 227 raw_id=unicode(cs.raw_id),
228 228 owner=unicode(repo.contact),
229 229 date=cs._timestamp,
230 230 repository=safe_unicode(repo_name),
231 231 author=cs.author,
232 232 message=cs.message,
233 233 last=cs.last,
234 234 added=u' '.join([safe_unicode(node.path) for node in cs.added]).lower(),
235 235 removed=u' '.join([safe_unicode(node.path) for node in cs.removed]).lower(),
236 236 changed=u' '.join([safe_unicode(node.path) for node in cs.changed]).lower(),
237 237 parents=u' '.join([cs.raw_id for cs in cs.parents]),
238 238 )
239 239 indexed += 1
240 240
241 241 log.debug('indexed %d changesets for repo %s' % (indexed, repo_name))
242 242 return indexed
243 243
244 244 def index_files(self, file_idx_writer, repo_name, repo):
245 245 """
246 246 Index files for given repo_name
247 247
248 248 :param file_idx_writer: the whoosh index writer to add to
249 249 :param repo_name: name of the repository we're indexing
250 250 :param repo: instance of vcs repo
251 251 """
252 252 i_cnt = iwc_cnt = 0
253 253 log.debug('building index for %s @revision:%s' % (repo.path,
254 254 self._get_index_revision(repo)))
255 255 index_rev = self._get_index_revision(repo)
256 256 for idx_path in self.get_paths(repo):
257 257 i, iwc = self.add_doc(file_idx_writer, idx_path, repo, repo_name, index_rev)
258 258 i_cnt += i
259 259 iwc_cnt += iwc
260 260
261 261 log.debug('added %s files %s with content for repo %s' %
262 262 (i_cnt + iwc_cnt, iwc_cnt, repo.path))
263 263 return i_cnt, iwc_cnt
264 264
265 265 def update_changeset_index(self):
266 266 idx = open_dir(self.index_location, indexname=CHGSET_IDX_NAME)
267 267
268 268 with idx.searcher() as searcher:
269 269 writer = idx.writer()
270 270 writer_is_dirty = False
271 271 try:
272 272 indexed_total = 0
273 273 repo_name = None
274 274 for repo_name, repo in self.repo_paths.items():
275 275 # skip indexing if there aren't any revs in the repo
276 276 num_of_revs = len(repo)
277 277 if num_of_revs < 1:
278 278 continue
279 279
280 280 qp = QueryParser('repository', schema=CHGSETS_SCHEMA)
281 281 q = qp.parse(u"last:t AND %s" % repo_name)
282 282
283 283 results = searcher.search(q)
284 284
285 285 # default to scanning the entire repo
286 286 last_rev = 0
287 287 start_id = None
288 288
289 289 if len(results) > 0:
290 290 # assuming that there is only one result, if not this
291 291 # may require a full re-index.
292 292 start_id = results[0]['raw_id']
293 293 last_rev = repo.get_changeset(revision=start_id).revision
294 294
295 295 # there are new changesets to index or a new repo to index
296 296 if last_rev == 0 or num_of_revs > last_rev + 1:
297 297 # delete the docs in the index for the previous
298 298 # last changeset(s)
299 299 for hit in results:
300 300 q = qp.parse(u"last:t AND %s AND raw_id:%s" %
301 301 (repo_name, hit['raw_id']))
302 302 writer.delete_by_query(q)
303 303
304 304 # index from the previous last changeset + all new ones
305 305 indexed_total += self.index_changesets(writer,
306 306 repo_name, repo, start_id)
307 307 writer_is_dirty = True
308 308 log.debug('indexed %s changesets for repo %s' % (
309 309 indexed_total, repo_name)
310 310 )
311 311 finally:
312 312 if writer_is_dirty:
313 313 log.debug('>> COMMITTING CHANGES TO CHANGESET INDEX <<')
314 314 writer.commit(merge=True)
315 315 log.debug('>>> FINISHED REBUILDING CHANGESET INDEX <<<')
316 316 else:
317 317 log.debug('>> NOTHING TO COMMIT TO CHANGESET INDEX<<')
318 318
319 319 def update_file_index(self):
320 320 log.debug((u'STARTING INCREMENTAL INDEXING UPDATE FOR EXTENSIONS %s '
321 321 'AND REPOS %s') % (INDEX_EXTENSIONS, self.repo_paths.keys()))
322 322
323 323 idx = open_dir(self.index_location, indexname=self.indexname)
324 324 # The set of all paths in the index
325 325 indexed_paths = set()
326 326 # The set of all paths we need to re-index
327 327 to_index = set()
328 328
329 329 writer = idx.writer()
330 330 writer_is_dirty = False
331 331 try:
332 332 with idx.reader() as reader:
333 333
334 334 # Loop over the stored fields in the index
335 335 for fields in reader.all_stored_fields():
336 336 indexed_path = fields['path']
337 337 indexed_repo_path = fields['repository']
338 338 indexed_paths.add(indexed_path)
339 339
340 340 if not indexed_repo_path in self.filtered_repo_update_paths:
341 341 continue
342 342
343 343 repo = self.repo_paths[indexed_repo_path]
344 344
345 345 try:
346 346 node = self.get_node(repo, indexed_path)
347 347 # Check if this file was changed since it was indexed
348 348 indexed_time = fields['modtime']
349 349 mtime = self.get_node_mtime(node)
350 350 if mtime > indexed_time:
351 351 # The file has changed, delete it and add it to
352 352 # the list of files to reindex
353 353 log.debug(
354 354 'adding to reindex list %s mtime: %s vs %s' % (
355 355 indexed_path, mtime, indexed_time)
356 356 )
357 357 writer.delete_by_term('fileid', indexed_path)
358 358 writer_is_dirty = True
359 359
360 360 to_index.add(indexed_path)
361 361 except (ChangesetError, NodeDoesNotExistError):
362 362 # This file was deleted since it was indexed
363 363 log.debug('removing from index %s' % indexed_path)
364 364 writer.delete_by_term('path', indexed_path)
365 365 writer_is_dirty = True
366 366
367 367 # Loop over the files in the filesystem
368 368 # Assume we have a function that gathers the filenames of the
369 369 # documents to be indexed
370 370 ri_cnt_total = 0 # indexed
371 371 riwc_cnt_total = 0 # indexed with content
372 372 for repo_name, repo in self.repo_paths.items():
373 373 # skip indexing if there aren't any revisions
374 374 if len(repo) < 1:
375 375 continue
376 376 ri_cnt = 0 # indexed
377 377 riwc_cnt = 0 # indexed with content
378 378 for path in self.get_paths(repo):
379 379 path = safe_unicode(path)
380 380 if path in to_index or path not in indexed_paths:
381 381
382 382 # This is either a file that's changed, or a new file
383 383 # that wasn't indexed before. So index it!
384 384 i, iwc = self.add_doc(writer, path, repo, repo_name)
385 385 writer_is_dirty = True
386 386 log.debug('re indexing %s' % path)
387 387 ri_cnt += i
388 388 ri_cnt_total += 1
389 389 riwc_cnt += iwc
390 390 riwc_cnt_total += iwc
391 391 log.debug('added %s files %s with content for repo %s' % (
392 392 ri_cnt + riwc_cnt, riwc_cnt, repo.path)
393 393 )
394 394 log.debug('indexed %s files in total and %s with content' % (
395 395 ri_cnt_total, riwc_cnt_total)
396 396 )
397 397 finally:
398 398 if writer_is_dirty:
399 399 log.debug('>> COMMITTING CHANGES TO FILE INDEX <<')
400 400 writer.commit(merge=True)
401 401 log.debug('>>> FINISHED REBUILDING FILE INDEX <<<')
402 402 else:
403 403 log.debug('>> NOTHING TO COMMIT TO FILE INDEX <<')
404 404 writer.cancel()
405 405
406 406 def build_indexes(self):
407 407 if os.path.exists(self.index_location):
408 408 log.debug('removing previous index')
409 409 rmtree(self.index_location)
410 410
411 411 if not os.path.exists(self.index_location):
412 412 os.mkdir(self.index_location)
413 413
414 414 chgset_idx = create_in(self.index_location, CHGSETS_SCHEMA,
415 415 indexname=CHGSET_IDX_NAME)
416 416 chgset_idx_writer = chgset_idx.writer()
417 417
418 418 file_idx = create_in(self.index_location, SCHEMA, indexname=IDX_NAME)
419 419 file_idx_writer = file_idx.writer()
420 420 log.debug('BUILDING INDEX FOR EXTENSIONS %s '
421 421 'AND REPOS %s' % (INDEX_EXTENSIONS, self.repo_paths.keys()))
422 422
423 423 for repo_name, repo in self.repo_paths.items():
424 424 # skip indexing if there aren't any revisions
425 425 if len(repo) < 1:
426 426 continue
427 427
428 428 self.index_files(file_idx_writer, repo_name, repo)
429 429 self.index_changesets(chgset_idx_writer, repo_name, repo)
430 430
431 431 log.debug('>> COMMITTING CHANGES <<')
432 432 file_idx_writer.commit(merge=True)
433 433 chgset_idx_writer.commit(merge=True)
434 434 log.debug('>>> FINISHED BUILDING INDEX <<<')
435 435
436 436 def update_indexes(self):
437 437 self.update_file_index()
438 438 self.update_changeset_index()
439 439
440 440 def run(self, full_index=False):
441 441 """Run daemon"""
442 442 if full_index or self.initial:
443 443 self.build_indexes()
444 444 else:
445 445 self.update_indexes()
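A minimal usage sketch of the daemon, mirroring what the make-index paster command further below does; the index and repository locations are assumed paths:

from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon

daemon = WhooshIndexingDaemon(index_location='/srv/rhodecode/index',
                              repo_location='/srv/repos')
# builds full indexes on the first run, otherwise updates incrementally
daemon.run(full_index=False)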
@@ -1,85 +1,85 b''
1 1 # -*- coding: utf-8 -*-
2 2 # This program is free software: you can redistribute it and/or modify
3 3 # it under the terms of the GNU General Public License as published by
4 4 # the Free Software Foundation, either version 3 of the License, or
5 5 # (at your option) any later version.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 """
15 15 rhodecode.lib.paster_commands.cache_keys
16 16 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
17 17
18 18 cleanup-keys paster command for RhodeCode
19 19
20 20
21 21 :created_on: mar 27, 2013
22 22 :author: marcink
23 23 :copyright: (c) 2013 RhodeCode GmbH.
24 24 :license: GPLv3, see LICENSE for more details.
25 25 """
26 26
27 27 from __future__ import with_statement
28 28
29 29 import os
30 30 import sys
31 31 import logging
32 32
33 33 from rhodecode.model.meta import Session
34 34 from rhodecode.lib.utils import BasePasterCommand
35 35 from rhodecode.model.db import CacheInvalidation
36 36
37 # fix rhodecode import
37 # Add location of top level folder to sys.path
38 38 from os.path import dirname as dn
39 39 rc_path = dn(dn(dn(os.path.realpath(__file__))))
40 40 sys.path.append(rc_path)
41 41
42 42 log = logging.getLogger(__name__)
43 43
44 44
45 45 class Command(BasePasterCommand):
46 46
47 47 max_args = 1
48 48 min_args = 1
49 49
50 50 usage = "CONFIG_FILE"
51 51 group_name = "RhodeCode"
52 52 takes_config_file = -1
53 53 parser = BasePasterCommand.standard_parser(verbose=True)
54 54 summary = "Cache keys utils"
55 55
56 56 def command(self):
57 57 #get SqlAlchemy session
58 58 self._init_session()
59 59 _caches = CacheInvalidation.query().order_by(CacheInvalidation.cache_key).all()
60 60 if self.options.show:
61 61 for c_obj in _caches:
62 62 print 'key:%s active:%s' % (c_obj.cache_key, c_obj.cache_active)
63 63 elif self.options.cleanup:
64 64 for c_obj in _caches:
65 65 Session().delete(c_obj)
66 66 print 'removing key:%s' % (c_obj.cache_key)
67 67 Session().commit()
68 68 else:
69 69 print 'nothing done exiting...'
70 70 sys.exit(0)
71 71
72 72 def update_parser(self):
73 73 self.parser.add_option(
74 74 '--show',
75 75 action='store_true',
76 76 dest='show',
77 77 help=("show existing cache keys with together with status")
78 78 )
79 79
80 80 self.parser.add_option(
81 81 '--cleanup',
82 82 action="store_true",
83 83 dest="cleanup",
84 84 help="cleanup existing cache keys"
85 85 )
@@ -1,76 +1,76 b''
1 1 # -*- coding: utf-8 -*-
2 2 # This program is free software: you can redistribute it and/or modify
3 3 # it under the terms of the GNU General Public License as published by
4 4 # the Free Software Foundation, either version 3 of the License, or
5 5 # (at your option) any later version.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 """
15 15 rhodecode.lib.paster_commands.ishell
16 16 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
17 17
18 18 interactive shell paster command for RhodeCode
19 19
20 20 :created_on: Apr 4, 2013
21 21 :author: marcink
22 22 :copyright: (c) 2013 RhodeCode GmbH.
23 23 :license: GPLv3, see LICENSE for more details.
24 24 """
25 25
26 26 from __future__ import with_statement
27 27
28 28 import os
29 29 import sys
30 30 import logging
31 31
32 32 from rhodecode.lib.utils import BasePasterCommand
33 33
34 # fix rhodecode import
34 # Add location of top level folder to sys.path
35 35 from os.path import dirname as dn
36 36 rc_path = dn(dn(dn(os.path.realpath(__file__))))
37 37 sys.path.append(rc_path)
38 38
39 39 log = logging.getLogger(__name__)
40 40
41 41
42 42 class Command(BasePasterCommand):
43 43
44 44 max_args = 1
45 45 min_args = 1
46 46
47 47 usage = "CONFIG_FILE"
48 48 group_name = "RhodeCode"
49 49 takes_config_file = -1
50 50 parser = BasePasterCommand.standard_parser(verbose=True)
51 51 summary = "Interactive shell"
52 52
53 53 def command(self):
54 54 #get SqlAlchemy session
55 55 self._init_session()
56 56
57 57 # imports, used in ipython shell
58 58 import os
59 59 import sys
60 60 import time
61 61 import shutil
62 62 import datetime
63 63 from rhodecode.model.db import *
64 64
65 65 try:
66 66 from IPython import embed
67 67 from IPython.config.loader import Config
68 68 cfg = Config()
69 69 cfg.InteractiveShellEmbed.confirm_exit = False
70 70 embed(config=cfg, banner1="RhodeCode IShell.")
71 71 except ImportError:
72 72 print 'ipython installation required for ishell'
73 73 sys.exit(-1)
74 74
75 75 def update_parser(self):
76 76 pass
@@ -1,113 +1,113 b''
1 1 # -*- coding: utf-8 -*-
2 2 # This program is free software: you can redistribute it and/or modify
3 3 # it under the terms of the GNU General Public License as published by
4 4 # the Free Software Foundation, either version 3 of the License, or
5 5 # (at your option) any later version.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 """
15 15 rhodecode.lib.paster_commands.make_index
16 16 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
17 17
18 18 make-index paster command for RhodeCode
19 19
20 20 :created_on: Aug 17, 2010
21 21 :author: marcink
22 22 :copyright: (c) 2013 RhodeCode GmbH.
23 23 :license: GPLv3, see LICENSE for more details.
24 24
25 25 """
26 26
27 27 from __future__ import with_statement
28 28
29 29 import os
30 30 import sys
31 31 import logging
32 32
33 33 from rhodecode.lib.utils import BasePasterCommand
34 34 from string import strip
35 35 from shutil import rmtree
36 36 from rhodecode.model.repo import RepoModel
37 37 from rhodecode.lib.utils import BasePasterCommand, load_rcextensions
38 38
39 # fix rhodecode import
39 # Add location of top level folder to sys.path
40 40 from os.path import dirname as dn
41 41 rc_path = dn(dn(dn(os.path.realpath(__file__))))
42 42 sys.path.append(rc_path)
43 43
44 44
45 45 class Command(BasePasterCommand):
46 46
47 47 max_args = 1
48 48 min_args = 1
49 49
50 50 usage = "CONFIG_FILE"
51 51 group_name = "RhodeCode"
52 52 takes_config_file = -1
53 53 parser = BasePasterCommand.standard_parser(verbose=True)
54 54 summary = "Creates or updates full text search index"
55 55
56 56 def command(self):
57 57 logging.config.fileConfig(self.path_to_ini_file)
58 58 #get SqlAlchemy session
59 59 self._init_session()
60 60 from pylons import config
61 61 index_location = config['index_dir']
62 62 load_rcextensions(config['here'])
63 63
64 64 repo_location = self.options.repo_location \
65 65 if self.options.repo_location else RepoModel().repos_path
66 66 repo_list = map(strip, self.options.repo_list.split(',')) \
67 67 if self.options.repo_list else None
68 68
69 69 repo_update_list = map(strip, self.options.repo_update_list.split(',')) \
70 70 if self.options.repo_update_list else None
71 71
72 72 #======================================================================
73 73 # WHOOSH DAEMON
74 74 #======================================================================
75 75 from rhodecode.lib.pidlock import LockHeld, DaemonLock
76 76 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
77 77 try:
78 78 l = DaemonLock(file_=os.path.join(dn(dn(index_location)),
79 79 'make_index.lock'))
80 80 WhooshIndexingDaemon(index_location=index_location,
81 81 repo_location=repo_location,
82 82 repo_list=repo_list,
83 83 repo_update_list=repo_update_list)\
84 84 .run(full_index=self.options.full_index)
85 85 l.release()
86 86 except LockHeld:
87 87 sys.exit(1)
88 88
89 89 def update_parser(self):
90 90 self.parser.add_option('--repo-location',
91 91 action='store',
92 92 dest='repo_location',
93 93 help="Specifies repositories location to index OPTIONAL",
94 94 )
95 95 self.parser.add_option('--index-only',
96 96 action='store',
97 97 dest='repo_list',
98 98 help="Specifies a comma separated list of repositores "
99 99 "to build index on. If not given all repositories "
100 100 "are scanned for indexing. OPTIONAL",
101 101 )
102 102 self.parser.add_option('--update-only',
103 103 action='store',
104 104 dest='repo_update_list',
105 105 help="Specifies a comma separated list of repositores "
106 106 "to re-build index on. OPTIONAL",
107 107 )
108 108 self.parser.add_option('-f',
109 109 action='store_true',
110 110 dest='full_index',
111 111 help="Specifies that index should be made full i.e"
112 112 " destroy old and build from scratch",
113 113 default=False)
@@ -1,83 +1,83 b''
1 1 # -*- coding: utf-8 -*-
2 2 # This program is free software: you can redistribute it and/or modify
3 3 # it under the terms of the GNU General Public License as published by
4 4 # the Free Software Foundation, either version 3 of the License, or
5 5 # (at your option) any later version.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 """
15 15 rhodecode.lib.paster_commands.make_rcextensions
16 16 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
17 17
18 18 make-rcext paster command for RhodeCode
19 19
20 20 :created_on: Mar 6, 2012
21 21 :author: marcink
22 22 :copyright: (c) 2013 RhodeCode GmbH.
23 23 :license: GPLv3, see LICENSE for more details.
24 24
25 25 """
26 26
27 27 from __future__ import with_statement
28 28
29 29 import os
30 30 import sys
31 31 import logging
32 32 import pkg_resources
33 33
34 34 from rhodecode.lib.utils import BasePasterCommand, ask_ok
35 35
36 # fix rhodecode import
36 # Add location of top level folder to sys.path
37 37 from os.path import dirname as dn
38 38 rc_path = dn(dn(dn(os.path.realpath(__file__))))
39 39 sys.path.append(rc_path)
40 40
41 41 log = logging.getLogger(__name__)
42 42
43 43
44 44 class Command(BasePasterCommand):
45 45
46 46 max_args = 1
47 47 min_args = 1
48 48
49 49 usage = "CONFIG_FILE"
50 50 group_name = "RhodeCode"
51 51 takes_config_file = -1
52 52 parser = BasePasterCommand.standard_parser(verbose=True)
53 53 summary = "Creates additional extensions for rhodecode"
54 54
55 55 def command(self):
56 56 logging.config.fileConfig(self.path_to_ini_file)
57 57 from pylons import config
58 58
59 59 def _make_file(ext_file, tmpl):
60 60 bdir = os.path.split(ext_file)[0]
61 61 if not os.path.isdir(bdir):
62 62 os.makedirs(bdir)
63 63 with open(ext_file, 'wb') as f:
64 64 f.write(tmpl)
65 65 log.info('Written new extensions file to %s' % ext_file)
66 66
67 67 here = config['here']
68 68 tmpl = pkg_resources.resource_string(
69 69 'rhodecode', os.path.join('config', 'rcextensions', '__init__.py')
70 70 )
71 71 ext_file = os.path.join(here, 'rcextensions', '__init__.py')
72 72 if os.path.exists(ext_file):
73 73 msg = ('Extension file already exists, do you want '
74 74 'to overwrite it ? [y/n]')
75 75 if ask_ok(msg):
76 76 _make_file(ext_file, tmpl)
77 77 else:
78 78 log.info('nothing done...')
79 79 else:
80 80 _make_file(ext_file, tmpl)
81 81
82 82 def update_parser(self):
83 83 pass
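The generated rcextensions/__init__.py is where callbacks like those used in rhodecode.lib.hooks live. A minimal sketch of a pre-create-user check matching what check_allowed_create_user() above expects, an (allowed, reason) tuple; the policy itself is an assumed example:

def PRE_CREATE_USER_HOOK(created_by, **user_dict):
    # returning allowed=False makes check_allowed_create_user()
    # raise UserCreationError(reason)
    reason = 'this username is not allowed'
    allowed = user_dict.get('username') != 'newuser'  # assumed example policy
    return allowed, reason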
@@ -1,71 +1,71 b''
1 1 # -*- coding: utf-8 -*-
2 2 # This program is free software: you can redistribute it and/or modify
3 3 # it under the terms of the GNU General Public License as published by
4 4 # the Free Software Foundation, either version 3 of the License, or
5 5 # (at your option) any later version.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 """
15 15 rhodecode.lib.paster_commands.make_rcextensions
16 16 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
17 17
18 18 repo-scan paster command for RhodeCode
19 19
20 20 :created_on: Feb 9, 2013
21 21 :author: marcink
22 22 :copyright: (c) 2013 RhodeCode GmbH.
23 23 :license: GPLv3, see LICENSE for more details.
24 24 """
25 25
26 26 from __future__ import with_statement
27 27
28 28 import os
29 29 import sys
30 30 import logging
31 31
32 32 from rhodecode.model.scm import ScmModel
33 33 from rhodecode.lib.utils import BasePasterCommand, repo2db_mapper
34 34
35 # fix rhodecode import
35 # Add location of top level folder to sys.path
36 36 from os.path import dirname as dn
37 37 rc_path = dn(dn(dn(os.path.realpath(__file__))))
38 38 sys.path.append(rc_path)
39 39
40 40 log = logging.getLogger(__name__)
41 41
42 42
43 43 class Command(BasePasterCommand):
44 44
45 45 max_args = 1
46 46 min_args = 1
47 47
48 48 usage = "CONFIG_FILE"
49 49 group_name = "RhodeCode"
50 50 takes_config_file = -1
51 51 parser = BasePasterCommand.standard_parser(verbose=True)
52 52 summary = "Rescan default location for new repositories"
53 53
54 54 def command(self):
55 55 #get SqlAlchemy session
56 56 self._init_session()
57 57 rm_obsolete = self.options.delete_obsolete
58 58 log.info('Now scanning root location for new repos...')
59 59 added, removed = repo2db_mapper(ScmModel().repo_scan(),
60 60 remove_obsolete=rm_obsolete)
61 61 added = ', '.join(added) or '-'
62 62 removed = ', '.join(removed) or '-'
63 63 log.info('Scan completed added: %s removed: %s' % (added, removed))
64 64
65 65 def update_parser(self):
66 66 self.parser.add_option(
67 67 '--delete-obsolete',
68 68 action='store_true',
69 69 help="Use this flag do delete repositories that are "
70 70 "present in RhodeCode database but not on the filesystem",
71 71 )
@@ -1,110 +1,110 b''
1 1 import os
2 2 import sys
3 3 from paste.script.appinstall import AbstractInstallCommand
4 4 from paste.script.command import BadCommand
5 5 from paste.deploy import appconfig
6 6
7 # fix rhodecode import
7 # Add location of top level folder to sys.path
8 8 from os.path import dirname as dn
9 9 rc_path = dn(dn(dn(os.path.realpath(__file__))))
10 10 sys.path.append(rc_path)
11 11
12 12
13 13 class Command(AbstractInstallCommand):
14 14
15 15 default_verbosity = 1
16 16 max_args = 1
17 17 min_args = 1
18 18 summary = "Setup an application, given a config file"
19 19 usage = "CONFIG_FILE"
20 20 group_name = "RhodeCode"
21 21
22 22 description = """\
23 23
24 24 Setup RhodeCode according to its configuration file. This is
25 25 the second part of a two-phase web application installation
26 26 process (the first phase is prepare-app). The setup process
27 27 consists of things like setting up databases and creating the super user
28 28 """
29 29
30 30 parser = AbstractInstallCommand.standard_parser(
31 31 simulate=True, quiet=True, interactive=True)
32 32 parser.add_option('--user',
33 33 action='store',
34 34 dest='username',
35 35 default=None,
36 36 help='Admin Username')
37 37 parser.add_option('--email',
38 38 action='store',
39 39 dest='email',
40 40 default=None,
41 41 help='Admin Email')
42 42 parser.add_option('--password',
43 43 action='store',
44 44 dest='password',
45 45 default=None,
46 46 help='Admin password min 6 chars')
47 47 parser.add_option('--repos',
48 48 action='store',
49 49 dest='repos_location',
50 50 default=None,
51 51 help='Absolute path to repositories location')
52 52 parser.add_option('--name',
53 53 action='store',
54 54 dest='section_name',
55 55 default=None,
56 56 help='The name of the section to set up (default: app:main)')
57 57 parser.add_option('--force-yes',
58 58 action='store_true',
59 59 dest='force_ask',
60 60 default=None,
61 61 help='Force yes to every question')
62 62 parser.add_option('--force-no',
63 63 action='store_false',
64 64 dest='force_ask',
65 65 default=None,
66 66 help='Force no to every question')
67 67 parser.add_option('--public-access',
68 68 action='store_true',
69 69 dest='public_access',
70 70 default=None,
71 71 help='Enable public access on this installation (default)')
72 72 parser.add_option('--no-public-access',
73 73 action='store_false',
74 74 dest='public_access',
75 75 default=None,
76 76 help='Disable public access on this installation ')
77 77 def command(self):
78 78 config_spec = self.args[0]
79 79 section = self.options.section_name
80 80 if section is None:
81 81 if '#' in config_spec:
82 82 config_spec, section = config_spec.split('#', 1)
83 83 else:
84 84 section = 'main'
85 85 if not ':' in section:
86 86 plain_section = section
87 87 section = 'app:' + section
88 88 else:
89 89 plain_section = section.split(':', 1)[0]
90 90 if not config_spec.startswith('config:'):
91 91 config_spec = 'config:' + config_spec
92 92 if plain_section != 'main':
93 93 config_spec += '#' + plain_section
94 94 config_file = config_spec[len('config:'):].split('#', 1)[0]
95 95 config_file = os.path.join(os.getcwd(), config_file)
96 96 self.logging_file_config(config_file)
97 97 conf = appconfig(config_spec, relative_to=os.getcwd())
98 98 ep_name = conf.context.entry_point_name
99 99 ep_group = conf.context.protocol
100 100 dist = conf.context.distribution
101 101 if dist is None:
102 102 raise BadCommand(
103 103 "The section %r is not the application (probably a filter). "
104 104 "You should add #section_name, where section_name is the "
105 105 "section that configures your application" % plain_section)
106 106 installer = self.get_installer(dist, ep_group, ep_name)
107 107 installer.setup_config(
108 108 self, config_file, section, self.sysconfig_install_vars(installer))
109 109 self.call_sysconfig_functions(
110 110 'post_setup_hook', installer, config_file)
@@ -1,88 +1,88 b''
1 1 # -*- coding: utf-8 -*-
2 2 # This program is free software: you can redistribute it and/or modify
3 3 # it under the terms of the GNU General Public License as published by
4 4 # the Free Software Foundation, either version 3 of the License, or
5 5 # (at your option) any later version.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 """
15 15 rhodecode.lib.paster_commands.make_rcextensions
16 16 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
17 17
18 18 update-repoinfo paster command for RhodeCode
19 19
20 20 :created_on: Jul 14, 2012
21 21 :author: marcink
22 22 :copyright: (c) 2013 RhodeCode GmbH.
23 23 :license: GPLv3, see LICENSE for more details.
24 24 """
25 25
26 26 from __future__ import with_statement
27 27
28 28 import os
29 29 import sys
30 30 import logging
31 31 import string
32 32
33 33 from rhodecode.lib.utils import BasePasterCommand
34 34 from rhodecode.model.db import Repository
35 35 from rhodecode.model.repo import RepoModel
36 36 from rhodecode.model.meta import Session
37 37
38 # fix rhodecode import
38 # Add location of top level folder to sys.path
39 39 from os.path import dirname as dn
40 40 rc_path = dn(dn(dn(os.path.realpath(__file__))))
41 41 sys.path.append(rc_path)
42 42
43 43 log = logging.getLogger(__name__)
44 44
45 45
46 46 class Command(BasePasterCommand):
47 47
48 48 max_args = 1
49 49 min_args = 1
50 50
51 51 usage = "CONFIG_FILE"
52 52 group_name = "RhodeCode"
53 53 takes_config_file = -1
54 54 parser = BasePasterCommand.standard_parser(verbose=True)
55 55 summary = "Updates repositories caches for last changeset"
56 56
57 57 def command(self):
58 58 #get SqlAlchemy session
59 59 self._init_session()
60 60
61 61 repo_update_list = map(string.strip,
62 62 self.options.repo_update_list.split(',')) \
63 63 if self.options.repo_update_list else None
64 64
65 65 if repo_update_list:
66 66 repo_list = Repository.query()\
67 67 .filter(Repository.repo_name.in_(repo_update_list))
68 68 else:
69 69 repo_list = Repository.getAll()
70 70 RepoModel.update_repoinfo(repositories=repo_list)
71 71 Session().commit()
72 72
73 73 if self.options.invalidate_cache:
74 74 for r in repo_list:
75 75 r.set_invalidate()
76 76 log.info('Updated cache for %s repositories' % (len(repo_list)))
77 77
78 78 def update_parser(self):
79 79 self.parser.add_option('--update-only',
80 80 action='store',
81 81 dest='repo_update_list',
82 82 help="Specifies a comma separated list of repositores "
83 83 "to update last commit info for. OPTIONAL")
84 84 self.parser.add_option('--invalidate-cache',
85 85 action='store_true',
86 86 dest='invalidate_cache',
87 87 help="Trigger cache invalidation event for repos. "
88 88 "OPTIONAL")