Hacking for git support, and a new, faster repo scan
marcink -
r631:05528ad9 beta
@@ -3,6 +3,12 b''
3 Changelog
3 Changelog
4 =========
4 =========
5
5
6 1.1.0 (**XXXX-XX-XX**)
7 ----------------------
8 - git support
9 - performance upgrade for cached repos list
10
11
6 1.0.0 (**2010-10-xx**)
12 1.0.0 (**2010-10-xx**)
7 ----------------------
13 ----------------------
8
14
@@ -24,7 +24,7 b' versioning implementation: http://semver'
24 @author: marcink
24 @author: marcink
25 """
25 """
26
26
27 VERSION = (1, 0, 0, 'rc4')
27 VERSION = (1, 1, 0, 'beta')
28
28
29 __version__ = '.'.join((str(each) for each in VERSION[:4]))
29 __version__ = '.'.join((str(each) for each in VERSION[:4]))
30
30
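For reference, the version tuple above is flattened into the package version string by the unchanged join; a minimal sketch of the result:

    VERSION = (1, 1, 0, 'beta')
    __version__ = '.'.join((str(each) for each in VERSION[:4]))
    # __version__ == '1.1.0.beta'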
@@ -20,7 +20,7 b' def load_environment(global_conf, app_co'
20 object
20 object
21 """
21 """
22 config = PylonsConfig()
22 config = PylonsConfig()
23
23
24 # Pylons paths
24 # Pylons paths
25 root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
25 root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
26 paths = dict(root=root,
26 paths = dict(root=root,
@@ -34,11 +34,11 b' def load_environment(global_conf, app_co'
34 config['routes.map'] = make_map(config)
34 config['routes.map'] = make_map(config)
35 config['pylons.app_globals'] = app_globals.Globals(config)
35 config['pylons.app_globals'] = app_globals.Globals(config)
36 config['pylons.h'] = rhodecode.lib.helpers
36 config['pylons.h'] = rhodecode.lib.helpers
37
37
38 # Setup cache object as early as possible
38 # Setup cache object as early as possible
39 import pylons
39 import pylons
40 pylons.cache._push_object(config['pylons.app_globals'].cache)
40 pylons.cache._push_object(config['pylons.app_globals'].cache)
41
41
42 # Create the Mako TemplateLookup, with the default auto-escaping
42 # Create the Mako TemplateLookup, with the default auto-escaping
43 config['pylons.app_globals'].mako_lookup = TemplateLookup(
43 config['pylons.app_globals'].mako_lookup = TemplateLookup(
44 directories=paths['templates'],
44 directories=paths['templates'],
@@ -53,8 +53,8 b' def load_environment(global_conf, app_co'
53 if test:
53 if test:
54 from rhodecode.lib.utils import create_test_env, create_test_index
54 from rhodecode.lib.utils import create_test_env, create_test_index
55 create_test_env('/tmp', config)
55 create_test_env('/tmp', config)
56 create_test_index('/tmp/*', True)
56 create_test_index('/tmp', True)
57
57
58 #MULTIPLE DB configs
58 #MULTIPLE DB configs
59 # Setup the SQLAlchemy database engine
59 # Setup the SQLAlchemy database engine
60 if config['debug'] and not test:
60 if config['debug'] and not test:
@@ -68,12 +68,12 b' def load_environment(global_conf, app_co'
68 init_model(sa_engine_db1)
68 init_model(sa_engine_db1)
69 #init baseui
69 #init baseui
70 config['pylons.app_globals'].baseui = make_ui('db')
70 config['pylons.app_globals'].baseui = make_ui('db')
71
71
72 repo2db_mapper(_get_repos_cached_initial(config['pylons.app_globals'], initial))
72 repo2db_mapper(_get_repos_cached_initial(config['pylons.app_globals'], initial))
73 set_available_permissions(config)
73 set_available_permissions(config)
74 set_base_path(config)
74 set_base_path(config)
75 set_rhodecode_config(config)
75 set_rhodecode_config(config)
76 # CONFIGURATION OPTIONS HERE (note: all config options will override
76 # CONFIGURATION OPTIONS HERE (note: all config options will override
77 # any Pylons config options)
77 # any Pylons config options)
78
78
79 return config
79 return config
@@ -19,13 +19,13 b' class Globals(object):'
19 self.cache = CacheManager(**parse_cache_config_options(config))
19 self.cache = CacheManager(**parse_cache_config_options(config))
20 self.available_permissions = None # propagated after init_model
20 self.available_permissions = None # propagated after init_model
21 self.baseui = None # propagated after init_model
21 self.baseui = None # propagated after init_model
22
22
23 @LazyProperty
23 @LazyProperty
24 def paths(self):
24 def paths(self):
25 if self.baseui:
25 if self.baseui:
26 return self.baseui.configitems('paths')
26 return self.baseui.configitems('paths')
27
27
28 @LazyProperty
28 @LazyProperty
29 def base_path(self):
29 def base_path(self):
30 if self.baseui:
30 if self.baseui:
31 return self.paths[0][1].replace('*', '')
31 return self.paths[0][1]
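A small illustration of what this change means for the mercurial ui 'paths' entry (values below are hypothetical): the root is now stored as a plain directory, so base_path no longer needs to strip a trailing '*' glob.

    # result of baseui.configitems('paths'), hypothetical values
    paths = [('/', '/srv/repos')]      # previously stored as '/srv/repos/*'
    base_path = paths[0][1]            # '/srv/repos', used as-is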
@@ -8,7 +8,11 b' from rhodecode.lib.smtp_mailer import Sm'
8 from rhodecode.lib.utils import OrderedDict
8 from rhodecode.lib.utils import OrderedDict
9 from time import mktime
9 from time import mktime
10 from vcs.backends.hg import MercurialRepository
10 from vcs.backends.hg import MercurialRepository
11 from vcs.backends.git import GitRepository
12 import os
11 import traceback
13 import traceback
14 from vcs.backends import get_repo
15 from vcs.utils.helpers import get_scm
12
16
13 try:
17 try:
14 import json
18 import json
@@ -95,8 +99,9 b' def get_commits_stats(repo_name, ts_min_'
95
99
96 commits_by_day_author_aggregate = {}
100 commits_by_day_author_aggregate = {}
97 commits_by_day_aggregate = {}
101 commits_by_day_aggregate = {}
98 repos_path = get_hg_ui_settings()['paths_root_path'].replace('*', '')
102 repos_path = get_hg_ui_settings()['paths_root_path']
99 repo = MercurialRepository(repos_path + repo_name)
103 p = os.path.join(repos_path, repo_name)
104 repo = get_repo(get_scm(p)[0], p)
100
105
101 skip_date_limit = True
106 skip_date_limit = True
102 parse_limit = 250 #limit for single task changeset parsing optimal for
107 parse_limit = 250 #limit for single task changeset parsing optimal for
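Roughly, the new backend-agnostic lookup used by this task works as follows; the root path and repository name below are hypothetical, and the calls mirror their use in this hunk:

    import os
    from vcs.backends import get_repo
    from vcs.utils.helpers import get_scm

    repos_path = '/srv/repos'                 # hypothetical paths_root_path value
    p = os.path.join(repos_path, 'myrepo')    # hypothetical repository name
    repo = get_repo(get_scm(p)[0], p)         # get_scm(p) -> (alias, path); alias is 'hg' or 'git'
    tip = repo.get_changeset()                # same call for both backends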
@@ -305,8 +310,10 b' def __get_codes_stats(repo_name):'
305 's', 'sh', 'tpl', 'txt', 'vim', 'wss', 'xhtml', 'xml', 'xsl', 'xslt', 'yaws']
310 's', 'sh', 'tpl', 'txt', 'vim', 'wss', 'xhtml', 'xml', 'xsl', 'xslt', 'yaws']
306
311
307
312
308 repos_path = get_hg_ui_settings()['paths_root_path'].replace('*', '')
313 repos_path = get_hg_ui_settings()['paths_root_path']
309 repo = MercurialRepository(repos_path + repo_name)
314 p = os.path.join(repos_path, repo_name)
315 repo = get_repo(get_scm(p)[0], p)
316
310 tip = repo.get_changeset()
317 tip = repo.get_changeset()
311
318
312 code_stats = {}
319 code_stats = {}
@@ -162,7 +162,7 b' class DbManage(object):'
162 paths = RhodeCodeUi()
162 paths = RhodeCodeUi()
163 paths.ui_section = 'paths'
163 paths.ui_section = 'paths'
164 paths.ui_key = '/'
164 paths.ui_key = '/'
165 paths.ui_value = os.path.join(path, '*')
165 paths.ui_value = path
166
166
167
167
168 hgsettings1 = RhodeCodeSettings()
168 hgsettings1 = RhodeCodeSettings()
@@ -64,20 +64,20 b" def recursive_replace(str, replace=' '):"
64 return str
64 return str
65 else:
65 else:
66 str = str.replace(replace * 2, replace)
66 str = str.replace(replace * 2, replace)
67 return recursive_replace(str, replace)
67 return recursive_replace(str, replace)
68
68
69 class _ToolTip(object):
69 class _ToolTip(object):
70
70
71 def __call__(self, tooltip_title, trim_at=50):
71 def __call__(self, tooltip_title, trim_at=50):
72 """
72 """
73 Special function just to wrap our text into nice formatted autowrapped
73 Special function just to wrap our text into nice formatted autowrapped
74 text
74 text
75 :param tooltip_title:
75 :param tooltip_title:
76 """
76 """
77
77
78 return wrap_paragraphs(escape(tooltip_title), trim_at)\
78 return wrap_paragraphs(escape(tooltip_title), trim_at)\
79 .replace('\n', '<br/>')
79 .replace('\n', '<br/>')
80
80
81 def activate(self):
81 def activate(self):
82 """
82 """
83 Adds tooltip mechanism to the given Html all tooltips have to have
83 Adds tooltip mechanism to the given Html all tooltips have to have
@@ -85,7 +85,7 b' class _ToolTip(object):'
85 Then a tooltip will be generated based on that
85 Then a tooltip will be generated based on that
86 All with yui js tooltip
86 All with yui js tooltip
87 """
87 """
88
88
89 js = '''
89 js = '''
90 YAHOO.util.Event.onDOMReady(function(){
90 YAHOO.util.Event.onDOMReady(function(){
91 function toolTipsId(){
91 function toolTipsId(){
@@ -190,19 +190,19 b' class _ToolTip(object):'
190
190
191 });
191 });
192 });
192 });
193 '''
193 '''
194 return literal(js)
194 return literal(js)
195
195
196 tooltip = _ToolTip()
196 tooltip = _ToolTip()
197
197
198 class _FilesBreadCrumbs(object):
198 class _FilesBreadCrumbs(object):
199
199
200 def __call__(self, repo_name, rev, paths):
200 def __call__(self, repo_name, rev, paths):
201 url_l = [link_to(repo_name, url('files_home',
201 url_l = [link_to(repo_name, url('files_home',
202 repo_name=repo_name,
202 repo_name=repo_name,
203 revision=rev, f_path=''))]
203 revision=rev, f_path=''))]
204 paths_l = paths.split('/')
204 paths_l = paths.split('/')
205
205
206 for cnt, p in enumerate(paths_l, 1):
206 for cnt, p in enumerate(paths_l, 1):
207 if p != '':
207 if p != '':
208 url_l.append(link_to(p, url('files_home',
208 url_l.append(link_to(p, url('files_home',
@@ -236,12 +236,12 b' def pygmentize_annotation(filenode, **kw'
236 pygmentize function for annotation
236 pygmentize function for annotation
237 :param filenode:
237 :param filenode:
238 """
238 """
239
239
240 color_dict = {}
240 color_dict = {}
241 def gen_color():
241 def gen_color():
242 """generator for getting 10k of evenly distibuted colors using hsv color
242 """generator for getting 10k of evenly distibuted colors using hsv color
243 and golden ratio.
243 and golden ratio.
244 """
244 """
245 import colorsys
245 import colorsys
246 n = 10000
246 n = 10000
247 golden_ratio = 0.618033988749895
247 golden_ratio = 0.618033988749895
@@ -252,21 +252,21 b' def pygmentize_annotation(filenode, **kw'
252 h %= 1
252 h %= 1
253 HSV_tuple = [h, 0.95, 0.95]
253 HSV_tuple = [h, 0.95, 0.95]
254 RGB_tuple = colorsys.hsv_to_rgb(*HSV_tuple)
254 RGB_tuple = colorsys.hsv_to_rgb(*HSV_tuple)
255 yield map(lambda x:str(int(x * 256)), RGB_tuple)
255 yield map(lambda x:str(int(x * 256)), RGB_tuple)
256
256
257 cgenerator = gen_color()
257 cgenerator = gen_color()
258
258
259 def get_color_string(cs):
259 def get_color_string(cs):
260 if color_dict.has_key(cs):
260 if color_dict.has_key(cs):
261 col = color_dict[cs]
261 col = color_dict[cs]
262 else:
262 else:
263 col = color_dict[cs] = cgenerator.next()
263 col = color_dict[cs] = cgenerator.next()
264 return "color: rgb(%s)! important;" % (', '.join(col))
264 return "color: rgb(%s)! important;" % (', '.join(col))
265
265
266 def url_func(changeset):
266 def url_func(changeset):
267 tooltip_html = "<div style='font-size:0.8em'><b>Author:</b>" + \
267 tooltip_html = "<div style='font-size:0.8em'><b>Author:</b>" + \
268 " %s<br/><b>Date:</b> %s</b><br/><b>Message:</b> %s<br/></div>"
268 " %s<br/><b>Date:</b> %s</b><br/><b>Message:</b> %s<br/></div>"
269
269
270 tooltip_html = tooltip_html % (changeset.author,
270 tooltip_html = tooltip_html % (changeset.author,
271 changeset.date,
271 changeset.date,
272 tooltip(changeset.message))
272 tooltip(changeset.message))
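A self-contained sketch of the annotation colour scheme shown above: stepping the hue by the golden ratio spreads colours evenly around the HSV wheel. The starting hue is not visible in this hunk and is an arbitrary assumption here.

    import colorsys

    def gen_color(n=10000, golden_ratio=0.618033988749895):
        h = 0.22                       # assumed seed; the real value is not in this hunk
        for _ in range(n):
            h += golden_ratio
            h %= 1
            r, g, b = colorsys.hsv_to_rgb(h, 0.95, 0.95)
            yield [str(int(x * 256)) for x in (r, g, b)]

    cgenerator = gen_color()
    style = "color: rgb(%s)! important;" % ', '.join(next(cgenerator))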
@@ -280,11 +280,11 b' def pygmentize_annotation(filenode, **kw'
280 class_='tooltip',
280 class_='tooltip',
281 tooltip_title=tooltip_html
281 tooltip_title=tooltip_html
282 )
282 )
283
283
284 uri += '\n'
284 uri += '\n'
285 return uri
285 return uri
286 return literal(annotate_highlight(filenode, url_func, **kwargs))
286 return literal(annotate_highlight(filenode, url_func, **kwargs))
287
287
288 def repo_name_slug(value):
288 def repo_name_slug(value):
289 """Return slug of name of repository
289 """Return slug of name of repository
290 This function is called on each creation/modification
290 This function is called on each creation/modification
@@ -292,7 +292,7 b' def repo_name_slug(value):'
292 """
292 """
293 slug = remove_formatting(value)
293 slug = remove_formatting(value)
294 slug = strip_tags(slug)
294 slug = strip_tags(slug)
295
295
296 for c in """=[]\;'"<>,/~!@#$%^&*()+{}|: """:
296 for c in """=[]\;'"<>,/~!@#$%^&*()+{}|: """:
297 slug = slug.replace(c, '-')
297 slug = slug.replace(c, '-')
298 slug = recursive_replace(slug, '-')
298 slug = recursive_replace(slug, '-')
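Assuming no further normalisation beyond what this hunk shows, the slug function behaves roughly like this for a hypothetical input:

    repo_name_slug('my repo/name')     # -> 'my-repo-name'
    # spaces and punctuation become '-', runs of '-' are collapsed by recursive_replace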
@@ -305,7 +305,7 b' def get_changeset_safe(repo, rev):'
305 if not isinstance(repo, BaseRepository):
305 if not isinstance(repo, BaseRepository):
306 raise Exception('You must pass an Repository '
306 raise Exception('You must pass an Repository '
307 'object as first argument got %s', type(repo))
307 'object as first argument got %s', type(repo))
308
308
309 try:
309 try:
310 cs = repo.get_changeset(rev)
310 cs = repo.get_changeset(rev)
311 except RepositoryError:
311 except RepositoryError:
@@ -323,7 +323,7 b' flash = _Flash()'
323 from mercurial import util
323 from mercurial import util
324 from mercurial.templatefilters import age as _age, person as _person
324 from mercurial.templatefilters import age as _age, person as _person
325
325
326 age = lambda x:_age(x)
326 age = lambda x:x
327 capitalize = lambda x: x.capitalize()
327 capitalize = lambda x: x.capitalize()
328 date = lambda x: util.datestr(x)
328 date = lambda x: util.datestr(x)
329 email = util.email
329 email = util.email
@@ -333,8 +333,8 b' hgdate = lambda x: "%d %d" % x'
333 isodate = lambda x: util.datestr(x, '%Y-%m-%d %H:%M %1%2')
333 isodate = lambda x: util.datestr(x, '%Y-%m-%d %H:%M %1%2')
334 isodatesec = lambda x: util.datestr(x, '%Y-%m-%d %H:%M:%S %1%2')
334 isodatesec = lambda x: util.datestr(x, '%Y-%m-%d %H:%M:%S %1%2')
335 localdate = lambda x: (x[0], util.makedate()[1])
335 localdate = lambda x: (x[0], util.makedate()[1])
336 rfc822date = lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S %1%2")
336 rfc822date = lambda x: x#util.datestr(x, "%a, %d %b %Y %H:%M:%S %1%2")
337 rfc822date_notz = lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S")
337 rfc822date_notz = lambda x: x#util.datestr(x, "%a, %d %b %Y %H:%M:%S")
338 rfc3339date = lambda x: util.datestr(x, "%Y-%m-%dT%H:%M:%S%1:%2")
338 rfc3339date = lambda x: util.datestr(x, "%Y-%m-%dT%H:%M:%S%1:%2")
339 time_ago = lambda x: util.datestr(_age(x), "%a, %d %b %Y %H:%M:%S %1%2")
339 time_ago = lambda x: util.datestr(_age(x), "%a, %d %b %Y %H:%M:%S %1%2")
340
340
@@ -358,8 +358,8 b' def gravatar_url(email_address, size=30)'
358 baseurl_nossl = "http://www.gravatar.com/avatar/"
358 baseurl_nossl = "http://www.gravatar.com/avatar/"
359 baseurl_ssl = "https://secure.gravatar.com/avatar/"
359 baseurl_ssl = "https://secure.gravatar.com/avatar/"
360 baseurl = baseurl_ssl if ssl_enabled else baseurl_nossl
360 baseurl = baseurl_ssl if ssl_enabled else baseurl_nossl
361
361
362
362
363 # construct the url
363 # construct the url
364 gravatar_url = baseurl + hashlib.md5(email_address.lower()).hexdigest() + "?"
364 gravatar_url = baseurl + hashlib.md5(email_address.lower()).hexdigest() + "?"
365 gravatar_url += urllib.urlencode({'d':default, 's':str(size)})
365 gravatar_url += urllib.urlencode({'d':default, 's':str(size)})
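The URL construction above boils down to an md5 of the lowercased address plus a query string; a minimal sketch using the Python 2 stdlib, with a hypothetical address and an assumed default-image parameter:

    import hashlib
    import urllib

    email_address = 'someone@example.com'              # hypothetical
    baseurl = "https://secure.gravatar.com/avatar/"
    gravatar_url = baseurl + hashlib.md5(email_address.lower()).hexdigest() + "?"
    gravatar_url += urllib.urlencode({'d': 'identicon', 's': '30'})  # 'identicon' default is an assumption
    # -> https://secure.gravatar.com/avatar/<md5>?... carrying the d and s parameters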
@@ -370,7 +370,7 b' def safe_unicode(str):'
370 """safe unicode function. In case of UnicodeDecode error we try to return
370 """safe unicode function. In case of UnicodeDecode error we try to return
371 unicode with errors replace, if this failes we return unicode with
371 unicode with errors replace, if this failes we return unicode with
372 string_escape decoding """
372 string_escape decoding """
373
373
374 try:
374 try:
375 u_str = unicode(str)
375 u_str = unicode(str)
376 except UnicodeDecodeError:
376 except UnicodeDecodeError:
@@ -379,5 +379,5 b' def safe_unicode(str):'
379 except UnicodeDecodeError:
379 except UnicodeDecodeError:
380 #incase we have a decode error just represent as byte string
380 #incase we have a decode error just represent as byte string
381 u_str = unicode(str(str).encode('string_escape'))
381 u_str = unicode(str(str).encode('string_escape'))
382
382
383 return u_str
383 return u_str
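Typical behaviour of safe_unicode, per the docstring above (the byte-string fallback path is only partially visible in this hunk):

    safe_unicode('hello')          # -> u'hello'
    safe_unicode(u'\u0142aka')     # already unicode, returned unchanged
    # byte strings that fail to decode are returned with errors replaced or
    # string_escape'd instead of raising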
@@ -1,4 +1,10 b''
1 import os
2 import sys
1 from os.path import dirname as dn, join as jn
3 from os.path import dirname as dn, join as jn
4
5 #to get the rhodecode import
6 sys.path.append(dn(dn(dn(os.path.realpath(__file__)))))
7
2 from rhodecode.config.environment import load_environment
8 from rhodecode.config.environment import load_environment
3 from rhodecode.model.hg import HgModel
9 from rhodecode.model.hg import HgModel
4 from shutil import rmtree
10 from shutil import rmtree
@@ -9,15 +15,10 b' from whoosh.analysis import RegexTokeniz'
9 from whoosh.fields import TEXT, ID, STORED, Schema, FieldType
15 from whoosh.fields import TEXT, ID, STORED, Schema, FieldType
10 from whoosh.index import create_in, open_dir
16 from whoosh.index import create_in, open_dir
11 from whoosh.formats import Characters
17 from whoosh.formats import Characters
12 from whoosh.highlight import highlight, SimpleFragmenter, HtmlFormatter
18 from whoosh.highlight import highlight, SimpleFragmenter, HtmlFormatter
13
19
14 import os
15 import sys
16 import traceback
20 import traceback
17
21
18 #to get the rhodecode import
19 sys.path.append(dn(dn(dn(os.path.realpath(__file__)))))
20
21
22
22 #LOCATION WE KEEP THE INDEX
23 #LOCATION WE KEEP THE INDEX
23 IDX_LOCATION = jn(dn(dn(dn(dn(os.path.abspath(__file__))))), 'data', 'index')
24 IDX_LOCATION = jn(dn(dn(dn(dn(os.path.abspath(__file__))))), 'data', 'index')
@@ -45,9 +46,62 b' SCHEMA = Schema(owner=TEXT(),'
45
46
46
47
47 IDX_NAME = 'HG_INDEX'
48 IDX_NAME = 'HG_INDEX'
48 FORMATTER = HtmlFormatter('span', between='\n<span class="break">...</span>\n')
49 FORMATTER = HtmlFormatter('span', between='\n<span class="break">...</span>\n')
49 FRAGMENTER = SimpleFragmenter(200)
50 FRAGMENTER = SimpleFragmenter(200)
50
51
52 from paste.script import command
53 import ConfigParser
54
55 class MakeIndex(command.Command):
56
57 max_args = 1
58 min_args = 1
59
60 usage = "CONFIG_FILE"
61 summary = "Creates index for full text search given configuration file"
62 group_name = "Whoosh indexing"
63
64 parser = command.Command.standard_parser(verbose=True)
65 # parser.add_option('--repo-location',
66 # action='store',
67 # dest='repo_location',
68 # help="Specifies repositories location to index",
69 # )
70 parser.add_option('-f',
71 action='store_true',
72 dest='full_index',
73 help="Specifies that index should be made full i.e"
74 " destroy old and build from scratch",
75 default=False)
76 def command(self):
77 config_name = self.args[0]
78
79 p = config_name.split('/')
80 if len(p) == 1:
81 root = '.'
82 else:
83 root = '/'.join(p[:-1])
84 print root
85 config = ConfigParser.ConfigParser({'here':root})
86 config.read(config_name)
87 print dict(config.items('app:main'))['index_dir']
88 index_location = dict(config.items('app:main'))['index_dir']
89 #return
90
91 #=======================================================================
92 # WHOOSH DAEMON
93 #=======================================================================
94 from rhodecode.lib.pidlock import LockHeld, DaemonLock
95 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
96 try:
97 l = DaemonLock()
98 WhooshIndexingDaemon(index_location=index_location)\
99 .run(full_index=self.options.full_index)
100 l.release()
101 except LockHeld:
102 sys.exit(1)
103
104
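MakeIndex is a paste.script command; assuming it gets registered under the name make-index in the package entry points (registration is not part of this hunk), a hypothetical invocation would look like:

    paster make-index production.ini -f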
51 class ResultWrapper(object):
105 class ResultWrapper(object):
52 def __init__(self, search_type, searcher, matcher, highlight_items):
106 def __init__(self, search_type, searcher, matcher, highlight_items):
53 self.search_type = search_type
107 self.search_type = search_type
@@ -55,7 +109,7 b' class ResultWrapper(object):'
55 self.matcher = matcher
109 self.matcher = matcher
56 self.highlight_items = highlight_items
110 self.highlight_items = highlight_items
57 self.fragment_size = 200 / 2
111 self.fragment_size = 200 / 2
58
112
59 @LazyProperty
113 @LazyProperty
60 def doc_ids(self):
114 def doc_ids(self):
61 docs_id = []
115 docs_id = []
@@ -64,8 +118,8 b' class ResultWrapper(object):'
64 chunks = [offsets for offsets in self.get_chunks()]
118 chunks = [offsets for offsets in self.get_chunks()]
65 docs_id.append([docnum, chunks])
119 docs_id.append([docnum, chunks])
66 self.matcher.next()
120 self.matcher.next()
67 return docs_id
121 return docs_id
68
122
69 def __str__(self):
123 def __str__(self):
70 return '<%s at %s>' % (self.__class__.__name__, len(self.doc_ids))
124 return '<%s at %s>' % (self.__class__.__name__, len(self.doc_ids))
71
125
@@ -91,32 +145,32 b' class ResultWrapper(object):'
91 slice = []
145 slice = []
92 for docid in self.doc_ids[i:j]:
146 for docid in self.doc_ids[i:j]:
93 slice.append(self.get_full_content(docid))
147 slice.append(self.get_full_content(docid))
94 return slice
148 return slice
95
149
96
150
97 def get_full_content(self, docid):
151 def get_full_content(self, docid):
98 res = self.searcher.stored_fields(docid[0])
152 res = self.searcher.stored_fields(docid[0])
99 f_path = res['path'][res['path'].find(res['repository']) \
153 f_path = res['path'][res['path'].find(res['repository']) \
100 + len(res['repository']):].lstrip('/')
154 + len(res['repository']):].lstrip('/')
101
155
102 content_short = self.get_short_content(res, docid[1])
156 content_short = self.get_short_content(res, docid[1])
103 res.update({'content_short':content_short,
157 res.update({'content_short':content_short,
104 'content_short_hl':self.highlight(content_short),
158 'content_short_hl':self.highlight(content_short),
105 'f_path':f_path})
159 'f_path':f_path})
106
160
107 return res
161 return res
108
162
109 def get_short_content(self, res, chunks):
163 def get_short_content(self, res, chunks):
110
164
111 return ''.join([res['content'][chunk[0]:chunk[1]] for chunk in chunks])
165 return ''.join([res['content'][chunk[0]:chunk[1]] for chunk in chunks])
112
166
113 def get_chunks(self):
167 def get_chunks(self):
114 """
168 """
115 Smart function that implements chunking the content
169 Smart function that implements chunking the content
116 but not overlap chunks so it doesn't highlight the same
170 but not overlap chunks so it doesn't highlight the same
117 close occurrences twice.
171 close occurrences twice.
118 :param matcher:
172 @param matcher:
119 :param size:
173 @param size:
120 """
174 """
121 memory = [(0, 0)]
175 memory = [(0, 0)]
122 for span in self.matcher.spans():
176 for span in self.matcher.spans():
@@ -124,12 +178,12 b' class ResultWrapper(object):'
124 end = span.endchar or 0
178 end = span.endchar or 0
125 start_offseted = max(0, start - self.fragment_size)
179 start_offseted = max(0, start - self.fragment_size)
126 end_offseted = end + self.fragment_size
180 end_offseted = end + self.fragment_size
127
181
128 if start_offseted < memory[-1][1]:
182 if start_offseted < memory[-1][1]:
129 start_offseted = memory[-1][1]
183 start_offseted = memory[-1][1]
130 memory.append((start_offseted, end_offseted,))
184 memory.append((start_offseted, end_offseted,))
131 yield (start_offseted, end_offseted,)
185 yield (start_offseted, end_offseted,)
132
186
133 def highlight(self, content, top=5):
187 def highlight(self, content, top=5):
134 if self.search_type != 'content':
188 if self.search_type != 'content':
135 return ''
189 return ''
@@ -139,4 +193,4 b' class ResultWrapper(object):'
139 fragmenter=FRAGMENTER,
193 fragmenter=FRAGMENTER,
140 formatter=FORMATTER,
194 formatter=FORMATTER,
141 top=top)
195 top=top)
142 return hl
196 return hl
@@ -32,12 +32,12 b' from os.path import join as jn'
32 project_path = dn(dn(dn(dn(os.path.realpath(__file__)))))
32 project_path = dn(dn(dn(dn(os.path.realpath(__file__)))))
33 sys.path.append(project_path)
33 sys.path.append(project_path)
34
34
35 from rhodecode.lib.pidlock import LockHeld, DaemonLock
35
36 from rhodecode.model.hg import HgModel
36 from rhodecode.model.hg import HgModel
37 from rhodecode.lib.helpers import safe_unicode
37 from rhodecode.lib.helpers import safe_unicode
38 from whoosh.index import create_in, open_dir
38 from whoosh.index import create_in, open_dir
39 from shutil import rmtree
39 from shutil import rmtree
40 from rhodecode.lib.indexers import INDEX_EXTENSIONS, IDX_LOCATION, SCHEMA, IDX_NAME
40 from rhodecode.lib.indexers import INDEX_EXTENSIONS, SCHEMA, IDX_NAME
41
41
42 from time import mktime
42 from time import mktime
43 from vcs.exceptions import ChangesetError, RepositoryError
43 from vcs.exceptions import ChangesetError, RepositoryError
@@ -61,25 +61,37 b' ch.setFormatter(formatter)'
61 # add ch to logger
61 # add ch to logger
62 log.addHandler(ch)
62 log.addHandler(ch)
63
63
64 def scan_paths(root_location):
64 def get_repos_location():
65 return HgModel.repo_scan('/', root_location, None, True)
65 return HgModel.get_repos_location()
66
66
67
67 class WhooshIndexingDaemon(object):
68 class WhooshIndexingDaemon(object):
68 """
69 """
69 Deamon for atomic jobs
70 Deamon for atomic jobs
70 """
71 """
71
72
72 def __init__(self, indexname='HG_INDEX', repo_location=None):
73 def __init__(self, indexname='HG_INDEX', index_location=None,
74 repo_location=None):
73 self.indexname = indexname
75 self.indexname = indexname
76
77 self.index_location = index_location
78 if not index_location:
79 raise Exception('You have to provide index location')
80
74 self.repo_location = repo_location
81 self.repo_location = repo_location
75 self.repo_paths = scan_paths(self.repo_location)
82 if not repo_location:
83 raise Exception('You have to provide repositories location')
84
85
86
87 self.repo_paths = HgModel.repo_scan('/', self.repo_location, None, True)
76 self.initial = False
88 self.initial = False
77 if not os.path.isdir(IDX_LOCATION):
89 if not os.path.isdir(self.index_location):
78 os.mkdir(IDX_LOCATION)
90 os.mkdir(self.index_location)
79 log.info('Cannot run incremental index since it does not'
91 log.info('Cannot run incremental index since it does not'
80 ' yet exist running full build')
92 ' yet exist running full build')
81 self.initial = True
93 self.initial = True
82
94
83 def get_paths(self, repo):
95 def get_paths(self, repo):
84 """
96 """
85 recursive walk in root dir and return a set of all path in that dir
97 recursive walk in root dir and return a set of all path in that dir
@@ -87,27 +99,25 b' class WhooshIndexingDaemon(object):'
87 """
99 """
88 index_paths_ = set()
100 index_paths_ = set()
89 try:
101 try:
90 tip = repo.get_changeset()
102 for topnode, dirs, files in repo.walk('/', 'tip'):
91
92 for topnode, dirs, files in tip.walk('/'):
93 for f in files:
103 for f in files:
94 index_paths_.add(jn(repo.path, f.path))
104 index_paths_.add(jn(repo.path, f.path))
95 for dir in dirs:
105 for dir in dirs:
96 for f in files:
106 for f in files:
97 index_paths_.add(jn(repo.path, f.path))
107 index_paths_.add(jn(repo.path, f.path))
98
108
99 except RepositoryError:
109 except RepositoryError:
100 pass
110 pass
101 return index_paths_
111 return index_paths_
102
112
103 def get_node(self, repo, path):
113 def get_node(self, repo, path):
104 n_path = path[len(repo.path) + 1:]
114 n_path = path[len(repo.path) + 1:]
105 node = repo.get_changeset().get_node(n_path)
115 node = repo.get_changeset().get_node(n_path)
106 return node
116 return node
107
117
108 def get_node_mtime(self, node):
118 def get_node_mtime(self, node):
109 return mktime(node.last_changeset.date.timetuple())
119 return mktime(node.last_changeset.date.timetuple())
110
120
111 def add_doc(self, writer, path, repo):
121 def add_doc(self, writer, path, repo):
112 """Adding doc to writer"""
122 """Adding doc to writer"""
113 node = self.get_node(repo, path)
123 node = self.get_node(repo, path)
@@ -120,63 +130,63 b' class WhooshIndexingDaemon(object):'
120 log.debug(' >> %s' % path)
130 log.debug(' >> %s' % path)
121 #just index file name without it's content
131 #just index file name without it's content
122 u_content = u''
132 u_content = u''
123
133
124 writer.add_document(owner=unicode(repo.contact),
134 writer.add_document(owner=unicode(repo.contact),
125 repository=safe_unicode(repo.name),
135 repository=safe_unicode(repo.name),
126 path=safe_unicode(path),
136 path=safe_unicode(path),
127 content=u_content,
137 content=u_content,
128 modtime=self.get_node_mtime(node),
138 modtime=self.get_node_mtime(node),
129 extension=node.extension)
139 extension=node.extension)
140
130
141
131
132 def build_index(self):
142 def build_index(self):
133 if os.path.exists(IDX_LOCATION):
143 if os.path.exists(self.index_location):
134 log.debug('removing previous index')
144 log.debug('removing previous index')
135 rmtree(IDX_LOCATION)
145 rmtree(self.index_location)
136
146
137 if not os.path.exists(IDX_LOCATION):
147 if not os.path.exists(self.index_location):
138 os.mkdir(IDX_LOCATION)
148 os.mkdir(self.index_location)
139
149
140 idx = create_in(IDX_LOCATION, SCHEMA, indexname=IDX_NAME)
150 idx = create_in(self.index_location, SCHEMA, indexname=IDX_NAME)
141 writer = idx.writer()
151 writer = idx.writer()
142
152
143 for cnt, repo in enumerate(self.repo_paths.values()):
153 for cnt, repo in enumerate(self.repo_paths.values()):
144 log.debug('building index @ %s' % repo.path)
154 log.debug('building index @ %s' % repo.path)
145
155
146 for idx_path in self.get_paths(repo):
156 for idx_path in self.get_paths(repo):
147 self.add_doc(writer, idx_path, repo)
157 self.add_doc(writer, idx_path, repo)
148
158
149 log.debug('>> COMMITING CHANGES <<')
159 log.debug('>> COMMITING CHANGES <<')
150 writer.commit(merge=True)
160 writer.commit(merge=True)
151 log.debug('>>> FINISHED BUILDING INDEX <<<')
161 log.debug('>>> FINISHED BUILDING INDEX <<<')
152
162
153
163
154 def update_index(self):
164 def update_index(self):
155 log.debug('STARTING INCREMENTAL INDEXING UPDATE')
165 log.debug('STARTING INCREMENTAL INDEXING UPDATE')
156
166
157 idx = open_dir(IDX_LOCATION, indexname=self.indexname)
167 idx = open_dir(self.index_location, indexname=self.indexname)
158 # The set of all paths in the index
168 # The set of all paths in the index
159 indexed_paths = set()
169 indexed_paths = set()
160 # The set of all paths we need to re-index
170 # The set of all paths we need to re-index
161 to_index = set()
171 to_index = set()
162
172
163 reader = idx.reader()
173 reader = idx.reader()
164 writer = idx.writer()
174 writer = idx.writer()
165
175
166 # Loop over the stored fields in the index
176 # Loop over the stored fields in the index
167 for fields in reader.all_stored_fields():
177 for fields in reader.all_stored_fields():
168 indexed_path = fields['path']
178 indexed_path = fields['path']
169 indexed_paths.add(indexed_path)
179 indexed_paths.add(indexed_path)
170
180
171 repo = self.repo_paths[fields['repository']]
181 repo = self.repo_paths[fields['repository']]
172
182
173 try:
183 try:
174 node = self.get_node(repo, indexed_path)
184 node = self.get_node(repo, indexed_path)
175 except ChangesetError:
185 except ChangesetError:
176 # This file was deleted since it was indexed
186 # This file was deleted since it was indexed
177 log.debug('removing from index %s' % indexed_path)
187 log.debug('removing from index %s' % indexed_path)
178 writer.delete_by_term('path', indexed_path)
188 writer.delete_by_term('path', indexed_path)
179
189
180 else:
190 else:
181 # Check if this file was changed since it was indexed
191 # Check if this file was changed since it was indexed
182 indexed_time = fields['modtime']
192 indexed_time = fields['modtime']
@@ -187,7 +197,7 b' class WhooshIndexingDaemon(object):'
187 log.debug('adding to reindex list %s' % indexed_path)
197 log.debug('adding to reindex list %s' % indexed_path)
188 writer.delete_by_term('path', indexed_path)
198 writer.delete_by_term('path', indexed_path)
189 to_index.add(indexed_path)
199 to_index.add(indexed_path)
190
200
191 # Loop over the files in the filesystem
201 # Loop over the files in the filesystem
192 # Assume we have a function that gathers the filenames of the
202 # Assume we have a function that gathers the filenames of the
193 # documents to be indexed
203 # documents to be indexed
@@ -198,51 +208,14 b' class WhooshIndexingDaemon(object):'
198 # that wasn't indexed before. So index it!
208 # that wasn't indexed before. So index it!
199 self.add_doc(writer, path, repo)
209 self.add_doc(writer, path, repo)
200 log.debug('re indexing %s' % path)
210 log.debug('re indexing %s' % path)
201
211
202 log.debug('>> COMMITING CHANGES <<')
212 log.debug('>> COMMITING CHANGES <<')
203 writer.commit(merge=True)
213 writer.commit(merge=True)
204 log.debug('>>> FINISHED REBUILDING INDEX <<<')
214 log.debug('>>> FINISHED REBUILDING INDEX <<<')
205
215
206 def run(self, full_index=False):
216 def run(self, full_index=False):
207 """Run daemon"""
217 """Run daemon"""
208 if full_index or self.initial:
218 if full_index or self.initial:
209 self.build_index()
219 self.build_index()
210 else:
220 else:
211 self.update_index()
221 self.update_index()
212
213 if __name__ == "__main__":
214 arg = sys.argv[1:]
215 if len(arg) != 2:
216 sys.stderr.write('Please specify indexing type [full|incremental]'
217 'and path to repositories as script args \n')
218 sys.exit()
219
220
221 if arg[0] == 'full':
222 full_index = True
223 elif arg[0] == 'incremental':
224 # False means looking just for changes
225 full_index = False
226 else:
227 sys.stdout.write('Please use [full|incremental]'
228 ' as script first arg \n')
229 sys.exit()
230
231 if not os.path.isdir(arg[1]):
232 sys.stderr.write('%s is not a valid path \n' % arg[1])
233 sys.exit()
234 else:
235 if arg[1].endswith('/'):
236 repo_location = arg[1] + '*'
237 else:
238 repo_location = arg[1] + '/*'
239
240 try:
241 l = DaemonLock()
242 WhooshIndexingDaemon(repo_location=repo_location)\
243 .run(full_index=full_index)
244 l.release()
245 reload(logging)
246 except LockHeld:
247 sys.exit(1)
248
@@ -16,24 +16,28 b''
16 # along with this program; if not, write to the Free Software
16 # along with this program; if not, write to the Free Software
17 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
17 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
18 # MA 02110-1301, USA.
18 # MA 02110-1301, USA.
19 from UserDict import DictMixin
20 from mercurial import ui, config, hg
21 from mercurial.error import RepoError
22 from rhodecode.model import meta
23 from rhodecode.model.caching_query import FromCache
24 from rhodecode.model.db import Repository, User, RhodeCodeUi, RhodeCodeSettings, \
25 UserLog
26 from rhodecode.model.repo import RepoModel
27 from rhodecode.model.user import UserModel
28 from vcs.backends.base import BaseChangeset
29 from vcs.backends.git import GitRepository
30 from vcs.backends.hg import MercurialRepository
31 from vcs.utils.lazy import LazyProperty
32 import datetime
33 import logging
34 import os
19
35
20 """
36 """
21 Created on April 18, 2010
37 Created on April 18, 2010
22 Utilities for RhodeCode
38 Utilities for RhodeCode
23 @author: marcink
39 @author: marcink
24 """
40 """
25 from rhodecode.model.caching_query import FromCache
26 from mercurial import ui, config, hg
27 from mercurial.error import RepoError
28 from rhodecode.model import meta
29 from rhodecode.model.user import UserModel
30 from rhodecode.model.repo import RepoModel
31 from rhodecode.model.db import Repository, User, RhodeCodeUi, RhodeCodeSettings, UserLog
32 from vcs.backends.base import BaseChangeset
33 from vcs.utils.lazy import LazyProperty
34 import logging
35 import datetime
36 import os
37
41
38 log = logging.getLogger(__name__)
42 log = logging.getLogger(__name__)
39
43
@@ -96,14 +100,30 b' def action_logger(user, action, repo, ip'
96 sa.rollback()
100 sa.rollback()
97 log.error('could not log user action:%s', str(e))
101 log.error('could not log user action:%s', str(e))
98
102
99 def check_repo_dir(paths):
103 def get_repos(path, recursive=False, initial=False):
100 repos_path = paths[0][1].split('/')
104 """
101 if repos_path[-1] in ['*', '**']:
105 Scans given path for repos and return (name,(type,path)) tuple
102 repos_path = repos_path[:-1]
106 :param prefix:
103 if repos_path[0] != '/':
107 :param path:
104 repos_path[0] = '/'
108 :param recursive:
105 if not os.path.isdir(os.path.join(*repos_path)):
109 :param initial:
106 raise Exception('Not a valid repository in %s' % paths[0][1])
110 """
111 from vcs.utils.helpers import get_scm
112 from vcs.exceptions import VCSError
113 scm = get_scm(path)
114 if scm:
115 raise Exception('The given path %s should not be a repository got %s',
116 path, scm)
117
118 for dirpath in os.listdir(path):
119 try:
120 yield dirpath, get_scm(os.path.join(path, dirpath))
121 except VCSError:
122 pass
123
124 if __name__ == '__main__':
125 get_repos('', '/home/marcink/workspace-python')
126
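A usage sketch of the new scanner with a hypothetical root directory; each item is (name, (scm_alias, path)) as described in the docstring:

    from rhodecode.lib.utils import get_repos

    for name, (scm, repo_path) in get_repos('/srv/repos'):
        print('%s: %s repository at %s' % (name, scm, repo_path))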
107
127
108 def check_repo_fast(repo_name, base_path):
128 def check_repo_fast(repo_name, base_path):
109 if os.path.isdir(os.path.join(base_path, repo_name)):return False
129 if os.path.isdir(os.path.join(base_path, repo_name)):return False
@@ -231,8 +251,6 b" def make_ui(read_from='file', path=None,"
231 for k, v in cfg.items(section):
251 for k, v in cfg.items(section):
232 baseui.setconfig(section, k, v)
252 baseui.setconfig(section, k, v)
233 log.debug('settings ui from file[%s]%s:%s', section, k, v)
253 log.debug('settings ui from file[%s]%s:%s', section, k, v)
234 if checkpaths:check_repo_dir(cfg.items('paths'))
235
236
254
237 elif read_from == 'db':
255 elif read_from == 'db':
238 hg_ui = get_hg_ui_cached()
256 hg_ui = get_hg_ui_cached()
@@ -284,7 +302,7 b' class EmptyChangeset(BaseChangeset):'
284 @LazyProperty
302 @LazyProperty
285 def raw_id(self):
303 def raw_id(self):
286 """
304 """
287 Returns raw string identifing this changeset, useful for web
305 Returns raw string identifying this changeset, useful for web
288 representation.
306 representation.
289 """
307 """
290 return '0' * 40
308 return '0' * 40
@@ -308,16 +326,21 b' def repo2db_mapper(initial_repo_list, re'
308 """
326 """
309
327
310 sa = meta.Session()
328 sa = meta.Session()
329 rm = RepoModel(sa)
311 user = sa.query(User).filter(User.admin == True).first()
330 user = sa.query(User).filter(User.admin == True).first()
312
331
313 rm = RepoModel()
332 for name, repo in initial_repo_list.items():
333 if not rm.get(name, cache=False):
334 log.info('repository %s not found creating default', name)
314
335
315 for name, repo in initial_repo_list.items():
336 if isinstance(repo, MercurialRepository):
316 if not RepoModel(sa).get(name, cache=False):
337 repo_type = 'hg'
317 log.info('repository %s not found creating default', name)
338 if isinstance(repo, GitRepository):
339 repo_type = 'git'
318
340
319 form_data = {
341 form_data = {
320 'repo_name':name,
342 'repo_name':name,
343 'repo_type':repo_type,
321 'description':repo.description if repo.description != 'unknown' else \
344 'description':repo.description if repo.description != 'unknown' else \
322 'auto description for %s' % name,
345 'auto description for %s' % name,
323 'private':False
346 'private':False
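The repo_type value written to the new column comes from the backend class of the scanned repository; roughly, and with an assumed 'hg' fallback that the hunk itself does not spell out:

    from vcs.backends.hg import MercurialRepository
    from vcs.backends.git import GitRepository

    def detect_repo_type(repo):
        if isinstance(repo, GitRepository):
            return 'git'
        return 'hg'   # fallback assumption; the hunk only checks the two isinstance cases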
@@ -335,7 +358,6 b' def repo2db_mapper(initial_repo_list, re'
335
358
336 meta.Session.remove()
359 meta.Session.remove()
337
360
338 from UserDict import DictMixin
339
361
340 class OrderedDict(dict, DictMixin):
362 class OrderedDict(dict, DictMixin):
341
363
@@ -22,9 +22,9 b' class RhodeCodeUi(Base):'
22 ui_key = Column("ui_key", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
22 ui_key = Column("ui_key", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
23 ui_value = Column("ui_value", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
23 ui_value = Column("ui_value", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
24 ui_active = Column("ui_active", BOOLEAN(), nullable=True, unique=None, default=True)
24 ui_active = Column("ui_active", BOOLEAN(), nullable=True, unique=None, default=True)
25
25
26
26
27 class User(Base):
27 class User(Base):
28 __tablename__ = 'users'
28 __tablename__ = 'users'
29 __table_args__ = (UniqueConstraint('username'), UniqueConstraint('email'), {'useexisting':True})
29 __table_args__ = (UniqueConstraint('username'), UniqueConstraint('email'), {'useexisting':True})
30 user_id = Column("user_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
30 user_id = Column("user_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
@@ -36,21 +36,21 b' class User(Base):'
36 lastname = Column("lastname", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
36 lastname = Column("lastname", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
37 email = Column("email", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
37 email = Column("email", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
38 last_login = Column("last_login", DATETIME(timezone=False), nullable=True, unique=None, default=None)
38 last_login = Column("last_login", DATETIME(timezone=False), nullable=True, unique=None, default=None)
39
39
40 user_log = relation('UserLog')
40 user_log = relation('UserLog')
41 user_perms = relation('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id")
41 user_perms = relation('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id")
42
42
43 @LazyProperty
43 @LazyProperty
44 def full_contact(self):
44 def full_contact(self):
45 return '%s %s <%s>' % (self.name, self.lastname, self.email)
45 return '%s %s <%s>' % (self.name, self.lastname, self.email)
46
46
47 def __repr__(self):
47 def __repr__(self):
48 return "<User('id:%s:%s')>" % (self.user_id, self.username)
48 return "<User('id:%s:%s')>" % (self.user_id, self.username)
49
49
50 def update_lastlogin(self):
50 def update_lastlogin(self):
51 """Update user lastlogin"""
51 """Update user lastlogin"""
52 import datetime
52 import datetime
53
53
54 try:
54 try:
55 session = Session.object_session(self)
55 session = Session.object_session(self)
56 self.last_login = datetime.datetime.now()
56 self.last_login = datetime.datetime.now()
@@ -58,48 +58,49 b' class User(Base):'
58 session.commit()
58 session.commit()
59 log.debug('updated user %s lastlogin', self.username)
59 log.debug('updated user %s lastlogin', self.username)
60 except Exception:
60 except Exception:
61 session.rollback()
61 session.rollback()
62
62
63
63
64 class UserLog(Base):
64 class UserLog(Base):
65 __tablename__ = 'user_logs'
65 __tablename__ = 'user_logs'
66 __table_args__ = {'useexisting':True}
66 __table_args__ = {'useexisting':True}
67 user_log_id = Column("user_log_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
67 user_log_id = Column("user_log_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
68 user_id = Column("user_id", INTEGER(), ForeignKey(u'users.user_id'), nullable=False, unique=None, default=None)
68 user_id = Column("user_id", INTEGER(), ForeignKey(u'users.user_id'), nullable=False, unique=None, default=None)
69 repository_id = Column("repository_id", INTEGER(length=None, convert_unicode=False, assert_unicode=None), ForeignKey(u'repositories.repo_id'), nullable=False, unique=None, default=None)
69 repository_id = Column("repository_id", INTEGER(length=None, convert_unicode=False, assert_unicode=None), ForeignKey(u'repositories.repo_id'), nullable=False, unique=None, default=None)
70 repository_name = Column("repository_name", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
70 repository_name = Column("repository_name", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
71 user_ip = Column("user_ip", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
71 user_ip = Column("user_ip", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
72 action = Column("action", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
72 action = Column("action", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
73 action_date = Column("action_date", DATETIME(timezone=False), nullable=True, unique=None, default=None)
73 action_date = Column("action_date", DATETIME(timezone=False), nullable=True, unique=None, default=None)
74 revision = Column('revision', TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
74 revision = Column('revision', TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
75
75
76 user = relation('User')
76 user = relation('User')
77 repository = relation('Repository')
77 repository = relation('Repository')
78
78
79 class Repository(Base):
79 class Repository(Base):
80 __tablename__ = 'repositories'
80 __tablename__ = 'repositories'
81 __table_args__ = (UniqueConstraint('repo_name'), {'useexisting':True},)
81 __table_args__ = (UniqueConstraint('repo_name'), {'useexisting':True},)
82 repo_id = Column("repo_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
82 repo_id = Column("repo_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
83 repo_name = Column("repo_name", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
83 repo_name = Column("repo_name", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
84 repo_type = Column("repo_type", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
84 user_id = Column("user_id", INTEGER(), ForeignKey(u'users.user_id'), nullable=False, unique=False, default=None)
85 user_id = Column("user_id", INTEGER(), ForeignKey(u'users.user_id'), nullable=False, unique=False, default=None)
85 private = Column("private", BOOLEAN(), nullable=True, unique=None, default=None)
86 private = Column("private", BOOLEAN(), nullable=True, unique=None, default=None)
86 description = Column("description", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
87 description = Column("description", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
87 fork_id = Column("fork_id", INTEGER(), ForeignKey(u'repositories.repo_id'), nullable=True, unique=False, default=None)
88 fork_id = Column("fork_id", INTEGER(), ForeignKey(u'repositories.repo_id'), nullable=True, unique=False, default=None)
88
89
89 user = relation('User')
90 user = relation('User')
90 fork = relation('Repository', remote_side=repo_id)
91 fork = relation('Repository', remote_side=repo_id)
91 repo_to_perm = relation('RepoToPerm', cascade='all')
92 repo_to_perm = relation('RepoToPerm', cascade='all')
92
93
93 def __repr__(self):
94 def __repr__(self):
94 return "<Repository('id:%s:%s')>" % (self.repo_id, self.repo_name)
95 return "<Repository('id:%s:%s')>" % (self.repo_id, self.repo_name)
95
96
96 class Permission(Base):
97 class Permission(Base):
97 __tablename__ = 'permissions'
98 __tablename__ = 'permissions'
98 __table_args__ = {'useexisting':True}
99 __table_args__ = {'useexisting':True}
99 permission_id = Column("permission_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
100 permission_id = Column("permission_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
100 permission_name = Column("permission_name", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
101 permission_name = Column("permission_name", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
101 permission_longname = Column("permission_longname", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
102 permission_longname = Column("permission_longname", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
102
103
103 def __repr__(self):
104 def __repr__(self):
104 return "<Permission('%s:%s')>" % (self.permission_id, self.permission_name)
105 return "<Permission('%s:%s')>" % (self.permission_id, self.permission_name)
105
106
@@ -109,8 +110,8 b' class RepoToPerm(Base):'
109 repo_to_perm_id = Column("repo_to_perm_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
110 repo_to_perm_id = Column("repo_to_perm_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
110 user_id = Column("user_id", INTEGER(), ForeignKey(u'users.user_id'), nullable=False, unique=None, default=None)
111 user_id = Column("user_id", INTEGER(), ForeignKey(u'users.user_id'), nullable=False, unique=None, default=None)
111 permission_id = Column("permission_id", INTEGER(), ForeignKey(u'permissions.permission_id'), nullable=False, unique=None, default=None)
112 permission_id = Column("permission_id", INTEGER(), ForeignKey(u'permissions.permission_id'), nullable=False, unique=None, default=None)
112 repository_id = Column("repository_id", INTEGER(), ForeignKey(u'repositories.repo_id'), nullable=False, unique=None, default=None)
113 repository_id = Column("repository_id", INTEGER(), ForeignKey(u'repositories.repo_id'), nullable=False, unique=None, default=None)
113
114
114 user = relation('User')
115 user = relation('User')
115 permission = relation('Permission')
116 permission = relation('Permission')
116 repository = relation('Repository')
117 repository = relation('Repository')
@@ -121,7 +122,7 b' class UserToPerm(Base):'
121 user_to_perm_id = Column("user_to_perm_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
122 user_to_perm_id = Column("user_to_perm_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
122 user_id = Column("user_id", INTEGER(), ForeignKey(u'users.user_id'), nullable=False, unique=None, default=None)
123 user_id = Column("user_id", INTEGER(), ForeignKey(u'users.user_id'), nullable=False, unique=None, default=None)
123 permission_id = Column("permission_id", INTEGER(), ForeignKey(u'permissions.permission_id'), nullable=False, unique=None, default=None)
124 permission_id = Column("permission_id", INTEGER(), ForeignKey(u'permissions.permission_id'), nullable=False, unique=None, default=None)
124
125
125 user = relation('User')
126 user = relation('User')
126 permission = relation('Permission')
127 permission = relation('Permission')
127
128
@@ -134,6 +135,6 b' class Statistics(Base):'
134 commit_activity = Column("commit_activity", BLOB(), nullable=False)#JSON data
135 commit_activity = Column("commit_activity", BLOB(), nullable=False)#JSON data
135 commit_activity_combined = Column("commit_activity_combined", BLOB(), nullable=False)#JSON data
136 commit_activity_combined = Column("commit_activity_combined", BLOB(), nullable=False)#JSON data
136 languages = Column("languages", BLOB(), nullable=False)#JSON data
137 languages = Column("languages", BLOB(), nullable=False)#JSON data
137
138
138 repository = relation('Repository')
139 repository = relation('Repository')
139
140
@@ -194,16 +194,12 b' class ValidSettings(formencode.validator'
194
194
195 class ValidPath(formencode.validators.FancyValidator):
195 class ValidPath(formencode.validators.FancyValidator):
196 def to_python(self, value, state):
196 def to_python(self, value, state):
197 isdir = os.path.isdir(value.replace('*', ''))
197
198 if (value.endswith('/*') or value.endswith('/**')) and isdir:
198 if not os.path.isdir(value):
199 return value
200 elif not isdir:
201 msg = _('This is not a valid path')
199 msg = _('This is not a valid path')
202 else:
200 raise formencode.Invalid(msg, value, state,
203 msg = _('You need to specify * or ** at the end of path (ie. /tmp/*)')
204
205 raise formencode.Invalid(msg, value, state,
206 error_dict={'paths_root_path':msg})
201 error_dict={'paths_root_path':msg})
202 return value
207
203
208 def UniqSystemEmail(old_data):
204 def UniqSystemEmail(old_data):
209 class _UniqSystemEmail(formencode.validators.FancyValidator):
205 class _UniqSystemEmail(formencode.validators.FancyValidator):
@@ -24,7 +24,6 b' Model for RhodeCode'
24 """
24 """
25 from beaker.cache import cache_region
25 from beaker.cache import cache_region
26 from mercurial import ui
26 from mercurial import ui
27 from mercurial.hgweb.hgwebdir_mod import findrepos
28 from rhodecode.lib import helpers as h
27 from rhodecode.lib import helpers as h
29 from rhodecode.lib.utils import invalidate_cache
28 from rhodecode.lib.utils import invalidate_cache
30 from rhodecode.lib.auth import HasRepoPermissionAny
29 from rhodecode.lib.auth import HasRepoPermissionAny
@@ -33,12 +32,12 b' from rhodecode.model.db import Repositor'
33 from sqlalchemy.orm import joinedload
32 from sqlalchemy.orm import joinedload
34 from vcs.exceptions import RepositoryError, VCSError
33 from vcs.exceptions import RepositoryError, VCSError
35 import logging
34 import logging
36 import os
37 import sys
35 import sys
38 log = logging.getLogger(__name__)
36 log = logging.getLogger(__name__)
39
37
40 try:
38 try:
41 from vcs.backends.hg import MercurialRepository
39 from vcs.backends.hg import MercurialRepository
40 from vcs.backends.git import GitRepository
42 except ImportError:
41 except ImportError:
43 sys.stderr.write('You have to import vcs module')
42 sys.stderr.write('You have to import vcs module')
44 raise Exception('Unable to import vcs')
43 raise Exception('Unable to import vcs')
@@ -47,7 +46,7 b' def _get_repos_cached_initial(app_global'
47 """return cached dict with repos
46 """return cached dict with repos
48 """
47 """
49 g = app_globals
48 g = app_globals
50 return HgModel.repo_scan(g.paths[0][0], g.paths[0][1], g.baseui, initial)
49 return HgModel().repo_scan(g.paths[0][1], g.baseui, initial)
51
50
52 @cache_region('long_term', 'cached_repo_list')
51 @cache_region('long_term', 'cached_repo_list')
53 def _get_repos_cached():
52 def _get_repos_cached():
@@ -55,7 +54,7 b' def _get_repos_cached():'
55 """
54 """
56 log.info('getting all repositories list')
55 log.info('getting all repositories list')
57 from pylons import app_globals as g
56 from pylons import app_globals as g
58 return HgModel.repo_scan(g.paths[0][0], g.paths[0][1], g.baseui)
57 return HgModel().repo_scan(g.paths[0][1], g.baseui)
59
58
60 @cache_region('super_short_term', 'cached_repos_switcher_list')
59 @cache_region('super_short_term', 'cached_repos_switcher_list')
61 def _get_repos_switcher_cached(cached_repo_list):
60 def _get_repos_switcher_cached(cached_repo_list):
@@ -73,42 +72,34 b' def _full_changelog_cached(repo_name):'
73 return list(reversed(list(HgModel().get_repo(repo_name))))
72 return list(reversed(list(HgModel().get_repo(repo_name))))
74
73
75 class HgModel(object):
74 class HgModel(object):
76 """Mercurial Model
75 """
76 Mercurial Model
77 """
77 """
78
78
79 def __init__(self):
79 def __init__(self, sa=None):
80 pass
80 if not sa:
81 self.sa = meta.Session()
82 else:
83 self.sa = sa
81
84
82 @staticmethod
85 def repo_scan(self, repos_path, baseui, initial=False):
83 def repo_scan(repos_prefix, repos_path, baseui, initial=False):
84 """
86 """
85 Listing of repositories in given path. This path should not be a
87 Listing of repositories in given path. This path should not be a
86 repository itself. Return a dictionary of repository objects
88 repository itself. Return a dictionary of repository objects
87 :param repos_path: path to directory it could take syntax with
89
88 * or ** for deep recursive displaying repositories
90 :param repos_path: path to directory containing repositories
91 :param baseui
92 :param initial: initial scann
89 """
93 """
90 sa = meta.Session()
94 log.info('scanning for repositories in %s', repos_path)
91 def check_repo_dir(path):
92 """Checks the repository
93 :param path:
94 """
95 repos_path = path.split('/')
96 if repos_path[-1] in ['*', '**']:
97 repos_path = repos_path[:-1]
98 if repos_path[0] != '/':
99 repos_path[0] = '/'
100 if not os.path.isdir(os.path.join(*repos_path)):
101 raise RepositoryError('Not a valid repository in %s' % path)
102 if not repos_path.endswith('*'):
103 raise VCSError('You need to specify * or ** at the end of path '
104 'for recursive scanning')
105
95
106 check_repo_dir(repos_path)
107 log.info('scanning for repositories in %s', repos_path)
108 repos = findrepos([(repos_prefix, repos_path)])
109 if not isinstance(baseui, ui.ui):
96 if not isinstance(baseui, ui.ui):
110 baseui = ui.ui()
97 baseui = ui.ui()
111
98
99 from rhodecode.lib.utils import get_repos
100 repos = get_repos(repos_path)
101
102
112 repos_list = {}
103 repos_list = {}
113 for name, path in repos:
104 for name, path in repos:
114 try:
105 try:
@@ -117,15 +108,19 b' class HgModel(object):'
117 raise RepositoryError('Duplicate repository name %s found in'
108 raise RepositoryError('Duplicate repository name %s found in'
118 ' %s' % (name, path))
109 ' %s' % (name, path))
119 else:
110 else:
111 if path[0] == 'hg':
112 repos_list[name] = MercurialRepository(path[1], baseui=baseui)
113 repos_list[name].name = name
120
114
121 repos_list[name] = MercurialRepository(path, baseui=baseui)
115 if path[0] == 'git':
122 repos_list[name].name = name
116 repos_list[name] = GitRepository(path[1])
117 repos_list[name].name = name
123
118
124 dbrepo = None
119 dbrepo = None
125 if not initial:
120 if not initial:
126 #for initial scann on application first run we don't
121 #for initial scann on application first run we don't
127 #have db repos yet.
122 #have db repos yet.
128 dbrepo = sa.query(Repository)\
123 dbrepo = self.sa.query(Repository)\
129 .options(joinedload(Repository.fork))\
124 .options(joinedload(Repository.fork))\
130 .filter(Repository.repo_name == name)\
125 .filter(Repository.repo_name == name)\
131 .scalar()
126 .scalar()
@@ -137,16 +132,17 b' class HgModel(object):'
137 if dbrepo.user:
132 if dbrepo.user:
138 repos_list[name].contact = dbrepo.user.full_contact
133 repos_list[name].contact = dbrepo.user.full_contact
139 else:
134 else:
140 repos_list[name].contact = sa.query(User)\
135 repos_list[name].contact = self.sa.query(User)\
141 .filter(User.admin == True).first().full_contact
136 .filter(User.admin == True).first().full_contact
142 except OSError:
137 except OSError:
143 continue
138 continue
144 meta.Session.remove()
139
145 return repos_list
140 return repos_list
146
141
147 def get_repos(self):
142 def get_repos(self):
148 for name, repo in _get_repos_cached().items():
143 for name, repo in _get_repos_cached().items():
149 if repo._get_hidden():
144
145 if isinstance(repo, MercurialRepository) and repo._get_hidden():
150 #skip hidden web repository
146 #skip hidden web repository
151 continue
147 continue
152
148
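A rough example of calling the reworked scanner; the path is hypothetical, repo_scan is now an instance method, and a configured database session is assumed because HgModel() opens one:

    from rhodecode.model.hg import HgModel

    repos = HgModel().repo_scan('/srv/repos', baseui=None, initial=True)
    for name, repo in repos.items():
        print('%s -> %s' % (name, repo.__class__.__name__))   # MercurialRepository or GitRepository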
@@ -13,7 +13,7 b''
13 </tr>
13 </tr>
14 %for cnt,cs in enumerate(c.repo_changesets):
14 %for cnt,cs in enumerate(c.repo_changesets):
15 <tr class="parity${cnt%2}">
15 <tr class="parity${cnt%2}">
16 <td>${h.age(cs._ctx.date())} - ${h.rfc822date_notz(cs._ctx.date())} </td>
16 <td>${h.age(cs.date)} - ${h.rfc822date_notz(cs.date)} </td>
17 <td title="${cs.author}">${h.person(cs.author)}</td>
17 <td title="${cs.author}">${h.person(cs.author)}</td>
18 <td>r${cs.revision}:${cs.short_id}</td>
18 <td>r${cs.revision}:${cs.short_id}</td>
19 <td>
19 <td>