@@ -3,6 +3,12 b''
3 | 3  | Changelog
4 | 4  | =========
5 | 5  |
  | 6  | 1.1.0 (**XXXX-XX-XX**)
  | 7  | ----------------------
  | 8  | - git support
  | 9  | - performance upgrade for cached repos list
  | 10 |
  | 11 |
6 | 12 | 1.0.0 (**2010-10-xx**)
7 | 13 | ----------------------
8 | 14 |
@@ -24,7 +24,7 b' versioning implementation: http://semver'
24 |    | VERSION = (1,
   | 24 | @author: marcink
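Side note: with the tuple bumped to a four-element pre-release, the join on line 29 now produces a dotted string that includes the stage suffix:

```python
VERSION = (1, 1, 0, 'beta')
__version__ = '.'.join(str(each) for each in VERSION[:4])
print(__version__)  # -> 1.1.0.beta
```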
@@ -20,7 +20,7 b' def load_environment(global_conf, app_co'
20 | 20 | object
21 | 21 | """
22 | 22 | config = PylonsConfig()
23 |    |
   | 23 |
24 | 24 | # Pylons paths
25 | 25 | root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
26 | 26 | paths = dict(root=root,
@@ -34,11 +34,11 b' def load_environment(global_conf, app_co'
34 | 34 | config['routes.map'] = make_map(config)
35 | 35 | config['pylons.app_globals'] = app_globals.Globals(config)
36 | 36 | config['pylons.h'] = rhodecode.lib.helpers
37 |    |
   | 37 |
38 | 38 | # Setup cache object as early as possible
39 | 39 | import pylons
40 | 40 | pylons.cache._push_object(config['pylons.app_globals'].cache)
41 |    |
   | 41 |
42 | 42 | # Create the Mako TemplateLookup, with the default auto-escaping
43 | 43 | config['pylons.app_globals'].mako_lookup = TemplateLookup(
44 | 44 | directories=paths['templates'],
@@ -53,8 +53,8 b' def load_environment(global_conf, app_co'
53 | 53 | if test:
54 | 54 | from rhodecode.lib.utils import create_test_env, create_test_index
55 | 55 | create_test_env('/tmp', config)
56 |    | create_test_index('/tmp
57 |    |
   | 56 | create_test_index('/tmp', True)
   | 57 |
58 | 58 | #MULTIPLE DB configs
59 | 59 | # Setup the SQLAlchemy database engine
60 | 60 | if config['debug'] and not test:
@@ -68,12 +68,12 b' def load_environment(global_conf, app_co'
68 | 68 | init_model(sa_engine_db1)
69 | 69 | #init baseui
70 | 70 | config['pylons.app_globals'].baseui = make_ui('db')
71 |    |
   | 71 |
72 | 72 | repo2db_mapper(_get_repos_cached_initial(config['pylons.app_globals'], initial))
73 | 73 | set_available_permissions(config)
74 | 74 | set_base_path(config)
75 | 75 | set_rhodecode_config(config)
76 | 76 | # CONFIGURATION OPTIONS HERE (note: all config options will override
77 | 77 | # any Pylons config options)
78 |    |
   | 78 |
79 | 79 | return config
@@ -19,13 +19,13 b' class Globals(object):'
19 | 19 | self.cache = CacheManager(**parse_cache_config_options(config))
20 | 20 | self.available_permissions = None # propagated after init_model
21 | 21 | self.baseui = None # propagated after init_model
22 |    |
   | 22 |
23 | 23 | @LazyProperty
24 | 24 | def paths(self):
25 | 25 | if self.baseui:
26 | 26 | return self.baseui.configitems('paths')
27 |    |
   | 27 |
28 | 28 | @LazyProperty
29 | 29 | def base_path(self):
30 | 30 | if self.baseui:
31 |    | return self.paths[0][1]
   | 31 | return self.paths[0][1]
@@ -8,7 +8,11 b' from rhodecode.lib.smtp_mailer import Sm'
8  | 8  | from rhodecode.lib.utils import OrderedDict
9  | 9  | from time import mktime
10 | 10 | from vcs.backends.hg import MercurialRepository
   | 11 | from vcs.backends.git import GitRepository
   | 12 | import os
11 | 13 | import traceback
   | 14 | from vcs.backends import get_repo
   | 15 | from vcs.utils.helpers import get_scm
12 | 16 |
13 | 17 | try:
14 | 18 | import json
@@ -95,8 +99,9 b' def get_commits_stats(repo_name, ts_min_'
95 | 99  |
96 | 100 | commits_by_day_author_aggregate = {}
97 | 101 | commits_by_day_aggregate = {}
98 |     | repos_path = get_hg_ui_settings()['paths_root_path']
99 |     |
    | 102 | repos_path = get_hg_ui_settings()['paths_root_path']
    | 103 | p = os.path.join(repos_path, repo_name)
    | 104 | repo = get_repo(get_scm(p)[0], p)
100 | 105 |
101 | 106 | skip_date_limit = True
102 | 107 | parse_limit = 250 #limit for single task changeset parsing optimal for
@@ -305,8 +310,10 b' def __get_codes_stats(repo_name):'
305 |     | repos_path = get_hg_ui_settings()['paths_root_path']
309 |     |
    | 310 | 's', 'sh', 'tpl', 'txt', 'vim', 'wss', 'xhtml', 'xml', 'xsl', 'xslt', 'yaws']
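Both stats tasks above now detect the backend before opening the repository via `get_repo(get_scm(p)[0], p)`. A minimal sketch of what `get_scm` from the vcs library is doing here (the probing logic below is an illustration, not the library's actual code):

```python
import os

def get_scm(path):
    """Return (scm_type, path) by probing for a backend control dir."""
    if os.path.isdir(os.path.join(path, '.hg')):
        return 'hg', path
    if os.path.isdir(os.path.join(path, '.git')):
        return 'git', path
    raise ValueError('%s is not a recognized repository' % path)

# get_repo(scm_type, path) then maps 'hg'/'git' to the matching
# repository class, much like repo_scan does further down this diff.
```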
@@ -162,7 +162,7 b' class DbManage(object):'
162 | 162 | paths = RhodeCodeUi()
163 | 163 | paths.ui_section = 'paths'
164 | 164 | paths.ui_key = '/'
165 |     | paths.ui_value =
    | 165 | paths.ui_value = path
166 | 166 |
167 | 167 |
168 | 168 | hgsettings1 = RhodeCodeSettings()
@@ -64,20 +64,20 b" def recursive_replace(str, replace=' '):"
64 | 64 | return str
65 | 65 | else:
66 | 66 | str = str.replace(replace * 2, replace)
67 |    | return recursive_replace(str, replace)
   | 67 | return recursive_replace(str, replace)
68 | 68 |
69 | 69 | class _ToolTip(object):
70 |    |
   | 70 |
71 | 71 | def __call__(self, tooltip_title, trim_at=50):
72 | 72 | """
73 | 73 | Special function just to wrap our text into nice formatted autowrapped
74 | 74 | text
75 | 75 | :param tooltip_title:
76 | 76 | """
77 |    |
   | 77 |
78 | 78 | return wrap_paragraphs(escape(tooltip_title), trim_at)\
79 | 79 | .replace('\n', '<br/>')
80 |    |
   | 80 |
81 | 81 | def activate(self):
82 | 82 | """
83 | 83 | Adds tooltip mechanism to the given Html all tooltips have to have
@@ -85,7 +85,7 b' class _ToolTip(object):'
85 | 85 | Then a tooltip will be generated based on that
86 | 86 | All with yui js tooltip
87 | 87 | """
88 |    |
   | 88 |
89 | 89 | js = '''
90 | 90 | YAHOO.util.Event.onDOMReady(function(){
91 | 91 | function toolTipsId(){
@@ -190,19 +190,19 b' class _ToolTip(object):'
190 | 190 |
191 | 191 | });
192 | 192 | });
193 |     | '''
    | 193 | '''
194 | 194 | return literal(js)
195 | 195 |
196 | 196 | tooltip = _ToolTip()
197 | 197 |
198 | 198 | class _FilesBreadCrumbs(object):
199 |     |
    | 199 |
200 | 200 | def __call__(self, repo_name, rev, paths):
201 | 201 | url_l = [link_to(repo_name, url('files_home',
202 | 202 | repo_name=repo_name,
203 | 203 | revision=rev, f_path=''))]
204 | 204 | paths_l = paths.split('/')
205 |     |
    | 205 |
206 | 206 | for cnt, p in enumerate(paths_l, 1):
207 | 207 | if p != '':
208 | 208 | url_l.append(link_to(p, url('files_home',
@@ -236,12 +236,12 b' def pygmentize_annotation(filenode, **kw'
236 | 236 | pygmentize function for annotation
237 | 237 | :param filenode:
238 | 238 | """
239 |     |
    | 239 |
240 | 240 | color_dict = {}
241 | 241 | def gen_color():
242 | 242 | """generator for getting 10k of evenly distibuted colors using hsv color
243 | 243 | and golden ratio.
244 |     | """
    | 244 | """
245 | 245 | import colorsys
246 | 246 | n = 10000
247 | 247 | golden_ratio = 0.618033988749895
@@ -252,21 +252,21 b' def pygmentize_annotation(filenode, **kw'
252 | 252 | h %= 1
253 | 253 | HSV_tuple = [h, 0.95, 0.95]
254 | 254 | RGB_tuple = colorsys.hsv_to_rgb(*HSV_tuple)
255 |     | yield map(lambda x:str(int(x * 256)), RGB_tuple)
    | 255 | yield map(lambda x:str(int(x * 256)), RGB_tuple)
256 | 256 |
257 | 257 | cgenerator = gen_color()
258 |     |
    | 258 |
259 | 259 | def get_color_string(cs):
260 | 260 | if color_dict.has_key(cs):
261 | 261 | col = color_dict[cs]
262 | 262 | else:
263 | 263 | col = color_dict[cs] = cgenerator.next()
264 | 264 | return "color: rgb(%s)! important;" % (', '.join(col))
265 |     |
    | 265 |
266 | 266 | def url_func(changeset):
267 | 267 | tooltip_html = "<div style='font-size:0.8em'><b>Author:</b>" + \
268 |     | " %s<br/><b>Date:</b> %s</b><br/><b>Message:</b> %s<br/></div>"
269 |     |
    | 268 | " %s<br/><b>Date:</b> %s</b><br/><b>Message:</b> %s<br/></div>"
    | 269 |
270 | 270 | tooltip_html = tooltip_html % (changeset.author,
271 | 271 | changeset.date,
272 | 272 | tooltip(changeset.message))
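Note on `gen_color` above: advancing the hue by the golden-ratio conjugate spreads successive colors almost evenly around the hue wheel, so nearby changesets get visually distinct annotation colors. A standalone sketch of the same trick (Python 3 syntax; the start hue here is an arbitrary assumption, the diff doesn't show it):

```python
import colorsys

def gen_color(n=10000):
    """Yield n RGB triples (as strings) with golden-ratio hue stepping."""
    golden_ratio = 0.618033988749895
    h = 0.1  # arbitrary start hue, not taken from the diff
    for _ in range(n):
        h = (h + golden_ratio) % 1
        r, g, b = colorsys.hsv_to_rgb(h, 0.95, 0.95)
        yield [str(int(c * 256)) for c in (r, g, b)]

colors = gen_color()
print(next(colors))  # -> ['83', '12', '243'] for this start hue
```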
@@ -280,11 +280,11 b' def pygmentize_annotation(filenode, **kw'
280 | 280 | class_='tooltip',
281 | 281 | tooltip_title=tooltip_html
282 | 282 | )
283 |     |
    | 283 |
284 | 284 | uri += '\n'
285 |     | return uri
    | 285 | return uri
286 | 286 | return literal(annotate_highlight(filenode, url_func, **kwargs))
287 |     |
    | 287 |
288 | 288 | def repo_name_slug(value):
289 | 289 | """Return slug of name of repository
290 | 290 | This function is called on each creation/modification
@@ -292,7 +292,7 b' def repo_name_slug(value):'
292 | 292 | """
293 | 293 | slug = remove_formatting(value)
294 | 294 | slug = strip_tags(slug)
295 |     |
    | 295 |
296 | 296 | for c in """=[]\;'"<>,/~!@#$%^&*()+{}|: """:
297 | 297 | slug = slug.replace(c, '-')
298 | 298 | slug = recursive_replace(slug, '-')
@@ -305,7 +305,7 b' def get_changeset_safe(repo, rev):'
305 | 305 | if not isinstance(repo, BaseRepository):
306 | 306 | raise Exception('You must pass an Repository '
307 | 307 | 'object as first argument got %s', type(repo))
308 |     |
    | 308 |
309 | 309 | try:
310 | 310 | cs = repo.get_changeset(rev)
311 | 311 | except RepositoryError:
@@ -323,7 +323,7 b' flash = _Flash()'
323 | 323 | from mercurial import util
324 | 324 | from mercurial.templatefilters import age as _age, person as _person
325 | 325 |
326 |     | age = lambda x:
    | 326 | age = lambda x:x
327 | 327 | capitalize = lambda x: x.capitalize()
328 | 328 | date = lambda x: util.datestr(x)
329 | 329 | email = util.email
@@ -333,8 +333,8 b' hgdate = lambda x: "%d %d" % x'
333 | 333 | isodate = lambda x: util.datestr(x, '%Y-%m-%d %H:%M %1%2')
334 | 334 | isodatesec = lambda x: util.datestr(x, '%Y-%m-%d %H:%M:%S %1%2')
335 | 335 | localdate = lambda x: (x[0], util.makedate()[1])
336 |     | rfc822date = lambda x:
337 |     | rfc822date_notz = lambda x:
    | 336 | rfc822date = lambda x: x#util.datestr(x, "%a, %d %b %Y %H:%M:%S %1%2")
    | 337 | rfc822date_notz = lambda x: x#util.datestr(x, "%a, %d %b %Y %H:%M:%S")
338 | 338 | rfc3339date = lambda x: util.datestr(x, "%Y-%m-%dT%H:%M:%S%1:%2")
339 | 339 | time_ago = lambda x: util.datestr(_age(x), "%a, %d %b %Y %H:%M:%S %1%2")
340 | 340 |
@@ -358,8 +358,8 b' def gravatar_url(email_address, size=30)'
358 | 358 | baseurl_nossl = "http://www.gravatar.com/avatar/"
359 | 359 | baseurl_ssl = "https://secure.gravatar.com/avatar/"
360 | 360 | baseurl = baseurl_ssl if ssl_enabled else baseurl_nossl
361 |     |
362 |     |
    | 361 |
    | 362 |
363 | 363 | # construct the url
364 | 364 | gravatar_url = baseurl + hashlib.md5(email_address.lower()).hexdigest() + "?"
365 | 365 | gravatar_url += urllib.urlencode({'d':default, 's':str(size)})
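For reference, the whole gravatar construction in one self-contained sketch (Python 3 here, whereas the diff targets Python 2; the `default='identicon'` value is an assumption, since the parameter's default isn't visible in this hunk):

```python
import hashlib
from urllib.parse import urlencode

def gravatar_url(email_address, size=30, ssl_enabled=True, default='identicon'):
    base = ("https://secure.gravatar.com/avatar/" if ssl_enabled
            else "http://www.gravatar.com/avatar/")
    # hash of the lowercased address, then query parameters
    digest = hashlib.md5(email_address.lower().encode('utf-8')).hexdigest()
    return base + digest + '?' + urlencode({'d': default, 's': str(size)})

print(gravatar_url('marcink@example.com', size=64))
```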
@@ -370,7 +370,7 b' def safe_unicode(str):'
370 | 370 | """safe unicode function. In case of UnicodeDecode error we try to return
371 | 371 | unicode with errors replace, if this failes we return unicode with
372 | 372 | string_escape decoding """
373 |     |
    | 373 |
374 | 374 | try:
375 | 375 | u_str = unicode(str)
376 | 376 | except UnicodeDecodeError:
@@ -379,5 +379,5 b' def safe_unicode(str):'
379 | 379 | except UnicodeDecodeError:
380 | 380 | #incase we have a decode error just represent as byte string
381 | 381 | u_str = unicode(str(str).encode('string_escape'))
382 |     |
    | 382 |
383 | 383 | return u_str
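The decode strategy is easiest to read flattened out; a sketch of the same three-step fallback under Python 2 semantics (the middle step is paraphrased from the docstring's "errors replace" wording, since its exact code is not in this hunk):

```python
def safe_unicode(s):
    # 1) plain unicode(); 2) decode with errors replaced (assumed utf-8);
    # 3) last resort: an escaped byte string, which always decodes.
    try:
        return unicode(s)
    except UnicodeDecodeError:
        try:
            return unicode(str(s), 'utf-8', 'replace')
        except UnicodeDecodeError:
            return unicode(str(s).encode('string_escape'))
```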
@@ -1,4 +1,10 b''
  | 1  | import os
  | 2  | import sys
1 | 3  | from os.path import dirname as dn, join as jn
  | 4  |
  | 5  | #to get the rhodecode import
  | 6  | sys.path.append(dn(dn(dn(os.path.realpath(__file__)))))
  | 7  |
2 | 8  | from rhodecode.config.environment import load_environment
3 | 9  | from rhodecode.model.hg import HgModel
4 | 10 | from shutil import rmtree
@@ -9,15 +15,10 b' from whoosh.analysis import RegexTokeniz'
9  | 15 | from whoosh.fields import TEXT, ID, STORED, Schema, FieldType
10 | 16 | from whoosh.index import create_in, open_dir
11 | 17 | from whoosh.formats import Characters
12 |    | from whoosh.highlight import highlight, SimpleFragmenter, HtmlFormatter
   | 18 | from whoosh.highlight import highlight, SimpleFragmenter, HtmlFormatter
13 | 19 |
14 |    | import os
15 |    | import sys
16 | 20 | import traceback
17 | 21 |
18 |    | #to get the rhodecode import
19 |    | sys.path.append(dn(dn(dn(os.path.realpath(__file__)))))
20 |    |
21 | 22 |
22 | 23 | #LOCATION WE KEEP THE INDEX
23 | 24 | IDX_LOCATION = jn(dn(dn(dn(dn(os.path.abspath(__file__))))), 'data', 'index')
@@ -45,9 +46,62 b' SCHEMA = Schema(owner=TEXT(),'
45 | 46 |
46 | 47 |
47 | 48 | IDX_NAME = 'HG_INDEX'
48 |    | FORMATTER = HtmlFormatter('span', between='\n<span class="break">...</span>\n')
   | 49 | FORMATTER = HtmlFormatter('span', between='\n<span class="break">...</span>\n')
49 | 50 | FRAGMENTER = SimpleFragmenter(200)
50 |    |
   | 51 |
   | 52 | from paste.script import command
   | 53 | import ConfigParser
   | 54 |
   | 55 | class MakeIndex(command.Command):
   | 56 |
   | 57 | max_args = 1
   | 58 | min_args = 1
   | 59 |
   | 60 | usage = "CONFIG_FILE"
   | 61 | summary = "Creates index for full text search given configuration file"
   | 62 | group_name = "Whoosh indexing"
   | 63 |
   | 64 | parser = command.Command.standard_parser(verbose=True)
   | 65 | # parser.add_option('--repo-location',
   | 66 | # action='store',
   | 67 | # dest='repo_location',
   | 68 | # help="Specifies repositories location to index",
   | 69 | # )
   | 70 | parser.add_option('-f',
   | 71 | action='store_true',
   | 72 | dest='full_index',
   | 73 | help="Specifies that index should be made full i.e"
   | 74 | " destroy old and build from scratch",
   | 75 | default=False)
   | 76 | def command(self):
   | 77 | config_name = self.args[0]
   | 78 |
   | 79 | p = config_name.split('/')
   | 80 | if len(p) == 1:
   | 81 | root = '.'
   | 82 | else:
   | 83 | root = '/'.join(p[:-1])
   | 84 | print root
   | 85 | config = ConfigParser.ConfigParser({'here':root})
   | 86 | config.read(config_name)
   | 87 | print dict(config.items('app:main'))['index_dir']
   | 88 | index_location = dict(config.items('app:main'))['index_dir']
   | 89 | #return
   | 90 |
   | 91 | #=======================================================================
   | 92 | # WHOOSH DAEMON
   | 93 | #=======================================================================
   | 94 | from rhodecode.lib.pidlock import LockHeld, DaemonLock
   | 95 | from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
   | 96 | try:
   | 97 | l = DaemonLock()
   | 98 | WhooshIndexingDaemon(index_location=index_location)\
   | 99 | .run(full_index=self.options.full_index)
   | 100 | l.release()
   | 101 | except LockHeld:
   | 102 | sys.exit(1)
   | 103 |
   | 104 |
51 | 105 | class ResultWrapper(object):
52 | 106 | def __init__(self, search_type, searcher, matcher, highlight_items):
53 | 107 | self.search_type = search_type
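The interesting bit of `MakeIndex.command` is how it resolves `index_dir` from the paster .ini: seeding `ConfigParser` with `{'here': root}` lets `%(here)s` interpolation in the file expand relative to the config's own directory. Isolated, using `os.path.dirname` instead of the manual split (Python 2, as in the diff):

```python
import os
import ConfigParser  # Python 2 stdlib, matching the diff

def read_index_location(config_name):
    root = os.path.dirname(config_name) or '.'
    config = ConfigParser.ConfigParser({'here': root})
    config.read(config_name)
    # e.g. "index_dir = %(here)s/data/index" in the .ini resolves here
    return dict(config.items('app:main'))['index_dir']
```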
@@ -55,7 +109,7 b' class ResultWrapper(object):'
55 | 109 | self.matcher = matcher
56 | 110 | self.highlight_items = highlight_items
57 | 111 | self.fragment_size = 200 / 2
58 |    |
   | 112 |
59 | 113 | @LazyProperty
60 | 114 | def doc_ids(self):
61 | 115 | docs_id = []
@@ -64,8 +118,8 b' class ResultWrapper(object):'
64 | 118 | chunks = [offsets for offsets in self.get_chunks()]
65 | 119 | docs_id.append([docnum, chunks])
66 | 120 | self.matcher.next()
67 |    | return docs_id
68 |    |
   | 121 | return docs_id
   | 122 |
69 | 123 | def __str__(self):
70 | 124 | return '<%s at %s>' % (self.__class__.__name__, len(self.doc_ids))
71 | 125 |
@@ -91,32 +145,32 b' class ResultWrapper(object):'
91 | 145 | slice = []
92 | 146 | for docid in self.doc_ids[i:j]:
93 | 147 | slice.append(self.get_full_content(docid))
94 |     | return slice
95 |     |
    | 148 | return slice
    | 149 |
96 | 150 |
97 | 151 | def get_full_content(self, docid):
98 | 152 | res = self.searcher.stored_fields(docid[0])
99 | 153 | f_path = res['path'][res['path'].find(res['repository']) \
100 | 154 | + len(res['repository']):].lstrip('/')
101 |     |
    | 155 |
102 | 156 | content_short = self.get_short_content(res, docid[1])
103 | 157 | res.update({'content_short':content_short,
104 | 158 | 'content_short_hl':self.highlight(content_short),
105 | 159 | 'f_path':f_path})
106 |     |
107 |     | return res
108 |     |
    | 160 |
    | 161 | return res
    | 162 |
109 | 163 | def get_short_content(self, res, chunks):
110 |     |
    | 164 |
111 | 165 | return ''.join([res['content'][chunk[0]:chunk[1]] for chunk in chunks])
112 |     |
    | 166 |
113 | 167 | def get_chunks(self):
114 | 168 | """
115 | 169 | Smart function that implements chunking the content
116 | 170 | but not overlap chunks so it doesn't highlight the same
117 | 171 | close occurrences twice.
118 |     |
119 |     |
    | 172 | @param matcher:
    | 173 | @param size:
120 | 174 | """
121 | 175 | memory = [(0, 0)]
122 | 176 | for span in self.matcher.spans():
122 | 176 | for span in self.matcher.spans(): |
@@ -124,12 +178,12 b' class ResultWrapper(object):' | |||
|
124 | 178 | end = span.endchar or 0 |
|
125 | 179 | start_offseted = max(0, start - self.fragment_size) |
|
126 | 180 | end_offseted = end + self.fragment_size |
|
127 | ||
|
181 | ||
|
128 | 182 | if start_offseted < memory[-1][1]: |
|
129 | 183 | start_offseted = memory[-1][1] |
|
130 |
memory.append((start_offseted, end_offseted,)) |
|
|
131 |
yield (start_offseted, end_offseted,) |
|
|
132 | ||
|
184 | memory.append((start_offseted, end_offseted,)) | |
|
185 | yield (start_offseted, end_offseted,) | |
|
186 | ||
|
133 | 187 | def highlight(self, content, top=5): |
|
134 | 188 | if self.search_type != 'content': |
|
135 | 189 | return '' |
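The offset logic above is a sliding-window merge: each match span is widened by `fragment_size` on both sides, and a window whose start falls inside the previous window is clamped to its end, so the same text is never highlighted twice. In isolation:

```python
def merge_spans(spans, fragment_size=100):
    """Widen each (start, end) by fragment_size and clamp overlaps,
    mirroring get_chunks above (fragment_size is 200 / 2 there)."""
    memory = [(0, 0)]
    for start, end in spans:
        start_offseted = max(0, start - fragment_size)
        end_offseted = end + fragment_size
        if start_offseted < memory[-1][1]:
            start_offseted = memory[-1][1]  # clamp into previous window
        memory.append((start_offseted, end_offseted))
        yield (start_offseted, end_offseted)

print(list(merge_spans([(120, 130), (150, 160)])))
# -> [(20, 230), (230, 260)]
```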
@@ -139,4 +193,4 b' class ResultWrapper(object):'
139 | 193 | fragmenter=FRAGMENTER,
140 | 194 | formatter=FORMATTER,
141 | 195 | top=top)
142 |     | return hl
    | 196 | return hl
@@ -32,12 +32,12 b' from os.path import join as jn'
32 | 32 | project_path = dn(dn(dn(dn(os.path.realpath(__file__)))))
33 | 33 | sys.path.append(project_path)
34 | 34 |
35 |    | from rhodecode.lib.pidlock import LockHeld, DaemonLock
   | 35 |
36 | 36 | from rhodecode.model.hg import HgModel
37 | 37 | from rhodecode.lib.helpers import safe_unicode
38 | 38 | from whoosh.index import create_in, open_dir
39 | 39 | from shutil import rmtree
40 |    | from rhodecode.lib.indexers import INDEX_EXTENSIONS,
   | 40 | from rhodecode.lib.indexers import INDEX_EXTENSIONS, SCHEMA, IDX_NAME
41 | 41 |
42 | 42 | from time import mktime
43 | 43 | from vcs.exceptions import ChangesetError, RepositoryError
@@ -61,25 +61,37 b' ch.setFormatter(formatter)'
61 | 61 | # add ch to logger
62 | 62 | log.addHandler(ch)
63 | 63 |
64 |    | def scan_paths(root_location):
65 |    | return HgModel.
   | 64 | def get_repos_location():
   | 65 | return HgModel.get_repos_location()
   | 66 |
66 | 67 |
67 | 68 | class WhooshIndexingDaemon(object):
68 | 69 | """
69 | 70 | Deamon for atomic jobs
70 | 71 | """
71 | 72 |
72 |    | def __init__(self, indexname='HG_INDEX',
   | 73 | def __init__(self, indexname='HG_INDEX', index_location=None,
   | 74 | repo_location=None):
73 | 75 | self.indexname = indexname
   | 76 |
   | 77 | self.index_location = index_location
   | 78 | if not index_location:
   | 79 | raise Exception('You have to provide index location')
   | 80 |
74 | 81 | self.repo_location = repo_location
75 |    | self.repo_paths = scan_paths(self.repo_location)
   | 82 | if not repo_location:
   | 83 | raise Exception('You have to provide repositories location')
   | 84 |
   | 85 |
   | 86 |
   | 87 | self.repo_paths = HgModel.repo_scan('/', self.repo_location, None, True)
76 | 88 | self.initial = False
77 |    | if not os.path.isdir(
78 |    | os.mkdir(
   | 89 | if not os.path.isdir(self.index_location):
   | 90 | os.mkdir(self.index_location)
79 | 91 | log.info('Cannot run incremental index since it does not'
80 | 92 | ' yet exist running full build')
81 | 93 | self.initial = True
82 |    |
   | 94 |
83 | 95 | def get_paths(self, repo):
84 | 96 | """
85 | 97 | recursive walk in root dir and return a set of all path in that dir
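With the constructor change, both locations are now required; a call site looks roughly like this (paths are hypothetical, and the constructor raises if either argument is missing):

```python
daemon = WhooshIndexingDaemon(index_location='/srv/rhodecode/data/index',
                              repo_location='/srv/repos')
daemon.run(full_index=True)  # force a rebuild; omit for incremental update
```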
@@ -87,27 +99,25 b' class WhooshIndexingDaemon(object):'
87 | 99  | """
88 | 100 | index_paths_ = set()
89 | 101 | try:
90 |     | tip = repo.get_changeset()
91 |     |
92 |     | for topnode, dirs, files in tip.walk('/'):
    | 102 | for topnode, dirs, files in repo.walk('/', 'tip'):
93 | 103 | for f in files:
94 | 104 | index_paths_.add(jn(repo.path, f.path))
95 | 105 | for dir in dirs:
96 | 106 | for f in files:
97 | 107 | index_paths_.add(jn(repo.path, f.path))
98 |     |
    | 108 |
99 | 109 | except RepositoryError:
100 | 110 | pass
101 |     | return index_paths_
102 |     |
    | 111 | return index_paths_
    | 112 |
103 | 113 | def get_node(self, repo, path):
104 | 114 | n_path = path[len(repo.path) + 1:]
105 | 115 | node = repo.get_changeset().get_node(n_path)
106 | 116 | return node
107 |     |
    | 117 |
108 | 118 | def get_node_mtime(self, node):
109 | 119 | return mktime(node.last_changeset.date.timetuple())
110 |     |
    | 120 |
111 | 121 | def add_doc(self, writer, path, repo):
112 | 122 | """Adding doc to writer"""
113 | 123 | node = self.get_node(repo, path)
@@ -120,63 +130,63 b' class WhooshIndexingDaemon(object):'
120 | 130 | log.debug(' >> %s' % path)
121 | 131 | #just index file name without it's content
122 | 132 | u_content = u''
123 |     |
    | 133 |
124 | 134 | writer.add_document(owner=unicode(repo.contact),
125 | 135 | repository=safe_unicode(repo.name),
126 | 136 | path=safe_unicode(path),
127 | 137 | content=u_content,
128 | 138 | modtime=self.get_node_mtime(node),
129 |     | extension=node.extension)
    | 139 | extension=node.extension)
    | 140 |
130 | 141 |
131 |     |
132 | 142 | def build_index(self):
133 |     | if os.path.exists(
    | 143 | if os.path.exists(self.index_location):
134 | 144 | log.debug('removing previous index')
135 |     | rmtree(IDX_LOCATION)
136 |     |
137 |     | if not os.path.exists(
138 |     | os.mkdir(
139 |     |
140 |     | idx = create_in(
    | 145 | rmtree(self.index_location)
    | 146 |
    | 147 | if not os.path.exists(self.index_location):
    | 148 | os.mkdir(self.index_location)
    | 149 |
    | 150 | idx = create_in(self.index_location, SCHEMA, indexname=IDX_NAME)
141 | 151 | writer = idx.writer()
142 |     |
    | 152 |
143 | 153 | for cnt, repo in enumerate(self.repo_paths.values()):
144 | 154 | log.debug('building index @ %s' % repo.path)
145 |     |
    | 155 |
146 | 156 | for idx_path in self.get_paths(repo):
147 | 157 | self.add_doc(writer, idx_path, repo)
148 |     |
    | 158 |
149 | 159 | log.debug('>> COMMITING CHANGES <<')
150 | 160 | writer.commit(merge=True)
151 | 161 | log.debug('>>> FINISHED BUILDING INDEX <<<')
152 |     |
153 |     |
    | 162 |
    | 163 |
154 | 164 | def update_index(self):
155 | 165 | log.debug('STARTING INCREMENTAL INDEXING UPDATE')
156 |     |
157 |     | idx = open_dir(
    | 166 |
    | 167 | idx = open_dir(self.index_location, indexname=self.indexname)
158 | 168 | # The set of all paths in the index
159 | 169 | indexed_paths = set()
160 | 170 | # The set of all paths we need to re-index
161 | 171 | to_index = set()
162 |     |
    | 172 |
163 | 173 | reader = idx.reader()
164 | 174 | writer = idx.writer()
165 |     |
    | 175 |
166 | 176 | # Loop over the stored fields in the index
167 | 177 | for fields in reader.all_stored_fields():
168 | 178 | indexed_path = fields['path']
169 | 179 | indexed_paths.add(indexed_path)
170 |     |
    | 180 |
171 | 181 | repo = self.repo_paths[fields['repository']]
172 |     |
    | 182 |
173 | 183 | try:
174 | 184 | node = self.get_node(repo, indexed_path)
175 | 185 | except ChangesetError:
176 | 186 | # This file was deleted since it was indexed
177 | 187 | log.debug('removing from index %s' % indexed_path)
178 | 188 | writer.delete_by_term('path', indexed_path)
179 |     |
    | 189 |
180 | 190 | else:
181 | 191 | # Check if this file was changed since it was indexed
182 | 192 | indexed_time = fields['modtime']
@@ -187,7 +197,7 b' class WhooshIndexingDaemon(object):'
187 | 197 | log.debug('adding to reindex list %s' % indexed_path)
188 | 198 | writer.delete_by_term('path', indexed_path)
189 | 199 | to_index.add(indexed_path)
190 |     |
    | 200 |
191 | 201 | # Loop over the files in the filesystem
192 | 202 | # Assume we have a function that gathers the filenames of the
193 | 203 | # documents to be indexed
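Stripped of the Whoosh specifics, the incremental pass is a three-way diff between stored mtimes and the filesystem; a sketch of the same decision rules:

```python
def plan_incremental_update(stored, current):
    """stored: {path: indexed_mtime}; current: {path: mtime_on_disk}.
    Returns (to_delete, to_index) following the rules in update_index."""
    to_delete, to_index = set(), set()
    for path, indexed_time in stored.items():
        if path not in current:              # deleted since it was indexed
            to_delete.add(path)
        elif current[path] > indexed_time:   # changed since it was indexed
            to_delete.add(path)
            to_index.add(path)
    for path in current:
        if path not in stored:               # brand new file
            to_index.add(path)
    return to_delete, to_index
```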
@@ -198,51 +208,14 b' class WhooshIndexingDaemon(object):'
198 | 208 | # that wasn't indexed before. So index it!
199 | 209 | self.add_doc(writer, path, repo)
200 | 210 | log.debug('re indexing %s' % path)
201 |     |
    | 211 |
202 | 212 | log.debug('>> COMMITING CHANGES <<')
203 | 213 | writer.commit(merge=True)
204 | 214 | log.debug('>>> FINISHED REBUILDING INDEX <<<')
205 |     |
    | 215 |
206 | 216 | def run(self, full_index=False):
207 | 217 | """Run daemon"""
208 | 218 | if full_index or self.initial:
209 | 219 | self.build_index()
210 | 220 | else:
211 | 221 | self.update_index()
212 |     |
213 |     | if __name__ == "__main__":
214 |     | arg = sys.argv[1:]
215 |     | if len(arg) != 2:
216 |     | sys.stderr.write('Please specify indexing type [full|incremental]'
217 |     | 'and path to repositories as script args \n')
218 |     | sys.exit()
219 |     |
220 |     |
221 |     | if arg[0] == 'full':
222 |     | full_index = True
223 |     | elif arg[0] == 'incremental':
224 |     | # False means looking just for changes
225 |     | full_index = False
226 |     | else:
227 |     | sys.stdout.write('Please use [full|incremental]'
228 |     | ' as script first arg \n')
229 |     | sys.exit()
230 |     |
231 |     | if not os.path.isdir(arg[1]):
232 |     | sys.stderr.write('%s is not a valid path \n' % arg[1])
233 |     | sys.exit()
234 |     | else:
235 |     | if arg[1].endswith('/'):
236 |     | repo_location = arg[1] + '*'
237 |     | else:
238 |     | repo_location = arg[1] + '/*'
239 |     |
240 |     | try:
241 |     | l = DaemonLock()
242 |     | WhooshIndexingDaemon(repo_location=repo_location)\
243 |     | .run(full_index=full_index)
244 |     | l.release()
245 |     | reload(logging)
246 |     | except LockHeld:
247 |     | sys.exit(1)
248 |     |
@@ -16,24 +16,28 b''
16 | 16 | # along with this program; if not, write to the Free Software
17 | 17 | # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
18 | 18 | # MA 02110-1301, USA.
   | 19 | from UserDict import DictMixin
   | 20 | from mercurial import ui, config, hg
   | 21 | from mercurial.error import RepoError
   | 22 | from rhodecode.model import meta
   | 23 | from rhodecode.model.caching_query import FromCache
   | 24 | from rhodecode.model.db import Repository, User, RhodeCodeUi, RhodeCodeSettings, \
   | 25 | UserLog
   | 26 | from rhodecode.model.repo import RepoModel
   | 27 | from rhodecode.model.user import UserModel
   | 28 | from vcs.backends.base import BaseChangeset
   | 29 | from vcs.backends.git import GitRepository
   | 30 | from vcs.backends.hg import MercurialRepository
   | 31 | from vcs.utils.lazy import LazyProperty
   | 32 | import datetime
   | 33 | import logging
   | 34 | import os
19 | 35 |
20 | 36 | """
21 | 37 | Created on April 18, 2010
22 | 38 | Utilities for RhodeCode
23 | 39 | @author: marcink
24 | 40 | """
25 |    | from rhodecode.model.caching_query import FromCache
26 |    | from mercurial import ui, config, hg
27 |    | from mercurial.error import RepoError
28 |    | from rhodecode.model import meta
29 |    | from rhodecode.model.user import UserModel
30 |    | from rhodecode.model.repo import RepoModel
31 |    | from rhodecode.model.db import Repository, User, RhodeCodeUi, RhodeCodeSettings, UserLog
32 |    | from vcs.backends.base import BaseChangeset
33 |    | from vcs.utils.lazy import LazyProperty
34 |    | import logging
35 |    | import datetime
36 |    | import os
37 | 41 |
38 | 42 | log = logging.getLogger(__name__)
39 | 43 |
@@ -96,14 +100,30 b' def action_logger(user, action, repo, ip'
96 | 100 | sa.rollback()
97 | 101 | log.error('could not log user action:%s', str(e))
98 | 102 |
99 |     | def check_repo_dir(paths):
100 |     | repos_path = paths[0][1].split('/')
101 |     | if repos_path[-1] in ['*', '**']:
102 |     | repos_path = repos_path[:-1]
103 |     | if repos_path[0] != '/':
104 |     | repos_path[0] = '/'
105 |     | if not os.path.isdir(os.path.join(*repos_path)):
106 |     | raise Exception('Not a valid repository in %s' % paths[0][1])
    | 103 | def get_repos(path, recursive=False, initial=False):
    | 104 | """
    | 105 | Scans given path for repos and return (name,(type,path)) tuple
    | 106 | :param prefix:
    | 107 | :param path:
    | 108 | :param recursive:
    | 109 | :param initial:
    | 110 | """
    | 111 | from vcs.utils.helpers import get_scm
    | 112 | from vcs.exceptions import VCSError
    | 113 | scm = get_scm(path)
    | 114 | if scm:
    | 115 | raise Exception('The given path %s should not be a repository got %s',
    | 116 | path, scm)
    | 117 |
    | 118 | for dirpath in os.listdir(path):
    | 119 | try:
    | 120 | yield dirpath, get_scm(os.path.join(path, dirpath))
    | 121 | except VCSError:
    | 122 | pass
    | 123 |
    | 124 | if __name__ == '__main__':
    | 125 | get_repos('', '/home/marcink/workspace-python')
    | 126 |
107 | 127 |
108 | 128 | def check_repo_fast(repo_name, base_path):
109 | 129 | if os.path.isdir(os.path.join(base_path, repo_name)):return False
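So `get_repos` replaces the old glob-suffix `check_repo_dir` convention: it refuses to run inside a repository, then yields `(name, (scm_type, path))` for each immediate child that the vcs library's `get_scm` recognizes. Hypothetical usage (the directory is an assumption):

```python
for name, (scm_type, path) in get_repos('/srv/repos'):
    print(name, scm_type)  # e.g. "rhodecode hg", "vcs git"
```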
@@ -231,8 +251,6 b" def make_ui(read_from='file', path=None,"
231 | 251 | for k, v in cfg.items(section):
232 | 252 | baseui.setconfig(section, k, v)
233 | 253 | log.debug('settings ui from file[%s]%s:%s', section, k, v)
234 |     | if checkpaths:check_repo_dir(cfg.items('paths'))
235 |     |
236 | 254 |
237 | 255 | elif read_from == 'db':
238 | 256 | hg_ui = get_hg_ui_cached()
@@ -284,7 +302,7 b' class EmptyChangeset(BaseChangeset):'
284 | 302 | @LazyProperty
285 | 303 | def raw_id(self):
286 | 304 | """
287 |     | Returns raw string identifing this changeset, useful for web
    | 305 | Returns raw string identifying this changeset, useful for web
288 | 306 | representation.
289 | 307 | """
290 | 308 | return '0' * 40
@@ -308,16 +326,21 b' def repo2db_mapper(initial_repo_list, re'
308 | 326 | """
309 | 327 |
310 | 328 | sa = meta.Session()
    | 329 | rm = RepoModel(sa)
311 | 330 | user = sa.query(User).filter(User.admin == True).first()
312 | 331 |
313 |     | rm = RepoModel()
    | 332 | for name, repo in initial_repo_list.items():
    | 333 | if not rm.get(name, cache=False):
    | 334 | log.info('repository %s not found creating default', name)
314 | 335 |
315 |     | for name, repo in initial_repo_list.items():
316 |     | if not RepoModel(sa).get(name, cache=False):
317 |     | log.info('repository %s not found creating default', name)
    | 336 | if isinstance(repo, MercurialRepository):
    | 337 | repo_type = 'hg'
    | 338 | if isinstance(repo, GitRepository):
    | 339 | repo_type = 'git'
318 | 340 |
319 | 341 | form_data = {
320 | 342 | 'repo_name':name,
    | 343 | 'repo_type':repo_type,
321 | 344 | 'description':repo.description if repo.description != 'unknown' else \
322 | 345 | 'auto description for %s' % name,
323 | 346 | 'private':False
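The isinstance dispatch above is the source of the new `repo_type` column; isolated, with an explicit fallback added purely for illustration (the diff itself leaves `repo_type` unset for unknown backends):

```python
from vcs.backends.hg import MercurialRepository
from vcs.backends.git import GitRepository

def detect_repo_type(repo):
    if isinstance(repo, MercurialRepository):
        return 'hg'
    if isinstance(repo, GitRepository):
        return 'git'
    return 'unknown'  # not in the diff; added so the value is always defined
```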
@@ -335,7 +358,6 b' def repo2db_mapper(initial_repo_list, re'
335 | 358 |
336 | 359 | meta.Session.remove()
337 | 360 |
338 |     | from UserDict import DictMixin
339 | 361 |
340 | 362 | class OrderedDict(dict, DictMixin):
341 | 363 |
@@ -22,9 +22,9 b' class RhodeCodeUi(Base):'
22 | 22 | ui_key = Column("ui_key", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
23 | 23 | ui_value = Column("ui_value", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
24 | 24 | ui_active = Column("ui_active", BOOLEAN(), nullable=True, unique=None, default=True)
25 |    |
26 |    |
27 |    | class User(Base):
   | 25 |
   | 26 |
   | 27 | class User(Base):
28 | 28 | __tablename__ = 'users'
29 | 29 | __table_args__ = (UniqueConstraint('username'), UniqueConstraint('email'), {'useexisting':True})
30 | 30 | user_id = Column("user_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
@@ -36,21 +36,21 b' class User(Base):'
36 | 36 | lastname = Column("lastname", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
37 | 37 | email = Column("email", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
38 | 38 | last_login = Column("last_login", DATETIME(timezone=False), nullable=True, unique=None, default=None)
39 |    |
   | 39 |
40 | 40 | user_log = relation('UserLog')
41 | 41 | user_perms = relation('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id")
42 |    |
   | 42 |
43 | 43 | @LazyProperty
44 | 44 | def full_contact(self):
45 | 45 | return '%s %s <%s>' % (self.name, self.lastname, self.email)
46 |    |
   | 46 |
47 | 47 | def __repr__(self):
48 | 48 | return "<User('id:%s:%s')>" % (self.user_id, self.username)
49 |    |
   | 49 |
50 | 50 | def update_lastlogin(self):
51 | 51 | """Update user lastlogin"""
52 | 52 | import datetime
53 |    |
   | 53 |
54 | 54 | try:
55 | 55 | session = Session.object_session(self)
56 | 56 | self.last_login = datetime.datetime.now()
@@ -58,48 +58,49 b' class User(Base):'
58 | 58 | session.commit()
59 | 59 | log.debug('updated user %s lastlogin', self.username)
60 | 60 | except Exception:
61 |    | session.rollback()
62 |    |
63 |    |
64 |    | class UserLog(Base):
   | 61 | session.rollback()
   | 62 |
   | 63 |
   | 64 | class UserLog(Base):
65 | 65 | __tablename__ = 'user_logs'
66 | 66 | __table_args__ = {'useexisting':True}
67 | 67 | user_log_id = Column("user_log_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
68 | 68 | user_id = Column("user_id", INTEGER(), ForeignKey(u'users.user_id'), nullable=False, unique=None, default=None)
69 | 69 | repository_id = Column("repository_id", INTEGER(length=None, convert_unicode=False, assert_unicode=None), ForeignKey(u'repositories.repo_id'), nullable=False, unique=None, default=None)
70 | 70 | repository_name = Column("repository_name", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
71 |    | user_ip = Column("user_ip", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
   | 71 | user_ip = Column("user_ip", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
72 | 72 | action = Column("action", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
73 | 73 | action_date = Column("action_date", DATETIME(timezone=False), nullable=True, unique=None, default=None)
74 | 74 | revision = Column('revision', TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
75 |    |
   | 75 |
76 | 76 | user = relation('User')
77 | 77 | repository = relation('Repository')
78 |    |
   | 78 |
79 | 79 | class Repository(Base):
80 | 80 | __tablename__ = 'repositories'
81 | 81 | __table_args__ = (UniqueConstraint('repo_name'), {'useexisting':True},)
82 | 82 | repo_id = Column("repo_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
83 | 83 | repo_name = Column("repo_name", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
   | 84 | repo_type = Column("repo_type", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
84 | 85 | user_id = Column("user_id", INTEGER(), ForeignKey(u'users.user_id'), nullable=False, unique=False, default=None)
85 | 86 | private = Column("private", BOOLEAN(), nullable=True, unique=None, default=None)
86 | 87 | description = Column("description", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
87 | 88 | fork_id = Column("fork_id", INTEGER(), ForeignKey(u'repositories.repo_id'), nullable=True, unique=False, default=None)
88 |    |
   | 89 |
89 | 90 | user = relation('User')
90 | 91 | fork = relation('Repository', remote_side=repo_id)
91 | 92 | repo_to_perm = relation('RepoToPerm', cascade='all')
92 |    |
   | 93 |
93 | 94 | def __repr__(self):
94 | 95 | return "<Repository('id:%s:%s')>" % (self.repo_id, self.repo_name)
95 |    |
   | 96 |
96 | 97 | class Permission(Base):
97 | 98 | __tablename__ = 'permissions'
98 | 99 | __table_args__ = {'useexisting':True}
99 | 100 | permission_id = Column("permission_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
100 | 101 | permission_name = Column("permission_name", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
101 | 102 | permission_longname = Column("permission_longname", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
102 |     |
    | 103 |
103 | 104 | def __repr__(self):
104 | 105 | return "<Permission('%s:%s')>" % (self.permission_id, self.permission_name)
105 | 106 |
@@ -109,8 +110,8 b' class RepoToPerm(Base):'
109 | 110 | repo_to_perm_id = Column("repo_to_perm_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
110 | 111 | user_id = Column("user_id", INTEGER(), ForeignKey(u'users.user_id'), nullable=False, unique=None, default=None)
111 | 112 | permission_id = Column("permission_id", INTEGER(), ForeignKey(u'permissions.permission_id'), nullable=False, unique=None, default=None)
112 |     | repository_id = Column("repository_id", INTEGER(), ForeignKey(u'repositories.repo_id'), nullable=False, unique=None, default=None)
113 |     |
    | 113 | repository_id = Column("repository_id", INTEGER(), ForeignKey(u'repositories.repo_id'), nullable=False, unique=None, default=None)
    | 114 |
114 | 115 | user = relation('User')
115 | 116 | permission = relation('Permission')
116 | 117 | repository = relation('Repository')
@@ -121,7 +122,7 b' class UserToPerm(Base):'
121 | 122 | user_to_perm_id = Column("user_to_perm_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
122 | 123 | user_id = Column("user_id", INTEGER(), ForeignKey(u'users.user_id'), nullable=False, unique=None, default=None)
123 | 124 | permission_id = Column("permission_id", INTEGER(), ForeignKey(u'permissions.permission_id'), nullable=False, unique=None, default=None)
124 |     |
    | 125 |
125 | 126 | user = relation('User')
126 | 127 | permission = relation('Permission')
127 | 128 |
@@ -134,6 +135,6 b' class Statistics(Base):'
134 | 135 | commit_activity = Column("commit_activity", BLOB(), nullable=False)#JSON data
135 | 136 | commit_activity_combined = Column("commit_activity_combined", BLOB(), nullable=False)#JSON data
136 | 137 | languages = Column("languages", BLOB(), nullable=False)#JSON data
137 |     |
    | 138 |
138 | 139 | repository = relation('Repository')
139 | 140 |
@@ -194,16 +194,12 b' class ValidSettings(formencode.validator'
194 | 194 |
195 | 195 | class ValidPath(formencode.validators.FancyValidator):
196 | 196 | def to_python(self, value, state):
197 |     | isdir = os.path.isdir(value.replace('*', ''))
198 |     | if (value.endswith('/*') or value.endswith('/**')) and isdir:
199 |     | return value
200 |     | elif not isdir:
    | 197 |
    | 198 | if not os.path.isdir(value):
201 | 199 | msg = _('This is not a valid path')
202 |     | else:
203 |     | msg = _('You need to specify * or ** at the end of path (ie. /tmp/*)')
204 |     |
205 |     | raise formencode.Invalid(msg, value, state,
    | 200 | raise formencode.Invalid(msg, value, state,
206 | 201 | error_dict={'paths_root_path':msg})
    | 202 | return value
207 | 203 |
208 | 204 | def UniqSystemEmail(old_data):
209 | 205 | class _UniqSystemEmail(formencode.validators.FancyValidator):
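With the `*`/`**` suffix convention gone, `ValidPath` reduces to a plain directory check that reports failures under the `paths_root_path` key. A hypothetical direct call, for readers unfamiliar with formencode:

```python
import formencode

# Validate a repositories root path as the settings form would.
try:
    path = ValidPath().to_python('/srv/repos', None)
except formencode.Invalid as e:
    print(e.error_dict['paths_root_path'])
```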
@@ -24,7 +24,6 b' Model for RhodeCode'
24 | 24 | """
25 | 25 | from beaker.cache import cache_region
26 | 26 | from mercurial import ui
27 |    | from mercurial.hgweb.hgwebdir_mod import findrepos
28 | 27 | from rhodecode.lib import helpers as h
29 | 28 | from rhodecode.lib.utils import invalidate_cache
30 | 29 | from rhodecode.lib.auth import HasRepoPermissionAny
@@ -33,12 +32,12 b' from rhodecode.model.db import Repositor'
33 | 32 | from sqlalchemy.orm import joinedload
34 | 33 | from vcs.exceptions import RepositoryError, VCSError
35 | 34 | import logging
36 |    | import os
37 | 35 | import sys
38 | 36 | log = logging.getLogger(__name__)
39 | 37 |
40 | 38 | try:
41 | 39 | from vcs.backends.hg import MercurialRepository
   | 40 | from vcs.backends.git import GitRepository
42 | 41 | except ImportError:
43 | 42 | sys.stderr.write('You have to import vcs module')
44 | 43 | raise Exception('Unable to import vcs')
@@ -47,7 +46,7 b' def _get_repos_cached_initial(app_global'
47 | 46 | """return cached dict with repos
48 | 47 | """
49 | 48 | g = app_globals
50 |    | return HgModel.repo_scan(
   | 49 | return HgModel().repo_scan(g.paths[0][1], g.baseui, initial)
51 | 50 |
52 | 51 | @cache_region('long_term', 'cached_repo_list')
53 | 52 | def _get_repos_cached():
@@ -55,7 +54,7 b' def _get_repos_cached():'
55 | 54 | """
56 | 55 | log.info('getting all repositories list')
57 | 56 | from pylons import app_globals as g
58 |    | return HgModel.repo_scan(
   | 57 | return HgModel().repo_scan(g.paths[0][1], g.baseui)
59 | 58 |
60 | 59 | @cache_region('super_short_term', 'cached_repos_switcher_list')
61 | 60 | def _get_repos_switcher_cached(cached_repo_list):
@@ -73,42 +72,34 b' def _full_changelog_cached(repo_name):'
73 | 72 | return list(reversed(list(HgModel().get_repo(repo_name))))
74 | 73 |
75 | 74 | class HgModel(object):
76 |    | """Mercurial Model
   | 75 | """
   | 76 | Mercurial Model
77 | 77 | """
78 | 78 |
79 |    | def __init__(self):
80 |    |
   | 79 | def __init__(self, sa=None):
   | 80 | if not sa:
   | 81 | self.sa = meta.Session()
   | 82 | else:
   | 83 | self.sa = sa
81 | 84 |
82 |    | @staticmethod
83 |    | def repo_scan(repos_prefix, repos_path, baseui, initial=False):
   | 85 | def repo_scan(self, repos_path, baseui, initial=False):
84 | 86 | """
85 | 87 | Listing of repositories in given path. This path should not be a
86 | 88 | repository itself. Return a dictionary of repository objects
87 |    | :param repos_path: path to directory it could take syntax with
88 |    | * or ** for deep recursive displaying repositories
   | 89 |
   | 90 | :param repos_path: path to directory containing repositories
   | 91 | :param baseui
   | 92 | :param initial: initial scann
89 | 93 | """
90 |    | sa = meta.Session()
91 |    | def check_repo_dir(path):
92 |    | """Checks the repository
93 |    | :param path:
94 |    | """
95 |    | repos_path = path.split('/')
96 |    | if repos_path[-1] in ['*', '**']:
97 |    | repos_path = repos_path[:-1]
98 |    | if repos_path[0] != '/':
99 |    | repos_path[0] = '/'
100 |   | if not os.path.isdir(os.path.join(*repos_path)):
101 |   | raise RepositoryError('Not a valid repository in %s' % path)
102 |   | if not repos_path.endswith('*'):
103 |   | raise VCSError('You need to specify * or ** at the end of path '
104 |   | 'for recursive scanning')
    | 94 | log.info('scanning for repositories in %s', repos_path)
105 | 95 |
106 |   | check_repo_dir(repos_path)
107 |   | log.info('scanning for repositories in %s', repos_path)
108 |   | repos = findrepos([(repos_prefix, repos_path)])
109 | 96 | if not isinstance(baseui, ui.ui):
110 | 97 | baseui = ui.ui()
111 | 98 |
    | 99 | from rhodecode.lib.utils import get_repos
    | 100 | repos = get_repos(repos_path)
    | 101 |
    | 102 |
112 | 103 | repos_list = {}
113 | 104 | for name, path in repos:
114 | 105 | try:
@@ -117,15 +108,19 b' class HgModel(object):'
117 | 108 | raise RepositoryError('Duplicate repository name %s found in'
118 | 109 | ' %s' % (name, path))
119 | 110 | else:
    | 111 | if path[0] == 'hg':
    | 112 | repos_list[name] = MercurialRepository(path[1], baseui=baseui)
    | 113 | repos_list[name].name = name
120 | 114 |
121 |     | repos_list[name] = MercurialRepository(path, baseui=baseui)
122 |     | repos_list[name]
    | 115 | if path[0] == 'git':
    | 116 | repos_list[name] = GitRepository(path[1])
    | 117 | repos_list[name].name = name
123 | 118 |
124 | 119 | dbrepo = None
125 | 120 | if not initial:
126 | 121 | #for initial scann on application first run we don't
127 | 122 | #have db repos yet.
128 |     | dbrepo = sa.query(Repository)\
    | 123 | dbrepo = self.sa.query(Repository)\
129 | 124 | .options(joinedload(Repository.fork))\
130 | 125 | .filter(Repository.repo_name == name)\
131 | 126 | .scalar()
@@ -137,16 +132,17 b' class HgModel(object):'
137 | 132 | if dbrepo.user:
138 | 133 | repos_list[name].contact = dbrepo.user.full_contact
139 | 134 | else:
140 |     | repos_list[name].contact = sa.query(User)\
    | 135 | repos_list[name].contact = self.sa.query(User)\
141 | 136 | .filter(User.admin == True).first().full_contact
142 | 137 | except OSError:
143 | 138 | continue
144 |     | meta.Session.remove()
    | 139 |
145 | 140 | return repos_list
146 | 141 |
147 | 142 | def get_repos(self):
148 | 143 | for name, repo in _get_repos_cached().items():
149 |     | if repo._get_hidden():
    | 144 |
    | 145 | if isinstance(repo, MercurialRepository) and repo._get_hidden():
150 | 146 | #skip hidden web repository
151 | 147 | continue
152 | 148 |
@@ -13,7 +13,7 b''
13 | 13 | </tr>
14 | 14 | %for cnt,cs in enumerate(c.repo_changesets):
15 | 15 | <tr class="parity${cnt%2}">
16 |    | <td>${h.age(cs.
   | 16 | <td>${h.age(cs.date)} - ${h.rfc822date_notz(cs.date)} </td>
17 | 17 | <td title="${cs.author}">${h.person(cs.author)}</td>
18 | 18 | <td>r${cs.revision}:${cs.short_id}</td>
19 | 19 | <td>