@@ -3,6 +3,12 @@
 Changelog
 =========
 
+1.1.0 (**XXXX-XX-XX**)
+----------------------
+- git support
+- performance upgrade for cached repos list
+
+
 1.0.0 (**2010-10-xx**)
 ----------------------
 
@@ -24,7 +24,7 @@ versioning implementation: http://semver
 @author: marcink
 """
 
-VERSION = (1,
+VERSION = (1, 1, 0, 'beta')
 
 __version__ = '.'.join((str(each) for each in VERSION[:4]))
 
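For reference, the bumped tuple renders through the existing join expression as follows; this is just the line above evaluated by hand, not new behaviour:

    VERSION = (1, 1, 0, 'beta')
    __version__ = '.'.join((str(each) for each in VERSION[:4]))
    # __version__ == '1.1.0.beta'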
@@ -53,7 +53,7 @@ def load_environment(global_conf, app_co
     if test:
         from rhodecode.lib.utils import create_test_env, create_test_index
         create_test_env('/tmp', config)
-        create_test_index('/tmp
+        create_test_index('/tmp', True)
 
     #MULTIPLE DB configs
     # Setup the SQLAlchemy database engine
@@ -28,4 +28,4 @@ class Globals(object):
     @LazyProperty
     def base_path(self):
         if self.baseui:
-            return self.paths[0][1]
+            return self.paths[0][1]
@@ -8,7 +8,11 @@ from rhodecode.lib.smtp_mailer import Sm
 from rhodecode.lib.utils import OrderedDict
 from time import mktime
 from vcs.backends.hg import MercurialRepository
+from vcs.backends.git import GitRepository
+import os
 import traceback
+from vcs.backends import get_repo
+from vcs.utils.helpers import get_scm
 
 try:
     import json
@@ -95,8 +99,9 @@ def get_commits_stats(repo_name, ts_min_
 
     commits_by_day_author_aggregate = {}
     commits_by_day_aggregate = {}
     repos_path = get_hg_ui_settings()['paths_root_path']
-
+    p = os.path.join(repos_path, repo_name)
+    repo = get_repo(get_scm(p)[0], p)
 
     skip_date_limit = True
     parse_limit = 250 #limit for single task changeset parsing optimal for
@@ -305,8 +310,10 @@ def __get_codes_stats(repo_name):
     's', 'sh', 'tpl', 'txt', 'vim', 'wss', 'xhtml', 'xml', 'xsl', 'xslt', 'yaws']
 
 
     repos_path = get_hg_ui_settings()['paths_root_path']
-
+    p = os.path.join(repos_path, repo_name)
+    repo = get_repo(get_scm(p)[0], p)
+
     tip = repo.get_changeset()
 
     code_stats = {}
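The statistics tasks above stop hard-coding MercurialRepository and instead resolve the backend through the vcs helpers, so the same code can serve Mercurial and Git repositories. A minimal sketch of that lookup pattern; the wrapper name and paths are illustrative, while the get_scm/get_repo calls mirror the lines in the hunks above:

    import os
    from vcs.backends import get_repo
    from vcs.utils.helpers import get_scm

    def open_any_repo(repos_path, repo_name):
        # get_scm() returns a (backend_alias, path) pair such as ('hg', ...);
        # get_repo() maps that alias to the matching backend class.
        p = os.path.join(repos_path, repo_name)
        return get_repo(get_scm(p)[0], p)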
@@ -162,7 +162,7 @@ class DbManage(object):
         paths = RhodeCodeUi()
         paths.ui_section = 'paths'
         paths.ui_key = '/'
-        paths.ui_value =
+        paths.ui_value = path
 
 
         hgsettings1 = RhodeCodeSettings()
@@ -323,7 +323,7 @@ flash = _Flash()
 from mercurial import util
 from mercurial.templatefilters import age as _age, person as _person
 
-age = lambda x:
+age = lambda x:x
 capitalize = lambda x: x.capitalize()
 date = lambda x: util.datestr(x)
 email = util.email
@@ -333,8 +333,8 @@ hgdate = lambda x: "%d %d" % x
 isodate = lambda x: util.datestr(x, '%Y-%m-%d %H:%M %1%2')
 isodatesec = lambda x: util.datestr(x, '%Y-%m-%d %H:%M:%S %1%2')
 localdate = lambda x: (x[0], util.makedate()[1])
-rfc822date = lambda x:
-rfc822date_notz = lambda x:
+rfc822date = lambda x: x#util.datestr(x, "%a, %d %b %Y %H:%M:%S %1%2")
+rfc822date_notz = lambda x: x#util.datestr(x, "%a, %d %b %Y %H:%M:%S")
 rfc3339date = lambda x: util.datestr(x, "%Y-%m-%dT%H:%M:%S%1:%2")
 time_ago = lambda x: util.datestr(_age(x), "%a, %d %b %Y %H:%M:%S %1%2")
 
@@ -1,4 +1,10 @@
+import os
+import sys
 from os.path import dirname as dn, join as jn
+
+#to get the rhodecode import
+sys.path.append(dn(dn(dn(os.path.realpath(__file__)))))
+
 from rhodecode.config.environment import load_environment
 from rhodecode.model.hg import HgModel
 from shutil import rmtree
@@ -11,13 +17,8 @@ from whoosh.index import create_in, open
 from whoosh.formats import Characters
 from whoosh.highlight import highlight, SimpleFragmenter, HtmlFormatter
 
-import os
-import sys
 import traceback
 
-#to get the rhodecode import
-sys.path.append(dn(dn(dn(os.path.realpath(__file__)))))
-
 
 #LOCATION WE KEEP THE INDEX
 IDX_LOCATION = jn(dn(dn(dn(dn(os.path.abspath(__file__))))), 'data', 'index')
@@ -48,6 +49,59 @@ IDX_NAME = 'HG_INDEX'
 FORMATTER = HtmlFormatter('span', between='\n<span class="break">...</span>\n')
 FRAGMENTER = SimpleFragmenter(200)
 
+from paste.script import command
+import ConfigParser
+
+class MakeIndex(command.Command):
+
+    max_args = 1
+    min_args = 1
+
+    usage = "CONFIG_FILE"
+    summary = "Creates index for full text search given configuration file"
+    group_name = "Whoosh indexing"
+
+    parser = command.Command.standard_parser(verbose=True)
+#    parser.add_option('--repo-location',
+#                      action='store',
+#                      dest='repo_location',
+#                      help="Specifies repositories location to index",
+#                      )
+    parser.add_option('-f',
+                      action='store_true',
+                      dest='full_index',
+                      help="Specifies that index should be made full i.e"
+                      " destroy old and build from scratch",
+                      default=False)
+    def command(self):
+        config_name = self.args[0]
+
+        p = config_name.split('/')
+        if len(p) == 1:
+            root = '.'
+        else:
+            root = '/'.join(p[:-1])
+        print root
+        config = ConfigParser.ConfigParser({'here':root})
+        config.read(config_name)
+        print dict(config.items('app:main'))['index_dir']
+        index_location = dict(config.items('app:main'))['index_dir']
+        #return
+
+        #=======================================================================
+        # WHOOSH DAEMON
+        #=======================================================================
+        from rhodecode.lib.pidlock import LockHeld, DaemonLock
+        from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
+        try:
+            l = DaemonLock()
+            WhooshIndexingDaemon(index_location=index_location)\
+                .run(full_index=self.options.full_index)
+            l.release()
+        except LockHeld:
+            sys.exit(1)
+
+
 class ResultWrapper(object):
     def __init__(self, search_type, searcher, matcher, highlight_items):
         self.search_type = search_type
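The new MakeIndex command is meant to replace the `if __name__ == "__main__"` entry point that this changeset removes from the indexing daemon further below. Assuming it is eventually registered as a paste.script entry point named `make-index` (the registration is not part of this diff), it would be run roughly as `paster make-index production.ini -f`. The only non-obvious piece is the ConfigParser `here` default, which lets `index_dir` be given relative to the ini file; a sketch with assumed paths and ini contents:

    # Assumes an ini file at /srv/rhodecode/production.ini containing:
    #   [app:main]
    #   index_dir = %(here)s/data/index
    import ConfigParser

    config = ConfigParser.ConfigParser({'here': '/srv/rhodecode'})
    config.read('/srv/rhodecode/production.ini')
    index_location = dict(config.items('app:main'))['index_dir']
    # index_location == '/srv/rhodecode/data/index'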
@@ -115,8 +169,8 @@ class ResultWrapper(object):
         Smart function that implements chunking the content
         but not overlap chunks so it doesn't highlight the same
         close occurrences twice.
-
-
+        @param matcher:
+        @param size:
         """
         memory = [(0, 0)]
         for span in self.matcher.spans():
@@ -32,12 +32,12 @@ from os.path import join as jn
 project_path = dn(dn(dn(dn(os.path.realpath(__file__)))))
 sys.path.append(project_path)
 
-from rhodecode.lib.pidlock import LockHeld, DaemonLock
+
 from rhodecode.model.hg import HgModel
 from rhodecode.lib.helpers import safe_unicode
 from whoosh.index import create_in, open_dir
 from shutil import rmtree
-from rhodecode.lib.indexers import INDEX_EXTENSIONS,
+from rhodecode.lib.indexers import INDEX_EXTENSIONS, SCHEMA, IDX_NAME
 
 from time import mktime
 from vcs.exceptions import ChangesetError, RepositoryError
@@ -61,21 +61,33 @@ ch.setFormatter(formatter)
 # add ch to logger
 log.addHandler(ch)
 
-def scan_paths(root_location):
-    return HgModel.
+def get_repos_location():
+    return HgModel.get_repos_location()
+
 
 class WhooshIndexingDaemon(object):
     """
     Deamon for atomic jobs
     """
 
-    def __init__(self, indexname='HG_INDEX',
+    def __init__(self, indexname='HG_INDEX', index_location=None,
+                 repo_location=None):
         self.indexname = indexname
+
+        self.index_location = index_location
+        if not index_location:
+            raise Exception('You have to provide index location')
+
         self.repo_location = repo_location
-        self.repo_paths = scan_paths(self.repo_location)
+        if not repo_location:
+            raise Exception('You have to provide repositories location')
+
+
+
+        self.repo_paths = HgModel.repo_scan('/', self.repo_location, None, True)
         self.initial = False
-        if not os.path.isdir(
-            os.mkdir(
+        if not os.path.isdir(self.index_location):
+            os.mkdir(self.index_location)
             log.info('Cannot run incremental index since it does not'
                      ' yet exist running full build')
             self.initial = True
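WhooshIndexingDaemon now requires an explicit index location and repositories location and raises if either is missing; the old module-level scan_paths() lookup is gone. A minimal construction sketch mirroring the calls visible elsewhere in this changeset (the paths are placeholders, and the DaemonLock usage follows the MakeIndex command above):

    from rhodecode.lib.pidlock import LockHeld, DaemonLock
    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon

    try:
        lock = DaemonLock()
        WhooshIndexingDaemon(index_location='/srv/rhodecode/data/index',
                             repo_location='/srv/repos')\
            .run(full_index=False)  # False = incremental, True = rebuild from scratch
        lock.release()
    except LockHeld:
        pass  # another indexing run is already in progress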
@@ -87,9 +99,7 @@ class WhooshIndexingDaemon(object):
         """
         index_paths_ = set()
         try:
-            tip = repo.get_changeset()
-
-            for topnode, dirs, files in tip.walk('/'):
+            for topnode, dirs, files in repo.walk('/', 'tip'):
                 for f in files:
                     index_paths_.add(jn(repo.path, f.path))
                 for dir in dirs:
@@ -130,14 +140,14 @@ class WhooshIndexingDaemon(object):
 
 
     def build_index(self):
-        if os.path.exists(
+        if os.path.exists(self.index_location):
             log.debug('removing previous index')
-            rmtree(IDX_LOCATION)
+            rmtree(self.index_location)
 
-        if not os.path.exists(
-            os.mkdir(
+        if not os.path.exists(self.index_location):
+            os.mkdir(self.index_location)
 
-        idx = create_in(
+        idx = create_in(self.index_location, SCHEMA, indexname=IDX_NAME)
         writer = idx.writer()
 
         for cnt, repo in enumerate(self.repo_paths.values()):
@@ -154,7 +164,7 @@ class WhooshIndexingDaemon(object):
     def update_index(self):
         log.debug('STARTING INCREMENTAL INDEXING UPDATE')
 
-        idx = open_dir(
+        idx = open_dir(self.index_location, indexname=self.indexname)
         # The set of all paths in the index
         indexed_paths = set()
         # The set of all paths we need to re-index
@@ -209,40 +219,3 @@ class WhooshIndexingDaemon(object):
             self.build_index()
         else:
             self.update_index()
-
-if __name__ == "__main__":
-    arg = sys.argv[1:]
-    if len(arg) != 2:
-        sys.stderr.write('Please specify indexing type [full|incremental]'
-                         'and path to repositories as script args \n')
-        sys.exit()
-
-
-    if arg[0] == 'full':
-        full_index = True
-    elif arg[0] == 'incremental':
-        # False means looking just for changes
-        full_index = False
-    else:
-        sys.stdout.write('Please use [full|incremental]'
-                         ' as script first arg \n')
-        sys.exit()
-
-    if not os.path.isdir(arg[1]):
-        sys.stderr.write('%s is not a valid path \n' % arg[1])
-        sys.exit()
-    else:
-        if arg[1].endswith('/'):
-            repo_location = arg[1] + '*'
-        else:
-            repo_location = arg[1] + '/*'
-
-    try:
-        l = DaemonLock()
-        WhooshIndexingDaemon(repo_location=repo_location)\
-            .run(full_index=full_index)
-        l.release()
-        reload(logging)
-    except LockHeld:
-        sys.exit(1)
-
@@ -16,24 +16,28 @@
 # along with this program; if not, write to the Free Software
 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
 # MA 02110-1301, USA.
+from UserDict import DictMixin
+from mercurial import ui, config, hg
+from mercurial.error import RepoError
+from rhodecode.model import meta
+from rhodecode.model.caching_query import FromCache
+from rhodecode.model.db import Repository, User, RhodeCodeUi, RhodeCodeSettings, \
+    UserLog
+from rhodecode.model.repo import RepoModel
+from rhodecode.model.user import UserModel
+from vcs.backends.base import BaseChangeset
+from vcs.backends.git import GitRepository
+from vcs.backends.hg import MercurialRepository
+from vcs.utils.lazy import LazyProperty
+import datetime
+import logging
+import os
 
 """
 Created on April 18, 2010
 Utilities for RhodeCode
 @author: marcink
 """
-from rhodecode.model.caching_query import FromCache
-from mercurial import ui, config, hg
-from mercurial.error import RepoError
-from rhodecode.model import meta
-from rhodecode.model.user import UserModel
-from rhodecode.model.repo import RepoModel
-from rhodecode.model.db import Repository, User, RhodeCodeUi, RhodeCodeSettings, UserLog
-from vcs.backends.base import BaseChangeset
-from vcs.utils.lazy import LazyProperty
-import logging
-import datetime
-import os
 
 log = logging.getLogger(__name__)
 
@@ -96,14 +100,30 @@ def action_logger(user, action, repo, ip
         sa.rollback()
         log.error('could not log user action:%s', str(e))
 
-def check_repo_dir(paths):
-    repos_path = paths[0][1].split('/')
-    if repos_path[-1] in ['*', '**']:
-        repos_path = repos_path[:-1]
-    if repos_path[0] != '/':
-        repos_path[0] = '/'
-    if not os.path.isdir(os.path.join(*repos_path)):
-        raise Exception('Not a valid repository in %s' % paths[0][1])
+def get_repos(path, recursive=False, initial=False):
+    """
+    Scans given path for repos and return (name,(type,path)) tuple
+    :param prefix:
+    :param path:
+    :param recursive:
+    :param initial:
+    """
+    from vcs.utils.helpers import get_scm
+    from vcs.exceptions import VCSError
+    scm = get_scm(path)
+    if scm:
+        raise Exception('The given path %s should not be a repository got %s',
+                        path, scm)
+
+    for dirpath in os.listdir(path):
+        try:
+            yield dirpath, get_scm(os.path.join(path, dirpath))
+        except VCSError:
+            pass
+
+if __name__ == '__main__':
+    get_repos('', '/home/marcink/workspace-python')
+
 
 def check_repo_fast(repo_name, base_path):
     if os.path.isdir(os.path.join(base_path, repo_name)):return False
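get_repos() takes over from the old check_repo_dir() path validation: instead of requiring a '/*'-suffixed pattern it takes a plain directory, refuses a path that is itself a repository, and (per its docstring) yields one item per repository found while skipping non-repository entries via VCSError. A small consumption sketch; the path is a placeholder and the unpacking follows how repo_scan uses the result later in this changeset:

    from rhodecode.lib.utils import get_repos

    # each item is (name, (scm_type, path)), e.g. ('myrepo', ('hg', '/srv/repos/myrepo'))
    for name, (scm, path) in get_repos('/srv/repos'):
        print '%s repository %r found at %s' % (scm, name, path)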
@@ -231,8 +251,6 @@ def make_ui(read_from='file', path=None,
         for k, v in cfg.items(section):
             baseui.setconfig(section, k, v)
             log.debug('settings ui from file[%s]%s:%s', section, k, v)
-        if checkpaths:check_repo_dir(cfg.items('paths'))
-
 
     elif read_from == 'db':
         hg_ui = get_hg_ui_cached()
@@ -284,7 +302,7 @@ class EmptyChangeset(BaseChangeset):
     @LazyProperty
     def raw_id(self):
         """
-        Returns raw string identifing this changeset, useful for web
+        Returns raw string identifying this changeset, useful for web
         representation.
         """
         return '0' * 40
@@ -308,16 +326,21 @@ def repo2db_mapper(initial_repo_list, re
     """
 
     sa = meta.Session()
+    rm = RepoModel(sa)
     user = sa.query(User).filter(User.admin == True).first()
 
-    rm = RepoModel()
+    for name, repo in initial_repo_list.items():
+        if not rm.get(name, cache=False):
+            log.info('repository %s not found creating default', name)
 
-    for name, repo in initial_repo_list.items():
-        if not RepoModel(sa).get(name, cache=False):
-            log.info('repository %s not found creating default', name)
+            if isinstance(repo, MercurialRepository):
+                repo_type = 'hg'
+            if isinstance(repo, GitRepository):
+                repo_type = 'git'
 
             form_data = {
                          'repo_name':name,
+                         'repo_type':repo_type,
                          'description':repo.description if repo.description != 'unknown' else \
                                         'auto description for %s' % name,
                          'private':False
@@ -335,7 +358,6 @@ def repo2db_mapper(initial_repo_list, re
 
     meta.Session.remove()
 
-from UserDict import DictMixin
 
 class OrderedDict(dict, DictMixin):
 
@@ -81,6 +81,7 @@ class Repository(Base):
     __table_args__ = (UniqueConstraint('repo_name'), {'useexisting':True},)
     repo_id = Column("repo_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
     repo_name = Column("repo_name", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
+    repo_type = Column("repo_type", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
     user_id = Column("user_id", INTEGER(), ForeignKey(u'users.user_id'), nullable=False, unique=False, default=None)
     private = Column("private", BOOLEAN(), nullable=True, unique=None, default=None)
     description = Column("description", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
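The Repository model gains a non-nullable repo_type column, so databases created before this change will not have it and would need the column added by hand (or the database rebuilt). A rough, assumed sketch for SQLite, taking the table name to be 'repositories' and defaulting existing rows to 'hg'; both are assumptions to verify against the actual schema before running anything:

    from sqlalchemy import create_engine

    engine = create_engine('sqlite:///rhodecode.db')  # placeholder DB URL
    conn = engine.connect()
    conn.execute("ALTER TABLE repositories "
                 "ADD COLUMN repo_type TEXT NOT NULL DEFAULT 'hg'")
    conn.close()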
@@ -194,16 +194,12 @@ class ValidSettings(formencode.validator
 
 class ValidPath(formencode.validators.FancyValidator):
     def to_python(self, value, state):
-        isdir = os.path.isdir(value.replace('*', ''))
-        if (value.endswith('/*') or value.endswith('/**')) and isdir:
-            return value
-        elif not isdir:
+
+        if not os.path.isdir(value):
             msg = _('This is not a valid path')
-        else:
-            msg = _('You need to specify * or ** at the end of path (ie. /tmp/*)')
-
             raise formencode.Invalid(msg, value, state,
                                      error_dict={'paths_root_path':msg})
+        return value
 
 def UniqSystemEmail(old_data):
     class _UniqSystemEmail(formencode.validators.FancyValidator):
@@ -24,7 +24,6 @@ Model for RhodeCode
 """
 from beaker.cache import cache_region
 from mercurial import ui
-from mercurial.hgweb.hgwebdir_mod import findrepos
 from rhodecode.lib import helpers as h
 from rhodecode.lib.utils import invalidate_cache
 from rhodecode.lib.auth import HasRepoPermissionAny
@@ -33,12 +32,12 @@ from rhodecode.model.db import Repositor
 from sqlalchemy.orm import joinedload
 from vcs.exceptions import RepositoryError, VCSError
 import logging
-import os
 import sys
 log = logging.getLogger(__name__)
 
 try:
     from vcs.backends.hg import MercurialRepository
+    from vcs.backends.git import GitRepository
 except ImportError:
     sys.stderr.write('You have to import vcs module')
     raise Exception('Unable to import vcs')
@@ -47,7 +46,7 @@ def _get_repos_cached_initial(app_global
     """return cached dict with repos
     """
     g = app_globals
-    return HgModel.repo_scan(
+    return HgModel().repo_scan(g.paths[0][1], g.baseui, initial)
 
 @cache_region('long_term', 'cached_repo_list')
 def _get_repos_cached():
@@ -55,7 +54,7 @@ def _get_repos_cached():
     """
     log.info('getting all repositories list')
     from pylons import app_globals as g
-    return HgModel.repo_scan(
+    return HgModel().repo_scan(g.paths[0][1], g.baseui)
 
 @cache_region('super_short_term', 'cached_repos_switcher_list')
 def _get_repos_switcher_cached(cached_repo_list):
@@ -73,42 +72,34 @@ def _full_changelog_cached(repo_name):
     return list(reversed(list(HgModel().get_repo(repo_name))))
 
 class HgModel(object):
-    """Mercurial Model
+    """
+    Mercurial Model
     """
 
-    def __init__(self):
-
+    def __init__(self, sa=None):
+        if not sa:
+            self.sa = meta.Session()
+        else:
+            self.sa = sa
 
-    @staticmethod
-    def repo_scan(repos_prefix, repos_path, baseui, initial=False):
+    def repo_scan(self, repos_path, baseui, initial=False):
         """
         Listing of repositories in given path. This path should not be a
         repository itself. Return a dictionary of repository objects
-        :param repos_path: path to directory it could take syntax with
-        * or ** for deep recursive displaying repositories
-        """
-        sa = meta.Session()
-        def check_repo_dir(path):
-            """Checks the repository
-            :param path:
-
-            repos_path = path.split('/')
-            if repos_path[-1] in ['*', '**']:
-                repos_path = repos_path[:-1]
-            if repos_path[0] != '/':
-                repos_path[0] = '/'
-            if not os.path.isdir(os.path.join(*repos_path)):
-                raise RepositoryError('Not a valid repository in %s' % path)
-            if not repos_path.endswith('*'):
-                raise VCSError('You need to specify * or ** at the end of path '
-                               'for recursive scanning')
+
+        :param repos_path: path to directory containing repositories
+        :param baseui
+        :param initial: initial scann
+        """
+        log.info('scanning for repositories in %s', repos_path)
 
-        check_repo_dir(repos_path)
-        log.info('scanning for repositories in %s', repos_path)
-        repos = findrepos([(repos_prefix, repos_path)])
         if not isinstance(baseui, ui.ui):
             baseui = ui.ui()
 
+        from rhodecode.lib.utils import get_repos
+        repos = get_repos(repos_path)
+
+
         repos_list = {}
         for name, path in repos:
             try:
117 | raise RepositoryError('Duplicate repository name %s found in' |
|
108 | raise RepositoryError('Duplicate repository name %s found in' | |
118 | ' %s' % (name, path)) |
|
109 | ' %s' % (name, path)) | |
119 | else: |
|
110 | else: | |
|
111 | if path[0] == 'hg': | |||
|
112 | repos_list[name] = MercurialRepository(path[1], baseui=baseui) | |||
|
113 | repos_list[name].name = name | |||
120 |
|
114 | |||
121 | repos_list[name] = MercurialRepository(path, baseui=baseui) |
|
115 | if path[0] == 'git': | |
|
116 | repos_list[name] = GitRepository(path[1]) | |||
122 | repos_list[name].name = name |
|
117 | repos_list[name].name = name | |
123 |
|
118 | |||
124 | dbrepo = None |
|
119 | dbrepo = None | |
125 | if not initial: |
|
120 | if not initial: | |
126 | #for initial scann on application first run we don't |
|
121 | #for initial scann on application first run we don't | |
127 | #have db repos yet. |
|
122 | #have db repos yet. | |
128 | dbrepo = sa.query(Repository)\ |
|
123 | dbrepo = self.sa.query(Repository)\ | |
129 | .options(joinedload(Repository.fork))\ |
|
124 | .options(joinedload(Repository.fork))\ | |
130 | .filter(Repository.repo_name == name)\ |
|
125 | .filter(Repository.repo_name == name)\ | |
131 | .scalar() |
|
126 | .scalar() | |
@@ -137,16 +132,17 @@ class HgModel(object):
                     if dbrepo.user:
                         repos_list[name].contact = dbrepo.user.full_contact
                     else:
-                        repos_list[name].contact = sa.query(User)\
+                        repos_list[name].contact = self.sa.query(User)\
                             .filter(User.admin == True).first().full_contact
             except OSError:
                 continue
-        meta.Session.remove()
+
        return repos_list
 
     def get_repos(self):
         for name, repo in _get_repos_cached().items():
-            if repo._get_hidden():
+
+            if isinstance(repo, MercurialRepository) and repo._get_hidden():
                 #skip hidden web repository
                 continue
 
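HgModel.repo_scan() is no longer a @staticmethod: callers now instantiate the model (optionally passing in a SQLAlchemy session) and hand it a plain repositories directory instead of a '/*'-suffixed pattern. A short sketch mirroring the updated call sites in the cached-repo helpers earlier in this changeset:

    from pylons import app_globals as g
    from rhodecode.model.hg import HgModel

    repos = HgModel().repo_scan(g.paths[0][1], g.baseui)
    for name, repo in repos.items():
        # values are MercurialRepository or GitRepository instances
        print name, type(repo).__name__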
@@ -13,7 +13,7 @@
 </tr>
 %for cnt,cs in enumerate(c.repo_changesets):
 <tr class="parity${cnt%2}">
-    <td>${h.age(cs.
+    <td>${h.age(cs.date)} - ${h.rfc822date_notz(cs.date)} </td>
     <td title="${cs.author}">${h.person(cs.author)}</td>
     <td>r${cs.revision}:${cs.short_id}</td>
     <td>