--- a/rhodecode/lib/celerylib/__init__.py
+++ b/rhodecode/lib/celerylib/__init__.py
@@ -1,106 +1,109 @@
 # -*- coding: utf-8 -*-
 """
     rhodecode.lib.celerylib.__init__
     ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

     celery libs for RhodeCode

     :created_on: Nov 27, 2010
     :author: marcink
     :copyright: (C) 2009-2011 Marcin Kuzminski <marcin@python-works.com>
     :license: GPLv3, see COPYING for more details.
 """
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation, either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.

 import os
 import sys
 import socket
 import traceback
 import logging
+from os.path import dirname as dn, join as jn

 from hashlib import md5
 from decorator import decorator
 from pylons import config

 from vcs.utils.lazy import LazyProperty

 from rhodecode.lib import str2bool
 from rhodecode.lib.pidlock import DaemonLock, LockHeld

 from celery.messaging import establish_connection


 log = logging.getLogger(__name__)

 try:
     CELERY_ON = str2bool(config['app_conf'].get('use_celery'))
 except KeyError:
     CELERY_ON = False


 class ResultWrapper(object):
     def __init__(self, task):
         self.task = task

     @LazyProperty
     def result(self):
         return self.task


 def run_task(task, *args, **kwargs):
     if CELERY_ON:
         try:
             t = task.apply_async(args=args, kwargs=kwargs)
             log.info('running task %s:%s', t.task_id, task)
             return t

         except socket.error, e:
             if e.errno == 111:
                 log.debug('Unable to connect to celeryd. Sync execution')
             else:
                 log.error(traceback.format_exc())
         except KeyError, e:
             log.debug('Unable to connect to celeryd. Sync execution')
         except Exception, e:
             log.error(traceback.format_exc())

     log.debug('executing task %s in sync mode', task)
     return ResultWrapper(task(*args, **kwargs))


 def __get_lockkey(func, *fargs, **fkwargs):
     params = list(fargs)
     params.extend(['%s-%s' % ar for ar in fkwargs.items()])

     func_name = str(func.__name__) if hasattr(func, '__name__') else str(func)

-    lockkey = 'task_%s' % \
+    lockkey = 'task_%s.lock' % \
         md5(func_name + '-' + '-'.join(map(str, params))).hexdigest()
     return lockkey


 def locked_task(func):
     def __wrapper(func, *fargs, **fkwargs):
         lockkey = __get_lockkey(func, *fargs, **fkwargs)
+        lockkey_path = dn(dn(dn(os.path.abspath(__file__))))
+
         log.info('running task with lockkey %s', lockkey)
         try:
-            l = DaemonLock(lockkey)
+            l = DaemonLock(jn(lockkey_path, lockkey))
             ret = func(*fargs, **fkwargs)
             l.release()
             return ret
         except LockHeld:
             log.info('LockHeld')
             return 'Task with key %s already running' % lockkey

     return decorator(__wrapper, func)
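The hunk above gives each task lock an explicit location: the key gains a .lock suffix and the file is created under a directory derived from the module path rather than the worker's current working directory. A minimal sketch of that derivation, not part of the changeset; the /srv/rhodecode prefix and the sample task arguments are hypothetical, used only to make the resulting path concrete:

    # Sketch only: where locked_task now writes its lock file.
    import os
    from os.path import dirname as dn, join as jn
    from hashlib import md5

    def lockkey(func_name, *params):
        # same scheme as __get_lockkey(): md5 of task name plus arguments,
        # suffixed with .lock by this changeset
        raw = func_name + '-' + '-'.join(map(str, params))
        return 'task_%s.lock' % md5(raw.encode('utf-8')).hexdigest()

    # hypothetical install path of rhodecode/lib/celerylib/__init__.py
    module_file = '/srv/rhodecode/rhodecode/lib/celerylib/__init__.py'
    lockkey_path = dn(dn(dn(os.path.abspath(module_file))))  # -> /srv/rhodecode/rhodecode

    # prints something like /srv/rhodecode/rhodecode/task_<md5 digest>.lock
    print(jn(lockkey_path, lockkey('whoosh_index', '/srv/repos', True)))

Three dirname() calls climb from the celerylib package up to the rhodecode package directory, so the lock files land in one predictable place regardless of where celeryd or the paster process was started.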
--- a/rhodecode/lib/celerylib/tasks.py
+++ b/rhodecode/lib/celerylib/tasks.py
@@ -1,375 +1,378 @@
 # -*- coding: utf-8 -*-
 """
     rhodecode.lib.celerylib.tasks
     ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

     RhodeCode task modules, containing all task that suppose to be run
     by celery daemon

     :created_on: Oct 6, 2010
     :author: marcink
     :copyright: (C) 2009-2011 Marcin Kuzminski <marcin@python-works.com>
     :license: GPLv3, see COPYING for more details.
 """
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation, either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 from celery.decorators import task

 import os
 import traceback
 import logging
+from os.path import dirname as dn, join as jn

 from time import mktime
 from operator import itemgetter
 from string import lower

 from pylons import config
 from pylons.i18n.translation import _

 from rhodecode.lib import LANGUAGES_EXTENSIONS_MAP
 from rhodecode.lib.celerylib import run_task, locked_task, str2bool, \
     __get_lockkey, LockHeld, DaemonLock
 from rhodecode.lib.helpers import person
 from rhodecode.lib.smtp_mailer import SmtpMailer
 from rhodecode.lib.utils import add_cache
 from rhodecode.lib.odict import OrderedDict
 from rhodecode.model import init_model
 from rhodecode.model import meta
 from rhodecode.model.db import RhodeCodeUi, Statistics, Repository

 from vcs.backends import get_repo

 from sqlalchemy import engine_from_config

 add_cache(config)

 try:
     import json
 except ImportError:
     #python 2.5 compatibility
     import simplejson as json

 __all__ = ['whoosh_index', 'get_commits_stats',
            'reset_user_password', 'send_email']

 CELERY_ON = str2bool(config['app_conf'].get('use_celery'))




 def get_session():
     if CELERY_ON:
         engine = engine_from_config(config, 'sqlalchemy.db1.')
         init_model(engine)
     sa = meta.Session()
     return sa


 def get_repos_path():
     sa = get_session()
     q = sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one()
     return q.ui_value


 @task(ignore_result=True)
 @locked_task
 def whoosh_index(repo_location, full_index):
     #log = whoosh_index.get_logger()
     from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
     index_location = config['index_dir']
     WhooshIndexingDaemon(index_location=index_location,
                          repo_location=repo_location, sa=get_session())\
         .run(full_index=full_index)


 @task(ignore_result=True)
 def get_commits_stats(repo_name, ts_min_y, ts_max_y):
     try:
         log = get_commits_stats.get_logger()
     except:
         log = logging.getLogger(__name__)

     lockkey = __get_lockkey('get_commits_stats', repo_name, ts_min_y,
                             ts_max_y)
+    lockkey_path = dn(dn(dn(dn(os.path.abspath(__file__)))))
+    print jn(lockkey_path, lockkey)
     log.info('running task with lockkey %s', lockkey)
     try:
-        lock = DaemonLock(lockkey)
+        lock = l = DaemonLock(jn(lockkey_path, lockkey))

         #for js data compatibilty cleans the key for person from '
         akc = lambda k: person(k).replace('"', "")

         co_day_auth_aggr = {}
         commits_by_day_aggregate = {}
         repos_path = get_repos_path()
         p = os.path.join(repos_path, repo_name)
         repo = get_repo(p)
         repo_size = len(repo.revisions)
         #return if repo have no revisions
         if repo_size < 1:
             lock.release()
             return True

         skip_date_limit = True
         parse_limit = int(config['app_conf'].get('commit_parse_limit'))
         last_rev = 0
         last_cs = None
         timegetter = itemgetter('time')

         sa = get_session()

         dbrepo = sa.query(Repository)\
             .filter(Repository.repo_name == repo_name).scalar()
         cur_stats = sa.query(Statistics)\
             .filter(Statistics.repository == dbrepo).scalar()

         if cur_stats is not None:
             last_rev = cur_stats.stat_on_revision

         if last_rev == repo.get_changeset().revision and repo_size > 1:
             #pass silently without any work if we're not on first revision or
             #current state of parsing revision(from db marker) is the
             #last revision
             lock.release()
             return True

         if cur_stats:
             commits_by_day_aggregate = OrderedDict(json.loads(
                 cur_stats.commit_activity_combined))
             co_day_auth_aggr = json.loads(cur_stats.commit_activity)

         log.debug('starting parsing %s', parse_limit)
         lmktime = mktime

         last_rev = last_rev + 1 if last_rev > 0 else last_rev

         for cs in repo[last_rev:last_rev + parse_limit]:
             last_cs = cs  # remember last parsed changeset
             k = lmktime([cs.date.timetuple()[0], cs.date.timetuple()[1],
                          cs.date.timetuple()[2], 0, 0, 0, 0, 0, 0])

             if akc(cs.author) in co_day_auth_aggr:
                 try:
                     l = [timegetter(x) for x in
                          co_day_auth_aggr[akc(cs.author)]['data']]
                     time_pos = l.index(k)
                 except ValueError:
                     time_pos = False

                 if time_pos >= 0 and time_pos is not False:

                     datadict = \
                         co_day_auth_aggr[akc(cs.author)]['data'][time_pos]

                     datadict["commits"] += 1
                     datadict["added"] += len(cs.added)
                     datadict["changed"] += len(cs.changed)
                     datadict["removed"] += len(cs.removed)

                 else:
                     if k >= ts_min_y and k <= ts_max_y or skip_date_limit:

                         datadict = {"time": k,
                                     "commits": 1,
                                     "added": len(cs.added),
                                     "changed": len(cs.changed),
                                     "removed": len(cs.removed),
                                     }
                         co_day_auth_aggr[akc(cs.author)]['data']\
                             .append(datadict)

             else:
                 if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
                     co_day_auth_aggr[akc(cs.author)] = {
                         "label": akc(cs.author),
                         "data": [{"time":k,
                                   "commits":1,
                                   "added":len(cs.added),
                                   "changed":len(cs.changed),
                                   "removed":len(cs.removed),
                                   }],
                         "schema": ["commits"],
                     }

             #gather all data by day
             if k in commits_by_day_aggregate:
                 commits_by_day_aggregate[k] += 1
             else:
                 commits_by_day_aggregate[k] = 1

         overview_data = sorted(commits_by_day_aggregate.items(),
                                key=itemgetter(0))

         if not co_day_auth_aggr:
             co_day_auth_aggr[akc(repo.contact)] = {
                 "label": akc(repo.contact),
                 "data": [0, 1],
                 "schema": ["commits"],
             }

         stats = cur_stats if cur_stats else Statistics()
         stats.commit_activity = json.dumps(co_day_auth_aggr)
         stats.commit_activity_combined = json.dumps(overview_data)

         log.debug('last revison %s', last_rev)
         leftovers = len(repo.revisions[last_rev:])
         log.debug('revisions to parse %s', leftovers)

         if last_rev == 0 or leftovers < parse_limit:
             log.debug('getting code trending stats')
             stats.languages = json.dumps(__get_codes_stats(repo_name))

         try:
             stats.repository = dbrepo
             stats.stat_on_revision = last_cs.revision if last_cs else 0
             sa.add(stats)
             sa.commit()
         except:
             log.error(traceback.format_exc())
             sa.rollback()
             lock.release()
             return False

         #final release
         lock.release()

         #execute another task if celery is enabled
         if len(repo.revisions) > 1 and CELERY_ON:
             run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y)
         return True
     except LockHeld:
         log.info('LockHeld')
         return 'Task with key %s already running' % lockkey


 @task(ignore_result=True)
 def reset_user_password(user_email):
     try:
         log = reset_user_password.get_logger()
     except:
         log = logging.getLogger(__name__)

     from rhodecode.lib import auth
     from rhodecode.model.db import User

     try:
         try:
             sa = get_session()
             user = sa.query(User).filter(User.email == user_email).scalar()
             new_passwd = auth.PasswordGenerator().gen_password(8,
                              auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
             if user:
                 user.password = auth.get_crypt_password(new_passwd)
                 user.api_key = auth.generate_api_key(user.username)
                 sa.add(user)
                 sa.commit()
                 log.info('change password for %s', user_email)
             if new_passwd is None:
                 raise Exception('unable to generate new password')

         except:
             log.error(traceback.format_exc())
             sa.rollback()

         run_task(send_email, user_email,
                  "Your new rhodecode password",
                  'Your new rhodecode password:%s' % (new_passwd))
         log.info('send new password mail to %s', user_email)

     except:
         log.error('Failed to update user password')
         log.error(traceback.format_exc())

     return True


 @task(ignore_result=True)
 def send_email(recipients, subject, body):
     """
     Sends an email with defined parameters from the .ini files.

     :param recipients: list of recipients, it this is empty the defined email
         address from field 'email_to' is used instead
     :param subject: subject of the mail
     :param body: body of the mail
     """
     try:
         log = send_email.get_logger()
     except:
         log = logging.getLogger(__name__)

     email_config = config

     if not recipients:
         recipients = [email_config.get('email_to')]

     mail_from = email_config.get('app_email_from')
     user = email_config.get('smtp_username')
     passwd = email_config.get('smtp_password')
     mail_server = email_config.get('smtp_server')
     mail_port = email_config.get('smtp_port')
     tls = str2bool(email_config.get('smtp_use_tls'))
     ssl = str2bool(email_config.get('smtp_use_ssl'))
     debug = str2bool(config.get('debug'))

     try:
         m = SmtpMailer(mail_from, user, passwd, mail_server,
                        mail_port, ssl, tls, debug=debug)
         m.send(recipients, subject, body)
     except:
         log.error('Mail sending failed')
         log.error(traceback.format_exc())
         return False
     return True


 @task(ignore_result=True)
 def create_repo_fork(form_data, cur_user):
     from rhodecode.model.repo import RepoModel
     from vcs import get_backend

     try:
         log = create_repo_fork.get_logger()
     except:
         log = logging.getLogger(__name__)

     repo_model = RepoModel(get_session())
     repo_model.create(form_data, cur_user, just_db=True, fork=True)
     repo_name = form_data['repo_name']
     repos_path = get_repos_path()
     repo_path = os.path.join(repos_path, repo_name)
     repo_fork_path = os.path.join(repos_path, form_data['fork_name'])
     alias = form_data['repo_type']

     log.info('creating repo fork %s as %s', repo_name, repo_path)
     backend = get_backend(alias)
     backend(str(repo_fork_path), create=True, src_url=str(repo_path))


 def __get_codes_stats(repo_name):
     repos_path = get_repos_path()
     p = os.path.join(repos_path, repo_name)
     repo = get_repo(p)
     tip = repo.get_changeset()
     code_stats = {}

     def aggregate(cs):
         for f in cs[2]:
             ext = lower(f.extension)
             if ext in LANGUAGES_EXTENSIONS_MAP.keys() and not f.is_binary:
                 if ext in code_stats:
                     code_stats[ext] += 1
                 else:
                     code_stats[ext] = 1

     map(aggregate, tip.walk('/'))

     return code_stats or {}
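For comparison with the previous hunk, a sketch of the same path derivation for get_commits_stats (again with a hypothetical install prefix; not part of the changeset). tasks.py lives in the same rhodecode/lib/celerylib/ directory but wraps one more dirname() call, so its lock file resolves one level higher, next to the rhodecode package rather than inside it:

    # Sketch only: path resolution mirroring the four dirname() calls above.
    import os
    from os.path import dirname as dn, join as jn

    module_file = '/srv/rhodecode/rhodecode/lib/celerylib/tasks.py'  # hypothetical
    lockkey_path = dn(dn(dn(dn(os.path.abspath(module_file)))))

    print(lockkey_path)  # -> /srv/rhodecode, one level above the rhodecode package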
--- a/rhodecode/lib/indexers/__init__.py
+++ b/rhodecode/lib/indexers/__init__.py
@@ -1,224 +1,224 @@
 # -*- coding: utf-8 -*-
 """
     rhodecode.lib.indexers.__init__
     ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

     Whoosh indexing module for RhodeCode

     :created_on: Aug 17, 2010
     :author: marcink
     :copyright: (C) 2009-2010 Marcin Kuzminski <marcin@python-works.com>
     :license: GPLv3, see COPYING for more details.
 """
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation, either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 import os
 import sys
 import traceback
 from os.path import dirname as dn, join as jn

 #to get the rhodecode import
 sys.path.append(dn(dn(dn(os.path.realpath(__file__)))))

 from string import strip
 from shutil import rmtree

 from whoosh.analysis import RegexTokenizer, LowercaseFilter, StopFilter
 from whoosh.fields import TEXT, ID, STORED, Schema, FieldType
 from whoosh.index import create_in, open_dir
 from whoosh.formats import Characters
 from whoosh.highlight import highlight, SimpleFragmenter, HtmlFormatter

 from webhelpers.html.builder import escape
 from sqlalchemy import engine_from_config
 from vcs.utils.lazy import LazyProperty

 from rhodecode.model import init_model
 from rhodecode.model.scm import ScmModel
 from rhodecode.config.environment import load_environment
 from rhodecode.lib import LANGUAGES_EXTENSIONS_MAP
 from rhodecode.lib.utils import BasePasterCommand, Command, add_cache

 #EXTENSIONS WE WANT TO INDEX CONTENT OFF
 INDEX_EXTENSIONS = LANGUAGES_EXTENSIONS_MAP.keys()

 #CUSTOM ANALYZER wordsplit + lowercase filter
 ANALYZER = RegexTokenizer(expression=r"\w+") | LowercaseFilter()


 #INDEX SCHEMA DEFINITION
 SCHEMA = Schema(owner=TEXT(),
                 repository=TEXT(stored=True),
                 path=TEXT(stored=True),
                 content=FieldType(format=Characters(ANALYZER),
                                   scorable=True, stored=True),
                 modtime=STORED(), extension=TEXT(stored=True))


 IDX_NAME = 'HG_INDEX'
 FORMATTER = HtmlFormatter('span', between='\n<span class="break">...</span>\n')
 FRAGMENTER = SimpleFragmenter(200)


 class MakeIndex(BasePasterCommand):

     max_args = 1
     min_args = 1

     usage = "CONFIG_FILE"
     summary = "Creates index for full text search given configuration file"
     group_name = "RhodeCode"
     takes_config_file = -1
     parser = Command.standard_parser(verbose=True)

     def command(self):

         from pylons import config
         add_cache(config)
         engine = engine_from_config(config, 'sqlalchemy.db1.')
         init_model(engine)

         index_location = config['index_dir']
         repo_location = self.options.repo_location
         repo_list = map(strip, self.options.repo_list.split(',')) \
             if self.options.repo_list else None

         #======================================================================
         # WHOOSH DAEMON
         #======================================================================
         from rhodecode.lib.pidlock import LockHeld, DaemonLock
         from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
         try:
-            l = DaemonLock()
+            l = DaemonLock(file=jn(dn(dn(index_location)), 'make_index.lock'))
             WhooshIndexingDaemon(index_location=index_location,
                                  repo_location=repo_location,
                                  repo_list=repo_list)\
                 .run(full_index=self.options.full_index)
             l.release()
         except LockHeld:
             sys.exit(1)

     def update_parser(self):
         self.parser.add_option('--repo-location',
                                action='store',
                                dest='repo_location',
                                help="Specifies repositories location to index REQUIRED",
                                )
         self.parser.add_option('--index-only',
                                action='store',
                                dest='repo_list',
                                help="Specifies a comma separated list of repositores "
                                     "to build index on OPTIONAL",
                                )
         self.parser.add_option('-f',
                                action='store_true',
                                dest='full_index',
                                help="Specifies that index should be made full i.e"
                                     " destroy old and build from scratch",
                                default=False)

 class ResultWrapper(object):
     def __init__(self, search_type, searcher, matcher, highlight_items):
         self.search_type = search_type
         self.searcher = searcher
         self.matcher = matcher
         self.highlight_items = highlight_items
         self.fragment_size = 200 / 2

     @LazyProperty
     def doc_ids(self):
         docs_id = []
         while self.matcher.is_active():
             docnum = self.matcher.id()
             chunks = [offsets for offsets in self.get_chunks()]
             docs_id.append([docnum, chunks])
             self.matcher.next()
         return docs_id

     def __str__(self):
         return '<%s at %s>' % (self.__class__.__name__, len(self.doc_ids))

     def __repr__(self):
         return self.__str__()

     def __len__(self):
         return len(self.doc_ids)

     def __iter__(self):
         """
         Allows Iteration over results,and lazy generate content

         *Requires* implementation of ``__getitem__`` method.
         """
         for docid in self.doc_ids:
             yield self.get_full_content(docid)

     def __getitem__(self, key):
         """
         Slicing of resultWrapper
         """
         i, j = key.start, key.stop

         slice = []
         for docid in self.doc_ids[i:j]:
             slice.append(self.get_full_content(docid))
         return slice


     def get_full_content(self, docid):
         res = self.searcher.stored_fields(docid[0])
         f_path = res['path'][res['path'].find(res['repository']) \
                              + len(res['repository']):].lstrip('/')

         content_short = self.get_short_content(res, docid[1])
         res.update({'content_short':content_short,
                     'content_short_hl':self.highlight(content_short),
                     'f_path':f_path})

         return res

     def get_short_content(self, res, chunks):

         return ''.join([res['content'][chunk[0]:chunk[1]] for chunk in chunks])

     def get_chunks(self):
         """
         Smart function that implements chunking the content
         but not overlap chunks so it doesn't highlight the same
         close occurrences twice.

         :param matcher:
         :param size:
         """
         memory = [(0, 0)]
         for span in self.matcher.spans():
             start = span.startchar or 0
             end = span.endchar or 0
             start_offseted = max(0, start - self.fragment_size)
             end_offseted = end + self.fragment_size

             if start_offseted < memory[-1][1]:
                 start_offseted = memory[-1][1]
             memory.append((start_offseted, end_offseted,))
             yield (start_offseted, end_offseted,)

     def highlight(self, content, top=5):
         if self.search_type != 'content':
             return ''
         hl = highlight(escape(content),
                        self.highlight_items,
                        analyzer=ANALYZER,
                        fragmenter=FRAGMENTER,
                        formatter=FORMATTER,
                        top=top)
         return hl
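The MakeIndex hunk pins the whoosh indexing lock to a file derived from the configured index_dir instead of DaemonLock's default location. A small sketch of where that file ends up, not part of the changeset and assuming a hypothetical index_dir setting of /srv/rhodecode/data/index:

    # Sketch only: two dirname() calls above index_dir, named make_index.lock.
    from os.path import dirname as dn, join as jn

    index_location = '/srv/rhodecode/data/index'  # hypothetical [app:main] index_dir
    print(jn(dn(dn(index_location)), 'make_index.lock'))  # -> /srv/rhodecode/make_index.lock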
@@ -1,605 +1,606 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | """ |
|
2 | """ | |
3 | rhodecode.lib.utils |
|
3 | rhodecode.lib.utils | |
4 | ~~~~~~~~~~~~~~~~~~~ |
|
4 | ~~~~~~~~~~~~~~~~~~~ | |
5 |
|
5 | |||
6 | Utilities library for RhodeCode |
|
6 | Utilities library for RhodeCode | |
7 |
|
7 | |||
8 | :created_on: Apr 18, 2010 |
|
8 | :created_on: Apr 18, 2010 | |
9 | :author: marcink |
|
9 | :author: marcink | |
10 | :copyright: (C) 2009-2011 Marcin Kuzminski <marcin@python-works.com> |
|
10 | :copyright: (C) 2009-2011 Marcin Kuzminski <marcin@python-works.com> | |
11 | :license: GPLv3, see COPYING for more details. |
|
11 | :license: GPLv3, see COPYING for more details. | |
12 | """ |
|
12 | """ | |
13 | # This program is free software: you can redistribute it and/or modify |
|
13 | # This program is free software: you can redistribute it and/or modify | |
14 | # it under the terms of the GNU General Public License as published by |
|
14 | # it under the terms of the GNU General Public License as published by | |
15 | # the Free Software Foundation, either version 3 of the License, or |
|
15 | # the Free Software Foundation, either version 3 of the License, or | |
16 | # (at your option) any later version. |
|
16 | # (at your option) any later version. | |
17 | # |
|
17 | # | |
18 | # This program is distributed in the hope that it will be useful, |
|
18 | # This program is distributed in the hope that it will be useful, | |
19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
21 | # GNU General Public License for more details. |
|
21 | # GNU General Public License for more details. | |
22 | # |
|
22 | # | |
23 | # You should have received a copy of the GNU General Public License |
|
23 | # You should have received a copy of the GNU General Public License | |
24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
25 |
|
25 | |||
26 | import os |
|
26 | import os | |
27 | import logging |
|
27 | import logging | |
28 | import datetime |
|
28 | import datetime | |
29 | import traceback |
|
29 | import traceback | |
30 | import paste |
|
30 | import paste | |
31 | import beaker |
|
31 | import beaker | |
|
32 | from os.path import dirname as dn, join as jn | |||
32 |
|
33 | |||
33 | from paste.script.command import Command, BadCommand |
|
34 | from paste.script.command import Command, BadCommand | |
34 |
|
35 | |||
35 | from UserDict import DictMixin |
|
36 | from UserDict import DictMixin | |
36 |
|
37 | |||
37 | from mercurial import ui, config, hg |
|
38 | from mercurial import ui, config, hg | |
38 | from mercurial.error import RepoError |
|
39 | from mercurial.error import RepoError | |
39 |
|
40 | |||
40 | from webhelpers.text import collapse, remove_formatting, strip_tags |
|
41 | from webhelpers.text import collapse, remove_formatting, strip_tags | |
41 |
|
42 | |||
42 | from vcs.backends.base import BaseChangeset |
|
43 | from vcs.backends.base import BaseChangeset | |
43 | from vcs.utils.lazy import LazyProperty |
|
44 | from vcs.utils.lazy import LazyProperty | |
44 |
|
45 | |||
45 | from rhodecode.model import meta |
|
46 | from rhodecode.model import meta | |
46 | from rhodecode.model.caching_query import FromCache |
|
47 | from rhodecode.model.caching_query import FromCache | |
47 | from rhodecode.model.db import Repository, User, RhodeCodeUi, UserLog, Group, \ |
|
48 | from rhodecode.model.db import Repository, User, RhodeCodeUi, UserLog, Group, \ | |
48 | RhodeCodeSettings |
|
49 | RhodeCodeSettings | |
49 | from rhodecode.model.repo import RepoModel |
|
50 | from rhodecode.model.repo import RepoModel | |
50 | from rhodecode.model.user import UserModel |
|
51 | from rhodecode.model.user import UserModel | |
51 |
|
52 | |||
52 | log = logging.getLogger(__name__) |
|
53 | log = logging.getLogger(__name__) | |
53 |
|
54 | |||
54 |
|
55 | |||
55 | def recursive_replace(str, replace=' '): |
|
56 | def recursive_replace(str, replace=' '): | |
56 | """Recursive replace of given sign to just one instance |
|
57 | """Recursive replace of given sign to just one instance | |
57 |
|
58 | |||
58 | :param str: given string |
|
59 | :param str: given string | |
59 | :param replace: char to find and replace multiple instances |
|
60 | :param replace: char to find and replace multiple instances | |
60 |
|
61 | |||
61 | Examples:: |
|
62 | Examples:: | |
62 | >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-') |
|
63 | >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-') | |
63 | 'Mighty-Mighty-Bo-sstones' |
|
64 | 'Mighty-Mighty-Bo-sstones' | |
64 | """ |
|
65 | """ | |
65 |
|
66 | |||
66 | if str.find(replace * 2) == -1: |
|
67 | if str.find(replace * 2) == -1: | |
67 | return str |
|
68 | return str | |
68 | else: |
|
69 | else: | |
69 | str = str.replace(replace * 2, replace) |
|
70 | str = str.replace(replace * 2, replace) | |
70 | return recursive_replace(str, replace) |
|
71 | return recursive_replace(str, replace) | |
71 |
|
72 | |||
72 |
|
73 | |||
def repo_name_slug(value):
    """Return a slug for the given repository name.
    This function is called on each repository creation/modification
    to prevent bad characters in repository names.
    """

    slug = remove_formatting(value)
    slug = strip_tags(slug)

    for c in """=[]\;'"<>,/~!@#$%^&*()+{}|: """:
        slug = slug.replace(c, '-')
    slug = recursive_replace(slug, '-')
    slug = collapse(slug, '-')
    return slug

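A quick, doctest-style check of the slug rules above (illustrative output; it assumes remove_formatting and strip_tags pass plain lowercase text through unchanged, while the loop dashes out spaces and special characters)::

    >>> repo_name_slug('my repo name')
    'my-repo-name'
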
def get_repo_slug(request):
    return request.environ['pylons.routes_dict'].get('repo_name')

def action_logger(user, action, repo, ipaddr='', sa=None):
    """
    Action logger for various actions made by users

    :param user: user that made this action, can be a unique username string
        or an object with a user_id attribute
    :param action: action to log, should be one of the predefined unique
        actions for easy translations
    :param repo: string name of repository, or an object with a repo_id
        attribute, that the action was made on
    :param ipaddr: optional IP address from which the action was made
    :param sa: optional sqlalchemy session

    """

    if not sa:
        sa = meta.Session()

    try:
        um = UserModel()
        if hasattr(user, 'user_id'):
            user_obj = user
        elif isinstance(user, basestring):
            user_obj = um.get_by_username(user, cache=False)
        else:
            raise Exception('You have to provide user object or username')

        rm = RepoModel()
        if hasattr(repo, 'repo_id'):
            repo_obj = rm.get(repo.repo_id, cache=False)
            repo_name = repo_obj.repo_name
        elif isinstance(repo, basestring):
            repo_name = repo.lstrip('/')
            repo_obj = rm.get_by_repo_name(repo_name, cache=False)
        else:
            raise Exception('You have to provide repository to action logger')

        user_log = UserLog()
        user_log.user_id = user_obj.user_id
        user_log.action = action

        user_log.repository_id = repo_obj.repo_id
        user_log.repository_name = repo_name

        user_log.action_date = datetime.datetime.now()
        user_log.user_ip = ipaddr
        sa.add(user_log)
        sa.commit()

        log.info('Adding user %s, action %s on %s', user_obj, action, repo)
    except:
        log.error(traceback.format_exc())
        sa.rollback()

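A minimal usage sketch (values are hypothetical; the username and repository must already exist in the database, and the action string should be one of the application's predefined actions)::

    action_logger(user='admin', action='user_created_repo',
                  repo='my-repo', ipaddr='127.0.0.1')
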
def get_repos(path, recursive=False):
    """
    Scans given path for repos and yields (name, (type, path)) tuples

    :param path: path to scan for repositories
    :param recursive: recursive search and return names with subdirs in front
    """
    from vcs.utils.helpers import get_scm
    from vcs.exceptions import VCSError

    if path.endswith(os.sep):
        #remove ending slash for better results
        path = path[:-1]

    def _get_repos(p):
        if not os.access(p, os.W_OK):
            return
        for dirpath in os.listdir(p):
            if os.path.isfile(os.path.join(p, dirpath)):
                continue
            cur_path = os.path.join(p, dirpath)
            try:
                scm_info = get_scm(cur_path)
                yield scm_info[1].split(path)[-1].lstrip(os.sep), scm_info
            except VCSError:
                if not recursive:
                    continue
                #check if this dir contains other repos for recursive scan
                rec_path = os.path.join(p, dirpath)
                if os.path.isdir(rec_path):
                    for inner_scm in _get_repos(rec_path):
                        yield inner_scm

    return _get_repos(path)

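Each yielded item pairs the repository name with the (type, path) tuple coming from get_scm, so a scan can be consumed like this (the scan path is hypothetical)::

    for name, (repo_type, repo_path) in get_repos('/srv/repos'):
        print '%s [%s] at %s' % (name, repo_type, repo_path)
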
def check_repo_fast(repo_name, base_path):
    """
    Check if a directory for the given repository name already exists

    :param repo_name: name of the repository to check
    :param base_path: base path where repositories are stored

    :return: False if the directory is already present, True otherwise
    """
    if os.path.isdir(os.path.join(base_path, repo_name)):
        return False
    return True

def check_repo(repo_name, base_path, verify=True):

    repo_path = os.path.join(base_path, repo_name)

    try:
        if not check_repo_fast(repo_name, base_path):
            return False
        r = hg.repository(ui.ui(), repo_path)
        if verify:
            hg.verify(r)
        #here we know that the repo exists and was verified
        log.info('%s repo is already created', repo_name)
        return False
    except RepoError:
        #it means that there is no valid repo there...
        log.info('%s repo is free for creation', repo_name)
        return True

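Note the inverted semantics of both helpers: True means the name is free. A sketch of the typical pre-creation guard (paths are hypothetical)::

    if check_repo('new-repo', '/srv/repos', verify=False):
        log.info('new-repo can safely be created')
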
def ask_ok(prompt, retries=4, complaint='Yes or no, please!'):
    while True:
        ok = raw_input(prompt)
        if ok in ('y', 'ye', 'yes'):
            return True
        if ok in ('n', 'no', 'nop', 'nope'):
            return False
        retries = retries - 1
        if retries < 0:
            raise IOError
        print complaint

#propagated from mercurial documentation
ui_sections = ['alias', 'auth',
               'decode/encode', 'defaults',
               'diff', 'email',
               'extensions', 'format',
               'merge-patterns', 'merge-tools',
               'hooks', 'http_proxy',
               'smtp', 'patch',
               'paths', 'profiling',
               'server', 'trusted',
               'ui', 'web', ]

def make_ui(read_from='file', path=None, checkpaths=True):
    """Reads an hgrc-style config file or the database and builds
    a mercurial ui object from the read options

    :param path: path to mercurial config file
    :param checkpaths: check the path
    :param read_from: read from 'file' or 'db'
    """

    baseui = ui.ui()

    #clean the baseui object
    baseui._ocfg = config.config()
    baseui._ucfg = config.config()
    baseui._tcfg = config.config()

    if read_from == 'file':
        if not os.path.isfile(path):
            log.warning('Unable to read config file %s' % path)
            return False
        log.debug('reading hgrc from %s', path)
        cfg = config.config()
        cfg.read(path)
        for section in ui_sections:
            for k, v in cfg.items(section):
                log.debug('setting ui from file[%s]%s:%s', section, k, v)
                baseui.setconfig(section, k, v)

    elif read_from == 'db':
        sa = meta.Session()
        ret = sa.query(RhodeCodeUi)\
            .options(FromCache("sql_cache_short",
                               "get_hg_ui_settings")).all()

        hg_ui = ret
        for ui_ in hg_ui:
            if ui_.ui_active:
                log.debug('setting ui from db[%s]%s:%s', ui_.ui_section,
                          ui_.ui_key, ui_.ui_value)
                baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)

        meta.Session.remove()
    return baseui

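Both read modes in a sketch (the hgrc path is hypothetical)::

    #read settings from an hgrc-style file on disk
    baseui = make_ui(read_from='file', path='/srv/repos/hgrc')

    #or rebuild the ui object from the RhodeCodeUi table
    baseui = make_ui(read_from='db')
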
def set_rhodecode_config(config):
    """Updates pylons config with new settings from database

    :param config: pylons config object to update
    """
    hgsettings = RhodeCodeSettings.get_app_settings()

    for k, v in hgsettings.items():
        config[k] = v

def invalidate_cache(cache_key, *args):
    """Puts cache invalidation task into db for
    further global cache invalidation
    """

    from rhodecode.model.scm import ScmModel

    if cache_key.startswith('get_repo_cached_'):
        name = cache_key.split('get_repo_cached_')[-1]
        ScmModel().mark_for_invalidation(name)

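The key convention in action (repository name is hypothetical): everything after the 'get_repo_cached_' prefix is taken as the repository name to mark for invalidation::

    invalidate_cache('get_repo_cached_my-repo')    #marks 'my-repo' stale
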
class EmptyChangeset(BaseChangeset):
    """
    A dummy, empty changeset. It's possible to pass a hash when creating
    an EmptyChangeset
    """

    def __init__(self, cs='0' * 40, repo=None):
        self._empty_cs = cs
        self.revision = -1
        self.message = ''
        self.author = ''
        self.date = ''
        self.repository = repo

    @LazyProperty
    def raw_id(self):
        """Returns raw string identifying this changeset, useful for web
        representation.
        """

        return self._empty_cs

    @LazyProperty
    def short_id(self):
        return self.raw_id[:12]

    def get_file_changeset(self, path):
        return self

    def get_file_content(self, path):
        return u''

    def get_file_size(self, path):
        return 0

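A sketch of the null-object behaviour, e.g. when diffing against a repository state before the first commit::

    cs = EmptyChangeset()
    assert cs.raw_id == '0' * 40
    assert cs.short_id == '0' * 12
    assert cs.get_file_content('any/path') == u''
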
def map_groups(groups):
    """Checks for group existence, and creates group structures as needed.
    Returns the last group in the structure

    :param groups: list of group name segments
    """
    sa = meta.Session()

    parent = None
    group = None
    for lvl, group_name in enumerate(groups[:-1]):
        group = sa.query(Group).filter(Group.group_name == group_name).scalar()

        if group is None:
            group = Group(group_name, parent)
            sa.add(group)
            sa.commit()

        parent = group

    return group

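For a nested repository path the groups are built from all but the last segment; a sketch (the path is hypothetical)::

    group = map_groups('projects/web/my-repo'.split('/'))
    #ensures Group('projects') and Group('web', parent) exist,
    #and returns the 'web' group for use as the repo's group_id
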
def repo2db_mapper(initial_repo_list, remove_obsolete=False):
    """Maps all repos given in initial_repo_list; non-existing repositories
    are created. If remove_obsolete is True it also checks for db entries
    that are not in initial_repo_list and removes them.

    :param initial_repo_list: list of repositories found by scanning methods
    :param remove_obsolete: check for obsolete entries in database
    """

    sa = meta.Session()
    rm = RepoModel()
    user = sa.query(User).filter(User.admin == True).first()
    added = []
    for name, repo in initial_repo_list.items():
        group = map_groups(name.split('/'))
        if not rm.get_by_repo_name(name, cache=False):
            log.info('repository %s not found creating default', name)
            added.append(name)
            form_data = {
                'repo_name': name,
                'repo_type': repo.alias,
                'description': repo.description \
                    if repo.description != 'unknown' else \
                    '%s repository' % name,
                'private': False,
                'group_id': getattr(group, 'group_id', None)
            }
            rm.create(form_data, user, just_db=True)

    removed = []
    if remove_obsolete:
        #remove from database those repositories that are not in the filesystem
        for repo in sa.query(Repository).all():
            if repo.repo_name not in initial_repo_list.keys():
                removed.append(repo.repo_name)
                sa.delete(repo)
                sa.commit()

    return added, removed

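A sketch of how the mapper is typically fed (the scanning call is hypothetical; initial_repo_list must map repo names to vcs repository objects exposing .alias and .description)::

    scanned = ScmModel().repo_scan()    #hypothetical scanner returning a dict
    added, removed = repo2db_mapper(scanned, remove_obsolete=True)
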
#set cache regions for beaker so celery can utilise it
def add_cache(settings):
    cache_settings = {'regions': None}
    for key in settings.keys():
        for prefix in ['beaker.cache.', 'cache.']:
            if key.startswith(prefix):
                name = key.split(prefix)[1].strip()
                cache_settings[name] = settings[key].strip()
    if cache_settings['regions']:
        for region in cache_settings['regions'].split(','):
            region = region.strip()
            region_settings = {}
            for key, value in cache_settings.items():
                if key.startswith(region):
                    region_settings[key.split('.')[1]] = value
            region_settings['expire'] = int(region_settings.get('expire',
                                                                60))
            region_settings.setdefault('lock_dir',
                                       cache_settings.get('lock_dir'))
            region_settings.setdefault('data_dir',
                                       cache_settings.get('data_dir'))

            if 'type' not in region_settings:
                region_settings['type'] = cache_settings.get('type',
                                                             'memory')
            beaker.cache.cache_regions[region] = region_settings

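A sketch of the ini-derived settings dict this parser expects (keys and values are illustrative, mirroring a typical beaker section)::

    add_cache({
        'beaker.cache.data_dir': '/tmp/cache/data',
        'beaker.cache.lock_dir': '/tmp/cache/lock',
        'beaker.cache.regions': 'short_term, long_term',
        'beaker.cache.short_term.type': 'memory',
        'beaker.cache.short_term.expire': '3600',
        'beaker.cache.long_term.type': 'file',
        'beaker.cache.long_term.expire': '36000',
    })
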
def get_current_revision():
    """Returns tuple of (number, id) from repository containing this package
    or None if repository could not be found.
    """

    try:
        from vcs import get_repo
        from vcs.utils.helpers import get_scm
        from vcs.exceptions import RepositoryError, VCSError
        repopath = os.path.join(os.path.dirname(__file__), '..', '..')
        scm = get_scm(repopath)[0]
        repo = get_repo(path=repopath, alias=scm)
        tip = repo.get_changeset()
        return (tip.revision, tip.short_id)
    except (ImportError, RepositoryError, VCSError), err:
        logging.debug("Cannot retrieve rhodecode's revision. Original error "
                      "was: %s" % err)
        return None

#==============================================================================
# TEST FUNCTIONS AND CREATORS
#==============================================================================
def create_test_index(repo_location, full_index):
    """Makes default test index

    :param repo_location: location of repositories to index
    :param full_index: whether to build a full index from scratch
    """
    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
    from rhodecode.lib.pidlock import DaemonLock, LockHeld
    import shutil

    index_location = os.path.join(repo_location, 'index')
    if os.path.exists(index_location):
        shutil.rmtree(index_location)

    try:
        #was: l = DaemonLock(); the lock file is now given an explicit path
        l = DaemonLock(file=jn(dn(dn(index_location)), 'make_index.lock'))
        WhooshIndexingDaemon(index_location=index_location,
                             repo_location=repo_location)\
            .run(full_index=full_index)
        l.release()
    except LockHeld:
        pass

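The DaemonLock call above (previously a bare DaemonLock()) pins the indexing lock file to a predictable location; the path arithmetic works out as follows, assuming repo_location = '/srv/repos'::

    index_location = os.path.join('/srv/repos', 'index')    #'/srv/repos/index'
    dn(dn(index_location))                                  #'/srv'
    jn(dn(dn(index_location)), 'make_index.lock')           #'/srv/make_index.lock'
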
def create_test_env(repos_test_path, config):
    """Makes a fresh database and
    installs test repositories into tmp dir
    """
    from rhodecode.lib.db_manage import DbManage
    from rhodecode.tests import HG_REPO, GIT_REPO, NEW_HG_REPO, NEW_GIT_REPO, \
        HG_FORK, GIT_FORK, TESTS_TMP_PATH
    import tarfile
    import shutil
    from os.path import dirname as dn, join as jn, abspath

    log = logging.getLogger('TestEnvCreator')
    # create logger
    log.setLevel(logging.DEBUG)
    log.propagate = True
    # create console handler and set level to debug
    ch = logging.StreamHandler()
    ch.setLevel(logging.DEBUG)

    # create formatter
    formatter = logging.Formatter("%(asctime)s - %(name)s -"
                                  " %(levelname)s - %(message)s")

    # add formatter to ch
    ch.setFormatter(formatter)

    # add ch to logger
    log.addHandler(ch)

    #PART ONE create db
    dbconf = config['sqlalchemy.db1.url']
    log.debug('making test db %s', dbconf)

    dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
                        tests=True)
    dbmanage.create_tables(override=True)
    dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
    dbmanage.create_default_user()
    dbmanage.admin_prompt()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()

    #PART TWO make test repo
    log.debug('making test vcs repositories')

    #remove old ones from previous tests
    for r in [HG_REPO, GIT_REPO, NEW_HG_REPO, NEW_GIT_REPO, HG_FORK, GIT_FORK]:

        if os.path.isdir(jn(TESTS_TMP_PATH, r)):
            log.debug('removing %s', r)
            shutil.rmtree(jn(TESTS_TMP_PATH, r))

    #CREATE DEFAULT HG REPOSITORY
    cur_dir = dn(dn(abspath(__file__)))
    tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
    tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
    tar.close()

#==============================================================================
# PASTER COMMANDS
#==============================================================================
class BasePasterCommand(Command):
    """
    Abstract Base Class for paster commands.

    The celery commands are somewhat aggressive about loading
    celery.conf, and since our module sets the `CELERY_LOADER`
    environment variable to our loader, we have to bootstrap a bit and
    make sure we've had a chance to load the pylons config off of the
    command line, otherwise everything fails.
    """
    min_args = 1
    min_args_error = "Please provide a paster config file as an argument."
    takes_config_file = 1
    requires_config_file = True

    def notify_msg(self, msg, log=False):
        """Make a notification to user, additionally if logger is passed
        it logs this action using given logger

        :param msg: message that will be printed to user
        :param log: logging instance, to use to additionally log this message

        """
        #isinstance against the logging module itself would raise TypeError;
        #check for an actual Logger instance instead
        if log and isinstance(log, logging.Logger):
            log.info(msg)

    def run(self, args):
        """
        Overrides Command.run

        Checks for a config file argument and loads it.
        """
        if len(args) < self.min_args:
            raise BadCommand(
                self.min_args_error % {'min_args': self.min_args,
                                       'actual_args': len(args)})

        # Decrement because we're going to lob off the first argument.
        # @@ This is hacky
        self.min_args -= 1
        self.bootstrap_config(args[0])
        self.update_parser()
        return super(BasePasterCommand, self).run(args[1:])

    def update_parser(self):
        """
        Abstract method. Allows for the class's parser to be updated
        before the superclass's `run` method is called. Necessary to
        allow options/arguments to be passed through to the underlying
        celery command.
        """
        raise NotImplementedError("Abstract Method.")

    def bootstrap_config(self, conf):
        """
        Loads the pylons configuration.
        """
        from pylons import config as pylonsconfig

        path_to_ini_file = os.path.realpath(conf)
        conf = paste.deploy.appconfig('config:' + path_to_ini_file)
        pylonsconfig.init_app(conf.global_conf, conf.local_conf)
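A minimal concrete subclass sketch (all names here are hypothetical), showing the contract: implement update_parser(), and by the time paster invokes command() the pylons config has already been bootstrapped::

    class MakeIndexCommand(BasePasterCommand):
        """Hypothetical paster command rebuilding the search index."""

        summary = "Rebuild the full text search index"
        group_name = "RhodeCode"

        def update_parser(self):
            self.parser.add_option('--full', action='store_true',
                                   dest='full_index', default=False,
                                   help='build a fresh index from scratch')

        def command(self):
            #pylons config was loaded by bootstrap_config() in run()
            from pylons import config
            print 'indexing repositories under %s' % config['base_path']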