--- a/rhodecode/lib/hooks.py
+++ b/rhodecode/lib/hooks.py
@@ -1,476 +1,468 @@
 # -*- coding: utf-8 -*-
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation, either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 rhodecode.lib.hooks
 ~~~~~~~~~~~~~~~~~~~
 
 Hooks run by RhodeCode
 
 :created_on: Aug 6, 2010
 :author: marcink
 :copyright: (c) 2013 RhodeCode GmbH.
 :license: GPLv3, see LICENSE for more details.
 """
 
 import os
 import sys
 import time
 import binascii
 
 from rhodecode.lib.vcs.utils.hgcompat import nullrev, revrange
 from rhodecode.lib import helpers as h
 from rhodecode.lib.utils import action_logger
 from rhodecode.lib.vcs.backends.base import EmptyChangeset
 from rhodecode.lib.exceptions import HTTPLockedRC, UserCreationError
 from rhodecode.lib.utils2 import safe_str, _extract_extras
 from rhodecode.model.db import Repository, User
 
 
 def _get_scm_size(alias, root_path):
 
     if not alias.startswith('.'):
         alias += '.'
 
     size_scm, size_root = 0, 0
     for path, dirs, files in os.walk(safe_str(root_path)):
         if path.find(alias) != -1:
             for f in files:
                 try:
                     size_scm += os.path.getsize(os.path.join(path, f))
                 except OSError:
                     pass
         else:
             for f in files:
                 try:
                     size_root += os.path.getsize(os.path.join(path, f))
                 except OSError:
                     pass
 
     size_scm_f = h.format_byte_size(size_scm)
     size_root_f = h.format_byte_size(size_root)
     size_total_f = h.format_byte_size(size_root + size_scm)
 
     return size_scm_f, size_root_f, size_total_f
 
 
 def repo_size(ui, repo, hooktype=None, **kwargs):
     """
     Presents size of repository after push
 
     :param ui:
     :param repo:
     :param hooktype:
     """
 
     size_hg_f, size_root_f, size_total_f = _get_scm_size('.hg', repo.root)
 
     last_cs = repo[len(repo) - 1]
 
     msg = ('Repository size .hg:%s repo:%s total:%s\n'
            'Last revision is now r%s:%s\n') % (
         size_hg_f, size_root_f, size_total_f, last_cs.rev(), last_cs.hex()[:12]
     )
 
     sys.stdout.write(msg)
 
 
 def pre_push(ui, repo, **kwargs):
     # pre push function, currently used to ban pushing when
     # repository is locked
     ex = _extract_extras()
 
     usr = User.get_by_username(ex.username)
     if ex.locked_by[0] and usr.user_id != int(ex.locked_by[0]):
         locked_by = User.get(ex.locked_by[0]).username
         # this exception is interpreted in git/hg middlewares and based
         # on that the proper return code is served to the client
         _http_ret = HTTPLockedRC(ex.repository, locked_by)
         if str(_http_ret.code).startswith('2'):
             # 2xx codes don't raise exceptions
             sys.stdout.write(_http_ret.title)
         else:
             raise _http_ret
 
 
 def pre_pull(ui, repo, **kwargs):
     # pre pull function, currently used to ban pulling when
     # repository is locked
     ex = _extract_extras()
     if ex.locked_by[0]:
         locked_by = User.get(ex.locked_by[0]).username
         # this exception is interpreted in git/hg middlewares and based
         # on that the proper return code is served to the client
         _http_ret = HTTPLockedRC(ex.repository, locked_by)
         if str(_http_ret.code).startswith('2'):
             # 2xx codes don't raise exceptions
             sys.stdout.write(_http_ret.title)
         else:
             raise _http_ret
 
 
 def log_pull_action(ui, repo, **kwargs):
     """
     Logs user last pull action
 
     :param ui:
     :param repo:
     """
     ex = _extract_extras()
 
     user = User.get_by_username(ex.username)
     action = 'pull'
     action_logger(user, action, ex.repository, ex.ip, commit=True)
     # extension hook call
     from rhodecode import EXTENSIONS
     callback = getattr(EXTENSIONS, 'PULL_HOOK', None)
     if callable(callback):
         kw = {}
         kw.update(ex)
         callback(**kw)
 
     if ex.make_lock is not None and ex.make_lock:
         Repository.lock(Repository.get_by_repo_name(ex.repository), user.user_id)
         #msg = 'Made lock on repo `%s`' % repository
         #sys.stdout.write(msg)
 
     if ex.locked_by[0]:
         locked_by = User.get(ex.locked_by[0]).username
         _http_ret = HTTPLockedRC(ex.repository, locked_by)
         if str(_http_ret.code).startswith('2'):
             # 2xx codes don't raise exceptions
             sys.stdout.write(_http_ret.title)
     return 0
 
 
 def log_push_action(ui, repo, **kwargs):
     """
     Maps user last push action to new changeset id, from mercurial
 
     :param ui:
     :param repo: repo object containing the `ui` object
     """
 
     ex = _extract_extras()
 
     action_tmpl = ex.action + ':%s'
     revs = []
     if ex.scm == 'hg':
         node = kwargs['node']
 
         def get_revs(repo, rev_opt):
             if rev_opt:
                 revs = revrange(repo, rev_opt)
 
                 if len(revs) == 0:
                     return (nullrev, nullrev)
                 return max(revs), min(revs)
             else:
                 return len(repo) - 1, 0
 
         stop, start = get_revs(repo, [node + ':'])
         _h = binascii.hexlify
         revs = [_h(repo[r].node()) for r in xrange(start, stop + 1)]
     elif ex.scm == 'git':
         revs = kwargs.get('_git_revs', [])
         if '_git_revs' in kwargs:
             kwargs.pop('_git_revs')
 
     action = action_tmpl % ','.join(revs)
     action_logger(ex.username, action, ex.repository, ex.ip, commit=True)
 
     # extension hook call
     from rhodecode import EXTENSIONS
     callback = getattr(EXTENSIONS, 'PUSH_HOOK', None)
     if callable(callback):
         kw = {'pushed_revs': revs}
         kw.update(ex)
         callback(**kw)
 
     if ex.make_lock is not None and not ex.make_lock:
         Repository.unlock(Repository.get_by_repo_name(ex.repository))
         msg = 'Released lock on repo `%s`\n' % ex.repository
         sys.stdout.write(msg)
 
     if ex.locked_by[0]:
         locked_by = User.get(ex.locked_by[0]).username
         _http_ret = HTTPLockedRC(ex.repository, locked_by)
         if str(_http_ret.code).startswith('2'):
             # 2xx codes don't raise exceptions
             sys.stdout.write(_http_ret.title)
 
     return 0
 
 
 def log_create_repository(repository_dict, created_by, **kwargs):
     """
-    Post create repository Hook. This is a dummy function for admins to re-use
-    if needed. It's taken from rhodecode-extensions module and executed
-    if present
+    Post create repository Hook.
 
     :param repository: dict dump of repository object
     :param created_by: username who created repository
 
     available keys of repository_dict:
 
     'repo_type',
     'description',
     'private',
     'created_on',
     'enable_downloads',
     'repo_id',
     'user_id',
     'enable_statistics',
     'clone_uri',
     'fork_id',
     'group_id',
     'repo_name'
 
     """
     from rhodecode import EXTENSIONS
     callback = getattr(EXTENSIONS, 'CREATE_REPO_HOOK', None)
     if callable(callback):
         kw = {}
         kw.update(repository_dict)
         kw.update({'created_by': created_by})
         kw.update(kwargs)
         return callback(**kw)
 
     return 0
 
 
 def check_allowed_create_user(user_dict, created_by, **kwargs):
     # pre create hooks
     from rhodecode import EXTENSIONS
     callback = getattr(EXTENSIONS, 'PRE_CREATE_USER_HOOK', None)
     if callable(callback):
         allowed, reason = callback(created_by=created_by, **user_dict)
         if not allowed:
             raise UserCreationError(reason)
 
 
 def log_create_user(user_dict, created_by, **kwargs):
     """
-    Post create user Hook. This is a dummy function for admins to re-use
-    if needed. It's taken from rhodecode-extensions module and executed
-    if present
+    Post create user Hook.
 
     :param user_dict: dict dump of user object
 
     available keys for user_dict:
 
     'username',
     'full_name_or_username',
     'full_contact',
     'user_id',
     'name',
     'firstname',
     'short_contact',
     'admin',
     'lastname',
     'ip_addresses',
     'ldap_dn',
     'email',
     'api_key',
     'last_login',
     'full_name',
     'active',
     'password',
     'emails',
     'inherit_default_permissions'
 
     """
     from rhodecode import EXTENSIONS
     callback = getattr(EXTENSIONS, 'CREATE_USER_HOOK', None)
     if callable(callback):
         return callback(created_by=created_by, **user_dict)
 
     return 0
 
 
 def log_delete_repository(repository_dict, deleted_by, **kwargs):
     """
-    Post delete repository Hook. This is a dummy function for admins to re-use
-    if needed. It's taken from rhodecode-extensions module and executed
-    if present
+    Post delete repository Hook.
 
     :param repository: dict dump of repository object
     :param deleted_by: username who deleted the repository
 
     available keys of repository_dict:
 
     'repo_type',
     'description',
     'private',
     'created_on',
     'enable_downloads',
     'repo_id',
     'user_id',
     'enable_statistics',
     'clone_uri',
     'fork_id',
     'group_id',
     'repo_name'
 
     """
     from rhodecode import EXTENSIONS
     callback = getattr(EXTENSIONS, 'DELETE_REPO_HOOK', None)
     if callable(callback):
         kw = {}
         kw.update(repository_dict)
         kw.update({'deleted_by': deleted_by,
                    'deleted_on': time.time()})
         kw.update(kwargs)
         return callback(**kw)
 
     return 0
 
 
 def log_delete_user(user_dict, deleted_by, **kwargs):
     """
-    Post delete user Hook. This is a dummy function for admins to re-use
-    if needed. It's taken from rhodecode-extensions module and executed
-    if present
+    Post delete user Hook.
 
     :param user_dict: dict dump of user object
 
     available keys for user_dict:
 
     'username',
     'full_name_or_username',
     'full_contact',
     'user_id',
     'name',
     'firstname',
     'short_contact',
     'admin',
     'lastname',
     'ip_addresses',
     'ldap_dn',
     'email',
     'api_key',
     'last_login',
     'full_name',
     'active',
     'password',
     'emails',
     'inherit_default_permissions'
 
     """
     from rhodecode import EXTENSIONS
     callback = getattr(EXTENSIONS, 'DELETE_USER_HOOK', None)
     if callable(callback):
         return callback(deleted_by=deleted_by, **user_dict)
 
     return 0
 
 
 handle_git_pre_receive = (lambda repo_path, revs, env:
                           handle_git_receive(repo_path, revs, env, hook_type='pre'))
 handle_git_post_receive = (lambda repo_path, revs, env:
                            handle_git_receive(repo_path, revs, env, hook_type='post'))
 
 
 def handle_git_receive(repo_path, revs, env, hook_type='post'):
     """
     A really hacky method that is run by the git post-receive hook and logs
     a push action together with the pushed revisions. It's executed by a
     subprocess, so it needs all the info required to create an on-the-fly
     pylons environment, connect to the database and run the logging code.
     Hacky as sh*t but works.
 
     :param repo_path:
     :param revs:
     :param env:
     """
     from paste.deploy import appconfig
     from sqlalchemy import engine_from_config
     from rhodecode.config.environment import load_environment
     from rhodecode.model import init_model
     from rhodecode.model.db import RhodeCodeUi
     from rhodecode.lib.utils import make_ui
     extras = _extract_extras(env)
 
     path, ini_name = os.path.split(extras['config'])
     conf = appconfig('config:%s' % ini_name, relative_to=path)
     load_environment(conf.global_conf, conf.local_conf, test_env=False,
                      test_index=False)
 
     engine = engine_from_config(conf, 'sqlalchemy.db1.')
     init_model(engine)
 
     baseui = make_ui('db')
     # fix if it's not a bare repo
     if repo_path.endswith(os.sep + '.git'):
         repo_path = repo_path[:-5]
 
     repo = Repository.get_by_full_path(repo_path)
     if not repo:
         raise OSError('Repository %s not found in database'
                       % (safe_str(repo_path)))
 
     _hooks = dict(baseui.configitems('hooks')) or {}
 
     if hook_type == 'pre':
         repo = repo.scm_instance
     else:
         # post push should never use the cached instance
         repo = repo.scm_instance_no_cache()
 
     if hook_type == 'pre':
         pre_push(baseui, repo)
 
     # if push hook is enabled via web interface
     elif hook_type == 'post' and _hooks.get(RhodeCodeUi.HOOK_PUSH):
         rev_data = []
         for l in revs:
             old_rev, new_rev, ref = l.split(' ')
             _ref_data = ref.split('/')
             if _ref_data[1] in ['tags', 'heads']:
                 rev_data.append({'old_rev': old_rev,
                                  'new_rev': new_rev,
                                  'ref': ref,
                                  'type': _ref_data[1],
                                  'name': _ref_data[2].strip()})
 
         git_revs = []
 
         for push_ref in rev_data:
             _type = push_ref['type']
             if _type == 'heads':
                 if push_ref['old_rev'] == EmptyChangeset().raw_id:
                     # update the symbolic ref if we push new repo
                     if repo.is_empty():
                         repo._repo.refs.set_symbolic_ref('HEAD',
                                             'refs/heads/%s' % push_ref['name'])
 
                     cmd = "for-each-ref --format='%(refname)' 'refs/heads/*'"
                     heads = repo.run_git_command(cmd)[0]
                     heads = heads.replace(push_ref['ref'], '')
                     heads = ' '.join(map(lambda c: c.strip('\n').strip(),
                                          heads.splitlines()))
                     cmd = (('log %(new_rev)s' % push_ref) +
                            ' --reverse --pretty=format:"%H" --not ' + heads)
                     git_revs += repo.run_git_command(cmd)[0].splitlines()
 
                 elif push_ref['new_rev'] == EmptyChangeset().raw_id:
                     # delete branch case
                     git_revs += ['delete_branch=>%s' % push_ref['name']]
                 else:
                     cmd = (('log %(old_rev)s..%(new_rev)s' % push_ref) +
                            ' --reverse --pretty=format:"%H"')
                     git_revs += repo.run_git_command(cmd)[0].splitlines()
 
             elif _type == 'tags':
                 git_revs += ['tag=>%s' % push_ref['name']]
 
         log_push_action(baseui, repo, _git_revs=git_revs)
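
The EXTENSIONS callbacks used throughout hooks.py above (PULL_HOOK, PUSH_HOOK,
CREATE_REPO_HOOK, DELETE_REPO_HOOK, PRE_CREATE_USER_HOOK, ...) are plain
module-level functions looked up with getattr() and called with the extras
dict as keyword arguments. A minimal sketch of a PUSH_HOOK compatible with the
call site in log_push_action follows; the log file path, and any extras keys
beyond those referenced above (username, repository, pushed_revs), are
illustrative assumptions, not the shipped rcextensions implementation.

    # Sketch of an rcextensions-style PUSH_HOOK; the keyword names follow
    # the kw dict built in log_push_action above.
    def PUSH_HOOK(**kwargs):
        # 'pushed_revs' is added explicitly in log_push_action; the other
        # keys come from the _extract_extras() dict.
        user = kwargs.get('username')
        repo = kwargs.get('repository')
        revs = kwargs.get('pushed_revs', [])
        with open('/var/log/rhodecode_pushes.log', 'a') as f:  # assumed path
            f.write('%s pushed %d revision(s) to %s\n'
                    % (user, len(revs), repo))
        return 0
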
--- a/rhodecode/lib/indexers/__init__.py
+++ b/rhodecode/lib/indexers/__init__.py
@@ -1,195 +1,195 @@
 # -*- coding: utf-8 -*-
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation, either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 rhodecode.lib.indexers.__init__
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 Whoosh indexing module for RhodeCode
 
 :created_on: Aug 17, 2010
 :author: marcink
 :copyright: (c) 2013 RhodeCode GmbH.
 :license: GPLv3, see LICENSE for more details.
 """
 
 import os
 import sys
 import logging
 from os.path import dirname as dn, join as jn
 
-#to get the rhodecode import
+# Add location of top level folder to sys.path
 sys.path.append(dn(dn(dn(os.path.realpath(__file__)))))
 
 from whoosh.analysis import RegexTokenizer, LowercaseFilter, StopFilter
 from whoosh.fields import TEXT, ID, STORED, NUMERIC, BOOLEAN, Schema, FieldType, DATETIME
 from whoosh.formats import Characters
 from whoosh.highlight import highlight as whoosh_highlight, HtmlFormatter, ContextFragmenter
 from rhodecode.lib.utils2 import LazyProperty
 
 log = logging.getLogger(__name__)
 
 # CUSTOM ANALYZER wordsplit + lowercase filter
 ANALYZER = RegexTokenizer(expression=r"\w+") | LowercaseFilter()
 
 # INDEX SCHEMA DEFINITION
 SCHEMA = Schema(
     fileid=ID(unique=True),
     owner=TEXT(),
     repository=TEXT(stored=True),
     path=TEXT(stored=True),
     content=FieldType(format=Characters(), analyzer=ANALYZER,
                       scorable=True, stored=True),
     modtime=STORED(),
     extension=TEXT(stored=True)
 )
 
 IDX_NAME = 'HG_INDEX'
 FORMATTER = HtmlFormatter('span', between='\n<span class="break">...</span>\n')
 FRAGMENTER = ContextFragmenter(200)
 
 CHGSETS_SCHEMA = Schema(
     raw_id=ID(unique=True, stored=True),
     date=NUMERIC(stored=True),
     last=BOOLEAN(),
     owner=TEXT(),
     repository=ID(unique=True, stored=True),
     author=TEXT(stored=True),
     message=FieldType(format=Characters(), analyzer=ANALYZER,
                       scorable=True, stored=True),
     parents=TEXT(),
     added=TEXT(),
     removed=TEXT(),
     changed=TEXT(),
 )
 
 CHGSET_IDX_NAME = 'CHGSET_INDEX'
 
 # used only to generate queries in journal
 JOURNAL_SCHEMA = Schema(
     username=TEXT(),
     date=DATETIME(),
     action=TEXT(),
     repository=TEXT(),
     ip=TEXT(),
 )
 
 
 class WhooshResultWrapper(object):
     def __init__(self, search_type, searcher, matcher, highlight_items,
                  repo_location):
         self.search_type = search_type
         self.searcher = searcher
         self.matcher = matcher
         self.highlight_items = highlight_items
         self.fragment_size = 200
         self.repo_location = repo_location
 
     @LazyProperty
     def doc_ids(self):
         docs_id = []
         while self.matcher.is_active():
             docnum = self.matcher.id()
             chunks = [offsets for offsets in self.get_chunks()]
             docs_id.append([docnum, chunks])
             self.matcher.next()
         return docs_id
 
     def __str__(self):
         return '<%s at %s>' % (self.__class__.__name__, len(self.doc_ids))
 
     def __repr__(self):
         return self.__str__()
 
     def __len__(self):
         return len(self.doc_ids)
 
     def __iter__(self):
         """
118 | """ |
|
118 | """ | |
119 | Allows Iteration over results,and lazy generate content |
|
119 | Allows Iteration over results,and lazy generate content | |
120 |
|
120 | |||
121 | *Requires* implementation of ``__getitem__`` method. |
|
121 | *Requires* implementation of ``__getitem__`` method. | |
122 | """ |
|
122 | """ | |
         for docid in self.doc_ids:
             yield self.get_full_content(docid)
 
     def __getitem__(self, key):
         """
         Slicing of resultWrapper
         """
         i, j = key.start, key.stop
 
         slices = []
         for docid in self.doc_ids[i:j]:
             slices.append(self.get_full_content(docid))
         return slices
 
     def get_full_content(self, docid):
         res = self.searcher.stored_fields(docid[0])
         log.debug('result: %s' % res)
         if self.search_type == 'content':
             full_repo_path = jn(self.repo_location, res['repository'])
             f_path = res['path'].split(full_repo_path)[-1]
             f_path = f_path.lstrip(os.sep)
             content_short = self.get_short_content(res, docid[1])
             res.update({'content_short': content_short,
                         'content_short_hl': self.highlight(content_short),
                         'f_path': f_path
                         })
         elif self.search_type == 'path':
             full_repo_path = jn(self.repo_location, res['repository'])
             f_path = res['path'].split(full_repo_path)[-1]
             f_path = f_path.lstrip(os.sep)
             res.update({'f_path': f_path})
         elif self.search_type == 'message':
             res.update({'message_hl': self.highlight(res['message'])})
 
         log.debug('result: %s' % res)
 
         return res
 
     def get_short_content(self, res, chunks):
 
         return ''.join([res['content'][chunk[0]:chunk[1]] for chunk in chunks])
 
     def get_chunks(self):
166 | """ |
|
166 | """ | |
167 | Smart function that implements chunking the content |
|
167 | Smart function that implements chunking the content | |
168 | but not overlap chunks so it doesn't highlight the same |
|
168 | but not overlap chunks so it doesn't highlight the same | |
169 | close occurrences twice. |
|
169 | close occurrences twice. | |
170 | """ |
|
170 | """ | |
         memory = [(0, 0)]
         if self.matcher.supports('positions'):
             for span in self.matcher.spans():
                 start = span.startchar or 0
                 end = span.endchar or 0
                 start_offseted = max(0, start - self.fragment_size)
                 end_offseted = end + self.fragment_size
 
                 if start_offseted < memory[-1][1]:
                     start_offseted = memory[-1][1]
                 memory.append((start_offseted, end_offseted,))
                 yield (start_offseted, end_offseted,)
 
     def highlight(self, content, top=5):
         if self.search_type not in ['content', 'message']:
             return ''
         hl = whoosh_highlight(
             text=content,
             terms=self.highlight_items,
             analyzer=ANALYZER,
             fragmenter=FRAGMENTER,
             formatter=FORMATTER,
             top=top
         )
         return hl
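
The SCHEMA and ANALYZER defined above plug into the standard Whoosh API. A
minimal sketch of building and querying a file-content index with this schema
follows; the index directory and document values are illustrative, not
RhodeCode's real data (Python 2, matching the codebase):

    import os
    from whoosh.index import create_in
    from whoosh.qparser import QueryParser

    ix_dir = '/tmp/whoosh_ix'  # illustrative location
    if not os.path.isdir(ix_dir):
        os.makedirs(ix_dir)
    ix = create_in(ix_dir, SCHEMA, indexname=IDX_NAME)

    # index one document using the fields declared in SCHEMA above
    writer = ix.writer()
    writer.add_document(fileid=u'repo1/setup.py', owner=u'marcink',
                        repository=u'repo1', path=u'/repos/repo1/setup.py',
                        content=u'from setuptools import setup',
                        modtime=1377000000, extension=u'py')
    writer.commit()

    # query the 'content' field, the same field searched by RhodeCode
    with ix.searcher() as searcher:
        query = QueryParser('content', schema=SCHEMA).parse(u'setuptools')
        for hit in searcher.search(query):
            print hit['repository'], hit['path']
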
--- a/rhodecode/lib/indexers/daemon.py
+++ b/rhodecode/lib/indexers/daemon.py
@@ -1,445 +1,445 @@
 # -*- coding: utf-8 -*-
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation, either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 rhodecode.lib.indexers.daemon
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 A daemon that reads from the task table and runs tasks
 
 :created_on: Jan 26, 2010
 :author: marcink
 :copyright: (c) 2013 RhodeCode GmbH.
 :license: GPLv3, see LICENSE for more details.
 """
 
 from __future__ import with_statement
 
 import os
 import sys
 import logging
 import traceback
 
 from shutil import rmtree
 from time import mktime
 
 from os.path import dirname as dn
 from os.path import join as jn
 
-#to get the rhodecode import
+# Add location of top level folder to sys.path
 project_path = dn(dn(dn(dn(os.path.realpath(__file__)))))
 sys.path.append(project_path)
 
 from rhodecode.config.conf import INDEX_EXTENSIONS
 from rhodecode.model.scm import ScmModel
 from rhodecode.model.db import Repository
 from rhodecode.lib.utils2 import safe_unicode, safe_str
 from rhodecode.lib.indexers import SCHEMA, IDX_NAME, CHGSETS_SCHEMA, \
     CHGSET_IDX_NAME
 
 from rhodecode.lib.vcs.exceptions import ChangesetError, RepositoryError, \
     NodeDoesNotExistError
 
 from whoosh.index import create_in, open_dir, exists_in
 from whoosh.query import *
 from whoosh.qparser import QueryParser
 
 log = logging.getLogger('whoosh_indexer')
 
 
 class WhooshIndexingDaemon(object):
     """
     Daemon for atomic indexing jobs
     """
 
     def __init__(self, indexname=IDX_NAME, index_location=None,
                  repo_location=None, sa=None, repo_list=None,
                  repo_update_list=None):
         self.indexname = indexname
 
         self.index_location = index_location
         if not index_location:
             raise Exception('You have to provide index location')
 
         self.repo_location = repo_location
         if not repo_location:
             raise Exception('You have to provide repositories location')
 
         self.repo_paths = ScmModel(sa).repo_scan(self.repo_location)
 
         # filter repo list
         if repo_list:
             # fix non-ascii repo names to unicode
             repo_list = map(safe_unicode, repo_list)
             self.filtered_repo_paths = {}
             for repo_name, repo in self.repo_paths.items():
                 if repo_name in repo_list:
                     self.filtered_repo_paths[repo_name] = repo
 
             self.repo_paths = self.filtered_repo_paths
 
         # filter update repo list
         self.filtered_repo_update_paths = {}
         if repo_update_list:
             self.filtered_repo_update_paths = {}
             for repo_name, repo in self.repo_paths.items():
                 if repo_name in repo_update_list:
                     self.filtered_repo_update_paths[repo_name] = repo
             self.repo_paths = self.filtered_repo_update_paths
 
         self.initial = True
         if not os.path.isdir(self.index_location):
             os.makedirs(self.index_location)
             log.info('Cannot run incremental index since it does not '
                      'yet exist; running full build')
         elif not exists_in(self.index_location, IDX_NAME):
             log.info('Running full index build as the file content '
                      'index does not exist')
         elif not exists_in(self.index_location, CHGSET_IDX_NAME):
             log.info('Running full index build as the changeset '
                      'index does not exist')
         else:
             self.initial = False
 
     def _get_index_revision(self, repo):
         db_repo = Repository.get_by_repo_name(repo.name_unicode)
         landing_rev = 'tip'
         if db_repo:
             _rev_type, _rev = db_repo.landing_rev
             landing_rev = _rev
         return landing_rev
 
     def _get_index_changeset(self, repo, index_rev=None):
         if not index_rev:
             index_rev = self._get_index_revision(repo)
         cs = repo.get_changeset(index_rev)
         return cs
 
     def get_paths(self, repo):
129 | """ |
|
129 | """ | |
130 | recursive walk in root dir and return a set of all path in that dir |
|
130 | recursive walk in root dir and return a set of all path in that dir | |
131 | based on repository walk function |
|
131 | based on repository walk function | |
132 | """ |
|
132 | """ | |
         index_paths_ = set()
         try:
             cs = self._get_index_changeset(repo)
             for _topnode, _dirs, files in cs.walk('/'):
                 for f in files:
                     index_paths_.add(jn(safe_str(repo.path), safe_str(f.path)))
 
         except RepositoryError:
             log.debug(traceback.format_exc())
             pass
         return index_paths_
 
     def get_node(self, repo, path, index_rev=None):
146 | """ |
|
146 | """ | |
147 | gets a filenode based on the given full path. It operates on strings |

147 | gets a filenode based on the given full path. It operates on strings | |
148 | for hg/git compatibility. |

148 | for hg/git compatibility. | |
149 |
|
149 | |||
150 | :param repo: scm repo instance |
|
150 | :param repo: scm repo instance | |
151 | :param path: full path including root location |
|
151 | :param path: full path including root location | |
152 | :return: FileNode |
|
152 | :return: FileNode | |
153 | """ |
|
153 | """ | |
154 | root_path = safe_str(repo.path) + '/' |

154 | root_path = safe_str(repo.path) + '/' | |
155 | parts = safe_str(path).partition(root_path) |
|
155 | parts = safe_str(path).partition(root_path) | |
156 | cs = self._get_index_changeset(repo, index_rev=index_rev) |
|
156 | cs = self._get_index_changeset(repo, index_rev=index_rev) | |
157 | node = cs.get_node(parts[-1]) |
|
157 | node = cs.get_node(parts[-1]) | |
158 | return node |
|
158 | return node | |
159 |
|
159 | |||
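
A rough usage sketch of get_node (the daemon instance and the paths below are illustrative assumptions, not part of the module): the full path is partitioned on the repository root and the remainder is resolved in the index changeset.

    # hypothetical example; 'daemon' is a WhooshIndexingDaemon instance and
    # 'repo' a vcs backend repository, both assumed to exist already
    full_path = '/srv/repos/myrepo/docs/index.rst'   # assumed location
    node = daemon.get_node(repo, full_path)          # resolves 'docs/index.rst'
    print node.path, daemon.get_node_mtime(node)     # path and last-change mtime
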
160 | def get_node_mtime(self, node): |
|
160 | def get_node_mtime(self, node): | |
161 | return mktime(node.last_changeset.date.timetuple()) |
|
161 | return mktime(node.last_changeset.date.timetuple()) | |
162 |
|
162 | |||
163 | def add_doc(self, writer, path, repo, repo_name, index_rev=None): |
|
163 | def add_doc(self, writer, path, repo, repo_name, index_rev=None): | |
164 | """ |
|
164 | """ | |
165 | Add a doc to the writer; this function itself fetches the data |

165 | Add a doc to the writer; this function itself fetches the data | |
166 | from the vcs backend instance |

166 | from the vcs backend instance | |
167 | """ |
|
167 | """ | |
168 |
|
168 | |||
169 | node = self.get_node(repo, path, index_rev) |
|
169 | node = self.get_node(repo, path, index_rev) | |
170 | indexed = indexed_w_content = 0 |
|
170 | indexed = indexed_w_content = 0 | |
171 | # we just index the content of chosen files, and skip binary files |
|
171 | # we just index the content of chosen files, and skip binary files | |
172 | if node.extension in INDEX_EXTENSIONS and not node.is_binary: |
|
172 | if node.extension in INDEX_EXTENSIONS and not node.is_binary: | |
173 | u_content = node.content |
|
173 | u_content = node.content | |
174 | if not isinstance(u_content, unicode): |
|
174 | if not isinstance(u_content, unicode): | |
175 | log.warning(' >> %s Could not get this content as unicode; ' |

175 | log.warning(' >> %s Could not get this content as unicode; ' | |
176 | 'replacing with empty content' % path) |
|
176 | 'replacing with empty content' % path) | |
177 | u_content = u'' |
|
177 | u_content = u'' | |
178 | else: |
|
178 | else: | |
179 | log.debug(' >> %s [WITH CONTENT]' % path) |
|
179 | log.debug(' >> %s [WITH CONTENT]' % path) | |
180 | indexed_w_content += 1 |
|
180 | indexed_w_content += 1 | |
181 |
|
181 | |||
182 | else: |
|
182 | else: | |
183 | log.debug(' >> %s' % path) |
|
183 | log.debug(' >> %s' % path) | |
184 | # just index the file name without its content |

184 | # just index the file name without its content | |
185 | u_content = u'' |
|
185 | u_content = u'' | |
186 | indexed += 1 |
|
186 | indexed += 1 | |
187 |
|
187 | |||
188 | p = safe_unicode(path) |
|
188 | p = safe_unicode(path) | |
189 | writer.add_document( |
|
189 | writer.add_document( | |
190 | fileid=p, |
|
190 | fileid=p, | |
191 | owner=unicode(repo.contact), |
|
191 | owner=unicode(repo.contact), | |
192 | repository=safe_unicode(repo_name), |
|
192 | repository=safe_unicode(repo_name), | |
193 | path=p, |
|
193 | path=p, | |
194 | content=u_content, |
|
194 | content=u_content, | |
195 | modtime=self.get_node_mtime(node), |
|
195 | modtime=self.get_node_mtime(node), | |
196 | extension=node.extension |
|
196 | extension=node.extension | |
197 | ) |
|
197 | ) | |
198 | return indexed, indexed_w_content |
|
198 | return indexed, indexed_w_content | |
199 |
|
199 | |||
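
A minimal sketch of feeding add_doc from a Whoosh writer (the index path and repo name are assumptions for illustration; IDX_NAME, 'daemon', 'repo' and 'full_path' come from this module and the sketch above):

    from whoosh.index import open_dir

    idx = open_dir('/srv/index', indexname=IDX_NAME)   # assumed index location
    writer = idx.writer()
    # returns (indexed, indexed_with_content) counters for the single path
    i, iwc = daemon.add_doc(writer, full_path, repo, 'myrepo')
    writer.commit()
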
200 | def index_changesets(self, writer, repo_name, repo, start_rev=None): |
|
200 | def index_changesets(self, writer, repo_name, repo, start_rev=None): | |
201 | """ |
|
201 | """ | |
202 | Add all changesets in the vcs repo starting at start_rev |

202 | Add all changesets in the vcs repo starting at start_rev | |
203 | to the index writer |
|
203 | to the index writer | |
204 |
|
204 | |||
205 | :param writer: the whoosh index writer to add to |
|
205 | :param writer: the whoosh index writer to add to | |
206 | :param repo_name: name of the repository from which the |

206 | :param repo_name: name of the repository from which the | |
207 | changesets originate, including the repository group |

207 | changesets originate, including the repository group | |
208 | :param repo: the vcs repository instance to index changesets for, |
|
208 | :param repo: the vcs repository instance to index changesets for, | |
209 | the presumption is the repo has changesets to index |
|
209 | the presumption is the repo has changesets to index | |
210 | :param start_rev: the full sha id to start indexing from; |

210 | :param start_rev: the full sha id to start indexing from; | |
211 | if start_rev is None then index from the first changeset in |

211 | if start_rev is None then index from the first changeset in | |
212 | the repo |

212 | the repo | |
213 | """ |
|
213 | """ | |
214 |
|
214 | |||
215 | if start_rev is None: |
|
215 | if start_rev is None: | |
216 | start_rev = repo[0].raw_id |
|
216 | start_rev = repo[0].raw_id | |
217 |
|
217 | |||
218 | log.debug('indexing changesets in %s starting at rev: %s' % |
|
218 | log.debug('indexing changesets in %s starting at rev: %s' % | |
219 | (repo_name, start_rev)) |
|
219 | (repo_name, start_rev)) | |
220 |
|
220 | |||
221 | indexed = 0 |
|
221 | indexed = 0 | |
222 | cs_iter = repo.get_changesets(start=start_rev) |
|
222 | cs_iter = repo.get_changesets(start=start_rev) | |
223 | total = len(cs_iter) |
|
223 | total = len(cs_iter) | |
224 | for cs in cs_iter: |
|
224 | for cs in cs_iter: | |
225 | log.debug(' >> %s/%s' % (cs, total)) |
|
225 | log.debug(' >> %s/%s' % (cs, total)) | |
226 | writer.add_document( |
|
226 | writer.add_document( | |
227 | raw_id=unicode(cs.raw_id), |
|
227 | raw_id=unicode(cs.raw_id), | |
228 | owner=unicode(repo.contact), |
|
228 | owner=unicode(repo.contact), | |
229 | date=cs._timestamp, |
|
229 | date=cs._timestamp, | |
230 | repository=safe_unicode(repo_name), |
|
230 | repository=safe_unicode(repo_name), | |
231 | author=cs.author, |
|
231 | author=cs.author, | |
232 | message=cs.message, |
|
232 | message=cs.message, | |
233 | last=cs.last, |
|
233 | last=cs.last, | |
234 | added=u' '.join([safe_unicode(node.path) for node in cs.added]).lower(), |
|
234 | added=u' '.join([safe_unicode(node.path) for node in cs.added]).lower(), | |
235 | removed=u' '.join([safe_unicode(node.path) for node in cs.removed]).lower(), |
|
235 | removed=u' '.join([safe_unicode(node.path) for node in cs.removed]).lower(), | |
236 | changed=u' '.join([safe_unicode(node.path) for node in cs.changed]).lower(), |
|
236 | changed=u' '.join([safe_unicode(node.path) for node in cs.changed]).lower(), | |
237 | parents=u' '.join([p.raw_id for p in cs.parents]), |

237 | parents=u' '.join([p.raw_id for p in cs.parents]), | |
238 | ) |
|
238 | ) | |
239 | indexed += 1 |
|
239 | indexed += 1 | |
240 |
|
240 | |||
241 | log.debug('indexed %d changesets for repo %s' % (indexed, repo_name)) |
|
241 | log.debug('indexed %d changesets for repo %s' % (indexed, repo_name)) | |
242 | return indexed |
|
242 | return indexed | |
243 |
|
243 | |||
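
Once index_changesets has run, the changeset index can be queried with the standard Whoosh API; a small hedged sketch (the index location and search term are assumptions, CHGSET_IDX_NAME and CHGSETS_SCHEMA come from this module's imports):

    from whoosh.index import open_dir
    from whoosh.qparser import QueryParser

    idx = open_dir('/srv/index', indexname=CHGSET_IDX_NAME)
    with idx.searcher() as searcher:
        q = QueryParser('message', schema=CHGSETS_SCHEMA).parse(u'fix')
        for hit in searcher.search(q):
            print hit['raw_id']   # full sha of each matching changeset
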
244 | def index_files(self, file_idx_writer, repo_name, repo): |
|
244 | def index_files(self, file_idx_writer, repo_name, repo): | |
245 | """ |
|
245 | """ | |
246 | Index files for given repo_name |
|
246 | Index files for given repo_name | |
247 |
|
247 | |||
248 | :param file_idx_writer: the whoosh index writer to add to |
|
248 | :param file_idx_writer: the whoosh index writer to add to | |
249 | :param repo_name: name of the repository we're indexing |
|
249 | :param repo_name: name of the repository we're indexing | |
250 | :param repo: instance of vcs repo |
|
250 | :param repo: instance of vcs repo | |
251 | """ |
|
251 | """ | |
252 | i_cnt = iwc_cnt = 0 |
|
252 | i_cnt = iwc_cnt = 0 | |
253 | index_rev = self._get_index_revision(repo) |

253 | index_rev = self._get_index_revision(repo) | |
254 | log.debug('building index for %s @revision:%s' % (repo.path, |

254 | log.debug('building index for %s @revision:%s' % (repo.path, | |
255 | index_rev)) |

255 | index_rev)) | |
256 | for idx_path in self.get_paths(repo): |
|
256 | for idx_path in self.get_paths(repo): | |
257 | i, iwc = self.add_doc(file_idx_writer, idx_path, repo, repo_name, index_rev) |
|
257 | i, iwc = self.add_doc(file_idx_writer, idx_path, repo, repo_name, index_rev) | |
258 | i_cnt += i |
|
258 | i_cnt += i | |
259 | iwc_cnt += iwc |
|
259 | iwc_cnt += iwc | |
260 |
|
260 | |||
261 | log.debug('added %s files (%s with content) for repo %s' % |

261 | log.debug('added %s files (%s with content) for repo %s' % | |
262 | (i_cnt + iwc_cnt, iwc_cnt, repo.path)) |
|
262 | (i_cnt + iwc_cnt, iwc_cnt, repo.path)) | |
263 | return i_cnt, iwc_cnt |
|
263 | return i_cnt, iwc_cnt | |
264 |
|
264 | |||
265 | def update_changeset_index(self): |
|
265 | def update_changeset_index(self): | |
266 | idx = open_dir(self.index_location, indexname=CHGSET_IDX_NAME) |
|
266 | idx = open_dir(self.index_location, indexname=CHGSET_IDX_NAME) | |
267 |
|
267 | |||
268 | with idx.searcher() as searcher: |
|
268 | with idx.searcher() as searcher: | |
269 | writer = idx.writer() |
|
269 | writer = idx.writer() | |
270 | writer_is_dirty = False |
|
270 | writer_is_dirty = False | |
271 | try: |
|
271 | try: | |
272 | indexed_total = 0 |
|
272 | indexed_total = 0 | |
273 | repo_name = None |
|
273 | repo_name = None | |
274 | for repo_name, repo in self.repo_paths.items(): |
|
274 | for repo_name, repo in self.repo_paths.items(): | |
275 | # skip indexing if there aren't any revs in the repo |
|
275 | # skip indexing if there aren't any revs in the repo | |
276 | num_of_revs = len(repo) |
|
276 | num_of_revs = len(repo) | |
277 | if num_of_revs < 1: |
|
277 | if num_of_revs < 1: | |
278 | continue |
|
278 | continue | |
279 |
|
279 | |||
280 | qp = QueryParser('repository', schema=CHGSETS_SCHEMA) |
|
280 | qp = QueryParser('repository', schema=CHGSETS_SCHEMA) | |
281 | q = qp.parse(u"last:t AND %s" % repo_name) |
|
281 | q = qp.parse(u"last:t AND %s" % repo_name) | |
282 |
|
282 | |||
283 | results = searcher.search(q) |
|
283 | results = searcher.search(q) | |
284 |
|
284 | |||
285 | # default to scanning the entire repo |
|
285 | # default to scanning the entire repo | |
286 | last_rev = 0 |
|
286 | last_rev = 0 | |
287 | start_id = None |
|
287 | start_id = None | |
288 |
|
288 | |||
289 | if len(results) > 0: |
|
289 | if len(results) > 0: | |
290 | # assuming that there is only one result, if not this |
|
290 | # assuming that there is only one result, if not this | |
291 | # may require a full re-index. |
|
291 | # may require a full re-index. | |
292 | start_id = results[0]['raw_id'] |
|
292 | start_id = results[0]['raw_id'] | |
293 | last_rev = repo.get_changeset(revision=start_id).revision |
|
293 | last_rev = repo.get_changeset(revision=start_id).revision | |
294 |
|
294 | |||
295 | # there are new changesets to index or a new repo to index |
|
295 | # there are new changesets to index or a new repo to index | |
296 | if last_rev == 0 or num_of_revs > last_rev + 1: |
|
296 | if last_rev == 0 or num_of_revs > last_rev + 1: | |
297 | # delete the docs in the index for the previous |
|
297 | # delete the docs in the index for the previous | |
298 | # last changeset(s) |
|
298 | # last changeset(s) | |
299 | for hit in results: |
|
299 | for hit in results: | |
300 | q = qp.parse(u"last:t AND %s AND raw_id:%s" % |
|
300 | q = qp.parse(u"last:t AND %s AND raw_id:%s" % | |
301 | (repo_name, hit['raw_id'])) |
|
301 | (repo_name, hit['raw_id'])) | |
302 | writer.delete_by_query(q) |
|
302 | writer.delete_by_query(q) | |
303 |
|
303 | |||
304 | # index from the previous last changeset + all new ones |
|
304 | # index from the previous last changeset + all new ones | |
305 | indexed_total += self.index_changesets(writer, |
|
305 | indexed_total += self.index_changesets(writer, | |
306 | repo_name, repo, start_id) |
|
306 | repo_name, repo, start_id) | |
307 | writer_is_dirty = True |
|
307 | writer_is_dirty = True | |
308 | log.debug('indexed %s changesets for repo %s' % ( |
|
308 | log.debug('indexed %s changesets for repo %s' % ( | |
309 | indexed_total, repo_name) |
|
309 | indexed_total, repo_name) | |
310 | ) |
|
310 | ) | |
311 | finally: |
|
311 | finally: | |
312 | if writer_is_dirty: |
|
312 | if writer_is_dirty: | |
313 | log.debug('>> COMMITTING CHANGES TO CHANGESET INDEX <<') |

313 | log.debug('>> COMMITTING CHANGES TO CHANGESET INDEX <<') | |
314 | writer.commit(merge=True) |
|
314 | writer.commit(merge=True) | |
315 | log.debug('>>> FINISHED REBUILDING CHANGESET INDEX <<<') |
|
315 | log.debug('>>> FINISHED REBUILDING CHANGESET INDEX <<<') | |
316 | else: |
|
316 | else: | |
317 | log.debug('>> NOTHING TO COMMIT TO CHANGESET INDEX <<') |

317 | log.debug('>> NOTHING TO COMMIT TO CHANGESET INDEX <<') | |
318 |
|
318 | |||
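
The incremental logic above hinges on the 'last' flag: the newest indexed changeset of each repository is stored with last=t, so looking it up yields the point to resume from. A condensed sketch of that lookup (the index location and repo name 'myrepo' are assumed placeholders):

    from whoosh.index import open_dir
    from whoosh.qparser import QueryParser

    idx = open_dir('/srv/index', indexname=CHGSET_IDX_NAME)
    with idx.searcher() as searcher:
        qp = QueryParser('repository', schema=CHGSETS_SCHEMA)
        q = qp.parse(u"last:t AND myrepo")    # find the marker document
        results = searcher.search(q)
        if len(results) > 0:
            start_id = results[0]['raw_id']   # resume indexing after this sha
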
319 | def update_file_index(self): |
|
319 | def update_file_index(self): | |
320 | log.debug((u'STARTING INCREMENTAL INDEXING UPDATE FOR EXTENSIONS %s ' |
|
320 | log.debug((u'STARTING INCREMENTAL INDEXING UPDATE FOR EXTENSIONS %s ' | |
321 | 'AND REPOS %s') % (INDEX_EXTENSIONS, self.repo_paths.keys())) |
|
321 | 'AND REPOS %s') % (INDEX_EXTENSIONS, self.repo_paths.keys())) | |
322 |
|
322 | |||
323 | idx = open_dir(self.index_location, indexname=self.indexname) |
|
323 | idx = open_dir(self.index_location, indexname=self.indexname) | |
324 | # The set of all paths in the index |
|
324 | # The set of all paths in the index | |
325 | indexed_paths = set() |
|
325 | indexed_paths = set() | |
326 | # The set of all paths we need to re-index |
|
326 | # The set of all paths we need to re-index | |
327 | to_index = set() |
|
327 | to_index = set() | |
328 |
|
328 | |||
329 | writer = idx.writer() |
|
329 | writer = idx.writer() | |
330 | writer_is_dirty = False |
|
330 | writer_is_dirty = False | |
331 | try: |
|
331 | try: | |
332 | with idx.reader() as reader: |
|
332 | with idx.reader() as reader: | |
333 |
|
333 | |||
334 | # Loop over the stored fields in the index |
|
334 | # Loop over the stored fields in the index | |
335 | for fields in reader.all_stored_fields(): |
|
335 | for fields in reader.all_stored_fields(): | |
336 | indexed_path = fields['path'] |
|
336 | indexed_path = fields['path'] | |
337 | indexed_repo_path = fields['repository'] |
|
337 | indexed_repo_path = fields['repository'] | |
338 | indexed_paths.add(indexed_path) |
|
338 | indexed_paths.add(indexed_path) | |
339 |
|
339 | |||
340 | if indexed_repo_path not in self.filtered_repo_update_paths: |

340 | if indexed_repo_path not in self.filtered_repo_update_paths: | |
341 | continue |
|
341 | continue | |
342 |
|
342 | |||
343 | repo = self.repo_paths[indexed_repo_path] |
|
343 | repo = self.repo_paths[indexed_repo_path] | |
344 |
|
344 | |||
345 | try: |
|
345 | try: | |
346 | node = self.get_node(repo, indexed_path) |
|
346 | node = self.get_node(repo, indexed_path) | |
347 | # Check if this file was changed since it was indexed |
|
347 | # Check if this file was changed since it was indexed | |
348 | indexed_time = fields['modtime'] |
|
348 | indexed_time = fields['modtime'] | |
349 | mtime = self.get_node_mtime(node) |
|
349 | mtime = self.get_node_mtime(node) | |
350 | if mtime > indexed_time: |
|
350 | if mtime > indexed_time: | |
351 | # The file has changed, delete it and add it to |
|
351 | # The file has changed, delete it and add it to | |
352 | # the list of files to reindex |
|
352 | # the list of files to reindex | |
353 | log.debug( |
|
353 | log.debug( | |
354 | 'adding to reindex list %s mtime: %s vs %s' % ( |
|
354 | 'adding to reindex list %s mtime: %s vs %s' % ( | |
355 | indexed_path, mtime, indexed_time) |
|
355 | indexed_path, mtime, indexed_time) | |
356 | ) |
|
356 | ) | |
357 | writer.delete_by_term('fileid', indexed_path) |
|
357 | writer.delete_by_term('fileid', indexed_path) | |
358 | writer_is_dirty = True |
|
358 | writer_is_dirty = True | |
359 |
|
359 | |||
360 | to_index.add(indexed_path) |
|
360 | to_index.add(indexed_path) | |
361 | except (ChangesetError, NodeDoesNotExistError): |
|
361 | except (ChangesetError, NodeDoesNotExistError): | |
362 | # This file was deleted since it was indexed |
|
362 | # This file was deleted since it was indexed | |
363 | log.debug('removing from index %s' % indexed_path) |
|
363 | log.debug('removing from index %s' % indexed_path) | |
364 | writer.delete_by_term('path', indexed_path) |
|
364 | writer.delete_by_term('path', indexed_path) | |
365 | writer_is_dirty = True |
|
365 | writer_is_dirty = True | |
366 |
|
366 | |||
367 | # Loop over the files in the filesystem |
|
367 | # Loop over the files in the filesystem | |
368 | # get_paths(repo) gathers the filenames of the |

368 | # get_paths(repo) gathers the filenames of the | |
369 | # documents to be indexed |
|
369 | # documents to be indexed | |
370 | ri_cnt_total = 0 # indexed |
|
370 | ri_cnt_total = 0 # indexed | |
371 | riwc_cnt_total = 0 # indexed with content |
|
371 | riwc_cnt_total = 0 # indexed with content | |
372 | for repo_name, repo in self.repo_paths.items(): |
|
372 | for repo_name, repo in self.repo_paths.items(): | |
373 | # skip indexing if there aren't any revisions |
|
373 | # skip indexing if there aren't any revisions | |
374 | if len(repo) < 1: |
|
374 | if len(repo) < 1: | |
375 | continue |
|
375 | continue | |
376 | ri_cnt = 0 # indexed |
|
376 | ri_cnt = 0 # indexed | |
377 | riwc_cnt = 0 # indexed with content |
|
377 | riwc_cnt = 0 # indexed with content | |
378 | for path in self.get_paths(repo): |
|
378 | for path in self.get_paths(repo): | |
379 | path = safe_unicode(path) |
|
379 | path = safe_unicode(path) | |
380 | if path in to_index or path not in indexed_paths: |
|
380 | if path in to_index or path not in indexed_paths: | |
381 |
|
381 | |||
382 | # This is either a file that's changed, or a new file |
|
382 | # This is either a file that's changed, or a new file | |
383 | # that wasn't indexed before. So index it! |
|
383 | # that wasn't indexed before. So index it! | |
384 | i, iwc = self.add_doc(writer, path, repo, repo_name) |
|
384 | i, iwc = self.add_doc(writer, path, repo, repo_name) | |
385 | writer_is_dirty = True |
|
385 | writer_is_dirty = True | |
386 | log.debug('re-indexing %s' % path) |

386 | log.debug('re-indexing %s' % path) | |
387 | ri_cnt += i |
|
387 | ri_cnt += i | |
388 | ri_cnt_total += 1 |
|
388 | ri_cnt_total += 1 | |
389 | riwc_cnt += iwc |
|
389 | riwc_cnt += iwc | |
390 | riwc_cnt_total += iwc |
|
390 | riwc_cnt_total += iwc | |
391 | log.debug('added %s files (%s with content) for repo %s' % ( |

391 | log.debug('added %s files (%s with content) for repo %s' % ( | |
392 | ri_cnt + riwc_cnt, riwc_cnt, repo.path) |
|
392 | ri_cnt + riwc_cnt, riwc_cnt, repo.path) | |
393 | ) |
|
393 | ) | |
394 | log.debug('indexed %s files in total and %s with content' % ( |
|
394 | log.debug('indexed %s files in total and %s with content' % ( | |
395 | ri_cnt_total, riwc_cnt_total) |
|
395 | ri_cnt_total, riwc_cnt_total) | |
396 | ) |
|
396 | ) | |
397 | finally: |
|
397 | finally: | |
398 | if writer_is_dirty: |
|
398 | if writer_is_dirty: | |
399 | log.debug('>> COMMITING CHANGES TO FILE INDEX <<') |
|
399 | log.debug('>> COMMITING CHANGES TO FILE INDEX <<') | |
400 | writer.commit(merge=True) |
|
400 | writer.commit(merge=True) | |
401 | log.debug('>>> FINISHED REBUILDING FILE INDEX <<<') |
|
401 | log.debug('>>> FINISHED REBUILDING FILE INDEX <<<') | |
402 | else: |
|
402 | else: | |
403 | log.debug('>> NOTHING TO COMMIT TO FILE INDEX <<') |
|
403 | log.debug('>> NOTHING TO COMMIT TO FILE INDEX <<') | |
404 | writer.cancel() |
|
404 | writer.cancel() | |
405 |
|
405 | |||
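
update_file_index drives its change detection off the stored fields ('path', 'repository', 'modtime') of each indexed document; those can be inspected directly with a reader, e.g. (index location assumed, IDX_NAME from this module):

    from whoosh.index import open_dir

    idx = open_dir('/srv/index', indexname=IDX_NAME)
    with idx.reader() as reader:
        for fields in reader.all_stored_fields():
            print fields['path'], fields['modtime']
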
406 | def build_indexes(self): |
|
406 | def build_indexes(self): | |
407 | if os.path.exists(self.index_location): |
|
407 | if os.path.exists(self.index_location): | |
408 | log.debug('removing previous index') |
|
408 | log.debug('removing previous index') | |
409 | rmtree(self.index_location) |
|
409 | rmtree(self.index_location) | |
410 |
|
410 | |||
411 | if not os.path.exists(self.index_location): |
|
411 | if not os.path.exists(self.index_location): | |
412 | os.mkdir(self.index_location) |
|
412 | os.mkdir(self.index_location) | |
413 |
|
413 | |||
414 | chgset_idx = create_in(self.index_location, CHGSETS_SCHEMA, |
|
414 | chgset_idx = create_in(self.index_location, CHGSETS_SCHEMA, | |
415 | indexname=CHGSET_IDX_NAME) |
|
415 | indexname=CHGSET_IDX_NAME) | |
416 | chgset_idx_writer = chgset_idx.writer() |
|
416 | chgset_idx_writer = chgset_idx.writer() | |
417 |
|
417 | |||
418 | file_idx = create_in(self.index_location, SCHEMA, indexname=IDX_NAME) |
|
418 | file_idx = create_in(self.index_location, SCHEMA, indexname=IDX_NAME) | |
419 | file_idx_writer = file_idx.writer() |
|
419 | file_idx_writer = file_idx.writer() | |
420 | log.debug('BUILDING INDEX FOR EXTENSIONS %s ' |
|
420 | log.debug('BUILDING INDEX FOR EXTENSIONS %s ' | |
421 | 'AND REPOS %s' % (INDEX_EXTENSIONS, self.repo_paths.keys())) |
|
421 | 'AND REPOS %s' % (INDEX_EXTENSIONS, self.repo_paths.keys())) | |
422 |
|
422 | |||
423 | for repo_name, repo in self.repo_paths.items(): |
|
423 | for repo_name, repo in self.repo_paths.items(): | |
424 | # skip indexing if there aren't any revisions |
|
424 | # skip indexing if there aren't any revisions | |
425 | if len(repo) < 1: |
|
425 | if len(repo) < 1: | |
426 | continue |
|
426 | continue | |
427 |
|
427 | |||
428 | self.index_files(file_idx_writer, repo_name, repo) |
|
428 | self.index_files(file_idx_writer, repo_name, repo) | |
429 | self.index_changesets(chgset_idx_writer, repo_name, repo) |
|
429 | self.index_changesets(chgset_idx_writer, repo_name, repo) | |
430 |
|
430 | |||
431 | log.debug('>> COMMITTING CHANGES <<') |

431 | log.debug('>> COMMITTING CHANGES <<') | |
432 | file_idx_writer.commit(merge=True) |
|
432 | file_idx_writer.commit(merge=True) | |
433 | chgset_idx_writer.commit(merge=True) |
|
433 | chgset_idx_writer.commit(merge=True) | |
434 | log.debug('>>> FINISHED BUILDING INDEX <<<') |
|
434 | log.debug('>>> FINISHED BUILDING INDEX <<<') | |
435 |
|
435 | |||
436 | def update_indexes(self): |
|
436 | def update_indexes(self): | |
437 | self.update_file_index() |
|
437 | self.update_file_index() | |
438 | self.update_changeset_index() |
|
438 | self.update_changeset_index() | |
439 |
|
439 | |||
440 | def run(self, full_index=False): |
|
440 | def run(self, full_index=False): | |
441 | """Run daemon""" |
|
441 | """Run daemon""" | |
442 | if full_index or self.initial: |
|
442 | if full_index or self.initial: | |
443 | self.build_indexes() |
|
443 | self.build_indexes() | |
444 | else: |
|
444 | else: | |
445 | self.update_indexes() |
|
445 | self.update_indexes() |
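
Putting the daemon together, mirroring how the make-index command below invokes it (the filesystem paths here are assumptions):

    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon

    daemon = WhooshIndexingDaemon(index_location='/srv/index',
                                  repo_location='/srv/repos')
    # incremental update; falls back to a full build on the first run
    daemon.run(full_index=False)
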
@@ -1,85 +1,85 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | rhodecode.lib.paster_commands.cache_keys |
|
15 | rhodecode.lib.paster_commands.cache_keys | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | cleanup-keys paster command for RhodeCode |
|
18 | cleanup-keys paster command for RhodeCode | |
19 |
|
19 | |||
20 |
|
20 | |||
21 | :created_on: Mar 27, 2013 |

21 | :created_on: Mar 27, 2013 | |
22 | :author: marcink |
|
22 | :author: marcink | |
23 | :copyright: (c) 2013 RhodeCode GmbH. |
|
23 | :copyright: (c) 2013 RhodeCode GmbH. | |
24 | :license: GPLv3, see LICENSE for more details. |
|
24 | :license: GPLv3, see LICENSE for more details. | |
25 | """ |
|
25 | """ | |
26 |
|
26 | |||
27 | from __future__ import with_statement |
|
27 | from __future__ import with_statement | |
28 |
|
28 | |||
29 | import os |
|
29 | import os | |
30 | import sys |
|
30 | import sys | |
31 | import logging |
|
31 | import logging | |
32 |
|
32 | |||
33 | from rhodecode.model.meta import Session |
|
33 | from rhodecode.model.meta import Session | |
34 | from rhodecode.lib.utils import BasePasterCommand |
|
34 | from rhodecode.lib.utils import BasePasterCommand | |
35 | from rhodecode.model.db import CacheInvalidation |
|
35 | from rhodecode.model.db import CacheInvalidation | |
36 |
|
36 | |||
37 | # fix rhodecode import |
|
37 | # Add location of top level folder to sys.path | |
38 | from os.path import dirname as dn |
|
38 | from os.path import dirname as dn | |
39 | rc_path = dn(dn(dn(os.path.realpath(__file__)))) |
|
39 | rc_path = dn(dn(dn(os.path.realpath(__file__)))) | |
40 | sys.path.append(rc_path) |
|
40 | sys.path.append(rc_path) | |
41 |
|
41 | |||
42 | log = logging.getLogger(__name__) |
|
42 | log = logging.getLogger(__name__) | |
43 |
|
43 | |||
44 |
|
44 | |||
45 | class Command(BasePasterCommand): |
|
45 | class Command(BasePasterCommand): | |
46 |
|
46 | |||
47 | max_args = 1 |
|
47 | max_args = 1 | |
48 | min_args = 1 |
|
48 | min_args = 1 | |
49 |
|
49 | |||
50 | usage = "CONFIG_FILE" |
|
50 | usage = "CONFIG_FILE" | |
51 | group_name = "RhodeCode" |
|
51 | group_name = "RhodeCode" | |
52 | takes_config_file = -1 |
|
52 | takes_config_file = -1 | |
53 | parser = BasePasterCommand.standard_parser(verbose=True) |
|
53 | parser = BasePasterCommand.standard_parser(verbose=True) | |
54 | summary = "Cache keys utils" |
|
54 | summary = "Cache keys utils" | |
55 |
|
55 | |||
56 | def command(self): |
|
56 | def command(self): | |
57 | #get SqlAlchemy session |
|
57 | #get SqlAlchemy session | |
58 | self._init_session() |
|
58 | self._init_session() | |
59 | _caches = CacheInvalidation.query().order_by(CacheInvalidation.cache_key).all() |
|
59 | _caches = CacheInvalidation.query().order_by(CacheInvalidation.cache_key).all() | |
60 | if self.options.show: |
|
60 | if self.options.show: | |
61 | for c_obj in _caches: |
|
61 | for c_obj in _caches: | |
62 | print 'key:%s active:%s' % (c_obj.cache_key, c_obj.cache_active) |
|
62 | print 'key:%s active:%s' % (c_obj.cache_key, c_obj.cache_active) | |
63 | elif self.options.cleanup: |
|
63 | elif self.options.cleanup: | |
64 | for c_obj in _caches: |
|
64 | for c_obj in _caches: | |
65 | Session().delete(c_obj) |
|
65 | Session().delete(c_obj) | |
66 | print 'removing key:%s' % (c_obj.cache_key) |
|
66 | print 'removing key:%s' % (c_obj.cache_key) | |
67 | Session().commit() |
|
67 | Session().commit() | |
68 | else: |
|
68 | else: | |
69 | print 'nothing done, exiting...' |

69 | print 'nothing done, exiting...' | |
70 | sys.exit(0) |
|
70 | sys.exit(0) | |
71 |
|
71 | |||
72 | def update_parser(self): |
|
72 | def update_parser(self): | |
73 | self.parser.add_option( |
|
73 | self.parser.add_option( | |
74 | '--show', |
|
74 | '--show', | |
75 | action='store_true', |
|
75 | action='store_true', | |
76 | dest='show', |
|
76 | dest='show', | |
77 | help=("show existing cache keys with together with status") |
|
77 | help=("show existing cache keys with together with status") | |
78 | ) |
|
78 | ) | |
79 |
|
79 | |||
80 | self.parser.add_option( |
|
80 | self.parser.add_option( | |
81 | '--cleanup', |
|
81 | '--cleanup', | |
82 | action="store_true", |
|
82 | action="store_true", | |
83 | dest="cleanup", |
|
83 | dest="cleanup", | |
84 | help="cleanup existing cache keys" |
|
84 | help="cleanup existing cache keys" | |
85 | ) |
|
85 | ) |
@@ -1,76 +1,76 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | rhodecode.lib.paster_commands.ishell |
|
15 | rhodecode.lib.paster_commands.ishell | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | interactive shell paster command for RhodeCode |
|
18 | interactive shell paster command for RhodeCode | |
19 |
|
19 | |||
20 | :created_on: Apr 4, 2013 |
|
20 | :created_on: Apr 4, 2013 | |
21 | :author: marcink |
|
21 | :author: marcink | |
22 | :copyright: (c) 2013 RhodeCode GmbH. |
|
22 | :copyright: (c) 2013 RhodeCode GmbH. | |
23 | :license: GPLv3, see LICENSE for more details. |
|
23 | :license: GPLv3, see LICENSE for more details. | |
24 | """ |
|
24 | """ | |
25 |
|
25 | |||
26 | from __future__ import with_statement |
|
26 | from __future__ import with_statement | |
27 |
|
27 | |||
28 | import os |
|
28 | import os | |
29 | import sys |
|
29 | import sys | |
30 | import logging |
|
30 | import logging | |
31 |
|
31 | |||
32 | from rhodecode.lib.utils import BasePasterCommand |
|
32 | from rhodecode.lib.utils import BasePasterCommand | |
33 |
|
33 | |||
34 | # fix rhodecode import |
|
34 | # Add location of top level folder to sys.path | |
35 | from os.path import dirname as dn |
|
35 | from os.path import dirname as dn | |
36 | rc_path = dn(dn(dn(os.path.realpath(__file__)))) |
|
36 | rc_path = dn(dn(dn(os.path.realpath(__file__)))) | |
37 | sys.path.append(rc_path) |
|
37 | sys.path.append(rc_path) | |
38 |
|
38 | |||
39 | log = logging.getLogger(__name__) |
|
39 | log = logging.getLogger(__name__) | |
40 |
|
40 | |||
41 |
|
41 | |||
42 | class Command(BasePasterCommand): |
|
42 | class Command(BasePasterCommand): | |
43 |
|
43 | |||
44 | max_args = 1 |
|
44 | max_args = 1 | |
45 | min_args = 1 |
|
45 | min_args = 1 | |
46 |
|
46 | |||
47 | usage = "CONFIG_FILE" |
|
47 | usage = "CONFIG_FILE" | |
48 | group_name = "RhodeCode" |
|
48 | group_name = "RhodeCode" | |
49 | takes_config_file = -1 |
|
49 | takes_config_file = -1 | |
50 | parser = BasePasterCommand.standard_parser(verbose=True) |
|
50 | parser = BasePasterCommand.standard_parser(verbose=True) | |
51 | summary = "Interactive shell" |
|
51 | summary = "Interactive shell" | |
52 |
|
52 | |||
53 | def command(self): |
|
53 | def command(self): | |
54 | #get SqlAlchemy session |
|
54 | #get SqlAlchemy session | |
55 | self._init_session() |
|
55 | self._init_session() | |
56 |
|
56 | |||
57 | # imports, used in ipython shell |
|
57 | # imports, used in ipython shell | |
58 | import os |
|
58 | import os | |
59 | import sys |
|
59 | import sys | |
60 | import time |
|
60 | import time | |
61 | import shutil |
|
61 | import shutil | |
62 | import datetime |
|
62 | import datetime | |
63 | from rhodecode.model.db import * |
|
63 | from rhodecode.model.db import * | |
64 |
|
64 | |||
65 | try: |
|
65 | try: | |
66 | from IPython import embed |
|
66 | from IPython import embed | |
67 | from IPython.config.loader import Config |
|
67 | from IPython.config.loader import Config | |
68 | cfg = Config() |
|
68 | cfg = Config() | |
69 | cfg.InteractiveShellEmbed.confirm_exit = False |
|
69 | cfg.InteractiveShellEmbed.confirm_exit = False | |
70 | embed(config=cfg, banner1="RhodeCode IShell.") |
|
70 | embed(config=cfg, banner1="RhodeCode IShell.") | |
71 | except ImportError: |
|
71 | except ImportError: | |
72 | print 'ipython installation required for ishell' |
|
72 | print 'ipython installation required for ishell' | |
73 | sys.exit(-1) |
|
73 | sys.exit(-1) | |
74 |
|
74 | |||
75 | def update_parser(self): |
|
75 | def update_parser(self): | |
76 | pass |
|
76 | pass |
@@ -1,113 +1,113 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | rhodecode.lib.paster_commands.make_index |
|
15 | rhodecode.lib.paster_commands.make_index | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | make-index paster command for RhodeCode |
|
18 | make-index paster command for RhodeCode | |
19 |
|
19 | |||
20 | :created_on: Aug 17, 2010 |
|
20 | :created_on: Aug 17, 2010 | |
21 | :author: marcink |
|
21 | :author: marcink | |
22 | :copyright: (c) 2013 RhodeCode GmbH. |
|
22 | :copyright: (c) 2013 RhodeCode GmbH. | |
23 | :license: GPLv3, see LICENSE for more details. |
|
23 | :license: GPLv3, see LICENSE for more details. | |
24 |
|
24 | |||
25 | """ |
|
25 | """ | |
26 |
|
26 | |||
27 | from __future__ import with_statement |
|
27 | from __future__ import with_statement | |
28 |
|
28 | |||
29 | import os |
|
29 | import os | |
30 | import sys |
|
30 | import sys | |
31 | import logging |
|
31 | import logging | |
32 |
|
32 | |||
33 | from rhodecode.lib.utils import BasePasterCommand |
|
33 | from rhodecode.lib.utils import BasePasterCommand | |
34 | from string import strip |
|
34 | from string import strip | |
35 | from shutil import rmtree |
|
35 | from shutil import rmtree | |
36 | from rhodecode.model.repo import RepoModel |
|
36 | from rhodecode.model.repo import RepoModel | |
37 | from rhodecode.lib.utils import BasePasterCommand, load_rcextensions |
|
37 | from rhodecode.lib.utils import BasePasterCommand, load_rcextensions | |
38 |
|
38 | |||
39 | # fix rhodecode import |
|
39 | # Add location of top level folder to sys.path | |
40 | from os.path import dirname as dn |
|
40 | from os.path import dirname as dn | |
41 | rc_path = dn(dn(dn(os.path.realpath(__file__)))) |
|
41 | rc_path = dn(dn(dn(os.path.realpath(__file__)))) | |
42 | sys.path.append(rc_path) |
|
42 | sys.path.append(rc_path) | |
43 |
|
43 | |||
44 |
|
44 | |||
45 | class Command(BasePasterCommand): |
|
45 | class Command(BasePasterCommand): | |
46 |
|
46 | |||
47 | max_args = 1 |
|
47 | max_args = 1 | |
48 | min_args = 1 |
|
48 | min_args = 1 | |
49 |
|
49 | |||
50 | usage = "CONFIG_FILE" |
|
50 | usage = "CONFIG_FILE" | |
51 | group_name = "RhodeCode" |
|
51 | group_name = "RhodeCode" | |
52 | takes_config_file = -1 |
|
52 | takes_config_file = -1 | |
53 | parser = BasePasterCommand.standard_parser(verbose=True) |
|
53 | parser = BasePasterCommand.standard_parser(verbose=True) | |
54 | summary = "Creates or updates full text search index" |
|
54 | summary = "Creates or updates full text search index" | |
55 |
|
55 | |||
56 | def command(self): |
|
56 | def command(self): | |
57 | logging.config.fileConfig(self.path_to_ini_file) |
|
57 | logging.config.fileConfig(self.path_to_ini_file) | |
58 | #get SqlAlchemy session |
|
58 | #get SqlAlchemy session | |
59 | self._init_session() |
|
59 | self._init_session() | |
60 | from pylons import config |
|
60 | from pylons import config | |
61 | index_location = config['index_dir'] |
|
61 | index_location = config['index_dir'] | |
62 | load_rcextensions(config['here']) |
|
62 | load_rcextensions(config['here']) | |
63 |
|
63 | |||
64 | repo_location = self.options.repo_location \ |
|
64 | repo_location = self.options.repo_location \ | |
65 | if self.options.repo_location else RepoModel().repos_path |
|
65 | if self.options.repo_location else RepoModel().repos_path | |
66 | repo_list = map(strip, self.options.repo_list.split(',')) \ |
|
66 | repo_list = map(strip, self.options.repo_list.split(',')) \ | |
67 | if self.options.repo_list else None |
|
67 | if self.options.repo_list else None | |
68 |
|
68 | |||
69 | repo_update_list = map(strip, self.options.repo_update_list.split(',')) \ |
|
69 | repo_update_list = map(strip, self.options.repo_update_list.split(',')) \ | |
70 | if self.options.repo_update_list else None |
|
70 | if self.options.repo_update_list else None | |
71 |
|
71 | |||
72 | #====================================================================== |
|
72 | #====================================================================== | |
73 | # WHOOSH DAEMON |
|
73 | # WHOOSH DAEMON | |
74 | #====================================================================== |
|
74 | #====================================================================== | |
75 | from rhodecode.lib.pidlock import LockHeld, DaemonLock |
|
75 | from rhodecode.lib.pidlock import LockHeld, DaemonLock | |
76 | from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon |
|
76 | from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon | |
77 | try: |
|
77 | try: | |
78 | l = DaemonLock(file_=os.path.join(dn(dn(index_location)), |
|
78 | l = DaemonLock(file_=os.path.join(dn(dn(index_location)), | |
79 | 'make_index.lock')) |
|
79 | 'make_index.lock')) | |
80 | WhooshIndexingDaemon(index_location=index_location, |
|
80 | WhooshIndexingDaemon(index_location=index_location, | |
81 | repo_location=repo_location, |
|
81 | repo_location=repo_location, | |
82 | repo_list=repo_list, |
|
82 | repo_list=repo_list, | |
83 | repo_update_list=repo_update_list)\ |
|
83 | repo_update_list=repo_update_list)\ | |
84 | .run(full_index=self.options.full_index) |
|
84 | .run(full_index=self.options.full_index) | |
85 | l.release() |
|
85 | l.release() | |
86 | except LockHeld: |
|
86 | except LockHeld: | |
87 | sys.exit(1) |
|
87 | sys.exit(1) | |
88 |
|
88 | |||
89 | def update_parser(self): |
|
89 | def update_parser(self): | |
90 | self.parser.add_option('--repo-location', |
|
90 | self.parser.add_option('--repo-location', | |
91 | action='store', |
|
91 | action='store', | |
92 | dest='repo_location', |
|
92 | dest='repo_location', | |
93 | help="Specifies repositories location to index OPTIONAL", |
|
93 | help="Specifies repositories location to index OPTIONAL", | |
94 | ) |
|
94 | ) | |
95 | self.parser.add_option('--index-only', |
|
95 | self.parser.add_option('--index-only', | |
96 | action='store', |
|
96 | action='store', | |
97 | dest='repo_list', |
|
97 | dest='repo_list', | |
98 | help="Specifies a comma separated list of repositores " |
|
98 | help="Specifies a comma separated list of repositores " | |
99 | "to build index on. If not given all repositories " |
|
99 | "to build index on. If not given all repositories " | |
100 | "are scanned for indexing. OPTIONAL", |
|
100 | "are scanned for indexing. OPTIONAL", | |
101 | ) |
|
101 | ) | |
102 | self.parser.add_option('--update-only', |
|
102 | self.parser.add_option('--update-only', | |
103 | action='store', |
|
103 | action='store', | |
104 | dest='repo_update_list', |
|
104 | dest='repo_update_list', | |
105 | help="Specifies a comma separated list of repositores " |
|
105 | help="Specifies a comma separated list of repositores " | |
106 | "to re-build index on. OPTIONAL", |
|
106 | "to re-build index on. OPTIONAL", | |
107 | ) |
|
107 | ) | |
108 | self.parser.add_option('-f', |
|
108 | self.parser.add_option('-f', | |
109 | action='store_true', |
|
109 | action='store_true', | |
110 | dest='full_index', |
|
110 | dest='full_index', | |
111 | help="Specifies that index should be made full i.e" |
|
111 | help="Specifies that index should be made full i.e" | |
112 | " destroy old and build from scratch", |
|
112 | " destroy old and build from scratch", | |
113 | default=False) |
|
113 | default=False) |
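
Example invocations combining the options above (the ini filename and repo names are assumptions):

    paster make-index production.ini                            # index everything
    paster make-index production.ini --index-only=repo1,repo2   # restrict scope
    paster make-index production.ini -f                         # full rebuild
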
@@ -1,83 +1,83 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | rhodecode.lib.paster_commands.make_rcextensions |
|
15 | rhodecode.lib.paster_commands.make_rcextensions | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | make-rcext paster command for RhodeCode |
|
18 | make-rcext paster command for RhodeCode | |
19 |
|
19 | |||
20 | :created_on: Mar 6, 2012 |
|
20 | :created_on: Mar 6, 2012 | |
21 | :author: marcink |
|
21 | :author: marcink | |
22 | :copyright: (c) 2013 RhodeCode GmbH. |
|
22 | :copyright: (c) 2013 RhodeCode GmbH. | |
23 | :license: GPLv3, see LICENSE for more details. |
|
23 | :license: GPLv3, see LICENSE for more details. | |
24 |
|
24 | |||
25 | """ |
|
25 | """ | |
26 |
|
26 | |||
27 | from __future__ import with_statement |
|
27 | from __future__ import with_statement | |
28 |
|
28 | |||
29 | import os |
|
29 | import os | |
30 | import sys |
|
30 | import sys | |
31 | import logging |
|
31 | import logging | |
32 | import pkg_resources |
|
32 | import pkg_resources | |
33 |
|
33 | |||
34 | from rhodecode.lib.utils import BasePasterCommand, ask_ok |
|
34 | from rhodecode.lib.utils import BasePasterCommand, ask_ok | |
35 |
|
35 | |||
36 | # fix rhodecode import |
|
36 | # Add location of top level folder to sys.path | |
37 | from os.path import dirname as dn |
|
37 | from os.path import dirname as dn | |
38 | rc_path = dn(dn(dn(os.path.realpath(__file__)))) |
|
38 | rc_path = dn(dn(dn(os.path.realpath(__file__)))) | |
39 | sys.path.append(rc_path) |
|
39 | sys.path.append(rc_path) | |
40 |
|
40 | |||
41 | log = logging.getLogger(__name__) |
|
41 | log = logging.getLogger(__name__) | |
42 |
|
42 | |||
43 |
|
43 | |||
44 | class Command(BasePasterCommand): |
|
44 | class Command(BasePasterCommand): | |
45 |
|
45 | |||
46 | max_args = 1 |
|
46 | max_args = 1 | |
47 | min_args = 1 |
|
47 | min_args = 1 | |
48 |
|
48 | |||
49 | usage = "CONFIG_FILE" |
|
49 | usage = "CONFIG_FILE" | |
50 | group_name = "RhodeCode" |
|
50 | group_name = "RhodeCode" | |
51 | takes_config_file = -1 |
|
51 | takes_config_file = -1 | |
52 | parser = BasePasterCommand.standard_parser(verbose=True) |
|
52 | parser = BasePasterCommand.standard_parser(verbose=True) | |
53 | summary = "Creates additional extensions for rhodecode" |
|
53 | summary = "Creates additional extensions for rhodecode" | |
54 |
|
54 | |||
55 | def command(self): |
|
55 | def command(self): | |
56 | logging.config.fileConfig(self.path_to_ini_file) |
|
56 | logging.config.fileConfig(self.path_to_ini_file) | |
57 | from pylons import config |
|
57 | from pylons import config | |
58 |
|
58 | |||
59 | def _make_file(ext_file, tmpl): |
|
59 | def _make_file(ext_file, tmpl): | |
60 | bdir = os.path.split(ext_file)[0] |
|
60 | bdir = os.path.split(ext_file)[0] | |
61 | if not os.path.isdir(bdir): |
|
61 | if not os.path.isdir(bdir): | |
62 | os.makedirs(bdir) |
|
62 | os.makedirs(bdir) | |
63 | with open(ext_file, 'wb') as f: |
|
63 | with open(ext_file, 'wb') as f: | |
64 | f.write(tmpl) |
|
64 | f.write(tmpl) | |
65 | log.info('Written new extensions file to %s' % ext_file) |

65 | log.info('Written new extensions file to %s' % ext_file) | |
66 |
|
66 | |||
67 | here = config['here'] |
|
67 | here = config['here'] | |
68 | tmpl = pkg_resources.resource_string( |
|
68 | tmpl = pkg_resources.resource_string( | |
69 | 'rhodecode', os.path.join('config', 'rcextensions', '__init__.py') |
|
69 | 'rhodecode', os.path.join('config', 'rcextensions', '__init__.py') | |
70 | ) |
|
70 | ) | |
71 | ext_file = os.path.join(here, 'rcextensions', '__init__.py') |
|
71 | ext_file = os.path.join(here, 'rcextensions', '__init__.py') | |
72 | if os.path.exists(ext_file): |
|
72 | if os.path.exists(ext_file): | |
73 | msg = ('Extension file already exists, do you want ' |
|
73 | msg = ('Extension file already exists, do you want ' | |
74 | 'to overwrite it? [y/n]') |

74 | 'to overwrite it? [y/n]') | |
75 | if ask_ok(msg): |
|
75 | if ask_ok(msg): | |
76 | _make_file(ext_file, tmpl) |
|
76 | _make_file(ext_file, tmpl) | |
77 | else: |
|
77 | else: | |
78 | log.info('nothing done...') |
|
78 | log.info('nothing done...') | |
79 | else: |
|
79 | else: | |
80 | _make_file(ext_file, tmpl) |
|
80 | _make_file(ext_file, tmpl) | |
81 |
|
81 | |||
82 | def update_parser(self): |
|
82 | def update_parser(self): | |
83 | pass |
|
83 | pass |
@@ -1,71 +1,71 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | rhodecode.lib.paster_commands.repo_scan |

15 | rhodecode.lib.paster_commands.repo_scan | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |

16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | repo-scan paster command for RhodeCode |
|
18 | repo-scan paster command for RhodeCode | |
19 |
|
19 | |||
20 | :created_on: Feb 9, 2013 |
|
20 | :created_on: Feb 9, 2013 | |
21 | :author: marcink |
|
21 | :author: marcink | |
22 | :copyright: (c) 2013 RhodeCode GmbH. |
|
22 | :copyright: (c) 2013 RhodeCode GmbH. | |
23 | :license: GPLv3, see LICENSE for more details. |
|
23 | :license: GPLv3, see LICENSE for more details. | |
24 | """ |
|
24 | """ | |
25 |
|
25 | |||
26 | from __future__ import with_statement |
|
26 | from __future__ import with_statement | |
27 |
|
27 | |||
28 | import os |
|
28 | import os | |
29 | import sys |
|
29 | import sys | |
30 | import logging |
|
30 | import logging | |
31 |
|
31 | |||
32 | from rhodecode.model.scm import ScmModel |
|
32 | from rhodecode.model.scm import ScmModel | |
33 | from rhodecode.lib.utils import BasePasterCommand, repo2db_mapper |
|
33 | from rhodecode.lib.utils import BasePasterCommand, repo2db_mapper | |
34 |
|
34 | |||
35 | # fix rhodecode import |
|
35 | # Add location of top level folder to sys.path | |
36 | from os.path import dirname as dn |
|
36 | from os.path import dirname as dn | |
37 | rc_path = dn(dn(dn(os.path.realpath(__file__)))) |
|
37 | rc_path = dn(dn(dn(os.path.realpath(__file__)))) | |
38 | sys.path.append(rc_path) |
|
38 | sys.path.append(rc_path) | |
39 |
|
39 | |||
40 | log = logging.getLogger(__name__) |
|
40 | log = logging.getLogger(__name__) | |
41 |
|
41 | |||
42 |
|
42 | |||
43 | class Command(BasePasterCommand): |
|
43 | class Command(BasePasterCommand): | |
44 |
|
44 | |||
45 | max_args = 1 |
|
45 | max_args = 1 | |
46 | min_args = 1 |
|
46 | min_args = 1 | |
47 |
|
47 | |||
48 | usage = "CONFIG_FILE" |
|
48 | usage = "CONFIG_FILE" | |
49 | group_name = "RhodeCode" |
|
49 | group_name = "RhodeCode" | |
50 | takes_config_file = -1 |
|
50 | takes_config_file = -1 | |
51 | parser = BasePasterCommand.standard_parser(verbose=True) |
|
51 | parser = BasePasterCommand.standard_parser(verbose=True) | |
52 | summary = "Rescan default location for new repositories" |
|
52 | summary = "Rescan default location for new repositories" | |
53 |
|
53 | |||
54 | def command(self): |
|
54 | def command(self): | |
55 | #get SqlAlchemy session |
|
55 | #get SqlAlchemy session | |
56 | self._init_session() |
|
56 | self._init_session() | |
57 | rm_obsolete = self.options.delete_obsolete |
|
57 | rm_obsolete = self.options.delete_obsolete | |
58 | log.info('Now scanning root location for new repos...') |
|
58 | log.info('Now scanning root location for new repos...') | |
59 | added, removed = repo2db_mapper(ScmModel().repo_scan(), |
|
59 | added, removed = repo2db_mapper(ScmModel().repo_scan(), | |
60 | remove_obsolete=rm_obsolete) |
|
60 | remove_obsolete=rm_obsolete) | |
61 | added = ', '.join(added) or '-' |
|
61 | added = ', '.join(added) or '-' | |
62 | removed = ', '.join(removed) or '-' |
|
62 | removed = ', '.join(removed) or '-' | |
63 | log.info('Scan completed added: %s removed: %s' % (added, removed)) |
|
63 | log.info('Scan completed added: %s removed: %s' % (added, removed)) | |
64 |
|
64 | |||
65 | def update_parser(self): |
|
65 | def update_parser(self): | |
66 | self.parser.add_option( |
|
66 | self.parser.add_option( | |
67 | '--delete-obsolete', |
|
67 | '--delete-obsolete', | |
68 | action='store_true', |
|
68 | action='store_true', | |
69 | help="Use this flag do delete repositories that are " |
|
69 | help="Use this flag do delete repositories that are " | |
70 | "present in RhodeCode database but not on the filesystem", |
|
70 | "present in RhodeCode database but not on the filesystem", | |
71 | ) |
|
71 | ) |
@@ -1,110 +1,110 b'' | |||||
import os
import sys
from paste.script.appinstall import AbstractInstallCommand
from paste.script.command import BadCommand
from paste.deploy import appconfig

-# fix rhodecode import
+# Add location of top level folder to sys.path
from os.path import dirname as dn
rc_path = dn(dn(dn(os.path.realpath(__file__))))
sys.path.append(rc_path)


class Command(AbstractInstallCommand):

    default_verbosity = 1
    max_args = 1
    min_args = 1
    summary = "Setup an application, given a config file"
    usage = "CONFIG_FILE"
    group_name = "RhodeCode"

    description = """\

    Setup RhodeCode according to its configuration file. This is
    the second part of a two-phase web application installation
    process (the first phase is prepare-app). The setup process
    consists of things like setting up databases and creating the
    super user.
    """

    parser = AbstractInstallCommand.standard_parser(
        simulate=True, quiet=True, interactive=True)
    parser.add_option('--user',
                      action='store',
                      dest='username',
                      default=None,
                      help='Admin Username')
    parser.add_option('--email',
                      action='store',
                      dest='email',
                      default=None,
                      help='Admin Email')
    parser.add_option('--password',
                      action='store',
                      dest='password',
                      default=None,
                      help='Admin password min 6 chars')
    parser.add_option('--repos',
                      action='store',
                      dest='repos_location',
                      default=None,
                      help='Absolute path to repositories location')
    parser.add_option('--name',
                      action='store',
                      dest='section_name',
                      default=None,
                      help='The name of the section to set up (default: app:main)')
    parser.add_option('--force-yes',
                      action='store_true',
                      dest='force_ask',
                      default=None,
                      help='Force yes to every question')
    parser.add_option('--force-no',
                      action='store_false',
                      dest='force_ask',
                      default=None,
                      help='Force no to every question')
    parser.add_option('--public-access',
                      action='store_true',
                      dest='public_access',
                      default=None,
                      help='Enable public access on this installation (default)')
    parser.add_option('--no-public-access',
                      action='store_false',
                      dest='public_access',
                      default=None,
                      help='Disable public access on this installation')

    def command(self):
        config_spec = self.args[0]
        section = self.options.section_name
        if section is None:
            if '#' in config_spec:
                config_spec, section = config_spec.split('#', 1)
            else:
                section = 'main'
        if not ':' in section:
            plain_section = section
            section = 'app:' + section
        else:
            plain_section = section.split(':', 1)[0]
        if not config_spec.startswith('config:'):
            config_spec = 'config:' + config_spec
        if plain_section != 'main':
            config_spec += '#' + plain_section
        config_file = config_spec[len('config:'):].split('#', 1)[0]
        config_file = os.path.join(os.getcwd(), config_file)
        self.logging_file_config(config_file)
        conf = appconfig(config_spec, relative_to=os.getcwd())
        ep_name = conf.context.entry_point_name
        ep_group = conf.context.protocol
        dist = conf.context.distribution
        if dist is None:
            raise BadCommand(
                "The section %r is not the application (probably a filter). "
                "You should add #section_name, where section_name is the "
                "section that configures your application" % plain_section)
        installer = self.get_installer(dist, ep_group, ep_name)
        installer.setup_config(
            self, config_file, section, self.sysconfig_install_vars(installer))
        self.call_sysconfig_functions(
            'post_setup_hook', installer, config_file)
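The spec/section normalization at the top of command() is the subtle part of this setup command. The standalone sketch below (a hypothetical resolve() helper, mirroring the logic above line for line) shows what two representative inputs resolve to:

# Hedged sketch of the normalization performed by command() above (Python 2).
def resolve(config_spec, section=None):
    if section is None:
        if '#' in config_spec:
            config_spec, section = config_spec.split('#', 1)
        else:
            section = 'main'
    if not ':' in section:
        plain_section = section
        section = 'app:' + section
    else:
        plain_section = section.split(':', 1)[0]
    if not config_spec.startswith('config:'):
        config_spec = 'config:' + config_spec
    if plain_section != 'main':
        config_spec += '#' + plain_section
    return config_spec, section

print resolve('production.ini')        # ('config:production.ini', 'app:main')
print resolve('production.ini#other')  # ('config:production.ini#other', 'app:other')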
@@ -1,88 +1,88 @@
# -*- coding: utf-8 -*-
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
rhodecode.lib.paster_commands.update_repoinfo
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

update-repoinfo paster command for RhodeCode

:created_on: Jul 14, 2012
:author: marcink
:copyright: (c) 2013 RhodeCode GmbH.
:license: GPLv3, see LICENSE for more details.
"""

from __future__ import with_statement

import os
import sys
import logging
import string

from rhodecode.lib.utils import BasePasterCommand
from rhodecode.model.db import Repository
from rhodecode.model.repo import RepoModel
from rhodecode.model.meta import Session

-# fix rhodecode import
+# Add location of top level folder to sys.path
from os.path import dirname as dn
rc_path = dn(dn(dn(os.path.realpath(__file__))))
sys.path.append(rc_path)

log = logging.getLogger(__name__)


class Command(BasePasterCommand):

    max_args = 1
    min_args = 1

    usage = "CONFIG_FILE"
    group_name = "RhodeCode"
    takes_config_file = -1
    parser = BasePasterCommand.standard_parser(verbose=True)
    summary = "Updates repositories caches for last changeset"

    def command(self):
        #get SqlAlchemy session
        self._init_session()

        repo_update_list = map(string.strip,
                               self.options.repo_update_list.split(',')) \
                               if self.options.repo_update_list else None

        if repo_update_list:
            repo_list = Repository.query()\
                .filter(Repository.repo_name.in_(repo_update_list))
        else:
            repo_list = Repository.getAll()
        RepoModel.update_repoinfo(repositories=repo_list)
        Session().commit()

        if self.options.invalidate_cache:
            for r in repo_list:
                r.set_invalidate()
        log.info('Updated cache for %s repositories' % (len(repo_list)))

    def update_parser(self):
        self.parser.add_option('--update-only',
                               action='store',
                               dest='repo_update_list',
                               help="Specifies a comma separated list of repositories "
                                    "to update last commit info for. OPTIONAL")
        self.parser.add_option('--invalidate-cache',
                               action='store_true',
                               dest='invalidate_cache',
                               help="Trigger cache invalidation event for repos. "
                                    "OPTIONAL")