##// END OF EJS Templates
search: goto commit search will now use a safe search option and never...
marcink -
r1411:16beb154 default
parent child Browse files
Show More
@@ -1,288 +1,290 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Home controller for RhodeCode Enterprise
22 Home controller for RhodeCode Enterprise
23 """
23 """
24
24
25 import logging
25 import logging
26 import time
26 import time
27 import re
27 import re
28
28
29 from pylons import tmpl_context as c, request, url, config
29 from pylons import tmpl_context as c, request, url, config
30 from pylons.i18n.translation import _
30 from pylons.i18n.translation import _
31 from sqlalchemy.sql import func
31 from sqlalchemy.sql import func
32
32
33 from rhodecode.lib.auth import (
33 from rhodecode.lib.auth import (
34 LoginRequired, HasPermissionAllDecorator, AuthUser,
34 LoginRequired, HasPermissionAllDecorator, AuthUser,
35 HasRepoGroupPermissionAnyDecorator, XHRRequired)
35 HasRepoGroupPermissionAnyDecorator, XHRRequired)
36 from rhodecode.lib.base import BaseController, render
36 from rhodecode.lib.base import BaseController, render
37 from rhodecode.lib.index import searcher_from_config
37 from rhodecode.lib.index import searcher_from_config
38 from rhodecode.lib.ext_json import json
38 from rhodecode.lib.ext_json import json
39 from rhodecode.lib.utils import jsonify
39 from rhodecode.lib.utils import jsonify
40 from rhodecode.lib.utils2 import safe_unicode, str2bool
40 from rhodecode.lib.utils2 import safe_unicode, str2bool
41 from rhodecode.model.db import Repository, RepoGroup
41 from rhodecode.model.db import Repository, RepoGroup
42 from rhodecode.model.repo import RepoModel
42 from rhodecode.model.repo import RepoModel
43 from rhodecode.model.repo_group import RepoGroupModel
43 from rhodecode.model.repo_group import RepoGroupModel
44 from rhodecode.model.scm import RepoList, RepoGroupList
44 from rhodecode.model.scm import RepoList, RepoGroupList
45
45
46
46
47 log = logging.getLogger(__name__)
47 log = logging.getLogger(__name__)
48
48
49
49
50 class HomeController(BaseController):
50 class HomeController(BaseController):
51 def __before__(self):
51 def __before__(self):
52 super(HomeController, self).__before__()
52 super(HomeController, self).__before__()
53
53
54 def ping(self):
54 def ping(self):
55 """
55 """
56 Ping, doesn't require login, good for checking out the platform
56 Ping, doesn't require login, good for checking out the platform
57 """
57 """
58 instance_id = getattr(c, 'rhodecode_instanceid', '')
58 instance_id = getattr(c, 'rhodecode_instanceid', '')
59 return 'pong[%s] => %s' % (instance_id, self.ip_addr,)
59 return 'pong[%s] => %s' % (instance_id, self.ip_addr,)
60
60
61 @LoginRequired()
61 @LoginRequired()
62 @HasPermissionAllDecorator('hg.admin')
62 @HasPermissionAllDecorator('hg.admin')
63 def error_test(self):
63 def error_test(self):
64 """
64 """
65 Test exception handling and emails on errors
65 Test exception handling and emails on errors
66 """
66 """
67 class TestException(Exception):
67 class TestException(Exception):
68 pass
68 pass
69
69
70 msg = ('RhodeCode Enterprise %s test exception. Generation time: %s'
70 msg = ('RhodeCode Enterprise %s test exception. Generation time: %s'
71 % (c.rhodecode_name, time.time()))
71 % (c.rhodecode_name, time.time()))
72 raise TestException(msg)
72 raise TestException(msg)
73
73
74 def _get_groups_and_repos(self, repo_group_id=None):
74 def _get_groups_and_repos(self, repo_group_id=None):
75 # repo groups groups
75 # repo groups groups
76 repo_group_list = RepoGroup.get_all_repo_groups(group_id=repo_group_id)
76 repo_group_list = RepoGroup.get_all_repo_groups(group_id=repo_group_id)
77 _perms = ['group.read', 'group.write', 'group.admin']
77 _perms = ['group.read', 'group.write', 'group.admin']
78 repo_group_list_acl = RepoGroupList(repo_group_list, perm_set=_perms)
78 repo_group_list_acl = RepoGroupList(repo_group_list, perm_set=_perms)
79 repo_group_data = RepoGroupModel().get_repo_groups_as_dict(
79 repo_group_data = RepoGroupModel().get_repo_groups_as_dict(
80 repo_group_list=repo_group_list_acl, admin=False)
80 repo_group_list=repo_group_list_acl, admin=False)
81
81
82 # repositories
82 # repositories
83 repo_list = Repository.get_all_repos(group_id=repo_group_id)
83 repo_list = Repository.get_all_repos(group_id=repo_group_id)
84 _perms = ['repository.read', 'repository.write', 'repository.admin']
84 _perms = ['repository.read', 'repository.write', 'repository.admin']
85 repo_list_acl = RepoList(repo_list, perm_set=_perms)
85 repo_list_acl = RepoList(repo_list, perm_set=_perms)
86 repo_data = RepoModel().get_repos_as_dict(
86 repo_data = RepoModel().get_repos_as_dict(
87 repo_list=repo_list_acl, admin=False)
87 repo_list=repo_list_acl, admin=False)
88
88
89 return repo_data, repo_group_data
89 return repo_data, repo_group_data
90
90
91 @LoginRequired()
91 @LoginRequired()
92 def index(self):
92 def index(self):
93 c.repo_group = None
93 c.repo_group = None
94
94
95 repo_data, repo_group_data = self._get_groups_and_repos()
95 repo_data, repo_group_data = self._get_groups_and_repos()
96 # json used to render the grids
96 # json used to render the grids
97 c.repos_data = json.dumps(repo_data)
97 c.repos_data = json.dumps(repo_data)
98 c.repo_groups_data = json.dumps(repo_group_data)
98 c.repo_groups_data = json.dumps(repo_group_data)
99
99
100 return render('/index.mako')
100 return render('/index.mako')
101
101
102 @LoginRequired()
102 @LoginRequired()
103 @HasRepoGroupPermissionAnyDecorator('group.read', 'group.write',
103 @HasRepoGroupPermissionAnyDecorator('group.read', 'group.write',
104 'group.admin')
104 'group.admin')
105 def index_repo_group(self, group_name):
105 def index_repo_group(self, group_name):
106 """GET /repo_group_name: Show a specific item"""
106 """GET /repo_group_name: Show a specific item"""
107 c.repo_group = RepoGroupModel()._get_repo_group(group_name)
107 c.repo_group = RepoGroupModel()._get_repo_group(group_name)
108 repo_data, repo_group_data = self._get_groups_and_repos(
108 repo_data, repo_group_data = self._get_groups_and_repos(
109 c.repo_group.group_id)
109 c.repo_group.group_id)
110
110
111 # json used to render the grids
111 # json used to render the grids
112 c.repos_data = json.dumps(repo_data)
112 c.repos_data = json.dumps(repo_data)
113 c.repo_groups_data = json.dumps(repo_group_data)
113 c.repo_groups_data = json.dumps(repo_group_data)
114
114
115 return render('index_repo_group.mako')
115 return render('index_repo_group.mako')
116
116
117 def _get_repo_list(self, name_contains=None, repo_type=None, limit=20):
117 def _get_repo_list(self, name_contains=None, repo_type=None, limit=20):
118 query = Repository.query()\
118 query = Repository.query()\
119 .order_by(func.length(Repository.repo_name))\
119 .order_by(func.length(Repository.repo_name))\
120 .order_by(Repository.repo_name)
120 .order_by(Repository.repo_name)
121
121
122 if repo_type:
122 if repo_type:
123 query = query.filter(Repository.repo_type == repo_type)
123 query = query.filter(Repository.repo_type == repo_type)
124
124
125 if name_contains:
125 if name_contains:
126 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
126 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
127 query = query.filter(
127 query = query.filter(
128 Repository.repo_name.ilike(ilike_expression))
128 Repository.repo_name.ilike(ilike_expression))
129 query = query.limit(limit)
129 query = query.limit(limit)
130
130
131 all_repos = query.all()
131 all_repos = query.all()
132 repo_iter = self.scm_model.get_repos(all_repos)
132 repo_iter = self.scm_model.get_repos(all_repos)
133 return [
133 return [
134 {
134 {
135 'id': obj['name'],
135 'id': obj['name'],
136 'text': obj['name'],
136 'text': obj['name'],
137 'type': 'repo',
137 'type': 'repo',
138 'obj': obj['dbrepo'],
138 'obj': obj['dbrepo'],
139 'url': url('summary_home', repo_name=obj['name'])
139 'url': url('summary_home', repo_name=obj['name'])
140 }
140 }
141 for obj in repo_iter]
141 for obj in repo_iter]
142
142
143 def _get_repo_group_list(self, name_contains=None, limit=20):
143 def _get_repo_group_list(self, name_contains=None, limit=20):
144 query = RepoGroup.query()\
144 query = RepoGroup.query()\
145 .order_by(func.length(RepoGroup.group_name))\
145 .order_by(func.length(RepoGroup.group_name))\
146 .order_by(RepoGroup.group_name)
146 .order_by(RepoGroup.group_name)
147
147
148 if name_contains:
148 if name_contains:
149 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
149 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
150 query = query.filter(
150 query = query.filter(
151 RepoGroup.group_name.ilike(ilike_expression))
151 RepoGroup.group_name.ilike(ilike_expression))
152 query = query.limit(limit)
152 query = query.limit(limit)
153
153
154 all_groups = query.all()
154 all_groups = query.all()
155 repo_groups_iter = self.scm_model.get_repo_groups(all_groups)
155 repo_groups_iter = self.scm_model.get_repo_groups(all_groups)
156 return [
156 return [
157 {
157 {
158 'id': obj.group_name,
158 'id': obj.group_name,
159 'text': obj.group_name,
159 'text': obj.group_name,
160 'type': 'group',
160 'type': 'group',
161 'obj': {},
161 'obj': {},
162 'url': url('repo_group_home', group_name=obj.group_name)
162 'url': url('repo_group_home', group_name=obj.group_name)
163 }
163 }
164 for obj in repo_groups_iter]
164 for obj in repo_groups_iter]
165
165
166 def _get_hash_commit_list(self, hash_starts_with=None, limit=20):
166 def _get_hash_commit_list(self, hash_starts_with=None, limit=20):
167 if not hash_starts_with or len(hash_starts_with) < 3:
167 if not hash_starts_with or len(hash_starts_with) < 3:
168 return []
168 return []
169
169
170 commit_hashes = re.compile('([0-9a-f]{2,40})').findall(hash_starts_with)
170 commit_hashes = re.compile('([0-9a-f]{2,40})').findall(hash_starts_with)
171
171
172 if len(commit_hashes) != 1:
172 if len(commit_hashes) != 1:
173 return []
173 return []
174
174
175 commit_hash_prefix = commit_hashes[0]
175 commit_hash_prefix = commit_hashes[0]
176
176
177 auth_user = AuthUser(
177 auth_user = AuthUser(
178 user_id=c.rhodecode_user.user_id, ip_addr=self.ip_addr)
178 user_id=c.rhodecode_user.user_id, ip_addr=self.ip_addr)
179 searcher = searcher_from_config(config)
179 searcher = searcher_from_config(config)
180 result = searcher.search(
180 result = searcher.search(
181 'commit_id:%s*' % commit_hash_prefix, 'commit', auth_user)
181 'commit_id:%s*' % commit_hash_prefix, 'commit', auth_user,
182 raise_on_exc=False)
182
183
183 return [
184 return [
184 {
185 {
185 'id': entry['commit_id'],
186 'id': entry['commit_id'],
186 'text': entry['commit_id'],
187 'text': entry['commit_id'],
187 'type': 'commit',
188 'type': 'commit',
188 'obj': {'repo': entry['repository']},
189 'obj': {'repo': entry['repository']},
189 'url': url('changeset_home',
190 'url': url('changeset_home',
190 repo_name=entry['repository'], revision=entry['commit_id'])
191 repo_name=entry['repository'],
192 revision=entry['commit_id'])
191 }
193 }
192 for entry in result['results']]
194 for entry in result['results']]
193
195
194 @LoginRequired()
196 @LoginRequired()
195 @XHRRequired()
197 @XHRRequired()
196 @jsonify
198 @jsonify
197 def goto_switcher_data(self):
199 def goto_switcher_data(self):
198 query = request.GET.get('query')
200 query = request.GET.get('query')
199 log.debug('generating goto switcher list, query %s', query)
201 log.debug('generating goto switcher list, query %s', query)
200
202
201 res = []
203 res = []
202 repo_groups = self._get_repo_group_list(query)
204 repo_groups = self._get_repo_group_list(query)
203 if repo_groups:
205 if repo_groups:
204 res.append({
206 res.append({
205 'text': _('Groups'),
207 'text': _('Groups'),
206 'children': repo_groups
208 'children': repo_groups
207 })
209 })
208
210
209 repos = self._get_repo_list(query)
211 repos = self._get_repo_list(query)
210 if repos:
212 if repos:
211 res.append({
213 res.append({
212 'text': _('Repositories'),
214 'text': _('Repositories'),
213 'children': repos
215 'children': repos
214 })
216 })
215
217
216 commits = self._get_hash_commit_list(query)
218 commits = self._get_hash_commit_list(query)
217 if commits:
219 if commits:
218 unique_repos = {}
220 unique_repos = {}
219 for commit in commits:
221 for commit in commits:
220 unique_repos.setdefault(commit['obj']['repo'], []
222 unique_repos.setdefault(commit['obj']['repo'], []
221 ).append(commit)
223 ).append(commit)
222
224
223 for repo in unique_repos:
225 for repo in unique_repos:
224 res.append({
226 res.append({
225 'text': _('Commits in %(repo)s') % {'repo': repo},
227 'text': _('Commits in %(repo)s') % {'repo': repo},
226 'children': unique_repos[repo]
228 'children': unique_repos[repo]
227 })
229 })
228
230
229 data = {
231 data = {
230 'more': False,
232 'more': False,
231 'results': res
233 'results': res
232 }
234 }
233 return data
235 return data
234
236
235 @LoginRequired()
237 @LoginRequired()
236 @XHRRequired()
238 @XHRRequired()
237 @jsonify
239 @jsonify
238 def repo_list_data(self):
240 def repo_list_data(self):
239 query = request.GET.get('query')
241 query = request.GET.get('query')
240 repo_type = request.GET.get('repo_type')
242 repo_type = request.GET.get('repo_type')
241 log.debug('generating repo list, query:%s', query)
243 log.debug('generating repo list, query:%s', query)
242
244
243 res = []
245 res = []
244 repos = self._get_repo_list(query, repo_type=repo_type)
246 repos = self._get_repo_list(query, repo_type=repo_type)
245 if repos:
247 if repos:
246 res.append({
248 res.append({
247 'text': _('Repositories'),
249 'text': _('Repositories'),
248 'children': repos
250 'children': repos
249 })
251 })
250
252
251 data = {
253 data = {
252 'more': False,
254 'more': False,
253 'results': res
255 'results': res
254 }
256 }
255 return data
257 return data
256
258
257 @LoginRequired()
259 @LoginRequired()
258 @XHRRequired()
260 @XHRRequired()
259 @jsonify
261 @jsonify
260 def user_autocomplete_data(self):
262 def user_autocomplete_data(self):
261 query = request.GET.get('query')
263 query = request.GET.get('query')
262 active = str2bool(request.GET.get('active') or True)
264 active = str2bool(request.GET.get('active') or True)
263
265
264 repo_model = RepoModel()
266 repo_model = RepoModel()
265 _users = repo_model.get_users(
267 _users = repo_model.get_users(
266 name_contains=query, only_active=active)
268 name_contains=query, only_active=active)
267
269
268 if request.GET.get('user_groups'):
270 if request.GET.get('user_groups'):
269 # extend with user groups
271 # extend with user groups
270 _user_groups = repo_model.get_user_groups(
272 _user_groups = repo_model.get_user_groups(
271 name_contains=query, only_active=active)
273 name_contains=query, only_active=active)
272 _users = _users + _user_groups
274 _users = _users + _user_groups
273
275
274 return {'suggestions': _users}
276 return {'suggestions': _users}
275
277
276 @LoginRequired()
278 @LoginRequired()
277 @XHRRequired()
279 @XHRRequired()
278 @jsonify
280 @jsonify
279 def user_group_autocomplete_data(self):
281 def user_group_autocomplete_data(self):
280 query = request.GET.get('query')
282 query = request.GET.get('query')
281 active = str2bool(request.GET.get('active') or True)
283 active = str2bool(request.GET.get('active') or True)
282
284
283 repo_model = RepoModel()
285 repo_model = RepoModel()
284 _user_groups = repo_model.get_user_groups(
286 _user_groups = repo_model.get_user_groups(
285 name_contains=query, only_active=active)
287 name_contains=query, only_active=active)
286 _user_groups = _user_groups
288 _user_groups = _user_groups
287
289
288 return {'suggestions': _user_groups}
290 return {'suggestions': _user_groups}
@@ -1,55 +1,57 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2017 RhodeCode GmbH
3 # Copyright (C) 2012-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Index schema for RhodeCode
22 Index schema for RhodeCode
23 """
23 """
24
24
25 import importlib
25 import importlib
26 import logging
26 import logging
27
27
28 log = logging.getLogger(__name__)
28 log = logging.getLogger(__name__)
29
29
30 # leave defaults for backward compat
30 # leave defaults for backward compat
31 default_searcher = 'rhodecode.lib.index.whoosh'
31 default_searcher = 'rhodecode.lib.index.whoosh'
32 default_location = '%(here)s/data/index'
32 default_location = '%(here)s/data/index'
33
33
34
34
35 class BaseSearch(object):
35 class BaseSearch(object):
36 def __init__(self):
36 def __init__(self):
37 pass
37 pass
38
38
39 def cleanup(self):
39 def cleanup(self):
40 pass
40 pass
41
41
42 def search(self, query, document_type, search_user, repo_name=None):
42 def search(self, query, document_type, search_user, repo_name=None,
43 raise_on_exc=True):
43 raise Exception('NotImplemented')
44 raise Exception('NotImplemented')
44
45
46
45 def searcher_from_config(config, prefix='search.'):
47 def searcher_from_config(config, prefix='search.'):
46 _config = {}
48 _config = {}
47 for key in config.keys():
49 for key in config.keys():
48 if key.startswith(prefix):
50 if key.startswith(prefix):
49 _config[key[len(prefix):]] = config[key]
51 _config[key[len(prefix):]] = config[key]
50
52
51 if 'location' not in _config:
53 if 'location' not in _config:
52 _config['location'] = default_location
54 _config['location'] = default_location
53 imported = importlib.import_module(_config.get('module', default_searcher))
55 imported = importlib.import_module(_config.get('module', default_searcher))
54 searcher = imported.Search(config=_config)
56 searcher = imported.Search(config=_config)
55 return searcher
57 return searcher
@@ -1,279 +1,280 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2017 RhodeCode GmbH
3 # Copyright (C) 2012-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Index schema for RhodeCode
22 Index schema for RhodeCode
23 """
23 """
24
24
25 from __future__ import absolute_import
25 from __future__ import absolute_import
26 import logging
26 import logging
27 import os
27 import os
28 import re
28 import re
29
29
30 from pylons.i18n.translation import _
30 from pylons.i18n.translation import _
31
31
32 from whoosh import query as query_lib, sorting
32 from whoosh import query as query_lib, sorting
33 from whoosh.highlight import HtmlFormatter, ContextFragmenter
33 from whoosh.highlight import HtmlFormatter, ContextFragmenter
34 from whoosh.index import create_in, open_dir, exists_in, EmptyIndexError
34 from whoosh.index import create_in, open_dir, exists_in, EmptyIndexError
35 from whoosh.qparser import QueryParser, QueryParserError
35 from whoosh.qparser import QueryParser, QueryParserError
36
36
37 import rhodecode.lib.helpers as h
37 import rhodecode.lib.helpers as h
38 from rhodecode.lib.index import BaseSearch
38 from rhodecode.lib.index import BaseSearch
39
39
40 log = logging.getLogger(__name__)
40 log = logging.getLogger(__name__)
41
41
42
42
43 try:
43 try:
44 # we first try to import from rhodecode tools, fallback to copies if
44 # we first try to import from rhodecode tools, fallback to copies if
45 # we're unable to
45 # we're unable to
46 from rhodecode_tools.lib.fts_index.whoosh_schema import (
46 from rhodecode_tools.lib.fts_index.whoosh_schema import (
47 ANALYZER, FILE_INDEX_NAME, FILE_SCHEMA, COMMIT_INDEX_NAME,
47 ANALYZER, FILE_INDEX_NAME, FILE_SCHEMA, COMMIT_INDEX_NAME,
48 COMMIT_SCHEMA)
48 COMMIT_SCHEMA)
49 except ImportError:
49 except ImportError:
50 log.warning('rhodecode_tools schema not available, doing a fallback '
50 log.warning('rhodecode_tools schema not available, doing a fallback '
51 'import from `rhodecode.lib.index.whoosh_fallback_schema`')
51 'import from `rhodecode.lib.index.whoosh_fallback_schema`')
52 from rhodecode.lib.index.whoosh_fallback_schema import (
52 from rhodecode.lib.index.whoosh_fallback_schema import (
53 ANALYZER, FILE_INDEX_NAME, FILE_SCHEMA, COMMIT_INDEX_NAME,
53 ANALYZER, FILE_INDEX_NAME, FILE_SCHEMA, COMMIT_INDEX_NAME,
54 COMMIT_SCHEMA)
54 COMMIT_SCHEMA)
55
55
56
56
57 FORMATTER = HtmlFormatter('span', between='\n<span class="break">...</span>\n')
57 FORMATTER = HtmlFormatter('span', between='\n<span class="break">...</span>\n')
58 FRAGMENTER = ContextFragmenter(200)
58 FRAGMENTER = ContextFragmenter(200)
59
59
60 log = logging.getLogger(__name__)
60 log = logging.getLogger(__name__)
61
61
62
62
63
64 class Search(BaseSearch):
63 class Search(BaseSearch):
65
64
66 name = 'whoosh'
65 name = 'whoosh'
67
66
68 def __init__(self, config):
67 def __init__(self, config):
68 super(Search, self).__init__()
69 self.config = config
69 self.config = config
70 if not os.path.isdir(self.config['location']):
70 if not os.path.isdir(self.config['location']):
71 os.makedirs(self.config['location'])
71 os.makedirs(self.config['location'])
72
72
73 opener = create_in
73 opener = create_in
74 if exists_in(self.config['location'], indexname=FILE_INDEX_NAME):
74 if exists_in(self.config['location'], indexname=FILE_INDEX_NAME):
75 opener = open_dir
75 opener = open_dir
76 file_index = opener(self.config['location'], schema=FILE_SCHEMA,
76 file_index = opener(self.config['location'], schema=FILE_SCHEMA,
77 indexname=FILE_INDEX_NAME)
77 indexname=FILE_INDEX_NAME)
78
78
79 opener = create_in
79 opener = create_in
80 if exists_in(self.config['location'], indexname=COMMIT_INDEX_NAME):
80 if exists_in(self.config['location'], indexname=COMMIT_INDEX_NAME):
81 opener = open_dir
81 opener = open_dir
82 changeset_index = opener(self.config['location'], schema=COMMIT_SCHEMA,
82 changeset_index = opener(self.config['location'], schema=COMMIT_SCHEMA,
83 indexname=COMMIT_INDEX_NAME)
83 indexname=COMMIT_INDEX_NAME)
84
84
85 self.commit_schema = COMMIT_SCHEMA
85 self.commit_schema = COMMIT_SCHEMA
86 self.commit_index = changeset_index
86 self.commit_index = changeset_index
87 self.file_schema = FILE_SCHEMA
87 self.file_schema = FILE_SCHEMA
88 self.file_index = file_index
88 self.file_index = file_index
89 self.searcher = None
89 self.searcher = None
90
90
91 def cleanup(self):
91 def cleanup(self):
92 if self.searcher:
92 if self.searcher:
93 self.searcher.close()
93 self.searcher.close()
94
94
95 def _extend_query(self, query):
95 def _extend_query(self, query):
96 hashes = re.compile('([0-9a-f]{5,40})').findall(query)
96 hashes = re.compile('([0-9a-f]{5,40})').findall(query)
97 if hashes:
97 if hashes:
98 hashes_or_query = ' OR '.join('commit_id:%s*' % h for h in hashes)
98 hashes_or_query = ' OR '.join('commit_id:%s*' % h for h in hashes)
99 query = u'(%s) OR %s' % (query, hashes_or_query)
99 query = u'(%s) OR %s' % (query, hashes_or_query)
100 return query
100 return query
101
101
102 def search(self, query, document_type, search_user, repo_name=None,
102 def search(self, query, document_type, search_user,
103 requested_page=1, page_limit=10, sort=None):
103 repo_name=None, requested_page=1, page_limit=10, sort=None,
104 raise_on_exc=True):
104
105
105 original_query = query
106 original_query = query
106 query = self._extend_query(query)
107 query = self._extend_query(query)
107
108
108 log.debug(u'QUERY: %s on %s', query, document_type)
109 log.debug(u'QUERY: %s on %s', query, document_type)
109 result = {
110 result = {
110 'results': [],
111 'results': [],
111 'count': 0,
112 'count': 0,
112 'error': None,
113 'error': None,
113 'runtime': 0
114 'runtime': 0
114 }
115 }
115 search_type, index_name, schema_defn = self._prepare_for_search(
116 search_type, index_name, schema_defn = self._prepare_for_search(
116 document_type)
117 document_type)
117 self._init_searcher(index_name)
118 self._init_searcher(index_name)
118 try:
119 try:
119 qp = QueryParser(search_type, schema=schema_defn)
120 qp = QueryParser(search_type, schema=schema_defn)
120 allowed_repos_filter = self._get_repo_filter(
121 allowed_repos_filter = self._get_repo_filter(
121 search_user, repo_name)
122 search_user, repo_name)
122 try:
123 try:
123 query = qp.parse(unicode(query))
124 query = qp.parse(unicode(query))
124 log.debug('query: %s (%s)' % (query, repr(query)))
125 log.debug('query: %s (%s)' % (query, repr(query)))
125
126
126 reverse, sortedby = False, None
127 reverse, sortedby = False, None
127 if search_type == 'message':
128 if search_type == 'message':
128 if sort == 'oldfirst':
129 if sort == 'oldfirst':
129 sortedby = 'date'
130 sortedby = 'date'
130 reverse = False
131 reverse = False
131 elif sort == 'newfirst':
132 elif sort == 'newfirst':
132 sortedby = 'date'
133 sortedby = 'date'
133 reverse = True
134 reverse = True
134
135
135 whoosh_results = self.searcher.search(
136 whoosh_results = self.searcher.search(
136 query, filter=allowed_repos_filter, limit=None,
137 query, filter=allowed_repos_filter, limit=None,
137 sortedby=sortedby, reverse=reverse)
138 sortedby=sortedby, reverse=reverse)
138
139
139 # fixes for 32k limit that whoosh uses for highlight
140 # fixes for 32k limit that whoosh uses for highlight
140 whoosh_results.fragmenter.charlimit = None
141 whoosh_results.fragmenter.charlimit = None
141 res_ln = whoosh_results.scored_length()
142 res_ln = whoosh_results.scored_length()
142 result['runtime'] = whoosh_results.runtime
143 result['runtime'] = whoosh_results.runtime
143 result['count'] = res_ln
144 result['count'] = res_ln
144 result['results'] = WhooshResultWrapper(
145 result['results'] = WhooshResultWrapper(
145 search_type, res_ln, whoosh_results)
146 search_type, res_ln, whoosh_results)
146
147
147 except QueryParserError:
148 except QueryParserError:
148 result['error'] = _('Invalid search query. Try quoting it.')
149 result['error'] = _('Invalid search query. Try quoting it.')
149 except (EmptyIndexError, IOError, OSError):
150 except (EmptyIndexError, IOError, OSError):
150 msg = _('There is no index to search in. '
151 msg = _('There is no index to search in. '
151 'Please run whoosh indexer')
152 'Please run whoosh indexer')
152 log.exception(msg)
153 log.exception(msg)
153 result['error'] = msg
154 result['error'] = msg
154 except Exception:
155 except Exception:
155 msg = _('An error occurred during this search operation')
156 msg = _('An error occurred during this search operation')
156 log.exception(msg)
157 log.exception(msg)
157 result['error'] = msg
158 result['error'] = msg
158
159
159 return result
160 return result
160
161
161 def statistics(self):
162 def statistics(self):
162 stats = [
163 stats = [
163 {'key': _('Index Type'), 'value': 'Whoosh'},
164 {'key': _('Index Type'), 'value': 'Whoosh'},
164 {'key': _('File Index'), 'value': str(self.file_index)},
165 {'key': _('File Index'), 'value': str(self.file_index)},
165 {'key': _('Indexed documents'),
166 {'key': _('Indexed documents'),
166 'value': self.file_index.doc_count()},
167 'value': self.file_index.doc_count()},
167 {'key': _('Last update'),
168 {'key': _('Last update'),
168 'value': h.time_to_datetime(self.file_index.last_modified())},
169 'value': h.time_to_datetime(self.file_index.last_modified())},
169 {'key': _('Commit index'), 'value': str(self.commit_index)},
170 {'key': _('Commit index'), 'value': str(self.commit_index)},
170 {'key': _('Indexed documents'),
171 {'key': _('Indexed documents'),
171 'value': str(self.commit_index.doc_count())},
172 'value': str(self.commit_index.doc_count())},
172 {'key': _('Last update'),
173 {'key': _('Last update'),
173 'value': h.time_to_datetime(self.commit_index.last_modified())}
174 'value': h.time_to_datetime(self.commit_index.last_modified())}
174 ]
175 ]
175 return stats
176 return stats
176
177
177 def _get_repo_filter(self, auth_user, repo_name):
178 def _get_repo_filter(self, auth_user, repo_name):
178
179
179 allowed_to_search = [
180 allowed_to_search = [
180 repo for repo, perm in
181 repo for repo, perm in
181 auth_user.permissions['repositories'].items()
182 auth_user.permissions['repositories'].items()
182 if perm != 'repository.none']
183 if perm != 'repository.none']
183
184
184 if repo_name:
185 if repo_name:
185 repo_filter = [query_lib.Term('repository', repo_name)]
186 repo_filter = [query_lib.Term('repository', repo_name)]
186
187
187 elif 'hg.admin' in auth_user.permissions.get('global', []):
188 elif 'hg.admin' in auth_user.permissions.get('global', []):
188 return None
189 return None
189
190
190 else:
191 else:
191 repo_filter = [query_lib.Term('repository', _rn)
192 repo_filter = [query_lib.Term('repository', _rn)
192 for _rn in allowed_to_search]
193 for _rn in allowed_to_search]
193 # in case we're not allowed to search anywhere, it's a trick
194 # in case we're not allowed to search anywhere, it's a trick
194 # to tell whoosh we're filtering, on ALL results
195 # to tell whoosh we're filtering, on ALL results
195 repo_filter = repo_filter or [query_lib.Term('repository', '')]
196 repo_filter = repo_filter or [query_lib.Term('repository', '')]
196
197
197 return query_lib.Or(repo_filter)
198 return query_lib.Or(repo_filter)
198
199
199 def _prepare_for_search(self, cur_type):
200 def _prepare_for_search(self, cur_type):
200 search_type = {
201 search_type = {
201 'content': 'content',
202 'content': 'content',
202 'commit': 'message',
203 'commit': 'message',
203 'path': 'path',
204 'path': 'path',
204 'repository': 'repository'
205 'repository': 'repository'
205 }.get(cur_type, 'content')
206 }.get(cur_type, 'content')
206
207
207 index_name = {
208 index_name = {
208 'content': FILE_INDEX_NAME,
209 'content': FILE_INDEX_NAME,
209 'commit': COMMIT_INDEX_NAME,
210 'commit': COMMIT_INDEX_NAME,
210 'path': FILE_INDEX_NAME
211 'path': FILE_INDEX_NAME
211 }.get(cur_type, FILE_INDEX_NAME)
212 }.get(cur_type, FILE_INDEX_NAME)
212
213
213 schema_defn = {
214 schema_defn = {
214 'content': self.file_schema,
215 'content': self.file_schema,
215 'commit': self.commit_schema,
216 'commit': self.commit_schema,
216 'path': self.file_schema
217 'path': self.file_schema
217 }.get(cur_type, self.file_schema)
218 }.get(cur_type, self.file_schema)
218
219
219 log.debug('IDX: %s' % index_name)
220 log.debug('IDX: %s' % index_name)
220 log.debug('SCHEMA: %s' % schema_defn)
221 log.debug('SCHEMA: %s' % schema_defn)
221 return search_type, index_name, schema_defn
222 return search_type, index_name, schema_defn
222
223
223 def _init_searcher(self, index_name):
224 def _init_searcher(self, index_name):
224 idx = open_dir(self.config['location'], indexname=index_name)
225 idx = open_dir(self.config['location'], indexname=index_name)
225 self.searcher = idx.searcher()
226 self.searcher = idx.searcher()
226 return self.searcher
227 return self.searcher
227
228
228
229
class WhooshResultWrapper(object):
    """Lazy wrapper around a whoosh result set.

    Iterating (or slicing) the wrapper yields each hit enriched with
    search-type specific fields (highlights and/or a repo-relative path).
    """

    def __init__(self, search_type, total_hits, results):
        self.search_type = search_type
        self.results = results
        self.total_hits = total_hits

    def __str__(self):
        return '<%s at %s>' % (self.__class__.__name__, len(self))

    def __repr__(self):
        return self.__str__()

    def __len__(self):
        return self.total_hits

    def __iter__(self):
        """
        Allows Iteration over results,and lazy generate content

        *Requires* implementation of ``__getitem__`` method.
        """
        return (self.get_full_content(doc) for doc in self.results)

    def __getitem__(self, key):
        """
        Slicing of resultWrapper
        """
        start, stop = key.start, key.stop
        for doc in self.results[start:stop]:
            yield self.get_full_content(doc)

    def get_full_content(self, hit):
        # TODO: marcink: this feels like an overkill, there's a lot of data
        # inside hit object, and we don't need all
        res = dict(hit)

        f_path = ''  # noqa
        if self.search_type in ('content', 'path'):
            # indexed 'path' is prefixed with the repository name;
            # strip it to obtain the repo-relative file path
            prefix_len = len(res['repository'])
            f_path = res['path'][prefix_len:].lstrip(os.sep)

        if self.search_type == 'content':
            res['content_short_hl'] = hit.highlights('content')
            res['f_path'] = f_path
        elif self.search_type == 'path':
            res['f_path'] = f_path
        elif self.search_type == 'message':
            res['message_hl'] = hit.highlights('message')

        return res
General Comments 0
You need to be logged in to leave comments. Login now