search: sort commit messages by date rather than revision
Author: dan, revision r71:37d53e52, branch default
@@ -1,107 +1,111 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2010-2016 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/

 """
 Search controller for RhodeCode
 """

 import logging
 import urllib

 from pylons import request, config, tmpl_context as c

 from webhelpers.util import update_params

 from rhodecode.lib.auth import LoginRequired, AuthUser
 from rhodecode.lib.base import BaseRepoController, render
 from rhodecode.lib.helpers import Page
 from rhodecode.lib.utils2 import safe_str, safe_int
 from rhodecode.lib.index import searcher_from_config
 from rhodecode.model import validation_schema

 log = logging.getLogger(__name__)


 class SearchController(BaseRepoController):

     @LoginRequired()
     def index(self, repo_name=None):

         searcher = searcher_from_config(config)
         formatted_results = []
         execution_time = ''

         schema = validation_schema.SearchParamsSchema()

         search_params = {}
         errors = []
         try:
             search_params = schema.deserialize(
                 dict(search_query=request.GET.get('q'),
                      search_type=request.GET.get('type'),
+                     search_sort=request.GET.get('sort'),
                      page_limit=request.GET.get('page_limit'),
                      requested_page=request.GET.get('page'))
             )
         except validation_schema.Invalid as e:
             errors = e.children

+        def url_generator(**kw):
+            q = urllib.quote(safe_str(search_query))
+            return update_params(
+                "?q=%s&type=%s" % (q, safe_str(search_type)), **kw)
+
         search_query = search_params.get('search_query')
         search_type = search_params.get('search_type')
-
+        search_sort = search_params.get('search_sort')
         if search_params.get('search_query'):
             page_limit = search_params['page_limit']
             requested_page = search_params['requested_page']

-            def url_generator(**kw):
-                q = urllib.quote(safe_str(search_query))
-                return update_params(
-                    "?q=%s&type=%s" % (q, safe_str(search_type)), **kw)

             c.perm_user = AuthUser(user_id=c.rhodecode_user.user_id,
                                    ip_addr=self.ip_addr)

             try:
                 search_result = searcher.search(
                     search_query, search_type, c.perm_user, repo_name,
-                    requested_page, page_limit)
+                    requested_page, page_limit, search_sort)

                 formatted_results = Page(
                     search_result['results'], page=requested_page,
                     item_count=search_result['count'],
                     items_per_page=page_limit, url=url_generator)
             finally:
                 searcher.cleanup()

             if not search_result['error']:
                 execution_time = '%s results (%.3f seconds)' % (
                     search_result['count'],
                     search_result['runtime'])
             elif not errors:
                 node = schema['search_query']
                 errors = [
                     validation_schema.Invalid(node, search_result['error'])]

+        c.sort = search_sort
+        c.url_generator = url_generator
         c.errors = errors
         c.formatted_results = formatted_results
         c.runtime = execution_time
         c.cur_query = search_query
         c.search_type = search_type
         # Return a rendered template
         return render('/search/search.html')
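The controller hunk above wires the new option end to end: the `sort` query parameter is validated as `search_sort`, passed on to `searcher.search(...)`, and exposed to the template as `c.sort` together with `c.url_generator` (now defined outside the `if` block so the header link can be built even when the query is empty). Below is a minimal sketch of the URL round trip, assuming `webhelpers.util.update_params` behaves as it is used above; the query values are invented for illustration:

    # Rough sketch of how the sort toggle URL is produced; 'bugfix' and
    # 'commit' are made-up example values, not taken from the commit.
    import urllib
    from webhelpers.util import update_params

    search_query = 'bugfix'
    search_type = 'commit'

    def url_generator(**kw):
        q = urllib.quote(search_query)
        return update_params(
            "?q=%s&type=%s" % (q, search_type), **kw)

    # The template calls this with the opposite of the current order:
    url_generator(sort='oldfirst')  # roughly '?q=bugfix&type=commit&sort=oldfirst'
    url_generator(sort='newfirst')  # roughly '?q=bugfix&type=commit&sort=newfirst'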
@@ -1,274 +1,279 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2012-2016 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/

 """
 Index schema for RhodeCode
 """

 from __future__ import absolute_import
 import logging
 import os
 import re

 from pylons.i18n.translation import _

 from whoosh import query as query_lib, sorting
 from whoosh.highlight import HtmlFormatter, ContextFragmenter
 from whoosh.index import create_in, open_dir, exists_in, EmptyIndexError
 from whoosh.qparser import QueryParser, QueryParserError

 import rhodecode.lib.helpers as h
 from rhodecode.lib.index import BaseSearch

 log = logging.getLogger(__name__)


 try:
     # we first try to import from rhodecode tools, fallback to copies if
     # we're unable to
     from rhodecode_tools.lib.fts_index.whoosh_schema import (
         ANALYZER, FILE_INDEX_NAME, FILE_SCHEMA, COMMIT_INDEX_NAME,
         COMMIT_SCHEMA)
 except ImportError:
     log.warning('rhodecode_tools schema not available, doing a fallback '
                 'import from `rhodecode.lib.index.whoosh_fallback_schema`')
     from rhodecode.lib.index.whoosh_fallback_schema import (
         ANALYZER, FILE_INDEX_NAME, FILE_SCHEMA, COMMIT_INDEX_NAME,
         COMMIT_SCHEMA)


 FORMATTER = HtmlFormatter('span', between='\n<span class="break">...</span>\n')
 FRAGMENTER = ContextFragmenter(200)

 log = logging.getLogger(__name__)



 class Search(BaseSearch):

     name = 'whoosh'

     def __init__(self, config):
         self.config = config
         if not os.path.isdir(self.config['location']):
             os.makedirs(self.config['location'])

         opener = create_in
         if exists_in(self.config['location'], indexname=FILE_INDEX_NAME):
             opener = open_dir
         file_index = opener(self.config['location'], schema=FILE_SCHEMA,
                             indexname=FILE_INDEX_NAME)

         opener = create_in
         if exists_in(self.config['location'], indexname=COMMIT_INDEX_NAME):
             opener = open_dir
         changeset_index = opener(self.config['location'], schema=COMMIT_SCHEMA,
                                  indexname=COMMIT_INDEX_NAME)

         self.commit_schema = COMMIT_SCHEMA
         self.commit_index = changeset_index
         self.file_schema = FILE_SCHEMA
         self.file_index = file_index
         self.searcher = None

     def cleanup(self):
         if self.searcher:
             self.searcher.close()

     def _extend_query(self, query):
         hashes = re.compile('([0-9a-f]{5,40})').findall(query)
         if hashes:
             hashes_or_query = ' OR '.join('commit_id:%s*' % h for h in hashes)
             query = u'(%s) OR %s' % (query, hashes_or_query)
         return query

     def search(self, query, document_type, search_user, repo_name=None,
-               requested_page=1, page_limit=10):
+               requested_page=1, page_limit=10, sort=None):

         original_query = query
         query = self._extend_query(query)

         log.debug(u'QUERY: %s on %s', query, document_type)
         result = {
             'results': [],
             'count': 0,
             'error': None,
             'runtime': 0
         }
         search_type, index_name, schema_defn = self._prepare_for_search(
             document_type)
         self._init_searcher(index_name)
         try:
             qp = QueryParser(search_type, schema=schema_defn)
             allowed_repos_filter = self._get_repo_filter(
                 search_user, repo_name)
             try:
                 query = qp.parse(unicode(query))
                 log.debug('query: %s (%s)' % (query, repr(query)))

-                sortedby = None
+                reverse, sortedby = False, None
                 if search_type == 'message':
-                    sortedby = sorting.FieldFacet('commit_idx', reverse=True)
+                    if sort == 'oldfirst':
+                        sortedby = 'date'
+                        reverse = False
+                    elif sort == 'newfirst':
+                        sortedby = 'date'
+                        reverse = True

                 whoosh_results = self.searcher.search(
                     query, filter=allowed_repos_filter, limit=None,
-                    sortedby=sortedby,)
+                    sortedby=sortedby, reverse=reverse)

                 # fixes for 32k limit that whoosh uses for highlight
                 whoosh_results.fragmenter.charlimit = None
                 res_ln = whoosh_results.scored_length()
                 result['runtime'] = whoosh_results.runtime
                 result['count'] = res_ln
                 result['results'] = WhooshResultWrapper(
                     search_type, res_ln, whoosh_results)

             except QueryParserError:
                 result['error'] = _('Invalid search query. Try quoting it.')
         except (EmptyIndexError, IOError, OSError):
             msg = _('There is no index to search in. '
                     'Please run whoosh indexer')
             log.exception(msg)
             result['error'] = msg
         except Exception:
             msg = _('An error occurred during this search operation')
             log.exception(msg)
             result['error'] = msg

         return result

     def statistics(self):
         stats = [
             {'key': _('Index Type'), 'value': 'Whoosh'},
             {'key': _('File Index'), 'value': str(self.file_index)},
             {'key': _('Indexed documents'),
              'value': self.file_index.doc_count()},
             {'key': _('Last update'),
              'value': h.time_to_datetime(self.file_index.last_modified())},
             {'key': _('Commit index'), 'value': str(self.commit_index)},
             {'key': _('Indexed documents'),
              'value': str(self.commit_index.doc_count())},
             {'key': _('Last update'),
              'value': h.time_to_datetime(self.commit_index.last_modified())}
         ]
         return stats

     def _get_repo_filter(self, auth_user, repo_name):

         allowed_to_search = [
             repo for repo, perm in
             auth_user.permissions['repositories'].items()
             if perm != 'repository.none']

         if repo_name:
             repo_filter = [query_lib.Term('repository', repo_name)]

         elif 'hg.admin' in auth_user.permissions.get('global', []):
             return None

         else:
             repo_filter = [query_lib.Term('repository', _rn)
                            for _rn in allowed_to_search]
             # in case we're not allowed to search anywhere, it's a trick
             # to tell whoosh we're filtering, on ALL results
             repo_filter = repo_filter or [query_lib.Term('repository', '')]

         return query_lib.Or(repo_filter)

     def _prepare_for_search(self, cur_type):
         search_type = {
             'content': 'content',
             'commit': 'message',
             'path': 'path',
             'repository': 'repository'
         }.get(cur_type, 'content')

         index_name = {
             'content': FILE_INDEX_NAME,
             'commit': COMMIT_INDEX_NAME,
             'path': FILE_INDEX_NAME
         }.get(cur_type, FILE_INDEX_NAME)

         schema_defn = {
             'content': self.file_schema,
             'commit': self.commit_schema,
             'path': self.file_schema
         }.get(cur_type, self.file_schema)

         log.debug('IDX: %s' % index_name)
         log.debug('SCHEMA: %s' % schema_defn)
         return search_type, index_name, schema_defn

     def _init_searcher(self, index_name):
         idx = open_dir(self.config['location'], indexname=index_name)
         self.searcher = idx.searcher()
         return self.searcher


 class WhooshResultWrapper(object):
     def __init__(self, search_type, total_hits, results):
         self.search_type = search_type
         self.results = results
         self.total_hits = total_hits

     def __str__(self):
         return '<%s at %s>' % (self.__class__.__name__, len(self))

     def __repr__(self):
         return self.__str__()

     def __len__(self):
         return self.total_hits

     def __iter__(self):
         """
         Allows Iteration over results,and lazy generate content

         *Requires* implementation of ``__getitem__`` method.
         """
         for hit in self.results:
             yield self.get_full_content(hit)

     def __getitem__(self, key):
         """
         Slicing of resultWrapper
         """
         i, j = key.start, key.stop
         for hit in self.results[i:j]:
             yield self.get_full_content(hit)

     def get_full_content(self, hit):
         # TODO: marcink: this feels like an overkill, there's a lot of data
         # inside hit object, and we don't need all
         res = dict(hit)

         f_path = ''  # noqa
         if self.search_type in ['content', 'path']:
             f_path = res['path'].split(res['repository'])[-1]
             f_path = f_path.lstrip(os.sep)

         if self.search_type == 'content':
             res.update({'content_short_hl': hit.highlights('content'),
                         'f_path': f_path})
         elif self.search_type == 'path':
             res.update({'f_path': f_path})
         elif self.search_type == 'message':
             res.update({'message_hl': hit.highlights('message')})

         return res
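In the Whoosh backend, the fixed `sorting.FieldFacet('commit_idx', reverse=True)` is replaced by sorting on the `date` field with an explicit `reverse` flag; when `sort` is neither 'oldfirst' nor 'newfirst' (for example for content searches), `sortedby` stays None and results keep their relevance order. Passing a field name plus `reverse` to `Searcher.search()` is standard Whoosh usage; the self-contained sketch below shows the same call against a throwaway index (the schema and documents are invented for the example, not RhodeCode's COMMIT_SCHEMA):

    # Stand-alone illustration of date-sorted Whoosh results; everything here
    # is example data, independent of the indexes built by RhodeCode.
    import datetime
    import tempfile

    from whoosh.fields import Schema, TEXT, ID, DATETIME
    from whoosh.index import create_in
    from whoosh.qparser import QueryParser

    schema = Schema(commit_id=ID(stored=True),
                    message=TEXT(stored=True),
                    date=DATETIME(stored=True, sortable=True))
    ix = create_in(tempfile.mkdtemp(), schema)

    writer = ix.writer()
    writer.add_document(commit_id=u'a1b2c3', message=u'fix search paging',
                        date=datetime.datetime(2016, 1, 10))
    writer.add_document(commit_id=u'd4e5f6', message=u'fix search sorting',
                        date=datetime.datetime(2016, 2, 20))
    writer.commit()

    with ix.searcher() as searcher:
        query = QueryParser('message', schema=ix.schema).parse(u'fix')
        # reverse=True gives newest first, mirroring sort == 'newfirst' above
        for hit in searcher.search(query, sortedby='date', reverse=True):
            print hit['commit_id'], hit['date']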
@@ -1,61 +1,66 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2016-2016 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/

 import colander
 from colander import Invalid  # noqa


 class GroupNameType(colander.String):
     SEPARATOR = '/'

     def deserialize(self, node, cstruct):
         result = super(GroupNameType, self).deserialize(node, cstruct)
         return self._replace_extra_slashes(result)

     def _replace_extra_slashes(self, path):
         path = path.split(self.SEPARATOR)
         path = [item for item in path if item]
         return self.SEPARATOR.join(path)


 class RepoGroupSchema(colander.Schema):
     group_name = colander.SchemaNode(GroupNameType())


 class RepoSchema(colander.Schema):
     repo_name = colander.SchemaNode(GroupNameType())


 class SearchParamsSchema(colander.MappingSchema):
     search_query = colander.SchemaNode(
         colander.String(),
         missing='')
     search_type = colander.SchemaNode(
         colander.String(),
         missing='content',
         validator=colander.OneOf(['content', 'path', 'commit', 'repository']))
+    search_sort = colander.SchemaNode(
+        colander.String(),
+        missing='newfirst',
+        validator=colander.OneOf(
+            ['oldfirst', 'newfirst']))
     page_limit = colander.SchemaNode(
         colander.Integer(),
         missing=10,
         validator=colander.Range(1, 500))
     requested_page = colander.SchemaNode(
         colander.Integer(),
         missing=1)

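The schema addition gives `search_sort` a default of 'newfirst' and limits it to the two supported values; an unknown value raises `colander.Invalid`, which the controller collects into `errors` like any other bad parameter. A quick sketch of that behaviour, assuming colander's usual handling of missing keys (the values are made up for illustration):

    # Rough illustration of the new node, using SearchParamsSchema as defined
    # in the hunk above; 'fix' and 'sideways' are made-up example values.
    import colander

    schema = SearchParamsSchema()

    params = schema.deserialize({'search_query': 'fix', 'search_type': 'commit'})
    # params['search_sort'] == 'newfirst', the declared default
    # params['page_limit'] == 10 and params['requested_page'] == 1 likewise

    try:
        schema.deserialize({'search_query': 'fix', 'search_sort': 'sideways'})
    except colander.Invalid as e:
        e.asdict()  # reports that search_sort is not one of oldfirst/newfirst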
@@ -1,76 +1,82 @@
 <%namespace name="base" file="/base/base.html"/>

 <table class="rctable search-results">
   <tr>
     <th>${_('Repository')}</th>
     <th>${_('Commit')}</th>
     <th></th>
     <th>${_('Commit message')}</th>
-    <th>${_('Age')}</th>
+    <th>
+    %if c.sort == 'newfirst':
+        <a href="${c.url_generator(sort='oldfirst')}">${_('Age (new first)')}</a>
+    %else:
+        <a href="${c.url_generator(sort='newfirst')}">${_('Age (old first)')}</a>
+    %endif
+    </th>
     <th>${_('Author')}</th>
   </tr>
   %for entry in c.formatted_results:
   ## search results are additionally filtered, and this check is just a safe gate
   % if h.HasRepoPermissionAny('repository.write','repository.read','repository.admin')(entry['repository'], 'search results commit check'):
   <tr class="body">
     <td class="td-componentname">
       %if h.get_repo_type_by_name(entry.get('repository')) == 'hg':
       <i class="icon-hg"></i>
       %elif h.get_repo_type_by_name(entry.get('repository')) == 'git':
       <i class="icon-git"></i>
       %elif h.get_repo_type_by_name(entry.get('repository')) == 'svn':
       <i class="icon-svn"></i>
       %endif
       ${h.link_to(entry['repository'], h.url('summary_home',repo_name=entry['repository']))}
     </td>
     <td class="td-commit">
       ${h.link_to(h._shorten_commit_id(entry['commit_id']),
         h.url('changeset_home',repo_name=entry['repository'],revision=entry['commit_id']))}
     </td>
     <td class="td-message expand_commit search open" data-commit-id="${h.md5_safe(entry['repository'])+entry['commit_id']}" id="t-${h.md5_safe(entry['repository'])+entry['commit_id']}" title="${_('Expand commit message')}">
       <div class="show_more_col">
         <i class="show_more"></i>&nbsp;
       </div>
     </td>
     <td data-commit-id="${h.md5_safe(entry['repository'])+entry['commit_id']}" id="c-${h.md5_safe(entry['repository'])+entry['commit_id']}" class="message td-description open">
       %if entry.get('message_hl'):
         ${h.literal(entry['message_hl'])}
       %else:
         ${h.urlify_commit_message(entry['message'], entry['repository'])}
       %endif
     </td>
     <td class="td-time">
       ${h.age_component(h.time_to_datetime(entry['date']))}
     </td>

     <td class="td-user author">
       ${base.gravatar_with_user(entry['author'])}
     </td>
   </tr>
   % endif
   %endfor
 </table>

 %if c.cur_query and c.formatted_results:
 <div class="pagination-wh pagination-left">
   ${c.formatted_results.pager('$link_previous ~2~ $link_next')}
 </div>
 %endif

 <script>
   $('.expand_commit').on('click',function(e){
     var target_expand = $(this);
     var cid = target_expand.data('commit-id');

     if (target_expand.hasClass('open')){
       $('#c-'+cid).css({'height': '1.5em', 'white-space': 'nowrap', 'text-overflow': 'ellipsis', 'overflow':'hidden'})
       $('#t-'+cid).css({'height': 'auto', 'line-height': '.9em', 'text-overflow': 'ellipsis', 'overflow':'hidden'})
       target_expand.removeClass('open');
     }
     else {
       $('#c-'+cid).css({'height': 'auto', 'white-space': 'normal', 'text-overflow': 'initial', 'overflow':'visible'})
       $('#t-'+cid).css({'height': 'auto', 'max-height': 'none', 'text-overflow': 'initial', 'overflow':'visible'})
       target_expand.addClass('open');
     }
   });
 </script>
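In the template, the static Age header becomes a toggle: when the current order is 'newfirst' the link offers `sort='oldfirst'` and vice versa, built with the `c.url_generator` the controller now publishes. The `%if`/`%else` above reduces to the small helper sketched below; `current_sort` and `url_generator` are hypothetical stand-ins for `c.sort` and `c.url_generator`:

    # Hypothetical helper mirroring the Mako conditional in the header cell.
    def age_header_link(current_sort, url_generator):
        if current_sort == 'newfirst':
            return url_generator(sort='oldfirst'), 'Age (new first)'
        return url_generator(sort='newfirst'), 'Age (old first)'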