@@ -73,7 +73,7 @@ FORMATTER = HtmlFormatter('span', betwee
 FRAGMENTER = ContextFragmenter(200)
 
 CHGSETS_SCHEMA = Schema(
+    raw_id=ID(unique=True, stored=True),
     revision=NUMERIC(unique=True, stored=True),
     last=BOOLEAN(),
     owner=TEXT(),
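Note: the new raw_id field is declared ID(unique=True, stored=True), so Whoosh can both look a changeset document up by it and hand it back among the stored fields. A minimal, self-contained sketch of how such a schema behaves (not RhodeCode code; the extra repository/message fields, the index directory and all values are made up):

import os

from whoosh.fields import Schema, ID, NUMERIC, BOOLEAN, TEXT
from whoosh.index import create_in

# trimmed-down variant of CHGSETS_SCHEMA; only raw_id/revision/last/owner
# appear in the hunk above, the other fields and their types are guesses
schema = Schema(raw_id=ID(unique=True, stored=True),
                revision=NUMERIC(unique=True, stored=True),
                last=BOOLEAN(),
                owner=TEXT(),
                repository=ID(stored=True),
                message=TEXT(stored=True))

idx_dir = '/tmp/whoosh-chgsets'          # made-up location
if not os.path.exists(idx_dir):
    os.makedirs(idx_dir)
ix = create_in(idx_dir, schema)

writer = ix.writer()
writer.add_document(raw_id=u'deadbeefcafe', revision=1, last=True,
                    owner=u'someone@example.com', repository=u'vcs',
                    message=u'initial commit')
writer.commit()

with ix.searcher() as searcher:
    # stored fields come back as a plain dict, which is what the
    # WhooshResultWrapper later reads as res['repository'], res['raw_id'], ...
    print(searcher.document(raw_id=u'deadbeefcafe'))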
@@ -209,15 +209,15 @@ class WhooshResultWrapper(object):
     def get_full_content(self, docid):
         res = self.searcher.stored_fields(docid[0])
         log.debug('result: %s' % res)
-        full_repo_path = jn(self.repo_location, res['repository'])
-        f_path = res['path'].split(full_repo_path)[-1]
-        f_path = f_path.lstrip(os.sep)
-        res.update({'f_path': f_path})
-
         if self.search_type == 'content':
+            full_repo_path = jn(self.repo_location, res['repository'])
+            f_path = res['path'].split(full_repo_path)[-1]
+            f_path = f_path.lstrip(os.sep)
             content_short = self.get_short_content(res, docid[1])
             res.update({'content_short': content_short,
-                        'content_short_hl': self.highlight(content_short)
+                        'content_short_hl': self.highlight(content_short),
+                        'f_path': f_path
+                        })
         elif self.search_type == 'message':
             res.update({'message_hl': self.highlight(res['message'])})
 
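Note: after this change the relative file path is only derived for 'content' hits, where the result actually points at a file; 'message' hits just get the highlighted commit message. A rough sketch of what that path stripping does, assuming the jn() seen in the diff is os.path.join and using made-up paths:

import os
from os.path import join as jn   # assumption: jn() is os.path.join

repo_location = '/srv/repos'                          # made-up repo base
res = {'repository': 'vcs',
       'path': '/srv/repos/vcs/vcs/backends/hg.py'}   # made-up stored fields

full_repo_path = jn(repo_location, res['repository'])   # '/srv/repos/vcs'
f_path = res['path'].split(full_repo_path)[-1]           # '/vcs/backends/hg.py'
f_path = f_path.lstrip(os.sep)                            # 'vcs/backends/hg.py'
print(f_path)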
@@ -179,7 +179,7 @@ class WhooshIndexingDaemon(object):
         indexed=0
         for cs in repo[start_rev:]:
             writer.add_document(
+                raw_id=unicode(cs.raw_id),
                 owner=unicode(repo.contact),
                 repository=safe_unicode(repo_name),
                 author=cs.author,
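Note: Whoosh on Python 2 expects text field values as unicode, which is why everything is wrapped in unicode()/safe_unicode() before being passed to writer.add_document(). A hypothetical re-implementation of such a helper, just to show the coercion it performs (the real safe_unicode() in the codebase may differ):

def safe_unicode(s, encoding='utf-8'):
    """Hypothetical stand-in for the safe_unicode() helper used above:
    pass unicode through unchanged, decode byte strings, never blow up."""
    if isinstance(s, unicode):
        return s
    try:
        return s.decode(encoding)
    except UnicodeDecodeError:
        return s.decode(encoding, 'replace')

print(repr(safe_unicode('plain byte string')))          # -> u'plain byte string'
print(repr(safe_unicode(u'already unicode')))            # returned as-is
print(repr(safe_unicode('\xc5\xbc\xc3\xb3\xc5\x82w')))   # utf-8 bytes -> u'\u017c\xf3\u0142w'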
@@ -231,8 +231,8 @@ class WhooshIndexingDaemon(object):
         if last_rev == 0 or len(revs) > last_rev + 1:
             # delete the docs in the index for the previous last changeset(s)
             for hit in results:
-                q = qp.parse(u"last:t AND %s AND 
-                             (repo_name, hit['
+                q = qp.parse(u"last:t AND %s AND raw_id:%s" %
+                              (repo_name, hit['raw_id']))
                 writer.delete_by_query(q)
 
             # index from the previous last changeset + all new ones
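Note: writer.delete_by_query() takes a parsed Whoosh query, so the loop builds one per stale hit: last:t restricted to the repository name plus the changeset's raw_id. A small sketch of that parse step, assuming a QueryParser bound to the changeset schema with 'repository' as the default field (the parser setup and values are invented):

from whoosh.fields import Schema, ID, BOOLEAN, TEXT
from whoosh.qparser import QueryParser

# same field names as the schema hunk above; the exact schema used by the
# daemon's query parser is an assumption
schema = Schema(raw_id=ID(unique=True, stored=True),
                repository=ID(stored=True),
                last=BOOLEAN(),
                message=TEXT(stored=True))

# 'repository' as default field: the bare repo-name term in the query string
# below is matched against the repository field
qp = QueryParser('repository', schema=schema)

repo_name, raw_id = u'vcs', u'deadbeefcafe'
q = qp.parse(u"last:t AND %s AND raw_id:%s" % (repo_name, raw_id))
print(q)  # an And([...]) of three term queries

# in the daemon this query is handed to writer.delete_by_query(q) so the
# documents previously flagged last:t for this repository get dropped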
@@ -5,8 +5,8 @@
 <div class="table">
     <div id="body${cnt}" class="codeblock">
         <div class="code-header">
-            <div class="search-path">${h.link_to(h.literal('%s » %s' % (sr['repository'],sr['
-                h.url('changeset_home',repo_name=sr['repository'],revision=sr['
+            <div class="search-path">${h.link_to(h.literal('%s » %s' % (sr['repository'],sr['raw_id'])),
+                h.url('changeset_home',repo_name=sr['repository'],revision=sr['raw_id']))}
         </div>
     </div>
     <div class="left">
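Note: the template consumes the same stored fields the result wrapper returns; sr['repository'] and sr['raw_id'] feed both the link label and the changeset_home URL. A stripped-down, hypothetical rendering of that header with plain Mako (no h.link_to/h.url helpers, made-up URL layout and values):

# -*- coding: utf-8 -*-
from mako.template import Template

# simplified stand-in for the search template header above
tmpl = Template(u"""\
<div class="search-path">
  <a href="/${sr['repository']}/changeset/${sr['raw_id']}">
    ${sr['repository']} » ${sr['raw_id']}
  </a>
</div>
""", output_encoding='utf-8')

sr = {'repository': u'vcs', 'raw_id': u'deadbeefcafe'}   # made-up search hit
print(tmpl.render(sr=sr))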