@@ -0,0 +1,112 @@
#!/usr/bin/env python
# encoding: utf-8
# search controller for pylons
# Copyright (C) 2009-2010 Marcin Kuzminski <marcin@python-works.com>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 2
# of the License or (at your option) any later version of the license.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""
Created on Aug 7, 2010
search controller for pylons
@author: marcink
"""
from pylons import request, response, session, tmpl_context as c, url
from pylons.controllers.util import abort, redirect
from pylons_app.lib.auth import LoginRequired
from pylons_app.lib.base import BaseController, render
from pylons_app.lib.indexers import ANALYZER, IDX_LOCATION, SCHEMA
from webhelpers.html.builder import escape
from whoosh.highlight import highlight, SimpleFragmenter, HtmlFormatter, \
    ContextFragmenter
from whoosh.index import open_dir, EmptyIndexError
from whoosh.qparser import QueryParser, QueryParserError
from whoosh.query import Phrase
import logging
import traceback

log = logging.getLogger(__name__)

class SearchController(BaseController):

    @LoginRequired()
    def __before__(self):
        super(SearchController, self).__before__()

    def index(self):
        c.formated_results = []
        c.runtime = ''
        search_items = set()
        c.cur_query = request.GET.get('q', None)
        if c.cur_query:
            cur_query = c.cur_query.lower()

        if c.cur_query:
            try:
                idx = open_dir(IDX_LOCATION, indexname='HG_INDEX')
                searcher = idx.searcher()

                qp = QueryParser("content", schema=SCHEMA)
                try:
                    query = qp.parse(unicode(cur_query))

                    if isinstance(query, Phrase):
                        search_items.update(query.words)
                    else:
                        for i in query.all_terms():
                            search_items.add(i[1])

                    log.debug(query)
                    log.debug(search_items)
                    results = searcher.search(query)
                    c.runtime = '%s results (%.3f seconds)' \
                        % (len(results), results.runtime)

                    analyzer = ANALYZER
                    formatter = HtmlFormatter('span',
                        between='\n<span class="break">...</span>\n')

                    #how the matched parts are split within the same text part
                    fragmenter = SimpleFragmenter(200)
                    #fragmenter = ContextFragmenter(search_items)

                    for res in results:
                        d = {}
                        d.update(res)
                        hl = highlight(escape(res['content']), search_items,
                                       analyzer=analyzer,
                                       fragmenter=fragmenter,
                                       formatter=formatter,
                                       top=5)
                        f_path = res['path'][res['path'].find(res['repository']) \
                                             + len(res['repository']):].lstrip('/')
                        d.update({'content_short':hl,
                                  'f_path':f_path})
                        #del d['content']
                        c.formated_results.append(d)

                except QueryParserError:
                    c.runtime = 'Invalid search query. Try quoting it.'

            except (EmptyIndexError, IOError):
                log.error(traceback.format_exc())
                log.error('Empty Index data')
                c.runtime = 'There is no index to search in. Please run the whoosh indexer'

        # Return a rendered template
        return render('/search/search.html')
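As a standalone illustration of the highlighting pipeline the controller wires together, here is a minimal sketch using the same Whoosh pieces. The sample text and terms are made up for illustration; the analyzer mirrors the one built in the indexers module, and the HtmlFormatter/highlight calls mirror the keyword arguments the controller itself uses:

from whoosh.analysis import RegexTokenizer, LowercaseFilter
from whoosh.highlight import highlight, SimpleFragmenter, HtmlFormatter

#same kind of analyzer the indexers module builds (assumption: reused here standalone)
analyzer = RegexTokenizer() | LowercaseFilter()

#made-up text and term set standing in for a search result and parsed query terms
text = u'def search(query): return index.search(query)'
terms = set([u'search'])

#wrap each hit in a <span> and join fragments with a break marker,
#mirroring what SearchController.index() does for every result
fragmenter = SimpleFragmenter(200)
formatter = HtmlFormatter('span',
                          between=u'\n<span class="break">...</span>\n')

print highlight(text, terms, analyzer=analyzer, fragmenter=fragmenter,
                formatter=formatter, top=5)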
@@ -0,0 +1,36 @@
import sys
import os
from pidlock import LockHeld, DaemonLock
import traceback

from os.path import dirname as dn
from os.path import join as jn

#to get the pylons_app import
sys.path.append(dn(dn(dn(os.path.realpath(__file__)))))

from pylons_app.config.environment import load_environment
from pylons_app.model.hg_model import HgModel
from whoosh.analysis import RegexTokenizer, LowercaseFilter, StopFilter
from whoosh.fields import TEXT, ID, STORED, Schema
from whoosh.index import create_in, open_dir
from shutil import rmtree

#LOCATION WHERE WE KEEP THE INDEX
IDX_LOCATION = jn(dn(dn(dn(dn(os.path.abspath(__file__))))), 'data', 'index')

#EXTENSIONS WE SKIP READING CONTENT FROM
EXCLUDE_EXTENSIONS = ['pyc', 'mo', 'png', 'jpg', 'jpeg', 'gif', 'swf',
                      'dll', 'ttf', 'psd', 'svg', 'pdf', 'bmp']

#CUSTOM ANALYZER: wordsplit tokenizer + lowercase filter
ANALYZER = RegexTokenizer() | LowercaseFilter()

#INDEX SCHEMA DEFINITION
SCHEMA = Schema(owner=TEXT(),
                repository=TEXT(stored=True),
                path=ID(stored=True, unique=True),
                content=TEXT(stored=True, analyzer=ANALYZER),
                modtime=STORED())

IDX_NAME = 'HG_INDEX'
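For reference, a minimal sketch of how these module-level definitions could be exercised with the Whoosh API this changeset relies on. The scratch directory and the sample document values are made up for illustration, and it assumes the pylons_app package is importable; the calls themselves mirror those used by the indexer daemon and the search controller:

import os
from whoosh.index import create_in
from whoosh.qparser import QueryParser

from pylons_app.lib.indexers import SCHEMA, IDX_NAME

#hypothetical scratch location, not the real IDX_LOCATION
tmp_idx = '/tmp/hg_index_sketch'
if not os.path.exists(tmp_idx):
    os.mkdir(tmp_idx)

#create an index with the shared schema and index name
idx = create_in(tmp_idx, SCHEMA, indexname=IDX_NAME)
writer = idx.writer()
#field names must match the Schema defined above
writer.add_document(owner=u'marcin', repository=u'demo_repo',
                    path=u'/demo_repo/setup.py', content=u'import whoosh',
                    modtime=0)
writer.commit()

#query the 'content' field the same way the search controller does
searcher = idx.searcher()
query = QueryParser('content', schema=SCHEMA).parse(u'whoosh')
for hit in searcher.search(query):
    print hit['path']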
@@ -0,0 +1,181 @@
#!/usr/bin/env python
# encoding: utf-8
# whoosh indexer daemon for hg-app
# Copyright (C) 2009-2010 Marcin Kuzminski <marcin@python-works.com>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 2
# of the License or (at your option) any later version of the license.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""
Created on Jan 26, 2010

@author: marcink
A daemon will read from the task table and run tasks
"""
import sys
import os
from pidlock import LockHeld, DaemonLock
import traceback

from os.path import dirname as dn
from os.path import join as jn

#to get the pylons_app import
sys.path.append(dn(dn(dn(os.path.realpath(__file__)))))

from pylons_app.config.environment import load_environment
from pylons_app.model.hg_model import HgModel
from whoosh.index import create_in, open_dir
from shutil import rmtree
from pylons_app.lib.indexers import ANALYZER, EXCLUDE_EXTENSIONS, IDX_LOCATION, SCHEMA, IDX_NAME
import logging
log = logging.getLogger(__name__)


location = '/home/marcink/python_workspace_dirty/*'

def scan_paths(root_location):
    return HgModel.repo_scan('/', root_location, None, True)

class WhooshIndexingDaemon(object):
    """Daemon for atomic jobs"""

    def __init__(self, indexname='HG_INDEX'):
        self.indexname = indexname

    def get_paths(self, root_dir):
        """recursive walk in root dir and return a set of all paths in that dir,
        excluding files in the .hg dir"""
        index_paths_ = set()
        for path, dirs, files in os.walk(root_dir):
            if path.find('.hg') == -1:
                for f in files:
                    index_paths_.add(jn(path, f))

        return index_paths_

    def add_doc(self, writer, path, repo):
        """Add a doc to the writer"""

        #we don't want to read excluded file extensions, just index their paths
        if path.split('/')[-1].split('.')[-1].lower() not in EXCLUDE_EXTENSIONS:
            fobj = open(path, 'rb')
            content = fobj.read()
            fobj.close()
            try:
                u_content = unicode(content)
            except UnicodeDecodeError:
                #in case we have a decode error just represent as byte string
                u_content = unicode(str(content).encode('string_escape'))
        else:
            u_content = u''
        writer.add_document(owner=unicode(repo.contact),
                            repository=u"%s" % repo.name,
                            path=u"%s" % path,
                            content=u_content,
                            modtime=os.path.getmtime(path))

    def build_index(self):
        if os.path.exists(IDX_LOCATION):
            rmtree(IDX_LOCATION)

        if not os.path.exists(IDX_LOCATION):
            os.mkdir(IDX_LOCATION)

        idx = create_in(IDX_LOCATION, SCHEMA, indexname=IDX_NAME)
        writer = idx.writer()

        for cnt, repo in enumerate(scan_paths(location).values()):
            log.debug('building index @ %s' % repo.path)

            for idx_path in self.get_paths(repo.path):
                log.debug(' >> %s' % idx_path)
                self.add_doc(writer, idx_path, repo)
            writer.commit(merge=True)

        log.debug('>>> FINISHED BUILDING INDEX <<<')

    def update_index(self):
        log.debug('STARTING INCREMENTAL INDEXING UPDATE')

        idx = open_dir(IDX_LOCATION, indexname=self.indexname)
        # The set of all paths in the index
        indexed_paths = set()
        # The set of all paths we need to re-index
        to_index = set()

        reader = idx.reader()
        writer = idx.writer()

        # Loop over the stored fields in the index
        for fields in reader.all_stored_fields():
            indexed_path = fields['path']
            indexed_paths.add(indexed_path)

            if not os.path.exists(indexed_path):
                # This file was deleted since it was indexed
                log.debug('removing from index %s' % indexed_path)
                writer.delete_by_term('path', indexed_path)

            else:
                # Check if this file was changed since it
                # was indexed
                indexed_time = fields['modtime']

                mtime = os.path.getmtime(indexed_path)

                if mtime > indexed_time:
                    # The file has changed, delete it and add it to the list of
                    # files to reindex
                    log.debug('adding to reindex list %s' % indexed_path)
                    writer.delete_by_term('path', indexed_path)
                    to_index.add(indexed_path)
        #writer.commit()

        # Loop over the files in the filesystem
        # Assume we have a function that gathers the filenames of the
        # documents to be indexed
        for repo in scan_paths(location).values():
            for path in self.get_paths(repo.path):
                if path in to_index or path not in indexed_paths:
                    # This is either a file that's changed, or a new file
                    # that wasn't indexed before. So index it!
                    self.add_doc(writer, path, repo)
                    log.debug('reindexing %s' % path)

        writer.commit(merge=True)
        #idx.optimize()
        log.debug('>>> FINISHED <<<')

    def run(self, full_index=False):
        """Run the daemon"""
        if full_index:
            self.build_index()
        else:
            self.update_index()

if __name__ == "__main__":

    #config = load_environment()
    #print config
    try:
        l = DaemonLock()
        WhooshIndexingDaemon().run(full_index=True)
        l.release()
    except LockHeld:
        sys.exit(1)
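The __main__ block above always rebuilds the index from scratch. As a hedged sketch (not part of this changeset) of how an incremental refresh could be scheduled instead, the wrapper below assumes the daemon module is importable under a hypothetical name; the daemon itself only distinguishes full_index=True from False:

import sys
from pidlock import LockHeld, DaemonLock
#module name is an assumption; adjust to wherever WhooshIndexingDaemon lives
from indexer_daemon import WhooshIndexingDaemon

def run_incremental():
    """Refresh the existing index instead of rebuilding it."""
    try:
        lock = DaemonLock()
        #full_index=False walks the stored fields and only re-adds
        #new or modified files (see update_index above)
        WhooshIndexingDaemon().run(full_index=False)
        lock.release()
    except LockHeld:
        #another indexing run is already in progress
        sys.exit(1)

if __name__ == '__main__':
    run_incremental()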
@@ -0,0 +1,176 @@
from multiprocessing import Process, Queue, cpu_count, Lock
import socket, sys
import time
import os
import sys
from os.path import dirname as dn
from multiprocessing.dummy import current_process
from shutil import rmtree

sys.path.append(dn(dn(dn(os.path.realpath(__file__)))))

from pylons_app.model.hg_model import HgModel
from whoosh.analysis import RegexTokenizer, LowercaseFilter, StopFilter
from whoosh.fields import TEXT, ID, STORED, Schema
from whoosh.index import create_in, open_dir
from datetime import datetime
from multiprocessing.process import current_process
from multiprocessing import Array, Value

root = dn(dn(os.path.dirname(os.path.abspath(__file__))))
idx_location = os.path.join(root, 'data', 'index')
root_path = '/home/marcink/python_workspace_dirty/*'

exclude_extensions = ['pyc', 'mo', 'png', 'jpg', 'jpeg', 'gif', 'swf',
                      'dll', 'ttf', 'psd', 'svg', 'pdf', 'bmp']

my_analyzer = RegexTokenizer() | LowercaseFilter()

def scan_paths(root_location):
    return HgModel.repo_scan('/', root_location, None, True)

def index_paths(root_dir):
    index_paths_ = set()
    for path, dirs, files in os.walk(root_dir):
        if path.find('.hg') == -1:
        #if path.find('.hg') == -1 and path.find('bel-epa') != -1:
            for f in files:
                index_paths_.add(os.path.join(path, f))

    return index_paths_

def get_schema():
    return Schema(owner=TEXT(),
                  repository=TEXT(stored=True),
                  path=ID(stored=True, unique=True),
                  content=TEXT(stored=True, analyzer=my_analyzer),
                  modtime=STORED())

def add_doc(writer, path, repo_name, contact):
    """
    Add a doc to the writer
    @param writer: whoosh index writer
    @param path: path of the file to index
    @param repo_name: name of the repository the file belongs to
    @param contact: repository contact, stored as the owner field
    """

    #we don't want to read excluded file extensions, just index their paths
    if path.split('/')[-1].split('.')[-1].lower() not in exclude_extensions:
        fobj = open(path, 'rb')
        content = fobj.read()
        fobj.close()
        try:
            u_content = unicode(content)
        except UnicodeDecodeError:
            #in case we have a decode error just represent as byte string
            u_content = unicode(str(content).encode('string_escape'))
    else:
        u_content = u''
    writer.add_document(repository=u"%s" % repo_name,
                        owner=unicode(contact),
                        path=u"%s" % path,
                        content=u_content,
                        modtime=os.path.getmtime(path))


class MultiProcessIndexer(object):
    """ multiprocessing whoosh indexer """

    def __init__(self, idx, work_set=set(), nr_processes=cpu_count()):
        q = Queue()
        l = Lock()
        work_set = work_set
        writer = None
        #writer = idx.writer()

        for q_task in work_set:
            q.put(q_task)

        q.put('COMMIT')

        #to stop all processes we have to put STOP to queue and
        #break the loop for each process
        for _ in xrange(nr_processes):
            q.put('STOP')

        for _ in xrange(nr_processes):
            p = Process(target=self.work_func, args=(q, l, idx, writer))
            p.start()

    def work_func(self, q, l, idx, writer):
        """ worker function invoked by each process """

        writer = idx.writer()

        while True:
            q_task = q.get()
            proc = current_process()

            #if q_task == 'COMMIT':
            #    l.acquire()
            #    sys.stdout.write('%s commiting and STOP\n' % proc._name)
            #    writer.commit(merge=False)
            #    l.release()
            #    break
            #l.acquire()
            #writer = idx.writer()
            #l.release()

            if q_task == 'STOP':
                sys.stdout.write('%s STOP\n' % proc._name)
                break

            if q_task != 'COMMIT':
                l.acquire()

                sys.stdout.write(' >> %s %s %s @ ' % q_task)
                sys.stdout.write(' %s \n' % proc._name)

                l.release()
                add_doc(writer, q_task[0], q_task[1], q_task[2])

        l.acquire()
        writer.commit(merge=True)
        l.release()


if __name__ == "__main__":
    #build queue
    do = True if len(sys.argv) > 1 else False
    q_tasks = []

    if os.path.exists(idx_location):
        rmtree(idx_location)

    if not os.path.exists(idx_location):
        os.mkdir(idx_location)

    idx = create_in(idx_location, get_schema(), indexname='HG_INDEX')

    if do:
        sys.stdout.write('Building queue...')
        for cnt, repo in enumerate(scan_paths(root_path).values()):
            if repo.name != 'evoice_py':
                continue
            q_tasks.extend([(idx_path, repo.name, repo.contact)
                            for idx_path in index_paths(repo.path)])
            if cnt == 4:
                break

        sys.stdout.write('done\n')

        mpi = MultiProcessIndexer(idx, q_tasks)

    else:
        print 'checking index'
        reader = idx.reader()
        all = reader.all_stored_fields()
        #print all
        for fields in all:
            print fields['path']
@@ -0,0 +1,127 @@
import os, time
import sys
from warnings import warn

class LockHeld(Exception):
    pass


class DaemonLock(object):
    '''daemon locking
    USAGE:
    try:
        l = DaemonLock()
        main()
        l.release()
    except LockHeld:
        sys.exit(1)
    '''

    def __init__(self, file=None, callbackfn=None,
                 desc='daemon lock', debug=False):

        self.pidfile = file if file else os.path.join(os.path.dirname(__file__),
                                                      'running.lock')
        self.callbackfn = callbackfn
        self.desc = desc
        self.debug = debug
        self.held = False
        #run the lock automatically!
        self.lock()

    def __del__(self):
        if self.held:

            #warn("use lock.release instead of del lock",
            #     category=DeprecationWarning,
            #     stacklevel=2)

            # ensure the lock will be removed
            self.release()

    def lock(self):
        '''
        locking function, if lock is present it will raise LockHeld exception
        '''
        lockname = '%s' % (os.getpid())

        self.trylock()
        self.makelock(lockname, self.pidfile)
        return True

    def trylock(self):
        running_pid = False
        try:
            pidfile = open(self.pidfile, "r")
            pidfile.seek(0)
            running_pid = pidfile.readline()
            if self.debug:
                print 'lock file present running_pid: %s, checking for execution'\
                    % running_pid
            # Now we check the PID from lock file matches to the current
            # process PID
            if running_pid:
                if os.path.exists("/proc/%s" % running_pid):
                    print "You already have an instance of the program running"
                    print "It is running as process %s" % running_pid
                    raise LockHeld
                else:
                    print "Lock file is there but the program is not running"
                    print "Removing lock file for the: %s" % running_pid
                    self.release()
        except IOError, e:
            if e.errno != 2:
                raise

    def release(self):
        '''
        releases the pid by removing the pidfile
        '''
        if self.callbackfn:
            #execute callback function on release
            if self.debug:
                print 'executing callback function %s' % self.callbackfn
            self.callbackfn()
        try:
            if self.debug:
                print 'removing pidfile %s' % self.pidfile
            os.remove(self.pidfile)
            self.held = False
        except OSError, e:
            if self.debug:
                print 'removing pidfile failed %s' % e
            pass

    def makelock(self, lockname, pidfile):
        '''
        this function will make an actual lock
        @param lockname: actual pid written into the file
        @param pidfile: the file to write the pid in
        '''
        if self.debug:
            print 'creating a file %s and pid: %s' % (pidfile, lockname)
        pidfile = open(self.pidfile, "wb")
        pidfile.write(lockname)
        pidfile.close()
        self.held = True


def main():
    print 'func is running'
    cnt = 20
    while 1:
        print cnt
        if cnt == 0:
            break
        time.sleep(1)
        cnt -= 1


if __name__ == "__main__":
    try:
        l = DaemonLock(desc='test lock')
        main()
        l.release()
    except LockHeld:
        sys.exit(1)
@@ -0,0 +1,69 @@
## -*- coding: utf-8 -*-
<%inherit file="/base/base.html"/>
<%def name="title()">
    ${_('Search')}: ${c.cur_query}
</%def>
<%def name="breadcrumbs()">
    ${c.hg_app_name}
</%def>
<%def name="page_nav()">
    ${self.menu('home')}
</%def>
<%def name="main()">

    <div class="box">
        <!-- box / title -->
        <div class="title">
            <h5>${_('Search')}</h5>
        </div>
        <!-- end box / title -->
        ${h.form('search',method='get')}
        <div class="form">
            <div class="fields">

                <div class="field ">
                    <div class="label">
                        <label for="q">${_('Search:')}</label>
                    </div>
                    <div class="input">
                        ${h.text('q',c.cur_query,class_="small")}
                        <div class="button highlight">
                            <input type="submit" value="${_('Search')}" class="ui-button ui-widget ui-state-default ui-corner-all"/>
                        </div>
                        <div style="font-weight: bold;clear:both;padding: 5px">${c.runtime}</div>
                    </div>
                </div>
            </div>
        </div>
        ${h.end_form()}

        %for cnt,sr in enumerate(c.formated_results):
            %if h.HasRepoPermissionAny('repository.write','repository.read','repository.admin')(sr['repository'],'search results check'):
                <div class="table">
                    <div id="body${cnt}" class="codeblock">
                        <div class="code-header">
                            <div class="revision">${h.link_to(h.literal('%s » %s' % (sr['repository'],sr['f_path'])),
                                h.url('files_home',repo_name=sr['repository'],revision='tip',f_path=sr['f_path']))}</div>
                        </div>
                        <div class="code-body">
                            <pre>${h.literal(sr['content_short'])}</pre>
                        </div>
                    </div>
                </div>
            %else:
                %if cnt == 0:
                    <div class="table">
                        <div id="body${cnt}" class="codeblock">
                            <div class="error">${_('Permission denied')}</div>
                        </div>
                    </div>
                %endif

            %endif
        %endfor

    </div>

</%def>
@@ -0,0 +1,7 @@
from pylons_app.tests import *

class TestSearchController(TestController):

    def test_index(self):
        response = self.app.get(url(controller='search', action='index'))
        # Test response...
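The generated test stops at fetching the page. A hedged sketch of how a query could be passed in a further test, using the same imports as the file above; the query string is made up, and since the controller is @LoginRequired a real test would authenticate first:

class TestSearchControllerQuery(TestController):

    def test_index_with_query(self):
        #the controller is @LoginRequired, so a real test would log in first;
        #this sketch only shows how the 'q' parameter would be passed
        response = self.app.get(url(controller='search', action='index'),
                                {'q': 'def repo'})
        # Test response...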
@@ -1,163 +1,165 @@
 """Routes configuration

 The more specific and detailed routes should be defined first so they
 may take precedence over the more generic routes. For more information
 refer to the routes manual at http://routes.groovie.org/docs/
 """
 from __future__ import with_statement
 from routes import Mapper
 from pylons_app.lib.utils import check_repo_fast as cr

 def make_map(config):
     """Create, configure and return the routes Mapper"""
     map = Mapper(directory=config['pylons.paths']['controllers'],
                  always_scan=config['debug'])
     map.minimization = False
     map.explicit = False

     # The ErrorController route (handles 404/500 error pages); it should
     # likely stay at the top, ensuring it can always be resolved
     map.connect('/error/{action}', controller='error')
     map.connect('/error/{action}/{id}', controller='error')

     # CUSTOM ROUTES HERE
     map.connect('hg_home', '/', controller='hg', action='index')

     def check_repo(environ, match_dict):
         """
         check for valid repository for proper 404 handling
         @param environ: wsgi environment
         @param match_dict: matched route dictionary
         """
         repo_name = match_dict.get('repo_name')
         return not cr(repo_name, config['base_path'])

     #REST REPO MAP
     with map.submapper(path_prefix='/_admin', controller='admin/repos') as m:
         m.connect("repos", "/repos",
                   action="create", conditions=dict(method=["POST"]))
         m.connect("repos", "/repos",
                   action="index", conditions=dict(method=["GET"]))
         m.connect("formatted_repos", "/repos.{format}",
                   action="index",
                   conditions=dict(method=["GET"]))
         m.connect("new_repo", "/repos/new",
                   action="new", conditions=dict(method=["GET"]))
         m.connect("formatted_new_repo", "/repos/new.{format}",
                   action="new", conditions=dict(method=["GET"]))
         m.connect("/repos/{repo_name:.*}",
                   action="update", conditions=dict(method=["PUT"],
                                                    function=check_repo))
         m.connect("/repos/{repo_name:.*}",
                   action="delete", conditions=dict(method=["DELETE"],
                                                    function=check_repo))
         m.connect("edit_repo", "/repos/{repo_name:.*}/edit",
                   action="edit", conditions=dict(method=["GET"],
                                                  function=check_repo))
         m.connect("formatted_edit_repo", "/repos/{repo_name:.*}.{format}/edit",
                   action="edit", conditions=dict(method=["GET"],
                                                  function=check_repo))
         m.connect("repo", "/repos/{repo_name:.*}",
                   action="show", conditions=dict(method=["GET"],
                                                  function=check_repo))
         m.connect("formatted_repo", "/repos/{repo_name:.*}.{format}",
                   action="show", conditions=dict(method=["GET"],
                                                  function=check_repo))
         #ajax delete repo perm user
         m.connect('delete_repo_user', "/repos_delete_user/{repo_name:.*}",
                   action="delete_perm_user", conditions=dict(method=["DELETE"],
                                                              function=check_repo))

     map.resource('user', 'users', controller='admin/users', path_prefix='/_admin')
     map.resource('permission', 'permissions', controller='admin/permissions', path_prefix='/_admin')

     #map.resource('setting', 'settings', controller='admin/settings', path_prefix='/_admin', name_prefix='admin_')
     #REST SETTINGS MAP
     with map.submapper(path_prefix='/_admin', controller='admin/settings') as m:
         m.connect("admin_settings", "/settings",
                   action="create", conditions=dict(method=["POST"]))
         m.connect("admin_settings", "/settings",
                   action="index", conditions=dict(method=["GET"]))
         m.connect("admin_formatted_settings", "/settings.{format}",
                   action="index", conditions=dict(method=["GET"]))
         m.connect("admin_new_setting", "/settings/new",
                   action="new", conditions=dict(method=["GET"]))
         m.connect("admin_formatted_new_setting", "/settings/new.{format}",
                   action="new", conditions=dict(method=["GET"]))
         m.connect("/settings/{setting_id}",
                   action="update", conditions=dict(method=["PUT"]))
         m.connect("/settings/{setting_id}",
                   action="delete", conditions=dict(method=["DELETE"]))
         m.connect("admin_edit_setting", "/settings/{setting_id}/edit",
                   action="edit", conditions=dict(method=["GET"]))
         m.connect("admin_formatted_edit_setting", "/settings/{setting_id}.{format}/edit",
                   action="edit", conditions=dict(method=["GET"]))
         m.connect("admin_setting", "/settings/{setting_id}",
                   action="show", conditions=dict(method=["GET"]))
         m.connect("admin_formatted_setting", "/settings/{setting_id}.{format}",
                   action="show", conditions=dict(method=["GET"]))
         m.connect("admin_settings_my_account", "/my_account",
                   action="my_account", conditions=dict(method=["GET"]))
         m.connect("admin_settings_my_account_update", "/my_account_update",
                   action="my_account_update", conditions=dict(method=["PUT"]))
         m.connect("admin_settings_create_repository", "/create_repository",
                   action="create_repository", conditions=dict(method=["GET"]))

     #ADMIN
     with map.submapper(path_prefix='/_admin', controller='admin/admin') as m:
         m.connect('admin_home', '', action='index')#main page
         m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9\. _-]*}',
                   action='add_repo')
+    #SEARCH
+    map.connect('search', '/_admin/search', controller='search')

     #LOGIN/LOGOUT
     map.connect('login_home', '/_admin/login', controller='login')
     map.connect('logout_home', '/_admin/logout', controller='login', action='logout')
     map.connect('register', '/_admin/register', controller='login', action='register')

     #FEEDS
     map.connect('rss_feed_home', '/{repo_name:.*}/feed/rss',
                 controller='feed', action='rss',
                 conditions=dict(function=check_repo))
     map.connect('atom_feed_home', '/{repo_name:.*}/feed/atom',
                 controller='feed', action='atom',
                 conditions=dict(function=check_repo))


     #OTHERS
     map.connect('changeset_home', '/{repo_name:.*}/changeset/{revision}',
                 controller='changeset', revision='tip',
                 conditions=dict(function=check_repo))
     map.connect('summary_home', '/{repo_name:.*}/summary',
                 controller='summary', conditions=dict(function=check_repo))
     map.connect('shortlog_home', '/{repo_name:.*}/shortlog',
                 controller='shortlog', conditions=dict(function=check_repo))
     map.connect('branches_home', '/{repo_name:.*}/branches',
                 controller='branches', conditions=dict(function=check_repo))
     map.connect('tags_home', '/{repo_name:.*}/tags',
                 controller='tags', conditions=dict(function=check_repo))
     map.connect('changelog_home', '/{repo_name:.*}/changelog',
                 controller='changelog', conditions=dict(function=check_repo))
     map.connect('files_home', '/{repo_name:.*}/files/{revision}/{f_path:.*}',
                 controller='files', revision='tip', f_path='',
                 conditions=dict(function=check_repo))
     map.connect('files_diff_home', '/{repo_name:.*}/diff/{f_path:.*}',
                 controller='files', action='diff', revision='tip', f_path='',
                 conditions=dict(function=check_repo))
     map.connect('files_raw_home', '/{repo_name:.*}/rawfile/{revision}/{f_path:.*}',
                 controller='files', action='rawfile', revision='tip', f_path='',
                 conditions=dict(function=check_repo))
     map.connect('files_annotate_home', '/{repo_name:.*}/annotate/{revision}/{f_path:.*}',
                 controller='files', action='annotate', revision='tip', f_path='',
                 conditions=dict(function=check_repo))
     map.connect('files_archive_home', '/{repo_name:.*}/archive/{revision}/{fileformat}',
                 controller='files', action='archivefile', revision='tip',
                 conditions=dict(function=check_repo))
     map.connect('repo_settings_update', '/{repo_name:.*}/settings',
                 controller='settings', action="update",
                 conditions=dict(method=["PUT"], function=check_repo))
     map.connect('repo_settings_home', '/{repo_name:.*}/settings',
                 controller='settings', action='index',
                 conditions=dict(function=check_repo))


     return map
@@ -1,121 +1,121 @@
 #!/usr/bin/env python
 # encoding: utf-8
 # summary controller for pylons
 # Copyright (C) 2009-2010 Marcin Kuzminski <marcin@python-works.com>
 #
 # This program is free software; you can redistribute it and/or
 # modify it under the terms of the GNU General Public License
 # as published by the Free Software Foundation; version 2
 # of the License or (at your option) any later version of the license.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software
 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
 # MA 02110-1301, USA.
 """
 Created on April 18, 2010
 summary controller for pylons
 @author: marcink
 """
 from datetime import datetime, timedelta
 from pylons import tmpl_context as c, request
 from pylons_app.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
 from pylons_app.lib.base import BaseController, render
 from pylons_app.lib.helpers import person
 from pylons_app.lib.utils import OrderedDict
 from pylons_app.model.hg_model import HgModel
 from time import mktime
 from webhelpers.paginate import Page
 import calendar
 import logging

 log = logging.getLogger(__name__)

 class SummaryController(BaseController):

     @LoginRequired()
     @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
                                    'repository.admin')
     def __before__(self):
         super(SummaryController, self).__before__()

     def index(self):
         hg_model = HgModel()
         c.repo_info = hg_model.get_repo(c.repo_name)
         c.repo_changesets = Page(list(c.repo_info[:10]), page=1, items_per_page=20)
         e = request.environ
         uri = u'%(protocol)s://%(user)s@%(host)s/%(repo_name)s' % {
                                 'protocol': e.get('wsgi.url_scheme'),
                                 'user':str(c.hg_app_user.username),
                                 'host':e.get('HTTP_HOST'),
                                 'repo_name':c.repo_name, }
         c.clone_repo_url = uri
         c.repo_tags = OrderedDict()
         for name, hash in c.repo_info.tags.items()[:10]:
             c.repo_tags[name] = c.repo_info.get_changeset(hash)

         c.repo_branches = OrderedDict()
         for name, hash in c.repo_info.branches.items()[:10]:
             c.repo_branches[name] = c.repo_info.get_changeset(hash)

         c.commit_data = self.__get_commit_stats(c.repo_info)

         return render('summary/summary.html')



     def __get_commit_stats(self, repo):
         aggregate = OrderedDict()

         #graph range
         td = datetime.today() + timedelta(days=1)
         y = td.year
         m = td.month
         d = td.day
         c.ts_min = mktime((y, (td - timedelta(days=calendar.mdays[m] - 1)).month, d, 0, 0, 0, 0, 0, 0,))
         c.ts_max = mktime((y, m, d, 0, 0, 0, 0, 0, 0,))


         def author_key_cleaner(k):
             k = person(k)
             return k

         for cs in repo:
             k = '%s-%s-%s' % (cs.date.timetuple()[0], cs.date.timetuple()[1],
                               cs.date.timetuple()[2])
             timetupple = [int(x) for x in k.split('-')]
             timetupple.extend([0 for _ in xrange(6)])
             k = mktime(timetupple)
             if aggregate.has_key(author_key_cleaner(cs.author)):
                 if aggregate[author_key_cleaner(cs.author)].has_key(k):
                     aggregate[author_key_cleaner(cs.author)][k] += 1
                 else:
                     #aggregate[author_key_cleaner(cs.author)].update(dates_range)
                     if k >= c.ts_min and k <= c.ts_max:
                         aggregate[author_key_cleaner(cs.author)][k] = 1
             else:
                 if k >= c.ts_min and k <= c.ts_max:
                     aggregate[author_key_cleaner(cs.author)] = OrderedDict()
                     #aggregate[author_key_cleaner(cs.author)].update(dates_range)
                     aggregate[author_key_cleaner(cs.author)][k] = 1

         d = ''
         tmpl0 = u""""%s":%s"""
         tmpl1 = u"""{label:"%s",data:%s},"""
         for author in aggregate:
             d += tmpl0 % (author.decode('utf8'),
                           tmpl1 \
                           % (author.decode('utf8'),
                              [[x, aggregate[author][x]] for x in aggregate[author]]))
         if d == '':
-            d = '"%s":{label:"%s",data:[[0,
+            d = '"%s":{label:"%s",data:[[0,1],]}' \
                 % (author_key_cleaner(repo.contact),
                    author_key_cleaner(repo.contact))
         return d

@@ -1,45 +1,46 @@
 from pylons_app import get_version
 try:
     from setuptools import setup, find_packages
 except ImportError:
     from ez_setup import use_setuptools
     use_setuptools()
     from setuptools import setup, find_packages

 setup(
-    name='
+    name='hg_app',
     version=get_version(),
     description='Mercurial repository serving and browsing app',
     keywords='mercurial web hgwebdir replacement serving hgweb',
     license='BSD',
     author='marcin kuzminski',
     author_email='marcin@python-works.com',
     url='http://hg.python-works.com',
     install_requires=[
         "Pylons>=1.0.0",
         "SQLAlchemy>=0.6",
         "Mako>=0.3.2",
         "vcs>=0.1.4",
         "pygments>=1.3.0",
         "mercurial>=1.6",
-        "pysqlite"
+        "pysqlite",
+        "whoosh==1.0.0b5",
     ],
     setup_requires=["PasteScript>=1.6.3"],
     packages=find_packages(exclude=['ez_setup']),
     include_package_data=True,
     test_suite='nose.collector',
     package_data={'pylons_app': ['i18n/*/LC_MESSAGES/*.mo']},
     message_extractors={'pylons_app': [
         ('**.py', 'python', None),
         ('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}),
         ('public/**', 'ignore', None)]},
     zip_safe=False,
     paster_plugins=['PasteScript', 'Pylons'],
     entry_points="""
     [paste.app_factory]
     main = pylons_app.config.middleware:make_app

     [paste.app_install]
     main = pylons.util:PylonsInstaller
     """,
 )