Implemented locking for tasks, to prevent running the same task twice,...
marcink
r497:fb0c3af6 celery
@@ -1,29 +1,67 b''
1 from pylons_app.lib.pidlock import DaemonLock, LockHeld
1 from vcs.utils.lazy import LazyProperty
2 from vcs.utils.lazy import LazyProperty
3 from decorator import decorator
2 import logging
4 import logging
3 import os
5 import os
4 import sys
6 import sys
5 import traceback
7 import traceback
6
8 from hashlib import md5
7 log = logging.getLogger(__name__)
9 log = logging.getLogger(__name__)
8
10
9 class ResultWrapper(object):
11 class ResultWrapper(object):
10 def __init__(self, task):
12 def __init__(self, task):
11 self.task = task
13 self.task = task
12
14
13 @LazyProperty
15 @LazyProperty
14 def result(self):
16 def result(self):
15 return self.task
17 return self.task
16
18
17 def run_task(task, *args, **kwargs):
19 def run_task(task, *args, **kwargs):
18 try:
20 try:
19 t = task.delay(*args, **kwargs)
21 t = task.delay(*args, **kwargs)
20 log.info('running task %s', t.task_id)
22 log.info('running task %s', t.task_id)
21 return t
23 return t
22 except Exception, e:
24 except Exception, e:
25 print e
23 if e.errno == 111:
26 if e.errno == 111:
24 log.debug('Unable to connect. Sync execution')
27 log.debug('Unable to connect. Sync execution')
25 else:
28 else:
26 log.error(traceback.format_exc())
29 log.error(traceback.format_exc())
27 #pure sync version
30 #pure sync version
28 return ResultWrapper(task(*args, **kwargs))
31 return ResultWrapper(task(*args, **kwargs))
29
32
33
34 class LockTask(object):
35 """LockTask decorator"""
36
37 def __init__(self, func):
38 self.func = func
39
40 def __call__(self, func):
41 return decorator(self.__wrapper, func)
42
43 def __wrapper(self, func, *fargs, **fkwargs):
44 params = []
45 params.extend(fargs)
46 params.extend(fkwargs.values())
47 lockkey = 'task_%s' % \
48 md5(str(self.func) + '-' + '-'.join(map(str, params))).hexdigest()
49 log.info('running task with lockkey %s', lockkey)
50 try:
51 l = DaemonLock(lockkey)
52 ret = func(*fargs, **fkwargs)
53 l.release()
54 return ret
55 except LockHeld:
56 log.info('LockHeld')
57 return 'Task with key %s already running' % lockkey
58
59
60
61
62
63
64
65
66
67
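For orientation, a minimal usage sketch of the two pieces above, assuming a hypothetical task named clone_repo: LockTask is given a label, composes under celery's @task, and derives its lock key from that label plus the call arguments, so identical concurrent invocations contend for one lock; run_task prefers asynchronous dispatch and falls back to synchronous execution when no broker answers (errno 111).

    from celery.decorators import task
    from pylons_app.lib.celerylib import run_task, LockTask

    @task
    @LockTask('clone_repo')
    def clone_repo(repo_name):
        return 'cloned %s' % repo_name

    t = run_task(clone_repo, 'vcs_test')
    #.result works on both return types: celery's AsyncResult (once the
    #task finishes) and the ResultWrapper used by the sync fallback
    print t.result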
@@ -1,266 +1,270 b''
1 from celery.decorators import task
1 from celery.decorators import task
2 from celery.task.sets import subtask
2 from celery.task.sets import subtask
3 from celeryconfig import PYLONS_CONFIG as config
3 from celeryconfig import PYLONS_CONFIG as config
4 from pylons.i18n.translation import _
4 from pylons.i18n.translation import _
5 from pylons_app.lib.celerylib import run_task
5 from pylons_app.lib.celerylib import run_task, LockTask
6 from pylons_app.lib.helpers import person
6 from pylons_app.lib.helpers import person
7 from pylons_app.lib.smtp_mailer import SmtpMailer
7 from pylons_app.lib.smtp_mailer import SmtpMailer
8 from pylons_app.lib.utils import OrderedDict
8 from pylons_app.lib.utils import OrderedDict
9 from operator import itemgetter
9 from operator import itemgetter
10 from vcs.backends.hg import MercurialRepository
10 from vcs.backends.hg import MercurialRepository
11 from time import mktime
11 from time import mktime
12 import traceback
12 import traceback
13 import json
13 import json
14
14
15 __all__ = ['whoosh_index', 'get_commits_stats',
15 __all__ = ['whoosh_index', 'get_commits_stats',
16 'reset_user_password', 'send_email']
16 'reset_user_password', 'send_email']
17
17
18 def get_session():
18 def get_session():
19 from sqlalchemy import engine_from_config
19 from sqlalchemy import engine_from_config
20 from sqlalchemy.orm import sessionmaker, scoped_session
20 from sqlalchemy.orm import sessionmaker, scoped_session
21 engine = engine_from_config(dict(config.items('app:main')), 'sqlalchemy.db1.')
21 engine = engine_from_config(dict(config.items('app:main')), 'sqlalchemy.db1.')
22 sa = scoped_session(sessionmaker(bind=engine))
22 sa = scoped_session(sessionmaker(bind=engine))
23 return sa
23 return sa
24
24
25 def get_hg_settings():
25 def get_hg_settings():
26 from pylons_app.model.db import HgAppSettings
26 from pylons_app.model.db import HgAppSettings
27 try:
27 try:
28 sa = get_session()
28 sa = get_session()
29 ret = sa.query(HgAppSettings).all()
29 ret = sa.query(HgAppSettings).all()
30 finally:
30 finally:
31 sa.remove()
31 sa.remove()
32
32
33 if not ret:
33 if not ret:
34 raise Exception('Could not get application settings !')
34 raise Exception('Could not get application settings !')
35 settings = {}
35 settings = {}
36 for each in ret:
36 for each in ret:
37 settings['hg_app_' + each.app_settings_name] = each.app_settings_value
37 settings['hg_app_' + each.app_settings_name] = each.app_settings_value
38
38
39 return settings
39 return settings
40
40
41 def get_hg_ui_settings():
41 def get_hg_ui_settings():
42 from pylons_app.model.db import HgAppUi
42 from pylons_app.model.db import HgAppUi
43 try:
43 try:
44 sa = get_session()
44 sa = get_session()
45 ret = sa.query(HgAppUi).all()
45 ret = sa.query(HgAppUi).all()
46 finally:
46 finally:
47 sa.remove()
47 sa.remove()
48
48
49 if not ret:
49 if not ret:
50 raise Exception('Could not get application ui settings !')
50 raise Exception('Could not get application ui settings !')
51 settings = {}
51 settings = {}
52 for each in ret:
52 for each in ret:
53 k = each.ui_key
53 k = each.ui_key
54 v = each.ui_value
54 v = each.ui_value
55 if k == '/':
55 if k == '/':
56 k = 'root_path'
56 k = 'root_path'
57
57
58 if k.find('.') != -1:
58 if k.find('.') != -1:
59 k = k.replace('.', '_')
59 k = k.replace('.', '_')
60
60
61 if each.ui_section == 'hooks':
61 if each.ui_section == 'hooks':
62 v = each.ui_active
62 v = each.ui_active
63
63
64 settings[each.ui_section + '_' + k] = v
64 settings[each.ui_section + '_' + k] = v
65
65
66 return settings
66 return settings
67
67
68 @task
68 @task
69 def whoosh_index(repo_location, full_index):
69 def whoosh_index(repo_location, full_index):
70 log = whoosh_index.get_logger()
70 log = whoosh_index.get_logger()
71 from pylons_app.lib.indexers import DaemonLock
71 from pylons_app.lib.pidlock import DaemonLock
72 from pylons_app.lib.indexers.daemon import WhooshIndexingDaemon, LockHeld
72 from pylons_app.lib.indexers.daemon import WhooshIndexingDaemon, LockHeld
73 try:
73 try:
74 l = DaemonLock()
74 l = DaemonLock()
75 WhooshIndexingDaemon(repo_location=repo_location)\
75 WhooshIndexingDaemon(repo_location=repo_location)\
76 .run(full_index=full_index)
76 .run(full_index=full_index)
77 l.release()
77 l.release()
78 return 'Done'
78 return 'Done'
79 except LockHeld:
79 except LockHeld:
80 log.info('LockHeld')
80 log.info('LockHeld')
81 return 'LockHeld'
81 return 'LockHeld'
82
82
83
83 @task
84 @task
85 @LockTask('get_commits_stats')
84 def get_commits_stats(repo_name, ts_min_y, ts_max_y):
86 def get_commits_stats(repo_name, ts_min_y, ts_max_y):
85 author_key_cleaner = lambda k: person(k).replace('"', "") #for js data compatibility
87 author_key_cleaner = lambda k: person(k).replace('"', "") #for js data compatibility
86
88
87 from pylons_app.model.db import Statistics, Repository
89 from pylons_app.model.db import Statistics, Repository
88 log = get_commits_stats.get_logger()
90 log = get_commits_stats.get_logger()
89 commits_by_day_author_aggregate = {}
91 commits_by_day_author_aggregate = {}
90 commits_by_day_aggregate = {}
92 commits_by_day_aggregate = {}
91 repos_path = get_hg_ui_settings()['paths_root_path'].replace('*', '')
93 repos_path = get_hg_ui_settings()['paths_root_path'].replace('*', '')
92 repo = MercurialRepository(repos_path + repo_name)
94 repo = MercurialRepository(repos_path + repo_name)
93
95
94 skip_date_limit = True
96 skip_date_limit = True
95 parse_limit = 500 #limit for single task changeset parsing
97 parse_limit = 350 #limit for single task changeset parsing
96 last_rev = 0
98 last_rev = 0
97 last_cs = None
99 last_cs = None
98 timegetter = itemgetter('time')
100 timegetter = itemgetter('time')
99
101
100 sa = get_session()
102 sa = get_session()
101
103
102 dbrepo = sa.query(Repository)\
104 dbrepo = sa.query(Repository)\
103 .filter(Repository.repo_name == repo_name).scalar()
105 .filter(Repository.repo_name == repo_name).scalar()
104 cur_stats = sa.query(Statistics)\
106 cur_stats = sa.query(Statistics)\
105 .filter(Statistics.repository == dbrepo).scalar()
107 .filter(Statistics.repository == dbrepo).scalar()
106 if cur_stats:
108 if cur_stats:
107 last_rev = cur_stats.stat_on_revision
109 last_rev = cur_stats.stat_on_revision
108
110
109 if last_rev == repo.revisions[-1]:
111 if last_rev == repo.revisions[-1]:
110 #pass silently without any work
112 #pass silently without any work
111 return True
113 return True
112
114
113 if cur_stats:
115 if cur_stats:
114 commits_by_day_aggregate = OrderedDict(
116 commits_by_day_aggregate = OrderedDict(
115 json.loads(
117 json.loads(
116 cur_stats.commit_activity_combined))
118 cur_stats.commit_activity_combined))
117 commits_by_day_author_aggregate = json.loads(cur_stats.commit_activity)
119 commits_by_day_author_aggregate = json.loads(cur_stats.commit_activity)
118
120
119 for cnt, rev in enumerate(repo.revisions[last_rev:]):
121 for cnt, rev in enumerate(repo.revisions[last_rev:]):
120 last_cs = cs = repo.get_changeset(rev)
122 last_cs = cs = repo.get_changeset(rev)
121 k = '%s-%s-%s' % (cs.date.timetuple()[0], cs.date.timetuple()[1],
123 k = '%s-%s-%s' % (cs.date.timetuple()[0], cs.date.timetuple()[1],
122 cs.date.timetuple()[2])
124 cs.date.timetuple()[2])
123 timetupple = [int(x) for x in k.split('-')]
125 timetupple = [int(x) for x in k.split('-')]
124 timetupple.extend([0 for _ in xrange(6)])
126 timetupple.extend([0 for _ in xrange(6)])
125 k = mktime(timetupple)
127 k = mktime(timetupple)
126 if commits_by_day_author_aggregate.has_key(author_key_cleaner(cs.author)):
128 if commits_by_day_author_aggregate.has_key(author_key_cleaner(cs.author)):
127 try:
129 try:
128 l = [timegetter(x) for x in commits_by_day_author_aggregate\
130 l = [timegetter(x) for x in commits_by_day_author_aggregate\
129 [author_key_cleaner(cs.author)]['data']]
131 [author_key_cleaner(cs.author)]['data']]
130 time_pos = l.index(k)
132 time_pos = l.index(k)
131 except ValueError:
133 except ValueError:
132 time_pos = False
134 time_pos = False
133
135
134 if time_pos >= 0 and time_pos is not False:
136 if time_pos >= 0 and time_pos is not False:
135
137
136 datadict = commits_by_day_author_aggregate\
138 datadict = commits_by_day_author_aggregate\
137 [author_key_cleaner(cs.author)]['data'][time_pos]
139 [author_key_cleaner(cs.author)]['data'][time_pos]
138
140
139 datadict["commits"] += 1
141 datadict["commits"] += 1
140 datadict["added"] += len(cs.added)
142 datadict["added"] += len(cs.added)
141 datadict["changed"] += len(cs.changed)
143 datadict["changed"] += len(cs.changed)
142 datadict["removed"] += len(cs.removed)
144 datadict["removed"] += len(cs.removed)
143 #print datadict
145 #print datadict
144
146
145 else:
147 else:
146 #print 'ELSE !!!!'
148 #print 'ELSE !!!!'
147 if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
149 if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
148
150
149 datadict = {"time":k,
151 datadict = {"time":k,
150 "commits":1,
152 "commits":1,
151 "added":len(cs.added),
153 "added":len(cs.added),
152 "changed":len(cs.changed),
154 "changed":len(cs.changed),
153 "removed":len(cs.removed),
155 "removed":len(cs.removed),
154 }
156 }
155 commits_by_day_author_aggregate\
157 commits_by_day_author_aggregate\
156 [author_key_cleaner(cs.author)]['data'].append(datadict)
158 [author_key_cleaner(cs.author)]['data'].append(datadict)
157
159
158 else:
160 else:
159 #print k, 'nokey ADDING'
161 #print k, 'nokey ADDING'
160 if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
162 if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
161 commits_by_day_author_aggregate[author_key_cleaner(cs.author)] = {
163 commits_by_day_author_aggregate[author_key_cleaner(cs.author)] = {
162 "label":author_key_cleaner(cs.author),
164 "label":author_key_cleaner(cs.author),
163 "data":[{"time":k,
165 "data":[{"time":k,
164 "commits":1,
166 "commits":1,
165 "added":len(cs.added),
167 "added":len(cs.added),
166 "changed":len(cs.changed),
168 "changed":len(cs.changed),
167 "removed":len(cs.removed),
169 "removed":len(cs.removed),
168 }],
170 }],
169 "schema":["commits"],
171 "schema":["commits"],
170 }
172 }
171
173
172 # #gather all data by day
174 # #gather all data by day
173 if commits_by_day_aggregate.has_key(k):
175 if commits_by_day_aggregate.has_key(k):
174 commits_by_day_aggregate[k] += 1
176 commits_by_day_aggregate[k] += 1
175 else:
177 else:
176 commits_by_day_aggregate[k] = 1
178 commits_by_day_aggregate[k] = 1
177
179
178 if cnt >= parse_limit:
180 if cnt >= parse_limit:
179 #don't fetch too much data since we can freeze the application
181 #don't fetch too much data since we can freeze the application
180 break
182 break
181
183
182 overview_data = []
184 overview_data = []
183 for k, v in commits_by_day_aggregate.items():
185 for k, v in commits_by_day_aggregate.items():
184 overview_data.append([k, v])
186 overview_data.append([k, v])
185 overview_data = sorted(overview_data, key=itemgetter(0))
187 overview_data = sorted(overview_data, key=itemgetter(0))
186
188
187 if not commits_by_day_author_aggregate:
189 if not commits_by_day_author_aggregate:
188 commits_by_day_author_aggregate[author_key_cleaner(repo.contact)] = {
190 commits_by_day_author_aggregate[author_key_cleaner(repo.contact)] = {
189 "label":author_key_cleaner(repo.contact),
191 "label":author_key_cleaner(repo.contact),
190 "data":[0, 1],
192 "data":[0, 1],
191 "schema":["commits"],
193 "schema":["commits"],
192 }
194 }
193
195
194 stats = cur_stats if cur_stats else Statistics()
196 stats = cur_stats if cur_stats else Statistics()
195 stats.commit_activity = json.dumps(commits_by_day_author_aggregate)
197 stats.commit_activity = json.dumps(commits_by_day_author_aggregate)
196 stats.commit_activity_combined = json.dumps(overview_data)
198 stats.commit_activity_combined = json.dumps(overview_data)
197 stats.repository = dbrepo
199 stats.repository = dbrepo
198 stats.stat_on_revision = last_cs.revision
200 stats.stat_on_revision = last_cs.revision
199 stats.languages = json.dumps({'_TOTAL_':0, '':0})
201 stats.languages = json.dumps({'_TOTAL_':0, '':0})
200
202
201 try:
203 try:
202 sa.add(stats)
204 sa.add(stats)
203 sa.commit()
205 sa.commit()
204 except:
206 except:
205 log.error(traceback.format_exc())
207 log.error(traceback.format_exc())
206 sa.rollback()
208 sa.rollback()
207 return False
209 return False
208
210
211 run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y)
212
209 return True
213 return True
210
214
211 @task
215 @task
212 def reset_user_password(user_email):
216 def reset_user_password(user_email):
213 log = reset_user_password.get_logger()
217 log = reset_user_password.get_logger()
214 from pylons_app.lib import auth
218 from pylons_app.lib import auth
215 from pylons_app.model.db import User
219 from pylons_app.model.db import User
216
220
217 try:
221 try:
218 try:
222 try:
219 sa = get_session()
223 sa = get_session()
220 user = sa.query(User).filter(User.email == user_email).scalar()
224 user = sa.query(User).filter(User.email == user_email).scalar()
221 new_passwd = auth.PasswordGenerator().gen_password(8,
225 new_passwd = auth.PasswordGenerator().gen_password(8,
222 auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
226 auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
223 if user:
227 if user:
224 user.password = auth.get_crypt_password(new_passwd)
228 user.password = auth.get_crypt_password(new_passwd)
225 sa.add(user)
229 sa.add(user)
226 sa.commit()
230 sa.commit()
227 log.info('change password for %s', user_email)
231 log.info('change password for %s', user_email)
228 if new_passwd is None:
232 if new_passwd is None:
229 raise Exception('unable to generate new password')
233 raise Exception('unable to generate new password')
230
234
231 except:
235 except:
232 log.error(traceback.format_exc())
236 log.error(traceback.format_exc())
233 sa.rollback()
237 sa.rollback()
234
238
235 run_task(send_email, user_email,
239 run_task(send_email, user_email,
236 "Your new hg-app password",
240 "Your new hg-app password",
237 'Your new hg-app password:%s' % (new_passwd))
241 'Your new hg-app password:%s' % (new_passwd))
238 log.info('send new password mail to %s', user_email)
242 log.info('send new password mail to %s', user_email)
239
243
240
244
241 except:
245 except:
242 log.error('Failed to update user password')
246 log.error('Failed to update user password')
243 log.error(traceback.format_exc())
247 log.error(traceback.format_exc())
244 return True
248 return True
245
249
246 @task
250 @task
247 def send_email(recipients, subject, body):
251 def send_email(recipients, subject, body):
248 log = send_email.get_logger()
252 log = send_email.get_logger()
249 email_config = dict(config.items('DEFAULT'))
253 email_config = dict(config.items('DEFAULT'))
250 mail_from = email_config.get('app_email_from')
254 mail_from = email_config.get('app_email_from')
251 user = email_config.get('smtp_username')
255 user = email_config.get('smtp_username')
252 passwd = email_config.get('smtp_password')
256 passwd = email_config.get('smtp_password')
253 mail_server = email_config.get('smtp_server')
257 mail_server = email_config.get('smtp_server')
254 mail_port = email_config.get('smtp_port')
258 mail_port = email_config.get('smtp_port')
255 tls = email_config.get('smtp_use_tls')
259 tls = email_config.get('smtp_use_tls')
256 ssl = False
260 ssl = False
257
261
258 try:
262 try:
259 m = SmtpMailer(mail_from, user, passwd, mail_server,
263 m = SmtpMailer(mail_from, user, passwd, mail_server,
260 mail_port, ssl, tls)
264 mail_port, ssl, tls)
261 m.send(recipients, subject, body)
265 m.send(recipients, subject, body)
262 except:
266 except:
263 log.error('Mail sending failed')
267 log.error('Mail sending failed')
264 log.error(traceback.format_exc())
268 log.error(traceback.format_exc())
265 return False
269 return False
266 return True
270 return True
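Two details of the stats task above are easy to miss. First, because parsing stops after parse_limit (now 350) changesets, the task re-queues itself via run_task(get_commits_stats, ...) just before returning, so large repositories are digested in slices across successive runs. Second, the lock key that @LockTask computes covers the argument values, so only identical invocations are serialized; a hedged sketch of the derivation, with hypothetical values:

    from hashlib import md5

    params = ['vcs_test', 1254351600, 1285887600]
    lockkey = 'task_%s' % md5('get_commits_stats' + '-'
                              + '-'.join(map(str, params))).hexdigest()
    #a second, identical call started while the first still runs gets back
    #'Task with key task_<md5> already running' instead of duplicating work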
@@ -1,140 +1,139 b''
1 from os.path import dirname as dn, join as jn
1 from os.path import dirname as dn, join as jn
2 from pidlock import LockHeld, DaemonLock
3 from pylons_app.config.environment import load_environment
2 from pylons_app.config.environment import load_environment
4 from pylons_app.model.hg_model import HgModel
3 from pylons_app.model.hg_model import HgModel
5 from shutil import rmtree
4 from shutil import rmtree
6 from webhelpers.html.builder import escape
5 from webhelpers.html.builder import escape
7 from vcs.utils.lazy import LazyProperty
6 from vcs.utils.lazy import LazyProperty
8
7
9 from whoosh.analysis import RegexTokenizer, LowercaseFilter, StopFilter
8 from whoosh.analysis import RegexTokenizer, LowercaseFilter, StopFilter
10 from whoosh.fields import TEXT, ID, STORED, Schema, FieldType
9 from whoosh.fields import TEXT, ID, STORED, Schema, FieldType
11 from whoosh.index import create_in, open_dir
10 from whoosh.index import create_in, open_dir
12 from whoosh.formats import Characters
11 from whoosh.formats import Characters
13 from whoosh.highlight import highlight, SimpleFragmenter, HtmlFormatter
12 from whoosh.highlight import highlight, SimpleFragmenter, HtmlFormatter
14
13
15 import os
14 import os
16 import sys
15 import sys
17 import traceback
16 import traceback
18
17
19 #to get the pylons_app import
18 #to get the pylons_app import
20 sys.path.append(dn(dn(dn(os.path.realpath(__file__)))))
19 sys.path.append(dn(dn(dn(os.path.realpath(__file__)))))
21
20
22
21
23 #LOCATION WE KEEP THE INDEX
22 #LOCATION WE KEEP THE INDEX
24 IDX_LOCATION = jn(dn(dn(dn(dn(os.path.abspath(__file__))))), 'data', 'index')
23 IDX_LOCATION = jn(dn(dn(dn(dn(os.path.abspath(__file__))))), 'data', 'index')
25
24
26 #EXTENSIONS WE WANT TO INDEX THE CONTENT OF
25 #EXTENSIONS WE WANT TO INDEX THE CONTENT OF
27 INDEX_EXTENSIONS = ['action', 'adp', 'ashx', 'asmx', 'aspx', 'asx', 'axd', 'c',
26 INDEX_EXTENSIONS = ['action', 'adp', 'ashx', 'asmx', 'aspx', 'asx', 'axd', 'c',
28 'cfg', 'cfm', 'cpp', 'cs', 'css', 'diff', 'do', 'el', 'erl',
27 'cfg', 'cfm', 'cpp', 'cs', 'css', 'diff', 'do', 'el', 'erl',
29 'h', 'htm', 'html', 'ini', 'java', 'js', 'jsp', 'jspx', 'lisp',
28 'h', 'htm', 'html', 'ini', 'java', 'js', 'jsp', 'jspx', 'lisp',
30 'lua', 'm', 'mako', 'ml', 'pas', 'patch', 'php', 'php3',
29 'lua', 'm', 'mako', 'ml', 'pas', 'patch', 'php', 'php3',
31 'php4', 'phtml', 'pm', 'py', 'rb', 'rst', 's', 'sh', 'sql',
30 'php4', 'phtml', 'pm', 'py', 'rb', 'rst', 's', 'sh', 'sql',
32 'tpl', 'txt', 'vim', 'wss', 'xhtml', 'xml', 'xsl', 'xslt',
31 'tpl', 'txt', 'vim', 'wss', 'xhtml', 'xml', 'xsl', 'xslt',
33 'yaws']
32 'yaws']
34
33
35 #CUSTOM ANALYZER wordsplit + lowercase filter
34 #CUSTOM ANALYZER wordsplit + lowercase filter
36 ANALYZER = RegexTokenizer(expression=r"\w+") | LowercaseFilter()
35 ANALYZER = RegexTokenizer(expression=r"\w+") | LowercaseFilter()
37
36
38
37
39 #INDEX SCHEMA DEFINITION
38 #INDEX SCHEMA DEFINITION
40 SCHEMA = Schema(owner=TEXT(),
39 SCHEMA = Schema(owner=TEXT(),
41 repository=TEXT(stored=True),
40 repository=TEXT(stored=True),
42 path=ID(stored=True, unique=True),
41 path=ID(stored=True, unique=True),
43 content=FieldType(format=Characters(ANALYZER),
42 content=FieldType(format=Characters(ANALYZER),
44 scorable=True, stored=True),
43 scorable=True, stored=True),
45 modtime=STORED(), extension=TEXT(stored=True))
44 modtime=STORED(), extension=TEXT(stored=True))
46
45
47
46
48 IDX_NAME = 'HG_INDEX'
47 IDX_NAME = 'HG_INDEX'
49 FORMATTER = HtmlFormatter('span', between='\n<span class="break">...</span>\n')
48 FORMATTER = HtmlFormatter('span', between='\n<span class="break">...</span>\n')
50 FRAGMENTER = SimpleFragmenter(200)
49 FRAGMENTER = SimpleFragmenter(200)
51
50
52 class ResultWrapper(object):
51 class ResultWrapper(object):
53 def __init__(self, searcher, matcher, highlight_items):
52 def __init__(self, searcher, matcher, highlight_items):
54 self.searcher = searcher
53 self.searcher = searcher
55 self.matcher = matcher
54 self.matcher = matcher
56 self.highlight_items = highlight_items
55 self.highlight_items = highlight_items
57 self.fragment_size = 200 / 2
56 self.fragment_size = 200 / 2
58
57
59 @LazyProperty
58 @LazyProperty
60 def doc_ids(self):
59 def doc_ids(self):
61 docs_id = []
60 docs_id = []
62 while self.matcher.is_active():
61 while self.matcher.is_active():
63 docnum = self.matcher.id()
62 docnum = self.matcher.id()
64 chunks = [offsets for offsets in self.get_chunks()]
63 chunks = [offsets for offsets in self.get_chunks()]
65 docs_id.append([docnum, chunks])
64 docs_id.append([docnum, chunks])
66 self.matcher.next()
65 self.matcher.next()
67 return docs_id
66 return docs_id
68
67
69 def __str__(self):
68 def __str__(self):
70 return '<%s at %s>' % (self.__class__.__name__, len(self.doc_ids))
69 return '<%s at %s>' % (self.__class__.__name__, len(self.doc_ids))
71
70
72 def __repr__(self):
71 def __repr__(self):
73 return self.__str__()
72 return self.__str__()
74
73
75 def __len__(self):
74 def __len__(self):
76 return len(self.doc_ids)
75 return len(self.doc_ids)
77
76
78 def __iter__(self):
77 def __iter__(self):
79 """
78 """
80 Allows iteration over results, and lazily generates content
79 Allows iteration over results, and lazily generates content
81
80
82 *Requires* implementation of ``__getitem__`` method.
81 *Requires* implementation of ``__getitem__`` method.
83 """
82 """
84 for docid in self.doc_ids:
83 for docid in self.doc_ids:
85 yield self.get_full_content(docid)
84 yield self.get_full_content(docid)
86
85
87 def __getslice__(self, i, j):
86 def __getslice__(self, i, j):
88 """
87 """
89 Slicing of resultWrapper
88 Slicing of resultWrapper
90 """
89 """
91 slice = []
90 slice = []
92 for docid in self.doc_ids[i:j]:
91 for docid in self.doc_ids[i:j]:
93 slice.append(self.get_full_content(docid))
92 slice.append(self.get_full_content(docid))
94 return slice
93 return slice
95
94
96
95
97 def get_full_content(self, docid):
96 def get_full_content(self, docid):
98 res = self.searcher.stored_fields(docid[0])
97 res = self.searcher.stored_fields(docid[0])
99 f_path = res['path'][res['path'].find(res['repository']) \
98 f_path = res['path'][res['path'].find(res['repository']) \
100 + len(res['repository']):].lstrip('/')
99 + len(res['repository']):].lstrip('/')
101
100
102 content_short = self.get_short_content(res, docid[1])
101 content_short = self.get_short_content(res, docid[1])
103 res.update({'content_short':content_short,
102 res.update({'content_short':content_short,
104 'content_short_hl':self.highlight(content_short),
103 'content_short_hl':self.highlight(content_short),
105 'f_path':f_path})
104 'f_path':f_path})
106
105
107 return res
106 return res
108
107
109 def get_short_content(self, res, chunks):
108 def get_short_content(self, res, chunks):
110
109
111 return ''.join([res['content'][chunk[0]:chunk[1]] for chunk in chunks])
110 return ''.join([res['content'][chunk[0]:chunk[1]] for chunk in chunks])
112
111
113 def get_chunks(self):
112 def get_chunks(self):
114 """
113 """
115 Chunks the content without overlapping chunks,
114 Chunks the content without overlapping chunks,
116 so it doesn't highlight the same
115 so it doesn't highlight the same
117 close occurrences twice.
116 close occurrences twice.
118 @param matcher:
117 @param matcher:
119 @param size:
118 @param size:
120 """
119 """
121 memory = [(0, 0)]
120 memory = [(0, 0)]
122 for span in self.matcher.spans():
121 for span in self.matcher.spans():
123 start = span.startchar or 0
122 start = span.startchar or 0
124 end = span.endchar or 0
123 end = span.endchar or 0
125 start_offseted = max(0, start - self.fragment_size)
124 start_offseted = max(0, start - self.fragment_size)
126 end_offseted = end + self.fragment_size
125 end_offseted = end + self.fragment_size
127
126
128 if start_offseted < memory[-1][1]:
127 if start_offseted < memory[-1][1]:
129 start_offseted = memory[-1][1]
128 start_offseted = memory[-1][1]
130 memory.append((start_offseted, end_offseted,))
129 memory.append((start_offseted, end_offseted,))
131 yield (start_offseted, end_offseted,)
130 yield (start_offseted, end_offseted,)
132
131
133 def highlight(self, content, top=5):
132 def highlight(self, content, top=5):
134 hl = highlight(escape(content),
133 hl = highlight(escape(content),
135 self.highlight_items,
134 self.highlight_items,
136 analyzer=ANALYZER,
135 analyzer=ANALYZER,
137 fragmenter=FRAGMENTER,
136 fragmenter=FRAGMENTER,
138 formatter=FORMATTER,
137 formatter=FORMATTER,
139 top=top)
138 top=top)
140 return hl
139 return hl
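The module above only defines the schema and the ResultWrapper used for matcher-level highlighting; a hedged sketch of querying an index that has already been built at IDX_LOCATION (the search term is arbitrary):

    from whoosh.index import open_dir
    from whoosh.qparser import QueryParser

    idx = open_dir(IDX_LOCATION, indexname=IDX_NAME)
    searcher = idx.searcher()
    query = QueryParser('content', schema=SCHEMA).parse(u'DaemonLock')
    #repository, path and extension are stored fields, so they come back
    #with each hit without re-reading the source files
    for hit in searcher.search(query):
        print hit['repository'], hit['path']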
@@ -1,238 +1,238 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 # encoding: utf-8
2 # encoding: utf-8
3 # whoosh indexer daemon for hg-app
3 # whoosh indexer daemon for hg-app
4 # Copyright (C) 2009-2010 Marcin Kuzminski <marcin@python-works.com>
4 # Copyright (C) 2009-2010 Marcin Kuzminski <marcin@python-works.com>
5 #
5 #
6 # This program is free software; you can redistribute it and/or
6 # This program is free software; you can redistribute it and/or
7 # modify it under the terms of the GNU General Public License
7 # modify it under the terms of the GNU General Public License
8 # as published by the Free Software Foundation; version 2
8 # as published by the Free Software Foundation; version 2
9 # of the License or (at your option) any later version of the license.
9 # of the License or (at your option) any later version of the license.
10 #
10 #
11 # This program is distributed in the hope that it will be useful,
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
14 # GNU General Public License for more details.
15 #
15 #
16 # You should have received a copy of the GNU General Public License
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
18 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
19 # MA 02110-1301, USA.
19 # MA 02110-1301, USA.
20 """
20 """
21 Created on Jan 26, 2010
21 Created on Jan 26, 2010
22
22
23 @author: marcink
23 @author: marcink
24 A daemon will read from the task table and run tasks
24 A daemon will read from the task table and run tasks
25 """
25 """
26 import sys
26 import sys
27 import os
27 import os
28 from os.path import dirname as dn
28 from os.path import dirname as dn
29 from os.path import join as jn
29 from os.path import join as jn
30
30
31 #to get the pylons_app import
31 #to get the pylons_app import
32 project_path = dn(dn(dn(dn(os.path.realpath(__file__)))))
32 project_path = dn(dn(dn(dn(os.path.realpath(__file__)))))
33 sys.path.append(project_path)
33 sys.path.append(project_path)
34
34
35 from pidlock import LockHeld, DaemonLock
35 from pylons_app.lib.pidlock import LockHeld, DaemonLock
36 from pylons_app.model.hg_model import HgModel
36 from pylons_app.model.hg_model import HgModel
37 from pylons_app.lib.helpers import safe_unicode
37 from pylons_app.lib.helpers import safe_unicode
38 from whoosh.index import create_in, open_dir
38 from whoosh.index import create_in, open_dir
39 from shutil import rmtree
39 from shutil import rmtree
40 from pylons_app.lib.indexers import INDEX_EXTENSIONS, IDX_LOCATION, SCHEMA, IDX_NAME
40 from pylons_app.lib.indexers import INDEX_EXTENSIONS, IDX_LOCATION, SCHEMA, IDX_NAME
41
41
42 import logging
42 import logging
43
43
44 log = logging.getLogger('whooshIndexer')
44 log = logging.getLogger('whooshIndexer')
45 # create logger
45 # create logger
46 log.setLevel(logging.DEBUG)
46 log.setLevel(logging.DEBUG)
47 log.propagate = False
47 log.propagate = False
48 # create console handler and set level to debug
48 # create console handler and set level to debug
49 ch = logging.StreamHandler()
49 ch = logging.StreamHandler()
50 ch.setLevel(logging.DEBUG)
50 ch.setLevel(logging.DEBUG)
51
51
52 # create formatter
52 # create formatter
53 formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
53 formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
54
54
55 # add formatter to ch
55 # add formatter to ch
56 ch.setFormatter(formatter)
56 ch.setFormatter(formatter)
57
57
58 # add ch to logger
58 # add ch to logger
59 log.addHandler(ch)
59 log.addHandler(ch)
60
60
61 def scan_paths(root_location):
61 def scan_paths(root_location):
62 return HgModel.repo_scan('/', root_location, None, True)
62 return HgModel.repo_scan('/', root_location, None, True)
63
63
64 class WhooshIndexingDaemon(object):
64 class WhooshIndexingDaemon(object):
65 """Deamon for atomic jobs"""
65 """Deamon for atomic jobs"""
66
66
67 def __init__(self, indexname='HG_INDEX', repo_location=None):
67 def __init__(self, indexname='HG_INDEX', repo_location=None):
68 self.indexname = indexname
68 self.indexname = indexname
69 self.repo_location = repo_location
69 self.repo_location = repo_location
70 self.initial = False
70 self.initial = False
71 if not os.path.isdir(IDX_LOCATION):
71 if not os.path.isdir(IDX_LOCATION):
72 os.mkdir(IDX_LOCATION)
72 os.mkdir(IDX_LOCATION)
73 log.info('Cannot run incremental index since it does not'
73 log.info('Cannot run incremental index since it does not'
74 ' yet exist; running full build')
74 ' yet exist; running full build')
75 self.initial = True
75 self.initial = True
76
76
77 def get_paths(self, root_dir):
77 def get_paths(self, root_dir):
78 """recursive walk in root dir and return a set of all path in that dir
78 """recursive walk in root dir and return a set of all path in that dir
79 excluding files in .hg dir"""
79 excluding files in .hg dir"""
80 index_paths_ = set()
80 index_paths_ = set()
81 for path, dirs, files in os.walk(root_dir):
81 for path, dirs, files in os.walk(root_dir):
82 if path.find('.hg') == -1:
82 if path.find('.hg') == -1:
83 for f in files:
83 for f in files:
84 index_paths_.add(jn(path, f))
84 index_paths_.add(jn(path, f))
85
85
86 return index_paths_
86 return index_paths_
87
87
88 def add_doc(self, writer, path, repo):
88 def add_doc(self, writer, path, repo):
89 """Adding doc to writer"""
89 """Adding doc to writer"""
90
90
91 ext = unicode(path.split('/')[-1].split('.')[-1].lower())
91 ext = unicode(path.split('/')[-1].split('.')[-1].lower())
92 #we just index the content of chosen files
92 #we just index the content of chosen files
93 if ext in INDEX_EXTENSIONS:
93 if ext in INDEX_EXTENSIONS:
94 log.debug(' >> %s [WITH CONTENT]' % path)
94 log.debug(' >> %s [WITH CONTENT]' % path)
95 fobj = open(path, 'rb')
95 fobj = open(path, 'rb')
96 content = fobj.read()
96 content = fobj.read()
97 fobj.close()
97 fobj.close()
98 u_content = safe_unicode(content)
98 u_content = safe_unicode(content)
99 else:
99 else:
100 log.debug(' >> %s' % path)
100 log.debug(' >> %s' % path)
101 #just index the file name without its content
101 #just index the file name without its content
102 u_content = u''
102 u_content = u''
103
103
104
104
105
105
106 try:
106 try:
107 os.stat(path)
107 os.stat(path)
108 writer.add_document(owner=unicode(repo.contact),
108 writer.add_document(owner=unicode(repo.contact),
109 repository=u"%s" % repo.name,
109 repository=u"%s" % repo.name,
110 path=u"%s" % path,
110 path=u"%s" % path,
111 content=u_content,
111 content=u_content,
112 modtime=os.path.getmtime(path),
112 modtime=os.path.getmtime(path),
113 extension=ext)
113 extension=ext)
114 except OSError, e:
114 except OSError, e:
115 import errno
115 import errno
116 if e.errno == errno.ENOENT:
116 if e.errno == errno.ENOENT:
117 log.debug('path %s does not exist or is a broken symlink' % path)
117 log.debug('path %s does not exist or is a broken symlink' % path)
118 else:
118 else:
119 raise e
119 raise e
120
120
121
121
122 def build_index(self):
122 def build_index(self):
123 if os.path.exists(IDX_LOCATION):
123 if os.path.exists(IDX_LOCATION):
124 log.debug('removing previous index')
124 log.debug('removing previous index')
125 rmtree(IDX_LOCATION)
125 rmtree(IDX_LOCATION)
126
126
127 if not os.path.exists(IDX_LOCATION):
127 if not os.path.exists(IDX_LOCATION):
128 os.mkdir(IDX_LOCATION)
128 os.mkdir(IDX_LOCATION)
129
129
130 idx = create_in(IDX_LOCATION, SCHEMA, indexname=IDX_NAME)
130 idx = create_in(IDX_LOCATION, SCHEMA, indexname=IDX_NAME)
131 writer = idx.writer()
131 writer = idx.writer()
132
132
133 for cnt, repo in enumerate(scan_paths(self.repo_location).values()):
133 for cnt, repo in enumerate(scan_paths(self.repo_location).values()):
134 log.debug('building index @ %s' % repo.path)
134 log.debug('building index @ %s' % repo.path)
135
135
136 for idx_path in self.get_paths(repo.path):
136 for idx_path in self.get_paths(repo.path):
137 self.add_doc(writer, idx_path, repo)
137 self.add_doc(writer, idx_path, repo)
138 writer.commit(merge=True)
138 writer.commit(merge=True)
139
139
140 log.debug('>>> FINISHED BUILDING INDEX <<<')
140 log.debug('>>> FINISHED BUILDING INDEX <<<')
141
141
142
142
143 def update_index(self):
143 def update_index(self):
144 log.debug('STARTING INCREMENTAL INDEXING UPDATE')
144 log.debug('STARTING INCREMENTAL INDEXING UPDATE')
145
145
146 idx = open_dir(IDX_LOCATION, indexname=self.indexname)
146 idx = open_dir(IDX_LOCATION, indexname=self.indexname)
147 # The set of all paths in the index
147 # The set of all paths in the index
148 indexed_paths = set()
148 indexed_paths = set()
149 # The set of all paths we need to re-index
149 # The set of all paths we need to re-index
150 to_index = set()
150 to_index = set()
151
151
152 reader = idx.reader()
152 reader = idx.reader()
153 writer = idx.writer()
153 writer = idx.writer()
154
154
155 # Loop over the stored fields in the index
155 # Loop over the stored fields in the index
156 for fields in reader.all_stored_fields():
156 for fields in reader.all_stored_fields():
157 indexed_path = fields['path']
157 indexed_path = fields['path']
158 indexed_paths.add(indexed_path)
158 indexed_paths.add(indexed_path)
159
159
160 if not os.path.exists(indexed_path):
160 if not os.path.exists(indexed_path):
161 # This file was deleted since it was indexed
161 # This file was deleted since it was indexed
162 log.debug('removing from index %s' % indexed_path)
162 log.debug('removing from index %s' % indexed_path)
163 writer.delete_by_term('path', indexed_path)
163 writer.delete_by_term('path', indexed_path)
164
164
165 else:
165 else:
166 # Check if this file was changed since it
166 # Check if this file was changed since it
167 # was indexed
167 # was indexed
168 indexed_time = fields['modtime']
168 indexed_time = fields['modtime']
169
169
170 mtime = os.path.getmtime(indexed_path)
170 mtime = os.path.getmtime(indexed_path)
171
171
172 if mtime > indexed_time:
172 if mtime > indexed_time:
173
173
174 # The file has changed, delete it and add it to the list of
174 # The file has changed, delete it and add it to the list of
175 # files to reindex
175 # files to reindex
176 log.debug('adding to reindex list %s' % indexed_path)
176 log.debug('adding to reindex list %s' % indexed_path)
177 writer.delete_by_term('path', indexed_path)
177 writer.delete_by_term('path', indexed_path)
178 to_index.add(indexed_path)
178 to_index.add(indexed_path)
179 #writer.commit()
179 #writer.commit()
180
180
181 # Loop over the files in the filesystem
181 # Loop over the files in the filesystem
182 # Assume we have a function that gathers the filenames of the
182 # Assume we have a function that gathers the filenames of the
183 # documents to be indexed
183 # documents to be indexed
184 for repo in scan_paths(self.repo_location).values():
184 for repo in scan_paths(self.repo_location).values():
185 for path in self.get_paths(repo.path):
185 for path in self.get_paths(repo.path):
186 if path in to_index or path not in indexed_paths:
186 if path in to_index or path not in indexed_paths:
187 # This is either a file that's changed, or a new file
187 # This is either a file that's changed, or a new file
188 # that wasn't indexed before. So index it!
188 # that wasn't indexed before. So index it!
189 self.add_doc(writer, path, repo)
189 self.add_doc(writer, path, repo)
190 log.debug('reindexing %s' % path)
190 log.debug('reindexing %s' % path)
191
191
192 writer.commit(merge=True)
192 writer.commit(merge=True)
193 #idx.optimize()
193 #idx.optimize()
194 log.debug('>>> FINISHED <<<')
194 log.debug('>>> FINISHED <<<')
195
195
196 def run(self, full_index=False):
196 def run(self, full_index=False):
197 """Run daemon"""
197 """Run daemon"""
198 if full_index or self.initial:
198 if full_index or self.initial:
199 self.build_index()
199 self.build_index()
200 else:
200 else:
201 self.update_index()
201 self.update_index()
202
202
203 if __name__ == "__main__":
203 if __name__ == "__main__":
204 arg = sys.argv[1:]
204 arg = sys.argv[1:]
205 if len(arg) != 2:
205 if len(arg) != 2:
206 sys.stderr.write('Please specify indexing type [full|incremental]'
206 sys.stderr.write('Please specify indexing type [full|incremental]'
207 ' and path to repositories as script args \n')
207 ' and path to repositories as script args \n')
208 sys.exit()
208 sys.exit()
209
209
210
210
211 if arg[0] == 'full':
211 if arg[0] == 'full':
212 full_index = True
212 full_index = True
213 elif arg[0] == 'incremental':
213 elif arg[0] == 'incremental':
214 # False means looking just for changes
214 # False means looking just for changes
215 full_index = False
215 full_index = False
216 else:
216 else:
217 sys.stdout.write('Please use [full|incremental]'
217 sys.stdout.write('Please use [full|incremental]'
218 ' as script first arg \n')
218 ' as script first arg \n')
219 sys.exit()
219 sys.exit()
220
220
221 if not os.path.isdir(arg[1]):
221 if not os.path.isdir(arg[1]):
222 sys.stderr.write('%s is not a valid path \n' % arg[1])
222 sys.stderr.write('%s is not a valid path \n' % arg[1])
223 sys.exit()
223 sys.exit()
224 else:
224 else:
225 if arg[1].endswith('/'):
225 if arg[1].endswith('/'):
226 repo_location = arg[1] + '*'
226 repo_location = arg[1] + '*'
227 else:
227 else:
228 repo_location = arg[1] + '/*'
228 repo_location = arg[1] + '/*'
229
229
230 try:
230 try:
231 l = DaemonLock()
231 l = DaemonLock()
232 WhooshIndexingDaemon(repo_location=repo_location)\
232 WhooshIndexingDaemon(repo_location=repo_location)\
233 .run(full_index=full_index)
233 .run(full_index=full_index)
234 l.release()
234 l.release()
235 reload(logging)
235 reload(logging)
236 except LockHeld:
236 except LockHeld:
237 sys.exit(1)
237 sys.exit(1)
238
238
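Run standalone, the __main__ block above expects the indexing type and the repository root as arguments, e.g. python daemon.py full /srv/repos (script name and path are hypothetical here). Driving it from code follows the same lock-guarded pattern the whoosh_index celery task uses:

    from pylons_app.lib.pidlock import DaemonLock, LockHeld

    try:
        l = DaemonLock()
        #full rebuild; full_index=False does the incremental, mtime-based update
        WhooshIndexingDaemon(repo_location='/srv/repos/*').run(full_index=True)
        l.release()
    except LockHeld:
        pass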
@@ -1,127 +1,127 b''
1 import os, time
1 import os, time
2 import sys
2 import sys
3 from warnings import warn
3 from warnings import warn
4
4
5 class LockHeld(Exception):pass
5 class LockHeld(Exception):pass
6
6
7
7
8 class DaemonLock(object):
8 class DaemonLock(object):
9 '''daemon locking
9 """daemon locking
10 USAGE:
10 USAGE:
11 try:
11 try:
12 l = lock()
12 l = lock()
13 main()
13 main()
14 l.release()
14 l.release()
15 except LockHeld:
15 except LockHeld:
16 sys.exit(1)
16 sys.exit(1)
17 '''
17 """
18
18
19 def __init__(self, file=None, callbackfn=None,
19 def __init__(self, file=None, callbackfn=None,
20 desc='daemon lock', debug=False):
20 desc='daemon lock', debug=False):
21
21
22 self.pidfile = file if file else os.path.join(os.path.dirname(__file__),
22 self.pidfile = file if file else os.path.join(os.path.dirname(__file__),
23 'running.lock')
23 'running.lock')
24 self.callbackfn = callbackfn
24 self.callbackfn = callbackfn
25 self.desc = desc
25 self.desc = desc
26 self.debug = debug
26 self.debug = debug
27 self.held = False
27 self.held = False
28 #run the lock automatically !
28 #run the lock automatically !
29 self.lock()
29 self.lock()
30
30
31 def __del__(self):
31 def __del__(self):
32 if self.held:
32 if self.held:
33
33
34 # warn("use lock.release instead of del lock",
34 # warn("use lock.release instead of del lock",
35 # category = DeprecationWarning,
35 # category = DeprecationWarning,
36 # stacklevel = 2)
36 # stacklevel = 2)
37
37
38 # ensure the lock will be removed
38 # ensure the lock will be removed
39 self.release()
39 self.release()
40
40
41
41
42 def lock(self):
42 def lock(self):
43 '''
43 """
44 locking function, if lock is present it will raise LockHeld exception
44 locking function, if lock is present it will raise LockHeld exception
45 '''
45 """
46 lockname = '%s' % (os.getpid())
46 lockname = '%s' % (os.getpid())
47
47
48 self.trylock()
48 self.trylock()
49 self.makelock(lockname, self.pidfile)
49 self.makelock(lockname, self.pidfile)
50 return True
50 return True
51
51
52 def trylock(self):
52 def trylock(self):
53 running_pid = False
53 running_pid = False
54 try:
54 try:
55 pidfile = open(self.pidfile, "r")
55 pidfile = open(self.pidfile, "r")
56 pidfile.seek(0)
56 pidfile.seek(0)
57 running_pid = pidfile.readline()
57 running_pid = pidfile.readline()
58 if self.debug:
58 if self.debug:
59 print 'lock file present running_pid: %s, checking for execution'\
59 print 'lock file present running_pid: %s, checking for execution'\
60 % running_pid
60 % running_pid
61 # Now we check the PID from lock file matches to the current
61 # Now we check the PID from lock file matches to the current
62 # process PID
62 # process PID
63 if running_pid:
63 if running_pid:
64 if os.path.exists("/proc/%s" % running_pid):
64 if os.path.exists("/proc/%s" % running_pid):
65 print "You already have an instance of the program running"
65 print "You already have an instance of the program running"
66 print "It is running as process %s" % running_pid
66 print "It is running as process %s" % running_pid
67 raise LockHeld
67 raise LockHeld
68 else:
68 else:
69 print "Lock File is there but the program is not running"
69 print "Lock File is there but the program is not running"
70 print "Removing lock file for the: %s" % running_pid
70 print "Removing lock file for the: %s" % running_pid
71 self.release()
71 self.release()
72 except IOError, e:
72 except IOError, e:
73 if e.errno != 2:
73 if e.errno != 2:
74 raise
74 raise
75
75
76
76
77 def release(self):
77 def release(self):
78 '''
78 """
79 releases the pid by removing the pidfile
79 releases the pid by removing the pidfile
80 '''
80 """
81 if self.callbackfn:
81 if self.callbackfn:
82 #execute callback function on release
82 #execute callback function on release
83 if self.debug:
83 if self.debug:
84 print 'executing callback function %s' % self.callbackfn
84 print 'executing callback function %s' % self.callbackfn
85 self.callbackfn()
85 self.callbackfn()
86 try:
86 try:
87 if self.debug:
87 if self.debug:
88 print 'removing pidfile %s' % self.pidfile
88 print 'removing pidfile %s' % self.pidfile
89 os.remove(self.pidfile)
89 os.remove(self.pidfile)
90 self.held = False
90 self.held = False
91 except OSError, e:
91 except OSError, e:
92 if self.debug:
92 if self.debug:
93 print 'removing pidfile failed %s' % e
93 print 'removing pidfile failed %s' % e
94 pass
94 pass
95
95
96 def makelock(self, lockname, pidfile):
96 def makelock(self, lockname, pidfile):
97 '''
97 """
98 this function will make an actual lock
98 this function will make an actual lock
99 @param lockname: actual pid to store in the file
99 @param lockname: actual pid to store in the file
100 @param pidfile: the file to write the pid in
100 @param pidfile: the file to write the pid in
101 '''
101 """
102 if self.debug:
102 if self.debug:
103 print 'creating a file %s and pid: %s' % (pidfile, lockname)
103 print 'creating a file %s and pid: %s' % (pidfile, lockname)
104 pidfile = open(self.pidfile, "wb")
104 pidfile = open(self.pidfile, "wb")
105 pidfile.write(lockname)
105 pidfile.write(lockname)
106 pidfile.close()
106 pidfile.close()
107 self.held = True
107 self.held = True
108
108
109
109
110 def main():
110 def main():
111 print 'func is running'
111 print 'func is running'
112 cnt = 20
112 cnt = 20
113 while 1:
113 while 1:
114 print cnt
114 print cnt
115 if cnt == 0:
115 if cnt == 0:
116 break
116 break
117 time.sleep(1)
117 time.sleep(1)
118 cnt -= 1
118 cnt -= 1
119
119
120
120
121 if __name__ == "__main__":
121 if __name__ == "__main__":
122 try:
122 try:
123 l = DaemonLock(desc='test lock')
123 l = DaemonLock(desc='test lock')
124 main()
124 main()
125 l.release()
125 l.release()
126 except LockHeld:
126 except LockHeld:
127 sys.exit(1)
127 sys.exit(1)
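Beyond the pattern in the __main__ block, DaemonLock accepts two optional hooks: file overrides the default pidfile location and callbackfn runs whenever release() is called. A hedged sketch with hypothetical values:

    def cleanup():
        print 'lock released'

    try:
        l = DaemonLock(file='/tmp/indexer.lock', callbackfn=cleanup,
                       desc='indexer lock', debug=True)
        main()
        l.release()
    except LockHeld:
        sys.exit(1)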
@@ -1,439 +1,438 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 # encoding: utf-8
2 # encoding: utf-8
3 # Utilities for hg app
3 # Utilities for hg app
4 # Copyright (C) 2009-2010 Marcin Kuzminski <marcin@python-works.com>
4 # Copyright (C) 2009-2010 Marcin Kuzminski <marcin@python-works.com>
5 # This program is free software; you can redistribute it and/or
5 # This program is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU General Public License
6 # modify it under the terms of the GNU General Public License
7 # as published by the Free Software Foundation; version 2
7 # as published by the Free Software Foundation; version 2
8 # of the License or (at your option) any later version of the license.
8 # of the License or (at your option) any later version of the license.
9 #
9 #
10 # This program is distributed in the hope that it will be useful,
10 # This program is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # GNU General Public License for more details.
13 # GNU General Public License for more details.
14 #
14 #
15 # You should have received a copy of the GNU General Public License
15 # You should have received a copy of the GNU General Public License
16 # along with this program; if not, write to the Free Software
16 # along with this program; if not, write to the Free Software
17 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
17 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
18 # MA 02110-1301, USA.
18 # MA 02110-1301, USA.
19
19
20 """
20 """
21 Created on April 18, 2010
21 Created on April 18, 2010
22 Utilities for hg app
22 Utilities for hg app
23 @author: marcink
23 @author: marcink
24 """
24 """
25 from beaker.cache import cache_region
25 from beaker.cache import cache_region
26 from mercurial import ui, config, hg
26 from mercurial import ui, config, hg
27 from mercurial.error import RepoError
27 from mercurial.error import RepoError
28 from pylons_app.model import meta
28 from pylons_app.model import meta
29 from pylons_app.model.db import Repository, User, HgAppUi, HgAppSettings
29 from pylons_app.model.db import Repository, User, HgAppUi, HgAppSettings
30 from vcs.backends.base import BaseChangeset
30 from vcs.backends.base import BaseChangeset
31 from vcs.utils.lazy import LazyProperty
31 from vcs.utils.lazy import LazyProperty
32 import logging
32 import logging
33 import os
33 import os
34
34
35 log = logging.getLogger(__name__)
35 log = logging.getLogger(__name__)
36
36
37
37
38 def get_repo_slug(request):
38 def get_repo_slug(request):
39 return request.environ['pylons.routes_dict'].get('repo_name')
39 return request.environ['pylons.routes_dict'].get('repo_name')
40
40
41 def is_mercurial(environ):
41 def is_mercurial(environ):
42 """
42 """
43 Returns True if request's target is mercurial server - header
43 Returns True if request's target is mercurial server - header
44 ``HTTP_ACCEPT`` of such request would start with ``application/mercurial``.
44 ``HTTP_ACCEPT`` of such request would start with ``application/mercurial``.
45 """
45 """
46 http_accept = environ.get('HTTP_ACCEPT')
46 http_accept = environ.get('HTTP_ACCEPT')
47 if http_accept and http_accept.startswith('application/mercurial'):
47 if http_accept and http_accept.startswith('application/mercurial'):
48 return True
48 return True
49 return False
49 return False
50
50
51 def check_repo_dir(paths):
51 def check_repo_dir(paths):
52 repos_path = paths[0][1].split('/')
52 repos_path = paths[0][1].split('/')
53 if repos_path[-1] in ['*', '**']:
53 if repos_path[-1] in ['*', '**']:
54 repos_path = repos_path[:-1]
54 repos_path = repos_path[:-1]
55 if repos_path[0] != '/':
55 if repos_path[0] != '/':
56 repos_path[0] = '/'
56 repos_path[0] = '/'
57 if not os.path.isdir(os.path.join(*repos_path)):
57 if not os.path.isdir(os.path.join(*repos_path)):
58 raise Exception('Not a valid repository in %s' % paths[0][1])
58 raise Exception('Not a valid repository in %s' % paths[0][1])
59
59
60 def check_repo_fast(repo_name, base_path):
60 def check_repo_fast(repo_name, base_path):
61 if os.path.isdir(os.path.join(base_path, repo_name)):return False
61 if os.path.isdir(os.path.join(base_path, repo_name)):return False
62 return True
62 return True
63
63
64 def check_repo(repo_name, base_path, verify=True):
64 def check_repo(repo_name, base_path, verify=True):
65
65
66 repo_path = os.path.join(base_path, repo_name)
66 repo_path = os.path.join(base_path, repo_name)
67
67
68 try:
68 try:
69 if not check_repo_fast(repo_name, base_path):
69 if not check_repo_fast(repo_name, base_path):
70 return False
70 return False
71 r = hg.repository(ui.ui(), repo_path)
71 r = hg.repository(ui.ui(), repo_path)
72 if verify:
72 if verify:
73 hg.verify(r)
73 hg.verify(r)
74 #here we know that the repo exists and was verified
74 #here we know that the repo exists and was verified
75 log.info('%s repo is already created', repo_name)
75 log.info('%s repo is already created', repo_name)
76 return False
76 return False
77 except RepoError:
77 except RepoError:
78 #it means that there is no valid repo there...
78 #it means that there is no valid repo there...
79 log.info('%s repo is free for creation', repo_name)
79 log.info('%s repo is free for creation', repo_name)
80 return True
80 return True
81
81
82 def ask_ok(prompt, retries=4, complaint='Yes or no, please!'):
82 def ask_ok(prompt, retries=4, complaint='Yes or no, please!'):
83 while True:
83 while True:
84 ok = raw_input(prompt)
84 ok = raw_input(prompt)
85 if ok in ('y', 'ye', 'yes'): return True
85 if ok in ('y', 'ye', 'yes'): return True
86 if ok in ('n', 'no', 'nop', 'nope'): return False
86 if ok in ('n', 'no', 'nop', 'nope'): return False
87 retries = retries - 1
87 retries = retries - 1
88 if retries < 0: raise IOError
88 if retries < 0: raise IOError
89 print complaint
89 print complaint
90
90
91 @cache_region('super_short_term', 'cached_hg_ui')
91 @cache_region('super_short_term', 'cached_hg_ui')
92 def get_hg_ui_cached():
92 def get_hg_ui_cached():
93 try:
93 try:
94 sa = meta.Session
94 sa = meta.Session
95 ret = sa.query(HgAppUi).all()
95 ret = sa.query(HgAppUi).all()
96 finally:
96 finally:
97 meta.Session.remove()
97 meta.Session.remove()
98 return ret
98 return ret
99
99
100
100
101 def get_hg_settings():
101 def get_hg_settings():
102 try:
102 try:
103 sa = meta.Session
103 sa = meta.Session
104 ret = sa.query(HgAppSettings).all()
104 ret = sa.query(HgAppSettings).all()
105 finally:
105 finally:
106 meta.Session.remove()
106 meta.Session.remove()
107
107
108 if not ret:
108 if not ret:
109 raise Exception('Could not get application settings !')
109 raise Exception('Could not get application settings !')
110 settings = {}
110 settings = {}
111 for each in ret:
111 for each in ret:
112 settings['hg_app_' + each.app_settings_name] = each.app_settings_value
112 settings['hg_app_' + each.app_settings_name] = each.app_settings_value
113
113
114 return settings
114 return settings
115
115
116 def get_hg_ui_settings():
116 def get_hg_ui_settings():
117 try:
117 try:
118 sa = meta.Session
118 sa = meta.Session
119 ret = sa.query(HgAppUi).all()
119 ret = sa.query(HgAppUi).all()
120 finally:
120 finally:
121 meta.Session.remove()
121 meta.Session.remove()
122
122
123 if not ret:
123 if not ret:
124 raise Exception('Could not get application ui settings !')
124 raise Exception('Could not get application ui settings !')
125 settings = {}
125 settings = {}
126 for each in ret:
126 for each in ret:
127 k = each.ui_key
127 k = each.ui_key
128 v = each.ui_value
128 v = each.ui_value
129 if k == '/':
129 if k == '/':
130 k = 'root_path'
130 k = 'root_path'
131
131
132 if k.find('.') != -1:
132 if k.find('.') != -1:
133 k = k.replace('.', '_')
133 k = k.replace('.', '_')
134
134
135 if each.ui_section == 'hooks':
135 if each.ui_section == 'hooks':
136 v = each.ui_active
136 v = each.ui_active
137
137
138 settings[each.ui_section + '_' + k] = v
138 settings[each.ui_section + '_' + k] = v
139
139
140 return settings
140 return settings
141
141
142 #propagated from mercurial documentation
142 #propagated from mercurial documentation
143 ui_sections = ['alias', 'auth',
143 ui_sections = ['alias', 'auth',
144 'decode/encode', 'defaults',
144 'decode/encode', 'defaults',
145 'diff', 'email',
145 'diff', 'email',
146 'extensions', 'format',
146 'extensions', 'format',
147 'merge-patterns', 'merge-tools',
147 'merge-patterns', 'merge-tools',
148 'hooks', 'http_proxy',
148 'hooks', 'http_proxy',
149 'smtp', 'patch',
149 'smtp', 'patch',
150 'paths', 'profiling',
150 'paths', 'profiling',
151 'server', 'trusted',
151 'server', 'trusted',
152 'ui', 'web', ]
152 'ui', 'web', ]
153
153
154 def make_ui(read_from='file', path=None, checkpaths=True):
154 def make_ui(read_from='file', path=None, checkpaths=True):
155 """
155 """
156 A function that will read python rc files or database
156 A function that will read python rc files or database
157 and make a mercurial ui object from the read options
157 and make a mercurial ui object from the read options
158
158
159 @param path: path to mercurial config file
159 @param path: path to mercurial config file
160 @param checkpaths: check the path
160 @param checkpaths: check the path
161 @param read_from: read from 'file' or 'db'
161 @param read_from: read from 'file' or 'db'
162 """
162 """
163
163
164 baseui = ui.ui()
164 baseui = ui.ui()
165
165
166 if read_from == 'file':
166 if read_from == 'file':
167 if not os.path.isfile(path):
167 if not os.path.isfile(path):
168 log.warning('Unable to read config file %s' % path)
168 log.warning('Unable to read config file %s' % path)
169 return False
169 return False
170 log.debug('reading hgrc from %s', path)
170 log.debug('reading hgrc from %s', path)
171 cfg = config.config()
171 cfg = config.config()
172 cfg.read(path)
172 cfg.read(path)
173 for section in ui_sections:
173 for section in ui_sections:
174 for k, v in cfg.items(section):
174 for k, v in cfg.items(section):
175 baseui.setconfig(section, k, v)
175 baseui.setconfig(section, k, v)
176 log.debug('settings ui from file[%s]%s:%s', section, k, v)
176 log.debug('settings ui from file[%s]%s:%s', section, k, v)
177 if checkpaths:check_repo_dir(cfg.items('paths'))
177 if checkpaths:check_repo_dir(cfg.items('paths'))
178
178
179
179
180 elif read_from == 'db':
180 elif read_from == 'db':
181 hg_ui = get_hg_ui_cached()
181 hg_ui = get_hg_ui_cached()
182 for ui_ in hg_ui:
182 for ui_ in hg_ui:
183 if ui_.ui_active:
183 if ui_.ui_active:
184 log.debug('settings ui from db[%s]%s:%s', ui_.ui_section, ui_.ui_key, ui_.ui_value)
184 log.debug('settings ui from db[%s]%s:%s', ui_.ui_section, ui_.ui_key, ui_.ui_value)
185 baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)
185 baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)
186
186
187
187
188 return baseui
188 return baseui
189
189
190
190
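# Usage sketch (illustrative path): build a baseui either from an hgrc file
# on disk or from the active HgAppUi rows in the database.
#
#     baseui = make_ui(read_from='file', path='/home/hg/hgrc')
#     baseui = make_ui(read_from='db')
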
def set_hg_app_config(config):
    hgsettings = get_hg_settings()

    for k, v in hgsettings.items():
        config[k] = v

def invalidate_cache(name, *args):
    """Invalidates given name cache"""

    from beaker.cache import region_invalidate
    log.info('INVALIDATING CACHE FOR %s', name)

    #propagate our arguments to make sure invalidation works. The first
    #argument has to be the name given to the cache decorator; without it
    #the invalidation would not work
    tmp = [name]
    tmp.extend(args)
    args = tuple(tmp)

    if name == 'cached_repo_list':
        from pylons_app.model.hg_model import _get_repos_cached
        region_invalidate(_get_repos_cached, None, *args)

    if name == 'full_changelog':
        from pylons_app.model.hg_model import _full_changelog_cached
        region_invalidate(_full_changelog_cached, None, *args)

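# Usage sketch (illustrative): drop the beaker region for the cached repo
# list, e.g. after repositories were added or removed on disk; any extra
# *args must match the arguments the cached function was called with.
#
#     invalidate_cache('cached_repo_list')
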
class EmptyChangeset(BaseChangeset):

    revision = -1
    message = ''
    author = ''

    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this changeset, useful for web
        representation.
        """
        return '0' * 12

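# Null-object sketch (illustrative): an EmptyChangeset can stand in for a
# real changeset, e.g. when a repository has no commits yet.
#
#     cs = EmptyChangeset()
#     cs.raw_id    # -> '000000000000'
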
def repo2db_mapper(initial_repo_list, remove_obsolete=False):
    """
    maps all found repositories into db
    """
    from pylons_app.model.repo_model import RepoModel

    sa = meta.Session
    user = sa.query(User).filter(User.admin == True).first()

    rm = RepoModel()

    for name, repo in initial_repo_list.items():
        if not sa.query(Repository).filter(Repository.repo_name == name).scalar():
            log.info('repository %s not found, creating default', name)

            form_data = {
                         'repo_name':name,
                         'description':repo.description if repo.description != 'unknown' else \
                                        'auto description for %s' % name,
                         'private':False
                         }
            rm.create(form_data, user, just_db=True)

    if remove_obsolete:
        #remove from database those repositories that are not in the filesystem
        for repo in sa.query(Repository).all():
            if repo.repo_name not in initial_repo_list.keys():
                sa.delete(repo)
        sa.commit()

    meta.Session.remove()

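# Usage sketch (scan_repos is a hypothetical helper standing in for
# whatever produces the {name: repo} mapping): sync the db with the
# filesystem and purge rows whose repositories disappeared from disk.
#
#     repos = scan_repos('/home/hg/repos')
#     repo2db_mapper(repos, remove_obsolete=True)
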
from UserDict import DictMixin

class OrderedDict(dict, DictMixin):

    def __init__(self, *args, **kwds):
        if len(args) > 1:
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
        try:
            self.__end
        except AttributeError:
            self.clear()
        self.update(*args, **kwds)

    def clear(self):
        self.__end = end = []
        end += [None, end, end]         # sentinel node for doubly linked list
        self.__map = {}                 # key --> [key, prev, next]
        dict.clear(self)

    def __setitem__(self, key, value):
        if key not in self:
            end = self.__end
            curr = end[1]
            curr[2] = end[1] = self.__map[key] = [key, curr, end]
        dict.__setitem__(self, key, value)

    def __delitem__(self, key):
        dict.__delitem__(self, key)
        key, prev, next = self.__map.pop(key)
        prev[2] = next
        next[1] = prev

    def __iter__(self):
        end = self.__end
        curr = end[2]
        while curr is not end:
            yield curr[0]
            curr = curr[2]

    def __reversed__(self):
        end = self.__end
        curr = end[1]
        while curr is not end:
            yield curr[0]
            curr = curr[1]

    def popitem(self, last=True):
        if not self:
            raise KeyError('dictionary is empty')
        if last:
            key = reversed(self).next()
        else:
            key = iter(self).next()
        value = self.pop(key)
        return key, value

    def __reduce__(self):
        items = [[k, self[k]] for k in self]
        tmp = self.__map, self.__end
        del self.__map, self.__end
        inst_dict = vars(self).copy()
        self.__map, self.__end = tmp
        if inst_dict:
            return (self.__class__, (items,), inst_dict)
        return self.__class__, (items,)

    def keys(self):
        return list(self)

    setdefault = DictMixin.setdefault
    update = DictMixin.update
    pop = DictMixin.pop
    values = DictMixin.values
    items = DictMixin.items
    iterkeys = DictMixin.iterkeys
    itervalues = DictMixin.itervalues
    iteritems = DictMixin.iteritems

    def __repr__(self):
        if not self:
            return '%s()' % (self.__class__.__name__,)
        return '%s(%r)' % (self.__class__.__name__, self.items())

    def copy(self):
        return self.__class__(self)

    @classmethod
    def fromkeys(cls, iterable, value=None):
        d = cls()
        for key in iterable:
            d[key] = value
        return d

    def __eq__(self, other):
        if isinstance(other, OrderedDict):
            return len(self) == len(other) and self.items() == other.items()
        return dict.__eq__(self, other)

    def __ne__(self, other):
        return not self == other

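# Usage sketch (illustrative): unlike a plain dict on this era of Python 2,
# the recipe above (Raymond Hettinger's well-known backport) preserves
# insertion order.
#
#     d = OrderedDict()
#     d['b'] = 1
#     d['a'] = 2
#     d.keys()    # -> ['b', 'a']
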
#===============================================================================
# TEST FUNCTIONS
#===============================================================================
def create_test_index(repo_location, full_index):
    """Makes default test index
    @param repo_location:
    @param full_index:
    """
-   from pylons_app.lib.indexers import daemon
    from pylons_app.lib.indexers.daemon import WhooshIndexingDaemon
-   from pylons_app.lib.indexers.pidlock import DaemonLock, LockHeld
+   from pylons_app.lib.pidlock import DaemonLock, LockHeld
    from pylons_app.lib.indexers import IDX_LOCATION
    import shutil

    if os.path.exists(IDX_LOCATION):
        shutil.rmtree(IDX_LOCATION)

    try:
        l = DaemonLock()
        WhooshIndexingDaemon(repo_location=repo_location)\
            .run(full_index=full_index)
        l.release()
    except LockHeld:
        pass

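# Usage sketch (illustrative arguments): rebuild the whoosh test index; a
# concurrent run bails out quietly because the DaemonLock is already held.
#
#     create_test_index('/tmp/repos', full_index=True)
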
def create_test_env(repos_test_path, config):
    """Makes a fresh database and
    installs a test repository into tmp dir
    """
    from pylons_app.lib.db_manage import DbManage
    import tarfile
    import shutil
    from os.path import dirname as dn, join as jn, abspath

    log = logging.getLogger('TestEnvCreator')
    # create logger
    log.setLevel(logging.DEBUG)
    log.propagate = True
    # create console handler and set level to debug
    ch = logging.StreamHandler()
    ch.setLevel(logging.DEBUG)

    # create formatter
    formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")

    # add formatter to ch
    ch.setFormatter(formatter)

    # add ch to logger
    log.addHandler(ch)

    #PART ONE create db
    log.debug('making test db')
    dbname = config['sqlalchemy.db1.url'].split('/')[-1]
    dbmanage = DbManage(log_sql=True, dbname=dbname, tests=True)
    dbmanage.create_tables(override=True)
    dbmanage.config_prompt(repos_test_path)
    dbmanage.create_default_user()
    dbmanage.admin_prompt()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()

    #PART TWO make test repo
    log.debug('making test vcs repo')
    if os.path.isdir('/tmp/vcs_test'):
        shutil.rmtree('/tmp/vcs_test')

    cur_dir = dn(dn(abspath(__file__)))
    tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test.tar.gz"))
    tar.extractall('/tmp')
    tar.close()
@@ -1,77 +1,78 @@
<%def name="file_class(node)">
    %if node.is_file():
        <%return "browser-file" %>
    %else:
        <%return "browser-dir"%>
    %endif
</%def>
<div id="body" class="browserblock">
    <div class="browser-header">
        ${h.form(h.url.current())}
        <div class="info_box">
            <span>${_('view')}@rev</span>
            <a href="${c.url_prev}">&laquo;</a>
            ${h.text('at_rev',value=c.rev_nr,size=3)}
            <a href="${c.url_next}">&raquo;</a>
            ${h.submit('view','view')}
        </div>
        ${h.end_form()}
    </div>
    <div class="browser-body">
        <table class="code-browser">
            <thead>
                <tr>
                    <th>${_('Name')}</th>
                    <th>${_('Size')}</th>
                    <th>${_('Mimetype')}</th>
                    <th>${_('Revision')}</th>
                    <th>${_('Last modified')}</th>
                    <th>${_('Last committer')}</th>
                </tr>
            </thead>
+
+           % if c.files_list.parent:
            <tr class="parity0">
                <td>
-                   % if c.files_list.parent:
                    ${h.link_to('..',h.url('files_home',repo_name=c.repo_name,revision=c.cur_rev,f_path=c.files_list.parent.path),class_="browser-dir")}
-                   %endif
                </td>
                <td></td>
                <td></td>
                <td></td>
                <td></td>
+               <td></td>
            </tr>
+           %endif
+
            %for cnt,node in enumerate(c.files_list,1):
            <tr class="parity${cnt%2}">
                <td>
                    ${h.link_to(node.name,h.url('files_home',repo_name=c.repo_name,revision=c.cur_rev,f_path=node.path),class_=file_class(node))}
                </td>
                <td>
-                   %if node.is_file():
                    ${h.format_byte_size(node.size,binary=True)}
-                   %endif
                </td>
                <td>
                    %if node.is_file():
                        ${node.mimetype}
                    %endif
                </td>
                <td>
                    %if node.is_file():
                        ${node.last_changeset.revision}
                    %endif
                </td>
                <td>
                    %if node.is_file():
                        ${h.age(node.last_changeset._ctx.date())} - ${node.last_changeset.date}
                    %endif
                </td>
                <td>
                    %if node.is_file():
                        ${node.last_changeset.author}
                    %endif
                </td>
            </tr>
            %endfor
        </table>
    </div>
-</div>
\ No newline at end of file
+</div>