##// END OF EJS Templates
complete rewrite of paster commands,...
marcink -
r785:277427ac beta
parent child Browse files
Show More
@@ -1,389 +1,365 b''
1 from celery.decorators import task
1 from celery.decorators import task
2
2
3 import os
3 import os
4 import traceback
4 import traceback
5 import beaker
6 from time import mktime
5 from time import mktime
7 from operator import itemgetter
6 from operator import itemgetter
8
7
9 from pylons import config
8 from pylons import config
10 from pylons.i18n.translation import _
9 from pylons.i18n.translation import _
11
10
12 from rhodecode.lib.celerylib import run_task, locked_task, str2bool
11 from rhodecode.lib.celerylib import run_task, locked_task, str2bool
13 from rhodecode.lib.helpers import person
12 from rhodecode.lib.helpers import person
14 from rhodecode.lib.smtp_mailer import SmtpMailer
13 from rhodecode.lib.smtp_mailer import SmtpMailer
15 from rhodecode.lib.utils import OrderedDict
14 from rhodecode.lib.utils import OrderedDict, add_cache
16 from rhodecode.model import init_model
15 from rhodecode.model import init_model
17 from rhodecode.model import meta
16 from rhodecode.model import meta
18 from rhodecode.model.db import RhodeCodeUi
17 from rhodecode.model.db import RhodeCodeUi
19
18
20 from vcs.backends import get_repo
19 from vcs.backends import get_repo
21
20
22 from sqlalchemy import engine_from_config
21 from sqlalchemy import engine_from_config
23
22
24 #set cache regions for beaker so celery can utilise it
25 def add_cache(settings):
26 cache_settings = {'regions':None}
27 for key in settings.keys():
28 for prefix in ['beaker.cache.', 'cache.']:
29 if key.startswith(prefix):
30 name = key.split(prefix)[1].strip()
31 cache_settings[name] = settings[key].strip()
32 if cache_settings['regions']:
33 for region in cache_settings['regions'].split(','):
34 region = region.strip()
35 region_settings = {}
36 for key, value in cache_settings.items():
37 if key.startswith(region):
38 region_settings[key.split('.')[1]] = value
39 region_settings['expire'] = int(region_settings.get('expire',
40 60))
41 region_settings.setdefault('lock_dir',
42 cache_settings.get('lock_dir'))
43 if 'type' not in region_settings:
44 region_settings['type'] = cache_settings.get('type',
45 'memory')
46 beaker.cache.cache_regions[region] = region_settings
47 add_cache(config)
23 add_cache(config)
48
24
49 try:
25 try:
50 import json
26 import json
51 except ImportError:
27 except ImportError:
52 #python 2.5 compatibility
28 #python 2.5 compatibility
53 import simplejson as json
29 import simplejson as json
54
30
55 __all__ = ['whoosh_index', 'get_commits_stats',
31 __all__ = ['whoosh_index', 'get_commits_stats',
56 'reset_user_password', 'send_email']
32 'reset_user_password', 'send_email']
57
33
58 CELERY_ON = str2bool(config['app_conf'].get('use_celery'))
34 CELERY_ON = str2bool(config['app_conf'].get('use_celery'))
59
35
60 def get_session():
36 def get_session():
61 if CELERY_ON:
37 if CELERY_ON:
62 engine = engine_from_config(config, 'sqlalchemy.db1.')
38 engine = engine_from_config(config, 'sqlalchemy.db1.')
63 init_model(engine)
39 init_model(engine)
64 sa = meta.Session()
40 sa = meta.Session()
65 return sa
41 return sa
66
42
67 def get_repos_path():
43 def get_repos_path():
68 sa = get_session()
44 sa = get_session()
69 q = sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one()
45 q = sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one()
70 return q.ui_value
46 return q.ui_value
71
47
72 @task
48 @task
73 @locked_task
49 @locked_task
74 def whoosh_index(repo_location, full_index):
50 def whoosh_index(repo_location, full_index):
75 log = whoosh_index.get_logger()
51 log = whoosh_index.get_logger()
76 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
52 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
77 index_location = config['index_dir']
53 index_location = config['index_dir']
78 WhooshIndexingDaemon(index_location=index_location,
54 WhooshIndexingDaemon(index_location=index_location,
79 repo_location=repo_location, sa=get_session())\
55 repo_location=repo_location, sa=get_session())\
80 .run(full_index=full_index)
56 .run(full_index=full_index)
81
57
82 @task
58 @task
83 @locked_task
59 @locked_task
84 def get_commits_stats(repo_name, ts_min_y, ts_max_y):
60 def get_commits_stats(repo_name, ts_min_y, ts_max_y):
85 from rhodecode.model.db import Statistics, Repository
61 from rhodecode.model.db import Statistics, Repository
86 log = get_commits_stats.get_logger()
62 log = get_commits_stats.get_logger()
87
63
88 #for js data compatibilty
64 #for js data compatibilty
89 author_key_cleaner = lambda k: person(k).replace('"', "")
65 author_key_cleaner = lambda k: person(k).replace('"', "")
90
66
91 commits_by_day_author_aggregate = {}
67 commits_by_day_author_aggregate = {}
92 commits_by_day_aggregate = {}
68 commits_by_day_aggregate = {}
93 repos_path = get_repos_path()
69 repos_path = get_repos_path()
94 p = os.path.join(repos_path, repo_name)
70 p = os.path.join(repos_path, repo_name)
95 repo = get_repo(p)
71 repo = get_repo(p)
96
72
97 skip_date_limit = True
73 skip_date_limit = True
98 parse_limit = 250 #limit for single task changeset parsing optimal for
74 parse_limit = 250 #limit for single task changeset parsing optimal for
99 last_rev = 0
75 last_rev = 0
100 last_cs = None
76 last_cs = None
101 timegetter = itemgetter('time')
77 timegetter = itemgetter('time')
102
78
103 sa = get_session()
79 sa = get_session()
104
80
105 dbrepo = sa.query(Repository)\
81 dbrepo = sa.query(Repository)\
106 .filter(Repository.repo_name == repo_name).scalar()
82 .filter(Repository.repo_name == repo_name).scalar()
107 cur_stats = sa.query(Statistics)\
83 cur_stats = sa.query(Statistics)\
108 .filter(Statistics.repository == dbrepo).scalar()
84 .filter(Statistics.repository == dbrepo).scalar()
109 if cur_stats:
85 if cur_stats:
110 last_rev = cur_stats.stat_on_revision
86 last_rev = cur_stats.stat_on_revision
111 if not repo.revisions:
87 if not repo.revisions:
112 return True
88 return True
113
89
114 if last_rev == repo.revisions[-1] and len(repo.revisions) > 1:
90 if last_rev == repo.revisions[-1] and len(repo.revisions) > 1:
115 #pass silently without any work if we're not on first revision or
91 #pass silently without any work if we're not on first revision or
116 #current state of parsing revision(from db marker) is the last revision
92 #current state of parsing revision(from db marker) is the last revision
117 return True
93 return True
118
94
119 if cur_stats:
95 if cur_stats:
120 commits_by_day_aggregate = OrderedDict(
96 commits_by_day_aggregate = OrderedDict(
121 json.loads(
97 json.loads(
122 cur_stats.commit_activity_combined))
98 cur_stats.commit_activity_combined))
123 commits_by_day_author_aggregate = json.loads(cur_stats.commit_activity)
99 commits_by_day_author_aggregate = json.loads(cur_stats.commit_activity)
124
100
125 log.debug('starting parsing %s', parse_limit)
101 log.debug('starting parsing %s', parse_limit)
126 lmktime = mktime
102 lmktime = mktime
127
103
128 for cnt, rev in enumerate(repo.revisions[last_rev:]):
104 for cnt, rev in enumerate(repo.revisions[last_rev:]):
129 last_cs = cs = repo.get_changeset(rev)
105 last_cs = cs = repo.get_changeset(rev)
130 k = '%s-%s-%s' % (cs.date.timetuple()[0], cs.date.timetuple()[1],
106 k = '%s-%s-%s' % (cs.date.timetuple()[0], cs.date.timetuple()[1],
131 cs.date.timetuple()[2])
107 cs.date.timetuple()[2])
132 timetupple = [int(x) for x in k.split('-')]
108 timetupple = [int(x) for x in k.split('-')]
133 timetupple.extend([0 for _ in xrange(6)])
109 timetupple.extend([0 for _ in xrange(6)])
134 k = lmktime(timetupple)
110 k = lmktime(timetupple)
135 if commits_by_day_author_aggregate.has_key(author_key_cleaner(cs.author)):
111 if commits_by_day_author_aggregate.has_key(author_key_cleaner(cs.author)):
136 try:
112 try:
137 l = [timegetter(x) for x in commits_by_day_author_aggregate\
113 l = [timegetter(x) for x in commits_by_day_author_aggregate\
138 [author_key_cleaner(cs.author)]['data']]
114 [author_key_cleaner(cs.author)]['data']]
139 time_pos = l.index(k)
115 time_pos = l.index(k)
140 except ValueError:
116 except ValueError:
141 time_pos = False
117 time_pos = False
142
118
143 if time_pos >= 0 and time_pos is not False:
119 if time_pos >= 0 and time_pos is not False:
144
120
145 datadict = commits_by_day_author_aggregate\
121 datadict = commits_by_day_author_aggregate\
146 [author_key_cleaner(cs.author)]['data'][time_pos]
122 [author_key_cleaner(cs.author)]['data'][time_pos]
147
123
148 datadict["commits"] += 1
124 datadict["commits"] += 1
149 datadict["added"] += len(cs.added)
125 datadict["added"] += len(cs.added)
150 datadict["changed"] += len(cs.changed)
126 datadict["changed"] += len(cs.changed)
151 datadict["removed"] += len(cs.removed)
127 datadict["removed"] += len(cs.removed)
152
128
153 else:
129 else:
154 if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
130 if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
155
131
156 datadict = {"time":k,
132 datadict = {"time":k,
157 "commits":1,
133 "commits":1,
158 "added":len(cs.added),
134 "added":len(cs.added),
159 "changed":len(cs.changed),
135 "changed":len(cs.changed),
160 "removed":len(cs.removed),
136 "removed":len(cs.removed),
161 }
137 }
162 commits_by_day_author_aggregate\
138 commits_by_day_author_aggregate\
163 [author_key_cleaner(cs.author)]['data'].append(datadict)
139 [author_key_cleaner(cs.author)]['data'].append(datadict)
164
140
165 else:
141 else:
166 if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
142 if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
167 commits_by_day_author_aggregate[author_key_cleaner(cs.author)] = {
143 commits_by_day_author_aggregate[author_key_cleaner(cs.author)] = {
168 "label":author_key_cleaner(cs.author),
144 "label":author_key_cleaner(cs.author),
169 "data":[{"time":k,
145 "data":[{"time":k,
170 "commits":1,
146 "commits":1,
171 "added":len(cs.added),
147 "added":len(cs.added),
172 "changed":len(cs.changed),
148 "changed":len(cs.changed),
173 "removed":len(cs.removed),
149 "removed":len(cs.removed),
174 }],
150 }],
175 "schema":["commits"],
151 "schema":["commits"],
176 }
152 }
177
153
178 #gather all data by day
154 #gather all data by day
179 if commits_by_day_aggregate.has_key(k):
155 if commits_by_day_aggregate.has_key(k):
180 commits_by_day_aggregate[k] += 1
156 commits_by_day_aggregate[k] += 1
181 else:
157 else:
182 commits_by_day_aggregate[k] = 1
158 commits_by_day_aggregate[k] = 1
183
159
184 if cnt >= parse_limit:
160 if cnt >= parse_limit:
185 #don't fetch to much data since we can freeze application
161 #don't fetch to much data since we can freeze application
186 break
162 break
187 overview_data = []
163 overview_data = []
188 for k, v in commits_by_day_aggregate.items():
164 for k, v in commits_by_day_aggregate.items():
189 overview_data.append([k, v])
165 overview_data.append([k, v])
190 overview_data = sorted(overview_data, key=itemgetter(0))
166 overview_data = sorted(overview_data, key=itemgetter(0))
191 if not commits_by_day_author_aggregate:
167 if not commits_by_day_author_aggregate:
192 commits_by_day_author_aggregate[author_key_cleaner(repo.contact)] = {
168 commits_by_day_author_aggregate[author_key_cleaner(repo.contact)] = {
193 "label":author_key_cleaner(repo.contact),
169 "label":author_key_cleaner(repo.contact),
194 "data":[0, 1],
170 "data":[0, 1],
195 "schema":["commits"],
171 "schema":["commits"],
196 }
172 }
197
173
198 stats = cur_stats if cur_stats else Statistics()
174 stats = cur_stats if cur_stats else Statistics()
199 stats.commit_activity = json.dumps(commits_by_day_author_aggregate)
175 stats.commit_activity = json.dumps(commits_by_day_author_aggregate)
200 stats.commit_activity_combined = json.dumps(overview_data)
176 stats.commit_activity_combined = json.dumps(overview_data)
201
177
202 log.debug('last revison %s', last_rev)
178 log.debug('last revison %s', last_rev)
203 leftovers = len(repo.revisions[last_rev:])
179 leftovers = len(repo.revisions[last_rev:])
204 log.debug('revisions to parse %s', leftovers)
180 log.debug('revisions to parse %s', leftovers)
205
181
206 if last_rev == 0 or leftovers < parse_limit:
182 if last_rev == 0 or leftovers < parse_limit:
207 stats.languages = json.dumps(__get_codes_stats(repo_name))
183 stats.languages = json.dumps(__get_codes_stats(repo_name))
208
184
209 stats.repository = dbrepo
185 stats.repository = dbrepo
210 stats.stat_on_revision = last_cs.revision
186 stats.stat_on_revision = last_cs.revision
211
187
212 try:
188 try:
213 sa.add(stats)
189 sa.add(stats)
214 sa.commit()
190 sa.commit()
215 except:
191 except:
216 log.error(traceback.format_exc())
192 log.error(traceback.format_exc())
217 sa.rollback()
193 sa.rollback()
218 return False
194 return False
219 if len(repo.revisions) > 1:
195 if len(repo.revisions) > 1:
220 run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y)
196 run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y)
221
197
222 return True
198 return True
223
199
224 @task
200 @task
225 def reset_user_password(user_email):
201 def reset_user_password(user_email):
226 log = reset_user_password.get_logger()
202 log = reset_user_password.get_logger()
227 from rhodecode.lib import auth
203 from rhodecode.lib import auth
228 from rhodecode.model.db import User
204 from rhodecode.model.db import User
229
205
230 try:
206 try:
231 try:
207 try:
232 sa = get_session()
208 sa = get_session()
233 user = sa.query(User).filter(User.email == user_email).scalar()
209 user = sa.query(User).filter(User.email == user_email).scalar()
234 new_passwd = auth.PasswordGenerator().gen_password(8,
210 new_passwd = auth.PasswordGenerator().gen_password(8,
235 auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
211 auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
236 if user:
212 if user:
237 user.password = auth.get_crypt_password(new_passwd)
213 user.password = auth.get_crypt_password(new_passwd)
238 sa.add(user)
214 sa.add(user)
239 sa.commit()
215 sa.commit()
240 log.info('change password for %s', user_email)
216 log.info('change password for %s', user_email)
241 if new_passwd is None:
217 if new_passwd is None:
242 raise Exception('unable to generate new password')
218 raise Exception('unable to generate new password')
243
219
244 except:
220 except:
245 log.error(traceback.format_exc())
221 log.error(traceback.format_exc())
246 sa.rollback()
222 sa.rollback()
247
223
248 run_task(send_email, user_email,
224 run_task(send_email, user_email,
249 "Your new rhodecode password",
225 "Your new rhodecode password",
250 'Your new rhodecode password:%s' % (new_passwd))
226 'Your new rhodecode password:%s' % (new_passwd))
251 log.info('send new password mail to %s', user_email)
227 log.info('send new password mail to %s', user_email)
252
228
253
229
254 except:
230 except:
255 log.error('Failed to update user password')
231 log.error('Failed to update user password')
256 log.error(traceback.format_exc())
232 log.error(traceback.format_exc())
257
233
258 return True
234 return True
259
235
260 @task
236 @task
261 def send_email(recipients, subject, body):
237 def send_email(recipients, subject, body):
262 """
238 """
263 Sends an email with defined parameters from the .ini files.
239 Sends an email with defined parameters from the .ini files.
264
240
265
241
266 :param recipients: list of recipients, it this is empty the defined email
242 :param recipients: list of recipients, it this is empty the defined email
267 address from field 'email_to' is used instead
243 address from field 'email_to' is used instead
268 :param subject: subject of the mail
244 :param subject: subject of the mail
269 :param body: body of the mail
245 :param body: body of the mail
270 """
246 """
271 log = send_email.get_logger()
247 log = send_email.get_logger()
272 email_config = config
248 email_config = config
273
249
274 if not recipients:
250 if not recipients:
275 recipients = [email_config.get('email_to')]
251 recipients = [email_config.get('email_to')]
276
252
277 mail_from = email_config.get('app_email_from')
253 mail_from = email_config.get('app_email_from')
278 user = email_config.get('smtp_username')
254 user = email_config.get('smtp_username')
279 passwd = email_config.get('smtp_password')
255 passwd = email_config.get('smtp_password')
280 mail_server = email_config.get('smtp_server')
256 mail_server = email_config.get('smtp_server')
281 mail_port = email_config.get('smtp_port')
257 mail_port = email_config.get('smtp_port')
282 tls = str2bool(email_config.get('smtp_use_tls'))
258 tls = str2bool(email_config.get('smtp_use_tls'))
283 ssl = str2bool(email_config.get('smtp_use_ssl'))
259 ssl = str2bool(email_config.get('smtp_use_ssl'))
284
260
285 try:
261 try:
286 m = SmtpMailer(mail_from, user, passwd, mail_server,
262 m = SmtpMailer(mail_from, user, passwd, mail_server,
287 mail_port, ssl, tls)
263 mail_port, ssl, tls)
288 m.send(recipients, subject, body)
264 m.send(recipients, subject, body)
289 except:
265 except:
290 log.error('Mail sending failed')
266 log.error('Mail sending failed')
291 log.error(traceback.format_exc())
267 log.error(traceback.format_exc())
292 return False
268 return False
293 return True
269 return True
294
270
295 @task
271 @task
296 def create_repo_fork(form_data, cur_user):
272 def create_repo_fork(form_data, cur_user):
297 from rhodecode.model.repo import RepoModel
273 from rhodecode.model.repo import RepoModel
298 from vcs import get_backend
274 from vcs import get_backend
299 log = create_repo_fork.get_logger()
275 log = create_repo_fork.get_logger()
300 repo_model = RepoModel(get_session())
276 repo_model = RepoModel(get_session())
301 repo_model.create(form_data, cur_user, just_db=True, fork=True)
277 repo_model.create(form_data, cur_user, just_db=True, fork=True)
302 repo_name = form_data['repo_name']
278 repo_name = form_data['repo_name']
303 repos_path = get_repos_path()
279 repos_path = get_repos_path()
304 repo_path = os.path.join(repos_path, repo_name)
280 repo_path = os.path.join(repos_path, repo_name)
305 repo_fork_path = os.path.join(repos_path, form_data['fork_name'])
281 repo_fork_path = os.path.join(repos_path, form_data['fork_name'])
306 alias = form_data['repo_type']
282 alias = form_data['repo_type']
307
283
308 log.info('creating repo fork %s as %s', repo_name, repo_path)
284 log.info('creating repo fork %s as %s', repo_name, repo_path)
309 backend = get_backend(alias)
285 backend = get_backend(alias)
310 backend(str(repo_fork_path), create=True, src_url=str(repo_path))
286 backend(str(repo_fork_path), create=True, src_url=str(repo_path))
311
287
312 def __get_codes_stats(repo_name):
288 def __get_codes_stats(repo_name):
313 LANGUAGES_EXTENSIONS_MAP = {'scm': 'Scheme', 'asmx': 'VbNetAspx', 'Rout':
289 LANGUAGES_EXTENSIONS_MAP = {'scm': 'Scheme', 'asmx': 'VbNetAspx', 'Rout':
314 'RConsole', 'rest': 'Rst', 'abap': 'ABAP', 'go': 'Go', 'phtml': 'HtmlPhp',
290 'RConsole', 'rest': 'Rst', 'abap': 'ABAP', 'go': 'Go', 'phtml': 'HtmlPhp',
315 'ns2': 'Newspeak', 'xml': 'EvoqueXml', 'sh-session': 'BashSession', 'ads':
291 'ns2': 'Newspeak', 'xml': 'EvoqueXml', 'sh-session': 'BashSession', 'ads':
316 'Ada', 'clj': 'Clojure', 'll': 'Llvm', 'ebuild': 'Bash', 'adb': 'Ada',
292 'Ada', 'clj': 'Clojure', 'll': 'Llvm', 'ebuild': 'Bash', 'adb': 'Ada',
317 'ada': 'Ada', 'c++-objdump': 'CppObjdump', 'aspx':
293 'ada': 'Ada', 'c++-objdump': 'CppObjdump', 'aspx':
318 'VbNetAspx', 'ksh': 'Bash', 'coffee': 'CoffeeScript', 'vert': 'GLShader',
294 'VbNetAspx', 'ksh': 'Bash', 'coffee': 'CoffeeScript', 'vert': 'GLShader',
319 'Makefile.*': 'Makefile', 'di': 'D', 'dpatch': 'DarcsPatch', 'rake':
295 'Makefile.*': 'Makefile', 'di': 'D', 'dpatch': 'DarcsPatch', 'rake':
320 'Ruby', 'moo': 'MOOCode', 'erl-sh': 'ErlangShell', 'geo': 'GLShader',
296 'Ruby', 'moo': 'MOOCode', 'erl-sh': 'ErlangShell', 'geo': 'GLShader',
321 'pov': 'Povray', 'bas': 'VbNet', 'bat': 'Batch', 'd': 'D', 'lisp':
297 'pov': 'Povray', 'bas': 'VbNet', 'bat': 'Batch', 'd': 'D', 'lisp':
322 'CommonLisp', 'h': 'C', 'rbx': 'Ruby', 'tcl': 'Tcl', 'c++': 'Cpp', 'md':
298 'CommonLisp', 'h': 'C', 'rbx': 'Ruby', 'tcl': 'Tcl', 'c++': 'Cpp', 'md':
323 'MiniD', '.vimrc': 'Vim', 'xsd': 'Xml', 'ml': 'Ocaml', 'el': 'CommonLisp',
299 'MiniD', '.vimrc': 'Vim', 'xsd': 'Xml', 'ml': 'Ocaml', 'el': 'CommonLisp',
324 'befunge': 'Befunge', 'xsl': 'Xslt', 'pyx': 'Cython', 'cfm':
300 'befunge': 'Befunge', 'xsl': 'Xslt', 'pyx': 'Cython', 'cfm':
325 'ColdfusionHtml', 'evoque': 'Evoque', 'cfg': 'Ini', 'htm': 'Html',
301 'ColdfusionHtml', 'evoque': 'Evoque', 'cfg': 'Ini', 'htm': 'Html',
326 'Makefile': 'Makefile', 'cfc': 'ColdfusionHtml', 'tex': 'Tex', 'cs':
302 'Makefile': 'Makefile', 'cfc': 'ColdfusionHtml', 'tex': 'Tex', 'cs':
327 'CSharp', 'mxml': 'Mxml', 'patch': 'Diff', 'apache.conf': 'ApacheConf',
303 'CSharp', 'mxml': 'Mxml', 'patch': 'Diff', 'apache.conf': 'ApacheConf',
328 'scala': 'Scala', 'applescript': 'AppleScript', 'GNUmakefile': 'Makefile',
304 'scala': 'Scala', 'applescript': 'AppleScript', 'GNUmakefile': 'Makefile',
329 'c-objdump': 'CObjdump', 'lua': 'Lua', 'apache2.conf': 'ApacheConf', 'rb':
305 'c-objdump': 'CObjdump', 'lua': 'Lua', 'apache2.conf': 'ApacheConf', 'rb':
330 'Ruby', 'gemspec': 'Ruby', 'rl': 'RagelObjectiveC', 'vala': 'Vala', 'tmpl':
306 'Ruby', 'gemspec': 'Ruby', 'rl': 'RagelObjectiveC', 'vala': 'Vala', 'tmpl':
331 'Cheetah', 'bf': 'Brainfuck', 'plt': 'Gnuplot', 'G': 'AntlrRuby', 'xslt':
307 'Cheetah', 'bf': 'Brainfuck', 'plt': 'Gnuplot', 'G': 'AntlrRuby', 'xslt':
332 'Xslt', 'flxh': 'Felix', 'asax': 'VbNetAspx', 'Rakefile': 'Ruby', 'S': 'S',
308 'Xslt', 'flxh': 'Felix', 'asax': 'VbNetAspx', 'Rakefile': 'Ruby', 'S': 'S',
333 'wsdl': 'Xml', 'js': 'Javascript', 'autodelegate': 'Myghty', 'properties':
309 'wsdl': 'Xml', 'js': 'Javascript', 'autodelegate': 'Myghty', 'properties':
334 'Ini', 'bash': 'Bash', 'c': 'C', 'g': 'AntlrRuby', 'r3': 'Rebol', 's':
310 'Ini', 'bash': 'Bash', 'c': 'C', 'g': 'AntlrRuby', 'r3': 'Rebol', 's':
335 'Gas', 'ashx': 'VbNetAspx', 'cxx': 'Cpp', 'boo': 'Boo', 'prolog': 'Prolog',
311 'Gas', 'ashx': 'VbNetAspx', 'cxx': 'Cpp', 'boo': 'Boo', 'prolog': 'Prolog',
336 'sqlite3-console': 'SqliteConsole', 'cl': 'CommonLisp', 'cc': 'Cpp', 'pot':
312 'sqlite3-console': 'SqliteConsole', 'cl': 'CommonLisp', 'cc': 'Cpp', 'pot':
337 'Gettext', 'vim': 'Vim', 'pxi': 'Cython', 'yaml': 'Yaml', 'SConstruct':
313 'Gettext', 'vim': 'Vim', 'pxi': 'Cython', 'yaml': 'Yaml', 'SConstruct':
338 'Python', 'diff': 'Diff', 'txt': 'Text', 'cw': 'Redcode', 'pxd': 'Cython',
314 'Python', 'diff': 'Diff', 'txt': 'Text', 'cw': 'Redcode', 'pxd': 'Cython',
339 'plot': 'Gnuplot', 'java': 'Java', 'hrl': 'Erlang', 'py': 'Python',
315 'plot': 'Gnuplot', 'java': 'Java', 'hrl': 'Erlang', 'py': 'Python',
340 'makefile': 'Makefile', 'squid.conf': 'SquidConf', 'asm': 'Nasm', 'toc':
316 'makefile': 'Makefile', 'squid.conf': 'SquidConf', 'asm': 'Nasm', 'toc':
341 'Tex', 'kid': 'Genshi', 'rhtml': 'Rhtml', 'po': 'Gettext', 'pl': 'Prolog',
317 'Tex', 'kid': 'Genshi', 'rhtml': 'Rhtml', 'po': 'Gettext', 'pl': 'Prolog',
342 'pm': 'Perl', 'hx': 'Haxe', 'ascx': 'VbNetAspx', 'ooc': 'Ooc', 'asy':
318 'pm': 'Perl', 'hx': 'Haxe', 'ascx': 'VbNetAspx', 'ooc': 'Ooc', 'asy':
343 'Asymptote', 'hs': 'Haskell', 'SConscript': 'Python', 'pytb':
319 'Asymptote', 'hs': 'Haskell', 'SConscript': 'Python', 'pytb':
344 'PythonTraceback', 'myt': 'Myghty', 'hh': 'Cpp', 'R': 'S', 'aux': 'Tex',
320 'PythonTraceback', 'myt': 'Myghty', 'hh': 'Cpp', 'R': 'S', 'aux': 'Tex',
345 'rst': 'Rst', 'cpp-objdump': 'CppObjdump', 'lgt': 'Logtalk', 'rss': 'Xml',
321 'rst': 'Rst', 'cpp-objdump': 'CppObjdump', 'lgt': 'Logtalk', 'rss': 'Xml',
346 'flx': 'Felix', 'b': 'Brainfuck', 'f': 'Fortran', 'rbw': 'Ruby',
322 'flx': 'Felix', 'b': 'Brainfuck', 'f': 'Fortran', 'rbw': 'Ruby',
347 '.htaccess': 'ApacheConf', 'cxx-objdump': 'CppObjdump', 'j': 'ObjectiveJ',
323 '.htaccess': 'ApacheConf', 'cxx-objdump': 'CppObjdump', 'j': 'ObjectiveJ',
348 'mll': 'Ocaml', 'yml': 'Yaml', 'mu': 'MuPAD', 'r': 'Rebol', 'ASM': 'Nasm',
324 'mll': 'Ocaml', 'yml': 'Yaml', 'mu': 'MuPAD', 'r': 'Rebol', 'ASM': 'Nasm',
349 'erl': 'Erlang', 'mly': 'Ocaml', 'mo': 'Modelica', 'def': 'Modula2', 'ini':
325 'erl': 'Erlang', 'mly': 'Ocaml', 'mo': 'Modelica', 'def': 'Modula2', 'ini':
350 'Ini', 'control': 'DebianControl', 'vb': 'VbNet', 'vapi': 'Vala', 'pro':
326 'Ini', 'control': 'DebianControl', 'vb': 'VbNet', 'vapi': 'Vala', 'pro':
351 'Prolog', 'spt': 'Cheetah', 'mli': 'Ocaml', 'as': 'ActionScript3', 'cmd':
327 'Prolog', 'spt': 'Cheetah', 'mli': 'Ocaml', 'as': 'ActionScript3', 'cmd':
352 'Batch', 'cpp': 'Cpp', 'io': 'Io', 'tac': 'Python', 'haml': 'Haml', 'rkt':
328 'Batch', 'cpp': 'Cpp', 'io': 'Io', 'tac': 'Python', 'haml': 'Haml', 'rkt':
353 'Racket', 'st':'Smalltalk', 'inc': 'Povray', 'pas': 'Delphi', 'cmake':
329 'Racket', 'st':'Smalltalk', 'inc': 'Povray', 'pas': 'Delphi', 'cmake':
354 'CMake', 'csh':'Tcsh', 'hpp': 'Cpp', 'feature': 'Gherkin', 'html': 'Html',
330 'CMake', 'csh':'Tcsh', 'hpp': 'Cpp', 'feature': 'Gherkin', 'html': 'Html',
355 'php':'Php', 'php3':'Php', 'php4':'Php', 'php5':'Php', 'xhtml': 'Html',
331 'php':'Php', 'php3':'Php', 'php4':'Php', 'php5':'Php', 'xhtml': 'Html',
356 'hxx': 'Cpp', 'eclass': 'Bash', 'css': 'Css',
332 'hxx': 'Cpp', 'eclass': 'Bash', 'css': 'Css',
357 'frag': 'GLShader', 'd-objdump': 'DObjdump', 'weechatlog': 'IrcLogs',
333 'frag': 'GLShader', 'd-objdump': 'DObjdump', 'weechatlog': 'IrcLogs',
358 'tcsh': 'Tcsh', 'objdump': 'Objdump', 'pyw': 'Python', 'h++': 'Cpp',
334 'tcsh': 'Tcsh', 'objdump': 'Objdump', 'pyw': 'Python', 'h++': 'Cpp',
359 'py3tb': 'Python3Traceback', 'jsp': 'Jsp', 'sql': 'Sql', 'mak': 'Makefile',
335 'py3tb': 'Python3Traceback', 'jsp': 'Jsp', 'sql': 'Sql', 'mak': 'Makefile',
360 'php': 'Php', 'mao': 'Mako', 'man': 'Groff', 'dylan': 'Dylan', 'sass':
336 'php': 'Php', 'mao': 'Mako', 'man': 'Groff', 'dylan': 'Dylan', 'sass':
361 'Sass', 'cfml': 'ColdfusionHtml', 'darcspatch': 'DarcsPatch', 'tpl':
337 'Sass', 'cfml': 'ColdfusionHtml', 'darcspatch': 'DarcsPatch', 'tpl':
362 'Smarty', 'm': 'ObjectiveC', 'f90': 'Fortran', 'mod': 'Modula2', 'sh':
338 'Smarty', 'm': 'ObjectiveC', 'f90': 'Fortran', 'mod': 'Modula2', 'sh':
363 'Bash', 'lhs': 'LiterateHaskell', 'sources.list': 'SourcesList', 'axd':
339 'Bash', 'lhs': 'LiterateHaskell', 'sources.list': 'SourcesList', 'axd':
364 'VbNetAspx', 'sc': 'Python'}
340 'VbNetAspx', 'sc': 'Python'}
365
341
366 repos_path = get_repos_path()
342 repos_path = get_repos_path()
367 p = os.path.join(repos_path, repo_name)
343 p = os.path.join(repos_path, repo_name)
368 repo = get_repo(p)
344 repo = get_repo(p)
369 tip = repo.get_changeset()
345 tip = repo.get_changeset()
370 code_stats = {}
346 code_stats = {}
371
347
372 def aggregate(cs):
348 def aggregate(cs):
373 for f in cs[2]:
349 for f in cs[2]:
374 ext = f.extension
350 ext = f.extension
375 key = LANGUAGES_EXTENSIONS_MAP.get(ext, ext)
351 key = LANGUAGES_EXTENSIONS_MAP.get(ext, ext)
376 key = key or ext
352 key = key or ext
377 if ext in LANGUAGES_EXTENSIONS_MAP.keys():
353 if ext in LANGUAGES_EXTENSIONS_MAP.keys():
378 if code_stats.has_key(key):
354 if code_stats.has_key(key):
379 code_stats[key] += 1
355 code_stats[key] += 1
380 else:
356 else:
381 code_stats[key] = 1
357 code_stats[key] = 1
382
358
383 map(aggregate, tip.walk('/'))
359 map(aggregate, tip.walk('/'))
384
360
385 return code_stats or {}
361 return code_stats or {}
386
362
387
363
388
364
389
365
@@ -1,143 +1,90 b''
1 import os
1 from rhodecode.lib.utils import BasePasterCommand, Command
2 from paste.script.command import Command, BadCommand
3 import paste.deploy
4 from pylons import config
5
2
6
3
7 __all__ = ['CeleryDaemonCommand', 'CeleryBeatCommand',
4 __all__ = ['CeleryDaemonCommand', 'CeleryBeatCommand',
8 'CAMQPAdminCommand', 'CeleryEventCommand']
5 'CAMQPAdminCommand', 'CeleryEventCommand']
9
6
10
7
11 class CeleryCommand(Command):
8 class CeleryDaemonCommand(BasePasterCommand):
12 """
13 Abstract Base Class for celery commands.
14
15 The celery commands are somewhat aggressive about loading
16 celery.conf, and since our module sets the `CELERY_LOADER`
17 environment variable to our loader, we have to bootstrap a bit and
18 make sure we've had a chance to load the pylons config off of the
19 command line, otherwise everything fails.
20 """
21 min_args = 1
22 min_args_error = "Please provide a paster config file as an argument."
23 takes_config_file = 1
24 requires_config_file = True
25
26 def run(self, args):
27 """
28 Overrides Command.run
29
30 Checks for a config file argument and loads it.
31 """
32 if len(args) < self.min_args:
33 raise BadCommand(
34 self.min_args_error % {'min_args': self.min_args,
35 'actual_args': len(args)})
36 # Decrement because we're going to lob off the first argument.
37 # @@ This is hacky
38 self.min_args -= 1
39 self.bootstrap_config(args[0])
40 self.update_parser()
41 return super(CeleryCommand, self).run(args[1:])
42
43 def update_parser(self):
44 """
45 Abstract method. Allows for the class's parser to be updated
46 before the superclass's `run` method is called. Necessary to
47 allow options/arguments to be passed through to the underlying
48 celery command.
49 """
50 raise NotImplementedError("Abstract Method.")
51
52 def bootstrap_config(self, conf):
53 """
54 Loads the pylons configuration.
55 """
56 path_to_ini_file = os.path.realpath(conf)
57 conf = paste.deploy.appconfig('config:' + path_to_ini_file)
58 config.init_app(conf.global_conf, conf.local_conf)
59
60
61 class CeleryDaemonCommand(CeleryCommand):
62 """Start the celery worker
9 """Start the celery worker
63
10
64 Starts the celery worker that uses a paste.deploy configuration
11 Starts the celery worker that uses a paste.deploy configuration
65 file.
12 file.
66 """
13 """
67 usage = 'CONFIG_FILE [celeryd options...]'
14 usage = 'CONFIG_FILE [celeryd options...]'
68 summary = __doc__.splitlines()[0]
15 summary = __doc__.splitlines()[0]
69 description = "".join(__doc__.splitlines()[2:])
16 description = "".join(__doc__.splitlines()[2:])
70
17
71 parser = Command.standard_parser(quiet=True)
18 parser = Command.standard_parser(quiet=True)
72
19
73 def update_parser(self):
20 def update_parser(self):
74 from celery.bin import celeryd
21 from celery.bin import celeryd
75 for x in celeryd.WorkerCommand().get_options():
22 for x in celeryd.WorkerCommand().get_options():
76 self.parser.add_option(x)
23 self.parser.add_option(x)
77
24
78 def command(self):
25 def command(self):
79 from celery.bin import celeryd
26 from celery.bin import celeryd
80 return celeryd.WorkerCommand().run(**vars(self.options))
27 return celeryd.WorkerCommand().run(**vars(self.options))
81
28
82
29
83 class CeleryBeatCommand(CeleryCommand):
30 class CeleryBeatCommand(BasePasterCommand):
84 """Start the celery beat server
31 """Start the celery beat server
85
32
86 Starts the celery beat server using a paste.deploy configuration
33 Starts the celery beat server using a paste.deploy configuration
87 file.
34 file.
88 """
35 """
89 usage = 'CONFIG_FILE [celerybeat options...]'
36 usage = 'CONFIG_FILE [celerybeat options...]'
90 summary = __doc__.splitlines()[0]
37 summary = __doc__.splitlines()[0]
91 description = "".join(__doc__.splitlines()[2:])
38 description = "".join(__doc__.splitlines()[2:])
92
39
93 parser = Command.standard_parser(quiet=True)
40 parser = Command.standard_parser(quiet=True)
94
41
95 def update_parser(self):
42 def update_parser(self):
96 from celery.bin import celerybeat
43 from celery.bin import celerybeat
97 for x in celerybeat.BeatCommand().get_options():
44 for x in celerybeat.BeatCommand().get_options():
98 self.parser.add_option(x)
45 self.parser.add_option(x)
99
46
100 def command(self):
47 def command(self):
101 from celery.bin import celerybeat
48 from celery.bin import celerybeat
102 return celerybeat.BeatCommand(**vars(self.options))
49 return celerybeat.BeatCommand(**vars(self.options))
103
50
104 class CAMQPAdminCommand(CeleryCommand):
51 class CAMQPAdminCommand(BasePasterCommand):
105 """CAMQP Admin
52 """CAMQP Admin
106
53
107 CAMQP celery admin tool.
54 CAMQP celery admin tool.
108 """
55 """
109 usage = 'CONFIG_FILE [camqadm options...]'
56 usage = 'CONFIG_FILE [camqadm options...]'
110 summary = __doc__.splitlines()[0]
57 summary = __doc__.splitlines()[0]
111 description = "".join(__doc__.splitlines()[2:])
58 description = "".join(__doc__.splitlines()[2:])
112
59
113 parser = Command.standard_parser(quiet=True)
60 parser = Command.standard_parser(quiet=True)
114
61
115 def update_parser(self):
62 def update_parser(self):
116 from celery.bin import camqadm
63 from celery.bin import camqadm
117 for x in camqadm.OPTION_LIST:
64 for x in camqadm.OPTION_LIST:
118 self.parser.add_option(x)
65 self.parser.add_option(x)
119
66
120 def command(self):
67 def command(self):
121 from celery.bin import camqadm
68 from celery.bin import camqadm
122 return camqadm.camqadm(*self.args, **vars(self.options))
69 return camqadm.camqadm(*self.args, **vars(self.options))
123
70
124
71
125 class CeleryEventCommand(CeleryCommand):
72 class CeleryEventCommand(BasePasterCommand):
126 """Celery event commandd.
73 """Celery event commandd.
127
74
128 Capture celery events.
75 Capture celery events.
129 """
76 """
130 usage = 'CONFIG_FILE [celeryev options...]'
77 usage = 'CONFIG_FILE [celeryev options...]'
131 summary = __doc__.splitlines()[0]
78 summary = __doc__.splitlines()[0]
132 description = "".join(__doc__.splitlines()[2:])
79 description = "".join(__doc__.splitlines()[2:])
133
80
134 parser = Command.standard_parser(quiet=True)
81 parser = Command.standard_parser(quiet=True)
135
82
136 def update_parser(self):
83 def update_parser(self):
137 from celery.bin import celeryev
84 from celery.bin import celeryev
138 for x in celeryev.OPTION_LIST:
85 for x in celeryev.OPTION_LIST:
139 self.parser.add_option(x)
86 self.parser.add_option(x)
140
87
141 def command(self):
88 def command(self):
142 from celery.bin import celeryev
89 from celery.bin import celeryev
143 return celeryev.run_celeryev(**vars(self.options))
90 return celeryev.run_celeryev(**vars(self.options))
@@ -1,188 +1,193 b''
1 import os
1 import os
2 import sys
2 import sys
3 import traceback
3 from os.path import dirname as dn, join as jn
4 from os.path import dirname as dn, join as jn
4
5
5 #to get the rhodecode import
6 #to get the rhodecode import
6 sys.path.append(dn(dn(dn(os.path.realpath(__file__)))))
7 sys.path.append(dn(dn(dn(os.path.realpath(__file__)))))
7
8
9 from rhodecode.model import init_model
10 from rhodecode.model.scm import ScmModel
8 from rhodecode.config.environment import load_environment
11 from rhodecode.config.environment import load_environment
9 from rhodecode.model.scm import ScmModel
12 from rhodecode.lib.utils import BasePasterCommand, Command, add_cache
13
10 from shutil import rmtree
14 from shutil import rmtree
11 from webhelpers.html.builder import escape
15 from webhelpers.html.builder import escape
12 from vcs.utils.lazy import LazyProperty
16 from vcs.utils.lazy import LazyProperty
13
17
18 from sqlalchemy import engine_from_config
19
14 from whoosh.analysis import RegexTokenizer, LowercaseFilter, StopFilter
20 from whoosh.analysis import RegexTokenizer, LowercaseFilter, StopFilter
15 from whoosh.fields import TEXT, ID, STORED, Schema, FieldType
21 from whoosh.fields import TEXT, ID, STORED, Schema, FieldType
16 from whoosh.index import create_in, open_dir
22 from whoosh.index import create_in, open_dir
17 from whoosh.formats import Characters
23 from whoosh.formats import Characters
18 from whoosh.highlight import highlight, SimpleFragmenter, HtmlFormatter
24 from whoosh.highlight import highlight, SimpleFragmenter, HtmlFormatter
19
25
20 import traceback
21
26
22 #EXTENSIONS WE WANT TO INDEX CONTENT OFF
27 #EXTENSIONS WE WANT TO INDEX CONTENT OFF
23 INDEX_EXTENSIONS = ['action', 'adp', 'ashx', 'asmx', 'aspx', 'asx', 'axd', 'c',
28 INDEX_EXTENSIONS = ['action', 'adp', 'ashx', 'asmx', 'aspx', 'asx', 'axd', 'c',
24 'cfg', 'cfm', 'cpp', 'cs', 'css', 'diff', 'do', 'el', 'erl',
29 'cfg', 'cfm', 'cpp', 'cs', 'css', 'diff', 'do', 'el', 'erl',
25 'h', 'htm', 'html', 'ini', 'java', 'js', 'jsp', 'jspx', 'lisp',
30 'h', 'htm', 'html', 'ini', 'java', 'js', 'jsp', 'jspx', 'lisp',
26 'lua', 'm', 'mako', 'ml', 'pas', 'patch', 'php', 'php3',
31 'lua', 'm', 'mako', 'ml', 'pas', 'patch', 'php', 'php3',
27 'php4', 'phtml', 'pm', 'py', 'rb', 'rst', 's', 'sh', 'sql',
32 'php4', 'phtml', 'pm', 'py', 'rb', 'rst', 's', 'sh', 'sql',
28 'tpl', 'txt', 'vim', 'wss', 'xhtml', 'xml', 'xsl', 'xslt',
33 'tpl', 'txt', 'vim', 'wss', 'xhtml', 'xml', 'xsl', 'xslt',
29 'yaws']
34 'yaws']
30
35
31 #CUSTOM ANALYZER wordsplit + lowercase filter
36 #CUSTOM ANALYZER wordsplit + lowercase filter
32 ANALYZER = RegexTokenizer(expression=r"\w+") | LowercaseFilter()
37 ANALYZER = RegexTokenizer(expression=r"\w+") | LowercaseFilter()
33
38
34
39
35 #INDEX SCHEMA DEFINITION
40 #INDEX SCHEMA DEFINITION
36 SCHEMA = Schema(owner=TEXT(),
41 SCHEMA = Schema(owner=TEXT(),
37 repository=TEXT(stored=True),
42 repository=TEXT(stored=True),
38 path=TEXT(stored=True),
43 path=TEXT(stored=True),
39 content=FieldType(format=Characters(ANALYZER),
44 content=FieldType(format=Characters(ANALYZER),
40 scorable=True, stored=True),
45 scorable=True, stored=True),
41 modtime=STORED(), extension=TEXT(stored=True))
46 modtime=STORED(), extension=TEXT(stored=True))
42
47
43
48
44 IDX_NAME = 'HG_INDEX'
49 IDX_NAME = 'HG_INDEX'
45 FORMATTER = HtmlFormatter('span', between='\n<span class="break">...</span>\n')
50 FORMATTER = HtmlFormatter('span', between='\n<span class="break">...</span>\n')
46 FRAGMENTER = SimpleFragmenter(200)
51 FRAGMENTER = SimpleFragmenter(200)
47
52
48 from paste.script import command
49 import ConfigParser
50
53
51 class MakeIndex(command.Command):
54 class MakeIndex(BasePasterCommand):
52
55
53 max_args = 1
56 max_args = 1
54 min_args = 1
57 min_args = 1
55
58
56 usage = "CONFIG_FILE"
59 usage = "CONFIG_FILE"
57 summary = "Creates index for full text search given configuration file"
60 summary = "Creates index for full text search given configuration file"
58 group_name = "RhodeCode"
61 group_name = "RhodeCode"
59 takes_config_file = -1
62 takes_config_file = -1
60 parser = command.Command.standard_parser(verbose=True)
63 parser = Command.standard_parser(verbose=True)
61 parser.add_option('--repo-location',
64
62 action='store',
63 dest='repo_location',
64 help="Specifies repositories location to index REQUIRED",
65 )
66 parser.add_option('-f',
67 action='store_true',
68 dest='full_index',
69 help="Specifies that index should be made full i.e"
70 " destroy old and build from scratch",
71 default=False)
72 def command(self):
65 def command(self):
73 config_name = self.args[0]
74 p = config_name.split('/')
75 root = '.' if len(p) == 1 else '/'.join(p[:-1])
76 config = ConfigParser.ConfigParser({'here':root})
77 config.read(config_name)
78
66
79 index_location = dict(config.items('app:main'))['index_dir']
67 from pylons import config
68 add_cache(config)
69 engine = engine_from_config(config, 'sqlalchemy.db1.')
70 init_model(engine)
71
72 index_location = config['index_dir']
80 repo_location = self.options.repo_location
73 repo_location = self.options.repo_location
81
74
82 #======================================================================
75 #======================================================================
83 # WHOOSH DAEMON
76 # WHOOSH DAEMON
84 #======================================================================
77 #======================================================================
85 from rhodecode.lib.pidlock import LockHeld, DaemonLock
78 from rhodecode.lib.pidlock import LockHeld, DaemonLock
86 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
79 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
87 try:
80 try:
88 l = DaemonLock()
81 l = DaemonLock()
89 WhooshIndexingDaemon(index_location=index_location,
82 WhooshIndexingDaemon(index_location=index_location,
90 repo_location=repo_location)\
83 repo_location=repo_location)\
91 .run(full_index=self.options.full_index)
84 .run(full_index=self.options.full_index)
92 l.release()
85 l.release()
93 except LockHeld:
86 except LockHeld:
94 sys.exit(1)
87 sys.exit(1)
95
88
89 def update_parser(self):
90 self.parser.add_option('--repo-location',
91 action='store',
92 dest='repo_location',
93 help="Specifies repositories location to index REQUIRED",
94 )
95 self.parser.add_option('-f',
96 action='store_true',
97 dest='full_index',
98 help="Specifies that index should be made full i.e"
99 " destroy old and build from scratch",
100 default=False)
96
101
97 class ResultWrapper(object):
102 class ResultWrapper(object):
98 def __init__(self, search_type, searcher, matcher, highlight_items):
103 def __init__(self, search_type, searcher, matcher, highlight_items):
99 self.search_type = search_type
104 self.search_type = search_type
100 self.searcher = searcher
105 self.searcher = searcher
101 self.matcher = matcher
106 self.matcher = matcher
102 self.highlight_items = highlight_items
107 self.highlight_items = highlight_items
103 self.fragment_size = 200 / 2
108 self.fragment_size = 200 / 2
104
109
105 @LazyProperty
110 @LazyProperty
106 def doc_ids(self):
111 def doc_ids(self):
107 docs_id = []
112 docs_id = []
108 while self.matcher.is_active():
113 while self.matcher.is_active():
109 docnum = self.matcher.id()
114 docnum = self.matcher.id()
110 chunks = [offsets for offsets in self.get_chunks()]
115 chunks = [offsets for offsets in self.get_chunks()]
111 docs_id.append([docnum, chunks])
116 docs_id.append([docnum, chunks])
112 self.matcher.next()
117 self.matcher.next()
113 return docs_id
118 return docs_id
114
119
115 def __str__(self):
120 def __str__(self):
116 return '<%s at %s>' % (self.__class__.__name__, len(self.doc_ids))
121 return '<%s at %s>' % (self.__class__.__name__, len(self.doc_ids))
117
122
118 def __repr__(self):
123 def __repr__(self):
119 return self.__str__()
124 return self.__str__()
120
125
121 def __len__(self):
126 def __len__(self):
122 return len(self.doc_ids)
127 return len(self.doc_ids)
123
128
124 def __iter__(self):
129 def __iter__(self):
125 """
130 """
126 Allows Iteration over results,and lazy generate content
131 Allows Iteration over results,and lazy generate content
127
132
128 *Requires* implementation of ``__getitem__`` method.
133 *Requires* implementation of ``__getitem__`` method.
129 """
134 """
130 for docid in self.doc_ids:
135 for docid in self.doc_ids:
131 yield self.get_full_content(docid)
136 yield self.get_full_content(docid)
132
137
133 def __getslice__(self, i, j):
138 def __getslice__(self, i, j):
134 """
139 """
135 Slicing of resultWrapper
140 Slicing of resultWrapper
136 """
141 """
137 slice = []
142 slice = []
138 for docid in self.doc_ids[i:j]:
143 for docid in self.doc_ids[i:j]:
139 slice.append(self.get_full_content(docid))
144 slice.append(self.get_full_content(docid))
140 return slice
145 return slice
141
146
142
147
143 def get_full_content(self, docid):
148 def get_full_content(self, docid):
144 res = self.searcher.stored_fields(docid[0])
149 res = self.searcher.stored_fields(docid[0])
145 f_path = res['path'][res['path'].find(res['repository']) \
150 f_path = res['path'][res['path'].find(res['repository']) \
146 + len(res['repository']):].lstrip('/')
151 + len(res['repository']):].lstrip('/')
147
152
148 content_short = self.get_short_content(res, docid[1])
153 content_short = self.get_short_content(res, docid[1])
149 res.update({'content_short':content_short,
154 res.update({'content_short':content_short,
150 'content_short_hl':self.highlight(content_short),
155 'content_short_hl':self.highlight(content_short),
151 'f_path':f_path})
156 'f_path':f_path})
152
157
153 return res
158 return res
154
159
155 def get_short_content(self, res, chunks):
160 def get_short_content(self, res, chunks):
156
161
157 return ''.join([res['content'][chunk[0]:chunk[1]] for chunk in chunks])
162 return ''.join([res['content'][chunk[0]:chunk[1]] for chunk in chunks])
158
163
159 def get_chunks(self):
164 def get_chunks(self):
160 """
165 """
161 Smart function that implements chunking the content
166 Smart function that implements chunking the content
162 but not overlap chunks so it doesn't highlight the same
167 but not overlap chunks so it doesn't highlight the same
163 close occurrences twice.
168 close occurrences twice.
164 @param matcher:
169 @param matcher:
165 @param size:
170 @param size:
166 """
171 """
167 memory = [(0, 0)]
172 memory = [(0, 0)]
168 for span in self.matcher.spans():
173 for span in self.matcher.spans():
169 start = span.startchar or 0
174 start = span.startchar or 0
170 end = span.endchar or 0
175 end = span.endchar or 0
171 start_offseted = max(0, start - self.fragment_size)
176 start_offseted = max(0, start - self.fragment_size)
172 end_offseted = end + self.fragment_size
177 end_offseted = end + self.fragment_size
173
178
174 if start_offseted < memory[-1][1]:
179 if start_offseted < memory[-1][1]:
175 start_offseted = memory[-1][1]
180 start_offseted = memory[-1][1]
176 memory.append((start_offseted, end_offseted,))
181 memory.append((start_offseted, end_offseted,))
177 yield (start_offseted, end_offseted,)
182 yield (start_offseted, end_offseted,)
178
183
179 def highlight(self, content, top=5):
184 def highlight(self, content, top=5):
180 if self.search_type != 'content':
185 if self.search_type != 'content':
181 return ''
186 return ''
182 hl = highlight(escape(content),
187 hl = highlight(escape(content),
183 self.highlight_items,
188 self.highlight_items,
184 analyzer=ANALYZER,
189 analyzer=ANALYZER,
185 fragmenter=FRAGMENTER,
190 fragmenter=FRAGMENTER,
186 formatter=FORMATTER,
191 formatter=FORMATTER,
187 top=top)
192 top=top)
188 return hl
193 return hl
@@ -1,524 +1,611 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """
2 """
3 package.rhodecode.lib.utils
3 package.rhodecode.lib.utils
4 ~~~~~~~~~~~~~~
4 ~~~~~~~~~~~~~~
5
5
6 Utilities library for RhodeCode
6 Utilities library for RhodeCode
7
7
8 :created_on: Apr 18, 2010
8 :created_on: Apr 18, 2010
9 :author: marcink
9 :author: marcink
10 :copyright: (C) 2009-2010 Marcin Kuzminski <marcin@python-works.com>
10 :copyright: (C) 2009-2010 Marcin Kuzminski <marcin@python-works.com>
11 :license: GPLv3, see COPYING for more details.
11 :license: GPLv3, see COPYING for more details.
12 """
12 """
13 # This program is free software; you can redistribute it and/or
13 # This program is free software; you can redistribute it and/or
14 # modify it under the terms of the GNU General Public License
14 # modify it under the terms of the GNU General Public License
15 # as published by the Free Software Foundation; version 2
15 # as published by the Free Software Foundation; version 2
16 # of the License or (at your opinion) any later version of the license.
16 # of the License or (at your opinion) any later version of the license.
17 #
17 #
18 # This program is distributed in the hope that it will be useful,
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
21 # GNU General Public License for more details.
22 #
22 #
23 # You should have received a copy of the GNU General Public License
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
25 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
26 # MA 02110-1301, USA.
26 # MA 02110-1301, USA.
27
27
28 import os
28 import os
29 import logging
29 import logging
30 import datetime
30 import datetime
31 import traceback
31 import traceback
32 import ConfigParser
32 import ConfigParser
33
33
34 from UserDict import DictMixin
34 from UserDict import DictMixin
35
35
36 from mercurial import ui, config, hg
36 from mercurial import ui, config, hg
37 from mercurial.error import RepoError
37 from mercurial.error import RepoError
38
38
39 from paste.script import command
39 import paste
40 import beaker
41 from paste.script.command import Command, BadCommand
42
40 from vcs.backends.base import BaseChangeset
43 from vcs.backends.base import BaseChangeset
41 from vcs.utils.lazy import LazyProperty
44 from vcs.utils.lazy import LazyProperty
42
45
43 from rhodecode.model import meta
46 from rhodecode.model import meta
44 from rhodecode.model.caching_query import FromCache
47 from rhodecode.model.caching_query import FromCache
45 from rhodecode.model.db import Repository, User, RhodeCodeUi, UserLog
48 from rhodecode.model.db import Repository, User, RhodeCodeUi, UserLog
46 from rhodecode.model.repo import RepoModel
49 from rhodecode.model.repo import RepoModel
47 from rhodecode.model.user import UserModel
50 from rhodecode.model.user import UserModel
48
51
49 log = logging.getLogger(__name__)
52 log = logging.getLogger(__name__)
50
53
51
54
52 def get_repo_slug(request):
55 def get_repo_slug(request):
53 return request.environ['pylons.routes_dict'].get('repo_name')
56 return request.environ['pylons.routes_dict'].get('repo_name')
54
57
55 def action_logger(user, action, repo, ipaddr='', sa=None):
58 def action_logger(user, action, repo, ipaddr='', sa=None):
56 """
59 """
57 Action logger for various actions made by users
60 Action logger for various actions made by users
58
61
59 :param user: user that made this action, can be a unique username string or
62 :param user: user that made this action, can be a unique username string or
60 object containing user_id attribute
63 object containing user_id attribute
61 :param action: action to log, should be on of predefined unique actions for
64 :param action: action to log, should be on of predefined unique actions for
62 easy translations
65 easy translations
63 :param repo: string name of repository or object containing repo_id,
66 :param repo: string name of repository or object containing repo_id,
64 that action was made on
67 that action was made on
65 :param ipaddr: optional ip address from what the action was made
68 :param ipaddr: optional ip address from what the action was made
66 :param sa: optional sqlalchemy session
69 :param sa: optional sqlalchemy session
67
70
68 """
71 """
69
72
70 if not sa:
73 if not sa:
71 sa = meta.Session()
74 sa = meta.Session()
72
75
73 try:
76 try:
74 um = UserModel()
77 um = UserModel()
75 if hasattr(user, 'user_id'):
78 if hasattr(user, 'user_id'):
76 user_obj = user
79 user_obj = user
77 elif isinstance(user, basestring):
80 elif isinstance(user, basestring):
78 user_obj = um.get_by_username(user, cache=False)
81 user_obj = um.get_by_username(user, cache=False)
79 else:
82 else:
80 raise Exception('You have to provide user object or username')
83 raise Exception('You have to provide user object or username')
81
84
82
85
83 rm = RepoModel()
86 rm = RepoModel()
84 if hasattr(repo, 'repo_id'):
87 if hasattr(repo, 'repo_id'):
85 repo_obj = rm.get(repo.repo_id, cache=False)
88 repo_obj = rm.get(repo.repo_id, cache=False)
86 repo_name = repo_obj.repo_name
89 repo_name = repo_obj.repo_name
87 elif isinstance(repo, basestring):
90 elif isinstance(repo, basestring):
88 repo_name = repo.lstrip('/')
91 repo_name = repo.lstrip('/')
89 repo_obj = rm.get_by_repo_name(repo_name, cache=False)
92 repo_obj = rm.get_by_repo_name(repo_name, cache=False)
90 else:
93 else:
91 raise Exception('You have to provide repository to action logger')
94 raise Exception('You have to provide repository to action logger')
92
95
93
96
94 user_log = UserLog()
97 user_log = UserLog()
95 user_log.user_id = user_obj.user_id
98 user_log.user_id = user_obj.user_id
96 user_log.action = action
99 user_log.action = action
97
100
98 user_log.repository_id = repo_obj.repo_id
101 user_log.repository_id = repo_obj.repo_id
99 user_log.repository_name = repo_name
102 user_log.repository_name = repo_name
100
103
101 user_log.action_date = datetime.datetime.now()
104 user_log.action_date = datetime.datetime.now()
102 user_log.user_ip = ipaddr
105 user_log.user_ip = ipaddr
103 sa.add(user_log)
106 sa.add(user_log)
104 sa.commit()
107 sa.commit()
105
108
106 log.info('Adding user %s, action %s on %s', user_obj, action, repo)
109 log.info('Adding user %s, action %s on %s', user_obj, action, repo)
107 except:
110 except:
108 log.error(traceback.format_exc())
111 log.error(traceback.format_exc())
109 sa.rollback()
112 sa.rollback()
110
113
111 def get_repos(path, recursive=False, initial=False):
114 def get_repos(path, recursive=False, initial=False):
112 """
115 """
113 Scans given path for repos and return (name,(type,path)) tuple
116 Scans given path for repos and return (name,(type,path)) tuple
114 :param prefix:
117 :param prefix:
115 :param path:
118 :param path:
116 :param recursive:
119 :param recursive:
117 :param initial:
120 :param initial:
118 """
121 """
119 from vcs.utils.helpers import get_scm
122 from vcs.utils.helpers import get_scm
120 from vcs.exceptions import VCSError
123 from vcs.exceptions import VCSError
121
124
122 try:
125 try:
123 scm = get_scm(path)
126 scm = get_scm(path)
124 except:
127 except:
125 pass
128 pass
126 else:
129 else:
127 raise Exception('The given path %s should not be a repository got %s',
130 raise Exception('The given path %s should not be a repository got %s',
128 path, scm)
131 path, scm)
129
132
130 for dirpath in os.listdir(path):
133 for dirpath in os.listdir(path):
131 try:
134 try:
132 yield dirpath, get_scm(os.path.join(path, dirpath))
135 yield dirpath, get_scm(os.path.join(path, dirpath))
133 except VCSError:
136 except VCSError:
134 pass
137 pass
135
138
136 def check_repo_fast(repo_name, base_path):
139 def check_repo_fast(repo_name, base_path):
137 """
140 """
138 Check given path for existance of directory
141 Check given path for existance of directory
139 :param repo_name:
142 :param repo_name:
140 :param base_path:
143 :param base_path:
141
144
142 :return False: if this directory is present
145 :return False: if this directory is present
143 """
146 """
144 if os.path.isdir(os.path.join(base_path, repo_name)):return False
147 if os.path.isdir(os.path.join(base_path, repo_name)):return False
145 return True
148 return True
146
149
147 def check_repo(repo_name, base_path, verify=True):
150 def check_repo(repo_name, base_path, verify=True):
148
151
149 repo_path = os.path.join(base_path, repo_name)
152 repo_path = os.path.join(base_path, repo_name)
150
153
151 try:
154 try:
152 if not check_repo_fast(repo_name, base_path):
155 if not check_repo_fast(repo_name, base_path):
153 return False
156 return False
154 r = hg.repository(ui.ui(), repo_path)
157 r = hg.repository(ui.ui(), repo_path)
155 if verify:
158 if verify:
156 hg.verify(r)
159 hg.verify(r)
157 #here we hnow that repo exists it was verified
160 #here we hnow that repo exists it was verified
158 log.info('%s repo is already created', repo_name)
161 log.info('%s repo is already created', repo_name)
159 return False
162 return False
160 except RepoError:
163 except RepoError:
161 #it means that there is no valid repo there...
164 #it means that there is no valid repo there...
162 log.info('%s repo is free for creation', repo_name)
165 log.info('%s repo is free for creation', repo_name)
163 return True
166 return True
164
167
165 def ask_ok(prompt, retries=4, complaint='Yes or no, please!'):
168 def ask_ok(prompt, retries=4, complaint='Yes or no, please!'):
166 while True:
169 while True:
167 ok = raw_input(prompt)
170 ok = raw_input(prompt)
168 if ok in ('y', 'ye', 'yes'): return True
171 if ok in ('y', 'ye', 'yes'): return True
169 if ok in ('n', 'no', 'nop', 'nope'): return False
172 if ok in ('n', 'no', 'nop', 'nope'): return False
170 retries = retries - 1
173 retries = retries - 1
171 if retries < 0: raise IOError
174 if retries < 0: raise IOError
172 print complaint
175 print complaint
173
176
174 #propagated from mercurial documentation
177 #propagated from mercurial documentation
175 ui_sections = ['alias', 'auth',
178 ui_sections = ['alias', 'auth',
176 'decode/encode', 'defaults',
179 'decode/encode', 'defaults',
177 'diff', 'email',
180 'diff', 'email',
178 'extensions', 'format',
181 'extensions', 'format',
179 'merge-patterns', 'merge-tools',
182 'merge-patterns', 'merge-tools',
180 'hooks', 'http_proxy',
183 'hooks', 'http_proxy',
181 'smtp', 'patch',
184 'smtp', 'patch',
182 'paths', 'profiling',
185 'paths', 'profiling',
183 'server', 'trusted',
186 'server', 'trusted',
184 'ui', 'web', ]
187 'ui', 'web', ]
185
188
186 def make_ui(read_from='file', path=None, checkpaths=True):
189 def make_ui(read_from='file', path=None, checkpaths=True):
187 """
190 """
188 A function that will read python rc files or database
191 A function that will read python rc files or database
189 and make an mercurial ui object from read options
192 and make an mercurial ui object from read options
190
193
191 :param path: path to mercurial config file
194 :param path: path to mercurial config file
192 :param checkpaths: check the path
195 :param checkpaths: check the path
193 :param read_from: read from 'file' or 'db'
196 :param read_from: read from 'file' or 'db'
194 """
197 """
195
198
196 baseui = ui.ui()
199 baseui = ui.ui()
197
200
198 #clean the baseui object
201 #clean the baseui object
199 baseui._ocfg = config.config()
202 baseui._ocfg = config.config()
200 baseui._ucfg = config.config()
203 baseui._ucfg = config.config()
201 baseui._tcfg = config.config()
204 baseui._tcfg = config.config()
202
205
203 if read_from == 'file':
206 if read_from == 'file':
204 if not os.path.isfile(path):
207 if not os.path.isfile(path):
205 log.warning('Unable to read config file %s' % path)
208 log.warning('Unable to read config file %s' % path)
206 return False
209 return False
207 log.debug('reading hgrc from %s', path)
210 log.debug('reading hgrc from %s', path)
208 cfg = config.config()
211 cfg = config.config()
209 cfg.read(path)
212 cfg.read(path)
210 for section in ui_sections:
213 for section in ui_sections:
211 for k, v in cfg.items(section):
214 for k, v in cfg.items(section):
212 log.debug('settings ui from file[%s]%s:%s', section, k, v)
215 log.debug('settings ui from file[%s]%s:%s', section, k, v)
213 baseui.setconfig(section, k, v)
216 baseui.setconfig(section, k, v)
214
217
215
218
216 elif read_from == 'db':
219 elif read_from == 'db':
217 sa = meta.Session()
220 sa = meta.Session()
218 ret = sa.query(RhodeCodeUi)\
221 ret = sa.query(RhodeCodeUi)\
219 .options(FromCache("sql_cache_short",
222 .options(FromCache("sql_cache_short",
220 "get_hg_ui_settings")).all()
223 "get_hg_ui_settings")).all()
221
224
222 hg_ui = ret
225 hg_ui = ret
223 for ui_ in hg_ui:
226 for ui_ in hg_ui:
224 if ui_.ui_active:
227 if ui_.ui_active:
225 log.debug('settings ui from db[%s]%s:%s', ui_.ui_section,
228 log.debug('settings ui from db[%s]%s:%s', ui_.ui_section,
226 ui_.ui_key, ui_.ui_value)
229 ui_.ui_key, ui_.ui_value)
227 baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)
230 baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)
228
231
229 meta.Session.remove()
232 meta.Session.remove()
230 return baseui
233 return baseui
231
234
232
235
233 def set_rhodecode_config(config):
236 def set_rhodecode_config(config):
234 """
237 """
235 Updates pylons config with new settings from database
238 Updates pylons config with new settings from database
236 :param config:
239 :param config:
237 """
240 """
238 from rhodecode.model.settings import SettingsModel
241 from rhodecode.model.settings import SettingsModel
239 hgsettings = SettingsModel().get_app_settings()
242 hgsettings = SettingsModel().get_app_settings()
240
243
241 for k, v in hgsettings.items():
244 for k, v in hgsettings.items():
242 config[k] = v
245 config[k] = v
243
246
244 def invalidate_cache(cache_key, *args):
247 def invalidate_cache(cache_key, *args):
245 """
248 """
246 Puts cache invalidation task into db for
249 Puts cache invalidation task into db for
247 further global cache invalidation
250 further global cache invalidation
248 """
251 """
249 from rhodecode.model.scm import ScmModel
252 from rhodecode.model.scm import ScmModel
250
253
251 if cache_key.startswith('get_repo_cached_'):
254 if cache_key.startswith('get_repo_cached_'):
252 name = cache_key.split('get_repo_cached_')[-1]
255 name = cache_key.split('get_repo_cached_')[-1]
253 ScmModel().mark_for_invalidation(name)
256 ScmModel().mark_for_invalidation(name)
254
257
255 class EmptyChangeset(BaseChangeset):
258 class EmptyChangeset(BaseChangeset):
256 """
259 """
257 An dummy empty changeset. It's possible to pass hash when creating
260 An dummy empty changeset. It's possible to pass hash when creating
258 an EmptyChangeset
261 an EmptyChangeset
259 """
262 """
260
263
261 def __init__(self, cs='0' * 40):
264 def __init__(self, cs='0' * 40):
262 self._empty_cs = cs
265 self._empty_cs = cs
263 self.revision = -1
266 self.revision = -1
264 self.message = ''
267 self.message = ''
265 self.author = ''
268 self.author = ''
266 self.date = ''
269 self.date = ''
267
270
268 @LazyProperty
271 @LazyProperty
269 def raw_id(self):
272 def raw_id(self):
270 """
273 """
271 Returns raw string identifying this changeset, useful for web
274 Returns raw string identifying this changeset, useful for web
272 representation.
275 representation.
273 """
276 """
274 return self._empty_cs
277 return self._empty_cs
275
278
276 @LazyProperty
279 @LazyProperty
277 def short_id(self):
280 def short_id(self):
278 return self.raw_id[:12]
281 return self.raw_id[:12]
279
282
280 def get_file_changeset(self, path):
283 def get_file_changeset(self, path):
281 return self
284 return self
282
285
283 def get_file_content(self, path):
286 def get_file_content(self, path):
284 return u''
287 return u''
285
288
286 def get_file_size(self, path):
289 def get_file_size(self, path):
287 return 0
290 return 0
288
291
289 def repo2db_mapper(initial_repo_list, remove_obsolete=False):
292 def repo2db_mapper(initial_repo_list, remove_obsolete=False):
290 """
293 """
291 maps all found repositories into db
294 maps all found repositories into db
292 """
295 """
293
296
294 sa = meta.Session()
297 sa = meta.Session()
295 rm = RepoModel()
298 rm = RepoModel()
296 user = sa.query(User).filter(User.admin == True).first()
299 user = sa.query(User).filter(User.admin == True).first()
297
300
298 for name, repo in initial_repo_list.items():
301 for name, repo in initial_repo_list.items():
299 if not rm.get_by_repo_name(name, cache=False):
302 if not rm.get_by_repo_name(name, cache=False):
300 log.info('repository %s not found creating default', name)
303 log.info('repository %s not found creating default', name)
301
304
302 form_data = {
305 form_data = {
303 'repo_name':name,
306 'repo_name':name,
304 'repo_type':repo.alias,
307 'repo_type':repo.alias,
305 'description':repo.description \
308 'description':repo.description \
306 if repo.description != 'unknown' else \
309 if repo.description != 'unknown' else \
307 '%s repository' % name,
310 '%s repository' % name,
308 'private':False
311 'private':False
309 }
312 }
310 rm.create(form_data, user, just_db=True)
313 rm.create(form_data, user, just_db=True)
311
314
312 if remove_obsolete:
315 if remove_obsolete:
313 #remove from database those repositories that are not in the filesystem
316 #remove from database those repositories that are not in the filesystem
314 for repo in sa.query(Repository).all():
317 for repo in sa.query(Repository).all():
315 if repo.repo_name not in initial_repo_list.keys():
318 if repo.repo_name not in initial_repo_list.keys():
316 sa.delete(repo)
319 sa.delete(repo)
317 sa.commit()
320 sa.commit()
318
321
319 class OrderedDict(dict, DictMixin):
class OrderedDict(dict, DictMixin):
    """Dictionary that remembers insertion order.

    Backport of ``collections.OrderedDict`` for interpreters that lack it
    (Python < 2.7).  The key -> value mapping itself is stored in the
    inherited ``dict``; insertion order is tracked separately in a circular
    doubly linked list of ``[key, prev, next]`` cells kept in ``self.__map``,
    with ``self.__end`` acting as the sentinel node.  ``DictMixin`` supplies
    the derived mapping methods from the primitives defined here.
    """

    def __init__(self, *args, **kwds):
        # Same signature as dict(): at most one positional mapping/iterable
        # plus keyword items.
        if len(args) > 1:
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
        try:
            # Re-initialising an existing instance must not wipe its order
            # bookkeeping, so only build it when it does not exist yet.
            self.__end
        except AttributeError:
            self.clear()
        self.update(*args, **kwds)

    def clear(self):
        """Remove all items and reset the order-tracking linked list."""
        self.__end = end = []
        end += [None, end, end]         # sentinel node for doubly linked list
        self.__map = {}                 # key --> [key, prev, next]
        dict.clear(self)

    def __setitem__(self, key, value):
        # New keys are appended at the tail of the linked list (i.e. just
        # before the sentinel); overwriting an existing key keeps its slot.
        if key not in self:
            end = self.__end
            curr = end[1]
            curr[2] = end[1] = self.__map[key] = [key, curr, end]
        dict.__setitem__(self, key, value)

    def __delitem__(self, key):
        dict.__delitem__(self, key)
        # Unlink the key's cell by stitching its neighbours together.
        key, prev, next = self.__map.pop(key)
        prev[2] = next
        next[1] = prev

    def __iter__(self):
        # Walk the linked list forward from the sentinel: insertion order.
        end = self.__end
        curr = end[2]
        while curr is not end:
            yield curr[0]
            curr = curr[2]

    def __reversed__(self):
        # Walk the linked list backward from the sentinel.
        end = self.__end
        curr = end[1]
        while curr is not end:
            yield curr[0]
            curr = curr[1]

    def popitem(self, last=True):
        """Remove and return a ``(key, value)`` pair.

        Pairs are returned LIFO order if *last* is true, FIFO otherwise.
        Raises ``KeyError`` when the dictionary is empty.
        """
        if not self:
            raise KeyError('dictionary is empty')
        if last:
            key = reversed(self).next()
        else:
            key = iter(self).next()
        value = self.pop(key)
        return key, value

    def __reduce__(self):
        """Support pickling/copying.

        The linked-list attributes are temporarily detached so the pickled
        instance dict does not drag the (self-referential) bookkeeping
        structures along; order is rebuilt from the item list on load.
        """
        items = [[k, self[k]] for k in self]
        tmp = self.__map, self.__end
        del self.__map, self.__end
        inst_dict = vars(self).copy()
        self.__map, self.__end = tmp
        if inst_dict:
            return (self.__class__, (items,), inst_dict)
        return self.__class__, (items,)

    def keys(self):
        # Ordered key list via __iter__.
        return list(self)

    # Derived mapping operations come from DictMixin, built on the
    # primitives above, so they all observe insertion order.
    setdefault = DictMixin.setdefault
    update = DictMixin.update
    pop = DictMixin.pop
    values = DictMixin.values
    items = DictMixin.items
    iterkeys = DictMixin.iterkeys
    itervalues = DictMixin.itervalues
    iteritems = DictMixin.iteritems

    def __repr__(self):
        if not self:
            return '%s()' % (self.__class__.__name__,)
        return '%s(%r)' % (self.__class__.__name__, self.items())

    def copy(self):
        """Return a shallow copy preserving insertion order."""
        return self.__class__(self)

    @classmethod
    def fromkeys(cls, iterable, value=None):
        """Build an OrderedDict with keys from *iterable*, all mapped to *value*."""
        d = cls()
        for key in iterable:
            d[key] = value
        return d

    def __eq__(self, other):
        # Comparison to another OrderedDict is order-sensitive; comparison
        # to a plain dict is order-insensitive (matches stdlib semantics).
        if isinstance(other, OrderedDict):
            return len(self) == len(other) and self.items() == other.items()
        return dict.__eq__(self, other)

    def __ne__(self, other):
        return not self == other
#set cache regions for beaker so celery can utilise it
def add_cache(settings):
    """Configure beaker cache regions from a paste/pylons settings dict.

    Collects every ``beaker.cache.*`` / ``cache.*`` option (values are ini
    strings), then registers each region named in the ``regions`` option
    into ``beaker.cache.cache_regions``, applying defaults for anything not
    set explicitly: ``expire`` 60 seconds, the global ``lock_dir`` and the
    global ``type`` (falling back to ``'memory'``).

    :param settings: mapping of config option name -> string value
    """
    cache_settings = {'regions': None}
    # strip the beaker prefixes: 'beaker.cache.foo.expire' -> 'foo.expire'
    for key in settings.keys():
        for prefix in ['beaker.cache.', 'cache.']:
            if key.startswith(prefix):
                name = key.split(prefix)[1].strip()
                cache_settings[name] = settings[key].strip()
    if cache_settings['regions']:
        for region in cache_settings['regions'].split(','):
            region = region.strip()
            region_settings = {}
            # Match only keys namespaced to *this* region.  Matching on the
            # bare region name (the previous behaviour) also swallowed the
            # options of any region whose name merely starts with this one
            # (e.g. 'super' vs 'super_short_term') and raised IndexError on
            # a dotless key equal to the region name.
            region_prefix = region + '.'
            for key, value in cache_settings.items():
                if key.startswith(region_prefix):
                    region_settings[key.split('.')[1]] = value
            # fill in defaults for options the ini did not provide
            region_settings['expire'] = int(region_settings.get('expire',
                                                                60))
            region_settings.setdefault('lock_dir',
                                       cache_settings.get('lock_dir'))
            if 'type' not in region_settings:
                region_settings['type'] = cache_settings.get('type',
                                                             'memory')
            beaker.cache.cache_regions[region] = region_settings
419 #===============================================================================
447 #===============================================================================
420 # TEST FUNCTIONS AND CREATORS
448 # TEST FUNCTIONS AND CREATORS
421 #===============================================================================
449 #===============================================================================
def create_test_index(repo_location, full_index):
    """Makes default test index.

    Removes any index left over from a previous run, then builds a fresh
    Whoosh index under ``<repo_location>/index`` while holding the indexer
    daemon lock.  If another indexer already holds the lock the build is
    silently skipped.

    :param repo_location: path containing the repositories to index
    :param full_index: when True rebuild the whole index, otherwise update
    """
    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
    from rhodecode.lib.pidlock import DaemonLock, LockHeld
    import shutil

    index_location = os.path.join(repo_location, 'index')
    # start from a clean slate for reproducible test runs
    if os.path.exists(index_location):
        shutil.rmtree(index_location)

    try:
        l = DaemonLock()
        try:
            WhooshIndexingDaemon(index_location=index_location,
                                 repo_location=repo_location)\
                .run(full_index=full_index)
        finally:
            # Always free the pid lock, even when indexing blows up;
            # otherwise a failed run leaves a stale lock that blocks every
            # subsequent indexing attempt.
            l.release()
    except LockHeld:
        # another indexer is already running - nothing to do here
        pass
def create_test_env(repos_test_path, config):
    """Makes a fresh database and installs test repositories into a tmp dir.

    :param repos_test_path: directory where the test repositories live
    :param config: pylons-style config mapping; must provide
        ``sqlalchemy.db1.url`` and ``here``
    """
    from rhodecode.lib.db_manage import DbManage
    from rhodecode.tests import HG_REPO, GIT_REPO, NEW_HG_REPO, NEW_GIT_REPO, \
        HG_FORK, GIT_FORK, TESTS_TMP_PATH
    import tarfile
    import shutil
    from os.path import dirname as dn, join as jn, abspath

    log = logging.getLogger('TestEnvCreator')
    # create logger
    log.setLevel(logging.DEBUG)
    log.propagate = True
    # create console handler and set level to debug
    ch = logging.StreamHandler()
    ch.setLevel(logging.DEBUG)

    # create formatter
    formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")

    # add formatter to ch
    ch.setFormatter(formatter)

    # add ch to logger
    log.addHandler(ch)

    #PART ONE create db
    dbconf = config['sqlalchemy.db1.url']
    log.debug('making test db %s', dbconf)

    dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
                        tests=True)
    dbmanage.create_tables(override=True)
    dbmanage.config_prompt(repos_test_path)
    dbmanage.create_default_user()
    dbmanage.admin_prompt()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()

    #PART TWO make test repo
    log.debug('making test vcs repositories')

    # remove leftovers from previous test runs
    for r in [HG_REPO, GIT_REPO, NEW_HG_REPO, NEW_GIT_REPO, HG_FORK, GIT_FORK]:
        if os.path.isdir(jn(TESTS_TMP_PATH, r)):
            log.debug('removing %s', r)
            shutil.rmtree(jn(TESTS_TMP_PATH, r))

    #CREATE DEFAULT HG REPOSITORY
    cur_dir = dn(dn(abspath(__file__)))
    tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
    try:
        tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
    finally:
        # close the archive handle even when extraction fails, so the test
        # env setup does not leak an open file descriptor
        tar.close()
528
501 class UpgradeDb(command.Command):
529
#==============================================================================
# PASTER COMMANDS
#==============================================================================

class BasePasterCommand(Command):
    """
    Abstract Base Class for paster commands.

    The celery commands are somewhat aggressive about loading
    celery.conf, and since our module sets the `CELERY_LOADER`
    environment variable to our loader, we have to bootstrap a bit and
    make sure we've had a chance to load the pylons config off of the
    command line, otherwise everything fails.

    Subclasses must implement :meth:`update_parser` (and the inherited
    ``command`` entry point from paster's ``Command``).
    """
    # the single required positional argument is the paster .ini file
    min_args = 1
    min_args_error = "Please provide a paster config file as an argument."
    takes_config_file = 1
    requires_config_file = True

    def run(self, args):
        """
        Overrides Command.run

        Checks for a config file argument and loads it.
        """
        if len(args) < self.min_args:
            raise BadCommand(
                self.min_args_error % {'min_args': self.min_args,
                                       'actual_args': len(args)})

        # Decrement because we're going to lob off the first argument.
        # @@ This is hacky
        self.min_args -= 1
        # args[0] is the ini file: load pylons config before the underlying
        # command machinery runs, then let subclasses extend the parser
        self.bootstrap_config(args[0])
        self.update_parser()
        return super(BasePasterCommand, self).run(args[1:])

    def update_parser(self):
        """
        Abstract method. Allows for the class's parser to be updated
        before the superclass's `run` method is called. Necessary to
        allow options/arguments to be passed through to the underlying
        celery command.
        """
        raise NotImplementedError("Abstract Method.")

    def bootstrap_config(self, conf):
        """
        Loads the pylons configuration.

        :param conf: path to the paster .ini file
        """
        from pylons import config as pylonsconfig

        path_to_ini_file = os.path.realpath(conf)
        conf = paste.deploy.appconfig('config:' + path_to_ini_file)
        pylonsconfig.init_app(conf.global_conf, conf.local_conf)
class UpgradeDb(BasePasterCommand):
    """Command used for paster to upgrade our database to newer version
    """

    max_args = 1
    min_args = 1

    usage = "CONFIG_FILE"
    summary = "Upgrades current db to newer version given configuration file"
    group_name = "RhodeCode"

    parser = Command.standard_parser(verbose=True)

    def command(self):
        """Entry point for ``paster upgrade-db``.

        The actual schema migration is not written yet; fail loudly rather
        than pretend the upgrade happened.
        """
        # NOTE: dropped the dead `from pylons import config` that used to
        # live here - it was never used and shadowed the module-level name.
        raise NotImplementedError('Not implemented yet')

    def update_parser(self):
        """Register command-specific options (hook called by
        BasePasterCommand.run before the command executes)."""
        self.parser.add_option('--sql',
                      action='store_true',
                      dest='just_sql',
                      help="Prints upgrade sql for further investigation",
                      default=False)
General Comments 0
You need to be logged in to leave comments. Login now