Show More
@@ -0,0 +1,74 b'' | |||
|
# Celery configuration module: broker, result backend, worker and error-email
# settings, all derived from the Pylons ini file in the current working
# directory.  Loaded by the celery daemon on startup.
import sys
import os
import ConfigParser

# celery is started from the application root; the ini file lives there
root = os.getcwd()

# name of the Pylons ini file the settings below are read from
PYLONS_CONFIG_NAME = 'development.ini'

sys.path.append(root)
# 'here' mirrors the paste.deploy variable so %(here)s interpolation works
config = ConfigParser.ConfigParser({'here':root})
config.read('%s/%s' % (root, PYLONS_CONFIG_NAME))
# re-exported so tasks can read the application configuration
PYLONS_CONFIG = config

# List of modules to import when celery starts.
CELERY_IMPORTS = ("pylons_app.lib.celerylib.tasks",)

## Result store settings: task results go into the application database.
CELERY_RESULT_BACKEND = "database"
CELERY_RESULT_DBURI = dict(config.items('app:main'))['sqlalchemy.db1.url']
CELERY_RESULT_SERIALIZER = 'json'

BROKER_CONNECTION_MAX_RETRIES = 30

## Broker settings.
# NOTE(review): broker credentials are hard-coded sample values matching the
# rabbitmqctl instructions at the bottom -- consider moving them to the ini.
BROKER_HOST = "localhost"
BROKER_PORT = 5672
BROKER_VHOST = "rabbitmqhost"
BROKER_USER = "rabbitmq"
BROKER_PASSWORD = "qweqwe"

## Worker settings
## If you're doing mostly I/O you can have more processes,
## but if mostly spending CPU, try to keep it close to the
## number of CPUs on your machine. If not set, the number of CPUs/cores
## available will be used.
CELERYD_CONCURRENCY = 2
# CELERYD_LOG_FILE = "celeryd.log"
CELERYD_LOG_LEVEL = "DEBUG"
# recycle worker processes after every task (keeps memory use flat)
CELERYD_MAX_TASKS_PER_CHILD = 1

#Tasks will never be sent to the queue, but executed locally instead.
CELERY_ALWAYS_EAGER = False

#===============================================================================
# EMAIL SETTINGS
#===============================================================================
# SMTP settings are shared with the application ([DEFAULT] section of the ini)
pylons_email_config = dict(config.items('DEFAULT'))

CELERY_SEND_TASK_ERROR_EMAILS = True

#List of (name, email_address) tuples for the admins that should receive error e-mails.
ADMINS = [('Administrator', pylons_email_config.get('email_to'))]

#The e-mail address this worker sends e-mails from. Default is "celery@localhost".
SERVER_EMAIL = pylons_email_config.get('error_email_from')

#The mail server to use. Default is "localhost".
MAIL_HOST = pylons_email_config.get('smtp_server')

#Username (if required) to log on to the mail server with.
MAIL_HOST_USER = pylons_email_config.get('smtp_username')

#Password (if required) to log on to the mail server with.
MAIL_HOST_PASSWORD = pylons_email_config.get('smtp_password')

MAIL_PORT = pylons_email_config.get('smtp_port')


#===============================================================================
# INSTRUCTIONS FOR RABBITMQ
#===============================================================================
# rabbitmqctl add_user rabbitmq qweqwe
# rabbitmqctl add_vhost rabbitmqhost
# rabbitmqctl set_permissions -p rabbitmqhost rabbitmq ".*" ".*" ".*"
@@ -0,0 +1,66 b'' | |||
|
1 | from pylons_app.lib.pidlock import DaemonLock, LockHeld | |
|
2 | from vcs.utils.lazy import LazyProperty | |
|
3 | from decorator import decorator | |
|
4 | import logging | |
|
5 | import os | |
|
6 | import sys | |
|
7 | import traceback | |
|
8 | from hashlib import md5 | |
|
9 | log = logging.getLogger(__name__) | |
|
10 | ||
|
class ResultWrapper(object):
    """Stand-in for a celery result object used by run_task when it falls
    back to synchronous execution: wraps an already-computed value so the
    caller can read ``.result`` either way.
    """
    def __init__(self, task):
        # `task` is the task's return value here, not the task callable
        self.task = task

    @LazyProperty
    def result(self):
        return self.task
|
18 | ||
|
def run_task(task, *args, **kwargs):
    """Start *task* asynchronously through celery; when the broker cannot
    be reached, fall back to running it synchronously in-process.

    :param task: a celery task object (has ``.delay`` and is callable)
    :returns: the celery result object when queued, otherwise a
        ResultWrapper around the synchronous return value
    """
    try:
        t = task.delay(*args, **kwargs)
        log.info('running task %s', t.task_id)
        return t
    except Exception as e:
        # errno 111 is ECONNREFUSED, i.e. the broker is down -- expected,
        # so only log at debug level.  Not every exception carries an
        # errno attribute (the original `e.errno` raised AttributeError
        # for most errors), so read it defensively.
        if getattr(e, 'errno', None) == 111:
            log.debug('Unable to connect. Sync execution')
        else:
            log.error(traceback.format_exc())
    # pure sync version
    return ResultWrapper(task(*args, **kwargs))
|
32 | ||
|
33 | ||
|
class LockTask(object):
    """Decorator factory that serializes task execution: only one run with
    a given label + argument combination may be active at a time, guarded
    by a file-based DaemonLock.

    Usage: ``@LockTask('task_name')`` above the task function.  The lock
    key is an md5 of the label given here plus all call arguments.
    """

    def __init__(self, func):
        # `func` is the label used to build the lock key, not the callable
        self.func = func

    def __call__(self, func):
        # decorator() preserves the wrapped function's signature for celery
        return decorator(self.__wrapper, func)

    def __wrapper(self, func, *fargs, **fkwargs):
        params = []
        params.extend(fargs)
        params.extend(fkwargs.values())
        lockkey = 'task_%s' % \
            md5(str(self.func) + '-' + '-'.join(map(str, params))).hexdigest()
        log.info('running task with lockkey %s', lockkey)
        try:
            l = DaemonLock(lockkey)
            try:
                return func(*fargs, **fkwargs)
            finally:
                # the original called release() *after* `return`, so the
                # lock was never freed; finally guarantees release even
                # when the task raises
                l.release()
        except LockHeld:
            log.info('LockHeld')
            return 'Task with key %s already running' % lockkey
|
57 | ||
|
58 | ||
|
59 | ||
|
60 | ||
|
61 | ||
|
62 | ||
|
63 | ||
|
64 | ||
|
65 | ||
|
66 |
@@ -0,0 +1,270 b'' | |||
|
1 | from celery.decorators import task | |
|
2 | from celery.task.sets import subtask | |
|
3 | from celeryconfig import PYLONS_CONFIG as config | |
|
4 | from pylons.i18n.translation import _ | |
|
5 | from pylons_app.lib.celerylib import run_task, LockTask | |
|
6 | from pylons_app.lib.helpers import person | |
|
7 | from pylons_app.lib.smtp_mailer import SmtpMailer | |
|
8 | from pylons_app.lib.utils import OrderedDict | |
|
9 | from operator import itemgetter | |
|
10 | from vcs.backends.hg import MercurialRepository | |
|
11 | from time import mktime | |
|
12 | import traceback | |
|
13 | import json | |
|
14 | ||
|
15 | __all__ = ['whoosh_index', 'get_commits_stats', | |
|
16 | 'reset_user_password', 'send_email'] | |
|
17 | ||
|
def get_session():
    """Build a fresh scoped sqlalchemy session bound to the engine
    configured in the 'app:main' section of the Pylons ini file."""
    from sqlalchemy import engine_from_config
    from sqlalchemy.orm import sessionmaker, scoped_session
    db_conf = dict(config.items('app:main'))
    db_engine = engine_from_config(db_conf, 'sqlalchemy.db1.')
    return scoped_session(sessionmaker(bind=db_engine))
|
24 | ||
|
def get_hg_settings():
    """Load all HgAppSettings rows into a dict keyed by
    ``'hg_app_' + app_settings_name``.

    :raises Exception: when no settings rows exist
    """
    from pylons_app.model.db import HgAppSettings
    # bind the session *before* the try block: in the original, a failing
    # get_session() left `sa` unbound and the finally clause raised
    # NameError, masking the real error
    sa = get_session()
    try:
        ret = sa.query(HgAppSettings).all()
    finally:
        sa.remove()

    if not ret:
        raise Exception('Could not get application settings !')
    settings = {}
    for each in ret:
        settings['hg_app_' + each.app_settings_name] = each.app_settings_value

    return settings
|
40 | ||
|
def get_hg_ui_settings():
    """Load all HgAppUi rows into a dict keyed by
    ``'<ui_section>_<normalized ui_key>'``.

    Key normalization: the key '/' becomes 'root_path' and dots become
    underscores.  For rows in the 'hooks' section the active flag is
    stored instead of the value.

    :raises Exception: when no ui rows exist
    """
    from pylons_app.model.db import HgAppUi
    # bind the session before the try block so a failing get_session()
    # cannot trigger a NameError in the finally clause (original bug)
    sa = get_session()
    try:
        ret = sa.query(HgAppUi).all()
    finally:
        sa.remove()

    if not ret:
        raise Exception('Could not get application ui settings !')

    settings = {}
    for each in ret:
        k = each.ui_key
        v = each.ui_value
        if k == '/':
            k = 'root_path'

        # replace() is a no-op when there is no dot; the original's
        # explicit find('.') guard was redundant
        k = k.replace('.', '_')

        if each.ui_section == 'hooks':
            v = each.ui_active

        settings[each.ui_section + '_' + k] = v

    return settings
|
67 | ||
|
@task
def whoosh_index(repo_location, full_index):
    """Celery task: (re)build the whoosh full-text index for the
    repositories under *repo_location*.

    Only one indexer may run at a time; a DaemonLock guards the run and
    'LockHeld' is returned when another process already holds it.

    :param full_index: when True, rebuild the index from scratch
    :returns: 'Done' on success, 'LockHeld' when another indexer is active
    """
    log = whoosh_index.get_logger()
    from pylons_app.lib.pidlock import DaemonLock
    from pylons_app.lib.indexers.daemon import WhooshIndexingDaemon, LockHeld
    l = None
    try:
        l = DaemonLock()
        WhooshIndexingDaemon(repo_location=repo_location)\
            .run(full_index=full_index)
        return 'Done'
    except LockHeld:
        log.info('LockHeld')
        return 'LockHeld'
    finally:
        # release only a lock we actually acquired (l stays None when
        # DaemonLock() itself raised LockHeld); the original leaked the
        # lock whenever run() raised
        if l is not None:
            l.release()
|
82 | ||
|
83 | ||
|
@task
@LockTask('get_commits_stats')
def get_commits_stats(repo_name, ts_min_y, ts_max_y):
    """Celery task: incrementally aggregate commit statistics for a repo.

    Resumes from the revision stored in the Statistics row, parses up to
    ``parse_limit`` new changesets, merges them into the per-author and
    per-day JSON aggregates, persists the row, then re-schedules itself
    (via run_task at the bottom) to continue with the next batch.

    :param repo_name: repository name relative to the configured root path
    :param ts_min_y: lower timestamp bound used when date limiting is on
    :param ts_max_y: upper timestamp bound used when date limiting is on
    :returns: True on success or nothing-to-do, False when the commit fails
    """
    # strip double quotes so author labels are safe to embed in JS
    author_key_cleaner = lambda k: person(k).replace('"', "") #for js data compatibility

    from pylons_app.model.db import Statistics, Repository
    log = get_commits_stats.get_logger()
    commits_by_day_author_aggregate = {}
    commits_by_day_aggregate = {}
    repos_path = get_hg_ui_settings()['paths_root_path'].replace('*', '')
    repo = MercurialRepository(repos_path + repo_name)

    skip_date_limit = True  # date-range filtering currently disabled
    parse_limit = 350 #limit for single task changeset parsing
    last_rev = 0
    last_cs = None
    timegetter = itemgetter('time')

    sa = get_session()

    dbrepo = sa.query(Repository)\
        .filter(Repository.repo_name == repo_name).scalar()
    cur_stats = sa.query(Statistics)\
        .filter(Statistics.repository == dbrepo).scalar()
    if cur_stats:
        last_rev = cur_stats.stat_on_revision

    if last_rev == repo.revisions[-1]:
        #pass silently without any work
        return True

    if cur_stats:
        # resume from the previously stored JSON aggregates
        commits_by_day_aggregate = OrderedDict(
                                       json.loads(
                                        cur_stats.commit_activity_combined))
        commits_by_day_author_aggregate = json.loads(cur_stats.commit_activity)

    for cnt, rev in enumerate(repo.revisions[last_rev:]):
        last_cs = cs = repo.get_changeset(rev)
        # truncate the changeset date to midnight (y-m-d, zeroed time
        # fields) and use the resulting unix timestamp as the day bucket
        k = '%s-%s-%s' % (cs.date.timetuple()[0], cs.date.timetuple()[1],
                          cs.date.timetuple()[2])
        timetupple = [int(x) for x in k.split('-')]
        timetupple.extend([0 for _ in xrange(6)])
        k = mktime(timetupple)
        if commits_by_day_author_aggregate.has_key(author_key_cleaner(cs.author)):
            try:
                # locate this day in the author's existing data series
                l = [timegetter(x) for x in commits_by_day_author_aggregate\
                        [author_key_cleaner(cs.author)]['data']]
                time_pos = l.index(k)
            except ValueError:
                time_pos = False

            if time_pos >= 0 and time_pos is not False:
                # day already present for this author: bump the counters
                datadict = commits_by_day_author_aggregate\
                    [author_key_cleaner(cs.author)]['data'][time_pos]

                datadict["commits"] += 1
                datadict["added"] += len(cs.added)
                datadict["changed"] += len(cs.changed)
                datadict["removed"] += len(cs.removed)

            else:
                # known author, new day: append a fresh data point
                if k >= ts_min_y and k <= ts_max_y or skip_date_limit:

                    datadict = {"time":k,
                                "commits":1,
                                "added":len(cs.added),
                                "changed":len(cs.changed),
                                "removed":len(cs.removed),
                                }
                    commits_by_day_author_aggregate\
                        [author_key_cleaner(cs.author)]['data'].append(datadict)

        else:
            # first changeset seen for this author: create the series
            if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
                commits_by_day_author_aggregate[author_key_cleaner(cs.author)] = {
                                    "label":author_key_cleaner(cs.author),
                                    "data":[{"time":k,
                                             "commits":1,
                                             "added":len(cs.added),
                                             "changed":len(cs.changed),
                                             "removed":len(cs.removed),
                                             }],
                                    "schema":["commits"],
                                    }

        #gather all data by day
        if commits_by_day_aggregate.has_key(k):
            commits_by_day_aggregate[k] += 1
        else:
            commits_by_day_aggregate[k] = 1

        if cnt >= parse_limit:
            #don't fetch to much data since we can freeze application
            break

    # overall activity sorted by day, as [timestamp, commit count] pairs
    overview_data = []
    for k, v in commits_by_day_aggregate.items():
        overview_data.append([k, v])
    overview_data = sorted(overview_data, key=itemgetter(0))

    if not commits_by_day_author_aggregate:
        # nothing parsed: seed with the repo contact so the chart always
        # has at least one series
        commits_by_day_author_aggregate[author_key_cleaner(repo.contact)] = {
            "label":author_key_cleaner(repo.contact),
            "data":[0, 1],
            "schema":["commits"],
        }

    stats = cur_stats if cur_stats else Statistics()
    stats.commit_activity = json.dumps(commits_by_day_author_aggregate)
    stats.commit_activity_combined = json.dumps(overview_data)
    stats.repository = dbrepo
    stats.stat_on_revision = last_cs.revision
    stats.languages = json.dumps({'_TOTAL_':0, '':0})

    try:
        sa.add(stats)
        sa.commit()
    except:
        log.error(traceback.format_exc())
        sa.rollback()
        return False

    # schedule the next batch; the LockTask decorator prevents overlap
    run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y)

    return True
|
214 | ||
|
@task
def reset_user_password(user_email):
    """Celery task: generate a new password for the user identified by
    *user_email*, store its hash, and email the cleartext to the user.

    All failures are logged and swallowed; the task always returns True
    so celery does not retry it.
    """
    log = reset_user_password.get_logger()
    from pylons_app.lib import auth
    from pylons_app.model.db import User

    try:
        # pre-bind names used below: the original left `sa` and
        # `new_passwd` unbound when get_session()/the query raised,
        # turning the recovery paths into NameErrors
        new_passwd = None
        sa = get_session()
        try:
            user = sa.query(User).filter(User.email == user_email).scalar()
            new_passwd = auth.PasswordGenerator().gen_password(8,
                             auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
            if user:
                user.password = auth.get_crypt_password(new_passwd)
                sa.add(user)
                sa.commit()
                log.info('change password for %s', user_email)
            if new_passwd is None:
                raise Exception('unable to generate new password')

        except Exception:
            log.error(traceback.format_exc())
            sa.rollback()

        run_task(send_email, user_email,
                 "Your new hg-app password",
                 'Your new hg-app password:%s' % (new_passwd))
        log.info('send new password mail to %s', user_email)

    except Exception:
        log.error('Failed to update user password')
        log.error(traceback.format_exc())
    return True
|
249 | ||
|
@task
def send_email(recipients, subject, body):
    """Celery task: deliver *body* to *recipients* using the SMTP settings
    from the [DEFAULT] section of the Pylons ini file.

    :param recipients: a single address or a list of addresses
    :returns: True on success, False when sending failed (failure logged)
    """
    log = send_email.get_logger()
    cfg = dict(config.items('DEFAULT'))

    mail_from = cfg.get('app_email_from')
    smtp_user = cfg.get('smtp_username')
    smtp_passwd = cfg.get('smtp_password')
    smtp_server = cfg.get('smtp_server')
    smtp_port = cfg.get('smtp_port')
    smtp_tls = cfg.get('smtp_use_tls')
    smtp_ssl = False  # SMTP-over-SSL is not configurable from the ini here

    try:
        mailer = SmtpMailer(mail_from, smtp_user, smtp_passwd, smtp_server,
                            smtp_port, smtp_ssl, smtp_tls)
        mailer.send(recipients, subject, body)
    except:
        log.error('Mail sending failed')
        log.error(traceback.format_exc())
        return False
    return True
@@ -0,0 +1,118 b'' | |||
|
1 | import logging | |
|
2 | import smtplib | |
|
3 | import mimetypes | |
|
4 | from email.mime.multipart import MIMEMultipart | |
|
5 | from email.mime.image import MIMEImage | |
|
6 | from email.mime.audio import MIMEAudio | |
|
7 | from email.mime.base import MIMEBase | |
|
8 | from email.mime.text import MIMEText | |
|
9 | from email.utils import formatdate | |
|
10 | from email import encoders | |
|
11 | ||
|
class SmtpMailer(object):
    """simple smtp mailer class

    mailer = SmtpMailer(mail_from, user, passwd, mail_server, mail_port, ssl, tls)
    mailer.send(recipients, subject, body, attachment_files)

    :param recipients: might be a list of string or single string
    :param attachment_files: a dict of {filename:location};
        the mimetype is guessed and the file attached
    """

    def __init__(self, mail_from, user, passwd, mail_server,
                 mail_port=None, ssl=False, tls=False):

        self.mail_from = mail_from
        self.mail_server = mail_server
        self.mail_port = mail_port
        self.user = user
        self.passwd = passwd
        self.ssl = ssl      # connect with SMTP_SSL when True
        self.tls = tls      # issue STARTTLS after connecting when True
        self.debug = False  # set True to enable smtplib protocol dump

    def send(self, recipients=None, subject='', body='', attachment_files=None):
        """Build a MIME message and deliver it over SMTP.

        :param recipients: single address or list of addresses
        :param attachment_files: {filename: path-or-fileobj} dict

        Mutable default arguments ([] / {}) were replaced with None
        sentinels so state cannot leak between calls.
        """
        if recipients is None:
            recipients = []
        if isinstance(recipients, basestring):
            recipients = [recipients]
        if self.ssl:
            smtp_serv = smtplib.SMTP_SSL(self.mail_server, self.mail_port)
        else:
            smtp_serv = smtplib.SMTP(self.mail_server, self.mail_port)

        if self.tls:
            smtp_serv.starttls()

        if self.debug:
            smtp_serv.set_debuglevel(1)

        smtp_serv.ehlo("mailer")

        #if server requires authorization you must provide login and password
        smtp_serv.login(self.user, self.passwd)

        date_ = formatdate(localtime=True)
        msg = MIMEMultipart()
        msg['From'] = self.mail_from
        msg['To'] = ','.join(recipients)
        msg['Date'] = date_
        msg['Subject'] = subject
        msg.preamble = 'You will not see this in a MIME-aware mail reader.\n'

        msg.attach(MIMEText(body))

        if attachment_files:
            self.__attach_files(msg, attachment_files)

        smtp_serv.sendmail(self.mail_from, recipients, msg.as_string())
        logging.info('MAIL SEND TO: %s' % recipients)
        smtp_serv.quit()

    def __attach_files(self, msg, attachment_files):
        """Attach every {filename: location} entry to *msg*, guessing the
        mimetype from the filename (renamed from the misspelled
        __atach_files; name-mangled private, only called from send())."""
        if not isinstance(attachment_files, dict):
            # original message also lacked a space between 'be' and 'a'
            raise Exception('Attachment files should be '
                            'a dict in format {"filename":"filepath"}')
        for f_name, msg_file in attachment_files.items():
            ctype, encoding = mimetypes.guess_type(f_name)
            logging.info("guessing file %s type based on %s", ctype, f_name)
            if ctype is None or encoding is not None:
                # No guess could be made, or the file is encoded
                # (compressed), so use a generic bag-of-bits type.
                ctype = 'application/octet-stream'
            maintype, subtype = ctype.split('/', 1)
            if maintype == 'text':
                # Note: we should handle calculating the charset
                file_part = MIMEText(self.get_content(msg_file),
                                     _subtype=subtype)
            elif maintype == 'image':
                file_part = MIMEImage(self.get_content(msg_file),
                                      _subtype=subtype)
            elif maintype == 'audio':
                file_part = MIMEAudio(self.get_content(msg_file),
                                      _subtype=subtype)
            else:
                file_part = MIMEBase(maintype, subtype)
                file_part.set_payload(self.get_content(msg_file))
                # Encode the payload using Base64.
                # BUG FIX: the original called encode_base64(msg), which
                # stamped the whole multipart message instead of encoding
                # this attachment's payload.
                encoders.encode_base64(file_part)
            # Set the filename parameter
            file_part.add_header('Content-Disposition', 'attachment',
                                 filename=f_name)
            file_part.add_header('Content-Type', ctype, name=f_name)
            msg.attach(file_part)

    def get_content(self, msg_file):
        """Return the raw content of *msg_file*: open and read it when
        given a path string, otherwise rewind the (presumably open) file
        object and read it."""
        if isinstance(msg_file, str):
            return open(msg_file, "rb").read()
        else:
            #just for safe seek to 0
            msg_file.seek(0)
            return msg_file.read()
@@ -0,0 +1,267 b'' | |||
|
1 | """caching_query.py | |
|
2 | ||
|
3 | Represent persistence structures which allow the usage of | |
|
4 | Beaker caching with SQLAlchemy. | |
|
5 | ||
|
6 | The three new concepts introduced here are: | |
|
7 | ||
|
8 | * CachingQuery - a Query subclass that caches and | |
|
9 | retrieves results in/from Beaker. | |
|
10 | * FromCache - a query option that establishes caching | |
|
11 | parameters on a Query | |
|
12 | * RelationshipCache - a variant of FromCache which is specific | |
|
13 | to a query invoked during a lazy load. | |
|
14 | * _params_from_query - extracts value parameters from | |
|
15 | a Query. | |
|
16 | ||
|
17 | The rest of what's here are standard SQLAlchemy and | |
|
18 | Beaker constructs. | |
|
19 | ||
|
20 | """ | |
|
21 | from sqlalchemy.orm.interfaces import MapperOption | |
|
22 | from sqlalchemy.orm.query import Query | |
|
23 | from sqlalchemy.sql import visitors | |
|
24 | ||
|
class CachingQuery(Query):
    """A Query subclass which optionally loads full results from a Beaker
    cache region.

    The CachingQuery stores additional state that allows it to consult
    a Beaker cache before accessing the database:

    * A "region", which is a cache region argument passed to a
      Beaker CacheManager, specifies a particular cache configuration
      (including backend implementation, expiration times, etc.)
    * A "namespace", which is a qualifying name that identifies a
      group of keys within the cache.  A query that filters on a name
      might use the name "by_name", a query that filters on a date range
      to a joined table might use the name "related_date_range".

    When the above state is present, a Beaker cache is retrieved.

    The "namespace" name is first concatenated with
    a string composed of the individual entities and columns the Query
    requests, i.e. such as ``Query(User.id, User.name)``.

    The Beaker cache is then loaded from the cache manager based
    on the region and composed namespace.  The key within the cache
    itself is then constructed against the bind parameters specified
    by this query, which are usually literals defined in the
    WHERE clause.

    The FromCache and RelationshipCache mapper options below represent
    the "public" method of configuring this state upon the CachingQuery.

    """

    def __init__(self, manager, *args, **kw):
        # the Beaker CacheManager from which cache regions are obtained
        self.cache_manager = manager
        Query.__init__(self, *args, **kw)

    def __iter__(self):
        """override __iter__ to pull results from Beaker
        if particular attributes have been configured.

        Note that this approach does *not* detach the loaded objects from
        the current session. If the cache backend is an in-process cache
        (like "memory") and lives beyond the scope of the current session's
        transaction, those objects may be expired. The method here can be
        modified to first expunge() each loaded item from the current
        session before returning the list of items, so that the items
        in the cache are not the same ones in the current Session.

        """
        # _cache_parameters is attached by FromCache / RelationshipCache
        # via _set_cache_parameters(); without it behave as a plain Query
        if hasattr(self, '_cache_parameters'):
            return self.get_value(createfunc=lambda: list(Query.__iter__(self)))
        else:
            return Query.__iter__(self)

    def invalidate(self):
        """Invalidate the value represented by this Query."""

        cache, cache_key = _get_cache_parameters(self)
        cache.remove(cache_key)

    def get_value(self, merge=True, createfunc=None):
        """Return the value from the cache for this query.

        Raise KeyError if no value present and no
        createfunc specified.

        """
        cache, cache_key = _get_cache_parameters(self)
        ret = cache.get_value(cache_key, createfunc=createfunc)
        if merge:
            # merge cached instances into the current Session; load=False
            # skips re-querying the database for their state
            ret = self.merge_result(ret, load=False)
        return ret

    def set_value(self, value):
        """Set the value in the cache for this query."""

        cache, cache_key = _get_cache_parameters(self)
        cache.put(cache_key, value)
103 | ||
|
def query_callable(manager):
    """Return a Query factory producing CachingQuery instances bound to
    the given Beaker cache *manager* (for ``sessionmaker(query_cls=...)``)."""
    def query(*args, **kwargs):
        return CachingQuery(manager, *args, **kwargs)
    return query
|
108 | ||
|
def _get_cache_parameters(query):
    """For a query with cache_region and cache_namespace configured,
    return the corresponding Cache instance and cache key, based
    on this query's current criterion and parameter values.
    """
    try:
        region, namespace, cache_key = query._cache_parameters
    except AttributeError:
        raise ValueError("This Query does not have caching parameters configured.")

    namespace = _namespace_from_query(namespace, query)

    if cache_key is None:
        # cache key - the value arguments from this query's parameters
        cache_key = " ".join(str(value) for value in _params_from_query(query))

    # get cache
    cache = query.cache_manager.get_cache_region(namespace, region)

    # optional - hash the cache_key too for consistent length
    # import uuid
    # cache_key= str(uuid.uuid5(uuid.NAMESPACE_DNS, cache_key))

    return cache, cache_key
|
135 | ||
|
136 | def _namespace_from_query(namespace, query): | |
|
137 | # cache namespace - the token handed in by the | |
|
138 | # option + class we're querying against | |
|
139 | namespace = " ".join([namespace] + [str(x) for x in query._entities]) | |
|
140 | ||
|
141 | # memcached wants this | |
|
142 | namespace = namespace.replace(' ', '_') | |
|
143 | ||
|
144 | return namespace | |
|
145 | ||
|
146 | def _set_cache_parameters(query, region, namespace, cache_key): | |
|
147 | ||
|
148 | if hasattr(query, '_cache_parameters'): | |
|
149 | region, namespace, cache_key = query._cache_parameters | |
|
150 | raise ValueError("This query is already configured " | |
|
151 | "for region %r namespace %r" % | |
|
152 | (region, namespace) | |
|
153 | ) | |
|
154 | query._cache_parameters = region, namespace, cache_key | |
|
155 | ||
|
class FromCache(MapperOption):
    """Query option that makes a Query load its results from a Beaker cache."""

    # applies to the immediate query only, not the lazy loads it spawns
    propagate_to_loaders = False

    def __init__(self, region, namespace, cache_key=None):
        """Construct a new FromCache.

        :param region: the cache region, as configured in the Beaker
            CacheManager.

        :param namespace: the cache namespace; a name uniquely
            describing the target Query's lexical structure.

        :param cache_key: optional explicit string key for the query.
            Useful when the query has a huge amount of parameters (such
            as when using in_()) which correspond more simply to some
            other identifier.
        """
        self.region = region
        self.namespace = namespace
        self.cache_key = cache_key

    def process_query(self, query):
        """Process a Query during normal loading operation."""
        _set_cache_parameters(query, self.region, self.namespace, self.cache_key)
186 | ||
|
class RelationshipCache(MapperOption):
    """Specifies that a Query as called within a "lazy load"
    should load results from a cache."""

    # unlike FromCache, this option must travel into the lazy loaders
    # spawned by the query
    propagate_to_loaders = True

    def __init__(self, region, namespace, attribute):
        """Construct a new RelationshipCache.

        :param region: the cache region. Should be a
        region configured in the Beaker CacheManager.

        :param namespace: the cache namespace. Should
        be a name uniquely describing the target Query's
        lexical structure.

        :param attribute: A Class.attribute which
        indicates a particular class relationship() whose
        lazy loader should be pulled from the cache.

        """
        self.region = region
        self.namespace = namespace
        # map of (mapped class, relationship key) -> option; more pairs
        # may be merged in via and_() for efficient chained lookup
        self._relationship_options = {
            (attribute.property.parent.class_, attribute.property.key) : self
        }

    def process_query_conditionally(self, query):
        """Process a Query that is used within a lazy loader.

        (the process_query_conditionally() method is a SQLAlchemy
        hook invoked only within lazyload.)

        """
        if query._current_path:
            # NOTE(review): relies on SQLAlchemy's internal _current_path
            # layout -- the last two entries identify the parent mapper
            # and the relationship key being loaded; verify on upgrades
            mapper, key = query._current_path[-2:]

            # walk the MRO so an option registered on a base class also
            # matches queries for its subclasses
            for cls in mapper.class_.__mro__:
                if (cls, key) in self._relationship_options:
                    relationship_option = self._relationship_options[(cls, key)]
                    _set_cache_parameters(
                            query,
                            relationship_option.region,
                            relationship_option.namespace,
                            None)

    def and_(self, option):
        """Chain another RelationshipCache option to this one.

        While many RelationshipCache objects can be specified on a single
        Query separately, chaining them together allows for a more efficient
        lookup during load.

        """
        self._relationship_options.update(option._relationship_options)
        return self
|
243 | ||
|
244 | ||
|
245 | def _params_from_query(query): | |
|
246 | """Pull the bind parameter values from a query. | |
|
247 | ||
|
248 | This takes into account any scalar attribute bindparam set up. | |
|
249 | ||
|
250 | E.g. params_from_query(query.filter(Cls.foo==5).filter(Cls.bar==7))) | |
|
251 | would return [5, 7]. | |
|
252 | ||
|
253 | """ | |
|
254 | v = [] | |
|
255 | def visit_bindparam(bind): | |
|
256 | value = query._params.get(bind.key, bind.value) | |
|
257 | ||
|
258 | # lazyloader may dig a callable in here, intended | |
|
259 | # to late-evaluate params after autoflush is called. | |
|
260 | # convert to a scalar value. | |
|
261 | if callable(value): | |
|
262 | value = value() | |
|
263 | ||
|
264 | v.append(value) | |
|
265 | if query._criterion is not None: | |
|
266 | visitors.traverse(query._criterion, {}, {'bindparam':visit_bindparam}) | |
|
267 | return v |
@@ -0,0 +1,54 b'' | |||
|
1 | ## -*- coding: utf-8 -*- | |
|
2 | <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> | |
|
3 | <html xmlns="http://www.w3.org/1999/xhtml" id="mainhtml"> | |
|
4 | <head> | |
|
5 | <title>${_('Reset You password to hg-app')}</title> | |
|
6 | <meta http-equiv="Content-Type" content="text/html;charset=utf-8" /> | |
|
7 | <link rel="icon" href="/images/hgicon.png" type="image/png" /> | |
|
8 | <meta name="robots" content="index, nofollow"/> | |
|
9 | ||
|
10 | <!-- stylesheets --> | |
|
11 | <link rel="stylesheet" type="text/css" href="/css/reset.css" /> | |
|
12 | <link rel="stylesheet" type="text/css" href="/css/style.css" media="screen" /> | |
|
13 | <link id="color" rel="stylesheet" type="text/css" href="/css/colors/blue.css" /> | |
|
14 | ||
|
15 | <!-- scripts --> | |
|
16 | ||
|
17 | </head> | |
|
18 | <body> | |
|
19 | <div id="register"> | |
|
20 | ||
|
21 | <div class="title"> | |
|
22 | <h5>${_('Reset You password to hg-app')}</h5> | |
|
23 | <div class="corner tl"></div> | |
|
24 | <div class="corner tr"></div> | |
|
25 | </div> | |
|
26 | <div class="inner"> | |
|
27 | ${h.form(url('password_reset'))} | |
|
28 | <div class="form"> | |
|
29 | <!-- fields --> | |
|
30 | <div class="fields"> | |
|
31 | ||
|
32 | <div class="field"> | |
|
33 | <div class="label"> | |
|
34 | <label for="email">${_('Email address')}:</label> | |
|
35 | </div> | |
|
36 | <div class="input"> | |
|
37 | ${h.text('email')} | |
|
38 | </div> | |
|
39 | </div> | |
|
40 | ||
|
41 | <div class="buttons"> | |
|
42 | <div class="nohighlight"> | |
|
43 | ${h.submit('send','Reset my password',class_="ui-button ui-widget ui-state-default ui-corner-all")} | |
|
44 | <div class="activation_msg">${_('Your new password will be send to matching email address')}</div> | |
|
45 | </div> | |
|
46 | </div> | |
|
47 | </div> | |
|
48 | </div> | |
|
49 | ${h.end_form()} | |
|
50 | </div> | |
|
51 | </div> | |
|
52 | </body> | |
|
53 | </html> | |
|
54 |
@@ -11,9 +11,12 b' Fully customizable, with authentication,' | |||
|
11 | 11 | - full permissions per project read/write/admin access even on mercurial request |
|
12 | 12 | - mako templates let's you cusmotize look and feel of application. |
|
13 | 13 | - diffs annotations and source code all colored by pygments. |
|
14 | - mercurial branch graph and yui-flot powered graphs | |
|
14 | - mercurial branch graph and yui-flot powered graphs with zooming | |
|
15 | 15 | - admin interface for performing user/permission managments as well as repository |
|
16 | 16 | managment. |
|
17 | - full text search of source codes with indexing daemons using whoosh | |
|
18 | (no external search servers required all in one application) | |
|
19 | - async tasks for speed and performance using celery (works without them too) | |
|
17 | 20 | - Additional settings for mercurial web, (hooks editable from admin |
|
18 | 21 | panel !) also manage paths, archive, remote messages |
|
19 | 22 | - backup scripts can do backup of whole app and send it over scp to desired location |
@@ -27,11 +30,11 b' Fully customizable, with authentication,' | |||
|
27 | 30 | **Incoming** |
|
28 | 31 | |
|
29 | 32 | - code review based on hg-review (when it's stable) |
|
30 | - git support (when vcs can handle it) | |
|
31 | - full text search of source codes with indexing daemons using whoosh | |
|
32 | (no external search servers required all in one application) | |
|
33 | - manage hg ui() per repo, add hooks settings, per repo, and not globally | |
|
34 | - other cools stuff that i can figure out | |
|
33 | - git support (when vcs can handle it - almost there !) | |
|
34 | - commit based wikis | |
|
35 | - in server forks | |
|
36 | - clonning from remote repositories into hg-app | |
|
37 | - other cools stuff that i can figure out (or You can help me figure out) | |
|
35 | 38 | |
|
36 | 39 | .. note:: |
|
37 | 40 | This software is still in beta mode. |
@@ -47,10 +50,10 b' Installation' | |||
|
47 | 50 | |
|
48 | 51 | - create new virtualenv and activate it - highly recommend that you use separate |
|
49 | 52 | virtual-env for whole application |
|
50 |
- download hg app from default |
|
|
53 | - download hg app from default branch from bitbucket and run | |
|
51 | 54 | 'python setup.py install' this will install all required dependencies needed |
|
52 | 55 | - run paster setup-app production.ini it should create all needed tables |
|
53 | and an admin account. | |
|
56 | and an admin account make sure You specify correct path to repositories. | |
|
54 | 57 | - remember that the given path for mercurial repositories must be write |
|
55 | 58 | accessible for the application |
|
56 | 59 | - run paster serve development.ini - or you can use manage-hg_app script. |
@@ -58,4 +61,9 b' Installation' | |||
|
58 | 61 | - use admin account you created to login. |
|
59 | 62 | - default permissions on each repository is read, and owner is admin. So remember |
|
60 | 63 | to update these. |
|
64 | - in order to use full power of async tasks, You must install message broker | |
|
65 | preferrably rabbitmq and start celeryd daemon. The app should gain some speed | |
|
66 | than. For installation instructions | |
|
67 | You can visit: http://ask.github.com/celery/getting-started/index.html. All | |
|
68 | needed configs are inside hg-app ie. celeryconfig.py | |
|
61 | 69 | No newline at end of file |
@@ -1,32 +1,37 b'' | |||
|
1 | 1 | ################################################################################ |
|
2 | 2 | ################################################################################ |
|
3 |
# |
|
|
3 | # hg-app - Pylons environment configuration # | |
|
4 | 4 | # # |
|
5 | 5 | # The %(here)s variable will be replaced with the parent directory of this file# |
|
6 | 6 | ################################################################################ |
|
7 | 7 | |
|
8 | 8 | [DEFAULT] |
|
9 | 9 | debug = true |
|
10 | ############################################ | |
|
11 | ## Uncomment and replace with the address ## | |
|
12 | ## which should receive any error reports ## | |
|
13 | ############################################ | |
|
10 | ################################################################################ | |
|
11 | ## Uncomment and replace with the address which should receive ## | |
|
12 | ## any error reports after application crash ## | |
|
13 | ## Additionally those settings will be used by hg-app mailing system ## | |
|
14 | ################################################################################ | |
|
14 | 15 | #email_to = admin@localhost |
|
16 | #error_email_from = paste_error@localhost | |
|
17 | #app_email_from = hg-app-noreply@localhost | |
|
18 | #error_message = | |
|
19 | ||
|
15 | 20 | #smtp_server = mail.server.com |
|
16 | #error_email_from = paste_error@localhost | |
|
17 | 21 | #smtp_username = |
|
18 | 22 |
#smtp_password = |
|
19 | #error_message = 'mercurial crash !' | |
|
23 | #smtp_port = | |
|
24 | #smtp_use_tls = | |
|
20 | 25 | |
|
21 | 26 | [server:main] |
|
22 | 27 | ##nr of threads to spawn |
|
23 | 28 | threadpool_workers = 5 |
|
24 | 29 | |
|
25 | 30 | ##max request before |
|
26 |
threadpool_max_requests = |
|
|
31 | threadpool_max_requests = 6 | |
|
27 | 32 | |
|
28 | 33 | ##option to use threads of process |
|
29 |
use_threadpool = |
|
|
34 | use_threadpool = false | |
|
30 | 35 | |
|
31 | 36 | use = egg:Paste#http |
|
32 | 37 | host = 127.0.0.1 |
@@ -56,7 +61,7 b' beaker.cache.super_short_term.expire=10' | |||
|
56 | 61 | ### BEAKER SESSION #### |
|
57 | 62 | #################################### |
|
58 | 63 | ## Type of storage used for the session, current types are |
|
59 |
## |
|
|
64 | ## "dbm", "file", "memcached", "database", and "memory". | |
|
60 | 65 | ## The storage uses the Container API |
|
61 | 66 | ##that is also used by the cache system. |
|
62 | 67 | beaker.session.type = file |
@@ -1,28 +1,33 b'' | |||
|
1 | 1 | ################################################################################ |
|
2 | 2 | ################################################################################ |
|
3 |
# |
|
|
3 | # hg-app - Pylons environment configuration # | |
|
4 | 4 | # # |
|
5 | 5 | # The %(here)s variable will be replaced with the parent directory of this file# |
|
6 | 6 | ################################################################################ |
|
7 | 7 | |
|
8 | 8 | [DEFAULT] |
|
9 | 9 | debug = true |
|
10 | ############################################ | |
|
11 | ## Uncomment and replace with the address ## | |
|
12 | ## which should receive any error reports ## | |
|
13 | ############################################ | |
|
10 | ################################################################################ | |
|
11 | ## Uncomment and replace with the address which should receive ## | |
|
12 | ## any error reports after application crash ## | |
|
13 | ## Additionally those settings will be used by hg-app mailing system ## | |
|
14 | ################################################################################ | |
|
14 | 15 | #email_to = admin@localhost |
|
16 | #error_email_from = paste_error@localhost | |
|
17 | #app_email_from = hg-app-noreply@localhost | |
|
18 | #error_message = | |
|
19 | ||
|
15 | 20 | #smtp_server = mail.server.com |
|
16 | #error_email_from = paste_error@localhost | |
|
17 | 21 | #smtp_username = |
|
18 | 22 | #smtp_password = |
|
19 | #error_message = 'mercurial crash !' | |
|
23 | #smtp_port = | |
|
24 | #smtp_use_tls = false | |
|
20 | 25 | |
|
21 | 26 | [server:main] |
|
22 | 27 | ##nr of threads to spawn |
|
23 | 28 | threadpool_workers = 5 |
|
24 | 29 | |
|
25 | ##max request before | |
|
30 | ##max request before thread respawn | |
|
26 | 31 | threadpool_max_requests = 2 |
|
27 | 32 | |
|
28 | 33 | ##option to use threads of process |
@@ -20,10 +20,11 b'' | |||
|
20 | 20 | """ |
|
21 | 21 | Created on April 9, 2010 |
|
22 | 22 | Hg app, a web based mercurial repository managment based on pylons |
|
23 | versioning implementation: http://semver.org/ | |
|
23 | 24 | @author: marcink |
|
24 | 25 | """ |
|
25 | 26 | |
|
26 |
VERSION = (0, 8, |
|
|
27 | VERSION = (0, 8, 3, 'beta') | |
|
27 | 28 | |
|
28 | 29 | __version__ = '.'.join((str(each) for each in VERSION[:4])) |
|
29 | 30 |
@@ -7,16 +7,21 b'' | |||
|
7 | 7 | |
|
8 | 8 | [DEFAULT] |
|
9 | 9 | debug = true |
|
10 | ############################################ | |
|
11 | ## Uncomment and replace with the address ## | |
|
12 | ## which should receive any error reports ## | |
|
13 | ############################################ | |
|
10 | ################################################################################ | |
|
11 | ## Uncomment and replace with the address which should receive ## | |
|
12 | ## any error reports after application crash ## | |
|
13 | ## Additionally those settings will be used by hg-app mailing system ## | |
|
14 | ################################################################################ | |
|
14 | 15 | #email_to = admin@localhost |
|
16 | #error_email_from = paste_error@localhost | |
|
17 | #app_email_from = hg-app-noreply@localhost | |
|
18 | #error_message = | |
|
19 | ||
|
15 | 20 | #smtp_server = mail.server.com |
|
16 | #error_email_from = paste_error@localhost | |
|
17 | 21 | #smtp_username = |
|
18 | 22 | #smtp_password = |
|
19 | #error_message = 'hp-app crash !' | |
|
23 | #smtp_port = | |
|
24 | #smtp_use_tls = false | |
|
20 | 25 | |
|
21 | 26 | [server:main] |
|
22 | 27 | ##nr of threads to spawn |
@@ -49,7 +49,12 b' def load_environment(global_conf, app_co' | |||
|
49 | 49 | |
|
50 | 50 | #sets the c attribute access when don't existing attribute are accessed |
|
51 | 51 | config['pylons.strict_tmpl_context'] = True |
|
52 |
test = os.path.split(config['__file__'])[-1] == 'test |
|
|
52 | test = os.path.split(config['__file__'])[-1] == 'test.ini' | |
|
53 | if test: | |
|
54 | from pylons_app.lib.utils import create_test_env, create_test_index | |
|
55 | create_test_env('/tmp', config) | |
|
56 | create_test_index('/tmp/*', True) | |
|
57 | ||
|
53 | 58 | #MULTIPLE DB configs |
|
54 | 59 | # Setup the SQLAlchemy database engine |
|
55 | 60 | if config['debug'] and not test: |
@@ -110,10 +110,11 b' def make_map(config):' | |||
|
110 | 110 | #SEARCH |
|
111 | 111 | map.connect('search', '/_admin/search', controller='search') |
|
112 | 112 | |
|
113 | #LOGIN/LOGOUT | |
|
113 | #LOGIN/LOGOUT/REGISTER/SIGN IN | |
|
114 | 114 | map.connect('login_home', '/_admin/login', controller='login') |
|
115 | 115 | map.connect('logout_home', '/_admin/logout', controller='login', action='logout') |
|
116 | 116 | map.connect('register', '/_admin/register', controller='login', action='register') |
|
117 | map.connect('reset_password', '/_admin/password_reset', controller='login', action='password_reset') | |
|
117 | 118 | |
|
118 | 119 | #FEEDS |
|
119 | 120 | map.connect('rss_feed_home', '/{repo_name:.*}/feed/rss', |
@@ -147,9 +148,12 b' def make_map(config):' | |||
|
147 | 148 | map.connect('files_diff_home', '/{repo_name:.*}/diff/{f_path:.*}', |
|
148 | 149 | controller='files', action='diff', revision='tip', f_path='', |
|
149 | 150 | conditions=dict(function=check_repo)) |
|
150 | map.connect('files_raw_home', '/{repo_name:.*}/rawfile/{revision}/{f_path:.*}', | |
|
151 | map.connect('files_rawfile_home', '/{repo_name:.*}/rawfile/{revision}/{f_path:.*}', | |
|
151 | 152 | controller='files', action='rawfile', revision='tip', f_path='', |
|
152 | 153 | conditions=dict(function=check_repo)) |
|
154 | map.connect('files_raw_home', '/{repo_name:.*}/raw/{revision}/{f_path:.*}', | |
|
155 | controller='files', action='raw', revision='tip', f_path='', | |
|
156 | conditions=dict(function=check_repo)) | |
|
153 | 157 | map.connect('files_annotate_home', '/{repo_name:.*}/annotate/{revision}/{f_path:.*}', |
|
154 | 158 | controller='files', action='annotate', revision='tip', f_path='', |
|
155 | 159 | conditions=dict(function=check_repo)) |
@@ -38,6 +38,7 b' from pylons_app.model.forms import UserF' | |||
|
38 | 38 | ApplicationUiSettingsForm |
|
39 | 39 | from pylons_app.model.hg_model import HgModel |
|
40 | 40 | from pylons_app.model.user_model import UserModel |
|
41 | from pylons_app.lib.celerylib import tasks, run_task | |
|
41 | 42 | import formencode |
|
42 | 43 | import logging |
|
43 | 44 | import traceback |
@@ -102,6 +103,12 b' class SettingsController(BaseController)' | |||
|
102 | 103 | invalidate_cache('cached_repo_list') |
|
103 | 104 | h.flash(_('Repositories sucessfully rescanned'), category='success') |
|
104 | 105 | |
|
106 | if setting_id == 'whoosh': | |
|
107 | repo_location = get_hg_ui_settings()['paths_root_path'] | |
|
108 | full_index = request.POST.get('full_index', False) | |
|
109 | task = run_task(tasks.whoosh_index, repo_location, full_index) | |
|
110 | ||
|
111 | h.flash(_('Whoosh reindex task scheduled'), category='success') | |
|
105 | 112 | if setting_id == 'global': |
|
106 | 113 | |
|
107 | 114 | application_form = ApplicationSettingsForm()() |
@@ -253,7 +260,8 b' class SettingsController(BaseController)' | |||
|
253 | 260 | # url('admin_settings_my_account_update', id=ID) |
|
254 | 261 | user_model = UserModel() |
|
255 | 262 | uid = c.hg_app_user.user_id |
|
256 |
_form = UserForm(edit=True, old_data={'user_id':uid |
|
|
263 | _form = UserForm(edit=True, old_data={'user_id':uid, | |
|
264 | 'email':c.hg_app_user.email})() | |
|
257 | 265 | form_result = {} |
|
258 | 266 | try: |
|
259 | 267 | form_result = _form.to_python(dict(request.POST)) |
@@ -262,7 +270,11 b' class SettingsController(BaseController)' | |||
|
262 | 270 | category='success') |
|
263 | 271 | |
|
264 | 272 | except formencode.Invalid as errors: |
|
265 |
|
|
|
273 | c.user = self.sa.query(User).get(c.hg_app_user.user_id) | |
|
274 | c.user_repos = [] | |
|
275 | for repo in c.cached_repo_list.values(): | |
|
276 | if repo.dbrepo.user.username == c.user.username: | |
|
277 | c.user_repos.append(repo) | |
|
266 | 278 | return htmlfill.render( |
|
267 | 279 | render('admin/users/user_edit_my_account.html'), |
|
268 | 280 | defaults=errors.value, |
@@ -98,7 +98,10 b' class UsersController(BaseController):' | |||
|
98 | 98 | # method='put') |
|
99 | 99 | # url('user', id=ID) |
|
100 | 100 | user_model = UserModel() |
|
101 | _form = UserForm(edit=True, old_data={'user_id':id})() | |
|
101 | c.user = user_model.get_user(id) | |
|
102 | ||
|
103 | _form = UserForm(edit=True, old_data={'user_id':id, | |
|
104 | 'email':c.user.email})() | |
|
102 | 105 | form_result = {} |
|
103 | 106 | try: |
|
104 | 107 | form_result = _form.to_python(dict(request.POST)) |
@@ -106,7 +109,6 b' class UsersController(BaseController):' | |||
|
106 | 109 | h.flash(_('User updated succesfully'), category='success') |
|
107 | 110 | |
|
108 | 111 | except formencode.Invalid as errors: |
|
109 | c.user = user_model.get_user(id) | |
|
110 | 112 | return htmlfill.render( |
|
111 | 113 | render('admin/users/user_edit.html'), |
|
112 | 114 | defaults=errors.value, |
@@ -148,6 +150,8 b' class UsersController(BaseController):' | |||
|
148 | 150 | """GET /users/id/edit: Form to edit an existing item""" |
|
149 | 151 | # url('edit_user', id=ID) |
|
150 | 152 | c.user = self.sa.query(User).get(id) |
|
153 | if not c.user: | |
|
154 | return redirect(url('users')) | |
|
151 | 155 | if c.user.username == 'default': |
|
152 | 156 | h.flash(_("You can't edit this user since it's" |
|
153 | 157 | " crucial for entire application"), category='warning') |
@@ -45,6 +45,7 b' class FilesController(BaseController):' | |||
|
45 | 45 | 'repository.admin') |
|
46 | 46 | def __before__(self): |
|
47 | 47 | super(FilesController, self).__before__() |
|
48 | c.file_size_limit = 250 * 1024 #limit of file size to display | |
|
48 | 49 | |
|
49 | 50 | def index(self, repo_name, revision, f_path): |
|
50 | 51 | hg_model = HgModel() |
@@ -77,7 +78,6 b' class FilesController(BaseController):' | |||
|
77 | 78 | |
|
78 | 79 | c.changeset = repo.get_changeset(revision) |
|
79 | 80 | |
|
80 | ||
|
81 | 81 | c.cur_rev = c.changeset.raw_id |
|
82 | 82 | c.rev_nr = c.changeset.revision |
|
83 | 83 | c.files_list = c.changeset.get_node(f_path) |
@@ -97,6 +97,14 b' class FilesController(BaseController):' | |||
|
97 | 97 | % f_path.split('/')[-1] |
|
98 | 98 | return file_node.content |
|
99 | 99 | |
|
100 | def raw(self, repo_name, revision, f_path): | |
|
101 | hg_model = HgModel() | |
|
102 | c.repo = hg_model.get_repo(c.repo_name) | |
|
103 | file_node = c.repo.get_changeset(revision).get_node(f_path) | |
|
104 | response.content_type = 'text/plain' | |
|
105 | ||
|
106 | return file_node.content | |
|
107 | ||
|
100 | 108 | def annotate(self, repo_name, revision, f_path): |
|
101 | 109 | hg_model = HgModel() |
|
102 | 110 | c.repo = hg_model.get_repo(c.repo_name) |
@@ -28,7 +28,9 b' from pylons import request, response, se' | |||
|
28 | 28 | from pylons.controllers.util import abort, redirect |
|
29 | 29 | from pylons_app.lib.auth import AuthUser, HasPermissionAnyDecorator |
|
30 | 30 | from pylons_app.lib.base import BaseController, render |
|
31 | from pylons_app.model.forms import LoginForm, RegisterForm | |
|
31 | import pylons_app.lib.helpers as h | |
|
32 | from pylons.i18n.translation import _ | |
|
33 | from pylons_app.model.forms import LoginForm, RegisterForm, PasswordResetForm | |
|
32 | 34 | from pylons_app.model.user_model import UserModel |
|
33 | 35 | import formencode |
|
34 | 36 | import logging |
@@ -99,6 +101,8 b' class LoginController(BaseController):' | |||
|
99 | 101 | form_result = register_form.to_python(dict(request.POST)) |
|
100 | 102 | form_result['active'] = c.auto_active |
|
101 | 103 | user_model.create_registration(form_result) |
|
104 | h.flash(_('You have successfully registered into hg-app'), | |
|
105 | category='success') | |
|
102 | 106 | return redirect(url('login_home')) |
|
103 | 107 | |
|
104 | 108 | except formencode.Invalid as errors: |
@@ -111,6 +115,28 b' class LoginController(BaseController):' | |||
|
111 | 115 | |
|
112 | 116 | return render('/register.html') |
|
113 | 117 | |
|
118 | def password_reset(self): | |
|
119 | user_model = UserModel() | |
|
120 | if request.POST: | |
|
121 | ||
|
122 | password_reset_form = PasswordResetForm()() | |
|
123 | try: | |
|
124 | form_result = password_reset_form.to_python(dict(request.POST)) | |
|
125 | user_model.reset_password(form_result) | |
|
126 | h.flash(_('Your new password was sent'), | |
|
127 | category='success') | |
|
128 | return redirect(url('login_home')) | |
|
129 | ||
|
130 | except formencode.Invalid as errors: | |
|
131 | return htmlfill.render( | |
|
132 | render('/password_reset.html'), | |
|
133 | defaults=errors.value, | |
|
134 | errors=errors.error_dict or {}, | |
|
135 | prefix_error=False, | |
|
136 | encoding="UTF-8") | |
|
137 | ||
|
138 | return render('/password_reset.html') | |
|
139 | ||
|
114 | 140 | def logout(self): |
|
115 | 141 | session['hg_app_user'] = AuthUser() |
|
116 | 142 | session.save() |
@@ -26,10 +26,9 b' from pylons import request, response, se' | |||
|
26 | 26 | from pylons.controllers.util import abort, redirect |
|
27 | 27 | from pylons_app.lib.auth import LoginRequired |
|
28 | 28 | from pylons_app.lib.base import BaseController, render |
|
29 |
from pylons_app.lib.indexers import |
|
|
30 |
from webhelpers. |
|
|
31 | from whoosh.highlight import highlight, SimpleFragmenter, HtmlFormatter, \ | |
|
32 | ContextFragmenter | |
|
29 | from pylons_app.lib.indexers import IDX_LOCATION, SCHEMA, IDX_NAME, ResultWrapper | |
|
30 | from webhelpers.paginate import Page | |
|
31 | from webhelpers.util import update_params | |
|
33 | 32 | from pylons.i18n.translation import _ |
|
34 | 33 | from whoosh.index import open_dir, EmptyIndexError |
|
35 | 34 | from whoosh.qparser import QueryParser, QueryParserError |
@@ -45,17 +44,16 b' class SearchController(BaseController):' | |||
|
45 | 44 | def __before__(self): |
|
46 | 45 | super(SearchController, self).__before__() |
|
47 | 46 | |
|
48 | ||
|
49 | 47 | def index(self): |
|
50 | 48 | c.formated_results = [] |
|
51 | 49 | c.runtime = '' |
|
52 | search_items = set() | |
|
53 | 50 | c.cur_query = request.GET.get('q', None) |
|
54 | 51 | if c.cur_query: |
|
55 | 52 | cur_query = c.cur_query.lower() |
|
56 | 53 | |
|
57 | ||
|
58 | 54 | if c.cur_query: |
|
55 | p = int(request.params.get('page', 1)) | |
|
56 | highlight_items = set() | |
|
59 | 57 | try: |
|
60 | 58 | idx = open_dir(IDX_LOCATION, indexname=IDX_NAME) |
|
61 | 59 | searcher = idx.searcher() |
@@ -65,49 +63,36 b' class SearchController(BaseController):' | |||
|
65 | 63 | query = qp.parse(unicode(cur_query)) |
|
66 | 64 | |
|
67 | 65 | if isinstance(query, Phrase): |
|
68 |
|
|
|
66 | highlight_items.update(query.words) | |
|
69 | 67 | else: |
|
70 | 68 | for i in query.all_terms(): |
|
71 |
|
|
|
69 | if i[0] == 'content': | |
|
70 | highlight_items.add(i[1]) | |
|
71 | ||
|
72 | matcher = query.matcher(searcher) | |
|
72 | 73 |
|
|
73 | 74 | log.debug(query) |
|
74 |
log.debug( |
|
|
75 | log.debug(highlight_items) | |
|
75 | 76 | results = searcher.search(query) |
|
77 | res_ln = len(results) | |
|
76 | 78 | c.runtime = '%s results (%.3f seconds)' \ |
|
77 |
% ( |
|
|
78 | ||
|
79 | analyzer = ANALYZER | |
|
80 | formatter = HtmlFormatter('span', | |
|
81 | between='\n<span class="break">...</span>\n') | |
|
82 | ||
|
83 | #how the parts are splitted within the same text part | |
|
84 | fragmenter = SimpleFragmenter(200) | |
|
85 | #fragmenter = ContextFragmenter(search_items) | |
|
79 | % (res_ln, results.runtime) | |
|
86 | 80 | |
|
87 | for res in results: | |
|
88 | d = {} | |
|
89 | d.update(res) | |
|
90 | hl = highlight(escape(res['content']), search_items, | |
|
91 | analyzer=analyzer, | |
|
92 | fragmenter=fragmenter, | |
|
93 | formatter=formatter, | |
|
94 | top=5) | |
|
95 | f_path = res['path'][res['path'].find(res['repository']) \ | |
|
96 | + len(res['repository']):].lstrip('/') | |
|
97 | d.update({'content_short':hl, | |
|
98 | 'f_path':f_path}) | |
|
99 | #del d['content'] | |
|
100 | c.formated_results.append(d) | |
|
81 | def url_generator(**kw): | |
|
82 | return update_params("?q=%s" % c.cur_query, **kw) | |
|
83 | ||
|
84 | c.formated_results = Page( | |
|
85 | ResultWrapper(searcher, matcher, highlight_items), | |
|
86 | page=p, item_count=res_ln, | |
|
87 | items_per_page=10, url=url_generator) | |
|
101 | 88 |
|
|
102 | 89 | except QueryParserError: |
|
103 | 90 | c.runtime = _('Invalid search query. Try quoting it.') |
|
104 | ||
|
91 | searcher.close() | |
|
105 | 92 | except (EmptyIndexError, IOError): |
|
106 | 93 | log.error(traceback.format_exc()) |
|
107 | 94 | log.error('Empty Index data') |
|
108 | 95 | c.runtime = _('There is no index to search in. Please run whoosh indexer') |
|
109 | 96 | |
|
110 | ||
|
111 | ||
|
112 | 97 | # Return a rendered template |
|
113 | 98 | return render('/search/search.html') |
@@ -22,15 +22,17 b' Created on April 18, 2010' | |||
|
22 | 22 | summary controller for pylons |
|
23 | 23 | @author: marcink |
|
24 | 24 | """ |
|
25 | from datetime import datetime, timedelta | |
|
26 | from pylons import tmpl_context as c, request | |
|
25 | from pylons import tmpl_context as c, request, url | |
|
27 | 26 | from pylons_app.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator |
|
28 | 27 | from pylons_app.lib.base import BaseController, render |
|
29 | from pylons_app.lib.helpers import person | |
|
30 | 28 | from pylons_app.lib.utils import OrderedDict |
|
31 | 29 | from pylons_app.model.hg_model import HgModel |
|
30 | from pylons_app.model.db import Statistics | |
|
31 | from webhelpers.paginate import Page | |
|
32 | from pylons_app.lib.celerylib import run_task | |
|
33 | from pylons_app.lib.celerylib.tasks import get_commits_stats | |
|
34 | from datetime import datetime, timedelta | |
|
32 | 35 | from time import mktime |
|
33 | from webhelpers.paginate import Page | |
|
34 | 36 | import calendar |
|
35 | 37 | import logging |
|
36 | 38 | |
@@ -63,77 +65,32 b' class SummaryController(BaseController):' | |||
|
63 | 65 | for name, hash in c.repo_info.branches.items()[:10]: |
|
64 | 66 | c.repo_branches[name] = c.repo_info.get_changeset(hash) |
|
65 | 67 | |
|
66 | c.commit_data = self.__get_commit_stats(c.repo_info) | |
|
68 | td = datetime.today() + timedelta(days=1) | |
|
69 | y, m, d = td.year, td.month, td.day | |
|
70 | ||
|
71 | ts_min_y = mktime((y - 1, (td - timedelta(days=calendar.mdays[m])).month, | |
|
72 | d, 0, 0, 0, 0, 0, 0,)) | |
|
73 | ts_min_m = mktime((y, (td - timedelta(days=calendar.mdays[m])).month, | |
|
74 | d, 0, 0, 0, 0, 0, 0,)) | |
|
75 | ||
|
76 | ts_max_y = mktime((y, m, d, 0, 0, 0, 0, 0, 0,)) | |
|
77 | ||
|
78 | run_task(get_commits_stats, c.repo_info.name, ts_min_y, ts_max_y) | |
|
79 | c.ts_min = ts_min_m | |
|
80 | c.ts_max = ts_max_y | |
|
81 | ||
|
82 | ||
|
83 | stats = self.sa.query(Statistics)\ | |
|
84 | .filter(Statistics.repository == c.repo_info.dbrepo)\ | |
|
85 | .scalar() | |
|
86 | ||
|
87 | if stats: | |
|
88 | c.commit_data = stats.commit_activity | |
|
89 | c.overview_data = stats.commit_activity_combined | |
|
90 | else: | |
|
91 | import json | |
|
92 | c.commit_data = json.dumps({}) | |
|
93 | c.overview_data = json.dumps([[ts_min_y, 0], [ts_max_y, 0] ]) | |
|
67 | 94 | |
|
68 | 95 | return render('summary/summary.html') |
|
69 | 96 | |
|
70 | ||
|
71 | ||
|
72 | def __get_commit_stats(self, repo): | |
|
73 | aggregate = OrderedDict() | |
|
74 | ||
|
75 | #graph range | |
|
76 | td = datetime.today() + timedelta(days=1) | |
|
77 | y, m, d = td.year, td.month, td.day | |
|
78 | c.ts_min = mktime((y, (td - timedelta(days=calendar.mdays[m])).month, | |
|
79 | d, 0, 0, 0, 0, 0, 0,)) | |
|
80 | c.ts_max = mktime((y, m, d, 0, 0, 0, 0, 0, 0,)) | |
|
81 | ||
|
82 | def author_key_cleaner(k): | |
|
83 | k = person(k) | |
|
84 | k = k.replace('"', "'") #for js data compatibilty | |
|
85 | return k | |
|
86 | ||
|
87 | for cs in repo[:200]:#added limit 200 until fix #29 is made | |
|
88 | k = '%s-%s-%s' % (cs.date.timetuple()[0], cs.date.timetuple()[1], | |
|
89 | cs.date.timetuple()[2]) | |
|
90 | timetupple = [int(x) for x in k.split('-')] | |
|
91 | timetupple.extend([0 for _ in xrange(6)]) | |
|
92 | k = mktime(timetupple) | |
|
93 | if aggregate.has_key(author_key_cleaner(cs.author)): | |
|
94 | if aggregate[author_key_cleaner(cs.author)].has_key(k): | |
|
95 | aggregate[author_key_cleaner(cs.author)][k]["commits"] += 1 | |
|
96 | aggregate[author_key_cleaner(cs.author)][k]["added"] += len(cs.added) | |
|
97 | aggregate[author_key_cleaner(cs.author)][k]["changed"] += len(cs.changed) | |
|
98 | aggregate[author_key_cleaner(cs.author)][k]["removed"] += len(cs.removed) | |
|
99 | ||
|
100 | else: | |
|
101 | #aggregate[author_key_cleaner(cs.author)].update(dates_range) | |
|
102 | if k >= c.ts_min and k <= c.ts_max: | |
|
103 | aggregate[author_key_cleaner(cs.author)][k] = {} | |
|
104 | aggregate[author_key_cleaner(cs.author)][k]["commits"] = 1 | |
|
105 | aggregate[author_key_cleaner(cs.author)][k]["added"] = len(cs.added) | |
|
106 | aggregate[author_key_cleaner(cs.author)][k]["changed"] = len(cs.changed) | |
|
107 | aggregate[author_key_cleaner(cs.author)][k]["removed"] = len(cs.removed) | |
|
108 | ||
|
109 | else: | |
|
110 | if k >= c.ts_min and k <= c.ts_max: | |
|
111 | aggregate[author_key_cleaner(cs.author)] = OrderedDict() | |
|
112 | #aggregate[author_key_cleaner(cs.author)].update(dates_range) | |
|
113 | aggregate[author_key_cleaner(cs.author)][k] = {} | |
|
114 | aggregate[author_key_cleaner(cs.author)][k]["commits"] = 1 | |
|
115 | aggregate[author_key_cleaner(cs.author)][k]["added"] = len(cs.added) | |
|
116 | aggregate[author_key_cleaner(cs.author)][k]["changed"] = len(cs.changed) | |
|
117 | aggregate[author_key_cleaner(cs.author)][k]["removed"] = len(cs.removed) | |
|
118 | ||
|
119 | d = '' | |
|
120 | tmpl0 = u""""%s":%s""" | |
|
121 | tmpl1 = u"""{label:"%s",data:%s,schema:["commits"]},""" | |
|
122 | for author in aggregate: | |
|
123 | ||
|
124 | d += tmpl0 % (author, | |
|
125 | tmpl1 \ | |
|
126 | % (author, | |
|
127 | [{"time":x, | |
|
128 | "commits":aggregate[author][x]['commits'], | |
|
129 | "added":aggregate[author][x]['added'], | |
|
130 | "changed":aggregate[author][x]['changed'], | |
|
131 | "removed":aggregate[author][x]['removed'], | |
|
132 | } for x in aggregate[author]])) | |
|
133 | if d == '': | |
|
134 | d = '"%s":{label:"%s",data:[[0,1],]}' \ | |
|
135 | % (author_key_cleaner(repo.contact), | |
|
136 | author_key_cleaner(repo.contact)) | |
|
137 | return d | |
|
138 | ||
|
139 |
@@ -34,9 +34,36 b' from sqlalchemy.orm.exc import NoResultF' | |||
|
34 | 34 | import bcrypt |
|
35 | 35 | from decorator import decorator |
|
36 | 36 | import logging |
|
37 | import random | |
|
37 | 38 | |
|
38 | 39 | log = logging.getLogger(__name__) |
|
39 | 40 | |
|
41 | class PasswordGenerator(object): | |
|
42 | """This is a simple class for generating password from | |
|
43 | different sets of characters | |
|
44 | usage: | |
|
45 | passwd_gen = PasswordGenerator() | |
|
46 | #print 8-letter password containing only big and small letters of alphabet | |
|
47 | print passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL) | |
|
48 | """ | |
|
49 | ALPHABETS_NUM = r'''1234567890'''#[0] | |
|
50 | ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm'''#[1] | |
|
51 | ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM'''#[2] | |
|
52 | ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?''' #[3] | |
|
53 | ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM + ALPHABETS_SPECIAL#[4] | |
|
54 | ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM#[5] | |
|
55 | ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL | |
|
56 | ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM#[6] | |
|
57 | ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM#[7] | |
|
58 | ||
|
59 | def __init__(self, passwd=''): | |
|
60 | self.passwd = passwd | |
|
61 | ||
|
62 | def gen_password(self, len, type): | |
|
63 | self.passwd = ''.join([random.choice(type) for _ in xrange(len)]) | |
|
64 | return self.passwd | |
|
65 | ||
|
66 | ||
|
40 | 67 | def get_crypt_password(password): |
|
41 | 68 | """Cryptographic function used for password hashing based on sha1 |
|
42 | 69 | @param password: password to hash |
@@ -84,11 +84,13 b' class DbManage(object):' | |||
|
84 | 84 | import getpass |
|
85 | 85 | username = raw_input('Specify admin username:') |
|
86 | 86 | password = getpass.getpass('Specify admin password:') |
|
87 | self.create_user(username, password, True) | |
|
87 | email = raw_input('Specify admin email:') | |
|
88 | self.create_user(username, password, email, True) | |
|
88 | 89 | else: |
|
89 | 90 | log.info('creating admin and regular test users') |
|
90 | self.create_user('test_admin', 'test', True) | |
|
91 | self.create_user('test_regular', 'test', False) | |
|
91 | self.create_user('test_admin', 'test', 'test_admin@mail.com', True) | |
|
92 | self.create_user('test_regular', 'test', 'test_regular@mail.com', False) | |
|
93 | self.create_user('test_regular2', 'test', 'test_regular2@mail.com', False) | |
|
92 | 94 | |
|
93 | 95 | |
|
94 | 96 | |
@@ -166,14 +168,14 b' class DbManage(object):' | |||
|
166 | 168 | raise |
|
167 | 169 | log.info('created ui config') |
|
168 | 170 | |
|
169 | def create_user(self, username, password, admin=False): | |
|
171 | def create_user(self, username, password, email='', admin=False): | |
|
170 | 172 | log.info('creating administrator user %s', username) |
|
171 | 173 | new_user = User() |
|
172 | 174 | new_user.username = username |
|
173 | 175 | new_user.password = get_crypt_password(password) |
|
174 | 176 | new_user.name = 'Hg' |
|
175 | 177 | new_user.lastname = 'Admin' |
|
176 |
new_user.email = |
|
|
178 | new_user.email = email | |
|
177 | 179 | new_user.admin = admin |
|
178 | 180 | new_user.active = True |
|
179 | 181 |
@@ -277,13 +277,17 b' def pygmentize_annotation(filenode, **kw' | |||
|
277 | 277 | return literal(annotate_highlight(filenode, url_func, **kwargs)) |
|
278 | 278 | |
|
def repo_name_slug(value):
    """Return a slug version of the given repository name.

    Invoked on every repository creation/modification so that bad
    characters never end up in a repository name.
    """
    # Strip formatting/markup first, then knock out forbidden characters.
    cleaned = strip_tags(remove_formatting(value))

    forbidden = """=[]\;'"<>,/~!@#$%^&*()+{}|:"""
    for ch in forbidden:
        cleaned = cleaned.replace(ch, '-')
    cleaned = recursive_replace(cleaned, '-')
    return collapse(cleaned, '-')
|
288 | 292 | |
|
289 | 293 | def get_changeset_safe(repo, rev): |
@@ -321,6 +325,7 b" isodate = lambda x: util.datestr(x, '%Y" | |||
|
321 | 325 | isodatesec = lambda x: util.datestr(x, '%Y-%m-%d %H:%M:%S %1%2') |
|
322 | 326 | localdate = lambda x: (x[0], util.makedate()[1]) |
|
323 | 327 | rfc822date = lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S %1%2") |
|
328 | rfc822date_notz = lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S") | |
|
324 | 329 | rfc3339date = lambda x: util.datestr(x, "%Y-%m-%dT%H:%M:%S%1:%2") |
|
325 | 330 | time_ago = lambda x: util.datestr(_age(x), "%a, %d %b %Y %H:%M:%S %1%2") |
|
326 | 331 |
@@ -1,28 +1,31 b'' | |||
|
1 | import sys | |
|
1 | from os.path import dirname as dn, join as jn | |
|
2 | from pylons_app.config.environment import load_environment | |
|
3 | from pylons_app.model.hg_model import HgModel | |
|
4 | from shutil import rmtree | |
|
5 | from webhelpers.html.builder import escape | |
|
6 | from vcs.utils.lazy import LazyProperty | |
|
7 | ||
|
8 | from whoosh.analysis import RegexTokenizer, LowercaseFilter, StopFilter | |
|
9 | from whoosh.fields import TEXT, ID, STORED, Schema, FieldType | |
|
10 | from whoosh.index import create_in, open_dir | |
|
11 | from whoosh.formats import Characters | |
|
12 | from whoosh.highlight import highlight, SimpleFragmenter, HtmlFormatter | |
|
13 | ||
|
2 | 14 | import os |
|
3 | from pidlock import LockHeld, DaemonLock | |
|
15 | import sys | |
|
4 | 16 | import traceback |
|
5 | 17 | |
|
6 | from os.path import dirname as dn | |
|
7 | from os.path import join as jn | |
|
8 | ||
|
9 | 18 | #to get the pylons_app import |
|
10 | 19 | sys.path.append(dn(dn(dn(os.path.realpath(__file__))))) |
|
11 | 20 | |
|
12 | from pylons_app.config.environment import load_environment | |
|
13 | from pylons_app.model.hg_model import HgModel | |
|
14 | from whoosh.analysis import RegexTokenizer, LowercaseFilter, StopFilter | |
|
15 | from whoosh.fields import TEXT, ID, STORED, Schema | |
|
16 | from whoosh.index import create_in, open_dir | |
|
17 | from shutil import rmtree | |
|
18 | 21 | |
|
19 | 22 | #LOCATION WE KEEP THE INDEX |
|
20 | 23 | IDX_LOCATION = jn(dn(dn(dn(dn(os.path.abspath(__file__))))), 'data', 'index') |
|
21 | 24 | |
|
22 | 25 | #EXTENSIONS WE WANT TO INDEX CONTENT OFF |
|
23 | 26 |
INDEX_EXTENSIONS = ['action', 'adp', 'ashx', 'asmx', 'aspx', 'asx', 'axd', 'c', |
|
24 |
'cfm', 'cpp', 'cs', 'css', 'diff', 'do', 'el', 'erl', |
|
|
25 |
'htm', 'html', 'ini', 'java', 'js', 'jsp', 'jspx', 'lisp', |
|
|
27 | 'cfg', 'cfm', 'cpp', 'cs', 'css', 'diff', 'do', 'el', 'erl', | |
|
28 | 'h', 'htm', 'html', 'ini', 'java', 'js', 'jsp', 'jspx', 'lisp', | |
|
26 | 29 |
'lua', 'm', 'mako', 'ml', 'pas', 'patch', 'php', 'php3', |
|
27 | 30 |
'php4', 'phtml', 'pm', 'py', 'rb', 'rst', 's', 'sh', 'sql', |
|
28 | 31 |
'tpl', 'txt', 'vim', 'wss', 'xhtml', 'xml','xsl','xslt', |
@@ -31,11 +34,106 b" INDEX_EXTENSIONS = ['action', 'adp', 'as" | |||
|
31 | 34 | #CUSTOM ANALYZER wordsplit + lowercase filter |
|
32 | 35 | ANALYZER = RegexTokenizer(expression=r"\w+") | LowercaseFilter() |
|
33 | 36 | |
|
37 | ||
|
34 | 38 | #INDEX SCHEMA DEFINITION |
|
35 | 39 | SCHEMA = Schema(owner=TEXT(), |
|
36 | 40 | repository=TEXT(stored=True), |
|
37 | 41 | path=ID(stored=True, unique=True), |
|
38 |
content= |
|
|
42 | content=FieldType(format=Characters(ANALYZER), | |
|
43 | scorable=True, stored=True), | |
|
39 | 44 | modtime=STORED(),extension=TEXT(stored=True)) |
|
40 | 45 | |
|
41 | IDX_NAME = 'HG_INDEX' No newline at end of file | |
|
46 | ||
|
47 | IDX_NAME = 'HG_INDEX' | |
|
48 | FORMATTER = HtmlFormatter('span', between='\n<span class="break">...</span>\n') | |
|
49 | FRAGMENTER = SimpleFragmenter(200) | |
|
50 | ||
|
class ResultWrapper(object):
    """Lazy wrapper around whoosh search results.

    Exposes the matched documents as a sliceable, iterable sequence and
    decorates each hit with a highlighted content excerpt.
    """
    def __init__(self, searcher, matcher, highlight_items):
        self.searcher = searcher
        self.matcher = matcher
        self.highlight_items = highlight_items
        # Half of the fragment window; used to extend chunks on both
        # sides of a match (integer division keeps this an int on py3).
        self.fragment_size = 200 // 2

    @LazyProperty
    def doc_ids(self):
        """List of ``[docnum, chunks]`` pairs, computed once on first access.

        Consumes the matcher, so it must only run a single time.
        """
        docs_id = []
        while self.matcher.is_active():
            docnum = self.matcher.id()
            chunks = [offsets for offsets in self.get_chunks()]
            docs_id.append([docnum, chunks])
            self.matcher.next()
        return docs_id

    def __str__(self):
        return '<%s at %s>' % (self.__class__.__name__, len(self.doc_ids))

    def __repr__(self):
        return self.__str__()

    def __len__(self):
        return len(self.doc_ids)

    def __iter__(self):
        """
        Allows iteration over results, lazily generating content.
        """
        for docid in self.doc_ids:
            yield self.get_full_content(docid)

    def __getitem__(self, key):
        """Index or slice access, lazily generating full content.

        Needed for slicing on Python 3, where ``__getslice__`` is gone.
        """
        if isinstance(key, slice):
            return [self.get_full_content(docid) for docid in self.doc_ids[key]]
        return self.get_full_content(self.doc_ids[key])

    def __getslice__(self, i, j):
        """
        Slicing of resultWrapper (Python 2 only; kept for compatibility).
        """
        # Renamed the local so it no longer shadows the builtin ``slice``.
        result = []
        for docid in self.doc_ids[i:j]:
            result.append(self.get_full_content(docid))
        return result

    def get_full_content(self, docid):
        """Return the stored fields for ``docid`` augmented with a short
        highlighted excerpt and the file path relative to the repository."""
        res = self.searcher.stored_fields(docid[0])
        f_path = res['path'][res['path'].find(res['repository']) \
                 + len(res['repository']):].lstrip('/')

        content_short = self.get_short_content(res, docid[1])
        res.update({'content_short':content_short,
                    'content_short_hl':self.highlight(content_short),
                    'f_path':f_path})

        return res

    def get_short_content(self, res, chunks):
        # Concatenate the (start, end) chunks cut out of the full content.
        return ''.join([res['content'][chunk[0]:chunk[1]] for chunk in chunks])

    def get_chunks(self):
        """
        Smart function that implements chunking the content
        but does not overlap chunks so it doesn't highlight the same
        close occurrences twice.

        Yields ``(start, end)`` offsets into the document content, each
        extended by ``self.fragment_size`` on both sides but clipped so
        consecutive chunks never overlap.
        """
        memory = [(0, 0)]
        for span in self.matcher.spans():
            start = span.startchar or 0
            end = span.endchar or 0
            start_offseted = max(0, start - self.fragment_size)
            end_offseted = end + self.fragment_size

            # Clip against the previous chunk to avoid double-highlighting.
            if start_offseted < memory[-1][1]:
                start_offseted = memory[-1][1]
            memory.append((start_offseted, end_offseted,))
            yield (start_offseted, end_offseted,)

    def highlight(self, content, top=5):
        """Return an HTML-highlighted version of ``content`` showing at
        most ``top`` fragments."""
        hl = highlight(escape(content),
                 self.highlight_items,
                 analyzer=ANALYZER,
                 fragmenter=FRAGMENTER,
                 formatter=FORMATTER,
                 top=top)
        return hl
139 | return hl |
@@ -32,20 +32,31 b' from os.path import join as jn' | |||
|
32 | 32 | project_path = dn(dn(dn(dn(os.path.realpath(__file__))))) |
|
33 | 33 | sys.path.append(project_path) |
|
34 | 34 | |
|
35 | from pidlock import LockHeld, DaemonLock | |
|
36 | import traceback | |
|
37 | from pylons_app.config.environment import load_environment | |
|
35 | from pylons_app.lib.pidlock import LockHeld, DaemonLock | |
|
38 | 36 | from pylons_app.model.hg_model import HgModel |
|
39 | 37 | from pylons_app.lib.helpers import safe_unicode |
|
40 | 38 | from whoosh.index import create_in, open_dir |
|
41 | 39 | from shutil import rmtree |
|
42 |
from pylons_app.lib.indexers import |
|
|
43 | SCHEMA, IDX_NAME | |
|
40 | from pylons_app.lib.indexers import INDEX_EXTENSIONS, IDX_LOCATION, SCHEMA, IDX_NAME | |
|
44 | 41 | |
|
45 | 42 | import logging |
|
46 | import logging.config | |
|
47 | logging.config.fileConfig(jn(project_path, 'development.ini')) | |
|
43 | ||
|
48 | 44 | log = logging.getLogger('whooshIndexer') |
|
45 | # create logger | |
|
46 | log.setLevel(logging.DEBUG) | |
|
47 | log.propagate = False | |
|
48 | # create console handler and set level to debug | |
|
49 | ch = logging.StreamHandler() | |
|
50 | ch.setLevel(logging.DEBUG) | |
|
51 | ||
|
52 | # create formatter | |
|
53 | formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") | |
|
54 | ||
|
55 | # add formatter to ch | |
|
56 | ch.setFormatter(formatter) | |
|
57 | ||
|
58 | # add ch to logger | |
|
59 | log.addHandler(ch) | |
|
49 | 60 | |
|
50 | 61 | def scan_paths(root_location): |
|
51 | 62 | return HgModel.repo_scan('/', root_location, None, True) |
@@ -221,6 +232,7 b' if __name__ == "__main__":' | |||
|
221 | 232 | WhooshIndexingDaemon(repo_location=repo_location)\ |
|
222 | 233 | .run(full_index=full_index) |
|
223 | 234 | l.release() |
|
235 | reload(logging) | |
|
224 | 236 | except LockHeld: |
|
225 | 237 | sys.exit(1) |
|
226 | 238 |
@@ -6,7 +6,7 b' class LockHeld(Exception):pass' | |||
|
6 | 6 | |
|
7 | 7 | |
|
8 | 8 | class DaemonLock(object): |
|
9 |
|
|
|
9 | """daemon locking | |
|
10 | 10 | USAGE: |
|
11 | 11 | try: |
|
12 | 12 | l = lock() |
@@ -14,7 +14,7 b' class DaemonLock(object):' | |||
|
14 | 14 | l.release() |
|
15 | 15 | except LockHeld: |
|
16 | 16 | sys.exit(1) |
|
17 | ''' | |
|
17 | """ | |
|
18 | 18 | |
|
19 | 19 | def __init__(self, file=None, callbackfn=None, |
|
20 | 20 | desc='daemon lock', debug=False): |
@@ -40,9 +40,9 b' class DaemonLock(object):' | |||
|
40 | 40 | |
|
41 | 41 | |
|
42 | 42 | def lock(self): |
|
43 |
|
|
|
43 | """ | |
|
44 | 44 | locking function, if lock is present it will raise LockHeld exception |
|
45 |
|
|
|
45 | """ | |
|
46 | 46 | lockname = '%s' % (os.getpid()) |
|
47 | 47 | |
|
48 | 48 | self.trylock() |
@@ -75,9 +75,9 b' class DaemonLock(object):' | |||
|
75 | 75 | |
|
76 | 76 | |
|
77 | 77 | def release(self): |
|
78 |
|
|
|
78 | """ | |
|
79 | 79 | releases the pid by removing the pidfile |
|
80 |
|
|
|
80 | """ | |
|
81 | 81 | if self.callbackfn: |
|
82 | 82 | #execute callback function on release |
|
83 | 83 | if self.debug: |
@@ -94,11 +94,11 b' class DaemonLock(object):' | |||
|
94 | 94 | pass |
|
95 | 95 | |
|
96 | 96 | def makelock(self, lockname, pidfile): |
|
97 |
|
|
|
97 | """ | |
|
98 | 98 | this function will make an actual lock |
|
99 | 99 | @param lockname: acctual pid of file |
|
100 | 100 | @param pidfile: the file to write the pid in |
|
101 |
|
|
|
101 | """ | |
|
102 | 102 | if self.debug: |
|
103 | 103 | print 'creating a file %s and pid: %s' % (pidfile, lockname) |
|
104 | 104 | pidfile = open(self.pidfile, "wb") |
@@ -1,7 +1,6 b'' | |||
|
1 | 1 | from sqlalchemy.interfaces import ConnectionProxy |
|
2 | 2 | import time |
|
3 | import logging | |
|
4 | log = logging.getLogger('timerproxy') | |
|
3 | from sqlalchemy import log | |
|
5 | 4 | BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = xrange(30, 38) |
|
6 | 5 | |
|
7 | 6 | def color_sql(sql): |
@@ -39,19 +38,22 b' def format_sql(sql):' | |||
|
39 | 38 | |
|
40 | 39 | |
|
class TimerProxy(ConnectionProxy):
    """SQLAlchemy connection proxy that logs every executed statement
    together with the wall-clock time it took."""

    def __init__(self):
        super(TimerProxy, self).__init__()
        self.logging_name = 'timerProxy'
        self.log = log.instance_logger(self, True)

    def cursor_execute(self, execute, cursor, statement, parameters, context, executemany):
        started = time.time()
        try:
            self.log.info(">>>>> STARTING QUERY >>>>>")
            return execute(cursor, statement, parameters, context)
        finally:
            elapsed = time.time() - started
            try:
                # Try to %-interpolate the parameters into the statement;
                # falls back to plain concatenation when they don't fit.
                formatted = format_sql("Query: %s" % statement % parameters)
            except TypeError:
                formatted = format_sql("Query: %s %s" % (statement, parameters))
            self.log.info(formatted)
            self.log.info("<<<<< TOTAL TIME: %f <<<<<" % elapsed)
@@ -31,6 +31,7 b' from vcs.backends.base import BaseChange' | |||
|
31 | 31 | from vcs.utils.lazy import LazyProperty |
|
32 | 32 | import logging |
|
33 | 33 | import os |
|
34 | ||
|
34 | 35 | log = logging.getLogger(__name__) |
|
35 | 36 | |
|
36 | 37 | |
@@ -218,6 +219,7 b' class EmptyChangeset(BaseChangeset):' | |||
|
218 | 219 | |
|
219 | 220 | revision = -1 |
|
220 | 221 | message = '' |
|
222 | author = '' | |
|
221 | 223 | |
|
222 | 224 | @LazyProperty |
|
223 | 225 | def raw_id(self): |
@@ -362,3 +364,75 b' class OrderedDict(dict, DictMixin):' | |||
|
362 | 364 | |
|
363 | 365 | def __ne__(self, other): |
|
364 | 366 | return not self == other |
|
367 | ||
|
368 | ||
|
369 | #=============================================================================== | |
|
370 | # TEST FUNCTIONS | |
|
371 | #=============================================================================== | |
|
def create_test_index(repo_location, full_index):
    """Build the default test search index.

    :param repo_location: root path of the repositories to index
    :param full_index: when True, rebuild the whole index
    """
    from pylons_app.lib.indexers.daemon import WhooshIndexingDaemon
    from pylons_app.lib.pidlock import DaemonLock, LockHeld
    from pylons_app.lib.indexers import IDX_LOCATION
    import shutil

    # Start from a clean slate: drop any previous index directory.
    if os.path.exists(IDX_LOCATION):
        shutil.rmtree(IDX_LOCATION)

    try:
        lock = DaemonLock()
        daemon = WhooshIndexingDaemon(repo_location=repo_location)
        daemon.run(full_index=full_index)
        lock.release()
    except LockHeld:
        # Another indexing run is in progress; best-effort, so just skip.
        pass
392 | ||
|
def create_test_env(repos_test_path, config):
    """Create a fresh database and install the test repository into /tmp.

    :param repos_test_path: path used when answering the repos-root prompt
    :param config: app config dict; ``sqlalchemy.db1.url`` supplies the db name
    """
    from pylons_app.lib.db_manage import DbManage
    import tarfile
    import shutil
    from os.path import dirname as dn, join as jn, abspath

    # Dedicated console logger for the test-environment bootstrap.
    log = logging.getLogger('TestEnvCreator')
    log.setLevel(logging.DEBUG)
    log.propagate = True

    handler = logging.StreamHandler()
    handler.setLevel(logging.DEBUG)
    handler.setFormatter(logging.Formatter(
        "%(asctime)s - %(name)s - %(levelname)s - %(message)s"))
    log.addHandler(handler)

    #PART ONE create db
    log.debug('making test db')
    dbname = config['sqlalchemy.db1.url'].split('/')[-1]
    dbmanage = DbManage(log_sql=True, dbname=dbname, tests=True)
    dbmanage.create_tables(override=True)
    dbmanage.config_prompt(repos_test_path)
    dbmanage.create_default_user()
    dbmanage.admin_prompt()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()

    #PART TWO make test repo
    log.debug('making test vcs repo')
    if os.path.isdir('/tmp/vcs_test'):
        shutil.rmtree('/tmp/vcs_test')

    cur_dir = dn(dn(abspath(__file__)))
    tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test.tar.gz"))
    tar.extractall('/tmp')
    tar.close()
438 | tar.close() |
@@ -1,15 +1,8 b'' | |||
|
1 | 1 | """The application's model objects""" |
|
2 | 2 | import logging |
|
3 | import sqlalchemy as sa | |
|
4 | from sqlalchemy import orm | |
|
5 | 3 | from pylons_app.model import meta |
|
6 | from pylons_app.model.meta import Session | |
|
7 | 4 | log = logging.getLogger(__name__) |
|
8 | 5 | |
|
9 | # Add these two imports: | |
|
10 | import datetime | |
|
11 | from sqlalchemy import schema, types | |
|
12 | ||
|
13 | 6 | def init_model(engine): |
|
14 | 7 | """Call me before using any of the tables or classes in the model""" |
|
15 | 8 | log.info("INITIALIZING DB MODELS") |
@@ -26,7 +26,7 b' class HgAppUi(Base):' | |||
|
26 | 26 | |
|
27 | 27 | class User(Base): |
|
28 | 28 | __tablename__ = 'users' |
|
29 | __table_args__ = (UniqueConstraint('username'), {'useexisting':True}) | |
|
29 | __table_args__ = (UniqueConstraint('username'), UniqueConstraint('email'), {'useexisting':True}) | |
|
30 | 30 | user_id = Column("user_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True) |
|
31 | 31 | username = Column("username", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
32 | 32 | password = Column("password", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
@@ -120,6 +120,15 b' class UserToPerm(Base):' | |||
|
120 | 120 | user = relation('User') |
|
121 | 121 | permission = relation('Permission') |
|
122 | 122 | |
|
123 | ||
|
class Statistics(Base):
    # Per-repository commit statistics cache; at most one row per
    # repository (enforced by the UniqueConstraint below).
    __tablename__ = 'statistics'
    __table_args__ = (UniqueConstraint('repository_id'), {'useexisting':True})
    # Surrogate primary key.
    stat_id = Column("stat_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
    # Owning repository; unique => one statistics row per repository.
    repository_id = Column("repository_id", INTEGER(), ForeignKey(u'repositories.repo_id'), nullable=False, unique=True, default=None)
    # Last revision these statistics were computed up to.
    stat_on_revision = Column("stat_on_revision", INTEGER(), nullable=False)
    commit_activity = Column("commit_activity", BLOB(), nullable=False)#JSON data
    commit_activity_combined = Column("commit_activity_combined", BLOB(), nullable=False)#JSON data
    languages = Column("languages", BLOB(), nullable=False)#JSON data

    repository = relation('Repository')
|
125 | 134 |
@@ -209,6 +209,36 b' class ValidPath(formencode.validators.Fa' | |||
|
209 | 209 | raise formencode.Invalid(msg, value, state, |
|
210 | 210 | error_dict={'paths_root_path':msg}) |
|
211 | 211 | |
|
212 | def UniqSystemEmail(old_data): | |
|
213 | class _UniqSystemEmail(formencode.validators.FancyValidator): | |
|
214 | def to_python(self, value, state): | |
|
215 | if old_data.get('email') != value: | |
|
216 | sa = meta.Session | |
|
217 | try: | |
|
218 | user = sa.query(User).filter(User.email == value).scalar() | |
|
219 | if user: | |
|
220 | raise formencode.Invalid(_("That e-mail address is already taken") , | |
|
221 | value, state) | |
|
222 | finally: | |
|
223 | meta.Session.remove() | |
|
224 | ||
|
225 | return value | |
|
226 | ||
|
227 | return _UniqSystemEmail | |
|
228 | ||
|
229 | class ValidSystemEmail(formencode.validators.FancyValidator): | |
|
230 | def to_python(self, value, state): | |
|
231 | sa = meta.Session | |
|
232 | try: | |
|
233 | user = sa.query(User).filter(User.email == value).scalar() | |
|
234 | if user is None: | |
|
235 | raise formencode.Invalid(_("That e-mail address doesn't exist.") , | |
|
236 | value, state) | |
|
237 | finally: | |
|
238 | meta.Session.remove() | |
|
239 | ||
|
240 | return value | |
|
241 | ||
|
212 | 242 | #=============================================================================== |
|
213 | 243 | # FORMS |
|
214 | 244 | #=============================================================================== |
@@ -250,12 +280,18 b' def UserForm(edit=False, old_data={}):' | |||
|
250 | 280 | active = StringBoolean(if_missing=False) |
|
251 | 281 | name = UnicodeString(strip=True, min=3, not_empty=True) |
|
252 | 282 | lastname = UnicodeString(strip=True, min=3, not_empty=True) |
|
253 | email = Email(not_empty=True) | |
|
283 | email = All(Email(not_empty=True), UniqSystemEmail(old_data)) | |
|
254 | 284 | |
|
255 | 285 | return _UserForm |
|
256 | 286 | |
|
257 | 287 | RegisterForm = UserForm |
|
258 | 288 | |
|
def PasswordResetForm():
    """Build the schema for the password-reset form: a single e-mail
    field that must be non-empty and belong to an existing user."""
    class _PasswordResetForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = True
        email = All(ValidSystemEmail(), Email(not_empty=True))

    return _PasswordResetForm
|
259 | 295 | |
|
260 | 296 | def RepoForm(edit=False, old_data={}): |
|
261 | 297 | class _RepoForm(formencode.Schema): |
@@ -43,16 +43,14 b' except ImportError:' | |||
|
43 | 43 | raise Exception('Unable to import vcs') |
|
44 | 44 | |
|
45 | 45 | def _get_repos_cached_initial(app_globals, initial): |
|
46 | """ | |
|
47 | return cached dict with repos | |
|
46 | """return cached dict with repos | |
|
48 | 47 | """ |
|
49 | 48 | g = app_globals |
|
50 | 49 | return HgModel.repo_scan(g.paths[0][0], g.paths[0][1], g.baseui, initial) |
|
51 | 50 | |
|
52 | 51 | @cache_region('long_term', 'cached_repo_list') |
|
53 | 52 | def _get_repos_cached(): |
|
54 | """ | |
|
55 | return cached dict with repos | |
|
53 | """return cached dict with repos | |
|
56 | 54 | """ |
|
57 | 55 | log.info('getting all repositories list') |
|
58 | 56 | from pylons import app_globals as g |
@@ -61,11 +59,12 b' def _get_repos_cached():' | |||
|
61 | 59 | @cache_region('super_short_term', 'cached_repos_switcher_list') |
|
62 | 60 | def _get_repos_switcher_cached(cached_repo_list): |
|
63 | 61 | repos_lst = [] |
|
64 |
for repo in |
|
|
65 |
if HasRepoPermissionAny('repository.write', 'repository.read', |
|
|
66 | repos_lst.append(repo) | |
|
62 | for repo in [x for x in cached_repo_list.values()]: | |
|
63 | if HasRepoPermissionAny('repository.write', 'repository.read', | |
|
64 | 'repository.admin')(repo.name.lower(), 'main page check'): | |
|
65 | repos_lst.append((repo.name, repo.dbrepo.private,)) | |
|
67 | 66 | |
|
68 | return repos_lst | |
|
67 | return sorted(repos_lst, key=lambda k:k[0]) | |
|
69 | 68 | |
|
70 | 69 | @cache_region('long_term', 'full_changelog') |
|
71 | 70 | def _full_changelog_cached(repo_name): |
@@ -73,14 +72,11 b' def _full_changelog_cached(repo_name):' | |||
|
73 | 72 | return list(reversed(list(HgModel().get_repo(repo_name)))) |
|
74 | 73 | |
|
75 | 74 | class HgModel(object): |
|
76 | """ | |
|
77 | Mercurial Model | |
|
75 | """Mercurial Model | |
|
78 | 76 | """ |
|
79 | 77 | |
|
80 | 78 | def __init__(self): |
|
81 |
|
|
|
82 | Constructor | |
|
83 | """ | |
|
79 | pass | |
|
84 | 80 | |
|
85 | 81 | @staticmethod |
|
86 | 82 | def repo_scan(repos_prefix, repos_path, baseui, initial=False): |
@@ -92,8 +88,7 b' class HgModel(object):' | |||
|
92 | 88 | """ |
|
93 | 89 | sa = meta.Session() |
|
94 | 90 | def check_repo_dir(path): |
|
95 | """ | |
|
96 | Checks the repository | |
|
91 | """Checks the repository | |
|
97 | 92 | :param path: |
|
98 | 93 | """ |
|
99 | 94 | repos_path = path.split('/') |
@@ -102,7 +97,7 b' class HgModel(object):' | |||
|
102 | 97 | if repos_path[0] != '/': |
|
103 | 98 | repos_path[0] = '/' |
|
104 | 99 | if not os.path.isdir(os.path.join(*repos_path)): |
|
105 |
raise RepositoryError('Not a valid repository in %s' % path |
|
|
100 | raise RepositoryError('Not a valid repository in %s' % path) | |
|
106 | 101 | if not repos_path.endswith('*'): |
|
107 | 102 | raise VCSError('You need to specify * or ** at the end of path ' |
|
108 | 103 | 'for recursive scanning') |
@@ -1,15 +1,58 b'' | |||
|
1 | 1 | """SQLAlchemy Metadata and Session object""" |
|
2 | 2 | from sqlalchemy.ext.declarative import declarative_base |
|
3 | 3 | from sqlalchemy.orm import scoped_session, sessionmaker |
|
4 | from pylons_app.model import caching_query | |
|
5 | from beaker import cache | |
|
6 | import os | |
|
7 | from os.path import join as jn, dirname as dn, abspath | |
|
8 | import time | |
|
9 | ||
|
10 | # Beaker CacheManager. A home base for cache configurations. | |
|
11 | cache_manager = cache.CacheManager() | |
|
4 | 12 | |
|
5 | 13 | __all__ = ['Base', 'Session'] |
|
6 | 14 | # |
|
7 | 15 | # SQLAlchemy session manager. Updated by model.init_model() |
|
8 | 16 | # |
|
9 |
Session = scoped_session( |
|
|
10 | # | |
|
17 | Session = scoped_session( | |
|
18 | sessionmaker( | |
|
19 | query_cls=caching_query.query_callable(cache_manager) | |
|
20 | ) | |
|
21 | ) | |
|
11 | 22 | |
|
12 | 23 | # The declarative Base |
|
13 | 24 | Base = declarative_base() |
|
14 | 25 | #For another db... |
|
15 | 26 | #Base2 = declarative_base() |
|
27 | ||
|
28 | #=============================================================================== | |
|
29 | # CACHE OPTIONS | |
|
30 | #=============================================================================== | |
|
31 | cache_dir = jn(dn(dn(dn(abspath(__file__)))), 'data', 'cache') | |
|
32 | if not os.path.isdir(cache_dir): | |
|
33 | os.mkdir(cache_dir) | |
|
34 | # set start_time to current time | |
|
35 | # to re-cache everything | |
|
36 | # upon application startup | |
|
37 | start_time = time.time() | |
|
38 | # configure the "sqlalchemy" cache region. | |
|
39 | cache_manager.regions['sql_cache_short'] = { | |
|
40 | 'type':'memory', | |
|
41 | 'data_dir':cache_dir, | |
|
42 | 'expire':10, | |
|
43 | 'start_time':start_time | |
|
44 | } | |
|
45 | cache_manager.regions['sql_cache_med'] = { | |
|
46 | 'type':'memory', | |
|
47 | 'data_dir':cache_dir, | |
|
48 | 'expire':360, | |
|
49 | 'start_time':start_time | |
|
50 | } | |
|
51 | cache_manager.regions['sql_cache_long'] = { | |
|
52 | 'type':'file', | |
|
53 | 'data_dir':cache_dir, | |
|
54 | 'expire':3600, | |
|
55 | 'start_time':start_time | |
|
56 | } | |
|
57 | #to use cache use this in query | |
|
58 | #.options(FromCache("sqlalchemy_cache_type", "cachekey")) |
@@ -2,7 +2,7 b'' | |||
|
2 | 2 | # encoding: utf-8 |
|
3 | 3 | # Model for users |
|
4 | 4 | # Copyright (C) 2009-2010 Marcin Kuzminski <marcin@python-works.com> |
|
5 | ||
|
5 | # | |
|
6 | 6 | # This program is free software; you can redistribute it and/or |
|
7 | 7 | # modify it under the terms of the GNU General Public License |
|
8 | 8 | # as published by the Free Software Foundation; version 2 |
@@ -23,10 +23,12 b' Created on April 9, 2010' | |||
|
23 | 23 | Model for users |
|
24 | 24 | @author: marcink |
|
25 | 25 | """ |
|
26 | ||
|
26 | from pylons_app.lib import auth | |
|
27 | from pylons.i18n.translation import _ | |
|
28 | from pylons_app.lib.celerylib import tasks, run_task | |
|
27 | 29 | from pylons_app.model.db import User |
|
28 | 30 | from pylons_app.model.meta import Session |
|
29 | from pylons.i18n.translation import _ | |
|
31 | import traceback | |
|
30 | 32 | import logging |
|
31 | 33 | log = logging.getLogger(__name__) |
|
32 | 34 | |
@@ -54,8 +56,8 b' class UserModel(object):' | |||
|
54 | 56 | |
|
55 | 57 | self.sa.add(new_user) |
|
56 | 58 | self.sa.commit() |
|
57 |
except |
|
|
58 | log.error(e) | |
|
59 | except: | |
|
60 | log.error(traceback.format_exc()) | |
|
59 | 61 | self.sa.rollback() |
|
60 | 62 | raise |
|
61 | 63 | |
@@ -68,8 +70,8 b' class UserModel(object):' | |||
|
68 | 70 | |
|
69 | 71 | self.sa.add(new_user) |
|
70 | 72 | self.sa.commit() |
|
71 |
except |
|
|
72 | log.error(e) | |
|
73 | except: | |
|
74 | log.error(traceback.format_exc()) | |
|
73 | 75 | self.sa.rollback() |
|
74 | 76 | raise |
|
75 | 77 | |
@@ -88,8 +90,8 b' class UserModel(object):' | |||
|
88 | 90 | |
|
89 | 91 | self.sa.add(new_user) |
|
90 | 92 | self.sa.commit() |
|
91 |
except |
|
|
92 | log.error(e) | |
|
93 | except: | |
|
94 | log.error(traceback.format_exc()) | |
|
93 | 95 | self.sa.rollback() |
|
94 | 96 | raise |
|
95 | 97 | |
@@ -109,13 +111,12 b' class UserModel(object):' | |||
|
109 | 111 | |
|
110 | 112 | self.sa.add(new_user) |
|
111 | 113 | self.sa.commit() |
|
112 |
except |
|
|
113 | log.error(e) | |
|
114 | except: | |
|
115 | log.error(traceback.format_exc()) | |
|
114 | 116 | self.sa.rollback() |
|
115 | 117 | raise |
|
116 | 118 | |
|
117 | 119 | def delete(self, id): |
|
118 | ||
|
119 | 120 | try: |
|
120 | 121 | |
|
121 | 122 | user = self.sa.query(User).get(id) |
@@ -125,7 +126,10 b' class UserModel(object):' | |||
|
125 | 126 | " crucial for entire application")) |
|
126 | 127 | self.sa.delete(user) |
|
127 | 128 | self.sa.commit() |
|
128 |
except |
|
|
129 | log.error(e) | |
|
129 | except: | |
|
130 | log.error(traceback.format_exc()) | |
|
130 | 131 | self.sa.rollback() |
|
131 | 132 | raise |
|
133 | ||
|
134 | def reset_password(self, data): | |
|
135 | run_task(tasks.reset_user_password, data['email']) |
@@ -505,6 +505,33 b' div.options a:hover' | |||
|
505 | 505 | |
|
506 | 506 | |
|
507 | 507 | /*ICONS*/ |
|
508 | #header #header-inner #quick li ul li a.journal, | |
|
509 | #header #header-inner #quick li ul li a.journal:hover | |
|
510 | { | |
|
511 | background:url("../images/icons/book.png") no-repeat scroll 4px 9px #FFFFFF; | |
|
512 | margin:0; | |
|
513 | padding:12px 9px 7px 24px; | |
|
514 | width:167px; | |
|
515 | ||
|
516 | } | |
|
517 | #header #header-inner #quick li ul li a.private_repo, | |
|
518 | #header #header-inner #quick li ul li a.private_repo:hover | |
|
519 | { | |
|
520 | background:url("../images/icons/lock.png") no-repeat scroll 4px 9px #FFFFFF; | |
|
521 | margin:0; | |
|
522 | padding:12px 9px 7px 24px; | |
|
523 | width:167px; | |
|
524 | ||
|
525 | } | |
|
526 | #header #header-inner #quick li ul li a.public_repo, | |
|
527 | #header #header-inner #quick li ul li a.public_repo:hover | |
|
528 | { | |
|
529 | background:url("../images/icons/lock_open.png") no-repeat scroll 4px 9px #FFFFFF; | |
|
530 | margin:0; | |
|
531 | padding:12px 9px 7px 24px; | |
|
532 | width:167px; | |
|
533 | ||
|
534 | } | |
|
508 | 535 | |
|
509 | 536 | #header #header-inner #quick li ul li a.repos, |
|
510 | 537 | #header #header-inner #quick li ul li a.repos:hover |
@@ -2877,7 +2904,7 b' div.form div.fields div.buttons input' | |||
|
2877 | 2904 | #register div.form div.fields div.buttons |
|
2878 | 2905 | { |
|
2879 | 2906 | margin: 0; |
|
2880 |
padding: 10px 0 0 |
|
|
2907 | padding: 10px 0 0 114px; | |
|
2881 | 2908 | clear: both; |
|
2882 | 2909 | overflow: hidden; |
|
2883 | 2910 | border-top: 1px solid #DDDDDD; |
@@ -11,8 +11,8 b'' | |||
|
11 | 11 | |
|
12 | 12 | %for cnt,l in enumerate(c.users_log): |
|
13 | 13 | <tr class="parity${cnt%2}"> |
|
14 | <td>${l.user.username}</td> | |
|
15 | <td>${l.repository}</td> | |
|
14 | <td>${h.link_to(l.user.username,h.url('edit_user', id=l.user.user_id))}</td> | |
|
15 | <td>${h.link_to(l.repository,h.url('summary_home',repo_name=l.repository))}</td> | |
|
16 | 16 | <td>${l.action}</td> |
|
17 | 17 | <td>${l.action_date}</td> |
|
18 | 18 | <td>${l.user_ip}</td> |
@@ -29,7 +29,7 b'' | |||
|
29 | 29 | |
|
30 | 30 | <div class="field"> |
|
31 | 31 | <div class="label"> |
|
32 |
<label for="default_perm">${_(' |
|
|
32 | <label for="default_perm">${_('Repository permission')}:</label> | |
|
33 | 33 | </div> |
|
34 | 34 | <div class="select"> |
|
35 | 35 | ${h.select('default_perm','',c.perms_choices)} |
@@ -51,7 +51,7 b'' | |||
|
51 | 51 | </div> |
|
52 | 52 | <div class="field"> |
|
53 | 53 | <div class="label"> |
|
54 |
<label for="default_create">${_(' |
|
|
54 | <label for="default_create">${_('Repository creation')}:</label> | |
|
55 | 55 | </div> |
|
56 | 56 | <div class="select"> |
|
57 | 57 | ${h.select('default_create','',c.create_choices)} |
@@ -48,6 +48,31 b'' | |||
|
48 | 48 | </div> |
|
49 | 49 | ${h.end_form()} |
|
50 | 50 |
|
|
51 | <h3>${_('Whoosh indexing')}</h3> | |
|
52 | ${h.form(url('admin_setting', setting_id='whoosh'),method='put')} | |
|
53 | <div class="form"> | |
|
54 | <!-- fields --> | |
|
55 | ||
|
56 | <div class="fields"> | |
|
57 | <div class="field"> | |
|
58 | <div class="label label-checkbox"> | |
|
59 | <label for="destroy">${_('index build option')}:</label> | |
|
60 | </div> | |
|
61 | <div class="checkboxes"> | |
|
62 | <div class="checkbox"> | |
|
63 | ${h.checkbox('full_index',True)} | |
|
64 | <label for="checkbox-1">${_('build from scratch')}</label> | |
|
65 | </div> | |
|
66 | </div> | |
|
67 | </div> | |
|
68 | ||
|
69 | <div class="buttons"> | |
|
70 | ${h.submit('reindex','reindex',class_="ui-button ui-widget ui-state-default ui-corner-all")} | |
|
71 | </div> | |
|
72 | </div> | |
|
73 | </div> | |
|
74 | ${h.end_form()} | |
|
75 | ||
|
51 | 76 | <h3>${_('Global application settings')}</h3> |
|
52 | 77 | ${h.form(url('admin_setting', setting_id='global'),method='put')} |
|
53 | 78 | <div class="form"> |
@@ -97,8 +97,12 b'' | |||
|
97 | 97 | <span>↓</span> |
|
98 | 98 | </a> |
|
99 | 99 | <ul class="repo_switcher"> |
|
100 | %for repo in c.repo_switcher_list: | |
|
101 | <li>${h.link_to(repo,h.url('summary_home',repo_name=repo))}</li> | |
|
100 | %for repo,private in c.repo_switcher_list: | |
|
101 | %if private: | |
|
102 | <li>${h.link_to(repo,h.url('summary_home',repo_name=repo),class_="private_repo")}</li> | |
|
103 | %else: | |
|
104 | <li>${h.link_to(repo,h.url('summary_home',repo_name=repo),class_="public_repo")}</li> | |
|
105 | %endif | |
|
102 | 106 | %endfor |
|
103 | 107 | </ul> |
|
104 | 108 | </li> |
@@ -203,6 +207,7 b'' | |||
|
203 | 207 | <span>${_('Admin')}</span> |
|
204 | 208 | </a> |
|
205 | 209 | <ul> |
|
210 | <li>${h.link_to(_('journal'),h.url('admin_home'),class_='journal')}</li> | |
|
206 | 211 | <li>${h.link_to(_('repositories'),h.url('repos'),class_='repos')}</li> |
|
207 | 212 | <li>${h.link_to(_('users'),h.url('users'),class_='users')}</li> |
|
208 | 213 | <li>${h.link_to(_('permissions'),h.url('edit_permission',id='default'),class_='permissions')}</li> |
@@ -23,18 +23,22 b'' | |||
|
23 | 23 | </div> |
|
24 | 24 | <div class="table"> |
|
25 | 25 | <div id="files_data"> |
|
26 |
<h |
|
|
26 | <h3 class="files_location">${_('Location')}: ${h.files_breadcrumbs(c.repo_name,c.cur_rev,c.file.path)}</h3> | |
|
27 | 27 | <dl class="overview"> |
|
28 | 28 | <dt>${_('Last revision')}</dt> |
|
29 | 29 | <dd>${h.link_to("r%s:%s" % (c.file.last_changeset.revision,c.file.last_changeset._short), |
|
30 | 30 | h.url('files_annotate_home',repo_name=c.repo_name,revision=c.file.last_changeset._short,f_path=c.f_path))} </dd> |
|
31 | 31 | <dt>${_('Size')}</dt> |
|
32 | 32 | <dd>${h.format_byte_size(c.file.size,binary=True)}</dd> |
|
33 | <dt>${_('Mimetype')}</dt> | |
|
34 | <dd>${c.file.mimetype}</dd> | |
|
33 | 35 | <dt>${_('Options')}</dt> |
|
34 | 36 | <dd>${h.link_to(_('show source'), |
|
35 | 37 | h.url('files_home',repo_name=c.repo_name,revision=c.cur_rev,f_path=c.f_path))} |
|
38 | / ${h.link_to(_('show as raw'), | |
|
39 | h.url('files_raw_home',repo_name=c.repo_name,revision=c.cur_rev,f_path=c.f_path))} | |
|
36 | 40 | / ${h.link_to(_('download as raw'), |
|
37 | h.url('files_raw_home',repo_name=c.repo_name,revision=c.cur_rev,f_path=c.f_path))} | |
|
41 | h.url('files_rawfile_home',repo_name=c.repo_name,revision=c.cur_rev,f_path=c.f_path))} | |
|
38 | 42 | </dd> |
|
39 | 43 | </dl> |
|
40 | 44 | <div id="body" class="codeblock"> |
@@ -43,7 +47,12 b'' | |||
|
43 | 47 | <div class="commit">"${c.file_msg}"</div> |
|
44 | 48 | </div> |
|
45 | 49 | <div class="code-body"> |
|
50 | % if c.file.size < c.file_size_limit: | |
|
46 | 51 | ${h.pygmentize_annotation(c.file,linenos=True,anchorlinenos=True,lineanchors='S',cssclass="code-highlight")} |
|
52 | %else: | |
|
53 | ${_('File is to big to display')} ${h.link_to(_('show as raw'), | |
|
54 | h.url('files_raw_home',repo_name=c.repo_name,revision=c.cur_rev,f_path=c.f_path))} | |
|
55 | %endif | |
|
47 | 56 | </div> |
|
48 | 57 | </div> |
|
49 | 58 | </div> |
@@ -23,30 +23,37 b'' | |||
|
23 | 23 | <tr> |
|
24 | 24 | <th>${_('Name')}</th> |
|
25 | 25 | <th>${_('Size')}</th> |
|
26 | <th>${_('Mimetype')}</th> | |
|
26 | 27 | <th>${_('Revision')}</th> |
|
27 | 28 | <th>${_('Last modified')}</th> |
|
28 | 29 | <th>${_('Last commiter')}</th> |
|
29 | 30 | </tr> |
|
30 | 31 | </thead> |
|
32 | ||
|
33 | % if c.files_list.parent: | |
|
31 | 34 |
|
|
32 | 35 |
|
|
33 | % if c.files_list.parent: | |
|
34 | 36 |
|
|
35 | %endif | |
|
36 | 37 |
|
|
37 | 38 |
|
|
38 | 39 |
|
|
39 | 40 |
|
|
40 | 41 |
|
|
42 | <td></td> | |
|
41 | 43 |
|
|
44 | %endif | |
|
45 | ||
|
42 | 46 | %for cnt,node in enumerate(c.files_list,1): |
|
43 | 47 | <tr class="parity${cnt%2}"> |
|
44 | 48 | <td> |
|
45 | 49 | ${h.link_to(node.name,h.url('files_home',repo_name=c.repo_name,revision=c.cur_rev,f_path=node.path),class_=file_class(node))} |
|
46 | 50 | </td> |
|
47 | 51 | <td> |
|
52 | ${h.format_byte_size(node.size,binary=True)} | |
|
53 | </td> | |
|
54 | <td> | |
|
48 | 55 |
|
|
49 | ${h.format_byte_size(node.size,binary=True)} | |
|
56 | ${node.mimetype} | |
|
50 | 57 |
|
|
51 | 58 | </td> |
|
52 | 59 | <td> |
@@ -6,11 +6,15 b'' | |||
|
6 | 6 | </dd> |
|
7 | 7 | <dt>${_('Size')}</dt> |
|
8 | 8 | <dd>${h.format_byte_size(c.files_list.size,binary=True)}</dd> |
|
9 | <dt>${_('Mimetype')}</dt> | |
|
10 | <dd>${c.files_list.mimetype}</dd> | |
|
9 | 11 | <dt>${_('Options')}</dt> |
|
10 | 12 | <dd>${h.link_to(_('show annotation'), |
|
11 | 13 |
h.url('files_annotate_home',repo_name=c.repo_name,revision=c.cur_rev,f_path=c.f_path))} |
|
14 | / ${h.link_to(_('show as raw'), | |
|
15 | h.url('files_raw_home',repo_name=c.repo_name,revision=c.cur_rev,f_path=c.f_path))} | |
|
12 | 16 | / ${h.link_to(_('download as raw'), |
|
13 | h.url('files_raw_home',repo_name=c.repo_name,revision=c.cur_rev,f_path=c.f_path))} | |
|
17 | h.url('files_rawfile_home',repo_name=c.repo_name,revision=c.cur_rev,f_path=c.f_path))} | |
|
14 | 18 | </dd> |
|
15 | 19 | <dt>${_('History')}</dt> |
|
16 | 20 | <dd> |
@@ -32,7 +36,12 b'' | |||
|
32 | 36 | <div class="commit">"${c.files_list.last_changeset.message}"</div> |
|
33 | 37 | </div> |
|
34 | 38 | <div class="code-body"> |
|
39 | % if c.files_list.size < c.file_size_limit: | |
|
35 | 40 | ${h.pygmentize(c.files_list,linenos=True,anchorlinenos=True,lineanchors='S',cssclass="code-highlight")} |
|
41 | %else: | |
|
42 | ${_('File is to big to display')} ${h.link_to(_('show as raw'), | |
|
43 | h.url('files_raw_home',repo_name=c.repo_name,revision=c.cur_rev,f_path=c.f_path))} | |
|
44 | %endif | |
|
36 | 45 | </div> |
|
37 | 46 | </div> |
|
38 | 47 |
@@ -60,7 +60,7 b'' | |||
|
60 | 60 | <!-- end fields --> |
|
61 | 61 | <!-- links --> |
|
62 | 62 | <div class="links"> |
|
63 |
${h.link_to(_('Forgot your password ?'),h.url(' |
|
|
63 | ${h.link_to(_('Forgot your password ?'),h.url('reset_password'))} | |
|
64 | 64 | %if h.HasPermissionAny('hg.admin', 'hg.register.auto_activate', 'hg.register.manual_activate')(): |
|
65 | 65 | / |
|
66 | 66 | ${h.link_to(_("Don't have an account ?"),h.url('register'))} |
@@ -46,7 +46,7 b'' | |||
|
46 | 46 | h.url('files_home',repo_name=sr['repository'],revision='tip',f_path=sr['f_path']))}</div> |
|
47 | 47 | </div> |
|
48 | 48 | <div class="code-body"> |
|
49 | <pre>${h.literal(sr['content_short'])}</pre> | |
|
49 | <pre>${h.literal(sr['content_short_hl'])}</pre> | |
|
50 | 50 | </div> |
|
51 | 51 | </div> |
|
52 | 52 | </div> |
@@ -61,9 +61,11 b'' | |||
|
61 | 61 | |
|
62 | 62 | %endif |
|
63 | 63 | %endfor |
|
64 | ||
|
65 | ||
|
66 | ||
|
64 | %if c.cur_query: | |
|
65 | <div class="pagination-wh pagination-left"> | |
|
66 | ${c.formated_results.pager('$link_previous ~2~ $link_next')} | |
|
67 | </div> | |
|
68 | %endif | |
|
67 | 69 | </div> |
|
68 | 70 | |
|
69 | 71 | </%def> |
@@ -13,7 +13,7 b'' | |||
|
13 | 13 | </tr> |
|
14 | 14 | %for cnt,cs in enumerate(c.repo_changesets): |
|
15 | 15 | <tr class="parity${cnt%2}"> |
|
16 | <td>${h.age(cs._ctx.date())}</td> | |
|
16 | <td>${h.age(cs._ctx.date())} - ${h.rfc822date_notz(cs._ctx.date())} </td> | |
|
17 | 17 | <td title="${cs.author}">${h.person(cs.author)}</td> |
|
18 | 18 | <td>r${cs.revision}:${cs.raw_id}</td> |
|
19 | 19 | <td> |
@@ -77,6 +77,8 b' E.onDOMReady(function(e){' | |||
|
77 | 77 | </div> |
|
78 | 78 | <div class="input-short"> |
|
79 | 79 | ${h.age(c.repo_info.last_change)} - ${h.rfc822date(c.repo_info.last_change)} |
|
80 | ${_('by')} ${h.get_changeset_safe(c.repo_info,'tip').author} | |
|
81 | ||
|
80 | 82 | </div> |
|
81 | 83 | </div> |
|
82 | 84 | |
@@ -121,64 +123,114 b' E.onDOMReady(function(e){' | |||
|
121 | 123 | <div class="box box-right" style="min-height:455px"> |
|
122 | 124 | <!-- box / title --> |
|
123 | 125 | <div class="title"> |
|
124 |
<h5>${_(' |
|
|
126 | <h5>${_('Commit activity by day / author')}</h5> | |
|
125 | 127 | </div> |
|
126 | 128 | |
|
127 | 129 | <div class="table"> |
|
128 | 130 | <div id="commit_history" style="width:560px;height:300px;float:left"></div> |
|
129 | <div id="legend_data"> | |
|
131 | <div style="clear: both;height: 10px"></div> | |
|
132 | <div id="overview" style="width:560px;height:100px;float:left"></div> | |
|
133 | ||
|
134 | <div id="legend_data" style="clear:both;margin-top:10px;"> | |
|
130 | 135 | <div id="legend_container"></div> |
|
131 | 136 | <div id="legend_choices"> |
|
132 | 137 | <table id="legend_choices_tables" style="font-size:smaller;color:#545454"></table> |
|
133 | 138 | </div> |
|
134 | 139 | </div> |
|
135 | 140 | <script type="text/javascript"> |
|
136 | ||
|
137 | (function () { | |
|
138 | var datasets = {${c.commit_data|n}}; | |
|
139 | var i = 0; | |
|
141 | /** | |
|
142 | * Plots summary graph | |
|
143 | * | |
|
144 | * @class SummaryPlot | |
|
145 | * @param {from} initial from for detailed graph | |
|
146 | * @param {to} initial to for detailed graph | |
|
147 | * @param {dataset} | |
|
148 | * @param {overview_dataset} | |
|
149 | */ | |
|
150 | function SummaryPlot(from,to,dataset,overview_dataset) { | |
|
151 | var initial_ranges = { | |
|
152 | "xaxis":{ | |
|
153 | "from":from, | |
|
154 | "to":to, | |
|
155 | }, | |
|
156 | }; | |
|
157 | var dataset = dataset; | |
|
158 | var overview_dataset = [overview_dataset]; | |
|
140 | 159 | var choiceContainer = YAHOO.util.Dom.get("legend_choices"); |
|
141 | 160 | var choiceContainerTable = YAHOO.util.Dom.get("legend_choices_tables"); |
|
142 | for(var key in datasets) { | |
|
143 | datasets[key].color = i; | |
|
144 |
|
|
|
145 | choiceContainerTable.innerHTML += '<tr><td>'+ | |
|
146 | '<input type="checkbox" name="' + key +'" checked="checked" />' | |
|
147 | +datasets[key].label+ | |
|
148 | '</td></tr>'; | |
|
161 | var plotContainer = YAHOO.util.Dom.get('commit_history'); | |
|
162 | var overviewContainer = YAHOO.util.Dom.get('overview'); | |
|
163 | ||
|
164 | var plot_options = { | |
|
165 | bars: {show:true,align:'center',lineWidth:4}, | |
|
166 | legend: {show:true, container:"legend_container"}, | |
|
167 | points: {show:true,radius:0,fill:false}, | |
|
168 | yaxis: {tickDecimals:0,}, | |
|
169 | xaxis: { | |
|
170 | mode: "time", | |
|
171 | timeformat: "%d/%m", | |
|
172 | min:from, | |
|
173 | max:to, | |
|
174 | }, | |
|
175 | grid: { | |
|
176 | hoverable: true, | |
|
177 | clickable: true, | |
|
178 | autoHighlight:true, | |
|
179 | color: "#999" | |
|
180 | }, | |
|
181 | //selection: {mode: "x"} | |
|
182 | }; | |
|
183 | var overview_options = { | |
|
184 | legend:{show:false}, | |
|
185 | bars: {show:true,barWidth: 2,}, | |
|
186 | shadowSize: 0, | |
|
187 | xaxis: {mode: "time", timeformat: "%d/%m/%y",}, | |
|
188 | yaxis: {ticks: 3, min: 0,}, | |
|
189 | grid: {color: "#999",}, | |
|
190 | selection: {mode: "x"} | |
|
149 | 191 |
|
|
150 | 192 | |
|
151 | ||
|
152 | function plotAccordingToChoices() { | |
|
153 | var data = []; | |
|
154 | ||
|
155 | var inputs = choiceContainer.getElementsByTagName("input"); | |
|
156 | for(var i=0; i<inputs.length; i++) { | |
|
157 | var key = inputs[i].name; | |
|
158 | if (key && datasets[key]){ | |
|
159 | if(!inputs[i].checked){ | |
|
160 | data.push({label:key,data:[[0,1],]}); | |
|
193 | /** | |
|
194 | *get dummy data needed in few places | |
|
195 | */ | |
|
196 | function getDummyData(label){ | |
|
197 | return {"label":label, | |
|
198 | "data":[{"time":0, | |
|
199 | "commits":0, | |
|
200 | "added":0, | |
|
201 | "changed":0, | |
|
202 | "removed":0, | |
|
203 | }], | |
|
204 | "schema":["commits"], | |
|
205 | "color":'#ffffff', | |
|
161 | 206 |
|
|
162 | else{ | |
|
163 | data.push(datasets[key]); | |
|
164 | 207 | } |
|
165 | 208 | |
|
209 | /** | |
|
210 | * generate checkboxes accordindly to data | |
|
211 | * @param keys | |
|
212 | * @returns | |
|
213 | */ | |
|
214 | function generateCheckboxes(data) { | |
|
215 | //append checkboxes | |
|
216 | var i = 0; | |
|
217 | choiceContainerTable.innerHTML = ''; | |
|
218 | for(var pos in data) { | |
|
219 | ||
|
220 | data[pos].color = i; | |
|
221 | i++; | |
|
222 | if(data[pos].label != ''){ | |
|
223 | choiceContainerTable.innerHTML += '<tr><td>'+ | |
|
224 | '<input type="checkbox" name="' + data[pos].label +'" checked="checked" />' | |
|
225 | +data[pos].label+ | |
|
226 | '</td></tr>'; | |
|
227 | } | |
|
228 | } | |
|
166 | 229 |
|
|
167 | 230 |
|
|
168 | }; | |
|
169 | ||
|
170 | if (data.length > 0){ | |
|
171 | ||
|
172 | var plot = YAHOO.widget.Flot("commit_history", data, | |
|
173 | { bars: { show: true, align:'center',lineWidth:4 }, | |
|
174 | points: { show: true, radius:0,fill:true }, | |
|
175 | legend:{show:true, container:"legend_container"}, | |
|
176 | selection: { mode: "xy" }, | |
|
177 | yaxis: {tickDecimals:0}, | |
|
178 | xaxis: { mode: "time", timeformat: "%d",tickSize:[1, "day"],min:${c.ts_min},max:${c.ts_max} }, | |
|
179 | grid: { hoverable: true, clickable: true,autoHighlight:true }, | |
|
180 | }); | |
|
181 | ||
|
231 | /** | |
|
232 | * ToolTip show | |
|
233 | */ | |
|
182 | 234 |
|
|
183 | 235 |
|
|
184 | 236 |
|
@@ -199,8 +251,143 b' E.onDOMReady(function(e){' | |||
|
199 | 251 |
|
|
200 | 252 |
|
|
201 | 253 | |
|
254 | /** | |
|
255 | * This function will detect if selected period has some changesets for this user | |
|
256 | if it does this data is then pushed for displaying | |
|
257 | Additionally it will only display users that are selected by the checkbox | |
|
258 | */ | |
|
259 | function getDataAccordingToRanges(ranges) { | |
|
260 | ||
|
261 | var data = []; | |
|
262 | var keys = []; | |
|
263 | for(var key in dataset){ | |
|
264 | var push = false; | |
|
265 | //method1 slow !! | |
|
266 | ///* | |
|
267 | for(var ds in dataset[key].data){ | |
|
268 | commit_data = dataset[key].data[ds]; | |
|
269 | //console.log(key); | |
|
270 | //console.log(new Date(commit_data.time*1000)); | |
|
271 | //console.log(new Date(ranges.xaxis.from*1000)); | |
|
272 | //console.log(new Date(ranges.xaxis.to*1000)); | |
|
273 | if (commit_data.time >= ranges.xaxis.from && commit_data.time <= ranges.xaxis.to){ | |
|
274 | push = true; | |
|
275 | break; | |
|
276 | } | |
|
277 | } | |
|
278 | //*/ | |
|
279 | /*//method2 sorted commit data !!! | |
|
280 | var first_commit = dataset[key].data[0].time; | |
|
281 | var last_commit = dataset[key].data[dataset[key].data.length-1].time; | |
|
282 | ||
|
283 | console.log(first_commit); | |
|
284 | console.log(last_commit); | |
|
285 | ||
|
286 | if (first_commit >= ranges.xaxis.from && last_commit <= ranges.xaxis.to){ | |
|
287 | push = true; | |
|
288 | } | |
|
289 | */ | |
|
290 | if(push){ | |
|
291 | data.push(dataset[key]); | |
|
292 | } | |
|
293 | } | |
|
294 | if(data.length >= 1){ | |
|
295 | return data; | |
|
296 | } | |
|
297 | else{ | |
|
298 | //just return dummy data for graph to plot itself | |
|
299 | return [getDummyData('')]; | |
|
300 | } | |
|
301 | ||
|
302 | } | |
|
303 | ||
|
304 | /** | |
|
305 | * redraw using new checkbox data | |
|
306 | */ | |
|
307 | function plotchoiced(e,args){ | |
|
308 | var cur_data = args[0]; | |
|
309 | var cur_ranges = args[1]; | |
|
310 | ||
|
311 | var new_data = []; | |
|
312 | var inputs = choiceContainer.getElementsByTagName("input"); | |
|
313 | ||
|
314 | //show only checked labels | |
|
315 | for(var i=0; i<inputs.length; i++) { | |
|
316 | var checkbox_key = inputs[i].name; | |
|
317 | ||
|
318 | if(inputs[i].checked){ | |
|
319 | for(var d in cur_data){ | |
|
320 | if(cur_data[d].label == checkbox_key){ | |
|
321 | new_data.push(cur_data[d]); | |
|
322 | } | |
|
323 | } | |
|
324 | } | |
|
325 | else{ | |
|
326 | //push dummy data to not hide the label | |
|
327 | new_data.push(getDummyData(checkbox_key)); | |
|
328 | } | |
|
329 | } | |
|
330 | ||
|
331 | var new_options = YAHOO.lang.merge(plot_options, { | |
|
332 | xaxis: { | |
|
333 | min: cur_ranges.xaxis.from, | |
|
334 | max: cur_ranges.xaxis.to, | |
|
335 | mode:"time", | |
|
336 | timeformat: "%d/%m", | |
|
337 | } | |
|
338 | }); | |
|
339 | if (!new_data){ | |
|
340 | new_data = [[0,1]]; | |
|
341 | } | |
|
342 | // do the zooming | |
|
343 | plot = YAHOO.widget.Flot(plotContainer, new_data, new_options); | |
|
344 | ||
|
345 | plot.subscribe("plotselected", plotselected); | |
|
346 | ||
|
347 | //resubscribe plothover | |
|
348 | plot.subscribe("plothover", plothover); | |
|
349 | ||
|
350 | // don't fire event on the overview to prevent eternal loop | |
|
351 | overview.setSelection(cur_ranges, true); | |
|
352 | ||
|
353 | } | |
|
354 | ||
|
355 | /** | |
|
356 | * plot only selected items from overview | |
|
357 | * @param ranges | |
|
358 | * @returns | |
|
359 | */ | |
|
360 | function plotselected(ranges,cur_data) { | |
|
361 | //updates the data for new plot | |
|
362 | data = getDataAccordingToRanges(ranges); | |
|
363 | generateCheckboxes(data); | |
|
364 | ||
|
365 | var new_options = YAHOO.lang.merge(plot_options, { | |
|
366 | xaxis: { | |
|
367 | min: ranges.xaxis.from, | |
|
368 | max: ranges.xaxis.to, | |
|
369 | mode:"time", | |
|
370 | timeformat: "%d/%m", | |
|
371 | } | |
|
372 | }); | |
|
373 | // do the zooming | |
|
374 | plot = YAHOO.widget.Flot(plotContainer, data, new_options); | |
|
375 | ||
|
376 | plot.subscribe("plotselected", plotselected); | |
|
377 | ||
|
378 | //resubscribe plothover | |
|
379 | plot.subscribe("plothover", plothover); | |
|
380 | ||
|
381 | // don't fire event on the overview to prevent eternal loop | |
|
382 | overview.setSelection(ranges, true); | |
|
383 | ||
|
384 | //resubscribe choiced | |
|
385 | YAHOO.util.Event.on(choiceContainer.getElementsByTagName("input"), "click", plotchoiced, [data, ranges]); | |
|
386 | } | |
|
387 | ||
|
202 | 388 |
|
|
203 | plot.subscribe("plothover", function (o) { | |
|
389 | ||
|
390 | function plothover(o) { | |
|
204 | 391 |
|
|
205 | 392 |
|
|
206 | 393 |
|
@@ -221,10 +408,10 b' E.onDOMReady(function(e){' | |||
|
221 | 408 |
|
|
222 | 409 |
|
|
223 | 410 |
|
|
224 | var fd = d.getFullYear()+'-'+(d.getMonth()+1)+'-'+d.getDate(); | |
|
411 | var fd = d.toDateString() | |
|
225 | 412 |
|
|
226 | 413 |
|
|
227 |
|
|
|
414 | var cur_data = dataset[item.series.label].data[item.dataIndex]; | |
|
228 | 415 |
|
|
229 | 416 |
|
|
230 | 417 |
|
@@ -256,15 +443,35 b' E.onDOMReady(function(e){' | |||
|
256 | 443 |
|
|
257 | 444 |
|
|
258 | 445 |
|
|
259 | }); | |
|
260 | ||
|
261 | } | |
|
262 | 446 | } |
|
263 | 447 | |
|
264 | YAHOO.util.Event.on(choiceContainer.getElementsByTagName("input"), "click", plotAccordingToChoices); | |
|
448 | /** | |
|
449 | * MAIN EXECUTION | |
|
450 | */ | |
|
451 | ||
|
452 | var data = getDataAccordingToRanges(initial_ranges); | |
|
453 | generateCheckboxes(data); | |
|
454 | ||
|
455 | //main plot | |
|
456 | var plot = YAHOO.widget.Flot(plotContainer,data,plot_options); | |
|
457 | ||
|
458 | //overview | |
|
459 | var overview = YAHOO.widget.Flot(overviewContainer, overview_dataset, overview_options); | |
|
265 | 460 | |
|
266 | plotAccordingToChoices(); | |
|
267 | })(); | |
|
461 | //show initial selection on overview | |
|
462 | overview.setSelection(initial_ranges); | |
|
463 | ||
|
464 | plot.subscribe("plotselected", plotselected); | |
|
465 | ||
|
466 | overview.subscribe("plotselected", function (ranges) { | |
|
467 | plot.setSelection(ranges); | |
|
468 | }); | |
|
469 | ||
|
470 | plot.subscribe("plothover", plothover); | |
|
471 | ||
|
472 | YAHOO.util.Event.on(choiceContainer.getElementsByTagName("input"), "click", plotchoiced, [data, initial_ranges]); | |
|
473 | } | |
|
474 | SummaryPlot(${c.ts_min},${c.ts_max},${c.commit_data|n},${c.overview_data|n}); | |
|
268 | 475 |
|
|
269 | 476 | |
|
270 | 477 | </div> |
@@ -16,12 +16,18 b' from routes.util import URLGenerator' | |||
|
16 | 16 | from webtest import TestApp |
|
17 | 17 | import os |
|
18 | 18 | from pylons_app.model import meta |
|
19 | import logging | |
|
20 | ||
|
21 | ||
|
22 | log = logging.getLogger(__name__) | |
|
23 | ||
|
19 | 24 | import pylons.test |
|
20 | 25 | |
|
21 | 26 | __all__ = ['environ', 'url', 'TestController'] |
|
22 | 27 | |
|
23 | 28 | # Invoke websetup with the current config file |
|
24 |
SetupCommand('setup-app').run([ |
|
|
29 | #SetupCommand('setup-app').run([config_file]) | |
|
30 | ||
|
25 | 31 | |
|
26 | 32 | environ = {} |
|
27 | 33 | |
@@ -33,13 +39,13 b' class TestController(TestCase):' | |||
|
33 | 39 | self.app = TestApp(wsgiapp) |
|
34 | 40 | url._push_object(URLGenerator(config['routes.map'], environ)) |
|
35 | 41 | self.sa = meta.Session |
|
42 | ||
|
36 | 43 | TestCase.__init__(self, *args, **kwargs) |
|
37 | 44 | |
|
38 | ||
|
39 | def log_user(self): | |
|
45 | def log_user(self, username='test_admin', password='test'): | |
|
40 | 46 | response = self.app.post(url(controller='login', action='index'), |
|
41 |
{'username': |
|
|
42 |
'password': |
|
|
47 | {'username':username, | |
|
48 | 'password':password}) | |
|
43 | 49 | assert response.status == '302 Found', 'Wrong response code from login got %s' % response.status |
|
44 | 50 | assert response.session['hg_app_user'].username == 'test_admin', 'wrong logged in user' |
|
45 |
return response.follow() |
|
|
51 | return response.follow() |
@@ -3,5 +3,7 b' from pylons_app.tests import *' | |||
|
3 | 3 | class TestAdminController(TestController): |
|
4 | 4 | |
|
5 | 5 | def test_index(self): |
|
6 | self.log_user() | |
|
6 | 7 | response = self.app.get(url(controller='admin/admin', action='index')) |
|
8 | assert 'Admin dashboard - journal' in response.body,'No proper title in dashboard' | |
|
7 | 9 | # Test response... |
@@ -1,4 +1,5 b'' | |||
|
1 | 1 | from pylons_app.tests import * |
|
2 | from pylons_app.model.db import User | |
|
2 | 3 | |
|
3 | 4 | class TestSettingsController(TestController): |
|
4 | 5 | |
@@ -41,3 +42,75 b' class TestSettingsController(TestControl' | |||
|
41 | 42 | |
|
42 | 43 | def test_edit_as_xml(self): |
|
43 | 44 | response = self.app.get(url('formatted_admin_edit_setting', setting_id=1, format='xml')) |
|
45 | ||
|
46 | def test_my_account(self): | |
|
47 | self.log_user() | |
|
48 | response = self.app.get(url('admin_settings_my_account')) | |
|
49 | print response | |
|
50 | assert 'value="test_admin' in response.body | |
|
51 | ||
|
52 | ||
|
53 | ||
|
54 | def test_my_account_update(self): | |
|
55 | self.log_user() | |
|
56 | new_email = 'new@mail.pl' | |
|
57 | response = self.app.post(url('admin_settings_my_account_update'), params=dict( | |
|
58 | _method='put', | |
|
59 | username='test_admin', | |
|
60 | new_password='test', | |
|
61 | password='', | |
|
62 | name='NewName', | |
|
63 | lastname='NewLastname', | |
|
64 | email=new_email,)) | |
|
65 | response.follow() | |
|
66 | print response | |
|
67 | ||
|
68 | print 'x' * 100 | |
|
69 | print response.session | |
|
70 | assert 'Your account was updated succesfully' in response.session['flash'][0][1], 'no flash message about success of change' | |
|
71 | user = self.sa.query(User).filter(User.username == 'test_admin').one() | |
|
72 | assert user.email == new_email , 'incorrect user email after update got %s vs %s' % (user.email, new_email) | |
|
73 | ||
|
74 | def test_my_account_update_own_email_ok(self): | |
|
75 | self.log_user() | |
|
76 | ||
|
77 | new_email = 'new@mail.pl' | |
|
78 | response = self.app.post(url('admin_settings_my_account_update'), params=dict( | |
|
79 | _method='put', | |
|
80 | username='test_admin', | |
|
81 | new_password='test', | |
|
82 | name='NewName', | |
|
83 | lastname='NewLastname', | |
|
84 | email=new_email,)) | |
|
85 | print response | |
|
86 | ||
|
87 | def test_my_account_update_err_email_exists(self): | |
|
88 | self.log_user() | |
|
89 | ||
|
90 | new_email = 'test_regular@mail.com'#already exisitn email | |
|
91 | response = self.app.post(url('admin_settings_my_account_update'), params=dict( | |
|
92 | _method='put', | |
|
93 | username='test_admin', | |
|
94 | new_password='test', | |
|
95 | name='NewName', | |
|
96 | lastname='NewLastname', | |
|
97 | email=new_email,)) | |
|
98 | print response | |
|
99 | ||
|
100 | assert 'That e-mail address is already taken' in response.body, 'Missing error message about existing email' | |
|
101 | ||
|
102 | ||
|
103 | def test_my_account_update_err(self): | |
|
104 | self.log_user() | |
|
105 | ||
|
106 | new_email = 'newmail.pl' | |
|
107 | response = self.app.post(url('admin_settings_my_account_update'), params=dict( | |
|
108 | _method='put', | |
|
109 | username='test_regular2', | |
|
110 | new_password='test', | |
|
111 | name='NewName', | |
|
112 | lastname='NewLastname', | |
|
113 | email=new_email,)) | |
|
114 | print response | |
|
115 | assert 'An email address must contain a single @' in response.body, 'Missing error message about wrong email' | |
|
116 | assert 'This username already exists' in response.body, 'Missing error message about existing user' |
@@ -3,5 +3,6 b' from pylons_app.tests import *' | |||
|
3 | 3 | class TestBranchesController(TestController): |
|
4 | 4 | |
|
5 | 5 | def test_index(self): |
|
6 | self.log_user() | |
|
6 | 7 | response = self.app.get(url(controller='branches', action='index',repo_name='vcs_test')) |
|
7 | 8 | # Test response... |
@@ -3,5 +3,6 b' from pylons_app.tests import *' | |||
|
3 | 3 | class TestChangelogController(TestController): |
|
4 | 4 | |
|
5 | 5 | def test_index(self): |
|
6 | self.log_user() | |
|
6 | 7 | response = self.app.get(url(controller='changelog', action='index',repo_name='vcs_test')) |
|
7 | 8 | # Test response... |
@@ -3,11 +3,13 b' from pylons_app.tests import *' | |||
|
3 | 3 | class TestFeedController(TestController): |
|
4 | 4 | |
|
5 | 5 | def test_rss(self): |
|
6 | self.log_user() | |
|
6 | 7 | response = self.app.get(url(controller='feed', action='rss', |
|
7 | 8 | repo_name='vcs_test')) |
|
8 | 9 | # Test response... |
|
9 | 10 | |
|
10 | 11 | def test_atom(self): |
|
12 | self.log_user() | |
|
11 | 13 | response = self.app.get(url(controller='feed', action='atom', |
|
12 | 14 | repo_name='vcs_test')) |
|
13 | 15 | # Test response... No newline at end of file |
@@ -3,6 +3,7 b' from pylons_app.tests import *' | |||
|
3 | 3 | class TestFilesController(TestController): |
|
4 | 4 | |
|
5 | 5 | def test_index(self): |
|
6 | self.log_user() | |
|
6 | 7 | response = self.app.get(url(controller='files', action='index', |
|
7 | 8 | repo_name='vcs_test', |
|
8 | 9 | revision='tip', |
@@ -82,9 +82,9 b' class TestLoginController(TestController' | |||
|
82 | 82 | |
|
83 | 83 | |
|
84 | 84 | def test_register_ok(self): |
|
85 |
username = 'test_regular |
|
|
85 | username = 'test_regular4' | |
|
86 | 86 | password = 'qweqwe' |
|
87 |
email = ' |
|
|
87 | email = 'marcin@test.com' | |
|
88 | 88 | name = 'testname' |
|
89 | 89 | lastname = 'testlastname' |
|
90 | 90 | |
@@ -94,10 +94,11 b' class TestLoginController(TestController' | |||
|
94 | 94 | 'email':email, |
|
95 | 95 | 'name':name, |
|
96 | 96 | 'lastname':lastname}) |
|
97 | ||
|
97 | print response.body | |
|
98 | 98 | assert response.status == '302 Found', 'Wrong response from register page got %s' % response.status |
|
99 | assert 'You have successfully registered into hg-app' in response.session['flash'][0], 'No flash message about user registration' | |
|
99 | 100 | |
|
100 |
ret = self.sa.query(User).filter(User.username == 'test_regular |
|
|
101 | ret = self.sa.query(User).filter(User.username == 'test_regular4').one() | |
|
101 | 102 | assert ret.username == username , 'field mismatch %s %s' % (ret.username, username) |
|
102 | 103 | assert check_password(password,ret.password) == True , 'password mismatch' |
|
103 | 104 | assert ret.email == email , 'field mismatch %s %s' % (ret.email, email) |
@@ -105,7 +106,34 b' class TestLoginController(TestController' | |||
|
105 | 106 | assert ret.lastname == lastname , 'field mismatch %s %s' % (ret.lastname, lastname) |
|
106 | 107 | |
|
107 | 108 | |
|
109 | def test_forgot_password_wrong_mail(self): | |
|
110 | response = self.app.post(url(controller='login', action='password_reset'), | |
|
111 | {'email':'marcin@wrongmail.org', }) | |
|
112 | ||
|
113 | assert "That e-mail address doesn't exist" in response.body, 'Missing error message about wrong email' | |
|
114 | ||
|
115 | def test_forgot_password(self): | |
|
116 | response = self.app.get(url(controller='login', action='password_reset')) | |
|
117 | assert response.status == '200 OK', 'Wrong response from login page got %s' % response.status | |
|
118 | ||
|
119 | username = 'test_password_reset_1' | |
|
120 | password = 'qweqwe' | |
|
121 | email = 'marcin@python-works.com' | |
|
122 | name = 'passwd' | |
|
123 | lastname = 'reset' | |
|
124 | ||
|
125 | response = self.app.post(url(controller='login', action='register'), | |
|
126 | {'username':username, | |
|
127 | 'password':password, | |
|
128 | 'email':email, | |
|
129 | 'name':name, | |
|
130 | 'lastname':lastname}) | |
|
131 | #register new user for email test | |
|
132 | response = self.app.post(url(controller='login', action='password_reset'), | |
|
133 | {'email':email, }) | |
|
134 | print response.session['flash'] | |
|
135 | assert 'You have successfully registered into hg-app' in response.session['flash'][0], 'No flash message about user registration' | |
|
136 | assert 'Your new password was sent' in response.session['flash'][1], 'No flash message about password reset' | |
|
108 | 137 | |
|
109 | 138 | |
|
110 | 139 | |
|
111 |
@@ -23,7 +23,16 b' class TestSearchController(TestControlle' | |||
|
23 | 23 | |
|
24 | 24 | def test_normal_search(self): |
|
25 | 25 | self.log_user() |
|
26 |
response = self.app.get(url(controller='search', action='index'),{'q':'def |
|
|
26 | response = self.app.get(url(controller='search', action='index'), {'q':'def repo'}) | |
|
27 | 27 | print response.body |
|
28 |
assert ' |
|
|
28 | assert '10 results' in response.body, 'no message about proper search results' | |
|
29 | assert 'Permission denied' not in response.body, 'Wrong permissions settings for that repo and user' | |
|
30 | ||
|
29 | 31 |
|
|
32 | def test_repo_search(self): | |
|
33 | self.log_user() | |
|
34 | response = self.app.get(url(controller='search', action='index'), {'q':'repository:vcs_test def test'}) | |
|
35 | print response.body | |
|
36 | assert '4 results' in response.body, 'no message about proper search results' | |
|
37 | assert 'Permission denied' not in response.body, 'Wrong permissions settings for that repo and user' | |
|
38 |
@@ -3,6 +3,7 b' from pylons_app.tests import *' | |||
|
3 | 3 | class TestSettingsController(TestController): |
|
4 | 4 | |
|
5 | 5 | def test_index(self): |
|
6 | self.log_user() | |
|
6 | 7 | response = self.app.get(url(controller='settings', action='index', |
|
7 | 8 | repo_name='vcs_test')) |
|
8 | 9 | # Test response... |
@@ -3,5 +3,6 b' from pylons_app.tests import *' | |||
|
3 | 3 | class TestShortlogController(TestController): |
|
4 | 4 | |
|
5 | 5 | def test_index(self): |
|
6 | self.log_user() | |
|
6 | 7 | response = self.app.get(url(controller='shortlog', action='index',repo_name='vcs_test')) |
|
7 | 8 | # Test response... |
@@ -3,5 +3,6 b' from pylons_app.tests import *' | |||
|
3 | 3 | class TestSummaryController(TestController): |
|
4 | 4 | |
|
5 | 5 | def test_index(self): |
|
6 | self.log_user() | |
|
6 | 7 | response = self.app.get(url(controller='summary', action='index',repo_name='vcs_test')) |
|
7 | 8 | # Test response... |
@@ -3,5 +3,6 b' from pylons_app.tests import *' | |||
|
3 | 3 | class TestTagsController(TestController): |
|
4 | 4 | |
|
5 | 5 | def test_index(self): |
|
6 | self.log_user() | |
|
6 | 7 | response = self.app.get(url(controller='tags', action='index',repo_name='vcs_test')) |
|
7 | 8 | # Test response... |
@@ -1,40 +1,25 b'' | |||
|
1 | 1 | """Setup the pylons_app application""" |
|
2 | 2 | |
|
3 |
from os.path import dirname as dn |
|
|
3 | from os.path import dirname as dn | |
|
4 | 4 | from pylons_app.config.environment import load_environment |
|
5 | 5 | from pylons_app.lib.db_manage import DbManage |
|
6 | import datetime | |
|
7 | from time import mktime | |
|
8 | 6 | import logging |
|
9 | 7 | import os |
|
10 | 8 | import sys |
|
11 | import tarfile | |
|
12 | 9 | |
|
13 | 10 | log = logging.getLogger(__name__) |
|
14 | 11 | |
|
15 | 12 | ROOT = dn(dn(os.path.realpath(__file__))) |
|
16 | 13 | sys.path.append(ROOT) |
|
17 | 14 | |
|
15 | ||
|
18 | 16 | def setup_app(command, conf, vars): |
|
19 | 17 | """Place any commands to setup pylons_app here""" |
|
20 | 18 | log_sql = True |
|
21 | 19 | tests = False |
|
20 | REPO_TEST_PATH = None | |
|
22 | 21 | |
|
23 | 22 | dbname = os.path.split(conf['sqlalchemy.db1.url'])[-1] |
|
24 | filename = os.path.split(conf.filename)[-1] | |
|
25 | ||
|
26 | if filename == 'tests.ini': | |
|
27 | uniq_suffix = str(int(mktime(datetime.datetime.now().timetuple()))) | |
|
28 | REPO_TEST_PATH = '/tmp/hg_app_test_%s' % uniq_suffix | |
|
29 | ||
|
30 | if not os.path.isdir(REPO_TEST_PATH): | |
|
31 | os.mkdir(REPO_TEST_PATH) | |
|
32 | cur_dir = dn(os.path.abspath(__file__)) | |
|
33 | tar = tarfile.open(jn(cur_dir,'tests',"vcs_test.tar.gz")) | |
|
34 | tar.extractall(REPO_TEST_PATH) | |
|
35 | tar.close() | |
|
36 | ||
|
37 | tests = True | |
|
38 | 23 | |
|
39 | 24 | dbmanage = DbManage(log_sql, dbname, tests) |
|
40 | 25 | dbmanage.create_tables(override=True) |
@@ -8,7 +8,7 b' find_links = http://www.pylonshq.com/dow' | |||
|
8 | 8 | [nosetests] |
|
9 | 9 | verbose=True |
|
10 | 10 | verbosity=2 |
|
11 |
with-pylons=test |
|
|
11 | with-pylons=test.ini | |
|
12 | 12 | detailed-errors=1 |
|
13 | 13 | |
|
14 | 14 | # Babel configuration |
@@ -20,12 +20,13 b' setup(' | |||
|
20 | 20 | "SQLAlchemy>=0.6", |
|
21 | 21 | "babel", |
|
22 | 22 | "Mako>=0.3.2", |
|
23 |
"vcs>=0.1. |
|
|
23 | "vcs>=0.1.5", | |
|
24 | 24 | "pygments>=1.3.0", |
|
25 | 25 | "mercurial>=1.6", |
|
26 | 26 | "pysqlite", |
|
27 |
"whoosh==1.0.0b1 |
|
|
27 | "whoosh==1.0.0b17", | |
|
28 | 28 | "py-bcrypt", |
|
29 | "celery", | |
|
29 | 30 | ], |
|
30 | 31 | setup_requires=["PasteScript>=1.6.3"], |
|
31 | 32 | packages=find_packages(exclude=['ez_setup']), |
@@ -1,28 +1,33 b'' | |||
|
1 | 1 | ################################################################################ |
|
2 | 2 | ################################################################################ |
|
3 |
# |
|
|
3 | # hg-app - Pylons environment configuration # | |
|
4 | 4 | # # |
|
5 | 5 | # The %(here)s variable will be replaced with the parent directory of this file# |
|
6 | 6 | ################################################################################ |
|
7 | 7 | |
|
8 | 8 | [DEFAULT] |
|
9 | 9 | debug = true |
|
10 | ############################################ | |
|
11 | ## Uncomment and replace with the address ## | |
|
12 | ## which should receive any error reports ## | |
|
13 | ############################################ | |
|
10 | ################################################################################ | |
|
11 | ## Uncomment and replace with the address which should receive ## | |
|
12 | ## any error reports after application crash ## | |
|
13 | ## Additionally those settings will be used by hg-app mailing system ## | |
|
14 | ################################################################################ | |
|
14 | 15 | #email_to = admin@localhost |
|
16 | #error_email_from = paste_error@localhost | |
|
17 | #app_email_from = hg-app-noreply@localhost | |
|
18 | #error_message = | |
|
19 | ||
|
15 | 20 | #smtp_server = mail.server.com |
|
16 | #error_email_from = paste_error@localhost | |
|
17 | 21 | #smtp_username = |
|
18 | 22 | #smtp_password = |
|
19 | #error_message = 'mercurial crash !' | |
|
23 | #smtp_port = | |
|
24 | #smtp_use_tls = false | |
|
20 | 25 | |
|
21 | 26 | [server:main] |
|
22 | 27 | ##nr of threads to spawn |
|
23 | 28 | threadpool_workers = 5 |
|
24 | 29 | |
|
25 | ##max request before | |
|
30 | ##max request before thread respawn | |
|
26 | 31 | threadpool_max_requests = 2 |
|
27 | 32 | |
|
28 | 33 | ##option to use threads of process |
@@ -56,7 +61,7 b' beaker.cache.super_short_term.expire=10' | |||
|
56 | 61 | ### BEAKER SESSION #### |
|
57 | 62 | #################################### |
|
58 | 63 | ## Type of storage used for the session, current types are |
|
59 |
## |
|
|
64 | ## "dbm", "file", "memcached", "database", and "memory". | |
|
60 | 65 | ## The storage uses the Container API |
|
61 | 66 | ##that is also used by the cache system. |
|
62 | 67 | beaker.session.type = file |
General Comments 0
You need to be logged in to leave comments.
Login now