##// END OF EJS Templates
some fixups in cache, added fallback and cache invalidation when key not found in cached repos list,...
marcink -
r535:72778dda default
parent child Browse files
Show More
@@ -1,77 +1,77 b''
1 1 # List of modules to import when celery starts.
2 2 import sys
3 3 import os
4 4 import ConfigParser
5 5 root = os.getcwd()
6 6
7 PYLONS_CONFIG_NAME = 'test.ini'
7 PYLONS_CONFIG_NAME = 'production.ini'
8 8
9 9 sys.path.append(root)
10 10 config = ConfigParser.ConfigParser({'here':root})
11 11 config.read('%s/%s' % (root, PYLONS_CONFIG_NAME))
12 12 PYLONS_CONFIG = config
13 13
14 14 CELERY_IMPORTS = ("pylons_app.lib.celerylib.tasks",)
15 15
16 16 ## Result store settings.
17 17 CELERY_RESULT_BACKEND = "database"
18 18 CELERY_RESULT_DBURI = dict(config.items('app:main'))['sqlalchemy.db1.url']
19 19 CELERY_RESULT_SERIALIZER = 'json'
20 20
21 21
22 22 BROKER_CONNECTION_MAX_RETRIES = 30
23 23
24 24 ## Broker settings.
25 25 BROKER_HOST = "localhost"
26 26 BROKER_PORT = 5672
27 27 BROKER_VHOST = "rabbitmqhost"
28 28 BROKER_USER = "rabbitmq"
29 29 BROKER_PASSWORD = "qweqwe"
30 30
31 31 ## Worker settings
32 32 ## If you're doing mostly I/O you can have more processes,
33 33 ## but if mostly spending CPU, try to keep it close to the
34 34 ## number of CPUs on your machine. If not set, the number of CPUs/cores
35 35 ## available will be used.
36 36 CELERYD_CONCURRENCY = 2
37 37 # CELERYD_LOG_FILE = "celeryd.log"
38 38 CELERYD_LOG_LEVEL = "DEBUG"
39 39 CELERYD_MAX_TASKS_PER_CHILD = 3
40 40
41 41 #Tasks will never be sent to the queue, but executed locally instead.
42 42 CELERY_ALWAYS_EAGER = False
43 43 if PYLONS_CONFIG_NAME == 'test.ini':
44 44 #auto eager for tests
45 45 CELERY_ALWAYS_EAGER = True
46 46
47 47 #===============================================================================
48 48 # EMAIL SETTINGS
49 49 #===============================================================================
50 50 pylons_email_config = dict(config.items('DEFAULT'))
51 51
52 52 CELERY_SEND_TASK_ERROR_EMAILS = True
53 53
54 54 #List of (name, email_address) tuples for the admins that should receive error e-mails.
55 55 ADMINS = [('Administrator', pylons_email_config.get('email_to'))]
56 56
57 57 #The e-mail address this worker sends e-mails from. Default is "celery@localhost".
58 58 SERVER_EMAIL = pylons_email_config.get('error_email_from')
59 59
60 60 #The mail server to use. Default is "localhost".
61 61 MAIL_HOST = pylons_email_config.get('smtp_server')
62 62
63 63 #Username (if required) to log on to the mail server with.
64 64 MAIL_HOST_USER = pylons_email_config.get('smtp_username')
65 65
66 66 #Password (if required) to log on to the mail server with.
67 67 MAIL_HOST_PASSWORD = pylons_email_config.get('smtp_password')
68 68
69 69 MAIL_PORT = pylons_email_config.get('smtp_port')
70 70
71 71
72 72 #===============================================================================
73 73 # INSTRUCTIONS FOR RABBITMQ
74 74 #===============================================================================
75 75 # rabbitmqctl add_user rabbitmq qweqwe
76 76 # rabbitmqctl add_vhost rabbitmqhost
77 77 # rabbitmqctl set_permissions -p rabbitmqhost rabbitmq ".*" ".*" ".*"
@@ -1,40 +1,46 b''
1 1 """The base Controller API
2 2
3 3 Provides the BaseController class for subclassing.
4 4 """
5 5 from pylons import config, tmpl_context as c, request, session
6 6 from pylons.controllers import WSGIController
7 7 from pylons.templating import render_mako as render
8 8 from pylons_app import __version__
9 9 from pylons_app.lib import auth
10 10 from pylons_app.lib.utils import get_repo_slug
11 11 from pylons_app.model import meta
12 12 from pylons_app.model.hg_model import _get_repos_cached, \
13 13 _get_repos_switcher_cached
14 14
15 15 class BaseController(WSGIController):
16 16
17 17 def __before__(self):
18 18 c.hg_app_version = __version__
19 19 c.hg_app_name = config['hg_app_title']
20 20 c.repo_name = get_repo_slug(request)
21 21 c.cached_repo_list = _get_repos_cached()
22 22 c.repo_switcher_list = _get_repos_switcher_cached(c.cached_repo_list)
23 23
24 24 if c.repo_name:
25 c.repository_tags = c.cached_repo_list[c.repo_name].tags
26 c.repository_branches = c.cached_repo_list[c.repo_name].branches
25 cached_repo = c.cached_repo_list.get(c.repo_name)
26
27 if cached_repo:
28 c.repository_tags = cached_repo.tags
29 c.repository_branches = cached_repo.branches
30 else:
31 c.repository_tags = {}
32 c.repository_branches = {}
27 33
28 34 self.sa = meta.Session
29 35
30 36 def __call__(self, environ, start_response):
31 37 """Invoke the Controller"""
32 38 # WSGIController.__call__ dispatches to the Controller method
33 39 # the request is routed to. This routing information is
34 40 # available in environ['pylons.routes_dict']
35 41 try:
36 42 #putting this here makes sure that we update permissions every time
37 43 c.hg_app_user = auth.get_user(session)
38 44 return WSGIController.__call__(self, environ, start_response)
39 45 finally:
40 46 meta.Session.remove()
@@ -1,318 +1,316 b''
1 1 from celery.decorators import task
2 2 from celery.task.sets import subtask
3 3 from celeryconfig import PYLONS_CONFIG as config
4 4 from operator import itemgetter
5 5 from pylons.i18n.translation import _
6 6 from pylons_app.lib.celerylib import run_task, locked_task
7 7 from pylons_app.lib.helpers import person
8 8 from pylons_app.lib.smtp_mailer import SmtpMailer
9 9 from pylons_app.lib.utils import OrderedDict
10 10 from time import mktime
11 11 from vcs.backends.hg import MercurialRepository
12 12 import json
13 13 import traceback
14 14
15 15 __all__ = ['whoosh_index', 'get_commits_stats',
16 16 'reset_user_password', 'send_email']
17 17
18 18 def get_session():
19 19 from sqlalchemy import engine_from_config
20 20 from sqlalchemy.orm import sessionmaker, scoped_session
21 21 engine = engine_from_config(dict(config.items('app:main')), 'sqlalchemy.db1.')
22 22 sa = scoped_session(sessionmaker(bind=engine))
23 23 return sa
24 24
25 25 def get_hg_settings():
26 26 from pylons_app.model.db import HgAppSettings
27 27 try:
28 28 sa = get_session()
29 29 ret = sa.query(HgAppSettings).all()
30 30 finally:
31 31 sa.remove()
32 32
33 33 if not ret:
34 34 raise Exception('Could not get application settings !')
35 35 settings = {}
36 36 for each in ret:
37 37 settings['hg_app_' + each.app_settings_name] = each.app_settings_value
38 38
39 39 return settings
40 40
41 41 def get_hg_ui_settings():
42 42 from pylons_app.model.db import HgAppUi
43 43 try:
44 44 sa = get_session()
45 45 ret = sa.query(HgAppUi).all()
46 46 finally:
47 47 sa.remove()
48 48
49 49 if not ret:
50 50 raise Exception('Could not get application ui settings !')
51 51 settings = {}
52 52 for each in ret:
53 53 k = each.ui_key
54 54 v = each.ui_value
55 55 if k == '/':
56 56 k = 'root_path'
57 57
58 58 if k.find('.') != -1:
59 59 k = k.replace('.', '_')
60 60
61 61 if each.ui_section == 'hooks':
62 62 v = each.ui_active
63 63
64 64 settings[each.ui_section + '_' + k] = v
65 65
66 66 return settings
67 67
68 68 @task
69 69 @locked_task
70 70 def whoosh_index(repo_location, full_index):
71 71 log = whoosh_index.get_logger()
72 72 from pylons_app.lib.indexers.daemon import WhooshIndexingDaemon
73 73 WhooshIndexingDaemon(repo_location=repo_location).run(full_index=full_index)
74 74
75 75 @task
76 76 @locked_task
77 77 def get_commits_stats(repo_name, ts_min_y, ts_max_y):
78 78 from pylons_app.model.db import Statistics, Repository
79 79 log = get_commits_stats.get_logger()
80 80 author_key_cleaner = lambda k: person(k).replace('"', "") #for js data compatibility
81 81
82 82 commits_by_day_author_aggregate = {}
83 83 commits_by_day_aggregate = {}
84 84 repos_path = get_hg_ui_settings()['paths_root_path'].replace('*', '')
85 85 repo = MercurialRepository(repos_path + repo_name)
86 86
87 87 skip_date_limit = True
88 88 parse_limit = 350 #limit for single task changeset parsing optimal for
89 89 last_rev = 0
90 90 last_cs = None
91 91 timegetter = itemgetter('time')
92 92
93 93 sa = get_session()
94 94
95 95 dbrepo = sa.query(Repository)\
96 96 .filter(Repository.repo_name == repo_name).scalar()
97 97 cur_stats = sa.query(Statistics)\
98 98 .filter(Statistics.repository == dbrepo).scalar()
99 99 if cur_stats:
100 100 last_rev = cur_stats.stat_on_revision
101 101 if not repo.revisions:
102 102 return True
103 103
104 104 if last_rev == repo.revisions[-1] and len(repo.revisions) > 1:
105 105 #pass silently without any work if we're not on first revision or current
106 106 #state of parsing revision(from db marker) is the last revision
107 107 return True
108 108
109 109 if cur_stats:
110 110 commits_by_day_aggregate = OrderedDict(
111 111 json.loads(
112 112 cur_stats.commit_activity_combined))
113 113 commits_by_day_author_aggregate = json.loads(cur_stats.commit_activity)
114 114
115 115 log.debug('starting parsing %s', parse_limit)
116 116 for cnt, rev in enumerate(repo.revisions[last_rev:]):
117 117 last_cs = cs = repo.get_changeset(rev)
118 118 k = '%s-%s-%s' % (cs.date.timetuple()[0], cs.date.timetuple()[1],
119 119 cs.date.timetuple()[2])
120 120 timetupple = [int(x) for x in k.split('-')]
121 121 timetupple.extend([0 for _ in xrange(6)])
122 122 k = mktime(timetupple)
123 123 if commits_by_day_author_aggregate.has_key(author_key_cleaner(cs.author)):
124 124 try:
125 125 l = [timegetter(x) for x in commits_by_day_author_aggregate\
126 126 [author_key_cleaner(cs.author)]['data']]
127 127 time_pos = l.index(k)
128 128 except ValueError:
129 129 time_pos = False
130 130
131 131 if time_pos >= 0 and time_pos is not False:
132 132
133 133 datadict = commits_by_day_author_aggregate\
134 134 [author_key_cleaner(cs.author)]['data'][time_pos]
135 135
136 136 datadict["commits"] += 1
137 137 datadict["added"] += len(cs.added)
138 138 datadict["changed"] += len(cs.changed)
139 139 datadict["removed"] += len(cs.removed)
140 140 #print datadict
141 141
142 142 else:
143 143 #print 'ELSE !!!!'
144 144 if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
145 145
146 146 datadict = {"time":k,
147 147 "commits":1,
148 148 "added":len(cs.added),
149 149 "changed":len(cs.changed),
150 150 "removed":len(cs.removed),
151 151 }
152 152 commits_by_day_author_aggregate\
153 153 [author_key_cleaner(cs.author)]['data'].append(datadict)
154 154
155 155 else:
156 156 #print k, 'nokey ADDING'
157 157 if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
158 158 commits_by_day_author_aggregate[author_key_cleaner(cs.author)] = {
159 159 "label":author_key_cleaner(cs.author),
160 160 "data":[{"time":k,
161 161 "commits":1,
162 162 "added":len(cs.added),
163 163 "changed":len(cs.changed),
164 164 "removed":len(cs.removed),
165 165 }],
166 166 "schema":["commits"],
167 167 }
168 168
169 169 # #gather all data by day
170 170 if commits_by_day_aggregate.has_key(k):
171 171 commits_by_day_aggregate[k] += 1
172 172 else:
173 173 commits_by_day_aggregate[k] = 1
174 174
175 175 if cnt >= parse_limit:
176 176 #don't fetch too much data since we can freeze the application
177 177 break
178 178
179 179 overview_data = []
180 180 for k, v in commits_by_day_aggregate.items():
181 181 overview_data.append([k, v])
182 182 overview_data = sorted(overview_data, key=itemgetter(0))
183 183
184 184 if not commits_by_day_author_aggregate:
185 185 commits_by_day_author_aggregate[author_key_cleaner(repo.contact)] = {
186 186 "label":author_key_cleaner(repo.contact),
187 187 "data":[0, 1],
188 188 "schema":["commits"],
189 189 }
190 190
191 191 stats = cur_stats if cur_stats else Statistics()
192 192 stats.commit_activity = json.dumps(commits_by_day_author_aggregate)
193 193 stats.commit_activity_combined = json.dumps(overview_data)
194 194
195 195 log.debug('last revison %s', last_rev)
196 196 leftovers = len(repo.revisions[last_rev:])
197 197 log.debug('revisions to parse %s', leftovers)
198 198
199 199 if last_rev == 0 or leftovers < parse_limit:
200 200 stats.languages = json.dumps(__get_codes_stats(repo_name))
201 201
202 202 stats.repository = dbrepo
203 203 stats.stat_on_revision = last_cs.revision
204 204
205 205 try:
206 206 sa.add(stats)
207 207 sa.commit()
208 208 except:
209 209 log.error(traceback.format_exc())
210 210 sa.rollback()
211 211 return False
212 212 if len(repo.revisions) > 1:
213 213 run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y)
214 214
215 215 return True
216 216
217 217 @task
218 218 def reset_user_password(user_email):
219 219 log = reset_user_password.get_logger()
220 220 from pylons_app.lib import auth
221 221 from pylons_app.model.db import User
222 222
223 223 try:
224 224 try:
225 225 sa = get_session()
226 226 user = sa.query(User).filter(User.email == user_email).scalar()
227 227 new_passwd = auth.PasswordGenerator().gen_password(8,
228 228 auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
229 229 if user:
230 230 user.password = auth.get_crypt_password(new_passwd)
231 231 sa.add(user)
232 232 sa.commit()
233 233 log.info('change password for %s', user_email)
234 234 if new_passwd is None:
235 235 raise Exception('unable to generate new password')
236 236
237 237 except:
238 238 log.error(traceback.format_exc())
239 239 sa.rollback()
240 240
241 241 run_task(send_email, user_email,
242 242 "Your new hg-app password",
243 243 'Your new hg-app password:%s' % (new_passwd))
244 244 log.info('send new password mail to %s', user_email)
245 245
246 246
247 247 except:
248 248 log.error('Failed to update user password')
249 249 log.error(traceback.format_exc())
250 250 return True
251 251
252 252 @task
253 253 def send_email(recipients, subject, body):
254 254 log = send_email.get_logger()
255 255 email_config = dict(config.items('DEFAULT'))
256 256 mail_from = email_config.get('app_email_from')
257 257 user = email_config.get('smtp_username')
258 258 passwd = email_config.get('smtp_password')
259 259 mail_server = email_config.get('smtp_server')
260 260 mail_port = email_config.get('smtp_port')
261 261 tls = email_config.get('smtp_use_tls')
262 262 ssl = False
263 263
264 264 try:
265 265 m = SmtpMailer(mail_from, user, passwd, mail_server,
266 266 mail_port, ssl, tls)
267 267 m.send(recipients, subject, body)
268 268 except:
269 269 log.error('Mail sending failed')
270 270 log.error(traceback.format_exc())
271 271 return False
272 272 return True
273 273
274 274 @task
275 275 def create_repo_fork(form_data, cur_user):
276 276 import os
277 from pylons_app.lib.utils import invalidate_cache
278 277 from pylons_app.model.repo_model import RepoModel
279 278 sa = get_session()
280 279 rm = RepoModel(sa)
281 280
282 281 rm.create(form_data, cur_user, just_db=True, fork=True)
283 282
284 283 repos_path = get_hg_ui_settings()['paths_root_path'].replace('*', '')
285 284 repo_path = os.path.join(repos_path, form_data['repo_name'])
286 285 repo_fork_path = os.path.join(repos_path, form_data['fork_name'])
287 286
288 287 MercurialRepository(str(repo_fork_path), True, clone_url=str(repo_path))
289 #invalidate_cache('cached_repo_list')
290 288
291 289
292 290 def __get_codes_stats(repo_name):
293 291 LANGUAGES_EXTENSIONS = ['action', 'adp', 'ashx', 'asmx', 'aspx', 'asx', 'axd', 'c',
294 292 'cfg', 'cfm', 'cpp', 'cs', 'diff', 'do', 'el', 'erl',
295 293 'h', 'java', 'js', 'jsp', 'jspx', 'lisp',
296 294 'lua', 'm', 'mako', 'ml', 'pas', 'patch', 'php', 'php3',
297 295 'php4', 'phtml', 'pm', 'py', 'rb', 'rst', 's', 'sh',
298 296 'tpl', 'txt', 'vim', 'wss', 'xhtml', 'xml', 'xsl', 'xslt',
299 297 'yaws']
300 298 repos_path = get_hg_ui_settings()['paths_root_path'].replace('*', '')
301 299 repo = MercurialRepository(repos_path + repo_name)
302 300
303 301 code_stats = {}
304 302 for topnode, dirs, files in repo.walk('/', 'tip'):
305 303 for f in files:
306 304 k = f.mimetype
307 305 if f.extension in LANGUAGES_EXTENSIONS:
308 306 if code_stats.has_key(k):
309 307 code_stats[k] += 1
310 308 else:
311 309 code_stats[k] = 1
312 310
313 311 return code_stats or {}
314 312
315 313
316 314
317 315
318 316
@@ -1,172 +1,186 b''
1 1 #!/usr/bin/env python
2 2 # encoding: utf-8
3 3 # Model for hg app
4 4 # Copyright (C) 2009-2010 Marcin Kuzminski <marcin@python-works.com>
5 5 #
6 6 # This program is free software; you can redistribute it and/or
7 7 # modify it under the terms of the GNU General Public License
8 8 # as published by the Free Software Foundation; version 2
9 9 # of the License or (at your opinion) any later version of the license.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software
18 18 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
19 19 # MA 02110-1301, USA.
20 20 """
21 21 Created on April 9, 2010
22 22 Model for hg app
23 23 @author: marcink
24 24 """
25 25 from beaker.cache import cache_region
26 26 from mercurial import ui
27 27 from mercurial.hgweb.hgwebdir_mod import findrepos
28 28 from pylons.i18n.translation import _
29 from pylons_app.lib import helpers as h
30 from pylons_app.lib.utils import invalidate_cache
29 31 from pylons_app.lib.auth import HasRepoPermissionAny
30 32 from pylons_app.model import meta
31 33 from pylons_app.model.db import Repository, User
32 from pylons_app.lib import helpers as h
34 from sqlalchemy.orm import joinedload
33 35 from vcs.exceptions import RepositoryError, VCSError
34 from sqlalchemy.orm import joinedload
35 36 import logging
36 37 import os
37 38 import sys
38 39 log = logging.getLogger(__name__)
39 40
40 41 try:
41 42 from vcs.backends.hg import MercurialRepository
42 43 except ImportError:
43 44 sys.stderr.write('You have to import vcs module')
44 45 raise Exception('Unable to import vcs')
45 46
46 47 def _get_repos_cached_initial(app_globals, initial):
47 48 """return cached dict with repos
48 49 """
49 50 g = app_globals
50 51 return HgModel.repo_scan(g.paths[0][0], g.paths[0][1], g.baseui, initial)
51 52
52 53 @cache_region('long_term', 'cached_repo_list')
53 54 def _get_repos_cached():
54 55 """return cached dict with repos
55 56 """
56 57 log.info('getting all repositories list')
57 58 from pylons import app_globals as g
58 59 return HgModel.repo_scan(g.paths[0][0], g.paths[0][1], g.baseui)
59 60
60 61 @cache_region('super_short_term', 'cached_repos_switcher_list')
61 62 def _get_repos_switcher_cached(cached_repo_list):
62 63 repos_lst = []
63 64 for repo in [x for x in cached_repo_list.values()]:
64 65 if HasRepoPermissionAny('repository.write', 'repository.read',
65 66 'repository.admin')(repo.name, 'main page check'):
66 67 repos_lst.append((repo.name, repo.dbrepo.private,))
67 68
68 69 return sorted(repos_lst, key=lambda k:k[0].lower())
69 70
70 71 @cache_region('long_term', 'full_changelog')
71 72 def _full_changelog_cached(repo_name):
72 73 log.info('getting full changelog for %s', repo_name)
73 74 return list(reversed(list(HgModel().get_repo(repo_name))))
74 75
75 76 class HgModel(object):
76 77 """Mercurial Model
77 78 """
78 79
79 80 def __init__(self):
80 81 pass
81 82
82 83 @staticmethod
83 84 def repo_scan(repos_prefix, repos_path, baseui, initial=False):
84 85 """
85 86 Listing of repositories in given path. This path should not be a
86 87 repository itself. Return a dictionary of repository objects
87 88 :param repos_path: path to directory it could take syntax with
88 89 * or ** for deep recursive displaying repositories
89 90 """
90 91 sa = meta.Session()
91 92 def check_repo_dir(path):
92 93 """Checks the repository
93 94 :param path:
94 95 """
95 96 repos_path = path.split('/')
96 97 if repos_path[-1] in ['*', '**']:
97 98 repos_path = repos_path[:-1]
98 99 if repos_path[0] != '/':
99 100 repos_path[0] = '/'
100 101 if not os.path.isdir(os.path.join(*repos_path)):
101 102 raise RepositoryError('Not a valid repository in %s' % path)
102 103 if not repos_path.endswith('*'):
103 104 raise VCSError('You need to specify * or ** at the end of path '
104 105 'for recursive scanning')
105 106
106 107 check_repo_dir(repos_path)
107 108 log.info('scanning for repositories in %s', repos_path)
108 109 repos = findrepos([(repos_prefix, repos_path)])
109 110 if not isinstance(baseui, ui.ui):
110 111 baseui = ui.ui()
111 112
112 113 repos_list = {}
113 114 for name, path in repos:
114 115 try:
115 116 #name = name.split('/')[-1]
116 117 if repos_list.has_key(name):
117 118 raise RepositoryError('Duplicate repository name %s found in'
118 119 ' %s' % (name, path))
119 120 else:
120 121
121 122 repos_list[name] = MercurialRepository(path, baseui=baseui)
122 123 repos_list[name].name = name
123 124
124 125 dbrepo = None
125 126 if not initial:
127 #for the initial scan on the application's first run we don't
128 #have db repos yet.
126 129 dbrepo = sa.query(Repository)\
127 130 .options(joinedload(Repository.fork))\
128 131 .filter(Repository.repo_name == name)\
129 132 .scalar()
130 133
131 134 if dbrepo:
132 135 log.info('Adding db instance to cached list')
133 136 repos_list[name].dbrepo = dbrepo
134 137 repos_list[name].description = dbrepo.description
135 138 if dbrepo.user:
136 139 repos_list[name].contact = dbrepo.user.full_contact
137 140 else:
138 141 repos_list[name].contact = sa.query(User)\
139 142 .filter(User.admin == True).first().full_contact
140 143 except OSError:
141 144 continue
142 145 meta.Session.remove()
143 146 return repos_list
144 147
145 148 def get_repos(self):
146 149 for name, repo in _get_repos_cached().items():
147 150 if repo._get_hidden():
148 151 #skip hidden web repository
149 152 continue
150 153
151 154 last_change = repo.last_change
152 155 tip = h.get_changeset_safe(repo, 'tip')
153 156
154 157 tmp_d = {}
155 158 tmp_d['name'] = repo.name
156 159 tmp_d['name_sort'] = tmp_d['name'].lower()
157 160 tmp_d['description'] = repo.description
158 161 tmp_d['description_sort'] = tmp_d['description']
159 162 tmp_d['last_change'] = last_change
160 163 tmp_d['last_change_sort'] = last_change[1] - last_change[0]
161 164 tmp_d['tip'] = tip.short_id
162 165 tmp_d['tip_sort'] = tip.revision
163 166 tmp_d['rev'] = tip.revision
164 167 tmp_d['contact'] = repo.contact
165 168 tmp_d['contact_sort'] = tmp_d['contact']
166 169 tmp_d['repo_archives'] = list(repo._get_archives())
167 170 tmp_d['last_msg'] = tip.message
168 171 tmp_d['repo'] = repo
169 172 yield tmp_d
170 173
171 174 def get_repo(self, repo_name):
172 return _get_repos_cached()[repo_name]
175 try:
176 repo = _get_repos_cached()[repo_name]
177 return repo
178 except KeyError:
179 #if we're here and we got key errors let's try to invalidate the
180 #cache and try again
181 invalidate_cache('cached_repo_list')
182 repo = _get_repos_cached()[repo_name]
183 return repo
184
185
186
@@ -1,56 +1,58 b''
1 1 """Pylons application test package
2 2
3 3 This package assumes the Pylons environment is already loaded, such as
4 4 when this script is imported from the `nosetests --with-pylons=test.ini`
5 5 command.
6 6
7 7 This module initializes the application via ``websetup`` (`paster
8 8 setup-app`) and provides the base testing objects.
9 9 """
10 10 from unittest import TestCase
11 11
12 12 from paste.deploy import loadapp
13 13 from paste.script.appinstall import SetupCommand
14 14 from pylons import config, url
15 15 from routes.util import URLGenerator
16 16 from webtest import TestApp
17 17 import os
18 18 from pylons_app.model import meta
19 19 import logging
20 20
21 21
22 22 log = logging.getLogger(__name__)
23 23
24 24 import pylons.test
25 25
26 26 __all__ = ['environ', 'url', 'TestController']
27 27
28 28 # Invoke websetup with the current config file
29 29 #SetupCommand('setup-app').run([config_file])
30 30
31 ##RUNNING DESIRED TESTS
32 #nosetests pylons_app.tests.functional.test_admin_settings:TestSettingsController.test_my_account
31 33
32 34 environ = {}
33 35
34 36 class TestController(TestCase):
35 37
36 38 def __init__(self, *args, **kwargs):
37 39 wsgiapp = pylons.test.pylonsapp
38 40 config = wsgiapp.config
39 41 self.app = TestApp(wsgiapp)
40 42 url._push_object(URLGenerator(config['routes.map'], environ))
41 43 self.sa = meta.Session
42 44
43 45 TestCase.__init__(self, *args, **kwargs)
44 46
45 47 def log_user(self, username='test_admin', password='test12'):
46 48 response = self.app.post(url(controller='login', action='index'),
47 49 {'username':username,
48 50 'password':password})
49 51 print response
50 52
51 53 if 'invalid user name' in response.body:
52 54 assert False, 'could not login using %s %s' % (username, password)
53 55
54 56 assert response.status == '302 Found', 'Wrong response code from login got %s' % response.status
55 57 assert response.session['hg_app_user'].username == username, 'wrong logged in user got %s expected %s' % (response.session['hg_app_user'].username, username)
56 58 return response.follow()
@@ -1,46 +1,55 b''
1 1 from pylons_app.model.db import Repository
2 2 from pylons_app.tests import *
3 3
4 4 class TestSettingsController(TestController):
5 5
6 6 def test_index(self):
7 7 self.log_user()
8 8 response = self.app.get(url(controller='settings', action='index',
9 9 repo_name='vcs_test'))
10 10 # Test response...
11 11
12 12 def test_fork(self):
13 13 self.log_user()
14 14 response = self.app.get(url(controller='settings', action='fork',
15 15 repo_name='vcs_test'))
16 16
17 17
18 18 def test_fork_create(self):
19 19 self.log_user()
20 20 fork_name = 'vcs_test_fork'
21 21 description = 'fork of vcs test'
22 22 repo_name = 'vcs_test'
23 23 response = self.app.post(url(controller='settings', action='fork_create',
24 24 repo_name=repo_name),
25 25 {'fork_name':fork_name,
26 26 'description':description,
27 27 'private':'False'})
28 28
29 29
30 30 print response
31 31
32 32 #test if we have a message that fork is ok
33 33 assert 'fork %s repository as %s task added' \
34 34 % (repo_name, fork_name) in response.session['flash'][0], 'No flash message about fork'
35 35
36 36 #test if the fork was created in the database
37 37 fork_repo = self.sa.query(Repository).filter(Repository.repo_name == fork_name).one()
38 38
39 39 assert fork_repo.repo_name == fork_name, 'wrong name of repo name in new db fork repo'
40 40 assert fork_repo.fork.repo_name == repo_name, 'wron fork parrent'
41 41
42 42
43 43 #test if fork is visible in the list ?
44 44 response = response.follow()
45
46
47 #check if fork is marked as fork
48 response = self.app.get(url(controller='summary', action='index',
49 repo_name=fork_name))
50
45 51
46 52 print response
53
54 assert 'Fork of %s' % repo_name in response.body, 'no message about that this repo is a fork'
55
General Comments 0
You need to be logged in to leave comments. Login now