##// END OF EJS Templates
Implemented locking for task, to prevent for running the same tasks,...
marcink -
r497:fb0c3af6 celery
parent child Browse files
Show More
@@ -1,29 +1,66 b''
1 from pylons_app.lib.pidlock import DaemonLock, LockHeld
1 2 from vcs.utils.lazy import LazyProperty
3 from decorator import decorator
2 4 import logging
3 5 import os
4 6 import sys
5 7 import traceback
6
8 from hashlib import md5
7 9 log = logging.getLogger(__name__)
8 10
9 11 class ResultWrapper(object):
10 12 def __init__(self, task):
11 13 self.task = task
12 14
13 15 @LazyProperty
14 16 def result(self):
15 17 return self.task
16 18
17 19 def run_task(task, *args, **kwargs):
18 20 try:
19 21 t = task.delay(*args, **kwargs)
20 22 log.info('running task %s', t.task_id)
21 23 return t
22 24 except Exception, e:
25 print e
23 26 if e.errno == 111:
24 27 log.debug('Unnable to connect. Sync execution')
25 28 else:
26 29 log.error(traceback.format_exc())
27 30 #pure sync version
28 31 return ResultWrapper(task(*args, **kwargs))
32
33
34 class LockTask(object):
35 """LockTask decorator"""
29 36
37 def __init__(self, func):
38 self.func = func
39
40 def __call__(self, func):
41 return decorator(self.__wrapper, func)
42
43 def __wrapper(self, func, *fargs, **fkwargs):
44 params = []
45 params.extend(fargs)
46 params.extend(fkwargs.values())
47 lockkey = 'task_%s' % \
48 md5(str(self.func) + '-' + '-'.join(map(str, params))).hexdigest()
49 log.info('running task with lockkey %s', lockkey)
50 try:
51 l = DaemonLock(lockkey)
52 return func(*fargs, **fkwargs)
53 l.release()
54 except LockHeld:
55 log.info('LockHeld')
56 return 'Task with key %s already running' % lockkey
57
58
59
60
61
62
63
64
65
66
@@ -1,266 +1,270 b''
1 1 from celery.decorators import task
2 2 from celery.task.sets import subtask
3 3 from celeryconfig import PYLONS_CONFIG as config
4 4 from pylons.i18n.translation import _
5 from pylons_app.lib.celerylib import run_task
5 from pylons_app.lib.celerylib import run_task, LockTask
6 6 from pylons_app.lib.helpers import person
7 7 from pylons_app.lib.smtp_mailer import SmtpMailer
8 8 from pylons_app.lib.utils import OrderedDict
9 9 from operator import itemgetter
10 10 from vcs.backends.hg import MercurialRepository
11 11 from time import mktime
12 12 import traceback
13 13 import json
14 14
15 15 __all__ = ['whoosh_index', 'get_commits_stats',
16 16 'reset_user_password', 'send_email']
17 17
18 18 def get_session():
19 19 from sqlalchemy import engine_from_config
20 20 from sqlalchemy.orm import sessionmaker, scoped_session
21 21 engine = engine_from_config(dict(config.items('app:main')), 'sqlalchemy.db1.')
22 22 sa = scoped_session(sessionmaker(bind=engine))
23 23 return sa
24 24
25 25 def get_hg_settings():
26 26 from pylons_app.model.db import HgAppSettings
27 27 try:
28 28 sa = get_session()
29 29 ret = sa.query(HgAppSettings).all()
30 30 finally:
31 31 sa.remove()
32 32
33 33 if not ret:
34 34 raise Exception('Could not get application settings !')
35 35 settings = {}
36 36 for each in ret:
37 37 settings['hg_app_' + each.app_settings_name] = each.app_settings_value
38 38
39 39 return settings
40 40
41 41 def get_hg_ui_settings():
42 42 from pylons_app.model.db import HgAppUi
43 43 try:
44 44 sa = get_session()
45 45 ret = sa.query(HgAppUi).all()
46 46 finally:
47 47 sa.remove()
48 48
49 49 if not ret:
50 50 raise Exception('Could not get application ui settings !')
51 51 settings = {}
52 52 for each in ret:
53 53 k = each.ui_key
54 54 v = each.ui_value
55 55 if k == '/':
56 56 k = 'root_path'
57 57
58 58 if k.find('.') != -1:
59 59 k = k.replace('.', '_')
60 60
61 61 if each.ui_section == 'hooks':
62 62 v = each.ui_active
63 63
64 64 settings[each.ui_section + '_' + k] = v
65 65
66 66 return settings
67 67
68 68 @task
69 69 def whoosh_index(repo_location, full_index):
70 70 log = whoosh_index.get_logger()
71 from pylons_app.lib.indexers import DaemonLock
71 from pylons_app.lib.pidlock import DaemonLock
72 72 from pylons_app.lib.indexers.daemon import WhooshIndexingDaemon, LockHeld
73 73 try:
74 74 l = DaemonLock()
75 75 WhooshIndexingDaemon(repo_location=repo_location)\
76 76 .run(full_index=full_index)
77 77 l.release()
78 78 return 'Done'
79 79 except LockHeld:
80 80 log.info('LockHeld')
81 81 return 'LockHeld'
82 82
83
83 84 @task
85 @LockTask('get_commits_stats')
84 86 def get_commits_stats(repo_name, ts_min_y, ts_max_y):
85 87 author_key_cleaner = lambda k: person(k).replace('"', "") #for js data compatibilty
86 88
87 89 from pylons_app.model.db import Statistics, Repository
88 90 log = get_commits_stats.get_logger()
89 91 commits_by_day_author_aggregate = {}
90 92 commits_by_day_aggregate = {}
91 93 repos_path = get_hg_ui_settings()['paths_root_path'].replace('*', '')
92 94 repo = MercurialRepository(repos_path + repo_name)
93 95
94 96 skip_date_limit = True
95 parse_limit = 500 #limit for single task changeset parsing
97 parse_limit = 350 #limit for single task changeset parsing
96 98 last_rev = 0
97 99 last_cs = None
98 100 timegetter = itemgetter('time')
99 101
100 102 sa = get_session()
101 103
102 104 dbrepo = sa.query(Repository)\
103 105 .filter(Repository.repo_name == repo_name).scalar()
104 106 cur_stats = sa.query(Statistics)\
105 107 .filter(Statistics.repository == dbrepo).scalar()
106 108 if cur_stats:
107 109 last_rev = cur_stats.stat_on_revision
108 110
109 111 if last_rev == repo.revisions[-1]:
110 112 #pass silently without any work
111 113 return True
112 114
113 115 if cur_stats:
114 116 commits_by_day_aggregate = OrderedDict(
115 117 json.loads(
116 118 cur_stats.commit_activity_combined))
117 119 commits_by_day_author_aggregate = json.loads(cur_stats.commit_activity)
118 120
119 121 for cnt, rev in enumerate(repo.revisions[last_rev:]):
120 122 last_cs = cs = repo.get_changeset(rev)
121 123 k = '%s-%s-%s' % (cs.date.timetuple()[0], cs.date.timetuple()[1],
122 124 cs.date.timetuple()[2])
123 125 timetupple = [int(x) for x in k.split('-')]
124 126 timetupple.extend([0 for _ in xrange(6)])
125 127 k = mktime(timetupple)
126 128 if commits_by_day_author_aggregate.has_key(author_key_cleaner(cs.author)):
127 129 try:
128 130 l = [timegetter(x) for x in commits_by_day_author_aggregate\
129 131 [author_key_cleaner(cs.author)]['data']]
130 132 time_pos = l.index(k)
131 133 except ValueError:
132 134 time_pos = False
133 135
134 136 if time_pos >= 0 and time_pos is not False:
135 137
136 138 datadict = commits_by_day_author_aggregate\
137 139 [author_key_cleaner(cs.author)]['data'][time_pos]
138 140
139 141 datadict["commits"] += 1
140 142 datadict["added"] += len(cs.added)
141 143 datadict["changed"] += len(cs.changed)
142 144 datadict["removed"] += len(cs.removed)
143 145 #print datadict
144 146
145 147 else:
146 148 #print 'ELSE !!!!'
147 149 if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
148 150
149 151 datadict = {"time":k,
150 152 "commits":1,
151 153 "added":len(cs.added),
152 154 "changed":len(cs.changed),
153 155 "removed":len(cs.removed),
154 156 }
155 157 commits_by_day_author_aggregate\
156 158 [author_key_cleaner(cs.author)]['data'].append(datadict)
157 159
158 160 else:
159 161 #print k, 'nokey ADDING'
160 162 if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
161 163 commits_by_day_author_aggregate[author_key_cleaner(cs.author)] = {
162 164 "label":author_key_cleaner(cs.author),
163 165 "data":[{"time":k,
164 166 "commits":1,
165 167 "added":len(cs.added),
166 168 "changed":len(cs.changed),
167 169 "removed":len(cs.removed),
168 170 }],
169 171 "schema":["commits"],
170 172 }
171 173
172 174 # #gather all data by day
173 175 if commits_by_day_aggregate.has_key(k):
174 176 commits_by_day_aggregate[k] += 1
175 177 else:
176 178 commits_by_day_aggregate[k] = 1
177 179
178 180 if cnt >= parse_limit:
179 181 #don't fetch to much data since we can freeze application
180 182 break
181 183
182 184 overview_data = []
183 185 for k, v in commits_by_day_aggregate.items():
184 186 overview_data.append([k, v])
185 187 overview_data = sorted(overview_data, key=itemgetter(0))
186 188
187 189 if not commits_by_day_author_aggregate:
188 190 commits_by_day_author_aggregate[author_key_cleaner(repo.contact)] = {
189 191 "label":author_key_cleaner(repo.contact),
190 192 "data":[0, 1],
191 193 "schema":["commits"],
192 194 }
193 195
194 196 stats = cur_stats if cur_stats else Statistics()
195 197 stats.commit_activity = json.dumps(commits_by_day_author_aggregate)
196 198 stats.commit_activity_combined = json.dumps(overview_data)
197 199 stats.repository = dbrepo
198 200 stats.stat_on_revision = last_cs.revision
199 201 stats.languages = json.dumps({'_TOTAL_':0, '':0})
200 202
201 203 try:
202 204 sa.add(stats)
203 205 sa.commit()
204 206 except:
205 207 log.error(traceback.format_exc())
206 208 sa.rollback()
207 209 return False
208
210
211 run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y)
212
209 213 return True
210 214
211 215 @task
212 216 def reset_user_password(user_email):
213 217 log = reset_user_password.get_logger()
214 218 from pylons_app.lib import auth
215 219 from pylons_app.model.db import User
216 220
217 221 try:
218 222 try:
219 223 sa = get_session()
220 224 user = sa.query(User).filter(User.email == user_email).scalar()
221 225 new_passwd = auth.PasswordGenerator().gen_password(8,
222 226 auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
223 227 if user:
224 228 user.password = auth.get_crypt_password(new_passwd)
225 229 sa.add(user)
226 230 sa.commit()
227 231 log.info('change password for %s', user_email)
228 232 if new_passwd is None:
229 233 raise Exception('unable to generate new password')
230 234
231 235 except:
232 236 log.error(traceback.format_exc())
233 237 sa.rollback()
234 238
235 239 run_task(send_email, user_email,
236 240 "Your new hg-app password",
237 241 'Your new hg-app password:%s' % (new_passwd))
238 242 log.info('send new password mail to %s', user_email)
239 243
240 244
241 245 except:
242 246 log.error('Failed to update user password')
243 247 log.error(traceback.format_exc())
244 248 return True
245 249
246 250 @task
247 251 def send_email(recipients, subject, body):
248 252 log = send_email.get_logger()
249 253 email_config = dict(config.items('DEFAULT'))
250 254 mail_from = email_config.get('app_email_from')
251 255 user = email_config.get('smtp_username')
252 256 passwd = email_config.get('smtp_password')
253 257 mail_server = email_config.get('smtp_server')
254 258 mail_port = email_config.get('smtp_port')
255 259 tls = email_config.get('smtp_use_tls')
256 260 ssl = False
257 261
258 262 try:
259 263 m = SmtpMailer(mail_from, user, passwd, mail_server,
260 264 mail_port, ssl, tls)
261 265 m.send(recipients, subject, body)
262 266 except:
263 267 log.error('Mail sending failed')
264 268 log.error(traceback.format_exc())
265 269 return False
266 270 return True
@@ -1,140 +1,139 b''
1 1 from os.path import dirname as dn, join as jn
2 from pidlock import LockHeld, DaemonLock
3 2 from pylons_app.config.environment import load_environment
4 3 from pylons_app.model.hg_model import HgModel
5 4 from shutil import rmtree
6 5 from webhelpers.html.builder import escape
7 6 from vcs.utils.lazy import LazyProperty
8 7
9 8 from whoosh.analysis import RegexTokenizer, LowercaseFilter, StopFilter
10 9 from whoosh.fields import TEXT, ID, STORED, Schema, FieldType
11 10 from whoosh.index import create_in, open_dir
12 11 from whoosh.formats import Characters
13 12 from whoosh.highlight import highlight, SimpleFragmenter, HtmlFormatter
14 13
15 14 import os
16 15 import sys
17 16 import traceback
18 17
19 18 #to get the pylons_app import
20 19 sys.path.append(dn(dn(dn(os.path.realpath(__file__)))))
21 20
22 21
23 22 #LOCATION WE KEEP THE INDEX
24 23 IDX_LOCATION = jn(dn(dn(dn(dn(os.path.abspath(__file__))))), 'data', 'index')
25 24
26 25 #EXTENSIONS WE WANT TO INDEX CONTENT OFF
27 26 INDEX_EXTENSIONS = ['action', 'adp', 'ashx', 'asmx', 'aspx', 'asx', 'axd', 'c',
28 27 'cfg', 'cfm', 'cpp', 'cs', 'css', 'diff', 'do', 'el', 'erl',
29 28 'h', 'htm', 'html', 'ini', 'java', 'js', 'jsp', 'jspx', 'lisp',
30 29 'lua', 'm', 'mako', 'ml', 'pas', 'patch', 'php', 'php3',
31 30 'php4', 'phtml', 'pm', 'py', 'rb', 'rst', 's', 'sh', 'sql',
32 31 'tpl', 'txt', 'vim', 'wss', 'xhtml', 'xml', 'xsl', 'xslt',
33 32 'yaws']
34 33
35 34 #CUSTOM ANALYZER wordsplit + lowercase filter
36 35 ANALYZER = RegexTokenizer(expression=r"\w+") | LowercaseFilter()
37 36
38 37
39 38 #INDEX SCHEMA DEFINITION
40 39 SCHEMA = Schema(owner=TEXT(),
41 40 repository=TEXT(stored=True),
42 41 path=ID(stored=True, unique=True),
43 42 content=FieldType(format=Characters(ANALYZER),
44 43 scorable=True, stored=True),
45 44 modtime=STORED(), extension=TEXT(stored=True))
46 45
47 46
48 47 IDX_NAME = 'HG_INDEX'
49 48 FORMATTER = HtmlFormatter('span', between='\n<span class="break">...</span>\n')
50 49 FRAGMENTER = SimpleFragmenter(200)
51 50
52 51 class ResultWrapper(object):
53 52 def __init__(self, searcher, matcher, highlight_items):
54 53 self.searcher = searcher
55 54 self.matcher = matcher
56 55 self.highlight_items = highlight_items
57 56 self.fragment_size = 200 / 2
58 57
59 58 @LazyProperty
60 59 def doc_ids(self):
61 60 docs_id = []
62 61 while self.matcher.is_active():
63 62 docnum = self.matcher.id()
64 63 chunks = [offsets for offsets in self.get_chunks()]
65 64 docs_id.append([docnum, chunks])
66 65 self.matcher.next()
67 66 return docs_id
68 67
69 68 def __str__(self):
70 69 return '<%s at %s>' % (self.__class__.__name__, len(self.doc_ids))
71 70
72 71 def __repr__(self):
73 72 return self.__str__()
74 73
75 74 def __len__(self):
76 75 return len(self.doc_ids)
77 76
78 77 def __iter__(self):
79 78 """
80 79 Allows Iteration over results,and lazy generate content
81 80
82 81 *Requires* implementation of ``__getitem__`` method.
83 82 """
84 83 for docid in self.doc_ids:
85 84 yield self.get_full_content(docid)
86 85
87 86 def __getslice__(self, i, j):
88 87 """
89 88 Slicing of resultWrapper
90 89 """
91 90 slice = []
92 91 for docid in self.doc_ids[i:j]:
93 92 slice.append(self.get_full_content(docid))
94 93 return slice
95 94
96 95
97 96 def get_full_content(self, docid):
98 97 res = self.searcher.stored_fields(docid[0])
99 98 f_path = res['path'][res['path'].find(res['repository']) \
100 99 + len(res['repository']):].lstrip('/')
101 100
102 101 content_short = self.get_short_content(res, docid[1])
103 102 res.update({'content_short':content_short,
104 103 'content_short_hl':self.highlight(content_short),
105 104 'f_path':f_path})
106 105
107 106 return res
108 107
109 108 def get_short_content(self, res, chunks):
110 109
111 110 return ''.join([res['content'][chunk[0]:chunk[1]] for chunk in chunks])
112 111
113 112 def get_chunks(self):
114 113 """
115 114 Smart function that implements chunking the content
116 115 but not overlap chunks so it doesn't highlight the same
117 116 close occurences twice.
118 117 @param matcher:
119 118 @param size:
120 119 """
121 120 memory = [(0, 0)]
122 121 for span in self.matcher.spans():
123 122 start = span.startchar or 0
124 123 end = span.endchar or 0
125 124 start_offseted = max(0, start - self.fragment_size)
126 125 end_offseted = end + self.fragment_size
127 126
128 127 if start_offseted < memory[-1][1]:
129 128 start_offseted = memory[-1][1]
130 129 memory.append((start_offseted, end_offseted,))
131 130 yield (start_offseted, end_offseted,)
132 131
133 132 def highlight(self, content, top=5):
134 133 hl = highlight(escape(content),
135 134 self.highlight_items,
136 135 analyzer=ANALYZER,
137 136 fragmenter=FRAGMENTER,
138 137 formatter=FORMATTER,
139 138 top=top)
140 139 return hl
@@ -1,238 +1,238 b''
1 1 #!/usr/bin/env python
2 2 # encoding: utf-8
3 3 # whoosh indexer daemon for hg-app
4 4 # Copyright (C) 2009-2010 Marcin Kuzminski <marcin@python-works.com>
5 5 #
6 6 # This program is free software; you can redistribute it and/or
7 7 # modify it under the terms of the GNU General Public License
8 8 # as published by the Free Software Foundation; version 2
9 9 # of the License or (at your opinion) any later version of the license.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software
18 18 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
19 19 # MA 02110-1301, USA.
20 20 """
21 21 Created on Jan 26, 2010
22 22
23 23 @author: marcink
24 24 A deamon will read from task table and run tasks
25 25 """
26 26 import sys
27 27 import os
28 28 from os.path import dirname as dn
29 29 from os.path import join as jn
30 30
31 31 #to get the pylons_app import
32 32 project_path = dn(dn(dn(dn(os.path.realpath(__file__)))))
33 33 sys.path.append(project_path)
34 34
35 from pidlock import LockHeld, DaemonLock
35 from pylons_app.lib.pidlock import LockHeld, DaemonLock
36 36 from pylons_app.model.hg_model import HgModel
37 37 from pylons_app.lib.helpers import safe_unicode
38 38 from whoosh.index import create_in, open_dir
39 39 from shutil import rmtree
40 40 from pylons_app.lib.indexers import INDEX_EXTENSIONS, IDX_LOCATION, SCHEMA, IDX_NAME
41 41
42 42 import logging
43 43
44 44 log = logging.getLogger('whooshIndexer')
45 45 # create logger
46 46 log.setLevel(logging.DEBUG)
47 47 log.propagate = False
48 48 # create console handler and set level to debug
49 49 ch = logging.StreamHandler()
50 50 ch.setLevel(logging.DEBUG)
51 51
52 52 # create formatter
53 53 formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
54 54
55 55 # add formatter to ch
56 56 ch.setFormatter(formatter)
57 57
58 58 # add ch to logger
59 59 log.addHandler(ch)
60 60
61 61 def scan_paths(root_location):
62 62 return HgModel.repo_scan('/', root_location, None, True)
63 63
64 64 class WhooshIndexingDaemon(object):
65 65 """Deamon for atomic jobs"""
66 66
67 67 def __init__(self, indexname='HG_INDEX', repo_location=None):
68 68 self.indexname = indexname
69 69 self.repo_location = repo_location
70 70 self.initial = False
71 71 if not os.path.isdir(IDX_LOCATION):
72 72 os.mkdir(IDX_LOCATION)
73 73 log.info('Cannot run incremental index since it does not'
74 74 ' yet exist running full build')
75 75 self.initial = True
76 76
77 77 def get_paths(self, root_dir):
78 78 """recursive walk in root dir and return a set of all path in that dir
79 79 excluding files in .hg dir"""
80 80 index_paths_ = set()
81 81 for path, dirs, files in os.walk(root_dir):
82 82 if path.find('.hg') == -1:
83 83 for f in files:
84 84 index_paths_.add(jn(path, f))
85 85
86 86 return index_paths_
87 87
88 88 def add_doc(self, writer, path, repo):
89 89 """Adding doc to writer"""
90 90
91 91 ext = unicode(path.split('/')[-1].split('.')[-1].lower())
92 92 #we just index the content of choosen files
93 93 if ext in INDEX_EXTENSIONS:
94 94 log.debug(' >> %s [WITH CONTENT]' % path)
95 95 fobj = open(path, 'rb')
96 96 content = fobj.read()
97 97 fobj.close()
98 98 u_content = safe_unicode(content)
99 99 else:
100 100 log.debug(' >> %s' % path)
101 101 #just index file name without it's content
102 102 u_content = u''
103 103
104 104
105 105
106 106 try:
107 107 os.stat(path)
108 108 writer.add_document(owner=unicode(repo.contact),
109 109 repository=u"%s" % repo.name,
110 110 path=u"%s" % path,
111 111 content=u_content,
112 112 modtime=os.path.getmtime(path),
113 113 extension=ext)
114 114 except OSError, e:
115 115 import errno
116 116 if e.errno == errno.ENOENT:
117 117 log.debug('path %s does not exist or is a broken symlink' % path)
118 118 else:
119 119 raise e
120 120
121 121
122 122 def build_index(self):
123 123 if os.path.exists(IDX_LOCATION):
124 124 log.debug('removing previos index')
125 125 rmtree(IDX_LOCATION)
126 126
127 127 if not os.path.exists(IDX_LOCATION):
128 128 os.mkdir(IDX_LOCATION)
129 129
130 130 idx = create_in(IDX_LOCATION, SCHEMA, indexname=IDX_NAME)
131 131 writer = idx.writer()
132 132
133 133 for cnt, repo in enumerate(scan_paths(self.repo_location).values()):
134 134 log.debug('building index @ %s' % repo.path)
135 135
136 136 for idx_path in self.get_paths(repo.path):
137 137 self.add_doc(writer, idx_path, repo)
138 138 writer.commit(merge=True)
139 139
140 140 log.debug('>>> FINISHED BUILDING INDEX <<<')
141 141
142 142
143 143 def update_index(self):
144 144 log.debug('STARTING INCREMENTAL INDEXING UPDATE')
145 145
146 146 idx = open_dir(IDX_LOCATION, indexname=self.indexname)
147 147 # The set of all paths in the index
148 148 indexed_paths = set()
149 149 # The set of all paths we need to re-index
150 150 to_index = set()
151 151
152 152 reader = idx.reader()
153 153 writer = idx.writer()
154 154
155 155 # Loop over the stored fields in the index
156 156 for fields in reader.all_stored_fields():
157 157 indexed_path = fields['path']
158 158 indexed_paths.add(indexed_path)
159 159
160 160 if not os.path.exists(indexed_path):
161 161 # This file was deleted since it was indexed
162 162 log.debug('removing from index %s' % indexed_path)
163 163 writer.delete_by_term('path', indexed_path)
164 164
165 165 else:
166 166 # Check if this file was changed since it
167 167 # was indexed
168 168 indexed_time = fields['modtime']
169 169
170 170 mtime = os.path.getmtime(indexed_path)
171 171
172 172 if mtime > indexed_time:
173 173
174 174 # The file has changed, delete it and add it to the list of
175 175 # files to reindex
176 176 log.debug('adding to reindex list %s' % indexed_path)
177 177 writer.delete_by_term('path', indexed_path)
178 178 to_index.add(indexed_path)
179 179 #writer.commit()
180 180
181 181 # Loop over the files in the filesystem
182 182 # Assume we have a function that gathers the filenames of the
183 183 # documents to be indexed
184 184 for repo in scan_paths(self.repo_location).values():
185 185 for path in self.get_paths(repo.path):
186 186 if path in to_index or path not in indexed_paths:
187 187 # This is either a file that's changed, or a new file
188 188 # that wasn't indexed before. So index it!
189 189 self.add_doc(writer, path, repo)
190 190 log.debug('reindexing %s' % path)
191 191
192 192 writer.commit(merge=True)
193 193 #idx.optimize()
194 194 log.debug('>>> FINISHED <<<')
195 195
196 196 def run(self, full_index=False):
197 197 """Run daemon"""
198 198 if full_index or self.initial:
199 199 self.build_index()
200 200 else:
201 201 self.update_index()
202 202
203 203 if __name__ == "__main__":
204 204 arg = sys.argv[1:]
205 205 if len(arg) != 2:
206 206 sys.stderr.write('Please specify indexing type [full|incremental]'
207 207 'and path to repositories as script args \n')
208 208 sys.exit()
209 209
210 210
211 211 if arg[0] == 'full':
212 212 full_index = True
213 213 elif arg[0] == 'incremental':
214 214 # False means looking just for changes
215 215 full_index = False
216 216 else:
217 217 sys.stdout.write('Please use [full|incremental]'
218 218 ' as script first arg \n')
219 219 sys.exit()
220 220
221 221 if not os.path.isdir(arg[1]):
222 222 sys.stderr.write('%s is not a valid path \n' % arg[1])
223 223 sys.exit()
224 224 else:
225 225 if arg[1].endswith('/'):
226 226 repo_location = arg[1] + '*'
227 227 else:
228 228 repo_location = arg[1] + '/*'
229 229
230 230 try:
231 231 l = DaemonLock()
232 232 WhooshIndexingDaemon(repo_location=repo_location)\
233 233 .run(full_index=full_index)
234 234 l.release()
235 235 reload(logging)
236 236 except LockHeld:
237 237 sys.exit(1)
238 238
@@ -1,127 +1,127 b''
1 1 import os, time
2 2 import sys
3 3 from warnings import warn
4 4
5 5 class LockHeld(Exception):pass
6 6
7 7
8 8 class DaemonLock(object):
9 '''daemon locking
9 """daemon locking
10 10 USAGE:
11 11 try:
12 12 l = lock()
13 13 main()
14 14 l.release()
15 15 except LockHeld:
16 16 sys.exit(1)
17 '''
17 """
18 18
19 19 def __init__(self, file=None, callbackfn=None,
20 20 desc='daemon lock', debug=False):
21 21
22 22 self.pidfile = file if file else os.path.join(os.path.dirname(__file__),
23 23 'running.lock')
24 24 self.callbackfn = callbackfn
25 25 self.desc = desc
26 26 self.debug = debug
27 27 self.held = False
28 28 #run the lock automatically !
29 29 self.lock()
30 30
31 31 def __del__(self):
32 32 if self.held:
33 33
34 34 # warn("use lock.release instead of del lock",
35 35 # category = DeprecationWarning,
36 36 # stacklevel = 2)
37 37
38 38 # ensure the lock will be removed
39 39 self.release()
40 40
41 41
42 42 def lock(self):
43 '''
43 """
44 44 locking function, if lock is present it will raise LockHeld exception
45 '''
45 """
46 46 lockname = '%s' % (os.getpid())
47 47
48 48 self.trylock()
49 49 self.makelock(lockname, self.pidfile)
50 50 return True
51 51
52 52 def trylock(self):
53 53 running_pid = False
54 54 try:
55 55 pidfile = open(self.pidfile, "r")
56 56 pidfile.seek(0)
57 57 running_pid = pidfile.readline()
58 58 if self.debug:
59 59 print 'lock file present running_pid: %s, checking for execution'\
60 60 % running_pid
61 61 # Now we check the PID from lock file matches to the current
62 62 # process PID
63 63 if running_pid:
64 64 if os.path.exists("/proc/%s" % running_pid):
65 65 print "You already have an instance of the program running"
66 66 print "It is running as process %s" % running_pid
67 67 raise LockHeld
68 68 else:
69 69 print "Lock File is there but the program is not running"
70 70 print "Removing lock file for the: %s" % running_pid
71 71 self.release()
72 72 except IOError, e:
73 73 if e.errno != 2:
74 74 raise
75 75
76 76
77 77 def release(self):
78 '''
78 """
79 79 releases the pid by removing the pidfile
80 '''
80 """
81 81 if self.callbackfn:
82 82 #execute callback function on release
83 83 if self.debug:
84 84 print 'executing callback function %s' % self.callbackfn
85 85 self.callbackfn()
86 86 try:
87 87 if self.debug:
88 88 print 'removing pidfile %s' % self.pidfile
89 89 os.remove(self.pidfile)
90 90 self.held = False
91 91 except OSError, e:
92 92 if self.debug:
93 93 print 'removing pidfile failed %s' % e
94 94 pass
95 95
96 96 def makelock(self, lockname, pidfile):
97 '''
97 """
98 98 this function will make an actual lock
99 99 @param lockname: acctual pid of file
100 100 @param pidfile: the file to write the pid in
101 '''
101 """
102 102 if self.debug:
103 103 print 'creating a file %s and pid: %s' % (pidfile, lockname)
104 104 pidfile = open(self.pidfile, "wb")
105 105 pidfile.write(lockname)
106 106 pidfile.close
107 107 self.held = True
108 108
109 109
110 110 def main():
111 111 print 'func is running'
112 112 cnt = 20
113 113 while 1:
114 114 print cnt
115 115 if cnt == 0:
116 116 break
117 117 time.sleep(1)
118 118 cnt -= 1
119 119
120 120
121 121 if __name__ == "__main__":
122 122 try:
123 123 l = DaemonLock(desc='test lock')
124 124 main()
125 125 l.release()
126 126 except LockHeld:
127 127 sys.exit(1)
@@ -1,439 +1,438 b''
1 1 #!/usr/bin/env python
2 2 # encoding: utf-8
3 3 # Utilities for hg app
4 4 # Copyright (C) 2009-2010 Marcin Kuzminski <marcin@python-works.com>
5 5 # This program is free software; you can redistribute it and/or
6 6 # modify it under the terms of the GNU General Public License
7 7 # as published by the Free Software Foundation; version 2
8 8 # of the License or (at your option) any later version of the license.
9 9 #
10 10 # This program is distributed in the hope that it will be useful,
11 11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 13 # GNU General Public License for more details.
14 14 #
15 15 # You should have received a copy of the GNU General Public License
16 16 # along with this program; if not, write to the Free Software
17 17 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
18 18 # MA 02110-1301, USA.
19 19
20 20 """
21 21 Created on April 18, 2010
22 22 Utilities for hg app
23 23 @author: marcink
24 24 """
25 25 from beaker.cache import cache_region
26 26 from mercurial import ui, config, hg
27 27 from mercurial.error import RepoError
28 28 from pylons_app.model import meta
29 29 from pylons_app.model.db import Repository, User, HgAppUi, HgAppSettings
30 30 from vcs.backends.base import BaseChangeset
31 31 from vcs.utils.lazy import LazyProperty
32 32 import logging
33 33 import os
34 34
35 35 log = logging.getLogger(__name__)
36 36
37 37
def get_repo_slug(request):
    """Return the ``repo_name`` routing argument for the current request,
    or None when the matched route has no repository part.
    """
    routes_args = request.environ['pylons.routes_dict']
    return routes_args.get('repo_name')
40 40
def is_mercurial(environ):
    """
    Returns True if request's target is mercurial server - header
    ``HTTP_ACCEPT`` of such request would start with ``application/mercurial``.
    """
    accept_header = environ.get('HTTP_ACCEPT')
    return bool(accept_header and
                accept_header.startswith('application/mercurial'))
50 50
def check_repo_dir(paths):
    """Validate that the configured repositories root exists.

    @param paths: list of (key, path) tuples as read from the [paths]
        section of an hgrc; only the first entry is checked
    @raise Exception: when the directory does not exist
    """
    parts = paths[0][1].split('/')
    #strip the trailing glob marker used by the paths configuration
    if parts[-1] in ('*', '**'):
        parts = parts[:-1]
    #re-anchor at the filesystem root, since split() drops the leading '/'
    if parts[0] != '/':
        parts[0] = '/'
    if not os.path.isdir(os.path.join(*parts)):
        raise Exception('Not a valid repository in %s' % paths[0][1])
59 59
def check_repo_fast(repo_name, base_path):
    """Cheap existence probe: True when no directory for ``repo_name``
    exists under ``base_path`` (i.e. the name is free for creation),
    False when the path is already taken.
    """
    return not os.path.isdir(os.path.join(base_path, repo_name))
63 63
def check_repo(repo_name, base_path, verify=True):
    """Return True when ``repo_name`` is free for creation under
    ``base_path``, False when a directory (or valid mercurial repository)
    already lives there.

    @param repo_name: repository name to probe
    @param base_path: repositories root directory
    @param verify: when True also run ``hg verify`` on an existing repo
    """

    repo_path = os.path.join(base_path, repo_name)

    try:
        if not check_repo_fast(repo_name, base_path):
            #directory already exists - the name is taken
            return False
        #opening a missing/invalid repo raises RepoError below
        r = hg.repository(ui.ui(), repo_path)
        if verify:
            hg.verify(r)
        #here we know that repo exists it was verified
        log.info('%s repo is already created', repo_name)
        return False
    except RepoError:
        #it means that there is no valid repo there...
        log.info('%s repo is free for creation', repo_name)
        return True
81 81
82 82 def ask_ok(prompt, retries=4, complaint='Yes or no, please!'):
83 83 while True:
84 84 ok = raw_input(prompt)
85 85 if ok in ('y', 'ye', 'yes'): return True
86 86 if ok in ('n', 'no', 'nop', 'nope'): return False
87 87 retries = retries - 1
88 88 if retries < 0: raise IOError
89 89 print complaint
90 90
@cache_region('super_short_term', 'cached_hg_ui')
def get_hg_ui_cached():
    """Fetch all HgAppUi rows from the database, memoized in the
    'super_short_term' beaker cache region under key 'cached_hg_ui'.
    """
    try:
        sa = meta.Session
        ret = sa.query(HgAppUi).all()
    finally:
        #always detach the thread-local session, even if the query fails
        meta.Session.remove()
    return ret
99 99
100 100
def get_hg_settings():
    """Read all application settings rows from the database and return
    them as a dict keyed 'hg_app_<setting name>'.

    @raise Exception: when the settings table is empty
    """
    try:
        sa = meta.Session
        ret = sa.query(HgAppSettings).all()
    finally:
        meta.Session.remove()

    if not ret:
        raise Exception('Could not get application settings !')
    return dict(('hg_app_' + row.app_settings_name, row.app_settings_value)
                for row in ret)
115 115
def get_hg_ui_settings():
    """Read all HgAppUi rows and return them as a flat dict keyed
    '<section>_<key>' (dots in keys become underscores, '/' becomes
    'root_path'); hooks report their active flag instead of their value.

    @raise Exception: when no ui settings exist
    """
    try:
        sa = meta.Session
        ret = sa.query(HgAppUi).all()
    finally:
        meta.Session.remove()

    if not ret:
        raise Exception('Could not get application ui settings !')

    settings = {}
    for ui_ in ret:
        key = ui_.ui_key
        #the repositories root is stored under the '/' key
        if key == '/':
            key = 'root_path'
        #dots are not usable in the flattened key names
        if '.' in key:
            key = key.replace('.', '_')
        #for hooks the interesting bit is whether they are enabled
        value = ui_.ui_active if ui_.ui_section == 'hooks' else ui_.ui_value
        settings[ui_.ui_section + '_' + key] = value

    return settings
141 141
#propagated from mercurial documentation
#whitelist of hgrc sections that make_ui() will copy into a ui object
ui_sections = ['alias', 'auth',
                'decode/encode', 'defaults',
                'diff', 'email',
                'extensions', 'format',
                'merge-patterns', 'merge-tools',
                'hooks', 'http_proxy',
                'smtp', 'patch',
                'paths', 'profiling',
                'server', 'trusted',
                'ui', 'web', ]
153 153
def make_ui(read_from='file', path=None, checkpaths=True):
    """
    A function that will read python rc files or database
    and make an mercurial ui object from read options

    @param path: path to mercurial config file
    @param checkpaths: check the path
    @param read_from: read from 'file' or 'db'
    @return: configured mercurial ui object, or False when read_from is
        'file' and the file is missing (callers must handle that case)
    """

    baseui = ui.ui()

    if read_from == 'file':
        if not os.path.isfile(path):
            log.warning('Unable to read config file %s' % path)
            #NOTE(review): returns False instead of a ui object here
            return False
        log.debug('reading hgrc from %s', path)
        cfg = config.config()
        cfg.read(path)
        #only copy over whitelisted sections (see ui_sections above)
        for section in ui_sections:
            for k, v in cfg.items(section):
                baseui.setconfig(section, k, v)
                log.debug('settings ui from file[%s]%s:%s', section, k, v)
        #optionally verify the configured repositories root exists
        if checkpaths:check_repo_dir(cfg.items('paths'))


    elif read_from == 'db':
        hg_ui = get_hg_ui_cached()
        for ui_ in hg_ui:
            #only active rows are applied
            if ui_.ui_active:
                log.debug('settings ui from db[%s]%s:%s', ui_.ui_section, ui_.ui_key, ui_.ui_value)
                baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)


    return baseui
189 189
190 190
def set_hg_app_config(config):
    """Copy every stored application setting from the database into the
    given pylons ``config`` mapping.
    """
    for key, value in get_hg_settings().items():
        config[key] = value
196 196
def invalidate_cache(name, *args):
    """Invalidates given name cache

    @param name: name of the cached function/region to drop
    @param args: the original arguments of the cached function - beaker
        needs them to compute the cache key being invalidated
    """
    from beaker.cache import region_invalidate
    log.info('INVALIDATING CACHE FOR %s', name)

    #the name given to the cache decorator must be the first positional
    #argument, otherwise the invalidation would not work
    args = (name,) + tuple(args)

    if name == 'cached_repo_list':
        from pylons_app.model.hg_model import _get_repos_cached
        region_invalidate(_get_repos_cached, None, *args)

    if name == 'full_changelog':
        from pylons_app.model.hg_model import _full_changelog_cached
        region_invalidate(_full_changelog_cached, None, *args)
217 217
class EmptyChangeset(BaseChangeset):
    """Placeholder changeset used where no real changeset exists
    (e.g. an empty repository); mimics the BaseChangeset interface.
    """

    #sentinel values for a non-existing changeset
    revision = -1
    message = ''
    author = ''

    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this changeset, useful for web
        representation.
        """
        return '0' * 12
231 231
232 232
def repo2db_mapper(initial_repo_list, remove_obsolete=False):
    """
    maps all found repositories into db

    @param initial_repo_list: mapping of repo name -> vcs repository
        object as discovered on the filesystem
    @param remove_obsolete: when True also delete database records for
        repositories that no longer exist on disk
    """
    from pylons_app.model.repo_model import RepoModel

    sa = meta.Session
    #newly discovered repositories get attributed to the first admin user
    user = sa.query(User).filter(User.admin == True).first()

    rm = RepoModel()

    for name, repo in initial_repo_list.items():
        #create a default db record only for repos not yet known
        if not sa.query(Repository).filter(Repository.repo_name == name).scalar():
            log.info('repository %s not found creating default', name)

            form_data = {
                         'repo_name':name,
                         'description':repo.description if repo.description != 'unknown' else \
                                        'auto description for %s' % name,
                         'private':False
                         }
            rm.create(form_data, user, just_db=True)


    if remove_obsolete:
        #remove from database those repositories that are not in the filesystem
        for repo in sa.query(Repository).all():
            if repo.repo_name not in initial_repo_list.keys():
                sa.delete(repo)
        sa.commit()


    meta.Session.remove()
266 266
267 267 from UserDict import DictMixin
268 268
class OrderedDict(dict, DictMixin):
    """dict that remembers insertion order - backport recipe for pythons
    without ``collections.OrderedDict``.  Order is tracked in a circular
    doubly linked list whose nodes are [key, prev, next] cells, anchored
    by a sentinel node (``self.__end``).
    """

    def __init__(self, *args, **kwds):
        if len(args) > 1:
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
        try:
            #already initialized? (clear() sets up the linked list)
            self.__end
        except AttributeError:
            self.clear()
        self.update(*args, **kwds)

    def clear(self):
        self.__end = end = []
        end += [None, end, end] # sentinel node for doubly linked list
        self.__map = {} # key --> [key, prev, next]
        dict.clear(self)

    def __setitem__(self, key, value):
        if key not in self:
            #new key: append its node just before the sentinel (list tail)
            end = self.__end
            curr = end[1]
            curr[2] = end[1] = self.__map[key] = [key, curr, end]
        dict.__setitem__(self, key, value)

    def __delitem__(self, key):
        dict.__delitem__(self, key)
        #unlink the key's node from the doubly linked list
        key, prev, next = self.__map.pop(key)
        prev[2] = next
        next[1] = prev

    def __iter__(self):
        #walk the linked list forward, yielding keys in insertion order
        end = self.__end
        curr = end[2]
        while curr is not end:
            yield curr[0]
            curr = curr[2]

    def __reversed__(self):
        #walk the linked list backwards
        end = self.__end
        curr = end[1]
        while curr is not end:
            yield curr[0]
            curr = curr[1]

    def popitem(self, last=True):
        """Remove and return a (key, value) pair; last inserted when
        ``last`` is True, first inserted otherwise.
        """
        if not self:
            raise KeyError('dictionary is empty')
        if last:
            key = reversed(self).next()
        else:
            key = iter(self).next()
        value = self.pop(key)
        return key, value

    def __reduce__(self):
        #pickle support: the linked list is self-referential, so strip it
        #from the instance dict and let the constructor rebuild order
        #from the item list instead
        items = [[k, self[k]] for k in self]
        tmp = self.__map, self.__end
        del self.__map, self.__end
        inst_dict = vars(self).copy()
        self.__map, self.__end = tmp
        if inst_dict:
            return (self.__class__, (items,), inst_dict)
        return self.__class__, (items,)

    def keys(self):
        return list(self)

    #generic implementations from DictMixin are built on the primitives above
    setdefault = DictMixin.setdefault
    update = DictMixin.update
    pop = DictMixin.pop
    values = DictMixin.values
    items = DictMixin.items
    iterkeys = DictMixin.iterkeys
    itervalues = DictMixin.itervalues
    iteritems = DictMixin.iteritems

    def __repr__(self):
        if not self:
            return '%s()' % (self.__class__.__name__,)
        return '%s(%r)' % (self.__class__.__name__, self.items())

    def copy(self):
        return self.__class__(self)

    @classmethod
    def fromkeys(cls, iterable, value=None):
        d = cls()
        for key in iterable:
            d[key] = value
        return d

    def __eq__(self, other):
        #order-sensitive comparison against another OrderedDict,
        #order-insensitive against a plain dict
        if isinstance(other, OrderedDict):
            return len(self) == len(other) and self.items() == other.items()
        return dict.__eq__(self, other)

    def __ne__(self, other):
        return not self == other
367 367
368 368
369 369 #===============================================================================
370 370 # TEST FUNCTIONS
371 371 #===============================================================================
def create_test_index(repo_location, full_index):
    """Makes default test index
    @param repo_location: path of the repositories to index
    @param full_index: when True build a full (not incremental) index
    """
    from pylons_app.lib.indexers.daemon import WhooshIndexingDaemon
    from pylons_app.lib.pidlock import DaemonLock, LockHeld
    from pylons_app.lib.indexers import IDX_LOCATION
    import shutil

    #start from a clean index directory
    if os.path.exists(IDX_LOCATION):
        shutil.rmtree(IDX_LOCATION)

    try:
        l = DaemonLock()
        WhooshIndexingDaemon(repo_location=repo_location)\
            .run(full_index=full_index)
        #NOTE(review): release is not in a finally block - if run() raises
        #anything but LockHeld, the lock file is left behind
        l.release()
    except LockHeld:
        #another indexing run already holds the lock - silently skip
        pass
393 392
def create_test_env(repos_test_path, config):
    """Makes a fresh database and
    install test repository into tmp dir

    @param repos_test_path: path where test repositories should live
    @param config: pylons config containing the sqlalchemy db url
    """
    from pylons_app.lib.db_manage import DbManage
    import tarfile
    import shutil
    from os.path import dirname as dn, join as jn, abspath

    log = logging.getLogger('TestEnvCreator')
    # create logger
    log.setLevel(logging.DEBUG)
    log.propagate = True
    # create console handler and set level to debug
    ch = logging.StreamHandler()
    ch.setLevel(logging.DEBUG)

    # create formatter
    formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")

    # add formatter to ch
    ch.setFormatter(formatter)

    # add ch to logger
    log.addHandler(ch)

    #PART ONE create db
    log.debug('making test db')
    dbname = config['sqlalchemy.db1.url'].split('/')[-1]
    dbmanage = DbManage(log_sql=True, dbname=dbname, tests=True)
    dbmanage.create_tables(override=True)
    dbmanage.config_prompt(repos_test_path)
    dbmanage.create_default_user()
    dbmanage.admin_prompt()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()

    #PART TWO make test repo
    log.debug('making test vcs repo')
    #unpack the bundled test repository into /tmp, replacing any old copy
    if os.path.isdir('/tmp/vcs_test'):
        shutil.rmtree('/tmp/vcs_test')

    cur_dir = dn(dn(abspath(__file__)))
    tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test.tar.gz"))
    tar.extractall('/tmp')
    tar.close()
@@ -1,77 +1,78 b''
<%def name="file_class(node)">
## helper: css class for a browser row - file icon vs directory icon
%if node.is_file():
    <%return "browser-file" %>
%else:
    <%return "browser-dir"%>
%endif
</%def>
## repository file browser: revision selector form plus a table listing
## the entries of the currently browsed directory
<div id="body" class="browserblock">
    <div class="browser-header">
    ${h.form(h.url.current())}
    <div class="info_box">
    ## jump to an arbitrary revision of the browsed path
    <span >${_('view')}@rev</span>
    <a href="${c.url_prev}">&laquo;</a>
    ${h.text('at_rev',value=c.rev_nr,size=3)}
    <a href="${c.url_next}">&raquo;</a>
    ${h.submit('view','view')}
    </div>
    ${h.end_form()}
    </div>
    <div class="browser-body">
        <table class="code-browser">
            <thead>
                <tr>
                    <th>${_('Name')}</th>
                    <th>${_('Size')}</th>
                    <th>${_('Mimetype')}</th>
                    <th>${_('Revision')}</th>
                    <th>${_('Last modified')}</th>
                    <th>${_('Last commiter')}</th>
                </tr>
            </thead>

            ## parent-directory link, rendered only when a parent exists
            % if c.files_list.parent:
            <tr class="parity0">
                <td>
                    ${h.link_to('..',h.url('files_home',repo_name=c.repo_name,revision=c.cur_rev,f_path=c.files_list.parent.path),class_="browser-dir")}
                </td>
                <td></td>
                <td></td>
                <td></td>
                <td></td>
                <td></td>
            </tr>
            %endif

            %for cnt,node in enumerate(c.files_list,1):
            ## alternate row shading via parity css classes
            <tr class="parity${cnt%2}">
                <td>
                    ${h.link_to(node.name,h.url('files_home',repo_name=c.repo_name,revision=c.cur_rev,f_path=node.path),class_=file_class(node))}
                </td>
                <td>
                    ${h.format_byte_size(node.size,binary=True)}
                </td>
                ## the remaining columns only make sense for files
                <td>
                    %if node.is_file():
                        ${node.mimetype}
                    %endif
                </td>
                <td>
                    %if node.is_file():
                        ${node.last_changeset.revision}
                    %endif
                </td>
                <td>
                    %if node.is_file():
                        ${h.age(node.last_changeset._ctx.date())} - ${node.last_changeset.date}
                    %endif
                </td>
                <td>
                    %if node.is_file():
                        ${node.last_changeset.author}
                    %endif
                </td>
            </tr>
            %endfor
        </table>
    </div>
</div>
General Comments 0
You need to be logged in to leave comments. Login now