Show More
@@ -1,97 +1,97 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.lib.paster_commands.install_iis |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | IIS installation tools for Kallithea |
|
19 | 19 | """ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | import os |
|
23 | 23 | import sys |
|
24 | 24 | |
|
25 | 25 | from kallithea.lib.paster_commands.common import BasePasterCommand |
|
26 | 26 | |
|
27 | 27 | |
|
# Template for the generated IIS dispatch script (note: name keeps the
# historical 'dispath' spelling; renaming it would touch all users).
# It is %-formatted with two keys:
#   inifile    - absolute path of the Kallithea .ini file (backslash-escaped)
#   virtualdir - IIS virtual directory to install into
# The generated script exposes __ExtensionFactory__ for isapi-wsgi and, when
# run directly, installs/uninstalls itself via isapi.install.HandleCommandLine.
dispath_py_template = '''\
# Created by Kallithea 'gearbox install-iis'
import sys

if hasattr(sys, "isapidllhandle"):
    import win32traceutil

import isapi_wsgi
import os

def __ExtensionFactory__():
    from paste.deploy import loadapp
    from paste.script.util.logging_config import fileConfig
    fileConfig('%(inifile)s')
    application = loadapp('config:%(inifile)s')

    def app(environ, start_response):
        user = environ.get('REMOTE_USER', None)
        if user is not None:
            os.environ['REMOTE_USER'] = user
        return application(environ, start_response)

    return isapi_wsgi.ISAPIThreadPoolHandler(app)

if __name__=='__main__':
    from isapi.install import *
    params = ISAPIParameters()
    sm = [ScriptMapParams(Extension="*", Flags=0)]
    vd = VirtualDirParameters(Name="%(virtualdir)s",
                              Description = "Kallithea",
                              ScriptMaps = sm,
                              ScriptMapUpdate = "replace")
    params.VirtualDirs = [vd]
    HandleCommandLine(params)
'''
|
63 | 63 | |
|
class Command(BasePasterCommand):
    '''Kallithea: Install into IIS using isapi-wsgi'''

    # Writing the dispatch script needs no database access.
    requires_db_session = False

    def take_action(self, args):
        """Write a dispatch.py ISAPI handler into the current working
        directory, rendered from dispath_py_template with the absolute path
        of the given config file and the requested IIS virtual directory.
        """
        config_file = os.path.abspath(args.config_file)
        try:
            import isapi_wsgi
        except ImportError:
            # NOTE(review): assumes self.error() aborts the command;
            # otherwise execution would fall through to the file write below.
            self.error('missing requirement: isapi-wsgi not installed')

        dispatchfile = os.path.join(os.getcwd(), 'dispatch.py')
        print 'Writing %s' % dispatchfile
        with open(dispatchfile, 'w') as f:
            # Double the backslashes so the Windows path survives being
            # embedded inside a string literal in the generated script.
            f.write(dispath_py_template % {
                'inifile': config_file.replace('\\', '\\\\'),
                'virtualdir': args.virtualdir,
            })

        print ('Run \'python "%s" install\' with administrative privileges '
               'to generate the _dispatch.dll file and install it into the '
               'default web site') % (dispatchfile,)

    def get_parser(self, prog_name):
        """Extend the base parser with the --virtualdir option."""
        parser = super(Command, self).get_parser(prog_name)

        parser.add_argument('--virtualdir',
                            action='store',
                            dest='virtualdir',
                            default='/',
                            help='The virtual folder to install into on IIS')

        return parser
@@ -1,291 +1,291 b'' | |||
|
1 | 1 | import os |
|
2 | 2 | import re |
|
3 | 3 | |
|
4 | 4 | import mock |
|
5 | 5 | import routes.util |
|
6 | 6 | |
|
7 | 7 | from kallithea.tests.base import * |
|
8 | 8 | from kallithea.lib import helpers as h |
|
9 | 9 | from kallithea.model.db import User, Notification, UserNotification |
|
10 | 10 | from kallithea.model.user import UserModel |
|
11 | 11 | from kallithea.model.meta import Session |
|
12 | 12 | from kallithea.model.notification import NotificationModel, EmailNotificationModel |
|
13 | 13 | |
|
14 | 14 | import kallithea.lib.celerylib |
|
15 | 15 | import kallithea.lib.celerylib.tasks |
|
16 | 16 | |
|
17 | 17 | from tg.util.webtest import test_context |
|
18 | 18 | |
|
class TestNotifications(TestController):
    """Tests for NotificationModel / Notification / UserNotification and the
    notification e-mails rendered through kallithea.lib.celerylib.tasks.
    """

    def setup_method(self, method):
        """Create three fresh users (ids stored as self.u1/u2/u3) and start
        from a state with no notifications at all.
        """
        Session.remove()
        u1 = UserModel().create_or_update(username=u'u1',
                                          password=u'qweqwe',
                                          email=u'u1@example.com',
                                          firstname=u'u1', lastname=u'u1')
        Session().commit()
        self.u1 = u1.user_id

        # NOTE(review): lastname=u'u3' here looks like a copy-paste slip for
        # u'u2' - harmless for these tests, which never check u2's lastname.
        u2 = UserModel().create_or_update(username=u'u2',
                                          password=u'qweqwe',
                                          email=u'u2@example.com',
                                          firstname=u'u2', lastname=u'u3')
        Session().commit()
        self.u2 = u2.user_id

        u3 = UserModel().create_or_update(username=u'u3',
                                          password=u'qweqwe',
                                          email=u'u3@example.com',
                                          firstname=u'u3', lastname=u'u3')
        Session().commit()
        self.u3 = u3.user_id

        # remove_all_notifications comes from the test base class - after it,
        # both tables must be empty.
        self.remove_all_notifications()
        assert [] == Notification.query().all()
        assert [] == UserNotification.query().all()

    def test_create_notification(self):
        """Creating a notification must mail the non-author recipient and
        link both recipients through UserNotification rows.
        """
        with test_context(self.app):
            usrs = [self.u1, self.u2]
            def send_email(recipients, subject, body='', html_body='', headers=None, author=None):
                # Mock replacing tasks.send_email: u1 is the author, so only
                # u2 should actually be mailed.
                assert recipients == ['u2@example.com']
                assert subject == 'Test Message'
                assert body == u"hi there"
                assert '>hi there<' in html_body
                assert author.username == 'u1'
            with mock.patch.object(kallithea.lib.celerylib.tasks, 'send_email', send_email):
                notification = NotificationModel().create(created_by=self.u1,
                                                          subject=u'subj', body=u'hi there',
                                                          recipients=usrs)
                Session().commit()
                u1 = User.get(self.u1)
                u2 = User.get(self.u2)
                u3 = User.get(self.u3)
                notifications = Notification.query().all()
                assert len(notifications) == 1

                assert notifications[0].recipients == [u1, u2]
                assert notification.notification_id == notifications[0].notification_id

                unotification = UserNotification.query() \
                    .filter(UserNotification.notification == notification).all()

                # one UserNotification row per recipient, exactly for usrs
                assert len(unotification) == len(usrs)
                assert set([x.user_id for x in unotification]) == set(usrs)

    def test_user_notifications(self):
        """A user's .notifications relation must contain every notification
        addressed to them.
        """
        with test_context(self.app):
            notification1 = NotificationModel().create(created_by=self.u1,
                                                       subject=u'subj', body=u'hi there1',
                                                       recipients=[self.u3])
            Session().commit()
            notification2 = NotificationModel().create(created_by=self.u1,
                                                       subject=u'subj', body=u'hi there2',
                                                       recipients=[self.u3])
            Session().commit()
            u3 = Session().query(User).get(self.u3)

            # order unspecified - compare as sorted lists
            assert sorted([x.notification for x in u3.notifications]) == sorted([notification2, notification1])

    def test_delete_notifications(self):
        """Deleting a Notification must also remove its UserNotification
        associations.
        """
        with test_context(self.app):
            notification = NotificationModel().create(created_by=self.u1,
                                                      subject=u'title', body=u'hi there3',
                                                      recipients=[self.u3, self.u1, self.u2])
            Session().commit()
            notifications = Notification.query().all()
            assert notification in notifications

            Notification.delete(notification.notification_id)
            Session().commit()

            notifications = Notification.query().all()
            assert not notification in notifications

            un = UserNotification.query().filter(UserNotification.notification
                                                 == notification).all()
            assert un == []

    def test_delete_association(self):
        """NotificationModel().delete(user, id) removes only that user's
        association; the notification and other users' rows survive.
        """
        with test_context(self.app):
            notification = NotificationModel().create(created_by=self.u1,
                                                      subject=u'title', body=u'hi there3',
                                                      recipients=[self.u3, self.u1, self.u2])
            Session().commit()

            unotification = UserNotification.query() \
                .filter(UserNotification.notification ==
                        notification) \
                .filter(UserNotification.user_id == self.u3) \
                .scalar()

            assert unotification.user_id == self.u3

            NotificationModel().delete(self.u3,
                                       notification.notification_id)
            Session().commit()

            u3notification = UserNotification.query() \
                .filter(UserNotification.notification ==
                        notification) \
                .filter(UserNotification.user_id == self.u3) \
                .scalar()

            assert u3notification == None

            # notification object is still there
            assert Notification.query().all() == [notification]

            #u1 and u2 still have assignments
            u1notification = UserNotification.query() \
                .filter(UserNotification.notification ==
                        notification) \
                .filter(UserNotification.user_id == self.u1) \
                .scalar()
            assert u1notification != None
            u2notification = UserNotification.query() \
                .filter(UserNotification.notification ==
                        notification) \
                .filter(UserNotification.user_id == self.u2) \
                .scalar()
            assert u2notification != None

    def test_notification_counter(self):
        """Unread counters: the creator's own notifications do not count as
        unread for the creator.
        """
        with test_context(self.app):
            NotificationModel().create(created_by=self.u1,
                                       subject=u'title', body=u'hi there_delete',
                                       recipients=[self.u3, self.u1])
            Session().commit()

            assert NotificationModel().get_unread_cnt_for_user(self.u1) == 0
            assert NotificationModel().get_unread_cnt_for_user(self.u2) == 0
            assert NotificationModel().get_unread_cnt_for_user(self.u3) == 1

            notification = NotificationModel().create(created_by=self.u1,
                                                      subject=u'title', body=u'hi there3',
                                                      recipients=[self.u3, self.u1, self.u2])
            Session().commit()

            assert NotificationModel().get_unread_cnt_for_user(self.u1) == 0
            assert NotificationModel().get_unread_cnt_for_user(self.u2) == 1
            assert NotificationModel().get_unread_cnt_for_user(self.u3) == 2

    # canonical_url is mocked to a deterministic string so the HTML dump is
    # stable across test runs / hosts.
    @mock.patch.object(h, 'canonical_url', (lambda arg, **kwargs: 'http://%s/?%s' % (arg, '&'.join('%s=%s' % (k, v) for (k, v) in sorted(kwargs.items())))))
    def test_dump_html_mails(self):
        # Exercise all notification types and dump them to one big html file
        l = []

        def send_email(recipients, subject, body='', html_body='', headers=None, author=None):
            # Mock capturing every outgoing mail as an HTML fragment in l.
            l.append('<hr/>\n')
            l.append('<h1>%s</h1>\n' % desc) # desc is from outer scope
            l.append('<pre>\n')
            l.append('From: %s\n' % author.username)
            l.append('To: %s\n' % ' '.join(recipients))
            l.append('Subject: %s\n' % subject)
            l.append('</pre>\n')
            l.append('<hr/>\n')
            l.append('<pre>%s</pre>\n' % body)
            l.append('<hr/>\n')
            l.append(html_body)
            l.append('<hr/>\n')

        with test_context(self.app):
            with mock.patch.object(kallithea.lib.celerylib.tasks, 'send_email', send_email):
                # kwargs shared by the two pull-request notification types
                pr_kwargs = dict(
                    pr_nice_id='#7',
                    pr_title='The Title',
                    pr_title_short='The Title',
                    pr_url='http://pr.org/7',
                    pr_target_repo='http://mainline.com/repo',
                    pr_target_branch='trunk',
                    pr_source_repo='https://dev.org/repo',
                    pr_source_branch='devbranch',
                    pr_owner=User.get(self.u2),
                    pr_owner_username='u2'
                )

                # For each notification type: a body and its email kwargs.
                # List-valued kwargs (e.g. is_mention=[False, True]) are
                # expanded below into one notification per combination.
                for type_, body, kwargs in [
                        (Notification.TYPE_CHANGESET_COMMENT,
                         u'This is the new comment.\n\n - and here it ends indented.',
                         dict(
                             short_id='cafe1234',
                             raw_id='cafe1234c0ffeecafe',
                             branch='brunch',
                             cs_comment_user='Opinionated User (jsmith)',
                             cs_comment_url='http://comment.org',
                             is_mention=[False, True],
                             message='This changeset did something clever which is hard to explain',
                             message_short='This changeset did something cl...',
                             status_change=[None, 'Approved'],
                             cs_target_repo='repo_target',
                             cs_url='http://changeset.com',
                             cs_author=User.get(self.u2))),
                        (Notification.TYPE_MESSAGE,
                         u'This is the body of the test message\n - nothing interesting here except indentation.',
                         dict()),
                        #(Notification.TYPE_MENTION, '$body', None), # not used
                        (Notification.TYPE_REGISTRATION,
                         u'Registration body',
                         dict(
                             new_username='newbie',
                             registered_user_url='http://newbie.org',
                             new_email='new@email.com',
                             new_full_name='New Full Name')),
                        (Notification.TYPE_PULL_REQUEST,
                         u'This PR is awesome because it does stuff\n - please approve indented!',
                         dict(
                             pr_user_created='Requesting User (root)', # pr_owner should perhaps be used for @mention in description ...
                             is_mention=[False, True],
                             pr_revisions=[('123abc'*7, "Introduce one and two\n\nand that's it"), ('567fed'*7, 'Make one plus two equal tree')],
                             org_repo_name='repo_org',
                             **pr_kwargs)),
                        (Notification.TYPE_PULL_REQUEST_COMMENT,
                         u'Me too!\n\n - and indented on second line',
                         dict(
                             closing_pr=[False, True],
                             is_mention=[False, True],
                             pr_comment_user='Opinionated User (jsmith)',
                             pr_comment_url='http://pr.org/comment',
                             status_change=[None, 'Under Review'],
                             **pr_kwargs)),
                        ]:
                    kwargs['repo_name'] = u'repo/name'
                    # Cartesian expansion of the list-valued parameters
                    params = [(type_, type_, body, kwargs)]
                    for param_name in ['is_mention', 'status_change', 'closing_pr']: # TODO: inline/general
                        if not isinstance(kwargs.get(param_name), list):
                            continue
                        new_params = []
                        for v in kwargs[param_name]:
                            for desc, type_, body, kwargs in params:
                                kwargs = dict(kwargs)
                                kwargs[param_name] = v
                                new_params.append(('%s, %s=%r' % (desc, param_name, v), type_, body, kwargs))
                        params = new_params

                    for desc, type_, body, kwargs in params:
                        # desc is used as "global" variable
                        notification = NotificationModel().create(created_by=self.u1,
                                                                  subject=u'unused', body=body, email_kwargs=kwargs,
                                                                  recipients=[self.u2], type_=type_)

                # Email type TYPE_PASSWORD_RESET has no corresponding notification type - test it directly:
                desc = 'TYPE_PASSWORD_RESET'
                kwargs = dict(user='John Doe', reset_token='decbf64715098db5b0bd23eab44bd792670ab746', reset_url='http://reset.com/decbf64715098db5b0bd23eab44bd792670ab746')
                kallithea.lib.celerylib.tasks.send_email(['john@doe.com'],
                    "Password reset link",
                    EmailNotificationModel().get_email_tmpl(EmailNotificationModel.TYPE_PASSWORD_RESET, 'txt', **kwargs),
                    EmailNotificationModel().get_email_tmpl(EmailNotificationModel.TYPE_PASSWORD_RESET, 'html', **kwargs),
                    author=User.get(self.u1))

        # Comment out the structural tags from the individual mails so the
        # concatenation is one valid HTML document.
        out = '<!doctype html>\n<html lang="en">\n<head><title>Notifications</title><meta http-equiv="Content-Type" content="text/html; charset=UTF-8"></head>\n<body>\n%s\n</body>\n</html>\n' % \
            re.sub(r'<(/?(?:!doctype|html|head|title|meta|body)\b[^>]*)>', r'<!--\1-->', ''.join(l))

        outfn = os.path.join(os.path.dirname(__file__), 'test_dump_html_mails.out.html')
        reffn = os.path.join(os.path.dirname(__file__), 'test_dump_html_mails.ref.html')
        with open(outfn, 'w') as f:
            f.write(out)
        with open(reffn) as f:
            ref = f.read()
        assert ref == out # copy test_dump_html_mails.out.html to test_dump_html_mails.ref.html to update expectations
        os.unlink(outfn)
@@ -1,79 +1,79 b'' | |||
|
1 | 1 | #!/usr/bin/env python2 |
|
2 | 2 | |
|
3 | 3 | """ |
|
4 | 4 | Consistent formatting of rst section titles |
|
5 | 5 | """ |
|
6 | 6 | |
|
7 | 7 | import re |
|
8 | 8 | import subprocess |
|
9 | 9 | |
|
# (blank lines before, blank lines after) emitted around a section title at
# each nesting level; consumed by subf() in main().
spaces = [
    (0, 1), # we assume this is a over-and-underlined header
    (2, 1),
    (1, 1),
    (1, 0),
    (1, 0),
]

# http://sphinx-doc.org/rest.html :
# for the Python documentation, this convention is used which you may follow:
# # with overline, for parts
# * with overline, for chapters
# =, for sections
# -, for subsections
# ^, for subsubsections
# ", for paragraphs
pystyles = ['#', '*', '=', '-', '^', '"']

# match on a header line underlined with one of the valid characters
# group 1: the title text, group 2: the underline character (repeated 3+)
headermatch = re.compile(r'''\n*(.+)\n([][!"#$%&'()*+,./:;<=>?@\\^_`{|}~-])\2{2,}\n+''', flags=re.MULTILINE)
|
30 | 30 | |
|
31 | 31 | |
|
32 | 32 | def main(): |
|
33 | 33 | for fn in subprocess.check_output(['hg', 'loc', 'set:**.rst+kallithea/i18n/how_to']).splitlines(): |
|
34 | 34 | print 'processing %s:' % fn |
|
35 |
s = |
|
|
35 | s = open(fn).read() | |
|
36 | 36 | |
|
37 | 37 | # find levels and their styles |
|
38 | 38 | lastpos = 0 |
|
39 | 39 | styles = [] |
|
40 | 40 | for markup in headermatch.findall(s): |
|
41 | 41 | style = markup[1] |
|
42 | 42 | if style in styles: |
|
43 | 43 | stylepos = styles.index(style) |
|
44 | 44 | if stylepos > lastpos + 1: |
|
45 | 45 | print 'bad style %r with level %s - was at %s' % (style, stylepos, lastpos) |
|
46 | 46 | else: |
|
47 | 47 | stylepos = len(styles) |
|
48 | 48 | if stylepos > lastpos + 1: |
|
49 | 49 | print 'bad new style %r - expected %r' % (style, styles[lastpos + 1]) |
|
50 | 50 | else: |
|
51 | 51 | styles.append(style) |
|
52 | 52 | lastpos = stylepos |
|
53 | 53 | |
|
54 | 54 | # remove superfluous spacing (may however be restored by header spacing) |
|
55 | 55 | s = re.sub(r'''(\n\n)\n*''', r'\1', s, flags=re.MULTILINE) |
|
56 | 56 | |
|
57 | 57 | if styles: |
|
58 | 58 | newstyles = pystyles[pystyles.index(styles[0]):] |
|
59 | 59 | |
|
60 | 60 | def subf(m): |
|
61 | 61 | title, style = m.groups() |
|
62 | 62 | level = styles.index(style) |
|
63 | 63 | before, after = spaces[level] |
|
64 | 64 | newstyle = newstyles[level] |
|
65 | 65 | return '\n' * (before + 1) + title + '\n' + newstyle * len(title) + '\n' * (after + 1) |
|
66 | 66 | s = headermatch.sub(subf, s) |
|
67 | 67 | |
|
68 | 68 | # remove superfluous spacing when headers are adjacent |
|
69 | 69 | s = re.sub(r'''(\n.+\n([][!"#$%&'()*+,./:;<=>?@\\^_`{|}~-])\2{2,}\n\n\n)\n*''', r'\1', s, flags=re.MULTILINE) |
|
70 | 70 | # fix trailing space and spacing before link sections |
|
71 | 71 | s = s.strip() + '\n' |
|
72 | 72 | s = re.sub(r'''\n+((?:\.\. _[^\n]*\n)+)$''', r'\n\n\n\1', s) |
|
73 | 73 | |
|
74 |
|
|
|
74 | open(fn, 'w').write(s) | |
|
75 | 75 | print subprocess.check_output(['hg', 'diff', fn]) |
|
76 | 76 | |
|
77 | 77 | |
|
78 | 78 | if __name__ == '__main__': |
|
79 | 79 | main() |
@@ -1,155 +1,155 b'' | |||
|
1 | 1 | #!/usr/bin/env python2 |
|
2 | 2 | """ |
|
3 | 3 | Based on kallithea/lib/paster_commands/template.ini.mako, generate |
|
4 | 4 | development.ini |
|
5 | 5 | kallithea/tests/test.ini |
|
6 | 6 | """ |
|
7 | 7 | |
|
8 | 8 | import re |
|
9 | 9 | |
|
# Master template every generated ini file is derived from.
makofile = 'kallithea/lib/paster_commands/template.ini.mako'

# the mako conditionals used in all other ini files and templates
selected_mako_conditionals = set([
    "database_engine == 'sqlite'",
    "http_server == 'waitress'",
    "error_aggregation_service == 'appenlight'",
    "error_aggregation_service == 'sentry'",
])

# the mako variables used in all other ini files and templates
mako_variable_values = {
    'host': '127.0.0.1',
    'port': '5000',
    'uuid()': '${app_instance_uuid}',
}

# files to be generated from the mako template:
# (filename, description put in the file header, per-section setting overrides)
ini_files = [
    ('kallithea/tests/test.ini',
     '''
    Kallithea - config for tests:
        sqlalchemy and kallithea_test.sqlite
        custom logging
    ''',
     {
         '[server:main]': {
             'port': '4999',
         },
         '[app:main]': {
             'app_instance_uuid': 'test',
             'show_revision_number': 'true',
             'beaker.cache.sql_cache_short.expire': '1',
             'beaker.session.secret': '{74e0cd75-b339-478b-b129-07dd221def1f}',
         },
         '[handler_console]': {
             'level': 'DEBUG',
             'formatter': 'color_formatter',
         },
         # The 'handler_console_sql' block is very similar to the one in
         # development.ini, but without the explicit 'level=DEBUG' setting:
         # it causes duplicate sqlalchemy debug logs, one through
         # handler_console_sql and another through another path.
         '[handler_console_sql]': {
             'formatter': 'color_formatter_sql',
         },
     },
    ),
    ('development.ini',
     '''
    Kallithea - Development config:
        listening on *:5000
        sqlite and kallithea.db
        initial_repo_scan = true
        debug = true
        verbose and colorful logging
    ''',
     {
         '[server:main]': {
             'host': '0.0.0.0',
         },
         '[app:main]': {
             'initial_repo_scan': 'true',
             'debug': 'true',
             'app_instance_uuid': 'development-not-secret',
             'beaker.session.secret': 'development-not-secret',
         },
         '[handler_console]': {
             'level': 'DEBUG',
             'formatter': 'color_formatter',
         },
         '[handler_console_sql]': {
             'level': 'DEBUG',
             'formatter': 'color_formatter_sql',
         },
     },
    ),
]
|
88 | 88 | |
|
89 | 89 | |
|
90 | 90 | def main(): |
|
91 | 91 | # make sure all mako lines starting with '#' (the '##' comments) are marked up as <text> |
|
92 | 92 | print 'reading:', makofile |
|
93 |
mako_org = |
|
|
93 | mako_org = open(makofile).read() | |
|
94 | 94 | mako_no_text_markup = re.sub(r'</?%text>', '', mako_org) |
|
95 | 95 | mako_marked_up = re.sub(r'\n(##.*)', r'\n<%text>\1</%text>', mako_no_text_markup, flags=re.MULTILINE) |
|
96 | 96 | if mako_marked_up != mako_org: |
|
97 | 97 | print 'writing:', makofile |
|
98 |
|
|
|
98 | open(makofile, 'w').write(mako_marked_up) | |
|
99 | 99 | |
|
100 | 100 | # select the right mako conditionals for the other less sophisticated formats |
|
101 | 101 | def sub_conditionals(m): |
|
102 | 102 | """given a %if...%endif match, replace with just the selected |
|
103 | 103 | conditional sections enabled and the rest as comments |
|
104 | 104 | """ |
|
105 | 105 | conditional_lines = m.group(1) |
|
106 | 106 | def sub_conditional(m): |
|
107 | 107 | """given a conditional and the corresponding lines, return them raw |
|
108 | 108 | or commented out, based on whether conditional is selected |
|
109 | 109 | """ |
|
110 | 110 | criteria, lines = m.groups() |
|
111 | 111 | if criteria not in selected_mako_conditionals: |
|
112 | 112 | lines = '\n'.join((l if not l or l.startswith('#') else '#' + l) for l in lines.split('\n')) |
|
113 | 113 | return lines |
|
114 | 114 | conditional_lines = re.sub(r'^%(?:el)?if (.*):\n((?:^[^%\n].*\n|\n)*)', |
|
115 | 115 | sub_conditional, conditional_lines, flags=re.MULTILINE) |
|
116 | 116 | return conditional_lines |
|
117 | 117 | mako_no_conditionals = re.sub(r'^(%if .*\n(?:[^%\n].*\n|%elif .*\n|\n)*)%endif\n', |
|
118 | 118 | sub_conditionals, mako_no_text_markup, flags=re.MULTILINE) |
|
119 | 119 | |
|
120 | 120 | # expand mako variables |
|
121 | 121 | def pyrepl(m): |
|
122 | 122 | return mako_variable_values.get(m.group(1), m.group(0)) |
|
123 | 123 | mako_no_variables = re.sub(r'\${([^}]*)}', pyrepl, mako_no_conditionals) |
|
124 | 124 | |
|
125 | 125 | # remove utf-8 coding header |
|
126 | 126 | base_ini = re.sub(r'^## -\*- coding: utf-8 -\*-\n', '', mako_no_variables) |
|
127 | 127 | |
|
128 | 128 | # create ini files |
|
129 | 129 | for fn, desc, settings in ini_files: |
|
130 | 130 | print 'updating:', fn |
|
131 | 131 | ini_lines = re.sub( |
|
132 | 132 | '# Kallithea - config file generated with kallithea-config *#\n', |
|
133 | 133 | ''.join('# %-77s#\n' % l.strip() for l in desc.strip().split('\n')), |
|
134 | 134 | base_ini) |
|
135 | 135 | def process_section(m): |
|
136 | 136 | """process a ini section, replacing values as necessary""" |
|
137 | 137 | sectionname, lines = m.groups() |
|
138 | 138 | if sectionname in settings: |
|
139 | 139 | section_settings = settings[sectionname] |
|
140 | 140 | def process_line(m): |
|
141 | 141 | """process a section line and update value if necessary""" |
|
142 | 142 | setting, value = m.groups() |
|
143 | 143 | line = m.group(0) |
|
144 | 144 | if setting in section_settings: |
|
145 | 145 | line = '%s = %s' % (setting, section_settings[setting]) |
|
146 | 146 | if '$' not in value: |
|
147 | 147 | line = '#%s = %s\n%s' % (setting, value, line) |
|
148 | 148 | return line.rstrip() |
|
149 | 149 | lines = re.sub(r'^([^#\n].*) = ?(.*)', process_line, lines, flags=re.MULTILINE) |
|
150 | 150 | return sectionname + '\n' + lines |
|
151 | 151 | ini_lines = re.sub(r'^(\[.*\])\n((?:(?:[^[\n].*)?\n)*)', process_section, ini_lines, flags=re.MULTILINE) |
|
152 |
|
|
|
152 | open(fn, 'w').write(ini_lines) | |
|
153 | 153 | |
|
154 | 154 | if __name__ == '__main__': |
|
155 | 155 | main() |
@@ -1,39 +1,39 b'' | |||
|
1 | 1 | #!/usr/bin/env python2 |
|
2 | 2 | |
|
3 | 3 | import re |
|
4 | 4 | import sys |
|
5 | 5 | |
|
6 | 6 | if len(sys.argv) < 2: |
|
7 | 7 | print 'Cleanup of superfluous % formatting of log statements.' |
|
8 | 8 | print 'Usage:' |
|
9 | 9 | print ''' hg revert `hg loc '*.py'|grep -v logformat.py` && scripts/logformat.py `hg loc '*.py'` && hg diff''' |
|
10 | 10 | raise SystemExit(1) |
|
11 | 11 | |
|
12 | 12 | |
|
13 | 13 | logre = r''' |
|
14 | 14 | (log\.(?:error|info|warning|debug) |
|
15 | 15 | [(][ \n]* |
|
16 | 16 | ) |
|
17 | 17 | %s |
|
18 | 18 | ( |
|
19 | 19 | [ \n]*[)] |
|
20 | 20 | ) |
|
21 | 21 | ''' |
|
22 | 22 | res = [ |
|
23 | 23 | # handle % () - keeping spaces around the old % |
|
24 | 24 | (re.compile(logre % r'''("[^"]*"|'[^']*') ([\n ]*) % ([\n ]*) \( ( (?:[^()]|\n)* (?: \( (?:[^()]|\n)* \) (?:[^()]|\n)* )* ) \) ''', flags=re.MULTILINE|re.VERBOSE), r'\1\2,\3\4\5\6'), |
|
25 | 25 | # handle % without () - keeping spaces around the old % |
|
26 | 26 | (re.compile(logre % r'''("[^"]*"|'[^']*') ([\n ]*) % ([\n ]*) ( (?:[^()]|\n)* (?: \( (?:[^()]|\n)* \) (?:[^()]|\n)* )* ) ''', flags=re.MULTILINE|re.VERBOSE), r'\1\2,\3\4\5\6'), |
|
27 | 27 | # remove extra space if it is on next line |
|
28 | 28 | (re.compile(logre % r'''("[^"]*"|'[^']*') , (\n [ ]) ([ ][\n ]*) ( (?:[^()]|\n)* (?: \( (?:[^()]|\n)* \) (?:[^()]|\n)* )* ) ''', flags=re.MULTILINE|re.VERBOSE), r'\1\2,\3\4\5\6'), |
|
29 | 29 | # remove extra space if it is on same line |
|
30 | 30 | (re.compile(logre % r'''("[^"]*"|'[^']*') , [ ]+ () ( [\n ]+) ( (?:[^()]|\n)* (?: \( (?:[^()]|\n)* \) (?:[^()]|\n)* )* ) ''', flags=re.MULTILINE|re.VERBOSE), r'\1\2,\3\4\5\6'), |
|
31 | 31 | # remove trailing , and space |
|
32 | 32 | (re.compile(logre % r'''("[^"]*"|'[^']*') , () ( [\n ]*) ( (?:[^()]|\n)* (?: \( (?:[^()]|\n)* \) (?:[^()]|\n)* )* [^(), \n] ) [ ,]*''', flags=re.MULTILINE|re.VERBOSE), r'\1\2,\3\4\5\6'), |
|
33 | 33 | ] |
|
34 | 34 | |
|
35 | 35 | for f in sys.argv[1:]: |
|
36 |
s = |
|
|
36 | s = open(f).read() | |
|
37 | 37 | for r, t in res: |
|
38 | 38 | s = r.sub(t, s) |
|
39 |
|
|
|
39 | open(f, 'w').write(s) |
@@ -1,253 +1,253 b'' | |||
|
1 | 1 | #!/usr/bin/env python2 |
|
2 | 2 | # -*- coding: utf-8 -*- |
|
3 | 3 | |
|
4 | 4 | """ |
|
5 | 5 | Kallithea script for maintaining contributor lists from version control |
|
6 | 6 | history. |
|
7 | 7 | |
|
8 | 8 | This script and the data in it is a best effort attempt at reverse engineering |
|
9 | 9 | previous attributions and correlate that with version control history while |
|
10 | 10 | preserving all existing copyright statements and attribution. This script is |
|
11 | 11 | processing and summarizing information found elsewhere - it is not by itself |
|
12 | 12 | making any claims. Comments in the script are an attempt at reverse engineering |
|
13 | 13 | possible explanations - they are not showing any intent or confirming it is |
|
14 | 14 | correct. |
|
15 | 15 | |
|
16 | 16 | Three files are generated / modified by this script: |
|
17 | 17 | |
|
18 | 18 | kallithea/templates/about.html claims to show copyright holders, and the GPL |
|
19 | 19 | license requires such existing "legal notices" to be preserved. We also try to |
|
20 | 20 | keep it updated with copyright holders, but do not claim it is a correct list. |
|
21 | 21 | |
|
22 | 22 | CONTRIBUTORS has the purpose of giving credit where credit is due and list all |
|
23 | 23 | the contributor names in the source. |
|
24 | 24 | |
|
25 | 25 | kallithea/templates/base/base.html contains the copyright years in the page |
|
26 | 26 | footer. |
|
27 | 27 | |
|
28 | 28 | Both make a best effort of listing all copyright holders, but revision control |
|
29 | 29 | history might be a better and more definitive source. |
|
30 | 30 | |
|
31 | 31 | Contributors are sorted "fairly" by copyright year and amount of |
|
32 | 32 | contribution. |
|
33 | 33 | |
|
34 | 34 | New contributors are listed, without considering if the contribution contains |
|
35 | 35 | copyrightable work. |
|
36 | 36 | |
|
37 | 37 | When the copyright might belong to a different legal entity than the |
|
38 | 38 | contributor, the legal entity is given credit too. |
|
39 | 39 | """ |
|
40 | 40 | |
|
41 | 41 | |
|
# Some committers are so wrong that it doesn't point at any contributor:
total_ignore = set()
total_ignore.add('*** failed to import extension hggit: No module named hggit')
total_ignore.add('<>')

# Normalize some committer names where people have contributed under different
# names or email addresses:
# NOTE(review): the non-ASCII names below were mojibake-damaged in transit and
# have been restored to their intended UTF-8 spellings.
name_fixes = {}
name_fixes['Andrew Shadura'] = "Andrew Shadura <andrew@shadura.me>"
name_fixes['aparkar'] = "Aparkar <aparkar@icloud.com>"
name_fixes['Aras Pranckevicius'] = "Aras Pranckevičius <aras@unity3d.com>"
name_fixes['Augosto Hermann'] = "Augusto Herrmann <augusto.herrmann@planejamento.gov.br>"
name_fixes['"Bradley M. Kuhn" <bkuhn@ebb.org>'] = "Bradley M. Kuhn <bkuhn@sfconservancy.org>"
name_fixes['dmitri.kuznetsov'] = "Dmitri Kuznetsov"
name_fixes['Dmitri Kuznetsov'] = "Dmitri Kuznetsov"
name_fixes['domruf'] = "Dominik Ruf <dominikruf@gmail.com>"
name_fixes['Ingo von borstel'] = "Ingo von Borstel <kallithea@planetmaker.de>"
name_fixes['Jan Heylen'] = "Jan Heylen <heyleke@gmail.com>"
name_fixes['Jason F. Harris'] = "Jason Harris <jason@jasonfharris.com>"
name_fixes['Jelmer Vernooij'] = "Jelmer Vernooĳ <jelmer@samba.org>"
name_fixes['jfh <jason@jasonfharris.com>'] = "Jason Harris <jason@jasonfharris.com>"
name_fixes['Leonardo Carneiro<leonardo@unity3d.com>'] = "Leonardo Carneiro <leonardo@unity3d.com>"
name_fixes['leonardo'] = "Leonardo Carneiro <leonardo@unity3d.com>"
name_fixes['Leonardo <leo@unity3d.com>'] = "Leonardo Carneiro <leonardo@unity3d.com>"
name_fixes['Les Peabody'] = "Les Peabody <lpeabody@gmail.com>"
name_fixes['"Lorenzo M. Catucci" <lorenzo@sancho.ccd.uniroma2.it>'] = "Lorenzo M. Catucci <lorenzo@sancho.ccd.uniroma2.it>"
name_fixes['Lukasz Balcerzak'] = "Łukasz Balcerzak <lukaszbalcerzak@gmail.com>"
name_fixes['mao <mao@lins.fju.edu.tw>'] = "Ching-Chen Mao <mao@lins.fju.edu.tw>"
name_fixes['marcink'] = "Marcin Kuźmiński <marcin@python-works.com>"
name_fixes['Marcin Kuzminski'] = "Marcin Kuźmiński <marcin@python-works.com>"
name_fixes['nansenat16@null.tw'] = "nansenat16 <nansenat16@null.tw>"
name_fixes['Peter Vitt'] = "Peter Vitt <petervitt@web.de>"
name_fixes['philip.j@hostdime.com'] = "Philip Jameson <philip.j@hostdime.com>"
name_fixes['Søren Løvborg'] = "Søren Løvborg <sorenl@unity3d.com>"
name_fixes['Thomas De Schampheleire'] = "Thomas De Schampheleire <thomas.de.schampheleire@gmail.com>"
name_fixes['Weblate'] = "<>"
name_fixes['xpol'] = "xpol <xpolife@gmail.com>"


# Some committer email address domains that indicate that another entity might
# hold some copyright too:
domain_extra = {}
domain_extra['unity3d.com'] = "Unity Technologies"
domain_extra['rhodecode.com'] = "RhodeCode GmbH"

# Repository history shows some old contributions that traditionally haven't
# been listed in about.html - preserve that:
no_about = set(total_ignore)
# The following contributors were traditionally not listed in about.html and it
# seems unclear if the copyright is personal or belongs to a company.
no_about.add(('Thayne Harbaugh <thayne@fusionio.com>', '2011'))
no_about.add(('Dies Koper <diesk@fast.au.fujitsu.com>', '2012'))
no_about.add(('Erwin Kroon <e.kroon@smartmetersolutions.nl>', '2012'))
no_about.add(('Vincent Caron <vcaron@bearstech.com>', '2012'))
# These contributors' contributions might be too small to be copyrightable:
no_about.add(('philip.j@hostdime.com', '2012'))
no_about.add(('Stefan Engel <mail@engel-stefan.de>', '2012'))
no_about.add(('Ton Plomp <tcplomp@gmail.com>', '2013'))
# Was reworked and contributed later and shadowed by other contributions:
no_about.add(('Sean Farley <sean.michael.farley@gmail.com>', '2013'))

# Preserve contributors listed in about.html but not appearing in repository
# history:
other_about = [
    ("2011", "Aparkar <aparkar@icloud.com>"),
    ("2010", "RhodeCode GmbH"),
    ("2011", "RhodeCode GmbH"),
    ("2012", "RhodeCode GmbH"),
    ("2013", "RhodeCode GmbH"),
]

# Preserve contributors listed in CONTRIBUTORS but not appearing in repository
# history:
other_contributors = [
    ("", "Andrew Kesterson <andrew@aklabs.net>"),
    ("", "cejones"),
    ("", "David A. Sjøen <david.sjoen@westcon.no>"),
    ("", "James Rhodes <jrhodes@redpointsoftware.com.au>"),
    ("", "Jonas Oberschweiber <jonas.oberschweiber@d-velop.de>"),
    ("", "larikale"),
    ("", "RhodeCode GmbH"),
    ("", "Sebastian Kreutzberger <sebastian@rhodecode.com>"),
    ("", "Steve Romanow <slestak989@gmail.com>"),
    ("", "SteveCohen"),
    ("", "Thomas <thomas@rhodecode.com>"),
    ("", "Thomas Waldmann <tw-public@gmx.de>"),
]
|
129 | 129 | |
|
130 | 130 | |
|
131 | 131 | import os |
|
132 | 132 | import re |
|
133 | 133 | from collections import defaultdict |
|
134 | 134 | |
|
135 | 135 | |
|
def sortkey(x):
    """Key function sorting contributors "fairly".

    ``x`` must be a 2-tuple of (list of year strings, utf-8 encoded name).
    Ordering is by latest contribution year (newest first), then first
    contribution year, then number of contribution years (most first),
    then the normalized name.
    """
    years, raw_name = x
    # Lower-case and fold a couple of accented letters so that accented and
    # plain spellings of the same name sort next to each other.
    folded_name = (raw_name.decode('utf8').lower()
                   .replace(u'\xe9', u'e')
                   .replace(u'\u0142', u'l'))
    newest = years and -int(years[-1])
    oldest = years and int(years[0])
    return (newest, oldest, -len(years), folded_name)
|
148 | 148 | |
|
149 | 149 | |
|
def nice_years(l, dash='-', join=' '):
    """Collapse a sorted list of years into brief ranges like '1900-1901 1921'.

    ``l`` holds year strings (or ints); consecutive years become a single
    'first<dash>last' piece, and the pieces are joined with ``join``.
    Returns '' for an empty list.
    """
    if not l:
        return ''
    pieces = []
    lo = hi = int(l[0])
    # The trailing 0 sentinel can never extend a run, so it forces the
    # final range to be flushed before the loop ends.
    for year in l[1:] + [0]:
        year = int(year)
        if year == hi + 1:
            hi = year
        else:
            if lo == hi:
                pieces.append('%s' % lo)
            else:
                pieces.append('%s%s%s' % (lo, dash, hi))
            lo = hi = year
    assert lo == 0 and hi == 0, (lo, hi)
    return join.join(pieces)
|
168 | 168 | |
|
169 | 169 | |
|
def insert_entries(
        filename,
        all_entries,
        no_entries,
        domain_extra,
        split_re,
        normalize_name,
        format_f):
    """Rewrite the contributor section of ``filename``.

    all_entries: list of tuples with year and name
    no_entries: set of names, or (name, year) tuples, to leave out
    domain_extra: maps an email domain to an extra credit name
    split_re: regexp matching the part of the file to rewrite - must split
        the file content into exactly two parts (before and after)
    normalize_name: maps a raw name to its grouping/display form
    format_f: formats a year list and name into the replacement text
    """
    years_of = defaultdict(set)

    for year, name in all_entries:
        if name in no_entries or (name, year) in no_entries:
            continue
        # For some well-known email domains, credit the owning entity too.
        domain = name.split('@', 1)[-1].rstrip('>')
        if domain in domain_extra:
            years_of[domain_extra[domain]].add(year)
        years_of[normalize_name(name)].add(year)

    grouped = [(sorted(y for y in years if y), name)
               for name, years in years_of.items()]
    grouped.sort(key=sortkey)

    with open(filename) as f:
        pre, post = re.split(split_re, f.read())

    with open(filename, 'w') as f:
        f.write(pre +
                ''.join(format_f(years, name) for years, name in grouped) +
                post)
|
207 | 207 | |
|
208 | 208 | |
|
def main():
    """Regenerate the contributor/copyright lists from repository history."""
    # (year, author) pairs for every ancestor of the working directory
    # revision, normalized through name_fixes - matching either the full
    # "Name <email>" author string or just the name part before '<'.
    repo_entries = [
        (year, name_fixes.get(name) or name_fixes.get(name.rsplit('<', 1)[0].strip()) or name)
        for year, name in
        (line.strip().split(' ', 1)
         for line in os.popen("""hg log -r '::.' -T '{date(date,"%Y")} {author}\n'""").readlines())
    ]

    # about.html: one <li> per copyright holder with nice year ranges.
    # NOTE(review): '©' and '–' below were mojibake ("©", "–")
    # and have been restored; leading-space width inside the literals may have
    # been collapsed by the same transit damage - TODO confirm against the
    # template files.
    insert_entries(
        filename='kallithea/templates/about.html',
        all_entries=repo_entries + other_about,
        no_entries=no_about,
        domain_extra=domain_extra,
        split_re=r'(?: <li>Copyright © [^\n]*</li>\n)*',
        normalize_name=lambda name: name.split('<', 1)[0].strip(),
        format_f=lambda years, name: ' <li>Copyright © %s, %s</li>\n' % (nice_years(years, '–', ', '), name),
    )

    # CONTRIBUTORS: indented list of names with optional year ranges.
    insert_entries(
        filename='CONTRIBUTORS',
        all_entries=repo_entries + other_contributors,
        no_entries=total_ignore,
        domain_extra=domain_extra,
        split_re=r'(?: [^\n]*\n)*',
        normalize_name=lambda name: name,
        format_f=lambda years, name: (' %s%s%s\n' % (name, ' ' if years else '', nice_years(years))),
    )

    # base.html footer: just the overall year range after the © sign.
    insert_entries(
        filename='kallithea/templates/base/base.html',
        all_entries=repo_entries,
        no_entries=total_ignore,
        domain_extra={},
        split_re=r'(?<=©) .* (?=by various authors)',
        normalize_name=lambda name: '',
        format_f=lambda years, name: ' ' + nice_years(years, '–', ', ') + ' ',
    )
|
246 | 246 | |
|
247 | 247 | |
|
248 | 248 | if __name__ == '__main__': |
|
249 | 249 | main() |
|
250 | 250 | |
|
251 | 251 | |
|
252 | 252 | # To list new contributors since last tagging: |
|
253 | 253 | # { hg log -r '::tagged()' -T ' {author}\n {author}\n'; hg log -r '::.' -T ' {author}\n' | sort | uniq; } | sort | uniq -u |
General Comments 0
You need to be logged in to leave comments.
Login now