@@ -1,97 +1,97 @@
# -*- coding: utf-8 -*-
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
kallithea.lib.paster_commands.install_iis
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

IIS installation tools for Kallithea
"""


import os
import sys

from kallithea.lib.paster_commands.common import BasePasterCommand


dispath_py_template = '''\
# Created by Kallithea 'gearbox install-iis'
import sys

if hasattr(sys, "isapidllhandle"):
    import win32traceutil

import isapi_wsgi
import os

def __ExtensionFactory__():
    from paste.deploy import loadapp
    from paste.script.util.logging_config import fileConfig
    fileConfig('%(inifile)s')
    application = loadapp('config:%(inifile)s')

    def app(environ, start_response):
        user = environ.get('REMOTE_USER', None)
        if user is not None:
            os.environ['REMOTE_USER'] = user
        return application(environ, start_response)

    return isapi_wsgi.ISAPIThreadPoolHandler(app)

if __name__=='__main__':
    from isapi.install import *
    params = ISAPIParameters()
    sm = [ScriptMapParams(Extension="*", Flags=0)]
    vd = VirtualDirParameters(Name="%(virtualdir)s",
                              Description = "Kallithea",
                              ScriptMaps = sm,
                              ScriptMapUpdate = "replace")
    params.VirtualDirs = [vd]
    HandleCommandLine(params)
'''

class Command(BasePasterCommand):
    '''Kallithea: Install into IIS using isapi-wsgi'''

    requires_db_session = False

    def take_action(self, args):
        config_file = os.path.abspath(args.config_file)
        try:
            import isapi_wsgi
        except ImportError:
            self.error('missing requirement: isapi-wsgi not installed')

        dispatchfile = os.path.join(os.getcwd(), 'dispatch.py')
        print 'Writing %s' % dispatchfile

        with open(dispatchfile, 'w') as f:
            f.write(dispath_py_template % {
                'inifile': config_file.replace('\\', '\\\\'),
                'virtualdir': args.virtualdir,
            })

        print ('Run \'python "%s" install\' with administrative privileges '
               'to generate the _dispatch.dll file and install it into the '
               'default web site') % (dispatchfile,)

    def get_parser(self, prog_name):
        parser = super(Command, self).get_parser(prog_name)

        parser.add_argument('--virtualdir',
                            action='store',
                            dest='virtualdir',
                            default='/',
                            help='The virtual folder to install into on IIS')

        return parser
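
A minimal sketch of how take_action() fills dispath_py_template above (illustrative only, not part of the changeset; the Windows path is a made-up example). The ini path is backslash-escaped because it ends up inside string literals in the generated dispatch.py:

    example = dispath_py_template % {
        'inifile': 'C:\\kallithea\\my.ini'.replace('\\', '\\\\'),
        'virtualdir': '/',
    }
    # 'example' is now a standalone script containing fileConfig('C:\\kallithea\\my.ini')
    # and VirtualDirParameters(Name="/", ...), ready to be written out as dispatch.py.
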
@@ -1,291 +1,291 @@
import os
import re

import mock
import routes.util

from kallithea.tests.base import *
from kallithea.lib import helpers as h
from kallithea.model.db import User, Notification, UserNotification
from kallithea.model.user import UserModel
from kallithea.model.meta import Session
from kallithea.model.notification import NotificationModel, EmailNotificationModel

import kallithea.lib.celerylib
import kallithea.lib.celerylib.tasks

from tg.util.webtest import test_context

class TestNotifications(TestController):

    def setup_method(self, method):
        Session.remove()
        u1 = UserModel().create_or_update(username=u'u1',
                                          password=u'qweqwe',
                                          email=u'u1@example.com',
                                          firstname=u'u1', lastname=u'u1')
        Session().commit()
        self.u1 = u1.user_id

        u2 = UserModel().create_or_update(username=u'u2',
                                          password=u'qweqwe',
                                          email=u'u2@example.com',
                                          firstname=u'u2', lastname=u'u3')
        Session().commit()
        self.u2 = u2.user_id

        u3 = UserModel().create_or_update(username=u'u3',
                                          password=u'qweqwe',
                                          email=u'u3@example.com',
                                          firstname=u'u3', lastname=u'u3')
        Session().commit()
        self.u3 = u3.user_id

        self.remove_all_notifications()
        assert [] == Notification.query().all()
        assert [] == UserNotification.query().all()

    def test_create_notification(self):
        with test_context(self.app):
            usrs = [self.u1, self.u2]
            def send_email(recipients, subject, body='', html_body='', headers=None, author=None):
                assert recipients == ['u2@example.com']
                assert subject == 'Test Message'
                assert body == u"hi there"
                assert '>hi there<' in html_body
                assert author.username == 'u1'
            with mock.patch.object(kallithea.lib.celerylib.tasks, 'send_email', send_email):
                notification = NotificationModel().create(created_by=self.u1,
                                                          subject=u'subj', body=u'hi there',
                                                          recipients=usrs)
                Session().commit()
                u1 = User.get(self.u1)
                u2 = User.get(self.u2)
                u3 = User.get(self.u3)
                notifications = Notification.query().all()
                assert len(notifications) == 1

                assert notifications[0].recipients == [u1, u2]
                assert notification.notification_id == notifications[0].notification_id

                unotification = UserNotification.query() \
                    .filter(UserNotification.notification == notification).all()

                assert len(unotification) == len(usrs)
                assert set([x.user_id for x in unotification]) == set(usrs)

    def test_user_notifications(self):
        with test_context(self.app):
            notification1 = NotificationModel().create(created_by=self.u1,
                                                       subject=u'subj', body=u'hi there1',
                                                       recipients=[self.u3])
            Session().commit()
            notification2 = NotificationModel().create(created_by=self.u1,
                                                       subject=u'subj', body=u'hi there2',
                                                       recipients=[self.u3])
            Session().commit()
            u3 = Session().query(User).get(self.u3)

            assert sorted([x.notification for x in u3.notifications]) == sorted([notification2, notification1])

    def test_delete_notifications(self):
        with test_context(self.app):
            notification = NotificationModel().create(created_by=self.u1,
                                                      subject=u'title', body=u'hi there3',
                                                      recipients=[self.u3, self.u1, self.u2])
            Session().commit()
            notifications = Notification.query().all()
            assert notification in notifications

            Notification.delete(notification.notification_id)
            Session().commit()

            notifications = Notification.query().all()
            assert not notification in notifications

            un = UserNotification.query().filter(UserNotification.notification
                                                 == notification).all()
            assert un == []

    def test_delete_association(self):
        with test_context(self.app):
            notification = NotificationModel().create(created_by=self.u1,
                                                      subject=u'title', body=u'hi there3',
                                                      recipients=[self.u3, self.u1, self.u2])
            Session().commit()

            unotification = UserNotification.query() \
                .filter(UserNotification.notification ==
                        notification) \
                .filter(UserNotification.user_id == self.u3) \
                .scalar()

            assert unotification.user_id == self.u3

            NotificationModel().delete(self.u3,
                                       notification.notification_id)
            Session().commit()

            u3notification = UserNotification.query() \
                .filter(UserNotification.notification ==
                        notification) \
                .filter(UserNotification.user_id == self.u3) \
                .scalar()

            assert u3notification == None

            # notification object is still there
            assert Notification.query().all() == [notification]

            #u1 and u2 still have assignments
            u1notification = UserNotification.query() \
                .filter(UserNotification.notification ==
                        notification) \
                .filter(UserNotification.user_id == self.u1) \
                .scalar()
            assert u1notification != None
            u2notification = UserNotification.query() \
                .filter(UserNotification.notification ==
                        notification) \
                .filter(UserNotification.user_id == self.u2) \
                .scalar()
            assert u2notification != None

    def test_notification_counter(self):
        with test_context(self.app):
            NotificationModel().create(created_by=self.u1,
                                       subject=u'title', body=u'hi there_delete',
                                       recipients=[self.u3, self.u1])
            Session().commit()

            assert NotificationModel().get_unread_cnt_for_user(self.u1) == 0
            assert NotificationModel().get_unread_cnt_for_user(self.u2) == 0
            assert NotificationModel().get_unread_cnt_for_user(self.u3) == 1

            notification = NotificationModel().create(created_by=self.u1,
                                                      subject=u'title', body=u'hi there3',
                                                      recipients=[self.u3, self.u1, self.u2])
            Session().commit()

            assert NotificationModel().get_unread_cnt_for_user(self.u1) == 0
            assert NotificationModel().get_unread_cnt_for_user(self.u2) == 1
            assert NotificationModel().get_unread_cnt_for_user(self.u3) == 2

    @mock.patch.object(h, 'canonical_url', (lambda arg, **kwargs: 'http://%s/?%s' % (arg, '&'.join('%s=%s' % (k, v) for (k, v) in sorted(kwargs.items())))))
    def test_dump_html_mails(self):
        # Exercise all notification types and dump them to one big html file
        l = []

        def send_email(recipients, subject, body='', html_body='', headers=None, author=None):
            l.append('<hr/>\n')
            l.append('<h1>%s</h1>\n' % desc) # desc is from outer scope
            l.append('<pre>\n')
            l.append('From: %s\n' % author.username)
            l.append('To: %s\n' % ' '.join(recipients))
            l.append('Subject: %s\n' % subject)
            l.append('</pre>\n')
            l.append('<hr/>\n')
            l.append('<pre>%s</pre>\n' % body)
            l.append('<hr/>\n')
            l.append(html_body)
            l.append('<hr/>\n')

        with test_context(self.app):
            with mock.patch.object(kallithea.lib.celerylib.tasks, 'send_email', send_email):
                pr_kwargs = dict(
                    pr_nice_id='#7',
                    pr_title='The Title',
                    pr_title_short='The Title',
                    pr_url='http://pr.org/7',
                    pr_target_repo='http://mainline.com/repo',
                    pr_target_branch='trunk',
                    pr_source_repo='https://dev.org/repo',
                    pr_source_branch='devbranch',
                    pr_owner=User.get(self.u2),
                    pr_owner_username='u2'
                )

                for type_, body, kwargs in [
                        (Notification.TYPE_CHANGESET_COMMENT,
                         u'This is the new comment.\n\n - and here it ends indented.',
                         dict(
                            short_id='cafe1234',
                            raw_id='cafe1234c0ffeecafe',
                            branch='brunch',
                            cs_comment_user='Opinionated User (jsmith)',
                            cs_comment_url='http://comment.org',
                            is_mention=[False, True],
                            message='This changeset did something clever which is hard to explain',
                            message_short='This changeset did something cl...',
                            status_change=[None, 'Approved'],
                            cs_target_repo='repo_target',
                            cs_url='http://changeset.com',
                            cs_author=User.get(self.u2))),
                        (Notification.TYPE_MESSAGE,
                         u'This is the body of the test message\n - nothing interesting here except indentation.',
                         dict()),
                        #(Notification.TYPE_MENTION, '$body', None), # not used
                        (Notification.TYPE_REGISTRATION,
                         u'Registration body',
                         dict(
                            new_username='newbie',
                            registered_user_url='http://newbie.org',
                            new_email='new@email.com',
                            new_full_name='New Full Name')),
                        (Notification.TYPE_PULL_REQUEST,
                         u'This PR is awesome because it does stuff\n - please approve indented!',
                         dict(
                            pr_user_created='Requesting User (root)', # pr_owner should perhaps be used for @mention in description ...
                            is_mention=[False, True],
                            pr_revisions=[('123abc'*7, "Introduce one and two\n\nand that's it"), ('567fed'*7, 'Make one plus two equal tree')],
                            org_repo_name='repo_org',
                            **pr_kwargs)),
                        (Notification.TYPE_PULL_REQUEST_COMMENT,
                         u'Me too!\n\n - and indented on second line',
                         dict(
                            closing_pr=[False, True],
                            is_mention=[False, True],
                            pr_comment_user='Opinionated User (jsmith)',
                            pr_comment_url='http://pr.org/comment',
                            status_change=[None, 'Under Review'],
                            **pr_kwargs)),
                        ]:
                    kwargs['repo_name'] = u'repo/name'
                    params = [(type_, type_, body, kwargs)]
                    for param_name in ['is_mention', 'status_change', 'closing_pr']: # TODO: inline/general
                        if not isinstance(kwargs.get(param_name), list):
                            continue
                        new_params = []
                        for v in kwargs[param_name]:
                            for desc, type_, body, kwargs in params:
                                kwargs = dict(kwargs)
                                kwargs[param_name] = v
                                new_params.append(('%s, %s=%r' % (desc, param_name, v), type_, body, kwargs))
                        params = new_params

                    for desc, type_, body, kwargs in params:
                        # desc is used as "global" variable
                        notification = NotificationModel().create(created_by=self.u1,
                                                                  subject=u'unused', body=body, email_kwargs=kwargs,
                                                                  recipients=[self.u2], type_=type_)

                # Email type TYPE_PASSWORD_RESET has no corresponding notification type - test it directly:
                desc = 'TYPE_PASSWORD_RESET'
                kwargs = dict(user='John Doe', reset_token='decbf64715098db5b0bd23eab44bd792670ab746', reset_url='http://reset.com/decbf64715098db5b0bd23eab44bd792670ab746')
                kallithea.lib.celerylib.tasks.send_email(['john@doe.com'],
                                                         "Password reset link",
                                                         EmailNotificationModel().get_email_tmpl(EmailNotificationModel.TYPE_PASSWORD_RESET, 'txt', **kwargs),
                                                         EmailNotificationModel().get_email_tmpl(EmailNotificationModel.TYPE_PASSWORD_RESET, 'html', **kwargs),
                                                         author=User.get(self.u1))

        out = '<!doctype html>\n<html lang="en">\n<head><title>Notifications</title><meta http-equiv="Content-Type" content="text/html; charset=UTF-8"></head>\n<body>\n%s\n</body>\n</html>\n' % \
            re.sub(r'<(/?(?:!doctype|html|head|title|meta|body)\b[^>]*)>', r'<!--\1-->', ''.join(l))

        outfn = os.path.join(os.path.dirname(__file__), 'test_dump_html_mails.out.html')
        reffn = os.path.join(os.path.dirname(__file__), 'test_dump_html_mails.ref.html')

        with open(outfn, 'w') as f:
            f.write(out)
        with open(reffn) as f:
            ref = f.read()
        assert ref == out # copy test_dump_html_mails.out.html to test_dump_html_mails.ref.html to update expectations
        os.unlink(outfn)
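
A condensed sketch of the mocking pattern the tests above rely on (illustrative only, not part of the changeset): the celery send_email task is swapped for a local function, so nothing is actually mailed and the rendered recipients, subject and bodies can be captured and asserted on.

    captured = []

    def fake_send_email(recipients, subject, body='', html_body='', headers=None, author=None):
        captured.append((recipients, subject, body, html_body))

    with mock.patch.object(kallithea.lib.celerylib.tasks, 'send_email', fake_send_email):
        pass  # any NotificationModel().create(...) issued here ends up in 'captured'
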
@@ -1,79 +1,79 @@
#!/usr/bin/env python2

"""
Consistent formatting of rst section titles
"""

import re
import subprocess

spaces = [
    (0, 1), # we assume this is a over-and-underlined header
    (2, 1),
    (1, 1),
    (1, 0),
    (1, 0),
    (1, 0),
]

# http://sphinx-doc.org/rest.html :
# for the Python documentation, this convention is used which you may follow:
# # with overline, for parts
# * with overline, for chapters
# =, for sections
# -, for subsections
# ^, for subsubsections
# ", for paragraphs
pystyles = ['#', '*', '=', '-', '^', '"']

# match on a header line underlined with one of the valid characters
headermatch = re.compile(r'''\n*(.+)\n([][!"#$%&'()*+,./:;<=>?@\\^_`{|}~-])\2{2,}\n+''', flags=re.MULTILINE)


def main():
    for fn in subprocess.check_output(['hg', 'loc', 'set:**.rst+kallithea/i18n/how_to']).splitlines():
        print 'processing %s:' % fn
        s = open(fn).read()

        # find levels and their styles
        lastpos = 0
        styles = []
        for markup in headermatch.findall(s):
            style = markup[1]
            if style in styles:
                stylepos = styles.index(style)
                if stylepos > lastpos + 1:
                    print 'bad style %r with level %s - was at %s' % (style, stylepos, lastpos)
            else:
                stylepos = len(styles)
                if stylepos > lastpos + 1:
                    print 'bad new style %r - expected %r' % (style, styles[lastpos + 1])
                else:
                    styles.append(style)
            lastpos = stylepos

        # remove superfluous spacing (may however be restored by header spacing)
        s = re.sub(r'''(\n\n)\n*''', r'\1', s, flags=re.MULTILINE)

        if styles:
            newstyles = pystyles[pystyles.index(styles[0]):]

            def subf(m):
                title, style = m.groups()
                level = styles.index(style)
                before, after = spaces[level]
                newstyle = newstyles[level]
                return '\n' * (before + 1) + title + '\n' + newstyle * len(title) + '\n' * (after + 1)
            s = headermatch.sub(subf, s)

        # remove superfluous spacing when headers are adjacent
        s = re.sub(r'''(\n.+\n([][!"#$%&'()*+,./:;<=>?@\\^_`{|}~-])\2{2,}\n\n\n)\n*''', r'\1', s, flags=re.MULTILINE)
        # fix trailing space and spacing before link sections
        s = s.strip() + '\n'
        s = re.sub(r'''\n+((?:\.\. _[^\n]*\n)+)$''', r'\n\n\n\1', s)

        open(fn, 'w').write(s)
        print subprocess.check_output(['hg', 'diff', fn])


if __name__ == '__main__':
    main()
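
A small illustration of what headermatch captures (illustrative only, not part of the changeset): for a title underlined with '=', findall() yields the title text and the underline character, which is what main() uses to derive the heading level.

    sample = 'Overview\n========\n\nSome text follows here.\n'
    print headermatch.findall(sample)   # [('Overview', '=')]
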
@@ -1,155 +1,155 @@
#!/usr/bin/env python2
"""
Based on kallithea/lib/paster_commands/template.ini.mako, generate
    development.ini
    kallithea/tests/test.ini
"""

import re

makofile = 'kallithea/lib/paster_commands/template.ini.mako'

# the mako conditionals used in all other ini files and templates
selected_mako_conditionals = set([
    "database_engine == 'sqlite'",
    "http_server == 'waitress'",
    "error_aggregation_service == 'appenlight'",
    "error_aggregation_service == 'sentry'",
])

# the mako variables used in all other ini files and templates
mako_variable_values = {
    'host': '127.0.0.1',
    'port': '5000',
    'uuid()': '${app_instance_uuid}',
}

# files to be generated from the mako template
ini_files = [
    ('kallithea/tests/test.ini',
        '''
        Kallithea - config for tests:
        sqlalchemy and kallithea_test.sqlite
        custom logging
        ''',
        {
            '[server:main]': {
                'port': '4999',
            },
            '[app:main]': {
                'app_instance_uuid': 'test',
                'show_revision_number': 'true',
                'beaker.cache.sql_cache_short.expire': '1',
                'beaker.session.secret': '{74e0cd75-b339-478b-b129-07dd221def1f}',
            },
            '[handler_console]': {
                'level': 'DEBUG',
                'formatter': 'color_formatter',
            },
            # The 'handler_console_sql' block is very similar to the one in
            # development.ini, but without the explicit 'level=DEBUG' setting:
            # it causes duplicate sqlalchemy debug logs, one through
            # handler_console_sql and another through another path.
            '[handler_console_sql]': {
                'formatter': 'color_formatter_sql',
            },
        },
    ),
    ('development.ini',
        '''
        Kallithea - Development config:
        listening on *:5000
        sqlite and kallithea.db
        initial_repo_scan = true
        debug = true
        verbose and colorful logging
        ''',
        {
            '[server:main]': {
                'host': '0.0.0.0',
            },
            '[app:main]': {
                'initial_repo_scan': 'true',
                'debug': 'true',
                'app_instance_uuid': 'development-not-secret',
                'beaker.session.secret': 'development-not-secret',
            },
            '[handler_console]': {
                'level': 'DEBUG',
                'formatter': 'color_formatter',
            },
            '[handler_console_sql]': {
                'level': 'DEBUG',
                'formatter': 'color_formatter_sql',
            },
        },
    ),
]


def main():
    # make sure all mako lines starting with '#' (the '##' comments) are marked up as <text>
    print 'reading:', makofile
    mako_org = open(makofile).read()
    mako_no_text_markup = re.sub(r'</?%text>', '', mako_org)
    mako_marked_up = re.sub(r'\n(##.*)', r'\n<%text>\1</%text>', mako_no_text_markup, flags=re.MULTILINE)
    if mako_marked_up != mako_org:
        print 'writing:', makofile
        open(makofile, 'w').write(mako_marked_up)

    # select the right mako conditionals for the other less sophisticated formats
    def sub_conditionals(m):
        """given a %if...%endif match, replace with just the selected
        conditional sections enabled and the rest as comments
        """
        conditional_lines = m.group(1)
        def sub_conditional(m):
            """given a conditional and the corresponding lines, return them raw
            or commented out, based on whether conditional is selected
            """
            criteria, lines = m.groups()
            if criteria not in selected_mako_conditionals:
                lines = '\n'.join((l if not l or l.startswith('#') else '#' + l) for l in lines.split('\n'))
            return lines
        conditional_lines = re.sub(r'^%(?:el)?if (.*):\n((?:^[^%\n].*\n|\n)*)',
                                   sub_conditional, conditional_lines, flags=re.MULTILINE)
        return conditional_lines
    mako_no_conditionals = re.sub(r'^(%if .*\n(?:[^%\n].*\n|%elif .*\n|\n)*)%endif\n',
                                  sub_conditionals, mako_no_text_markup, flags=re.MULTILINE)

    # expand mako variables
    def pyrepl(m):
        return mako_variable_values.get(m.group(1), m.group(0))
    mako_no_variables = re.sub(r'\${([^}]*)}', pyrepl, mako_no_conditionals)

    # remove utf-8 coding header
    base_ini = re.sub(r'^## -\*- coding: utf-8 -\*-\n', '', mako_no_variables)

    # create ini files
    for fn, desc, settings in ini_files:
        print 'updating:', fn
        ini_lines = re.sub(
            '# Kallithea - config file generated with kallithea-config *#\n',
            ''.join('# %-77s#\n' % l.strip() for l in desc.strip().split('\n')),
            base_ini)
        def process_section(m):
            """process a ini section, replacing values as necessary"""
            sectionname, lines = m.groups()
            if sectionname in settings:
                section_settings = settings[sectionname]
                def process_line(m):
                    """process a section line and update value if necessary"""
                    setting, value = m.groups()
                    line = m.group(0)
                    if setting in section_settings:
                        line = '%s = %s' % (setting, section_settings[setting])
                        if '$' not in value:
                            line = '#%s = %s\n%s' % (setting, value, line)
                    return line.rstrip()
                lines = re.sub(r'^([^#\n].*) = ?(.*)', process_line, lines, flags=re.MULTILINE)
            return sectionname + '\n' + lines
        ini_lines = re.sub(r'^(\[.*\])\n((?:(?:[^[\n].*)?\n)*)', process_section, ini_lines, flags=re.MULTILINE)
        open(fn, 'w').write(ini_lines)

if __name__ == '__main__':
    main()
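
The ${...} expansion step in isolation (illustrative only, not part of the changeset), using the mako_variable_values table defined above; unknown variables are left untouched:

    def demo_repl(m):
        return mako_variable_values.get(m.group(1), m.group(0))

    print re.sub(r'\${([^}]*)}', demo_repl, 'host = ${host}\nport = ${port}\nfoo = ${bar}\n')
    # host = 127.0.0.1
    # port = 5000
    # foo = ${bar}
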
@@ -1,39 +1,39 @@
#!/usr/bin/env python2

import re
import sys

if len(sys.argv) < 2:
    print 'Cleanup of superfluous % formatting of log statements.'
    print 'Usage:'
    print ''' hg revert `hg loc '*.py'|grep -v logformat.py` && scripts/logformat.py `hg loc '*.py'` && hg diff'''
    raise SystemExit(1)


logre = r'''
(log\.(?:error|info|warning|debug)
[(][ \n]*
)
%s
(
[ \n]*[)]
)
'''
res = [
    # handle % () - keeping spaces around the old %
    (re.compile(logre % r'''("[^"]*"|'[^']*') ([\n ]*) % ([\n ]*) \( ( (?:[^()]|\n)* (?: \( (?:[^()]|\n)* \) (?:[^()]|\n)* )* ) \) ''', flags=re.MULTILINE|re.VERBOSE), r'\1\2,\3\4\5\6'),
    # handle % without () - keeping spaces around the old %
    (re.compile(logre % r'''("[^"]*"|'[^']*') ([\n ]*) % ([\n ]*) ( (?:[^()]|\n)* (?: \( (?:[^()]|\n)* \) (?:[^()]|\n)* )* ) ''', flags=re.MULTILINE|re.VERBOSE), r'\1\2,\3\4\5\6'),
    # remove extra space if it is on next line
    (re.compile(logre % r'''("[^"]*"|'[^']*') , (\n [ ]) ([ ][\n ]*) ( (?:[^()]|\n)* (?: \( (?:[^()]|\n)* \) (?:[^()]|\n)* )* ) ''', flags=re.MULTILINE|re.VERBOSE), r'\1\2,\3\4\5\6'),
    # remove extra space if it is on same line
    (re.compile(logre % r'''("[^"]*"|'[^']*') , [ ]+ () ( [\n ]+) ( (?:[^()]|\n)* (?: \( (?:[^()]|\n)* \) (?:[^()]|\n)* )* ) ''', flags=re.MULTILINE|re.VERBOSE), r'\1\2,\3\4\5\6'),
    # remove trailing , and space
    (re.compile(logre % r'''("[^"]*"|'[^']*') , () ( [\n ]*) ( (?:[^()]|\n)* (?: \( (?:[^()]|\n)* \) (?:[^()]|\n)* )* [^(), \n] ) [ ,]*''', flags=re.MULTILINE|re.VERBOSE), r'\1\2,\3\4\5\6'),
]

for f in sys.argv[1:]:
    s = open(f).read()
    for r, t in res:
        s = r.sub(t, s)
    open(f, 'w').write(s)
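
The net effect of the substitutions above on a typical statement (illustrative only, not part of the changeset; the log call is a made-up example): eager %-formatting is rewritten into lazy argument passing to the logger.

    example = 'log.debug("Got %s" % result)\n'
    for r, t in res:
        example = r.sub(t, example)
    print example   # log.debug("Got %s", result)
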
@@ -1,253 +1,253 b''
#!/usr/bin/env python2
# -*- coding: utf-8 -*-

"""
Kallithea script for maintaining contributor lists from version control
history.

This script and the data in it are a best-effort attempt at reverse engineering
previous attributions and correlating them with version control history while
preserving all existing copyright statements and attribution. The script only
processes and summarizes information found elsewhere - it does not by itself
make any claims. Comments in the script are an attempt at reverse engineering
possible explanations - they do not show any intent or confirm that it is
correct.

Three files are generated / modified by this script:

kallithea/templates/about.html claims to show copyright holders, and the GPL
license requires such existing "legal notices" to be preserved. We also try to
keep it updated with copyright holders, but do not claim it is a correct list.

CONTRIBUTORS has the purpose of giving credit where credit is due and listing
all the contributor names found in the source.

kallithea/templates/base/base.html contains the copyright years in the page
footer.

Both make a best effort of listing all copyright holders, but revision control
history might be a better and more definitive source.

Contributors are sorted "fairly" by copyright year and amount of
contribution.

New contributors are listed without considering whether the contribution
contains copyrightable work.

When the copyright might belong to a different legal entity than the
contributor, the legal entity is given credit too.
"""

# Some committer entries are so wrong that they don't point at any contributor:
total_ignore = set()
total_ignore.add('*** failed to import extension hggit: No module named hggit')
total_ignore.add('<>')

# Normalize some committer names where people have contributed under different
# names or email addresses:
name_fixes = {}
name_fixes['Andrew Shadura'] = "Andrew Shadura <andrew@shadura.me>"
name_fixes['aparkar'] = "Aparkar <aparkar@icloud.com>"
name_fixes['Aras Pranckevicius'] = "Aras Pranckevičius <aras@unity3d.com>"
name_fixes['Augosto Hermann'] = "Augusto Herrmann <augusto.herrmann@planejamento.gov.br>"
name_fixes['"Bradley M. Kuhn" <bkuhn@ebb.org>'] = "Bradley M. Kuhn <bkuhn@sfconservancy.org>"
name_fixes['dmitri.kuznetsov'] = "Dmitri Kuznetsov"
name_fixes['Dmitri Kuznetsov'] = "Dmitri Kuznetsov"
name_fixes['domruf'] = "Dominik Ruf <dominikruf@gmail.com>"
name_fixes['Ingo von borstel'] = "Ingo von Borstel <kallithea@planetmaker.de>"
name_fixes['Jan Heylen'] = "Jan Heylen <heyleke@gmail.com>"
name_fixes['Jason F. Harris'] = "Jason Harris <jason@jasonfharris.com>"
name_fixes['Jelmer Vernooij'] = "Jelmer Vernooĳ <jelmer@samba.org>"
name_fixes['jfh <jason@jasonfharris.com>'] = "Jason Harris <jason@jasonfharris.com>"
name_fixes['Leonardo Carneiro<leonardo@unity3d.com>'] = "Leonardo Carneiro <leonardo@unity3d.com>"
name_fixes['leonardo'] = "Leonardo Carneiro <leonardo@unity3d.com>"
name_fixes['Leonardo <leo@unity3d.com>'] = "Leonardo Carneiro <leonardo@unity3d.com>"
name_fixes['Les Peabody'] = "Les Peabody <lpeabody@gmail.com>"
name_fixes['"Lorenzo M. Catucci" <lorenzo@sancho.ccd.uniroma2.it>'] = "Lorenzo M. Catucci <lorenzo@sancho.ccd.uniroma2.it>"
name_fixes['Lukasz Balcerzak'] = "Łukasz Balcerzak <lukaszbalcerzak@gmail.com>"
name_fixes['mao <mao@lins.fju.edu.tw>'] = "Ching-Chen Mao <mao@lins.fju.edu.tw>"
name_fixes['marcink'] = "Marcin Kuźmiński <marcin@python-works.com>"
name_fixes['Marcin Kuzminski'] = "Marcin Kuźmiński <marcin@python-works.com>"
name_fixes['nansenat16@null.tw'] = "nansenat16 <nansenat16@null.tw>"
name_fixes['Peter Vitt'] = "Peter Vitt <petervitt@web.de>"
name_fixes['philip.j@hostdime.com'] = "Philip Jameson <philip.j@hostdime.com>"
name_fixes['Søren Løvborg'] = "Søren Løvborg <sorenl@unity3d.com>"
name_fixes['Thomas De Schampheleire'] = "Thomas De Schampheleire <thomas.de.schampheleire@gmail.com>"
name_fixes['Weblate'] = "<>"
name_fixes['xpol'] = "xpol <xpolife@gmail.com>"


# Some committer email address domains that indicate that another entity might
# hold some copyright too:
domain_extra = {}
domain_extra['unity3d.com'] = "Unity Technologies"
domain_extra['rhodecode.com'] = "RhodeCode GmbH"

# Repository history shows some old contributions that traditionally haven't
# been listed in about.html - preserve that:
no_about = set(total_ignore)
# The following contributors were traditionally not listed in about.html and it
# seems unclear if the copyright is personal or belongs to a company.
no_about.add(('Thayne Harbaugh <thayne@fusionio.com>', '2011'))
no_about.add(('Dies Koper <diesk@fast.au.fujitsu.com>', '2012'))
no_about.add(('Erwin Kroon <e.kroon@smartmetersolutions.nl>', '2012'))
no_about.add(('Vincent Caron <vcaron@bearstech.com>', '2012'))
# These contributors' contributions might be too small to be copyrightable:
no_about.add(('philip.j@hostdime.com', '2012'))
no_about.add(('Stefan Engel <mail@engel-stefan.de>', '2012'))
no_about.add(('Ton Plomp <tcplomp@gmail.com>', '2013'))
# Was reworked and contributed later and shadowed by other contributions:
no_about.add(('Sean Farley <sean.michael.farley@gmail.com>', '2013'))

# Preserve contributors listed in about.html but not appearing in repository
# history:
other_about = [
    ("2011", "Aparkar <aparkar@icloud.com>"),
    ("2010", "RhodeCode GmbH"),
    ("2011", "RhodeCode GmbH"),
    ("2012", "RhodeCode GmbH"),
    ("2013", "RhodeCode GmbH"),
]

# Preserve contributors listed in CONTRIBUTORS but not appearing in repository
# history:
other_contributors = [
    ("", "Andrew Kesterson <andrew@aklabs.net>"),
    ("", "cejones"),
    ("", "David A. Sjøen <david.sjoen@westcon.no>"),
    ("", "James Rhodes <jrhodes@redpointsoftware.com.au>"),
    ("", "Jonas Oberschweiber <jonas.oberschweiber@d-velop.de>"),
    ("", "larikale"),
    ("", "RhodeCode GmbH"),
    ("", "Sebastian Kreutzberger <sebastian@rhodecode.com>"),
    ("", "Steve Romanow <slestak989@gmail.com>"),
    ("", "SteveCohen"),
    ("", "Thomas <thomas@rhodecode.com>"),
    ("", "Thomas Waldmann <tw-public@gmx.de>"),
]

import os
import re
from collections import defaultdict


def sortkey(x):
    """Return key for sorting contributors "fairly":
    * latest contribution
    * first contribution
    * number of contribution years
    * name (with some unicode normalization)
    The entries must be 2-tuples of a list of string years and the unicode name"""
    return (x[0] and -int(x[0][-1]),
            x[0] and int(x[0][0]),
            -len(x[0]),
            x[1].decode('utf8').lower().replace(u'\xe9', u'e').replace(u'\u0142', u'l')
            )
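# For example (illustrative values): sortkey((['2015', '2016'], 'Jane Doe'))
# returns (-2016, 2015, -2, u'jane doe'), so contributors with the most recent
# last contribution sort first; ties are broken by earliest first contribution,
# then by the number of contribution years, then by the normalized name.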


def nice_years(l, dash='-', join=' '):
    """Convert a list of years into brief range like '1900-1901, 1921'."""
    if not l:
        return ''
    start = end = int(l[0])
    ranges = []
    for year in l[1:] + [0]:
        year = int(year)
        if year == end + 1:
            end = year
            continue
        if start == end:
            ranges.append('%s' % start)
        else:
            ranges.append('%s%s%s' % (start, dash, end))
        start = end = year
    assert start == 0 and end == 0, (start, end)
    return join.join(ranges)
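# For example (illustrative): nice_years(['2010', '2011', '2013']) returns
# '2010-2011 2013', while nice_years(['2010', '2011', '2013'], '&ndash;', ', ')
# returns '2010&ndash;2011, 2013' as used for the HTML templates below.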


def insert_entries(
        filename,
        all_entries,
        no_entries,
        domain_extra,
        split_re,
        normalize_name,
        format_f):
    """Update file with contributor information.
    all_entries: list of tuples with year and name
    no_entries: set of names or name and year tuples to ignore
    domain_extra: map domain name to extra credit name
    split_re: regexp matching the part of file to rewrite
    normalize_name: function to normalize names for grouping and display
    format_f: function formatting year list and name to a string
    """
    name_years = defaultdict(set)

    for year, name in all_entries:
        if name in no_entries or (name, year) in no_entries:
            continue
        domain = name.split('@', 1)[-1].rstrip('>')
        if domain in domain_extra:
            name_years[domain_extra[domain]].add(year)
        name_years[normalize_name(name)].add(year)

    l = [(list(sorted(year for year in years if year)), name)
         for name, years in name_years.items()]
    l.sort(key=sortkey)

    with open(filename) as f:
        pre, post = re.split(split_re, f.read())

    with open(filename, 'w') as f:
        f.write(pre +
                ''.join(format_f(years, name) for years, name in l) +
                post)
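# Note how insert_entries gives extra credit via domain_extra: a committer such
# as "Jane <jane@unity3d.com>" (illustrative) also adds her contribution years
# to the "Unity Technologies" entry, because that email domain is listed in
# domain_extra.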


def main():
    repo_entries = [
        (year, name_fixes.get(name) or name_fixes.get(name.rsplit('<', 1)[0].strip()) or name)
        for year, name in
        (line.strip().split(' ', 1)
         for line in os.popen("""hg log -r '::.' -T '{date(date,"%Y")} {author}\n'""").readlines())
    ]
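    # Each line produced by the hg log template above looks like (illustrative):
    #   2016 Jane Doe <jane@example.com>
    # It is split on the first space into a (year, author) pair, and name_fixes
    # maps authors who committed under several names or addresses to one identity.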

    insert_entries(
        filename='kallithea/templates/about.html',
        all_entries=repo_entries + other_about,
        no_entries=no_about,
        domain_extra=domain_extra,
        split_re=r'(?: <li>Copyright © [^\n]*</li>\n)*',
        normalize_name=lambda name: name.split('<', 1)[0].strip(),
        format_f=lambda years, name: ' <li>Copyright © %s, %s</li>\n' % (nice_years(years, '&ndash;', ', '), name),
    )

    insert_entries(
        filename='CONTRIBUTORS',
        all_entries=repo_entries + other_contributors,
        no_entries=total_ignore,
        domain_extra=domain_extra,
        split_re=r'(?: [^\n]*\n)*',
        normalize_name=lambda name: name,
        format_f=lambda years, name: (' %s%s%s\n' % (name, ' ' if years else '', nice_years(years))),
    )

    insert_entries(
        filename='kallithea/templates/base/base.html',
        all_entries=repo_entries,
        no_entries=total_ignore,
        domain_extra={},
        split_re=r'(?<=©) .* (?=by various authors)',
        normalize_name=lambda name: '',
        format_f=lambda years, name: ' ' + nice_years(years, '&ndash;', ', ') + ' ',
    )


if __name__ == '__main__':
    main()


# To list new contributors since last tagging:
# { hg log -r '::tagged()' -T ' {author}\n {author}\n'; hg log -r '::.' -T ' {author}\n' | sort | uniq; } | sort | uniq -u
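# (The one-liner works because authors already present in tagged history are
# printed at least twice and are therefore dropped by 'uniq -u', while authors
# appearing only since the last tag are printed exactly once and survive.)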