python3: removed from future imports

Author: super-admin
Changeset: r4912:fba394fb (default branch)
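The change is identical in each file below: the "from __future__ import unicode_literals" line is dropped, because under Python 3 every string literal is already a unicode str and the import is a no-op. A quick illustrative check (assumes a Python 3 interpreter; the variable is made up):

# Illustrative only: Python 3 literals are unicode by default, so the
# __future__ import removed in this changeset has no effect there.
text = "naïve"
assert isinstance(text, str)                     # already unicode str
assert text.encode("utf-8") == b"na\xc3\xafve"   # bytes need an explicit encode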
@@ -1,106 +1,106 @@
 # -*- coding: utf-8 -*-
 
 # Copyright (C) 2010-2020 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
 
 """
 Single source for redirection links.
 
 The goal of this module is to provide a single source of truth for external
 links. The data inside this module is used to configure the routing system
 of Enterprise, and it also serves as a base to check that this data and our
 server configuration are in sync.
 
 .. py:data:: link_config
 
     Contains the configuration for external links. Each item is supposed to be
     a `dict` like this example::
 
         {"name": "url_name",
          "target": "https://rhodecode.com/r1/enterprise/keyword/",
          "external_target": "https://example.com/some-page.html",
         }
 
     then you can retrieve the URL by simply calling the URL function:
 
     `h.route_path('url_name')`
 
     The redirection must first be implemented on our servers before
     you can see it working.
 """
 # pragma: no cover
-from __future__ import unicode_literals
+
 
 link_config = [
     {
         "name": "enterprise_docs",
         "target": "https://rhodecode.com/r1/enterprise/docs/",
         "external_target": "https://docs.rhodecode.com/RhodeCode-Enterprise/",
     },
     {
         "name": "enterprise_log_file_locations",
         "target": "https://rhodecode.com/r1/enterprise/docs/admin-system-overview/",
         "external_target": "https://docs.rhodecode.com/RhodeCode-Enterprise/admin/system-overview.html#log-files",
     },
     {
         "name": "enterprise_issue_tracker_settings",
         "target": "https://rhodecode.com/r1/enterprise/docs/issue-trackers-overview/",
         "external_target": "https://docs.rhodecode.com/RhodeCode-Enterprise/issue-trackers/issue-trackers.html",
     },
     {
         "name": "enterprise_svn_setup",
         "target": "https://rhodecode.com/r1/enterprise/docs/svn-setup/",
         "external_target": "https://docs.rhodecode.com/RhodeCode-Enterprise/admin/svn-http.html",
     },
     {
         "name": "enterprise_license_convert_from_old",
         "target": "https://rhodecode.com/r1/enterprise/convert-license/",
         "external_target": "https://rhodecode.com/u/license-upgrade",
     },
     {
         "name": "rst_help",
         "target": "http://docutils.sourceforge.io/docs/user/rst/quickref.html",
         "external_target": "https://docutils.sourceforge.io/docs/user/rst/quickref.html",
     },
     {
         "name": "markdown_help",
         "target": "https://daringfireball.net/projects/markdown/syntax",
         "external_target": "https://daringfireball.net/projects/markdown/syntax",
     },
     {
         "name": "rhodecode_official",
         "target": "https://rhodecode.com",
         "external_target": "https://rhodecode.com/",
     },
     {
         "name": "rhodecode_support",
         "target": "https://rhodecode.com/help/",
         "external_target": "https://rhodecode.com/support",
     },
     {
         "name": "rhodecode_translations",
         "target": "https://rhodecode.com/translate/enterprise",
         "external_target": "https://explore.transifex.com/rhodecode/RhodeCode/",
     },
 
 ]
 
 
 def connect_redirection_links(config):
     for link in link_config:
         config.add_route(link['name'], link['target'], static=True)
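For context, a hedged sketch of how this table gets consumed, assuming connect_redirection_links from the module above is importable and a plain Pyramid Configurator (RhodeCode's routing is Pyramid-based; the make_app wrapper is illustrative, not the application's real entry point):

# Hypothetical wiring sketch: each link becomes a named static route, so
# helpers can generate the external URL purely by name.
from pyramid.config import Configurator

def make_app():
    config = Configurator()
    connect_redirection_links(config)  # add_route(name, target, static=True)
    # later, in template/helper code:
    #   h.route_path('enterprise_docs')
    #   -> 'https://rhodecode.com/r1/enterprise/docs/'
    return config.make_wsgi_app()

Pyramid's static=True flag registers a route for URL generation only; it never matches an incoming request, which is exactly what an external redirect table needs.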
@@ -1,329 +1,329 @@
 # -*- coding: utf-8 -*-
 
 # Copyright (C) 2012-2020 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
 
-from __future__ import unicode_literals
+
 import logging
 
 import colander
 import deform.widget
 from mako.template import Template
 
 from rhodecode import events
 from rhodecode.model.validation_schema.widgets import CheckboxChoiceWidgetDesc
 from rhodecode.translation import _
 from rhodecode.lib.celerylib import run_task
 from rhodecode.lib.celerylib import tasks
 from rhodecode.integrations.types.base import (
     IntegrationTypeBase, render_with_traceback)
 
 
 log = logging.getLogger(__name__)
 
 REPO_PUSH_TEMPLATE_PLAINTEXT = Template('''
 Commits:
 
 % for commit in data['push']['commits']:
 ${commit['url']} by ${commit['author']} at ${commit['date']}
 ${commit['message']}
 ----
 
 % endfor
 ''')
 
 REPO_PUSH_TEMPLATE_HTML = Template('''
 <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
 <html xmlns="http://www.w3.org/1999/xhtml">
     <head>
         <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
         <meta name="viewport" content="width=device-width, initial-scale=1.0"/>
         <title>${subject}</title>
         <style type="text/css">
             /* Based on The MailChimp Reset INLINE: Yes. */
             #outlook a {padding:0;} /* Force Outlook to provide a "view in browser" menu link. */
             body{width:100% !important; -webkit-text-size-adjust:100%; -ms-text-size-adjust:100%; margin:0; padding:0;}
             /* Prevent Webkit and Windows Mobile platforms from changing default font sizes.*/
             .ExternalClass {width:100%;} /* Force Hotmail to display emails at full width */
             .ExternalClass, .ExternalClass p, .ExternalClass span, .ExternalClass font, .ExternalClass td, .ExternalClass div {line-height: 100%;}
             /* Forces Hotmail to display normal line spacing. More on that: http://www.emailonacid.com/forum/viewthread/43/ */
             #backgroundTable {margin:0; padding:0; line-height: 100% !important;}
             /* End reset */
 
             /* defaults for images*/
             img {outline:none; text-decoration:none; -ms-interpolation-mode: bicubic;}
             a img {border:none;}
             .image_fix {display:block;}
 
             body {line-height:1.2em;}
             p {margin: 0 0 20px;}
             h1, h2, h3, h4, h5, h6 {color:#323232!important;}
             a {color:#427cc9;text-decoration:none;outline:none;cursor:pointer;}
             a:focus {outline:none;}
             a:hover {color: #305b91;}
             h1 a, h2 a, h3 a, h4 a, h5 a, h6 a {color:#427cc9!important;text-decoration:none!important;}
             h1 a:active, h2 a:active, h3 a:active, h4 a:active, h5 a:active, h6 a:active {color: #305b91!important;}
             h1 a:visited, h2 a:visited, h3 a:visited, h4 a:visited, h5 a:visited, h6 a:visited {color: #305b91!important;}
             table {font-size:13px;border-collapse:collapse;mso-table-lspace:0pt;mso-table-rspace:0pt;}
             table td {padding:.65em 1em .65em 0;border-collapse:collapse;vertical-align:top;text-align:left;}
             input {display:inline;border-radius:2px;border-style:solid;border: 1px solid #dbd9da;padding:.5em;}
             input:focus {outline: 1px solid #979797}
             @media only screen and (-webkit-min-device-pixel-ratio: 2) {
                 /* Put your iPhone 4g styles in here */
             }
 
             /* Android targeting */
             @media only screen and (-webkit-device-pixel-ratio:.75){
                 /* Put CSS for low density (ldpi) Android layouts in here */
             }
             @media only screen and (-webkit-device-pixel-ratio:1){
                 /* Put CSS for medium density (mdpi) Android layouts in here */
             }
             @media only screen and (-webkit-device-pixel-ratio:1.5){
                 /* Put CSS for high density (hdpi) Android layouts in here */
             }
             /* end Android targeting */
 
         </style>
 
         <!-- Targeting Windows Mobile -->
         <!--[if IEMobile 7]>
         <style type="text/css">
 
         </style>
         <![endif]-->
 
         <!--[if gte mso 9]>
         <style>
             /* Target Outlook 2007 and 2010 */
         </style>
         <![endif]-->
     </head>
     <body>
         <!-- Wrapper/Container Table: Use a wrapper table to control the width and the background color consistently of your email. Use this approach instead of setting attributes on the body tag. -->
         <table cellpadding="0" cellspacing="0" border="0" id="backgroundTable" align="left" style="margin:1%;width:97%;padding:0;font-family:sans-serif;font-weight:100;border:1px solid #dbd9da">
             <tr>
                 <td valign="top" style="padding:0;">
                     <table cellpadding="0" cellspacing="0" border="0" align="left" width="100%">
                         <tr><td style="width:100%;padding:7px;background-color:#202020" valign="top">
                             <a style="color:#eeeeee;text-decoration:none;" href="${instance_url}">
                                 ${'RhodeCode'}
                             </a>
                         </td></tr>
                         <tr>
                             <td style="padding:15px;" valign="top">
                                 % if data['push']['commits']:
                                 % for commit in data['push']['commits']:
                                 <a href="${commit['url']}">${commit['short_id']}</a> by ${commit['author']} at ${commit['date']} <br/>
                                 ${commit['message_html']} <br/>
                                 <br/>
                                 % endfor
                                 % else:
                                 No commit data
                                 % endif
                             </td>
                         </tr>
                     </table>
                 </td>
             </tr>
         </table>
         <!-- End of wrapper table -->
         <p><a style="margin-top:15px;margin-left:1%;font-family:sans-serif;font-weight:100;font-size:11px;color:#666666;text-decoration:none;" href="${instance_url}">
             ${'This is a notification from RhodeCode. %(instance_url)s' % {'instance_url': instance_url}}
         </a></p>
     </body>
 </html>
 ''')
 
 
 class EmailSettingsSchema(colander.Schema):
     @colander.instantiate(validator=colander.Length(min=1))
     class recipients(colander.SequenceSchema):
         title = _('Recipients')
         description = _('Email addresses to send push events to')
         widget = deform.widget.SequenceWidget(min_len=1)
 
         recipient = colander.SchemaNode(
             colander.String(),
             title=_('Email address'),
             description=_('Email address'),
             default='',
             validator=colander.Email(),
             widget=deform.widget.TextInputWidget(
                 placeholder='user@domain.com',
             ),
         )
 
 
 class EmailIntegrationType(IntegrationTypeBase):
     key = 'email'
     display_name = _('Email')
     description = _('Send repo push summaries to a list of recipients via email')
 
     valid_events = [
         events.RepoPushEvent
     ]
 
     @classmethod
     def icon(cls):
         return '''
         <?xml version="1.0" encoding="UTF-8" standalone="no"?>
         <svg
            xmlns:dc="http://purl.org/dc/elements/1.1/"
            xmlns:cc="http://creativecommons.org/ns#"
            xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
            xmlns:svg="http://www.w3.org/2000/svg"
            xmlns="http://www.w3.org/2000/svg"
            xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
            xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
            viewBox="0 -256 1850 1850"
            id="svg2989"
            version="1.1"
            inkscape:version="0.48.3.1 r9886"
            width="100%"
            height="100%"
            sodipodi:docname="envelope_font_awesome.svg">
           <metadata
              id="metadata2999">
             <rdf:RDF>
               <cc:Work
                  rdf:about="">
                 <dc:format>image/svg+xml</dc:format>
                 <dc:type
                    rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
               </cc:Work>
             </rdf:RDF>
           </metadata>
           <defs
              id="defs2997" />
           <sodipodi:namedview
              pagecolor="#ffffff"
              bordercolor="#666666"
              borderopacity="1"
              objecttolerance="10"
              gridtolerance="10"
              guidetolerance="10"
              inkscape:pageopacity="0"
              inkscape:pageshadow="2"
              inkscape:window-width="640"
              inkscape:window-height="480"
              id="namedview2995"
              showgrid="false"
              inkscape:zoom="0.13169643"
              inkscape:cx="896"
              inkscape:cy="896"
              inkscape:window-x="0"
              inkscape:window-y="25"
              inkscape:window-maximized="0"
              inkscape:current-layer="svg2989" />
           <g
              transform="matrix(1,0,0,-1,37.966102,1282.678)"
              id="g2991">
             <path
                d="m 1664,32 v 768 q -32,-36 -69,-66 -268,-206 -426,-338 -51,-43 -83,-67 -32,-24 -86.5,-48.5 Q 945,256 897,256 h -1 -1 Q 847,256 792.5,280.5 738,305 706,329 674,353 623,396 465,528 197,734 160,764 128,800 V 32 Q 128,19 137.5,9.5 147,0 160,0 h 1472 q 13,0 22.5,9.5 9.5,9.5 9.5,22.5 z m 0,1051 v 11 13.5 q 0,0 -0.5,13 -0.5,13 -3,12.5 -2.5,-0.5 -5.5,9 -3,9.5 -9,7.5 -6,-2 -14,2.5 H 160 q -13,0 -22.5,-9.5 Q 128,1133 128,1120 128,952 275,836 468,684 676,519 682,514 711,489.5 740,465 757,452 774,439 801.5,420.5 829,402 852,393 q 23,-9 43,-9 h 1 1 q 20,0 43,9 23,9 50.5,27.5 27.5,18.5 44.5,31.5 17,13 46,37.5 29,24.5 35,29.5 208,165 401,317 54,43 100.5,115.5 46.5,72.5 46.5,131.5 z m 128,37 V 32 q 0,-66 -47,-113 -47,-47 -113,-47 H 160 Q 94,-128 47,-81 0,-34 0,32 v 1088 q 0,66 47,113 47,47 113,47 h 1472 q 66,0 113,-47 47,-47 47,-113 z"
                id="path2993"
                inkscape:connector-curvature="0"
                style="fill:currentColor" />
           </g>
         </svg>
         '''
 
     def settings_schema(self):
         schema = EmailSettingsSchema()
         schema.add(colander.SchemaNode(
             colander.Set(),
             widget=CheckboxChoiceWidgetDesc(
                 values=sorted(
                     [(e.name, e.display_name, e.description) for e in self.valid_events]
                 ),
             ),
             description="List of events activated for this integration",
             name='events'
         ))
         return schema
 
     def send_event(self, event):
         log.debug('handling event %s with integration %s', event.name, self)
 
         if event.__class__ not in self.valid_events:
             log.debug('event %r not present in valid event list (%s)', event, self.valid_events)
             return
 
         if not self.event_enabled(event):
             # NOTE(marcink): for legacy reasons we're skipping this check...
             # since the email event hasn't had any settings...
             pass
 
         handler = EmailEventHandler(self.settings)
         handler(event, event_data=event.as_dict())
 
 
 class EmailEventHandler(object):
     def __init__(self, integration_settings):
         self.integration_settings = integration_settings
 
     def __call__(self, event, event_data):
         if isinstance(event, events.RepoPushEvent):
             self.repo_push_handler(event, event_data)
         else:
             log.debug('ignoring event: %r', event)
 
     def repo_push_handler(self, event, data):
         commit_num = len(data['push']['commits'])
         server_url = data['server_url']
 
         if commit_num == 1:
             if data['push']['branches']:
                 _subject = '[{repo_name}] {author} pushed {commit_num} commit on branches: {branches}'
             else:
                 _subject = '[{repo_name}] {author} pushed {commit_num} commit'
             subject = _subject.format(
                 author=data['actor']['username'],
                 repo_name=data['repo']['repo_name'],
                 commit_num=commit_num,
                 branches=', '.join(
                     branch['name'] for branch in data['push']['branches'])
             )
         else:
             if data['push']['branches']:
                 _subject = '[{repo_name}] {author} pushed {commit_num} commits on branches: {branches}'
             else:
                 _subject = '[{repo_name}] {author} pushed {commit_num} commits'
             subject = _subject.format(
                 author=data['actor']['username'],
                 repo_name=data['repo']['repo_name'],
                 commit_num=commit_num,
                 branches=', '.join(
                     branch['name'] for branch in data['push']['branches']))
 
         email_body_plaintext = render_with_traceback(
             REPO_PUSH_TEMPLATE_PLAINTEXT,
             data=data,
             subject=subject,
             instance_url=server_url)
 
         email_body_html = render_with_traceback(
             REPO_PUSH_TEMPLATE_HTML,
             data=data,
             subject=subject,
             instance_url=server_url)
 
         recipients = self.integration_settings['recipients']
         for email_address in recipients:
             run_task(tasks.send_email, email_address, subject,
                      email_body_plaintext, email_body_html)
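To see what the plaintext branch produces, here is a minimal, self-contained render of the same Mako template with a fabricated payload (the field names mirror what repo_push_handler passes in; the URL, author, and message are made up):

from mako.template import Template

# Same template body as REPO_PUSH_TEMPLATE_PLAINTEXT above.
PLAINTEXT = Template('''
Commits:

% for commit in data['push']['commits']:
${commit['url']} by ${commit['author']} at ${commit['date']}
${commit['message']}
----

% endfor
''')

sample = {'push': {'commits': [{
    'url': 'https://code.example.com/repo/changeset/abc123',  # made up
    'author': 'Jane Doe <jane@example.com>',                  # made up
    'date': '2020-06-01 12:00:00',
    'message': 'fix: handle empty push payloads',
}]}}

print(PLAINTEXT.render(data=sample))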
@@ -1,253 +1,253 @@
 # -*- coding: utf-8 -*-
 
 # Copyright (C) 2012-2020 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
 
-from __future__ import unicode_literals
+
 import deform
 import logging
 import requests
 import colander
 import textwrap
 from mako.template import Template
 from rhodecode import events
 from rhodecode.model.validation_schema.widgets import CheckboxChoiceWidgetDesc
 from rhodecode.translation import _
 from rhodecode.lib import helpers as h
 from rhodecode.lib.celerylib import run_task, async_task, RequestContextTask
 from rhodecode.lib.colander_utils import strip_whitespace
 from rhodecode.integrations.types.base import (
     IntegrationTypeBase, CommitParsingDataHandler, render_with_traceback,
     requests_retry_call)
 
 log = logging.getLogger(__name__)
 
 REPO_PUSH_TEMPLATE = Template('''
 <b>${data['actor']['username']}</b> pushed to repo <a href="${data['repo']['url']}">${data['repo']['repo_name']}</a>:
 <br>
 <ul>
 %for branch, branch_commits in branches_commits.items():
 <li>
 % if branch:
 <a href="${branch_commits['branch']['url']}">branch: ${branch_commits['branch']['name']}</a>
 % else:
 to trunk
 % endif
 <ul>
 % for commit in branch_commits['commits']:
 <li><a href="${commit['url']}">${commit['short_id']}</a> - ${commit['message_html']}</li>
 % endfor
 </ul>
 </li>
 %endfor
 ''')
 
 
 class HipchatSettingsSchema(colander.Schema):
     color_choices = [
         ('yellow', _('Yellow')),
         ('red', _('Red')),
         ('green', _('Green')),
         ('purple', _('Purple')),
         ('gray', _('Gray')),
     ]
 
     server_url = colander.SchemaNode(
         colander.String(),
         title=_('Hipchat server URL'),
         description=_('Hipchat integration URL.'),
         default='',
         preparer=strip_whitespace,
         validator=colander.url,
         widget=deform.widget.TextInputWidget(
             placeholder='https://?.hipchat.com/v2/room/?/notification?auth_token=?',
         ),
     )
     notify = colander.SchemaNode(
         colander.Bool(),
         title=_('Notify'),
         description=_('Send a notification to the users in the room.'),
         missing=False,
         default=False,
     )
     color = colander.SchemaNode(
         colander.String(),
         title=_('Color'),
         description=_('Background color of the message.'),
         missing='',
         validator=colander.OneOf([x[0] for x in color_choices]),
         widget=deform.widget.Select2Widget(
             values=color_choices,
         ),
     )
 
 
 class HipchatIntegrationType(IntegrationTypeBase, CommitParsingDataHandler):
     key = 'hipchat'
     display_name = _('Hipchat')
     description = _('Send events such as repo pushes and pull requests to '
                     'your hipchat channel.')
 
     @classmethod
     def icon(cls):
         return '''<?xml version="1.0" encoding="utf-8"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 1000 1000" enable-background="new 0 0 1000 1000" xml:space="preserve"><g><g transform="translate(0.000000,511.000000) scale(0.100000,-0.100000)"><path fill="#205281" d="M4197.1,4662.4c-1661.5-260.4-3018-1171.6-3682.6-2473.3C219.9,1613.6,100,1120.3,100,462.6c0-1014,376.8-1918.4,1127-2699.4C2326.7-3377.6,3878.5-3898.3,5701-3730.5l486.5,44.5l208.9-123.3c637.2-373.4,1551.8-640.6,2240.4-650.9c304.9-6.9,335.7,0,417.9,75.4c185,174.7,147.3,411.1-89.1,548.1c-315.2,181.6-620,544.7-733.1,870.1l-51.4,157.6l472.7,472.7c349.4,349.4,520.7,551.5,657.7,774.2c784.5,1281.2,784.5,2788.5,0,4052.6c-236.4,376.8-794.8,966-1178.4,1236.7c-572.1,407.7-1264.1,709.1-1993.7,870.1c-267.2,58.2-479.6,75.4-1038,82.2C4714.4,4686.4,4310.2,4679.6,4197.1,4662.4z M5947.6,3740.9c1856.7-380.3,3127.6-1709.4,3127.6-3275c0-1000.3-534.4-1949.2-1466.2-2600.1c-188.4-133.6-287.8-226.1-301.5-284.4c-41.1-157.6,263.8-938.6,397.4-1020.8c20.5-10.3,34.3-44.5,34.3-75.4c0-167.8-811.9,195.3-1363.4,609.8l-181.6,137l-332.3-58.2c-445.3-78.8-1281.2-78.8-1702.6,0C2796-2569.2,1734.1-1832.6,1220.2-801.5C983.8-318.5,905,51.5,929,613.3c27.4,640.6,243.2,1192.1,685.1,1740.3c620,770.8,1661.5,1305.2,2822.8,1452.5C4806.9,3854,5553.7,3819.7,5947.6,3740.9z"/><path fill="#205281" d="M2381.5-345.9c-75.4-106.2-68.5-167.8,34.3-322c332.3-500.2,1010.6-928.4,1760.8-1120.2c417.9-106.2,1226.4-106.2,1644.3,0c712.5,181.6,1270.9,517.3,1685.4,1014C7681-561.7,7715.3-424.7,7616-325.4c-89.1,89.1-167.9,65.1-431.7-133.6c-835.8-630.3-2028-856.4-3086.5-585.8C3683.3-938.6,3142-685,2830.3-448.7C2576.8-253.4,2463.7-229.4,2381.5-345.9z"/></g></g><!-- Svg Vector Icons : http://www.onlinewebfonts.com/icon --></svg>'''
 
     valid_events = [
         events.PullRequestCloseEvent,
         events.PullRequestMergeEvent,
         events.PullRequestUpdateEvent,
         events.PullRequestCommentEvent,
         events.PullRequestReviewEvent,
         events.PullRequestCreateEvent,
         events.RepoPushEvent,
         events.RepoCreateEvent,
     ]
 
     def send_event(self, event):
         if event.__class__ not in self.valid_events:
             log.debug('event %r not present in valid event list (%s)', event, self.valid_events)
             return
 
         if not self.event_enabled(event):
             return
 
         data = event.as_dict()
 
         text = '<b>%s</b> caused a <b>%s</b> event' % (
             data['actor']['username'], event.name)
 
         if isinstance(event, events.PullRequestCommentEvent):
             text = self.format_pull_request_comment_event(event, data)
         elif isinstance(event, events.PullRequestCommentEditEvent):
             text = self.format_pull_request_comment_event(event, data)
         elif isinstance(event, events.PullRequestReviewEvent):
             text = self.format_pull_request_review_event(event, data)
         elif isinstance(event, events.PullRequestEvent):
             text = self.format_pull_request_event(event, data)
         elif isinstance(event, events.RepoPushEvent):
             text = self.format_repo_push_event(data)
         elif isinstance(event, events.RepoCreateEvent):
             text = self.format_repo_create_event(data)
         else:
             log.error('unhandled event type: %r', event)
 
         run_task(post_text_to_hipchat, self.settings, text)
 
     def settings_schema(self):
         schema = HipchatSettingsSchema()
         schema.add(colander.SchemaNode(
             colander.Set(),
             widget=CheckboxChoiceWidgetDesc(
                 values=sorted(
                     [(e.name, e.display_name, e.description) for e in self.valid_events]
                 ),
             ),
             description="List of events activated for this integration",
             name='events'
         ))
 
         return schema
 
     def format_pull_request_comment_event(self, event, data):
         comment_text = data['comment']['text']
         if len(comment_text) > 200:
             comment_text = '{comment_text}<a href="{comment_url}">...</a>'.format(
                 comment_text=h.html_escape(comment_text[:200]),
                 comment_url=data['comment']['url'],
             )
 
         comment_status = ''
         if data['comment']['status']:
             comment_status = '[{}]: '.format(data['comment']['status'])
 
         return (textwrap.dedent(
             '''
             {user} commented on pull request <a href="{pr_url}">{number}</a> - {pr_title}:
             >>> {comment_status}{comment_text}
             ''').format(
                 comment_status=comment_status,
                 user=data['actor']['username'],
                 number=data['pullrequest']['pull_request_id'],
                 pr_url=data['pullrequest']['url'],
                 pr_status=data['pullrequest']['status'],
                 pr_title=h.html_escape(data['pullrequest']['title']),
                 comment_text=h.html_escape(comment_text)
             )
         )
 
     def format_pull_request_review_event(self, event, data):
         return (textwrap.dedent(
             '''
             Status changed to {pr_status} for pull request <a href="{pr_url}">#{number}</a> - {pr_title}
             ''').format(
                 user=data['actor']['username'],
                 number=data['pullrequest']['pull_request_id'],
                 pr_url=data['pullrequest']['url'],
                 pr_status=data['pullrequest']['status'],
                 pr_title=h.html_escape(data['pullrequest']['title']),
             )
         )
 
     def format_pull_request_event(self, event, data):
         action = {
             events.PullRequestCloseEvent: 'closed',
             events.PullRequestMergeEvent: 'merged',
             events.PullRequestUpdateEvent: 'updated',
             events.PullRequestCreateEvent: 'created',
         }.get(event.__class__, str(event.__class__))
 
         return ('Pull request <a href="{url}">#{number}</a> - {title} '
                 '{action} by <b>{user}</b>').format(
             user=data['actor']['username'],
             number=data['pullrequest']['pull_request_id'],
             url=data['pullrequest']['url'],
             title=h.html_escape(data['pullrequest']['title']),
             action=action
         )
 
     def format_repo_push_event(self, data):
         branches_commits = self.aggregate_branch_data(
             data['push']['branches'], data['push']['commits'])
 
         result = render_with_traceback(
             REPO_PUSH_TEMPLATE,
             data=data,
             branches_commits=branches_commits,
         )
         return result
 
     def format_repo_create_event(self, data):
         return '<a href="{}">{}</a> ({}) repository created by <b>{}</b>'.format(
             data['repo']['url'],
             h.html_escape(data['repo']['repo_name']),
             data['repo']['repo_type'],
             data['actor']['username'],
         )
 
 
 @async_task(ignore_result=True, base=RequestContextTask)
 def post_text_to_hipchat(settings, text):
     log.debug('sending %s to hipchat %s', text, settings['server_url'])
     json_message = {
         "message": text,
         "color": settings.get('color', 'yellow'),
         "notify": settings.get('notify', False),
     }
     req_session = requests_retry_call()
     resp = req_session.post(settings['server_url'], json=json_message, timeout=60)
     resp.raise_for_status()  # raise exception on a failed request
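Delivery on the Hipchat side reduces to a single JSON POST against the room-notification endpoint. A hedged standalone equivalent (the URL and token are placeholders, and plain requests stands in for the retrying session that requests_retry_call returns):

import requests

settings = {
    # placeholder endpoint and token; real values come from the settings form
    'server_url': 'https://example.hipchat.com/v2/room/42/notification?auth_token=TOKEN',
    'color': 'green',
    'notify': True,
}
json_message = {
    'message': '<b>jane</b> caused a <b>repo-push</b> event',
    'color': settings.get('color', 'yellow'),   # same defaults as the task above
    'notify': settings.get('notify', False),
}
resp = requests.post(settings['server_url'], json=json_message, timeout=60)
resp.raise_for_status()  # surface HTTP errors instead of failing silently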
@@ -1,354 +1,354 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2012-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

-from __future__ import unicode_literals
import re
import time
import textwrap
import logging

import deform
import requests
import colander
from mako.template import Template

from rhodecode import events
from rhodecode.model.validation_schema.widgets import CheckboxChoiceWidgetDesc
from rhodecode.translation import _
from rhodecode.lib import helpers as h
from rhodecode.lib.celerylib import run_task, async_task, RequestContextTask
from rhodecode.lib.colander_utils import strip_whitespace
from rhodecode.integrations.types.base import (
    IntegrationTypeBase, CommitParsingDataHandler, render_with_traceback,
    requests_retry_call)

log = logging.getLogger(__name__)


def html_to_slack_links(message):
    return re.compile(r'<a .*?href=["\'](.+?)".*?>(.+?)</a>').sub(
        r'<\1|\2>', message)

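# Editor's illustration (not part of this changeset): html_to_slack_links
# rewrites HTML anchors into Slack's `<url|label>` link markup. With a
# made-up commit URL:
#
#   html_to_slack_links(
#       'Fixed in <a href="http://rc.local/repo/changeset/abc123">abc123</a>')
#   # -> 'Fixed in <http://rc.local/repo/changeset/abc123|abc123>'
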
REPO_PUSH_TEMPLATE = Template('''
<%
    def branch_text(branch):
        if branch:
            return 'on branch: <{}|{}>'.format(branch_commits['branch']['url'], branch_commits['branch']['name'])
        else:
            ## case for SVN no branch push...
            return 'to trunk'
%> \
% for branch, branch_commits in branches_commits.items():
${len(branch_commits['commits'])} ${'commit' if len(branch_commits['commits']) == 1 else 'commits'} ${branch_text(branch)}
% for commit in branch_commits['commits']:
`<${commit['url']}|${commit['short_id']}>` - ${commit['message_html']|html_to_slack_links}
% endfor
% endfor
''')

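# Editor's illustration (sketch, values made up): REPO_PUSH_TEMPLATE is a Mako
# template. `branches_commits` maps each branch name to its URL/name data plus
# commit dicts, and `html_to_slack_links` has to be present in the render
# context so the `|html_to_slack_links` filter above resolves:
#
#   sample = {'default': {
#       'branch': {'name': 'default', 'url': 'http://rc.local/repo?branch=default'},
#       'commits': [{'url': 'http://rc.local/repo/changeset/abc123',
#                    'short_id': 'abc123', 'message_html': 'fix typo'}]}}
#   REPO_PUSH_TEMPLATE.render(branches_commits=sample,
#                             html_to_slack_links=html_to_slack_links)
#   # renders roughly: "1 commit on branch: <http://rc.local/repo?branch=default|default>"
#   # followed by one "`<url|short_id>` - message" line per commit
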
class SlackSettingsSchema(colander.Schema):
    service = colander.SchemaNode(
        colander.String(),
        title=_('Slack service URL'),
        description=h.literal(_(
            'This can be set up at the '
            '<a href="https://my.slack.com/services/new/incoming-webhook/">'
            'slack app manager</a>')),
        default='',
        preparer=strip_whitespace,
        validator=colander.url,
        widget=deform.widget.TextInputWidget(
            placeholder='https://hooks.slack.com/services/...',
        ),
    )
    username = colander.SchemaNode(
        colander.String(),
        title=_('Username'),
        description=_('Username to show notifications coming from.'),
        missing='Rhodecode',
        preparer=strip_whitespace,
        widget=deform.widget.TextInputWidget(
            placeholder='Rhodecode'
        ),
    )
    channel = colander.SchemaNode(
        colander.String(),
        title=_('Channel'),
        description=_('Channel to send notifications to.'),
        missing='',
        preparer=strip_whitespace,
        widget=deform.widget.TextInputWidget(
            placeholder='#general'
        ),
    )
    icon_emoji = colander.SchemaNode(
        colander.String(),
        title=_('Emoji'),
        description=_('Emoji to use, e.g. :studio_microphone:'),
        missing='',
        preparer=strip_whitespace,
        widget=deform.widget.TextInputWidget(
            placeholder=':studio_microphone:'
        ),
    )

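# Editor's illustration (made-up values): integration settings are validated by
# deserializing user input through this colander schema, e.g.:
#
#   schema = SlackSettingsSchema()
#   settings = schema.deserialize({
#       'service': 'https://hooks.slack.com/services/T000/B000/XXXX',
#       'username': 'Rhodecode',
#       'channel': '#general',
#       'icon_emoji': ':studio_microphone:',
#   })
#   # -> dict of validated values, with whitespace stripped by the preparers
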
class SlackIntegrationType(IntegrationTypeBase, CommitParsingDataHandler):
    key = 'slack'
    display_name = _('Slack')
    description = _('Send events such as repo pushes and pull requests to '
                    'your Slack channel.')

    @classmethod
    def icon(cls):
        return '''<?xml version="1.0" encoding="UTF-8" standalone="no"?><svg viewBox="0 0 256 256" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid"><g><path d="M165.963541,15.8384262 C162.07318,3.86308197 149.212328,-2.69009836 137.239082,1.20236066 C125.263738,5.09272131 118.710557,17.9535738 122.603016,29.9268197 L181.550164,211.292328 C185.597902,222.478689 197.682361,228.765377 209.282098,225.426885 C221.381246,221.943607 228.756984,209.093246 224.896,197.21023 C224.749115,196.756984 165.963541,15.8384262 165.963541,15.8384262" fill="#DFA22F"></path><path d="M74.6260984,45.515541 C70.7336393,33.5422951 57.8727869,26.9891148 45.899541,30.8794754 C33.9241967,34.7698361 27.3710164,47.6306885 31.2634754,59.6060328 L90.210623,240.971541 C94.2583607,252.157902 106.34282,258.44459 117.942557,255.104 C130.041705,251.62282 137.417443,238.772459 133.556459,226.887344 C133.409574,226.436197 74.6260984,45.515541 74.6260984,45.515541" fill="#3CB187"></path><path d="M240.161574,166.045377 C252.136918,162.155016 258.688,149.294164 254.797639,137.31882 C250.907279,125.345574 238.046426,118.792393 226.07318,122.682754 L44.7076721,181.632 C33.5213115,185.677639 27.234623,197.762098 30.5731148,209.361836 C34.0563934,221.460984 46.9067541,228.836721 58.7897705,224.975738 C59.2430164,224.828852 240.161574,166.045377 240.161574,166.045377" fill="#CE1E5B"></path><path d="M82.507541,217.270557 C94.312918,213.434754 109.528131,208.491016 125.855475,203.186361 C122.019672,191.380984 117.075934,176.163672 111.76918,159.83423 L68.4191475,173.924721 L82.507541,217.270557" fill="#392538"></path><path d="M173.847082,187.591344 C190.235279,182.267803 205.467279,177.31777 217.195016,173.507148 C213.359213,161.70177 208.413377,146.480262 203.106623,130.146623 L159.75659,144.237115 L173.847082,187.591344" fill="#BB242A"></path><path d="M210.484459,74.7058361 C222.457705,70.8154754 229.010885,57.954623 225.120525,45.9792787 C221.230164,34.0060328 208.369311,27.4528525 196.393967,31.3432131 L15.028459,90.292459 C3.84209836,94.3380984 -2.44459016,106.422557 0.896,118.022295 C4.37718033,130.121443 17.227541,137.49718 29.1126557,133.636197 C29.5638033,133.489311 210.484459,74.7058361 210.484459,74.7058361" fill="#72C5CD"></path><path d="M52.8220328,125.933115 C64.6274098,122.097311 79.8468197,117.151475 96.1762623,111.84682 C90.8527213,95.4565246 85.9026885,80.2245246 82.0920656,68.4946885 L38.731541,82.5872787 L52.8220328,125.933115" fill="#248C73"></path><path d="M144.159475,96.256 C160.551869,90.9303607 175.785967,85.9803279 187.515803,82.1676066 C182.190164,65.7752131 177.240131,50.5390164 173.42741,38.807082 L130.068984,52.8996721 L144.159475,96.256" fill="#62803A"></path></g></svg>'''

    valid_events = [
        events.PullRequestCloseEvent,
        events.PullRequestMergeEvent,
        events.PullRequestUpdateEvent,
        events.PullRequestCommentEvent,
        events.PullRequestReviewEvent,
        events.PullRequestCreateEvent,
        events.RepoPushEvent,
        events.RepoCreateEvent,
    ]

    def send_event(self, event):
        log.debug('handling event %s with integration %s', event.name, self)

        if event.__class__ not in self.valid_events:
            log.debug('event %r not present in valid event list (%s)', event, self.valid_events)
            return

        if not self.event_enabled(event):
            return

        data = event.as_dict()

        # defaults
        title = '*%s* caused a *%s* event' % (
            data['actor']['username'], event.name)
        text = '*%s* caused a *%s* event' % (
            data['actor']['username'], event.name)
        fields = None
        overrides = None

        if isinstance(event, events.PullRequestCommentEvent):
            (title, text, fields, overrides) \
                = self.format_pull_request_comment_event(event, data)
        elif isinstance(event, events.PullRequestCommentEditEvent):
            (title, text, fields, overrides) \
                = self.format_pull_request_comment_event(event, data)
        elif isinstance(event, events.PullRequestReviewEvent):
            title, text = self.format_pull_request_review_event(event, data)
        elif isinstance(event, events.PullRequestEvent):
            title, text = self.format_pull_request_event(event, data)
        elif isinstance(event, events.RepoPushEvent):
            title, text = self.format_repo_push_event(data)
        elif isinstance(event, events.RepoCreateEvent):
            title, text = self.format_repo_create_event(data)
        else:
            log.error('unhandled event type: %r', event)

        run_task(post_text_to_slack, self.settings, title, text, fields, overrides)

    def settings_schema(self):
        schema = SlackSettingsSchema()
        schema.add(colander.SchemaNode(
            colander.Set(),
            widget=CheckboxChoiceWidgetDesc(
                values=sorted(
                    [(e.name, e.display_name, e.description) for e in self.valid_events]
                ),
            ),
            description="List of events activated for this integration",
            name='events'
        ))

        return schema

    def format_pull_request_comment_event(self, event, data):
        comment_text = data['comment']['text']
        if len(comment_text) > 200:
            comment_text = '<{comment_url}|{comment_text}...>'.format(
                comment_text=comment_text[:200],
                comment_url=data['comment']['url'],
            )

        fields = None
        overrides = None
        status_text = None

        if data['comment']['status']:
            status_color = {
                'approved': '#0ac878',
                'rejected': '#e85e4d'}.get(data['comment']['status'])

            if status_color:
                overrides = {"color": status_color}

            status_text = data['comment']['status']

        if data['comment']['file']:
            fields = [
                {
                    "title": "file",
                    "value": data['comment']['file']
                },
                {
                    "title": "line",
                    "value": data['comment']['line']
                }
            ]

        template = Template(textwrap.dedent(r'''
            *${data['actor']['username']}* left ${data['comment']['type']} on pull request <${data['pullrequest']['url']}|#${data['pullrequest']['pull_request_id']}>:
        '''))
        title = render_with_traceback(
            template, data=data, comment=event.comment)

        template = Template(textwrap.dedent(r'''
            *pull request title*: ${pr_title}
            % if status_text:
            *submitted status*: `${status_text}`
            % endif
            >>> ${comment_text}
        '''))
        text = render_with_traceback(
            template,
            comment_text=comment_text,
            pr_title=data['pullrequest']['title'],
            status_text=status_text)

        return title, text, fields, overrides

    def format_pull_request_review_event(self, event, data):
        template = Template(textwrap.dedent(r'''
            *${data['actor']['username']}* changed status of pull request <${data['pullrequest']['url']}|#${data['pullrequest']['pull_request_id']} to `${data['pullrequest']['status']}`>:
        '''))
        title = render_with_traceback(template, data=data)

        template = Template(textwrap.dedent(r'''
            *pull request title*: ${pr_title}
        '''))
        text = render_with_traceback(
            template,
            pr_title=data['pullrequest']['title'])

        return title, text

    def format_pull_request_event(self, event, data):
        action = {
            events.PullRequestCloseEvent: 'closed',
            events.PullRequestMergeEvent: 'merged',
            events.PullRequestUpdateEvent: 'updated',
            events.PullRequestCreateEvent: 'created',
        }.get(event.__class__, str(event.__class__))

        template = Template(textwrap.dedent(r'''
            *${data['actor']['username']}* `${action}` pull request <${data['pullrequest']['url']}|#${data['pullrequest']['pull_request_id']}>:
        '''))
        title = render_with_traceback(template, data=data, action=action)

        template = Template(textwrap.dedent(r'''
            *pull request title*: ${pr_title}
            %if data['pullrequest']['commits']:
            *commits*: ${len(data['pullrequest']['commits'])}
            %endif
        '''))
        text = render_with_traceback(
            template,
            pr_title=data['pullrequest']['title'],
            data=data)

        return title, text

    def format_repo_push_event(self, data):
        branches_commits = self.aggregate_branch_data(
            data['push']['branches'], data['push']['commits'])

        template = Template(r'''
*${data['actor']['username']}* pushed to repo <${data['repo']['url']}|${data['repo']['repo_name']}>:
''')
        title = render_with_traceback(template, data=data)

        text = render_with_traceback(
            REPO_PUSH_TEMPLATE,
            data=data,
            branches_commits=branches_commits,
            html_to_slack_links=html_to_slack_links,
        )

        return title, text

    def format_repo_create_event(self, data):
        template = Template(r'''
*${data['actor']['username']}* created new repository ${data['repo']['repo_name']}:
''')
        title = render_with_traceback(template, data=data)

        template = Template(textwrap.dedent(r'''
            repo_url: ${data['repo']['url']}
            repo_type: ${data['repo']['repo_type']}
        '''))
        text = render_with_traceback(template, data=data)

        return title, text


@async_task(ignore_result=True, base=RequestContextTask)
def post_text_to_slack(settings, title, text, fields=None, overrides=None):
    log.debug('sending %s (%s) to slack %s', title, text, settings['service'])

    fields = fields or []
    overrides = overrides or {}

    message_data = {
        "fallback": text,
        "color": "#427cc9",
        "pretext": title,
        #"author_name": "Bobby Tables",
        #"author_link": "http://flickr.com/bobby/",
        #"author_icon": "http://flickr.com/icons/bobby.jpg",
        #"title": "Slack API Documentation",
        #"title_link": "https://api.slack.com/",
        "text": text,
        "fields": fields,
        #"image_url": "http://my-website.com/path/to/image.jpg",
        #"thumb_url": "http://example.com/path/to/thumb.png",
        "footer": "RhodeCode",
        #"footer_icon": "",
        "ts": time.time(),
        "mrkdwn_in": ["pretext", "text"]
    }
    message_data.update(overrides)
    json_message = {
        "icon_emoji": settings.get('icon_emoji', ':studio_microphone:'),
        "channel": settings.get('channel', ''),
        "username": settings.get('username', 'Rhodecode'),
        "attachments": [message_data]
    }
    req_session = requests_retry_call()
    resp = req_session.post(settings['service'], json=json_message, timeout=60)
    resp.raise_for_status()  # raise exception on a failed request
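
# Editor's illustration (made-up settings; assumes the @async_task wrapper
# keeps the function directly callable, as Celery task decorators do):
#
#   settings = {'service': 'https://hooks.slack.com/services/T000/B000/XXXX',
#               'channel': '#general', 'username': 'Rhodecode',
#               'icon_emoji': ':studio_microphone:'}
#   post_text_to_slack(settings, '*admin* caused a *repo-push* event',
#                      '1 commit on branch: default')
#   # POSTs a single-attachment Slack payload to the incoming-webhook URL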
@@ -1,266 +1,266 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2012-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

-from __future__ import unicode_literals

import deform.widget
import logging
import colander

import rhodecode
from rhodecode import events
from rhodecode.lib.colander_utils import strip_whitespace
from rhodecode.model.validation_schema.widgets import CheckboxChoiceWidgetDesc
from rhodecode.translation import _
from rhodecode.integrations.types.base import (
    IntegrationTypeBase, get_auth, get_web_token, get_url_vars,
    WebhookDataHandler, WEBHOOK_URL_VARS, requests_retry_call)
from rhodecode.lib.celerylib import run_task, async_task, RequestContextTask
from rhodecode.model.validation_schema import widgets

log = logging.getLogger(__name__)


# updating this requires updating the `common_vars` passed in the url-calling func

URL_VARS = get_url_vars(WEBHOOK_URL_VARS)

class WebhookSettingsSchema(colander.Schema):
    url = colander.SchemaNode(
        colander.String(),
        title=_('Webhook URL'),
        description=_(
            'URL to which the Webhook should submit data. Some variables, '
            'like ${branch} or ${commit_id}, expand to multiple values; in '
            'such cases the Webhook is called once for each unique object '
            'in the data.'),
        missing=colander.required,
        required=True,
        preparer=strip_whitespace,
        validator=colander.url,
        widget=widgets.CodeMirrorWidget(
            help_block_collapsable_name='Show url variables',
            help_block_collapsable=(
                'E.g. http://my-serv.com/trigger_job/${{event_name}}'
                '?PR_ID=${{pull_request_id}}'
                '\nFull list of vars:\n{}'.format(URL_VARS)),
            codemirror_mode='text',
            codemirror_options='{"lineNumbers": false, "lineWrapping": true}'),
    )
    secret_token = colander.SchemaNode(
        colander.String(),
        title=_('Secret Token'),
        description=_('Optional string used to validate received payloads. '
                      'It will be sent together with the event data in JSON.'),
        default='',
        missing='',
        widget=deform.widget.TextInputWidget(
            placeholder='e.g. secret_token'
        ),
    )
    username = colander.SchemaNode(
        colander.String(),
        title=_('Username'),
        description=_('Optional username to authenticate the call.'),
        default='',
        missing='',
        widget=deform.widget.TextInputWidget(
            placeholder='e.g. admin'
        ),
    )
    password = colander.SchemaNode(
        colander.String(),
        title=_('Password'),
        description=_('Optional password to authenticate the call.'),
        default='',
        missing='',
        widget=deform.widget.PasswordWidget(
            placeholder='e.g. secret.',
            redisplay=True,
        ),
    )
    custom_header_key = colander.SchemaNode(
        colander.String(),
        title=_('Custom Header Key'),
        description=_('Custom Header name to be set when calling endpoint.'),
        default='',
        missing='',
        widget=deform.widget.TextInputWidget(
            placeholder='e.g: Authorization'
        ),
    )
    custom_header_val = colander.SchemaNode(
        colander.String(),
        title=_('Custom Header Value'),
        description=_('Custom Header value to be set when calling endpoint.'),
        default='',
        missing='',
        widget=deform.widget.TextInputWidget(
            placeholder='e.g. Basic XxXxXx'
        ),
    )
    method_type = colander.SchemaNode(
        colander.String(),
        title=_('Call Method'),
        description=_('Select an HTTP method to use when calling the Webhook.'),
        default='post',
        missing='',
        widget=deform.widget.RadioChoiceWidget(
            values=[('get', 'GET'), ('post', 'POST'), ('put', 'PUT')],
            inline=True
        ),
    )

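# Editor's illustration (values made up): a settings dict accepted by this
# schema; variables such as ${event_name} in the URL are expanded per call by
# WebhookDataHandler in send_event() below:
#
#   settings = {
#       'url': 'http://my-serv.com/trigger_job/${event_name}',
#       'secret_token': 'secret_token',
#       'username': '',
#       'password': '',
#       'custom_header_key': 'Authorization',
#       'custom_header_val': 'Basic XxXxXx',
#       'method_type': 'post',
#   }
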
class WebhookIntegrationType(IntegrationTypeBase):
    key = 'webhook'
    display_name = _('Webhook')
    description = _('Send JSON data to a URL endpoint.')

    @classmethod
    def icon(cls):
        return '''<?xml version="1.0" encoding="UTF-8" standalone="no"?><svg viewBox="0 0 256 239" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid"><g><path d="M119.540432,100.502743 C108.930124,118.338815 98.7646301,135.611455 88.3876025,152.753617 C85.7226696,157.154315 84.4040417,160.738531 86.5332204,166.333309 C92.4107024,181.787152 84.1193605,196.825836 68.5350381,200.908244 C53.8383677,204.759349 39.5192953,195.099955 36.6032893,179.365384 C34.0194114,165.437749 44.8274148,151.78491 60.1824106,149.608284 C61.4694072,149.424428 62.7821041,149.402681 64.944891,149.240571 C72.469175,136.623655 80.1773157,123.700312 88.3025935,110.073173 C73.611854,95.4654658 64.8677898,78.3885437 66.803227,57.2292132 C68.1712787,42.2715849 74.0527146,29.3462646 84.8033863,18.7517722 C105.393354,-1.53572199 136.805164,-4.82141828 161.048542,10.7510424 C184.333097,25.7086706 194.996783,54.8450075 185.906752,79.7822957 C179.052655,77.9239597 172.151111,76.049808 164.563565,73.9917997 C167.418285,60.1274266 165.306899,47.6765751 155.95591,37.0109123 C149.777932,29.9690049 141.850349,26.2780332 132.835442,24.9178894 C114.764113,22.1877169 97.0209573,33.7983633 91.7563309,51.5355878 C85.7800012,71.6669027 94.8245623,88.1111998 119.540432,100.502743 L119.540432,100.502743 Z" fill="#C73A63"></path><path d="M149.841194,79.4106285 C157.316054,92.5969067 164.905578,105.982857 172.427885,119.246236 C210.44865,107.483365 239.114472,128.530009 249.398582,151.063322 C261.81978,178.282014 253.328765,210.520191 228.933162,227.312431 C203.893073,244.551464 172.226236,241.605803 150.040866,219.46195 C155.694953,214.729124 161.376716,209.974552 167.44794,204.895759 C189.360489,219.088306 208.525074,218.420096 222.753207,201.614016 C234.885769,187.277151 234.622834,165.900356 222.138374,151.863988 C207.730339,135.66681 188.431321,135.172572 165.103273,150.721309 C155.426087,133.553447 145.58086,116.521995 136.210101,99.2295848 C133.05093,93.4015266 129.561608,90.0209366 122.440622,88.7873178 C110.547271,86.7253555 102.868785,76.5124151 102.408155,65.0698097 C101.955433,53.7537294 108.621719,43.5249733 119.04224,39.5394355 C129.363912,35.5914599 141.476705,38.7783085 148.419765,47.554004 C154.093621,54.7244134 155.896602,62.7943365 152.911402,71.6372484 C152.081082,74.1025091 151.00562,76.4886916 149.841194,79.4106285 L149.841194,79.4106285 Z" fill="#4B4B4B"></path><path d="M167.706921,187.209935 L121.936499,187.209935 C117.54964,205.253587 108.074103,219.821756 91.7464461,229.085759 C79.0544063,236.285822 65.3738898,238.72736 50.8136292,236.376762 C24.0061432,232.053165 2.08568567,207.920497 0.156179306,180.745298 C-2.02835403,149.962159 19.1309765,122.599149 47.3341915,116.452801 C49.2814904,123.524363 51.2485589,130.663141 53.1958579,137.716911 C27.3195169,150.919004 18.3639187,167.553089 25.6054984,188.352614 C31.9811726,206.657224 50.0900643,216.690262 69.7528413,212.809503 C89.8327554,208.847688 99.9567329,192.160226 98.7211371,165.37844 C117.75722,165.37844 136.809118,165.180745 155.847178,165.475311 C163.280522,165.591951 169.019617,164.820939 174.620326,158.267339 C183.840836,147.48306 200.811003,148.455721 210.741239,158.640984 C220.88894,169.049642 220.402609,185.79839 209.663799,195.768166 C199.302587,205.38802 182.933414,204.874012 173.240413,194.508846 C171.247644,192.37176 169.677943,189.835329 167.706921,187.209935 L167.706921,187.209935 Z" fill="#4A4A4A"></path></g></svg>'''

    valid_events = [
        events.PullRequestCloseEvent,
        events.PullRequestMergeEvent,
        events.PullRequestUpdateEvent,
        events.PullRequestCommentEvent,
        events.PullRequestCommentEditEvent,
        events.PullRequestReviewEvent,
        events.PullRequestCreateEvent,
        events.RepoPushEvent,
        events.RepoCreateEvent,
        events.RepoCommitCommentEvent,
        events.RepoCommitCommentEditEvent,
    ]

    def settings_schema(self):
        schema = WebhookSettingsSchema()
        schema.add(colander.SchemaNode(
            colander.Set(),
            widget=CheckboxChoiceWidgetDesc(
                values=sorted(
                    [(e.name, e.display_name, e.description) for e in self.valid_events]
                ),
            ),
            description="List of events activated for this integration",
            name='events'
        ))
        return schema

    def send_event(self, event):
        log.debug('handling event %s with integration %s', event.name, self)

        if event.__class__ not in self.valid_events:
            log.debug('event %r not present in valid event list (%s)', event, self.valid_events)
            return

        if not self.event_enabled(event):
            return

        data = event.as_dict()
        template_url = self.settings['url']

        headers = {}
        head_key = self.settings.get('custom_header_key')
        head_val = self.settings.get('custom_header_val')
        if head_key and head_val:
            headers = {head_key: head_val}

        handler = WebhookDataHandler(template_url, headers)

        url_calls = handler(event, data)
        log.debug('Webhook: calling following urls: %s', [x[0] for x in url_calls])

        run_task(post_to_webhook, url_calls, self.settings)

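# Editor's note: WebhookDataHandler returns `url_calls` as a list of
# (url, headers, data) tuples -- one entry per expanded URL -- which is exactly
# how post_to_webhook() below unpacks them. Illustrative shape (made-up values):
#
#   url_calls = [('http://my-serv.com/trigger_job/repo-push',
#                 {'Authorization': 'Basic XxXxXx'},
#                 {'name': 'repo-push'})]
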
@async_task(ignore_result=True, base=RequestContextTask)
def post_to_webhook(url_calls, settings):
    """
    Example data::

        {'actor': {'user_id': 2, 'username': u'admin'},
         'actor_ip': u'192.168.157.1',
         'name': 'repo-push',
         'push': {'branches': [{'name': u'default',
                                'url': 'http://rc.local:8080/hg-repo/changelog?branch=default'}],
                  'commits': [{'author': u'Marcin Kuzminski <marcin@rhodecode.com>',
                               'branch': u'default',
                               'date': datetime.datetime(2017, 11, 30, 12, 59, 48),
                               'issues': [],
                               'mentions': [],
                               'message': u'commit Thu 30 Nov 2017 13:59:48 CET',
                               'message_html': u'commit Thu 30 Nov 2017 13:59:48 CET',
                               'message_html_title': u'commit Thu 30 Nov 2017 13:59:48 CET',
                               'parents': [{'raw_id': '431b772a5353dad9974b810dd3707d79e3a7f6e0'}],
                               'permalink_url': u'http://rc.local:8080/_7/changeset/a815cc738b9651eb5ffbcfb1ce6ccd7c701a5ddf',
                               'raw_id': 'a815cc738b9651eb5ffbcfb1ce6ccd7c701a5ddf',
                               'refs': {'bookmarks': [],
                                        'branches': [u'default'],
                                        'tags': [u'tip']},
                               'reviewers': [],
                               'revision': 9L,
                               'short_id': 'a815cc738b96',
                               'url': u'http://rc.local:8080/hg-repo/changeset/a815cc738b9651eb5ffbcfb1ce6ccd7c701a5ddf'}],
                  'issues': {}},
         'repo': {'extra_fields': '',
                  'permalink_url': u'http://rc.local:8080/_7',
                  'repo_id': 7,
                  'repo_name': u'hg-repo',
                  'repo_type': u'hg',
                  'url': u'http://rc.local:8080/hg-repo'},
         'server_url': u'http://rc.local:8080',
         'utc_timestamp': datetime.datetime(2017, 11, 30, 13, 0, 1, 569276)
        }
    """

    call_headers = {
        'User-Agent': 'RhodeCode-webhook-caller/{}'.format(rhodecode.__version__)
    }  # updated below with custom ones, allows override

    auth = get_auth(settings)
    token = get_web_token(settings)

    for url, headers, data in url_calls:
        req_session = requests_retry_call()

        method = settings.get('method_type') or 'post'
        call_method = getattr(req_session, method)

        headers = headers or {}
        call_headers.update(headers)

        log.debug('calling Webhook with method: %s, and auth:%s', call_method, auth)
        if settings.get('log_data'):
            log.debug('calling webhook with data: %s', data)
        resp = call_method(url, json={
            'token': token,
            'event': data
        }, headers=call_headers, auth=auth, timeout=60)
        log.debug('Got Webhook response: %s', resp)

        try:
            resp.raise_for_status()  # raise exception on a failed request
        except Exception:
            log.error(resp.text)
            raise
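
# Editor's illustration (hypothetical receiver, not part of RhodeCode): each
# call POSTs JSON of the form {'token': <secret_token>, 'event': <event dict>},
# so a receiving endpoint can check the token before trusting the payload:
#
#   payload = parsed_request_json  # however the receiving framework exposes it
#   if payload.get('token') != EXPECTED_SECRET_TOKEN:
#       raise PermissionError('bad webhook token')
#   event = payload['event']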
@@ -1,47 +1,47 @@
# -*- coding: utf-8 -*-
"""
|flask| Extras
--------------

Utilities you can use when using this library with the |flask|_ framework.

Thanks to `Mark Steve Samson <http://marksteve.com>`_.
"""

-from __future__ import absolute_import
from functools import wraps

from authomatic.adapters import WerkzeugAdapter
from authomatic import Authomatic
from flask import make_response, request, session


class FlaskAuthomatic(Authomatic):
    """
    Flask Plugin for authomatic support.
    """

    result = None

    def login(self, *login_args, **login_kwargs):
        """
        Decorator for Flask view functions.
        """

        def decorator(f):
            @wraps(f)
            def decorated(*args, **kwargs):
                self.response = make_response()
                adapter = WerkzeugAdapter(request, self.response)
                login_kwargs.setdefault('session', session)
                login_kwargs.setdefault('session_saver', self.session_saver)
                self.result = super(FlaskAuthomatic, self).login(
                    adapter,
                    *login_args,
                    **login_kwargs)
                return f(*args, **kwargs)
            return decorated
        return decorator

    def session_saver(self):
        session.modified = True
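
# Editor's illustration (usage sketch following the authomatic documentation
# pattern; CONFIG and the 'fb' provider name are assumptions):
#
#   from flask import Flask
#
#   app = Flask(__name__)
#   fa = FlaskAuthomatic(config=CONFIG, secret='some random secret string')
#
#   @app.route('/login/fb/')
#   @fa.login('fb')
#   def login():
#       if fa.result:
#           if fa.result.error:
#               return fa.result.error.message
#           elif fa.result.user:
#               return 'Hi {0}'.format(fa.result.user.name)
#       return fa.response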
@@ -1,156 +1,156 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # We need absolute import to import from openid library which has the same
3 # We need absolute import to import from openid library which has the same
4 # name as this module
4 # name as this module
5 from __future__ import absolute_import
5
6 import logging
6 import logging
7 import datetime
7 import datetime
8
8
9 from google.appengine.ext import ndb
9 from google.appengine.ext import ndb
10 import openid.store.interface
10 import openid.store.interface
11
11
12
12
13 class NDBOpenIDStore(ndb.Expando, openid.store.interface.OpenIDStore):
13 class NDBOpenIDStore(ndb.Expando, openid.store.interface.OpenIDStore):
14 """
14 """
15 |gae| `NDB <https://developers.google.com/appengine/docs/python/ndb/>`_
15 |gae| `NDB <https://developers.google.com/appengine/docs/python/ndb/>`_
16 based implementation of the :class:`openid.store.interface.OpenIDStore`
16 based implementation of the :class:`openid.store.interface.OpenIDStore`
17 interface of the `python-openid`_ library.
17 interface of the `python-openid`_ library.
18 """
18 """
19
19
20 serialized = ndb.StringProperty()
20 serialized = ndb.StringProperty()
21 expiration_date = ndb.DateTimeProperty()
21 expiration_date = ndb.DateTimeProperty()
22 # we need issued to sort by most recently issued
22 # we need issued to sort by most recently issued
23 issued = ndb.IntegerProperty()
23 issued = ndb.IntegerProperty()
24
24
25 @staticmethod
25 @staticmethod
26 def _log(*args, **kwargs):
26 def _log(*args, **kwargs):
27 pass
27 pass
28
28
29 @classmethod
29 @classmethod
30 def storeAssociation(cls, server_url, association):
30 def storeAssociation(cls, server_url, association):
31 # store an entity with key = server_url
31 # store an entity with key = server_url
32
32
33 issued = datetime.datetime.fromtimestamp(association.issued)
33 issued = datetime.datetime.fromtimestamp(association.issued)
34 lifetime = datetime.timedelta(0, association.lifetime)
34 lifetime = datetime.timedelta(0, association.lifetime)
35
35
36 expiration_date = issued + lifetime
36 expiration_date = issued + lifetime
37 entity = cls.get_or_insert(
37 entity = cls.get_or_insert(
38 association.handle, parent=ndb.Key(
38 association.handle, parent=ndb.Key(
39 'ServerUrl', server_url))
39 'ServerUrl', server_url))
40
40
41 entity.serialized = association.serialize()
41 entity.serialized = association.serialize()
42 entity.expiration_date = expiration_date
42 entity.expiration_date = expiration_date
43 entity.issued = association.issued
43 entity.issued = association.issued
44
44
45 cls._log(
45 cls._log(
46 logging.DEBUG,
46 logging.DEBUG,
47 u'NDBOpenIDStore: Putting OpenID association to datastore.')
47 u'NDBOpenIDStore: Putting OpenID association to datastore.')
48
48
49 entity.put()
49 entity.put()
50
50
51 @classmethod
51 @classmethod
52 def cleanupAssociations(cls):
52 def cleanupAssociations(cls):
53 # query for all expired
53 # query for all expired
54 cls._log(
54 cls._log(
55 logging.DEBUG,
55 logging.DEBUG,
56 u'NDBOpenIDStore: Querying datastore for OpenID associations.')
56 u'NDBOpenIDStore: Querying datastore for OpenID associations.')
57 query = cls.query(cls.expiration_date <= datetime.datetime.now())
57 query = cls.query(cls.expiration_date <= datetime.datetime.now())
58
58
59 # fetch keys only
59 # fetch keys only
60 expired = query.fetch(keys_only=True)
60 expired = query.fetch(keys_only=True)
61
61
62 # delete all expired
62 # delete all expired
63 cls._log(
63 cls._log(
64 logging.DEBUG,
64 logging.DEBUG,
65 u'NDBOpenIDStore: Deleting expired OpenID associations from datastore.')
65 u'NDBOpenIDStore: Deleting expired OpenID associations from datastore.')
66 ndb.delete_multi(expired)
66 ndb.delete_multi(expired)
67
67
68 return len(expired)
68 return len(expired)
69
69
70 @classmethod
70 @classmethod
71 def getAssociation(cls, server_url, handle=None):
71 def getAssociation(cls, server_url, handle=None):
72 cls.cleanupAssociations()
72 cls.cleanupAssociations()
73
73
74 if handle:
74 if handle:
75 key = ndb.Key('ServerUrl', server_url, cls, handle)
75 key = ndb.Key('ServerUrl', server_url, cls, handle)
76 cls._log(
76 cls._log(
77 logging.DEBUG,
77 logging.DEBUG,
78 u'NDBOpenIDStore: Getting OpenID association from datastore by key.')
78 u'NDBOpenIDStore: Getting OpenID association from datastore by key.')
79 entity = key.get()
79 entity = key.get()
80 else:
80 else:
81 # return most recently issued association
81 # return most recently issued association
82 cls._log(
82 cls._log(
83 logging.DEBUG,
83 logging.DEBUG,
84 u'NDBOpenIDStore: Querying datastore for OpenID associations by ancestor.')
84 u'NDBOpenIDStore: Querying datastore for OpenID associations by ancestor.')
85 entity = cls.query(ancestor=ndb.Key(
85 entity = cls.query(ancestor=ndb.Key(
86 'ServerUrl', server_url)).order(-cls.issued).get()
86 'ServerUrl', server_url)).order(-cls.issued).get()
87
87
88 if entity and entity.serialized:
88 if entity and entity.serialized:
89 return openid.association.Association.deserialize(
89 return openid.association.Association.deserialize(
90 entity.serialized)
90 entity.serialized)
91
91
92 @classmethod
92 @classmethod
93 def removeAssociation(cls, server_url, handle):
93 def removeAssociation(cls, server_url, handle):
94 key = ndb.Key('ServerUrl', server_url, cls, handle)
94 key = ndb.Key('ServerUrl', server_url, cls, handle)
95 cls._log(
95 cls._log(
96 logging.DEBUG,
96 logging.DEBUG,
97 u'NDBOpenIDStore: Getting OpenID association from datastore by key.')
97 u'NDBOpenIDStore: Getting OpenID association from datastore by key.')
98 if key.get():
98 if key.get():
99 cls._log(
99 cls._log(
100 logging.DEBUG,
100 logging.DEBUG,
101 u'NDBOpenIDStore: Deleting OpenID association from datastore.')
101 u'NDBOpenIDStore: Deleting OpenID association from datastore.')
102 key.delete()
102 key.delete()
103 return True
103 return True
104
104
105 @classmethod
105 @classmethod
106 def useNonce(cls, server_url, timestamp, salt):
106 def useNonce(cls, server_url, timestamp, salt):
107
107
108 # check whether there is already an entity with the same ancestor path
108 # check whether there is already an entity with the same ancestor path
109 # in the datastore
109 # in the datastore
110 key = ndb.Key(
110 key = ndb.Key(
111 'ServerUrl',
111 'ServerUrl',
112 str(server_url) or 'x',
112 str(server_url) or 'x',
113 'TimeStamp',
113 'TimeStamp',
114 str(timestamp),
114 str(timestamp),
115 cls,
115 cls,
116 str(salt))
116 str(salt))
117
117
118 cls._log(
118 cls._log(
119 logging.DEBUG,
119 logging.DEBUG,
120 u'NDBOpenIDStore: Getting OpenID nonce from datastore by key.')
120 u'NDBOpenIDStore: Getting OpenID nonce from datastore by key.')
121 result = key.get()
121 result = key.get()
122
122
123 if result:
123 if result:
124 # if so, the nonce is not valid so return False
124 # if so, the nonce is not valid so return False
125 cls._log(
125 cls._log(
126 logging.WARNING,
126 logging.WARNING,
127 u'NDBOpenIDStore: Nonce was already used!')
127 u'NDBOpenIDStore: Nonce was already used!')
128 return False
128 return False
129 else:
129 else:
130 # if not, store the key to datastore and return True
130 # if not, store the key to datastore and return True
131 nonce = cls(key=key)
131 nonce = cls(key=key)
132 nonce.expiration_date = datetime.datetime.fromtimestamp(
132 nonce.expiration_date = datetime.datetime.fromtimestamp(
133 timestamp) + datetime.timedelta(0, openid.store.nonce.SKEW)
133 timestamp) + datetime.timedelta(0, openid.store.nonce.SKEW)
134 cls._log(
134 cls._log(
135 logging.DEBUG,
135 logging.DEBUG,
136 u'NDBOpenIDStore: Putting new nonce to datastore.')
136 u'NDBOpenIDStore: Putting new nonce to datastore.')
137 nonce.put()
137 nonce.put()
138 return True
138 return True
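The composite key path above makes the (server_url, timestamp, salt) triple globally unique, so a replayed nonce resolves to an already-stored entity. A minimal sketch of the same check-and-put idiom, assuming the App Engine ndb client; the model name is illustrative and, like the original, the get/put pair is deliberately not wrapped in a transaction:

from google.appengine.ext import ndb

class SeenNonce(ndb.Model):
    pass  # marker entity; all information lives in the key path

def use_once(server_url, timestamp, salt):
    # Encode the triple in the key path so a replay maps to the same key.
    key = ndb.Key('ServerUrl', str(server_url) or 'x',
                  'TimeStamp', str(timestamp),
                  SeenNonce, str(salt))
    if key.get():
        return False              # replayed nonce
    SeenNonce(key=key).put()      # first use: remember it
    return True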
139
139
140 @classmethod
140 @classmethod
141 def cleanupNonces(cls):
141 def cleanupNonces(cls):
142 # get all expired nonces
142 # get all expired nonces
143 cls._log(
143 cls._log(
144 logging.DEBUG,
144 logging.DEBUG,
145 u'NDBOpenIDStore: Querying datastore for OpenID nonces ordered by expiration date.')
145 u'NDBOpenIDStore: Querying datastore for OpenID nonces ordered by expiration date.')
146 expired = cls.query().filter(
146 expired = cls.query().filter(
147 cls.expiration_date <= datetime.datetime.now()).fetch(
147 cls.expiration_date <= datetime.datetime.now()).fetch(
148 keys_only=True)
148 keys_only=True)
149
149
150 # delete all expired
150 # delete all expired
151 cls._log(
151 cls._log(
152 logging.DEBUG,
152 logging.DEBUG,
153 u'NDBOpenIDStore: Deleting expired OpenID nonces from datastore.')
153 u'NDBOpenIDStore: Deleting expired OpenID nonces from datastore.')
154 ndb.delete_multi(expired)
154 ndb.delete_multi(expired)
155
155
156 return len(expired)
156 return len(expired)
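Both cleanup methods above use the same two-step datastore idiom: a keys-only query for expired entities followed by one batched delete. A minimal standalone sketch of that idiom, assuming the App Engine ndb client (the model name is illustrative):

import datetime
from google.appengine.ext import ndb

class Expiring(ndb.Model):
    expiration_date = ndb.DateTimeProperty()

def cleanup_expired():
    # keys_only avoids fetching entity bodies we are about to delete
    keys = Expiring.query(
        Expiring.expiration_date <= datetime.datetime.now()
    ).fetch(keys_only=True)
    ndb.delete_multi(keys)  # one batched RPC instead of len(keys) deletes
    return len(keys)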
@@ -1,505 +1,505 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """
2 """
3 |openid| Providers
3 |openid| Providers
4 ----------------------------------
4 ----------------------------------
5
5
6 Providers which implement the |openid|_ protocol based on the
6 Providers which implement the |openid|_ protocol based on the
7 `python-openid`_ library.
7 `python-openid`_ library.
8
8
9 .. warning::
9 .. warning::
10
10
11 These providers depend on the |pyopenid|_ package.
11 These providers depend on the |pyopenid|_ package.
12
12
13 .. autosummary::
13 .. autosummary::
14
14
15 OpenID
15 OpenID
16 Yahoo
16 Yahoo
17 Google
17 Google
18
18
19 """
19 """
20
20
21 # We need absolute import to import from openid library which has the same
21 # We need absolute import to import from openid library which has the same
22 # name as this module
22 # name as this module
23 from __future__ import absolute_import
23
24 import datetime
24 import datetime
25 import logging
25 import logging
26 import time
26 import time
27
27
28 from openid import oidutil
28 from openid import oidutil
29 from openid.consumer import consumer
29 from openid.consumer import consumer
30 from openid.extensions import ax, pape, sreg
30 from openid.extensions import ax, pape, sreg
31 from openid.association import Association
31 from openid.association import Association
32
32
33 from authomatic import providers
33 from authomatic import providers
34 from authomatic.exceptions import FailureError, CancellationError, OpenIDError
34 from authomatic.exceptions import FailureError, CancellationError, OpenIDError
35
35
36
36
37 __all__ = ['OpenID', 'Yahoo', 'Google']
37 __all__ = ['OpenID', 'Yahoo', 'Google']
38
38
39
39
40 # Suppress openid logging.
40 # Suppress openid logging.
41 oidutil.log = lambda message, level=0: None
41 oidutil.log = lambda message, level=0: None
42
42
43
43
44 REALM_HTML = \
44 REALM_HTML = \
45 """
45 """
46 <!DOCTYPE html>
46 <!DOCTYPE html>
47 <html>
47 <html>
48 <head>
48 <head>
49 <meta http-equiv="X-XRDS-Location" content="{xrds_location}" />
49 <meta http-equiv="X-XRDS-Location" content="{xrds_location}" />
50 </head>
50 </head>
51 <body>{body}</body>
51 <body>{body}</body>
52 </html>
52 </html>
53 """
53 """
54
54
55
55
56 XRDS_XML = \
56 XRDS_XML = \
57 """
57 """
58 <?xml version="1.0" encoding="UTF-8"?>
58 <?xml version="1.0" encoding="UTF-8"?>
59 <xrds:XRDS
59 <xrds:XRDS
60 xmlns:xrds="xri://$xrds"
60 xmlns:xrds="xri://$xrds"
61 xmlns:openid="http://openid.net/xmlns/1.0"
61 xmlns:openid="http://openid.net/xmlns/1.0"
62 xmlns="xri://$xrd*($v*2.0)">
62 xmlns="xri://$xrd*($v*2.0)">
63 <XRD>
63 <XRD>
64 <Service priority="1">
64 <Service priority="1">
65 <Type>http://specs.openid.net/auth/2.0/return_to</Type>
65 <Type>http://specs.openid.net/auth/2.0/return_to</Type>
66 <URI>{return_to}</URI>
66 <URI>{return_to}</URI>
67 </Service>
67 </Service>
68 </XRD>
68 </XRD>
69 </xrds:XRDS>
69 </xrds:XRDS>
70 """
70 """
71
71
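Both templates above are served from the provider's own login URL, selected by a query parameter, as login() further below shows. A small illustrative rendering of the URLs involved (host and handler path are made up):

# Illustrative: both helper documents are served from the login URL itself,
# keyed by query parameter (defaults: 'realm' and 'xrds').
login_url = 'https://example.com/login/oi'
realm_url = '{u}?{r}={r}'.format(u=login_url, r='realm')
xrds_url = '{u}?{x}={x}'.format(u=login_url, x='xrds')
print(realm_url)  # https://example.com/login/oi?realm=realm
print(xrds_url)   # https://example.com/login/oi?xrds=xrds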
72
72
73 class SessionOpenIDStore(object):
73 class SessionOpenIDStore(object):
74 """
74 """
75 A very primitive session-based implementation of the
75 A very primitive session-based implementation of the
77 :class:`openid.store.interface.OpenIDStore` interface of the
77 :class:`openid.store.interface.OpenIDStore` interface of the
78 `python-openid`_ library.
78 `python-openid`_ library.
79
79
80 .. warning::
80 .. warning::
81
81
82 Nonces are verified only by their timeout. Use at your own risk!
82 Nonces are verified only by their timeout. Use at your own risk!
83
83
84 """
84 """
85
85
86 @staticmethod
86 @staticmethod
87 def _log(level, message):
87 def _log(level, message):
88 return None
88 return None
89
89
90 ASSOCIATION_KEY = ('authomatic.providers.openid.SessionOpenIDStore:'
90 ASSOCIATION_KEY = ('authomatic.providers.openid.SessionOpenIDStore:'
91 'association')
91 'association')
92
92
93 def __init__(self, session, nonce_timeout=None):
93 def __init__(self, session, nonce_timeout=None):
94 """
94 """
95 :param int nonce_timeout:
95 :param int nonce_timeout:
96
96
97 Nonces older than this many seconds are considered expired.
97 Nonces older than this many seconds are considered expired.
98 Default is 600.
98 Default is 600.
99 """
99 """
100 self.session = session
100 self.session = session
101 self.nonce_timeout = nonce_timeout or 600
101 self.nonce_timeout = nonce_timeout or 600
102
102
103 def storeAssociation(self, server_url, association):
103 def storeAssociation(self, server_url, association):
104 self._log(logging.DEBUG,
104 self._log(logging.DEBUG,
105 'SessionOpenIDStore: Storing association to session.')
105 'SessionOpenIDStore: Storing association to session.')
106
106
107 serialized = association.serialize()
107 serialized = association.serialize()
108 decoded = serialized.decode('latin-1')
108 decoded = serialized.decode('latin-1')
109
109
110 assoc = decoded
110 assoc = decoded
111 # assoc = serialized
111 # assoc = serialized
112
112
113 # Always store only one association as a tuple.
113 # Always store only one association as a tuple.
114 self.session[self.ASSOCIATION_KEY] = (server_url, association.handle,
114 self.session[self.ASSOCIATION_KEY] = (server_url, association.handle,
115 assoc)
115 assoc)
116
116
117 def getAssociation(self, server_url, handle=None):
117 def getAssociation(self, server_url, handle=None):
118 # Try to get association.
118 # Try to get association.
119 assoc = self.session.get(self.ASSOCIATION_KEY)
119 assoc = self.session.get(self.ASSOCIATION_KEY)
120 if assoc and assoc[0] == server_url:
120 if assoc and assoc[0] == server_url:
121 # If found deserialize and return it.
121 # If found deserialize and return it.
122 self._log(logging.DEBUG, u'SessionOpenIDStore: Association found.')
122 self._log(logging.DEBUG, u'SessionOpenIDStore: Association found.')
123 return Association.deserialize(assoc[2].encode('latin-1'))
123 return Association.deserialize(assoc[2].encode('latin-1'))
124 else:
124 else:
125 self._log(logging.DEBUG,
125 self._log(logging.DEBUG,
126 u'SessionOpenIDStore: Association not found.')
126 u'SessionOpenIDStore: Association not found.')
127
127
128 def removeAssociation(self, server_url, handle):
128 def removeAssociation(self, server_url, handle):
129 # Just inform the caller that it's gone.
129 # Just inform the caller that it's gone.
130 return True
130 return True
131
131
132 def useNonce(self, server_url, timestamp, salt):
132 def useNonce(self, server_url, timestamp, salt):
133 # Evaluate expired nonces as false.
133 # Evaluate expired nonces as false.
134 age = int(time.time()) - int(timestamp)
134 age = int(time.time()) - int(timestamp)
135 if age < self.nonce_timeout:
135 if age < self.nonce_timeout:
136 return True
136 return True
137 else:
137 else:
138 self._log(logging.ERROR, u'SessionOpenIDStore: Expired nonce!')
138 self._log(logging.ERROR, u'SessionOpenIDStore: Expired nonce!')
139 return False
139 return False
140
140
141
141
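Given the warning above, SessionOpenIDStore is best treated as a zero-infrastructure default. A hedged sketch of constructing one (the dict stands in for whatever mutable session mapping your framework provides):

# Illustrative: any mutable mapping works as the session backend.
session = {}
store = SessionOpenIDStore(session, nonce_timeout=300)
# The OpenID provider below accepts it via its `store` keyword argument;
# stores with real persistence (e.g. an NDB-backed store like the one
# earlier in this changeset) plug in the same way.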
142 class OpenID(providers.AuthenticationProvider):
142 class OpenID(providers.AuthenticationProvider):
143 """
143 """
144 |openid|_ provider based on the `python-openid`_ library.
144 |openid|_ provider based on the `python-openid`_ library.
145 """
145 """
146
146
147 AX = ['http://axschema.org/contact/email',
147 AX = ['http://axschema.org/contact/email',
148 'http://schema.openid.net/contact/email',
148 'http://schema.openid.net/contact/email',
149 'http://axschema.org/namePerson',
149 'http://axschema.org/namePerson',
150 'http://openid.net/schema/namePerson/first',
150 'http://openid.net/schema/namePerson/first',
151 'http://openid.net/schema/namePerson/last',
151 'http://openid.net/schema/namePerson/last',
152 'http://openid.net/schema/gender',
152 'http://openid.net/schema/gender',
153 'http://openid.net/schema/language/pref',
153 'http://openid.net/schema/language/pref',
154 'http://openid.net/schema/contact/web/default',
154 'http://openid.net/schema/contact/web/default',
155 'http://openid.net/schema/media/image',
155 'http://openid.net/schema/media/image',
156 'http://openid.net/schema/timezone']
156 'http://openid.net/schema/timezone']
157
157
158 AX_REQUIRED = ['http://schema.openid.net/contact/email']
158 AX_REQUIRED = ['http://schema.openid.net/contact/email']
159
159
160 SREG = ['nickname',
160 SREG = ['nickname',
161 'email',
161 'email',
162 'fullname',
162 'fullname',
163 'dob',
163 'dob',
164 'gender',
164 'gender',
165 'postcode',
165 'postcode',
166 'country',
166 'country',
167 'language',
167 'language',
168 'timezone']
168 'timezone']
169
169
170 PAPE = [
170 PAPE = [
171 'http://schemas.openid.net/pape/policies/2007/06/'
171 'http://schemas.openid.net/pape/policies/2007/06/'
172 'multi-factor-physical',
172 'multi-factor-physical',
173 'http://schemas.openid.net/pape/policies/2007/06/multi-factor',
173 'http://schemas.openid.net/pape/policies/2007/06/multi-factor',
174 'http://schemas.openid.net/pape/policies/2007/06/phishing-resistant'
174 'http://schemas.openid.net/pape/policies/2007/06/phishing-resistant'
175 ]
175 ]
176
176
177 def __init__(self, *args, **kwargs):
177 def __init__(self, *args, **kwargs):
178 """
178 """
179 Accepts additional keyword arguments:
179 Accepts additional keyword arguments:
180
180
181 :param store:
181 :param store:
182 Any object which implements
182 Any object which implements
183 :class:`openid.store.interface.OpenIDStore`
183 :class:`openid.store.interface.OpenIDStore`
184 of the `python-openid`_ library.
184 of the `python-openid`_ library.
185
185
186 :param bool use_realm:
186 :param bool use_realm:
187 Whether to use `OpenID realm
187 Whether to use `OpenID realm
188 <http://openid.net/specs/openid-authentication-2_0-12.html#realms>`_.
188 <http://openid.net/specs/openid-authentication-2_0-12.html#realms>`_.
189 If ``True`` the realm HTML document will be accessible at
189 If ``True`` the realm HTML document will be accessible at
190 ``{current url}?{realm_param}={realm_param}``
190 ``{current url}?{realm_param}={realm_param}``
191 e.g. ``http://example.com/path?realm=realm``.
191 e.g. ``http://example.com/path?realm=realm``.
192
192
193 :param str realm_body:
193 :param str realm_body:
194 Contents of the HTML body tag of the realm.
194 Contents of the HTML body tag of the realm.
195
195
196 :param str realm_param:
196 :param str realm_param:
197 Name of the query parameter to be used to serve the realm.
197 Name of the query parameter to be used to serve the realm.
198
198
199 :param str xrds_param:
199 :param str xrds_param:
200 The name of the query parameter to be used to serve the
200 The name of the query parameter to be used to serve the
201 `XRDS document
201 `XRDS document
202 <http://openid.net/specs/openid-authentication-2_0-12.html#XRDS_Sample>`_.
202 <http://openid.net/specs/openid-authentication-2_0-12.html#XRDS_Sample>`_.
203
203
204 :param list sreg:
204 :param list sreg:
205 List of strings of optional
205 List of strings of optional
206 `SREG
206 `SREG
207 <http://openid.net/specs/openid-simple-registration-extension-1_0.html>`_
207 <http://openid.net/specs/openid-simple-registration-extension-1_0.html>`_
208 fields.
208 fields.
209 Default = :attr:`OpenID.SREG`.
209 Default = :attr:`OpenID.SREG`.
210
210
211 :param list sreg_required:
211 :param list sreg_required:
212 List of strings of required
212 List of strings of required
213 `SREG
213 `SREG
214 <http://openid.net/specs/openid-simple-registration-extension-1_0.html>`_
214 <http://openid.net/specs/openid-simple-registration-extension-1_0.html>`_
215 fields.
215 fields.
216 Default = ``[]``.
216 Default = ``[]``.
217
217
218 :param list ax:
218 :param list ax:
219 List of strings of optional
219 List of strings of optional
220 `AX
220 `AX
221 <http://openid.net/specs/openid-attribute-exchange-1_0.html>`_
221 <http://openid.net/specs/openid-attribute-exchange-1_0.html>`_
222 schemas.
222 schemas.
223 Default = :attr:`OpenID.AX`.
223 Default = :attr:`OpenID.AX`.
224
224
225 :param list ax_required:
225 :param list ax_required:
226 List of strings of required
226 List of strings of required
227 `AX
227 `AX
228 <http://openid.net/specs/openid-attribute-exchange-1_0.html>`_
228 <http://openid.net/specs/openid-attribute-exchange-1_0.html>`_
229 schemas.
229 schemas.
230 Default = :attr:`OpenID.AX_REQUIRED`.
230 Default = :attr:`OpenID.AX_REQUIRED`.
231
231
232 :param list pape:
232 :param list pape:
233 List of strings of requested
233 List of strings of requested
234 `PAPE
234 `PAPE
235 <http://openid.net/specs/openid-provider-authentication-policy-extension-1_0.html>`_
235 <http://openid.net/specs/openid-provider-authentication-policy-extension-1_0.html>`_
236 policies.
236 policies.
237 Default = :attr:`OpenID.PAPE`.
237 Default = :attr:`OpenID.PAPE`.
238
238
239 As well as those inherited from :class:`.AuthenticationProvider`
239 As well as those inherited from :class:`.AuthenticationProvider`
240 constructor.
240 constructor.
241
241
242 """
242 """
243
243
244 super(OpenID, self).__init__(*args, **kwargs)
244 super(OpenID, self).__init__(*args, **kwargs)
245
245
246 # Allow for other openid store implementations.
246 # Allow for other openid store implementations.
247 self.store = self._kwarg(
247 self.store = self._kwarg(
248 kwargs, 'store', SessionOpenIDStore(
248 kwargs, 'store', SessionOpenIDStore(
249 self.session))
249 self.session))
250
250
251 # Realm
251 # Realm
252 self.use_realm = self._kwarg(kwargs, 'use_realm', True)
252 self.use_realm = self._kwarg(kwargs, 'use_realm', True)
253 self.realm_body = self._kwarg(kwargs, 'realm_body', '')
253 self.realm_body = self._kwarg(kwargs, 'realm_body', '')
254 self.realm_param = self._kwarg(kwargs, 'realm_param', 'realm')
254 self.realm_param = self._kwarg(kwargs, 'realm_param', 'realm')
255 self.xrds_param = self._kwarg(kwargs, 'xrds_param', 'xrds')
255 self.xrds_param = self._kwarg(kwargs, 'xrds_param', 'xrds')
256
256
257 # SREG
257 # SREG
258 self.sreg = self._kwarg(kwargs, 'sreg', self.SREG)
258 self.sreg = self._kwarg(kwargs, 'sreg', self.SREG)
259 self.sreg_required = self._kwarg(kwargs, 'sreg_required', [])
259 self.sreg_required = self._kwarg(kwargs, 'sreg_required', [])
260
260
261 # AX
261 # AX
262 self.ax = self._kwarg(kwargs, 'ax', self.AX)
262 self.ax = self._kwarg(kwargs, 'ax', self.AX)
263 self.ax_required = self._kwarg(kwargs, 'ax_required', self.AX_REQUIRED)
263 self.ax_required = self._kwarg(kwargs, 'ax_required', self.AX_REQUIRED)
264 # add required schemas to the optional list if not already there
264 # add required schemas to the optional list if not already there
265 for i in self.ax_required:
265 for i in self.ax_required:
266 if i not in self.ax:
266 if i not in self.ax:
267 self.ax.append(i)
267 self.ax.append(i)
268
268
269 # PAPE
269 # PAPE
270 self.pape = self._kwarg(kwargs, 'pape', self.PAPE)
270 self.pape = self._kwarg(kwargs, 'pape', self.PAPE)
271
271
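Together, the _kwarg() lookups above mean every extension list can be overridden per configuration. An illustrative set of keyword overrides that requests only an e-mail address; the remaining constructor arguments come from the framework adapter and are omitted here:

# Illustrative keyword overrides; these are the values _kwarg() picks up.
openid_kwargs = dict(
    store=SessionOpenIDStore({}),
    sreg=['email'],
    sreg_required=['email'],
    ax=['http://axschema.org/contact/email'],
    ax_required=['http://axschema.org/contact/email'],
    pape=[],  # request no PAPE policies
)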
272 @staticmethod
272 @staticmethod
273 def _x_user_parser(user, data):
273 def _x_user_parser(user, data):
274
274
275 user.first_name = data.get('ax', {}).get(
275 user.first_name = data.get('ax', {}).get(
276 'http://openid.net/schema/namePerson/first')
276 'http://openid.net/schema/namePerson/first')
277 user.last_name = data.get('ax', {}).get(
277 user.last_name = data.get('ax', {}).get(
278 'http://openid.net/schema/namePerson/last')
278 'http://openid.net/schema/namePerson/last')
279 user.id = data.get('guid')
279 user.id = data.get('guid')
280 user.link = data.get('ax', {}).get(
280 user.link = data.get('ax', {}).get(
281 'http://openid.net/schema/contact/web/default')
281 'http://openid.net/schema/contact/web/default')
282 user.picture = data.get('ax', {}).get(
282 user.picture = data.get('ax', {}).get(
283 'http://openid.net/schema/media/image')
283 'http://openid.net/schema/media/image')
284 user.nickname = data.get('sreg', {}).get('nickname')
284 user.nickname = data.get('sreg', {}).get('nickname')
285 user.country = data.get('sreg', {}).get('country')
285 user.country = data.get('sreg', {}).get('country')
286 user.postal_code = data.get('sreg', {}).get('postcode')
286 user.postal_code = data.get('sreg', {}).get('postcode')
287
287
288 user.name = data.get('sreg', {}).get('fullname') or \
288 user.name = data.get('sreg', {}).get('fullname') or \
289 data.get('ax', {}).get('http://axschema.org/namePerson')
289 data.get('ax', {}).get('http://axschema.org/namePerson')
290
290
291 user.gender = data.get('sreg', {}).get('gender') or \
291 user.gender = data.get('sreg', {}).get('gender') or \
292 data.get('ax', {}).get('http://openid.net/schema/gender')
292 data.get('ax', {}).get('http://openid.net/schema/gender')
293
293
294 user.locale = data.get('sreg', {}).get('language') or \
294 user.locale = data.get('sreg', {}).get('language') or \
295 data.get('ax', {}).get('http://openid.net/schema/language/pref')
295 data.get('ax', {}).get('http://openid.net/schema/language/pref')
296
296
297 user.timezone = data.get('sreg', {}).get('timezone') or \
297 user.timezone = data.get('sreg', {}).get('timezone') or \
298 data.get('ax', {}).get('http://openid.net/schema/timezone')
298 data.get('ax', {}).get('http://openid.net/schema/timezone')
299
299
300 user.email = data.get('sreg', {}).get('email') or \
300 user.email = data.get('sreg', {}).get('email') or \
301 data.get('ax', {}).get('http://axschema.org/contact/email') or \
301 data.get('ax', {}).get('http://axschema.org/contact/email') or \
302 data.get('ax', {}).get('http://schema.openid.net/contact/email')
302 data.get('ax', {}).get('http://schema.openid.net/contact/email')
303
303
304 if data.get('sreg', {}).get('dob'):
304 if data.get('sreg', {}).get('dob'):
305 user.birth_date = datetime.datetime.strptime(
305 user.birth_date = datetime.datetime.strptime(
306 data.get('sreg', {}).get('dob'),
306 data.get('sreg', {}).get('dob'),
307 '%Y-%m-%d'
307 '%Y-%m-%d'
308 )
308 )
309 else:
309 else:
310 user.birth_date = None
310 user.birth_date = None
311
311
312 return user
312 return user
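For reference, the data dict this parser receives is assembled in login() below from the AX, SREG and PAPE responses. A hypothetical example of its shape:

# Hypothetical input for _x_user_parser(); keys mirror the schema URIs
# requested via OpenID.AX and the field names in OpenID.SREG.
data = {
    'guid': 'https://me.example.com/openid/jane',
    'ax': {
        'http://axschema.org/contact/email': 'jane@example.com',
        'http://openid.net/schema/namePerson/first': 'Jane',
        'http://openid.net/schema/namePerson/last': 'Doe',
    },
    'sreg': {'nickname': 'jane', 'language': 'en', 'dob': '1990-01-31'},
}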
313
313
314 @providers.login_decorator
314 @providers.login_decorator
315 def login(self):
315 def login(self):
316 # Instantiate consumer
316 # Instantiate consumer
317 self.store._log = self._log
317 self.store._log = self._log
318 oi_consumer = consumer.Consumer(self.session, self.store)
318 oi_consumer = consumer.Consumer(self.session, self.store)
319
319
320 # handle realm and XRDS if there is only one query parameter
320 # handle realm and XRDS if there is only one query parameter
321 if self.use_realm and len(self.params) == 1:
321 if self.use_realm and len(self.params) == 1:
322 realm_request = self.params.get(self.realm_param)
322 realm_request = self.params.get(self.realm_param)
323 xrds_request = self.params.get(self.xrds_param)
323 xrds_request = self.params.get(self.xrds_param)
324 else:
324 else:
325 realm_request = None
325 realm_request = None
326 xrds_request = None
326 xrds_request = None
327
327
328 # determine type of request
328 # determine type of request
329 if realm_request:
329 if realm_request:
330 # =================================================================
330 # =================================================================
331 # Realm HTML
331 # Realm HTML
332 # =================================================================
332 # =================================================================
333
333
334 self._log(
334 self._log(
335 logging.INFO,
335 logging.INFO,
336 u'Writing OpenID realm HTML to the response.')
336 u'Writing OpenID realm HTML to the response.')
337 xrds_location = '{u}?{x}={x}'.format(u=self.url, x=self.xrds_param)
337 xrds_location = '{u}?{x}={x}'.format(u=self.url, x=self.xrds_param)
338 self.write(
338 self.write(
339 REALM_HTML.format(
339 REALM_HTML.format(
340 xrds_location=xrds_location,
340 xrds_location=xrds_location,
341 body=self.realm_body))
341 body=self.realm_body))
342
342
343 elif xrds_request:
343 elif xrds_request:
344 # =================================================================
344 # =================================================================
345 # XRDS XML
345 # XRDS XML
346 # =================================================================
346 # =================================================================
347
347
348 self._log(
348 self._log(
349 logging.INFO,
349 logging.INFO,
350 u'Writing XRDS XML document to the response.')
350 u'Writing XRDS XML document to the response.')
351 self.set_header('Content-Type', 'application/xrds+xml')
351 self.set_header('Content-Type', 'application/xrds+xml')
352 self.write(XRDS_XML.format(return_to=self.url))
352 self.write(XRDS_XML.format(return_to=self.url))
353
353
354 elif self.params.get('openid.mode'):
354 elif self.params.get('openid.mode'):
355 # =================================================================
355 # =================================================================
356 # Phase 2 after redirect
356 # Phase 2 after redirect
357 # =================================================================
357 # =================================================================
358
358
359 self._log(
359 self._log(
360 logging.INFO,
360 logging.INFO,
361 u'Continuing OpenID authentication procedure after redirect.')
361 u'Continuing OpenID authentication procedure after redirect.')
362
362
363 # complete the authentication process
363 # complete the authentication process
364 response = oi_consumer.complete(self.params, self.url)
364 response = oi_consumer.complete(self.params, self.url)
365
365
366 # on success
366 # on success
367 if response.status == consumer.SUCCESS:
367 if response.status == consumer.SUCCESS:
368
368
369 data = {}
369 data = {}
370
370
371 # get user ID
371 # get user ID
372 data['guid'] = response.getDisplayIdentifier()
372 data['guid'] = response.getDisplayIdentifier()
373
373
374 self._log(logging.INFO, u'Authentication successful.')
374 self._log(logging.INFO, u'Authentication successful.')
375
375
376 # get user data from AX response
376 # get user data from AX response
377 ax_response = ax.FetchResponse.fromSuccessResponse(response)
377 ax_response = ax.FetchResponse.fromSuccessResponse(response)
378 if ax_response and ax_response.data:
378 if ax_response and ax_response.data:
379 self._log(logging.INFO, u'Got AX data.')
379 self._log(logging.INFO, u'Got AX data.')
380 ax_data = {}
380 ax_data = {}
381 # convert iterable values to their first item
381 # convert iterable values to their first item
382 for k, v in ax_response.data.items():
382 for k, v in ax_response.data.items():
383 if v and isinstance(v, (list, tuple)):
383 if v and isinstance(v, (list, tuple)):
384 ax_data[k] = v[0]
384 ax_data[k] = v[0]
385 data['ax'] = ax_data
385 data['ax'] = ax_data
386
386
387 # get user data from SREG response
387 # get user data from SREG response
388 sreg_response = sreg.SRegResponse.fromSuccessResponse(response)
388 sreg_response = sreg.SRegResponse.fromSuccessResponse(response)
389 if sreg_response and sreg_response.data:
389 if sreg_response and sreg_response.data:
390 self._log(logging.INFO, u'Got SREG data.')
390 self._log(logging.INFO, u'Got SREG data.')
391 data['sreg'] = sreg_response.data
391 data['sreg'] = sreg_response.data
392
392
393 # get data from PAPE response
393 # get data from PAPE response
394 pape_response = pape.Response.fromSuccessResponse(response)
394 pape_response = pape.Response.fromSuccessResponse(response)
395 if pape_response and pape_response.auth_policies:
395 if pape_response and pape_response.auth_policies:
396 self._log(logging.INFO, u'Got PAPE data.')
396 self._log(logging.INFO, u'Got PAPE data.')
397 data['pape'] = pape_response.auth_policies
397 data['pape'] = pape_response.auth_policies
398
398
399 # create user
399 # create user
400 self._update_or_create_user(data)
400 self._update_or_create_user(data)
401
401
402 # =============================================================
402 # =============================================================
403 # We're done!
403 # We're done!
404 # =============================================================
404 # =============================================================
405
405
406 elif response.status == consumer.CANCEL:
406 elif response.status == consumer.CANCEL:
407 raise CancellationError(
407 raise CancellationError(
408 u'User cancelled the verification of ID "{0}"!'.format(
408 u'User cancelled the verification of ID "{0}"!'.format(
409 response.getDisplayIdentifier()))
409 response.getDisplayIdentifier()))
410
410
411 elif response.status == consumer.FAILURE:
411 elif response.status == consumer.FAILURE:
412 raise FailureError(response.message)
412 raise FailureError(response.message)
413
413
414 elif self.identifier: # As set in AuthenticationProvider.__init__
414 elif self.identifier: # As set in AuthenticationProvider.__init__
415 # =================================================================
415 # =================================================================
416 # Phase 1 before redirect
416 # Phase 1 before redirect
417 # =================================================================
417 # =================================================================
418
418
419 self._log(
419 self._log(
420 logging.INFO,
420 logging.INFO,
421 u'Starting OpenID authentication procedure.')
421 u'Starting OpenID authentication procedure.')
422
422
423 # get AuthRequest object
423 # get AuthRequest object
424 try:
424 try:
425 auth_request = oi_consumer.begin(self.identifier)
425 auth_request = oi_consumer.begin(self.identifier)
426 except consumer.DiscoveryFailure as e:
426 except consumer.DiscoveryFailure as e:
427 raise FailureError(
427 raise FailureError(
428 u'Discovery failed for identifier {0}!'.format(
428 u'Discovery failed for identifier {0}!'.format(
429 self.identifier
429 self.identifier
430 ),
430 ),
431 url=self.identifier,
431 url=self.identifier,
432 original_message=str(e))  # BaseException.message is gone on Python 3
432 original_message=str(e))  # BaseException.message is gone on Python 3
433
433
434 self._log(
434 self._log(
435 logging.INFO,
435 logging.INFO,
436 u'Service discovery for identifier {0} successful.'.format(
436 u'Service discovery for identifier {0} successful.'.format(
437 self.identifier))
437 self.identifier))
438
438
439 # add SREG extension
439 # add SREG extension
440 # we need to remove required fields from optional fields because
440 # we need to remove required fields from optional fields because
441 # addExtension then raises an error
441 # addExtension then raises an error
442 self.sreg = [i for i in self.sreg if i not in self.sreg_required]
442 self.sreg = [i for i in self.sreg if i not in self.sreg_required]
443 auth_request.addExtension(
443 auth_request.addExtension(
444 sreg.SRegRequest(
444 sreg.SRegRequest(
445 optional=self.sreg,
445 optional=self.sreg,
446 required=self.sreg_required)
446 required=self.sreg_required)
447 )
447 )
448
448
449 # add AX extension
449 # add AX extension
450 ax_request = ax.FetchRequest()
450 ax_request = ax.FetchRequest()
451 # set AX schemas
451 # set AX schemas
452 for i in self.ax:
452 for i in self.ax:
453 required = i in self.ax_required
453 required = i in self.ax_required
454 ax_request.add(ax.AttrInfo(i, required=required))
454 ax_request.add(ax.AttrInfo(i, required=required))
455 auth_request.addExtension(ax_request)
455 auth_request.addExtension(ax_request)
456
456
457 # add PAPE extension
457 # add PAPE extension
458 auth_request.addExtension(pape.Request(self.pape))
458 auth_request.addExtension(pape.Request(self.pape))
459
459
460 # prepare realm and return_to URLs
460 # prepare realm and return_to URLs
461 if self.use_realm:
461 if self.use_realm:
462 realm = return_to = '{u}?{r}={r}'.format(
462 realm = return_to = '{u}?{r}={r}'.format(
463 u=self.url, r=self.realm_param)
463 u=self.url, r=self.realm_param)
464 else:
464 else:
465 realm = return_to = self.url
465 realm = return_to = self.url
466
466
469 if auth_request.shouldSendRedirect():
469 if auth_request.shouldSendRedirect():
470 # can be redirected
470 # can be redirected
471 url = auth_request.redirectURL(realm, return_to)
471 url = auth_request.redirectURL(realm, return_to)
472 self._log(
472 self._log(
473 logging.INFO,
473 logging.INFO,
474 u'Redirecting user to {0}.'.format(url))
474 u'Redirecting user to {0}.'.format(url))
475 self.redirect(url)
475 self.redirect(url)
476 else:
476 else:
477 # must be sent as POST
477 # must be sent as POST
478 # this writes an HTML POST form that auto-submits
478 # this writes an HTML POST form that auto-submits
479 self._log(
479 self._log(
480 logging.INFO,
480 logging.INFO,
481 u'Writing an auto-submit HTML form to the response.')
481 u'Writing an auto-submit HTML form to the response.')
482 form = auth_request.htmlMarkup(
482 form = auth_request.htmlMarkup(
483 realm, return_to, False, dict(
483 realm, return_to, False, dict(
484 id='openid_form'))
484 id='openid_form'))
485 self.write(form)
485 self.write(form)
486 else:
486 else:
487 raise OpenIDError('No identifier specified!')
487 raise OpenIDError('No identifier specified!')
488
488
489
489
490 class Yahoo(OpenID):
490 class Yahoo(OpenID):
491 """
491 """
492 Yahoo :class:`.OpenID` provider with the :attr:`.identifier` predefined to
492 Yahoo :class:`.OpenID` provider with the :attr:`.identifier` predefined to
493 ``"me.yahoo.com"``.
493 ``"me.yahoo.com"``.
494 """
494 """
495
495
496 identifier = 'me.yahoo.com'
496 identifier = 'me.yahoo.com'
497
497
498
498
499 class Google(OpenID):
499 class Google(OpenID):
500 """
500 """
501 Google :class:`.OpenID` provider with the :attr:`.identifier` predefined to
501 Google :class:`.OpenID` provider with the :attr:`.identifier` predefined to
502 ``"https://www.google.com/accounts/o8/id"``.
502 ``"https://www.google.com/accounts/o8/id"``.
503 """
503 """
504
504
505 identifier = 'https://www.google.com/accounts/o8/id'
505 identifier = 'https://www.google.com/accounts/o8/id'
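A hedged sketch of how these provider classes are typically wired into an authomatic config dict; the provider names ('oi', 'google_oi', 'yahoo_oi') are arbitrary labels chosen here for illustration:

from authomatic.providers import openid

CONFIG = {
    'oi': {'class_': openid.OpenID},         # caller supplies the identifier
    'google_oi': {'class_': openid.Google},  # identifier preset to Google's endpoint
    'yahoo_oi': {'class_': openid.Yahoo},    # identifier preset to me.yahoo.com
}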
@@ -1,839 +1,839 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """Utilities for writing code that runs on Python 2 and 3"""
2 """Utilities for writing code that runs on Python 2 and 3"""
3
3
4 # Copyright (c) 2010-2015 Benjamin Peterson
4 # Copyright (c) 2010-2015 Benjamin Peterson
5 #
5 #
6 # Permission is hereby granted, free of charge, to any person obtaining a copy
6 # Permission is hereby granted, free of charge, to any person obtaining a copy
7 # of this software and associated documentation files (the "Software"), to deal
7 # of this software and associated documentation files (the "Software"), to deal
8 # in the Software without restriction, including without limitation the rights
8 # in the Software without restriction, including without limitation the rights
9 # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 # copies of the Software, and to permit persons to whom the Software is
10 # copies of the Software, and to permit persons to whom the Software is
11 # furnished to do so, subject to the following conditions:
11 # furnished to do so, subject to the following conditions:
12 #
12 #
13 # The above copyright notice and this permission notice shall be included in all
13 # The above copyright notice and this permission notice shall be included in all
14 # copies or substantial portions of the Software.
14 # copies or substantial portions of the Software.
15 #
15 #
16 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22 # SOFTWARE.
22 # SOFTWARE.
23
23
24 from __future__ import absolute_import
24
25
25
26 import functools
26 import functools
27 import itertools
27 import itertools
28 import operator
28 import operator
29 import sys
29 import sys
30 import types
30 import types
31
31
32 __author__ = "Benjamin Peterson <benjamin@python.org>"
32 __author__ = "Benjamin Peterson <benjamin@python.org>"
33 __version__ = "1.9.0"
33 __version__ = "1.9.0"
34
34
35
35
36 # Useful for very coarse version differentiation.
36 # Useful for very coarse version differentiation.
37 PY2 = sys.version_info[0] == 2
37 PY2 = sys.version_info[0] == 2
38 PY3 = sys.version_info[0] == 3
38 PY3 = sys.version_info[0] == 3
39
39
40 if PY3:
40 if PY3:
41 string_types = str,
41 string_types = str,
42 integer_types = int,
42 integer_types = int,
43 class_types = type,
43 class_types = type,
44 text_type = str
44 text_type = str
45 binary_type = bytes
45 binary_type = bytes
46
46
47 MAXSIZE = sys.maxsize
47 MAXSIZE = sys.maxsize
48 else:
48 else:
49 string_types = basestring,
49 string_types = basestring,
50 integer_types = (int, long)
50 integer_types = (int, long)
51 class_types = (type, types.ClassType)
51 class_types = (type, types.ClassType)
52 text_type = unicode
52 text_type = unicode
53 binary_type = str
53 binary_type = str
54
54
55 if sys.platform.startswith("java"):
55 if sys.platform.startswith("java"):
56 # Jython always uses 32 bits.
56 # Jython always uses 32 bits.
57 MAXSIZE = int((1 << 31) - 1)
57 MAXSIZE = int((1 << 31) - 1)
58 else:
58 else:
59 # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
59 # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
60 class X(object):
60 class X(object):
61 def __len__(self):
61 def __len__(self):
62 return 1 << 31
62 return 1 << 31
63 try:
63 try:
64 len(X())
64 len(X())
65 except OverflowError:
65 except OverflowError:
66 # 32-bit
66 # 32-bit
67 MAXSIZE = int((1 << 31) - 1)
67 MAXSIZE = int((1 << 31) - 1)
68 else:
68 else:
69 # 64-bit
69 # 64-bit
70 MAXSIZE = int((1 << 63) - 1)
70 MAXSIZE = int((1 << 63) - 1)
71 del X
71 del X
72
72
73
73
74 def _add_doc(func, doc):
74 def _add_doc(func, doc):
75 """Add documentation to a function."""
75 """Add documentation to a function."""
76 func.__doc__ = doc
76 func.__doc__ = doc
77
77
78
78
79 def _import_module(name):
79 def _import_module(name):
80 """Import module, returning the module after the last dot."""
80 """Import module, returning the module after the last dot."""
81 __import__(name)
81 __import__(name)
82 return sys.modules[name]
82 return sys.modules[name]
83
83
84
84
85 class _LazyDescr(object):
85 class _LazyDescr(object):
86
86
87 def __init__(self, name):
87 def __init__(self, name):
88 self.name = name
88 self.name = name
89
89
90 def __get__(self, obj, tp):
90 def __get__(self, obj, tp):
91 result = self._resolve()
91 result = self._resolve()
92 setattr(obj, self.name, result) # Invokes __set__.
92 setattr(obj, self.name, result) # Invokes __set__.
93 try:
93 try:
94 # This is a bit ugly, but it avoids running this again by
94 # This is a bit ugly, but it avoids running this again by
95 # removing this descriptor.
95 # removing this descriptor.
96 delattr(obj.__class__, self.name)
96 delattr(obj.__class__, self.name)
97 except AttributeError:
97 except AttributeError:
98 pass
98 pass
99 return result
99 return result
100
100
101
101
102 class MovedModule(_LazyDescr):
102 class MovedModule(_LazyDescr):
103
103
104 def __init__(self, name, old, new=None):
104 def __init__(self, name, old, new=None):
105 super(MovedModule, self).__init__(name)
105 super(MovedModule, self).__init__(name)
106 if PY3:
106 if PY3:
107 if new is None:
107 if new is None:
108 new = name
108 new = name
109 self.mod = new
109 self.mod = new
110 else:
110 else:
111 self.mod = old
111 self.mod = old
112
112
113 def _resolve(self):
113 def _resolve(self):
114 return _import_module(self.mod)
114 return _import_module(self.mod)
115
115
116 def __getattr__(self, attr):
116 def __getattr__(self, attr):
117 _module = self._resolve()
117 _module = self._resolve()
118 value = getattr(_module, attr)
118 value = getattr(_module, attr)
119 setattr(self, attr, value)
119 setattr(self, attr, value)
120 return value
120 return value
121
121
122
122
123 class _LazyModule(types.ModuleType):
123 class _LazyModule(types.ModuleType):
124
124
125 def __init__(self, name):
125 def __init__(self, name):
126 super(_LazyModule, self).__init__(name)
126 super(_LazyModule, self).__init__(name)
127 self.__doc__ = self.__class__.__doc__
127 self.__doc__ = self.__class__.__doc__
128
128
129 def __dir__(self):
129 def __dir__(self):
130 attrs = ["__doc__", "__name__"]
130 attrs = ["__doc__", "__name__"]
131 attrs += [attr.name for attr in self._moved_attributes]
131 attrs += [attr.name for attr in self._moved_attributes]
132 return attrs
132 return attrs
133
133
134 # Subclasses should override this
134 # Subclasses should override this
135 _moved_attributes = []
135 _moved_attributes = []
136
136
137
137
138 class MovedAttribute(_LazyDescr):
138 class MovedAttribute(_LazyDescr):
139
139
140 def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
140 def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
141 super(MovedAttribute, self).__init__(name)
141 super(MovedAttribute, self).__init__(name)
142 if PY3:
142 if PY3:
143 if new_mod is None:
143 if new_mod is None:
144 new_mod = name
144 new_mod = name
145 self.mod = new_mod
145 self.mod = new_mod
146 if new_attr is None:
146 if new_attr is None:
147 if old_attr is None:
147 if old_attr is None:
148 new_attr = name
148 new_attr = name
149 else:
149 else:
150 new_attr = old_attr
150 new_attr = old_attr
151 self.attr = new_attr
151 self.attr = new_attr
152 else:
152 else:
153 self.mod = old_mod
153 self.mod = old_mod
154 if old_attr is None:
154 if old_attr is None:
155 old_attr = name
155 old_attr = name
156 self.attr = old_attr
156 self.attr = old_attr
157
157
158 def _resolve(self):
158 def _resolve(self):
159 module = _import_module(self.mod)
159 module = _import_module(self.mod)
160 return getattr(module, self.attr)
160 return getattr(module, self.attr)
161
161
162
162
163 class _SixMetaPathImporter(object):
163 class _SixMetaPathImporter(object):
164 """
164 """
165 A meta path importer to import six.moves and its submodules.
165 A meta path importer to import six.moves and its submodules.
166
166
167 This class implements a PEP302 finder and loader. It should be compatible
167 This class implements a PEP302 finder and loader. It should be compatible
168 with Python 2.5 and all existing versions of Python 3.
168 with Python 2.5 and all existing versions of Python 3.
169 """
169 """
170 def __init__(self, six_module_name):
170 def __init__(self, six_module_name):
171 self.name = six_module_name
171 self.name = six_module_name
172 self.known_modules = {}
172 self.known_modules = {}
173
173
174 def _add_module(self, mod, *fullnames):
174 def _add_module(self, mod, *fullnames):
175 for fullname in fullnames:
175 for fullname in fullnames:
176 self.known_modules[self.name + "." + fullname] = mod
176 self.known_modules[self.name + "." + fullname] = mod
177
177
178 def _get_module(self, fullname):
178 def _get_module(self, fullname):
179 return self.known_modules[self.name + "." + fullname]
179 return self.known_modules[self.name + "." + fullname]
180
180
181 def find_module(self, fullname, path=None):
181 def find_module(self, fullname, path=None):
182 if fullname in self.known_modules:
182 if fullname in self.known_modules:
183 return self
183 return self
184 return None
184 return None
185
185
186 def __get_module(self, fullname):
186 def __get_module(self, fullname):
187 try:
187 try:
188 return self.known_modules[fullname]
188 return self.known_modules[fullname]
189 except KeyError:
189 except KeyError:
190 raise ImportError("This loader does not know module " + fullname)
190 raise ImportError("This loader does not know module " + fullname)
191
191
192 def load_module(self, fullname):
192 def load_module(self, fullname):
193 try:
193 try:
194 # in case of a reload
194 # in case of a reload
195 return sys.modules[fullname]
195 return sys.modules[fullname]
196 except KeyError:
196 except KeyError:
197 pass
197 pass
198 mod = self.__get_module(fullname)
198 mod = self.__get_module(fullname)
199 if isinstance(mod, MovedModule):
199 if isinstance(mod, MovedModule):
200 mod = mod._resolve()
200 mod = mod._resolve()
201 else:
201 else:
202 mod.__loader__ = self
202 mod.__loader__ = self
203 sys.modules[fullname] = mod
203 sys.modules[fullname] = mod
204 return mod
204 return mod
205
205
206 def is_package(self, fullname):
206 def is_package(self, fullname):
207 """
207 """
208 Return true if the named module is a package.
208 Return true if the named module is a package.
209
209
210 We need this method to get correct spec objects with
210 We need this method to get correct spec objects with
211 Python 3.4 (see PEP451)
211 Python 3.4 (see PEP451)
212 """
212 """
213 return hasattr(self.__get_module(fullname), "__path__")
213 return hasattr(self.__get_module(fullname), "__path__")
214
214
215 def get_code(self, fullname):
215 def get_code(self, fullname):
216 """Return None
216 """Return None
217
217
218 Required if is_package is implemented."""
218 Required if is_package is implemented."""
219 self.__get_module(fullname) # eventually raises ImportError
219 self.__get_module(fullname) # eventually raises ImportError
220 return None
220 return None
221 get_source = get_code # same as get_code
221 get_source = get_code # same as get_code
222
222
223 _importer = _SixMetaPathImporter(__name__)
223 _importer = _SixMetaPathImporter(__name__)
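A note on how the importer above takes effect: six registers the instance on sys.meta_path (the real six.py does this near the end of the file, outside this excerpt), after which imports of known dotted names are routed through find_module()/load_module(). A sketch of that hookup, under the assumption that the rest of the module has run:

import sys
if _importer not in sys.meta_path:
    sys.meta_path.append(_importer)
# From then on, `import six.moves.builtins` asks _importer first;
# load_module() resolves the MovedModule and caches it in sys.modules.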
224
224
225
225
226 class _MovedItems(_LazyModule):
226 class _MovedItems(_LazyModule):
227 """Lazy loading of moved objects"""
227 """Lazy loading of moved objects"""
228 __path__ = [] # mark as package
228 __path__ = [] # mark as package
229
229
230
230
231 _moved_attributes = [
231 _moved_attributes = [
232 MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
232 MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
233 MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
233 MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
234 MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
234 MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
235 MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
235 MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
236 MovedAttribute("intern", "__builtin__", "sys"),
236 MovedAttribute("intern", "__builtin__", "sys"),
237 MovedAttribute("map", "itertools", "builtins", "imap", "map"),
237 MovedAttribute("map", "itertools", "builtins", "imap", "map"),
238 MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
238 MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
239 MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
239 MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
240 MovedAttribute("reduce", "__builtin__", "functools"),
240 MovedAttribute("reduce", "__builtin__", "functools"),
241 MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
241 MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
242 MovedAttribute("StringIO", "StringIO", "io"),
242 MovedAttribute("StringIO", "StringIO", "io"),
243 MovedAttribute("UserDict", "UserDict", "collections"),
243 MovedAttribute("UserDict", "UserDict", "collections"),
244 MovedAttribute("UserList", "UserList", "collections"),
244 MovedAttribute("UserList", "UserList", "collections"),
245 MovedAttribute("UserString", "UserString", "collections"),
245 MovedAttribute("UserString", "UserString", "collections"),
246 MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
246 MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
247 MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
247 MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
248 MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
248 MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
249
249
250 MovedModule("builtins", "__builtin__"),
250 MovedModule("builtins", "__builtin__"),
251 MovedModule("configparser", "ConfigParser"),
251 MovedModule("configparser", "ConfigParser"),
252 MovedModule("copyreg", "copy_reg"),
252 MovedModule("copyreg", "copy_reg"),
253 MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
253 MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
254 MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
254 MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
255 MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
255 MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
256 MovedModule("http_cookies", "Cookie", "http.cookies"),
256 MovedModule("http_cookies", "Cookie", "http.cookies"),
257 MovedModule("html_entities", "htmlentitydefs", "html.entities"),
257 MovedModule("html_entities", "htmlentitydefs", "html.entities"),
258 MovedModule("html_parser", "HTMLParser", "html.parser"),
258 MovedModule("html_parser", "HTMLParser", "html.parser"),
259 MovedModule("http_client", "httplib", "http.client"),
259 MovedModule("http_client", "httplib", "http.client"),
260 MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
260 MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
261 MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
261 MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
262 MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
262 MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
263 MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
263 MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
264 MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
264 MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
265 MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
265 MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
266 MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
266 MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
267 MovedModule("cPickle", "cPickle", "pickle"),
267 MovedModule("cPickle", "cPickle", "pickle"),
268 MovedModule("queue", "Queue"),
268 MovedModule("queue", "Queue"),
269 MovedModule("reprlib", "repr"),
269 MovedModule("reprlib", "repr"),
270 MovedModule("socketserver", "SocketServer"),
270 MovedModule("socketserver", "SocketServer"),
271 MovedModule("_thread", "thread", "_thread"),
271 MovedModule("_thread", "thread", "_thread"),
272 MovedModule("tkinter", "Tkinter"),
272 MovedModule("tkinter", "Tkinter"),
273 MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
273 MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
274 MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
274 MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
275 MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
275 MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
276 MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
276 MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
277 MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
277 MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
278 MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
278 MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
279 MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
279 MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
280 MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
280 MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
281 MovedModule("tkinter_colorchooser", "tkColorChooser",
281 MovedModule("tkinter_colorchooser", "tkColorChooser",
282 "tkinter.colorchooser"),
282 "tkinter.colorchooser"),
283 MovedModule("tkinter_commondialog", "tkCommonDialog",
283 MovedModule("tkinter_commondialog", "tkCommonDialog",
284 "tkinter.commondialog"),
284 "tkinter.commondialog"),
285 MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
285 MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
286 MovedModule("tkinter_font", "tkFont", "tkinter.font"),
286 MovedModule("tkinter_font", "tkFont", "tkinter.font"),
287 MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
287 MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
288 MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
288 MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
289 "tkinter.simpledialog"),
289 "tkinter.simpledialog"),
290 MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
290 MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
291 MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
291 MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
292 MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
292 MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
293 MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
293 MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
294 MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
294 MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
295 MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
295 MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
296 MovedModule("winreg", "_winreg"),
296 MovedModule("winreg", "_winreg"),
297 ]
297 ]
298 for attr in _moved_attributes:
298 for attr in _moved_attributes:
299 setattr(_MovedItems, attr.name, attr)
299 setattr(_MovedItems, attr.name, attr)
300 if isinstance(attr, MovedModule):
300 if isinstance(attr, MovedModule):
301 _importer._add_module(attr, "moves." + attr.name)
301 _importer._add_module(attr, "moves." + attr.name)
302 del attr
302 del attr
303
303
304 _MovedItems._moved_attributes = _moved_attributes
304 _MovedItems._moved_attributes = _moved_attributes
305
305
306 moves = _MovedItems(__name__ + ".moves")
306 moves = _MovedItems(__name__ + ".moves")
307 _importer._add_module(moves, "moves")
307 _importer._add_module(moves, "moves")
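What all of this machinery buys the caller is a single spelling that resolves to the right stdlib location on either major version. An illustrative usage, assuming the full six package (which registers _importer on sys.meta_path at import time) is installed:

from six.moves import queue, range   # Queue/xrange on PY2, queue/range on PY3

q = queue.Queue()
q.put(sum(range(5)))
print(q.get())  # -> 10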
308
308
309
309
310 class Module_six_moves_urllib_parse(_LazyModule):
310 class Module_six_moves_urllib_parse(_LazyModule):
311 """Lazy loading of moved objects in six.moves.urllib_parse"""
311 """Lazy loading of moved objects in six.moves.urllib_parse"""
312
312
313
313
314 _urllib_parse_moved_attributes = [
314 _urllib_parse_moved_attributes = [
315 MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
315 MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
316 MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
316 MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
317 MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
317 MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
318 MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
318 MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
319 MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
319 MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
320 MovedAttribute("urljoin", "urlparse", "urllib.parse"),
320 MovedAttribute("urljoin", "urlparse", "urllib.parse"),
321 MovedAttribute("urlparse", "urlparse", "urllib.parse"),
321 MovedAttribute("urlparse", "urlparse", "urllib.parse"),
322 MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
322 MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
323 MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
323 MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
324 MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
324 MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
325 MovedAttribute("quote", "urllib", "urllib.parse"),
325 MovedAttribute("quote", "urllib", "urllib.parse"),
326 MovedAttribute("quote_plus", "urllib", "urllib.parse"),
326 MovedAttribute("quote_plus", "urllib", "urllib.parse"),
327 MovedAttribute("unquote", "urllib", "urllib.parse"),
327 MovedAttribute("unquote", "urllib", "urllib.parse"),
328 MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
328 MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
329 MovedAttribute("urlencode", "urllib", "urllib.parse"),
329 MovedAttribute("urlencode", "urllib", "urllib.parse"),
330 MovedAttribute("splitquery", "urllib", "urllib.parse"),
330 MovedAttribute("splitquery", "urllib", "urllib.parse"),
331 MovedAttribute("splittag", "urllib", "urllib.parse"),
331 MovedAttribute("splittag", "urllib", "urllib.parse"),
332 MovedAttribute("splituser", "urllib", "urllib.parse"),
332 MovedAttribute("splituser", "urllib", "urllib.parse"),
333 MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
333 MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
334 MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
334 MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
335 MovedAttribute("uses_params", "urlparse", "urllib.parse"),
335 MovedAttribute("uses_params", "urlparse", "urllib.parse"),
336 MovedAttribute("uses_query", "urlparse", "urllib.parse"),
336 MovedAttribute("uses_query", "urlparse", "urllib.parse"),
337 MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
337 MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
338 ]
338 ]
339 for attr in _urllib_parse_moved_attributes:
339 for attr in _urllib_parse_moved_attributes:
340 setattr(Module_six_moves_urllib_parse, attr.name, attr)
340 setattr(Module_six_moves_urllib_parse, attr.name, attr)
341 del attr
341 del attr
342
342
343 Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
343 Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
344
344
345 _importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
345 _importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
346 "moves.urllib_parse", "moves.urllib.parse")
346 "moves.urllib_parse", "moves.urllib.parse")
347
347
348
348
349 class Module_six_moves_urllib_error(_LazyModule):
349 class Module_six_moves_urllib_error(_LazyModule):
350 """Lazy loading of moved objects in six.moves.urllib_error"""
350 """Lazy loading of moved objects in six.moves.urllib_error"""
351
351
352
352
353 _urllib_error_moved_attributes = [
353 _urllib_error_moved_attributes = [
354 MovedAttribute("URLError", "urllib2", "urllib.error"),
354 MovedAttribute("URLError", "urllib2", "urllib.error"),
355 MovedAttribute("HTTPError", "urllib2", "urllib.error"),
355 MovedAttribute("HTTPError", "urllib2", "urllib.error"),
356 MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
356 MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
357 ]
357 ]
358 for attr in _urllib_error_moved_attributes:
358 for attr in _urllib_error_moved_attributes:
359 setattr(Module_six_moves_urllib_error, attr.name, attr)
359 setattr(Module_six_moves_urllib_error, attr.name, attr)
360 del attr
360 del attr
361
361
362 Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
362 Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
363
363
364 _importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
364 _importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
365 "moves.urllib_error", "moves.urllib.error")
365 "moves.urllib_error", "moves.urllib.error")
366
366
367
367
368 class Module_six_moves_urllib_request(_LazyModule):
368 class Module_six_moves_urllib_request(_LazyModule):
369 """Lazy loading of moved objects in six.moves.urllib_request"""
369 """Lazy loading of moved objects in six.moves.urllib_request"""
370
370
371
371
372 _urllib_request_moved_attributes = [
372 _urllib_request_moved_attributes = [
373 MovedAttribute("urlopen", "urllib2", "urllib.request"),
373 MovedAttribute("urlopen", "urllib2", "urllib.request"),
374 MovedAttribute("install_opener", "urllib2", "urllib.request"),
374 MovedAttribute("install_opener", "urllib2", "urllib.request"),
375 MovedAttribute("build_opener", "urllib2", "urllib.request"),
375 MovedAttribute("build_opener", "urllib2", "urllib.request"),
376 MovedAttribute("pathname2url", "urllib", "urllib.request"),
376 MovedAttribute("pathname2url", "urllib", "urllib.request"),
377 MovedAttribute("url2pathname", "urllib", "urllib.request"),
377 MovedAttribute("url2pathname", "urllib", "urllib.request"),
378 MovedAttribute("getproxies", "urllib", "urllib.request"),
378 MovedAttribute("getproxies", "urllib", "urllib.request"),
379 MovedAttribute("Request", "urllib2", "urllib.request"),
379 MovedAttribute("Request", "urllib2", "urllib.request"),
380 MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
380 MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
381 MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
381 MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
382 MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
382 MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
383 MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
383 MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
384 MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
384 MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
385 MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
385 MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
386 MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
386 MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
387 MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
387 MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
388 MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
388 MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
389 MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
389 MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
390 MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
390 MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
391 MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
391 MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
392 MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
392 MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
393 MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
393 MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
394 MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
394 MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
395 MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
395 MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
396 MovedAttribute("FileHandler", "urllib2", "urllib.request"),
396 MovedAttribute("FileHandler", "urllib2", "urllib.request"),
397 MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
397 MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
398 MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
398 MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
399 MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
399 MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
400 MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
400 MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
401 MovedAttribute("urlretrieve", "urllib", "urllib.request"),
401 MovedAttribute("urlretrieve", "urllib", "urllib.request"),
402 MovedAttribute("urlcleanup", "urllib", "urllib.request"),
402 MovedAttribute("urlcleanup", "urllib", "urllib.request"),
403 MovedAttribute("URLopener", "urllib", "urllib.request"),
403 MovedAttribute("URLopener", "urllib", "urllib.request"),
404 MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
404 MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
405 MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
405 MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
406 ]
406 ]
407 for attr in _urllib_request_moved_attributes:
407 for attr in _urllib_request_moved_attributes:
408 setattr(Module_six_moves_urllib_request, attr.name, attr)
408 setattr(Module_six_moves_urllib_request, attr.name, attr)
409 del attr
409 del attr
410
410
411 Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
411 Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
412
412
413 _importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
413 _importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
414 "moves.urllib_request", "moves.urllib.request")
414 "moves.urllib_request", "moves.urllib.request")
415
415
416
416
417 class Module_six_moves_urllib_response(_LazyModule):
417 class Module_six_moves_urllib_response(_LazyModule):
418 """Lazy loading of moved objects in six.moves.urllib_response"""
418 """Lazy loading of moved objects in six.moves.urllib_response"""
419
419
420
420
421 _urllib_response_moved_attributes = [
421 _urllib_response_moved_attributes = [
422 MovedAttribute("addbase", "urllib", "urllib.response"),
422 MovedAttribute("addbase", "urllib", "urllib.response"),
423 MovedAttribute("addclosehook", "urllib", "urllib.response"),
423 MovedAttribute("addclosehook", "urllib", "urllib.response"),
424 MovedAttribute("addinfo", "urllib", "urllib.response"),
424 MovedAttribute("addinfo", "urllib", "urllib.response"),
425 MovedAttribute("addinfourl", "urllib", "urllib.response"),
425 MovedAttribute("addinfourl", "urllib", "urllib.response"),
426 ]
426 ]
427 for attr in _urllib_response_moved_attributes:
427 for attr in _urllib_response_moved_attributes:
428 setattr(Module_six_moves_urllib_response, attr.name, attr)
428 setattr(Module_six_moves_urllib_response, attr.name, attr)
429 del attr
429 del attr
430
430
431 Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
431 Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
432
432
433 _importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
433 _importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
434 "moves.urllib_response", "moves.urllib.response")
434 "moves.urllib_response", "moves.urllib.response")
435
435
436
436
437 class Module_six_moves_urllib_robotparser(_LazyModule):
437 class Module_six_moves_urllib_robotparser(_LazyModule):
438 """Lazy loading of moved objects in six.moves.urllib_robotparser"""
438 """Lazy loading of moved objects in six.moves.urllib_robotparser"""
439
439
440
440
441 _urllib_robotparser_moved_attributes = [
441 _urllib_robotparser_moved_attributes = [
442 MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
442 MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
443 ]
443 ]
444 for attr in _urllib_robotparser_moved_attributes:
444 for attr in _urllib_robotparser_moved_attributes:
445 setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
445 setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
446 del attr
446 del attr
447
447
448 Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
448 Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
449
449
450 _importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
450 _importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
451 "moves.urllib_robotparser", "moves.urllib.robotparser")
451 "moves.urllib_robotparser", "moves.urllib.robotparser")
452
452
453
453
454 class Module_six_moves_urllib(types.ModuleType):
454 class Module_six_moves_urllib(types.ModuleType):
455 """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
455 """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
456 __path__ = [] # mark as package
456 __path__ = [] # mark as package
457 parse = _importer._get_module("moves.urllib_parse")
457 parse = _importer._get_module("moves.urllib_parse")
458 error = _importer._get_module("moves.urllib_error")
458 error = _importer._get_module("moves.urllib_error")
459 request = _importer._get_module("moves.urllib_request")
459 request = _importer._get_module("moves.urllib_request")
460 response = _importer._get_module("moves.urllib_response")
460 response = _importer._get_module("moves.urllib_response")
461 robotparser = _importer._get_module("moves.urllib_robotparser")
461 robotparser = _importer._get_module("moves.urllib_robotparser")
462
462
463 def __dir__(self):
463 def __dir__(self):
464 return ['parse', 'error', 'request', 'response', 'robotparser']
464 return ['parse', 'error', 'request', 'response', 'robotparser']
465
465
466 _importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
466 _importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
467 "moves.urllib")
467 "moves.urllib")
468
468
469
469
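# Editorial sketch (not part of the vendored file): once this module is
# importable as `six`, the namespace registered above gives Python 2 and
# Python 3 code one import path for the split urllib stdlib:
#
#     from six.moves.urllib.parse import urlencode, urlparse
#
#     query = urlencode({"page": 1})                   # 'page=1' on both majors
#     host = urlparse("https://example.com/x").netloc  # 'example.com'
#
# On Python 3 these resolve to urllib.parse; on Python 2 the MovedAttribute
# table above redirects them to urllib/urlparse.
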
def add_move(move):
    """Add an item to six.moves."""
    setattr(_MovedItems, move.name, move)


def remove_move(name):
    """Remove item from six.moves."""
    try:
        delattr(_MovedItems, name)
    except AttributeError:
        try:
            del moves.__dict__[name]
        except KeyError:
            raise AttributeError("no such move, %r" % (name,))

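# Editorial sketch (not part of the vendored file): registering a custom
# move and removing it again, following the example from the upstream six
# documentation (assumes this module is importable as `six`):
#
#     add_move(MovedModule("mock", "mock", "unittest.mock"))
#     from six.moves import mock    # unittest.mock on Py3, mock on Py2
#     remove_move("mock")
#
# remove_move() raises AttributeError for names that were never registered.
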
if PY3:
    _meth_func = "__func__"
    _meth_self = "__self__"

    _func_closure = "__closure__"
    _func_code = "__code__"
    _func_defaults = "__defaults__"
    _func_globals = "__globals__"
else:
    _meth_func = "im_func"
    _meth_self = "im_self"

    _func_closure = "func_closure"
    _func_code = "func_code"
    _func_defaults = "func_defaults"
    _func_globals = "func_globals"


try:
    advance_iterator = next
except NameError:
    def advance_iterator(it):
        return it.next()
next = advance_iterator


try:
    callable = callable
except NameError:
    def callable(obj):
        return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)


if PY3:
    def get_unbound_function(unbound):
        return unbound

    create_bound_method = types.MethodType

    Iterator = object
else:
    def get_unbound_function(unbound):
        return unbound.im_func

    def create_bound_method(func, obj):
        return types.MethodType(func, obj, obj.__class__)

    class Iterator(object):

        def next(self):
            return type(self).__next__(self)

    callable = callable
_add_doc(get_unbound_function,
         """Get the function out of a possibly unbound function""")


get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)

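# Editorial sketch (not part of the vendored file): the attrgetters above
# paper over the im_func/__func__ and func_code/__code__ renames:
#
#     def f(x, y=2):
#         return x + y
#
#     get_function_code(f).co_name   # 'f' on Python 2 and 3
#     get_function_defaults(f)       # (2,)
#
#     class C(object):
#         def m(self):
#             pass
#
#     get_method_function(C().m) is get_unbound_function(C.m)   # True
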
if PY3:
    def iterkeys(d, **kw):
        return iter(d.keys(**kw))

    def itervalues(d, **kw):
        return iter(d.values(**kw))

    def iteritems(d, **kw):
        return iter(d.items(**kw))

    def iterlists(d, **kw):
        return iter(d.lists(**kw))

    viewkeys = operator.methodcaller("keys")

    viewvalues = operator.methodcaller("values")

    viewitems = operator.methodcaller("items")
else:
    def iterkeys(d, **kw):
        return iter(d.iterkeys(**kw))

    def itervalues(d, **kw):
        return iter(d.itervalues(**kw))

    def iteritems(d, **kw):
        return iter(d.iteritems(**kw))

    def iterlists(d, **kw):
        return iter(d.iterlists(**kw))

    viewkeys = operator.methodcaller("viewkeys")

    viewvalues = operator.methodcaller("viewvalues")

    viewitems = operator.methodcaller("viewitems")

_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems,
         "Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(iterlists,
         "Return an iterator over the (key, [values]) pairs of a dictionary.")

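# Editorial sketch (not part of the vendored file): iterating a dict
# without building intermediate lists on either interpreter:
#
#     d = {"a": 1, "b": 2}
#     for key, value in iteritems(d):   # d.iteritems() on Py2, d.items() on Py3
#         pass
#     total = sum(itervalues(d))        # 3
#
# iterlists()/viewitems() and friends only apply to objects that expose the
# corresponding methods (e.g. multi-dicts with lists()/iterlists()).
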
if PY3:
    def b(s):
        return s.encode("latin-1")
    def u(s):
        return s
    unichr = chr
    if sys.version_info[1] <= 1:
        def int2byte(i):
            return bytes((i,))
    else:
        # This is about 2x faster than the implementation above on 3.2+
        int2byte = operator.methodcaller("to_bytes", 1, "big")
    byte2int = operator.itemgetter(0)
    indexbytes = operator.getitem
    iterbytes = iter
    import io
    StringIO = io.StringIO
    BytesIO = io.BytesIO
    _assertCountEqual = "assertCountEqual"
    _assertRaisesRegex = "assertRaisesRegex"
    _assertRegex = "assertRegex"
else:
    def b(s):
        return s
    # Workaround for standalone backslash
    def u(s):
        return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
    unichr = unichr
    int2byte = chr
    def byte2int(bs):
        return ord(bs[0])
    def indexbytes(buf, i):
        return ord(buf[i])
    iterbytes = functools.partial(itertools.imap, ord)
    import StringIO
    StringIO = BytesIO = StringIO.StringIO
    _assertCountEqual = "assertItemsEqual"
    _assertRaisesRegex = "assertRaisesRegexp"
    _assertRegex = "assertRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")

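# Editorial sketch (not part of the vendored file): source-compatible
# literals for code bases that avoid u''/b'' prefixes:
#
#     data = b("GIF89a")       # bytes on Py3, str on Py2
#     text = u("caf\u00e9")    # text type on both majors
#
# b() round-trips only latin-1-safe source text; arbitrary binary data
# should be constructed with bytearray/struct instead.
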
def assertCountEqual(self, *args, **kwargs):
    return getattr(self, _assertCountEqual)(*args, **kwargs)


def assertRaisesRegex(self, *args, **kwargs):
    return getattr(self, _assertRaisesRegex)(*args, **kwargs)


def assertRegex(self, *args, **kwargs):
    return getattr(self, _assertRegex)(*args, **kwargs)

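# Editorial sketch (not part of the vendored file): these wrappers dispatch
# to the renamed unittest methods (assertItemsEqual -> assertCountEqual,
# assertRaisesRegexp -> assertRaisesRegex, ...), so a test written once
# runs on both majors:
#
#     import unittest
#
#     class ExampleTest(unittest.TestCase):
#         def test_helpers(self):
#             assertCountEqual(self, [1, 2, 2], [2, 1, 2])
#             assertRegex(self, "abc-123", r"\d+")
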
if PY3:
    exec_ = getattr(moves.builtins, "exec")


    def reraise(tp, value, tb=None):
        if value is None:
            value = tp()
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value

else:
    def exec_(_code_, _globs_=None, _locs_=None):
        """Execute code in a namespace."""
        if _globs_ is None:
            frame = sys._getframe(1)
            _globs_ = frame.f_globals
            if _locs_ is None:
                _locs_ = frame.f_locals
            del frame
        elif _locs_ is None:
            _locs_ = _globs_
        exec("""exec _code_ in _globs_, _locs_""")


    exec_("""def reraise(tp, value, tb=None):
    raise tp, value, tb
""")


if sys.version_info[:2] == (3, 2):
    exec_("""def raise_from(value, from_value):
    if from_value is None:
        raise value
    raise value from from_value
""")
elif sys.version_info[:2] > (3, 2):
    exec_("""def raise_from(value, from_value):
    raise value from from_value
""")
else:
    def raise_from(value, from_value):
        raise value

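# Editorial sketch (not part of the vendored file): re-raising with the
# original traceback and PEP 3134 chaining from one code base
# (`load_config` is a hypothetical failing call):
#
#     try:
#         load_config()
#     except KeyError as exc:
#         raise_from(ValueError("incomplete config"), exc)
#
#     exc_info = sys.exc_info()
#     reraise(*exc_info)   # re-raise preserving the traceback
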
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
    def print_(*args, **kwargs):
        """The new-style print function for Python 2.4 and 2.5."""
        fp = kwargs.pop("file", sys.stdout)
        if fp is None:
            return
        def write(data):
            if not isinstance(data, basestring):
                data = str(data)
            # If the file has an encoding, encode unicode with it.
            if (isinstance(fp, file) and
                    isinstance(data, unicode) and
                    fp.encoding is not None):
                errors = getattr(fp, "errors", None)
                if errors is None:
                    errors = "strict"
                data = data.encode(fp.encoding, errors)
            fp.write(data)
        want_unicode = False
        sep = kwargs.pop("sep", None)
        if sep is not None:
            if isinstance(sep, unicode):
                want_unicode = True
            elif not isinstance(sep, str):
                raise TypeError("sep must be None or a string")
        end = kwargs.pop("end", None)
        if end is not None:
            if isinstance(end, unicode):
                want_unicode = True
            elif not isinstance(end, str):
                raise TypeError("end must be None or a string")
        if kwargs:
            raise TypeError("invalid keyword arguments to print()")
        if not want_unicode:
            for arg in args:
                if isinstance(arg, unicode):
                    want_unicode = True
                    break
        if want_unicode:
            newline = unicode("\n")
            space = unicode(" ")
        else:
            newline = "\n"
            space = " "
        if sep is None:
            sep = space
        if end is None:
            end = newline
        for i, arg in enumerate(args):
            if i:
                write(sep)
            write(arg)
        write(end)
if sys.version_info[:2] < (3, 3):
    _print = print_
    def print_(*args, **kwargs):
        fp = kwargs.get("file", sys.stdout)
        flush = kwargs.pop("flush", False)
        _print(*args, **kwargs)
        if flush and fp is not None:
            fp.flush()

_add_doc(reraise, """Reraise an exception.""")

if sys.version_info[0:2] < (3, 4):
    def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
              updated=functools.WRAPPER_UPDATES):
        def wrapper(f):
            f = functools.wraps(wrapped, assigned, updated)(f)
            f.__wrapped__ = wrapped
            return f
        return wrapper
else:
    wraps = functools.wraps

def with_metaclass(meta, *bases):
    """Create a base class with a metaclass."""
    # This requires a bit of explanation: the basic idea is to make a dummy
    # metaclass for one level of class instantiation that replaces itself with
    # the actual metaclass.
    class metaclass(meta):
        def __new__(cls, name, this_bases, d):
            return meta(name, bases, d)
    return type.__new__(metaclass, 'temporary_class', (), {})

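# Editorial sketch (not part of the vendored file): neither Py2's
# __metaclass__ attribute nor Py3's `metaclass=` keyword is portable,
# hence the dummy-metaclass trick above:
#
#     class Meta(type):
#         registry = []
#
#         def __new__(mcls, name, bases, d):
#             cls = super(Meta, mcls).__new__(mcls, name, bases, d)
#             Meta.registry.append(cls)
#             return cls
#
#     class Base(with_metaclass(Meta, object)):
#         pass
#
# type(Base) is Meta, and 'temporary_class' never shows up in Base.__mro__.
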
def add_metaclass(metaclass):
    """Class decorator for creating a class with a metaclass."""
    def wrapper(cls):
        orig_vars = cls.__dict__.copy()
        slots = orig_vars.get('__slots__')
        if slots is not None:
            if isinstance(slots, str):
                slots = [slots]
            for slots_var in slots:
                orig_vars.pop(slots_var)
        orig_vars.pop('__dict__', None)
        orig_vars.pop('__weakref__', None)
        return metaclass(cls.__name__, cls.__bases__, orig_vars)
    return wrapper

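# Editorial sketch (not part of the vendored file): the decorator form of
# the same idea; the decorated class body is evaluated once, then rebuilt
# under the requested metaclass:
#
#     import abc
#
#     @add_metaclass(abc.ABCMeta)
#     class Serializer(object):
#         @abc.abstractmethod
#         def dumps(self, obj):
#             raise NotImplementedError
#
# The wrapper pops the per-slot descriptors (and __dict__/__weakref__) out
# of the copied namespace so the metaclass call can recreate them cleanly.
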
def python_2_unicode_compatible(klass):
    """
    A decorator that defines __unicode__ and __str__ methods under Python 2.
    Under Python 3 it does nothing.

    To support Python 2 and 3 with a single code base, define a __str__ method
    returning text and apply this decorator to the class.
    """
    if PY2:
        if '__str__' not in klass.__dict__:
            raise ValueError("@python_2_unicode_compatible cannot be applied "
                             "to %s because it doesn't define __str__()." %
                             klass.__name__)
        klass.__unicode__ = klass.__str__
        klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
    return klass

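# Editorial sketch (not part of the vendored file):
#
#     @python_2_unicode_compatible
#     class User(object):
#         def __init__(self, name):
#             self.name = name
#
#         def __str__(self):
#             return self.name   # return text; byte encoding is handled below
#
# On Py2 the decorator moves __str__ to __unicode__ and installs a UTF-8
# encoding __str__; on Py3 the class is returned unchanged.
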
# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = []  # required for PEP 302 and PEP 451
__package__ = __name__  # see PEP 366 @ReservedAssignment
if globals().get("__spec__") is not None:
    __spec__.submodule_search_locations = []  # PEP 451 @UndefinedVariable
# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
if sys.meta_path:
    for i, importer in enumerate(sys.meta_path):
        # Here's some real nastiness: another "instance" of the six module
        # might be floating around, so we can't use isinstance() to check for
        # the six meta path importer; the other six instance will have
        # inserted an importer with a different class.
        if (type(importer).__name__ == "_SixMetaPathImporter" and
                importer.name == __name__):
            del sys.meta_path[i]
            break
    del i, importer
# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)
@@ -1,68 +1,68 b''
-from __future__ import absolute_import, division, unicode_literals


import functools
from time import perf_counter as time_now


def safe_wraps(wrapper, *args, **kwargs):
    """Safely wraps partial functions."""
    while isinstance(wrapper, functools.partial):
        wrapper = wrapper.func
    return functools.wraps(wrapper, *args, **kwargs)


class Timer(object):
    """A context manager/decorator for statsd.timing()."""

    def __init__(self, client, stat, rate=1, tags=None, use_decimals=True, auto_send=True):
        self.client = client
        self.stat = stat
        self.rate = rate
        self.tags = tags
        self.ms = None
        self._sent = False
        self._start_time = None
        self.use_decimals = use_decimals
        self.auto_send = auto_send

    def __call__(self, f):
        """Thread-safe timing function decorator."""
        @safe_wraps(f)
        def _wrapped(*args, **kwargs):
            start_time = time_now()
            try:
                return f(*args, **kwargs)
            finally:
                elapsed_time_ms = 1000.0 * (time_now() - start_time)
                self.client.timing(self.stat, elapsed_time_ms, self.rate, self.tags, self.use_decimals)
                self._sent = True
        return _wrapped

    def __enter__(self):
        return self.start()

    def __exit__(self, typ, value, tb):
        self.stop(send=self.auto_send)

    def start(self):
        self.ms = None
        self._sent = False
        self._start_time = time_now()
        return self

    def stop(self, send=True):
        if self._start_time is None:
            raise RuntimeError('Timer has not started.')
        dt = time_now() - self._start_time
        self.ms = 1000.0 * dt  # Convert to milliseconds.
        if send:
            self.send()
        return self

    def send(self):
        if self.ms is None:
            raise RuntimeError('No data recorded.')
        if self._sent:
            raise RuntimeError('Already sent data.')
        self._sent = True
        self.client.timing(self.stat, self.ms, self.rate, self.tags, self.use_decimals)
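
# Editorial sketch (not part of the vendored file): `statsd_client` below
# is a hypothetical stand-in for any client object exposing the
# timing(stat, ms, rate, tags, use_decimals) method this class calls:
#
#     with Timer(statsd_client, 'vcs.push'):
#         handle_push()                  # elapsed ms sent on __exit__
#
#     @Timer(statsd_client, 'api.call')
#     def api_call():
#         ...
#
# In decorator form each call measures its own start_time local to
# _wrapped, which is what makes a shared Timer instance thread-safe.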
@@ -1,1919 +1,1919 b''

-from __future__ import division


"""Diff Match and Patch

Copyright 2006 Google Inc.
http://code.google.com/p/google-diff-match-patch/

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

  http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

"""Functions for diff, match and patch.

Computes the difference between two texts to create a patch.
Applies the patch onto another text, allowing for errors.
"""

__author__ = 'fraser@google.com (Neil Fraser)'

import math
import re
import sys
import time
import urllib


class diff_match_patch:
    """Class containing the diff, match and patch methods.

    Also contains the behaviour settings.
    """

    def __init__(self):
        """Inits a diff_match_patch object with default settings.
        Redefine these in your program to override the defaults.
        """

        # Number of seconds to map a diff before giving up (0 for infinity).
        self.Diff_Timeout = 1.0
        # Cost of an empty edit operation in terms of edit characters.
        self.Diff_EditCost = 4
        # At what point is no match declared (0.0 = perfection, 1.0 = very loose).
        self.Match_Threshold = 0.5
        # How far to search for a match (0 = exact location, 1000+ = broad match).
        # A match this many characters away from the expected location will add
        # 1.0 to the score (0.0 is a perfect match).
        self.Match_Distance = 1000
        # When deleting a large block of text (over ~64 characters), how close do
        # the contents have to be to match the expected contents. (0.0 = perfection,
        # 1.0 = very loose). Note that Match_Threshold controls how closely the
        # end points of a delete need to match.
        self.Patch_DeleteThreshold = 0.5
        # Chunk size for context length.
        self.Patch_Margin = 4

        # The number of bits in an int.
        # Python has no maximum, thus to disable patch splitting set to 0.
        # However to avoid long patches in certain pathological cases, use 32.
        # Multiple short patches (using native ints) are much faster than long ones.
        self.Match_MaxBits = 32

    # DIFF FUNCTIONS

    # The data structure representing a diff is an array of tuples:
    # [(DIFF_DELETE, "Hello"), (DIFF_INSERT, "Goodbye"), (DIFF_EQUAL, " world.")]
    # which means: delete "Hello", add "Goodbye" and keep " world."
    DIFF_DELETE = -1
    DIFF_INSERT = 1
    DIFF_EQUAL = 0

    def diff_main(self, text1, text2, checklines=True, deadline=None):
        """Find the differences between two texts.  Simplifies the problem by
        stripping any common prefix or suffix off the texts before diffing.

        Args:
          text1: Old string to be diffed.
          text2: New string to be diffed.
          checklines: Optional speedup flag.  If present and false, then don't
            run a line-level diff first to identify the changed areas.
            Defaults to true, which does a faster, slightly less optimal diff.
          deadline: Optional time when the diff should be complete by.  Used
            internally for recursive calls.  Users should set Diff_Timeout instead.

        Returns:
          Array of changes.
        """
        # Set a deadline by which time the diff must be complete.
        if deadline is None:
            # Unlike in most languages, Python counts time in seconds.
            if self.Diff_Timeout <= 0:
                deadline = sys.maxsize  # sys.maxint was removed in Python 3.
            else:
                deadline = time.time() + self.Diff_Timeout

        # Check for null inputs.
        if text1 is None or text2 is None:
            raise ValueError("Null inputs. (diff_main)")

        # Check for equality (speedup).
        if text1 == text2:
            if text1:
                return [(self.DIFF_EQUAL, text1)]
            return []

        # Trim off common prefix (speedup).
        commonlength = self.diff_commonPrefix(text1, text2)
        commonprefix = text1[:commonlength]
        text1 = text1[commonlength:]
        text2 = text2[commonlength:]

        # Trim off common suffix (speedup).
        commonlength = self.diff_commonSuffix(text1, text2)
        if commonlength == 0:
            commonsuffix = ''
        else:
            commonsuffix = text1[-commonlength:]
            text1 = text1[:-commonlength]
            text2 = text2[:-commonlength]

        # Compute the diff on the middle block.
        diffs = self.diff_compute(text1, text2, checklines, deadline)

        # Restore the prefix and suffix.
        if commonprefix:
            diffs[:0] = [(self.DIFF_EQUAL, commonprefix)]
        if commonsuffix:
            diffs.append((self.DIFF_EQUAL, commonsuffix))
        self.diff_cleanupMerge(diffs)
        return diffs

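    # Editorial sketch (not part of the vendored file): typical round trip,
    # using the canonical example from the diff-match-patch documentation:
    #
    #     dmp = diff_match_patch()
    #     diffs = dmp.diff_main("Hello world.", "Goodbye world.")
    #     dmp.diff_cleanupSemantic(diffs)
    #     # -> [(-1, 'Hello'), (1, 'Goodbye'), (0, ' world.')]
    #
    # The pre-cleanup split can be more fragmented; semantic cleanup merges
    # it into the human-readable delete/insert/equal form shown.
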
    def diff_compute(self, text1, text2, checklines, deadline):
        """Find the differences between two texts.  Assumes that the texts do not
        have any common prefix or suffix.

        Args:
          text1: Old string to be diffed.
          text2: New string to be diffed.
          checklines: Speedup flag.  If false, then don't run a line-level diff
            first to identify the changed areas.
            If true, then run a faster, slightly less optimal diff.
          deadline: Time when the diff should be complete by.

        Returns:
          Array of changes.
        """
        if not text1:
            # Just add some text (speedup).
            return [(self.DIFF_INSERT, text2)]

        if not text2:
            # Just delete some text (speedup).
            return [(self.DIFF_DELETE, text1)]

        if len(text1) > len(text2):
            (longtext, shorttext) = (text1, text2)
        else:
            (shorttext, longtext) = (text1, text2)
        i = longtext.find(shorttext)
        if i != -1:
            # Shorter text is inside the longer text (speedup).
            diffs = [(self.DIFF_INSERT, longtext[:i]), (self.DIFF_EQUAL, shorttext),
                     (self.DIFF_INSERT, longtext[i + len(shorttext):])]
            # Swap insertions for deletions if diff is reversed.
            if len(text1) > len(text2):
                diffs[0] = (self.DIFF_DELETE, diffs[0][1])
                diffs[2] = (self.DIFF_DELETE, diffs[2][1])
            return diffs

        if len(shorttext) == 1:
            # Single character string.
            # After the previous speedup, the character can't be an equality.
            return [(self.DIFF_DELETE, text1), (self.DIFF_INSERT, text2)]

        # Check to see if the problem can be split in two.
        hm = self.diff_halfMatch(text1, text2)
        if hm:
            # A half-match was found, sort out the return data.
            (text1_a, text1_b, text2_a, text2_b, mid_common) = hm
            # Send both pairs off for separate processing.
            diffs_a = self.diff_main(text1_a, text2_a, checklines, deadline)
            diffs_b = self.diff_main(text1_b, text2_b, checklines, deadline)
            # Merge the results.
            return diffs_a + [(self.DIFF_EQUAL, mid_common)] + diffs_b

        if checklines and len(text1) > 100 and len(text2) > 100:
            return self.diff_lineMode(text1, text2, deadline)

        return self.diff_bisect(text1, text2, deadline)

    def diff_lineMode(self, text1, text2, deadline):
        """Do a quick line-level diff on both strings, then rediff the parts for
        greater accuracy.
        This speedup can produce non-minimal diffs.

        Args:
          text1: Old string to be diffed.
          text2: New string to be diffed.
          deadline: Time when the diff should be complete by.

        Returns:
          Array of changes.
        """

        # Scan the text on a line-by-line basis first.
        (text1, text2, linearray) = self.diff_linesToChars(text1, text2)

        diffs = self.diff_main(text1, text2, False, deadline)

        # Convert the diff back to original text.
        self.diff_charsToLines(diffs, linearray)
        # Eliminate freak matches (e.g. blank lines)
        self.diff_cleanupSemantic(diffs)

        # Rediff any replacement blocks, this time character-by-character.
        # Add a dummy entry at the end.
        diffs.append((self.DIFF_EQUAL, ''))
        pointer = 0
        count_delete = 0
        count_insert = 0
        text_delete = ''
        text_insert = ''
        while pointer < len(diffs):
            if diffs[pointer][0] == self.DIFF_INSERT:
                count_insert += 1
                text_insert += diffs[pointer][1]
            elif diffs[pointer][0] == self.DIFF_DELETE:
                count_delete += 1
                text_delete += diffs[pointer][1]
            elif diffs[pointer][0] == self.DIFF_EQUAL:
                # Upon reaching an equality, check for prior redundancies.
                if count_delete >= 1 and count_insert >= 1:
                    # Delete the offending records and add the merged ones.
                    a = self.diff_main(text_delete, text_insert, False, deadline)
                    diffs[pointer - count_delete - count_insert : pointer] = a
                    pointer = pointer - count_delete - count_insert + len(a)
                count_insert = 0
                count_delete = 0
                text_delete = ''
                text_insert = ''

            pointer += 1

        diffs.pop()  # Remove the dummy entry at the end.

        return diffs

    def diff_bisect(self, text1, text2, deadline):
        """Find the 'middle snake' of a diff, split the problem in two
        and return the recursively constructed diff.
        See Myers 1986 paper: An O(ND) Difference Algorithm and Its Variations.

        Args:
            text1: Old string to be diffed.
            text2: New string to be diffed.
            deadline: Time at which to bail if not yet complete.

        Returns:
            Array of diff tuples.
        """

        # Cache the text lengths to prevent multiple calls.
        text1_length = len(text1)
        text2_length = len(text2)
        max_d = (text1_length + text2_length + 1) // 2
        v_offset = max_d
        v_length = 2 * max_d
        v1 = [-1] * v_length
        v1[v_offset + 1] = 0
        v2 = v1[:]
        delta = text1_length - text2_length
        # If the total number of characters is odd, then the front path will
        # collide with the reverse path.
        front = (delta % 2 != 0)
        # Offsets for start and end of k loop.
        # Prevents mapping of space beyond the grid.
        k1start = 0
        k1end = 0
        k2start = 0
        k2end = 0
        for d in range(max_d):
            # Bail out if deadline is reached.
            if time.time() > deadline:
                break

            # Walk the front path one step.
            for k1 in range(-d + k1start, d + 1 - k1end, 2):
                k1_offset = v_offset + k1
                if k1 == -d or (k1 != d and
                                v1[k1_offset - 1] < v1[k1_offset + 1]):
                    x1 = v1[k1_offset + 1]
                else:
                    x1 = v1[k1_offset - 1] + 1
                y1 = x1 - k1
                while (x1 < text1_length and y1 < text2_length and
                       text1[x1] == text2[y1]):
                    x1 += 1
                    y1 += 1
                v1[k1_offset] = x1
                if x1 > text1_length:
                    # Ran off the right of the graph.
                    k1end += 2
                elif y1 > text2_length:
                    # Ran off the bottom of the graph.
                    k1start += 2
                elif front:
                    k2_offset = v_offset + delta - k1
                    if k2_offset >= 0 and k2_offset < v_length and v2[k2_offset] != -1:
                        # Mirror x2 onto top-left coordinate system.
                        x2 = text1_length - v2[k2_offset]
                        if x1 >= x2:
                            # Overlap detected.
                            return self.diff_bisectSplit(text1, text2, x1, y1, deadline)

            # Walk the reverse path one step.
            for k2 in range(-d + k2start, d + 1 - k2end, 2):
                k2_offset = v_offset + k2
                if k2 == -d or (k2 != d and
                                v2[k2_offset - 1] < v2[k2_offset + 1]):
                    x2 = v2[k2_offset + 1]
                else:
                    x2 = v2[k2_offset - 1] + 1
                y2 = x2 - k2
                while (x2 < text1_length and y2 < text2_length and
                       text1[-x2 - 1] == text2[-y2 - 1]):
                    x2 += 1
                    y2 += 1
                v2[k2_offset] = x2
                if x2 > text1_length:
                    # Ran off the left of the graph.
                    k2end += 2
                elif y2 > text2_length:
                    # Ran off the top of the graph.
                    k2start += 2
                elif not front:
                    k1_offset = v_offset + delta - k2
                    if k1_offset >= 0 and k1_offset < v_length and v1[k1_offset] != -1:
                        x1 = v1[k1_offset]
                        y1 = v_offset + x1 - k1_offset
                        # Mirror x2 onto top-left coordinate system.
                        x2 = text1_length - x2
                        if x1 >= x2:
                            # Overlap detected.
                            return self.diff_bisectSplit(text1, text2, x1, y1, deadline)

        # Diff took too long and hit the deadline or
        # number of diffs equals number of characters, no commonality at all.
        return [(self.DIFF_DELETE, text1), (self.DIFF_INSERT, text2)]

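    # Usage sketch (dmp as above; DIFF_DELETE == -1, DIFF_INSERT == 1,
    # DIFF_EQUAL == 0):
    #
    #     dmp.diff_bisect("cat", "map", time.time() + 1)
    #     # -> [(-1, "c"), (1, "m"), (0, "a"), (-1, "t"), (1, "p")]
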
    def diff_bisectSplit(self, text1, text2, x, y, deadline):
        """Given the location of the 'middle snake', split the diff in two parts
        and recurse.

        Args:
            text1: Old string to be diffed.
            text2: New string to be diffed.
            x: Index of split point in text1.
            y: Index of split point in text2.
            deadline: Time at which to bail if not yet complete.

        Returns:
            Array of diff tuples.
        """
        text1a = text1[:x]
        text2a = text2[:y]
        text1b = text1[x:]
        text2b = text2[y:]

        # Compute both diffs serially.
        diffs = self.diff_main(text1a, text2a, False, deadline)
        diffsb = self.diff_main(text1b, text2b, False, deadline)

        return diffs + diffsb

    def diff_linesToChars(self, text1, text2):
        """Split two texts into an array of strings.  Reduce the texts to a string
        of hashes where each Unicode character represents one line.

        Args:
            text1: First string.
            text2: Second string.

        Returns:
            Three element tuple, containing the encoded text1, the encoded text2 and
            the array of unique strings.  The zeroth element of the array of unique
            strings is intentionally blank.
        """
        lineArray = []  # e.g. lineArray[4] == "Hello\n"
        lineHash = {}   # e.g. lineHash["Hello\n"] == 4

        # "\x00" is a valid character, but various debuggers don't like it.
        # So we'll insert a junk entry to avoid generating a null character.
        lineArray.append('')

        def diff_linesToCharsMunge(text):
            """Split a text into an array of strings.  Reduce the texts to a string
            of hashes where each Unicode character represents one line.
            Modifies lineArray and lineHash through being a closure.

            Args:
                text: String to encode.

            Returns:
                Encoded string.
            """
            chars = []
            # Walk the text, pulling out a substring for each line.
            # text.split('\n') would temporarily double our memory footprint.
            # Modifying text would create many large strings to garbage collect.
            lineStart = 0
            lineEnd = -1
            while lineEnd < len(text) - 1:
                lineEnd = text.find('\n', lineStart)
                if lineEnd == -1:
                    lineEnd = len(text) - 1
                line = text[lineStart:lineEnd + 1]
                lineStart = lineEnd + 1

                if line in lineHash:
                    chars.append(chr(lineHash[line]))
                else:
                    lineArray.append(line)
                    lineHash[line] = len(lineArray) - 1
                    chars.append(chr(len(lineArray) - 1))
            return "".join(chars)

        chars1 = diff_linesToCharsMunge(text1)
        chars2 = diff_linesToCharsMunge(text2)
        return (chars1, chars2, lineArray)

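    # Usage sketch (dmp as above); note the intentionally blank zeroth entry
    # of the line array:
    #
    #     dmp.diff_linesToChars("alpha\nbeta\nalpha\n", "beta\nalpha\nbeta\n")
    #     # -> ("\x01\x02\x01", "\x02\x01\x02", ["", "alpha\n", "beta\n"])
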
    def diff_charsToLines(self, diffs, lineArray):
        """Rehydrate the text in a diff from a string of line hashes to real lines
        of text.

        Args:
            diffs: Array of diff tuples.
            lineArray: Array of unique strings.
        """
        for x in range(len(diffs)):
            text = []
            for char in diffs[x][1]:
                text.append(lineArray[ord(char)])
            diffs[x] = (diffs[x][0], "".join(text))

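    # Usage sketch, the inverse of diff_linesToChars (mutates `diffs` in place):
    #
    #     diffs = [(0, "\x01\x02\x01"), (1, "\x02\x01\x02")]
    #     dmp.diff_charsToLines(diffs, ["", "alpha\n", "beta\n"])
    #     # diffs == [(0, "alpha\nbeta\nalpha\n"), (1, "beta\nalpha\nbeta\n")]
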
    def diff_commonPrefix(self, text1, text2):
        """Determine the common prefix of two strings.

        Args:
            text1: First string.
            text2: Second string.

        Returns:
            The number of characters common to the start of each string.
        """
        # Quick check for common null cases.
        if not text1 or not text2 or text1[0] != text2[0]:
            return 0
        # Binary search.
        # Performance analysis: http://neil.fraser.name/news/2007/10/09/
        pointermin = 0
        pointermax = min(len(text1), len(text2))
        pointermid = pointermax
        pointerstart = 0
        while pointermin < pointermid:
            if text1[pointerstart:pointermid] == text2[pointerstart:pointermid]:
                pointermin = pointermid
                pointerstart = pointermin
            else:
                pointermax = pointermid
            pointermid = (pointermax - pointermin) // 2 + pointermin
        return pointermid

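    # Usage sketch (dmp as above):
    #
    #     dmp.diff_commonPrefix("1234abcdef", "1234xyz")  # -> 4
    #     dmp.diff_commonPrefix("abc", "xyz")             # -> 0
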
    def diff_commonSuffix(self, text1, text2):
        """Determine the common suffix of two strings.

        Args:
            text1: First string.
            text2: Second string.

        Returns:
            The number of characters common to the end of each string.
        """
        # Quick check for common null cases.
        if not text1 or not text2 or text1[-1] != text2[-1]:
            return 0
        # Binary search.
        # Performance analysis: http://neil.fraser.name/news/2007/10/09/
        pointermin = 0
        pointermax = min(len(text1), len(text2))
        pointermid = pointermax
        pointerend = 0
        while pointermin < pointermid:
            if (text1[-pointermid:len(text1) - pointerend] ==
                    text2[-pointermid:len(text2) - pointerend]):
                pointermin = pointermid
                pointerend = pointermin
            else:
                pointermax = pointermid
            pointermid = (pointermax - pointermin) // 2 + pointermin
        return pointermid

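    # Usage sketch, mirroring diff_commonPrefix at the other end:
    #
    #     dmp.diff_commonSuffix("abcdef1234", "xyz1234")  # -> 4
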
    def diff_commonOverlap(self, text1, text2):
        """Determine if the suffix of one string is the prefix of another.

        Args:
            text1: First string.
            text2: Second string.

        Returns:
            The number of characters common to the end of the first
            string and the start of the second string.
        """
        # Cache the text lengths to prevent multiple calls.
        text1_length = len(text1)
        text2_length = len(text2)
        # Eliminate the null case.
        if text1_length == 0 or text2_length == 0:
            return 0
        # Truncate the longer string.
        if text1_length > text2_length:
            text1 = text1[-text2_length:]
        elif text1_length < text2_length:
            text2 = text2[:text1_length]
        text_length = min(text1_length, text2_length)
        # Quick check for the worst case.
        if text1 == text2:
            return text_length

        # Start by looking for a single character match
        # and increase length until no match is found.
        # Performance analysis: http://neil.fraser.name/news/2010/11/04/
        best = 0
        length = 1
        while True:
            pattern = text1[-length:]
            found = text2.find(pattern)
            if found == -1:
                return best
            length += found
            if found == 0 or text1[-length:] == text2[:length]:
                best = length
                length += 1

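    # Usage sketch: the suffix "xxx" of the first string is the prefix of the
    # second.
    #
    #     dmp.diff_commonOverlap("123456xxx", "xxxabcd")  # -> 3
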
    def diff_halfMatch(self, text1, text2):
        """Do the two texts share a substring which is at least half the length of
        the longer text?
        This speedup can produce non-minimal diffs.

        Args:
            text1: First string.
            text2: Second string.

        Returns:
            Five element Array, containing the prefix of text1, the suffix of text1,
            the prefix of text2, the suffix of text2 and the common middle.  Or None
            if there was no match.
        """
        if self.Diff_Timeout <= 0:
            # Don't risk returning a non-optimal diff if we have unlimited time.
            return None
        if len(text1) > len(text2):
            (longtext, shorttext) = (text1, text2)
        else:
            (shorttext, longtext) = (text1, text2)
        if len(longtext) < 4 or len(shorttext) * 2 < len(longtext):
            return None  # Pointless.

        def diff_halfMatchI(longtext, shorttext, i):
            """Does a substring of shorttext exist within longtext such that the
            substring is at least half the length of longtext?
            Closure, but does not reference any external variables.

            Args:
                longtext: Longer string.
                shorttext: Shorter string.
                i: Start index of quarter length substring within longtext.

            Returns:
                Five element Array, containing the prefix of longtext, the suffix of
                longtext, the prefix of shorttext, the suffix of shorttext and the
                common middle.  Or None if there was no match.
            """
            seed = longtext[i:i + len(longtext) // 4]
            best_common = ''
            j = shorttext.find(seed)
            while j != -1:
                prefixLength = self.diff_commonPrefix(longtext[i:], shorttext[j:])
                suffixLength = self.diff_commonSuffix(longtext[:i], shorttext[:j])
                if len(best_common) < suffixLength + prefixLength:
                    best_common = (shorttext[j - suffixLength:j] +
                                   shorttext[j:j + prefixLength])
                    best_longtext_a = longtext[:i - suffixLength]
                    best_longtext_b = longtext[i + prefixLength:]
                    best_shorttext_a = shorttext[:j - suffixLength]
                    best_shorttext_b = shorttext[j + prefixLength:]
                j = shorttext.find(seed, j + 1)

            if len(best_common) * 2 >= len(longtext):
                return (best_longtext_a, best_longtext_b,
                        best_shorttext_a, best_shorttext_b, best_common)
            else:
                return None

        # First check if the second quarter is the seed for a half-match.
        hm1 = diff_halfMatchI(longtext, shorttext, (len(longtext) + 3) // 4)
        # Check again based on the third quarter.
        hm2 = diff_halfMatchI(longtext, shorttext, (len(longtext) + 1) // 2)
        if not hm1 and not hm2:
            return None
        elif not hm2:
            hm = hm1
        elif not hm1:
            hm = hm2
        else:
            # Both matched.  Select the longest.
            if len(hm1[4]) > len(hm2[4]):
                hm = hm1
            else:
                hm = hm2

        # A half-match was found, sort out the return data.
        if len(text1) > len(text2):
            (text1_a, text1_b, text2_a, text2_b, mid_common) = hm
        else:
            (text2_a, text2_b, text1_a, text1_b, mid_common) = hm
        return (text1_a, text1_b, text2_a, text2_b, mid_common)

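    # Usage sketch: only active when Diff_Timeout > 0 (see the guard above).
    # The common middle "345678" is more than half the longer text:
    #
    #     dmp.Diff_Timeout = 1
    #     dmp.diff_halfMatch("1234567890", "a345678z")
    #     # -> ("12", "90", "a", "z", "345678")
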
    def diff_cleanupSemantic(self, diffs):
        """Reduce the number of edits by eliminating semantically trivial
        equalities.

        Args:
            diffs: Array of diff tuples.
        """
        changes = False
        equalities = []  # Stack of indices where equalities are found.
        lastequality = None  # Always equal to diffs[equalities[-1]][1]
        pointer = 0  # Index of current position.
        # Number of chars that changed prior to the equality.
        length_insertions1, length_deletions1 = 0, 0
        # Number of chars that changed after the equality.
        length_insertions2, length_deletions2 = 0, 0
        while pointer < len(diffs):
            if diffs[pointer][0] == self.DIFF_EQUAL:  # Equality found.
                equalities.append(pointer)
                length_insertions1, length_insertions2 = length_insertions2, 0
                length_deletions1, length_deletions2 = length_deletions2, 0
                lastequality = diffs[pointer][1]
            else:  # An insertion or deletion.
                if diffs[pointer][0] == self.DIFF_INSERT:
                    length_insertions2 += len(diffs[pointer][1])
                else:
                    length_deletions2 += len(diffs[pointer][1])
                # Eliminate an equality that is smaller or equal to the edits on both
                # sides of it.
                if (lastequality and
                        (len(lastequality) <= max(length_insertions1, length_deletions1)) and
                        (len(lastequality) <= max(length_insertions2, length_deletions2))):
                    # Duplicate record.
                    diffs.insert(equalities[-1], (self.DIFF_DELETE, lastequality))
                    # Change second copy to insert.
                    diffs[equalities[-1] + 1] = (self.DIFF_INSERT,
                                                 diffs[equalities[-1] + 1][1])
                    # Throw away the equality we just deleted.
                    equalities.pop()
                    # Throw away the previous equality (it needs to be reevaluated).
                    if len(equalities):
                        equalities.pop()
                    if len(equalities):
                        pointer = equalities[-1]
                    else:
                        pointer = -1
                    # Reset the counters.
                    length_insertions1, length_deletions1 = 0, 0
                    length_insertions2, length_deletions2 = 0, 0
                    lastequality = None
                    changes = True
            pointer += 1

        # Normalize the diff.
        if changes:
            self.diff_cleanupMerge(diffs)
        self.diff_cleanupSemanticLossless(diffs)

        # Find any overlaps between deletions and insertions.
        # e.g: <del>abcxxx</del><ins>xxxdef</ins>
        #   -> <del>abc</del>xxx<ins>def</ins>
        # e.g: <del>xxxabc</del><ins>defxxx</ins>
        #   -> <ins>def</ins>xxx<del>abc</del>
        # Only extract an overlap if it is as big as the edit ahead or behind it.
        pointer = 1
        while pointer < len(diffs):
            if (diffs[pointer - 1][0] == self.DIFF_DELETE and
                    diffs[pointer][0] == self.DIFF_INSERT):
                deletion = diffs[pointer - 1][1]
                insertion = diffs[pointer][1]
                overlap_length1 = self.diff_commonOverlap(deletion, insertion)
                overlap_length2 = self.diff_commonOverlap(insertion, deletion)
                if overlap_length1 >= overlap_length2:
                    if (overlap_length1 >= len(deletion) / 2.0 or
                            overlap_length1 >= len(insertion) / 2.0):
                        # Overlap found.  Insert an equality and trim the surrounding edits.
                        diffs.insert(pointer, (self.DIFF_EQUAL,
                                               insertion[:overlap_length1]))
                        diffs[pointer - 1] = (self.DIFF_DELETE,
                                              deletion[:len(deletion) - overlap_length1])
                        diffs[pointer + 1] = (self.DIFF_INSERT,
                                              insertion[overlap_length1:])
                        pointer += 1
                else:
                    if (overlap_length2 >= len(deletion) / 2.0 or
                            overlap_length2 >= len(insertion) / 2.0):
                        # Reverse overlap found.
                        # Insert an equality and swap and trim the surrounding edits.
                        diffs.insert(pointer, (self.DIFF_EQUAL, deletion[:overlap_length2]))
                        diffs[pointer - 1] = (self.DIFF_INSERT,
                                              insertion[:len(insertion) - overlap_length2])
                        diffs[pointer + 1] = (self.DIFF_DELETE, deletion[overlap_length2:])
                        pointer += 1
                pointer += 1
            pointer += 1

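    # Usage sketch: the equality "b" is no longer than the edits on either
    # side, so it is folded into them (mutates `diffs` in place):
    #
    #     diffs = [(-1, "a"), (0, "b"), (-1, "c")]
    #     dmp.diff_cleanupSemantic(diffs)
    #     # diffs == [(-1, "abc"), (1, "b")]
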
    def diff_cleanupSemanticLossless(self, diffs):
        """Look for single edits surrounded on both sides by equalities
        which can be shifted sideways to align the edit to a word boundary.
        e.g: The c<ins>at c</ins>ame. -> The <ins>cat </ins>came.

        Args:
            diffs: Array of diff tuples.
        """

        def diff_cleanupSemanticScore(one, two):
            """Given two strings, compute a score representing whether the
            internal boundary falls on logical boundaries.
            Scores range from 6 (best) to 0 (worst).
            Closure, but does not reference any external variables.

            Args:
                one: First string.
                two: Second string.

            Returns:
                The score.
            """
            if not one or not two:
                # Edges are the best.
                return 6

            # Each port of this function behaves slightly differently due to
            # subtle differences in each language's definition of things like
            # 'whitespace'.  Since this function's purpose is largely cosmetic,
            # the choice has been made to use each language's native features
            # rather than force total conformity.
            char1 = one[-1]
            char2 = two[0]
            nonAlphaNumeric1 = not char1.isalnum()
            nonAlphaNumeric2 = not char2.isalnum()
            whitespace1 = nonAlphaNumeric1 and char1.isspace()
            whitespace2 = nonAlphaNumeric2 and char2.isspace()
            lineBreak1 = whitespace1 and (char1 == "\r" or char1 == "\n")
            lineBreak2 = whitespace2 and (char2 == "\r" or char2 == "\n")
            blankLine1 = lineBreak1 and self.BLANKLINEEND.search(one)
            blankLine2 = lineBreak2 and self.BLANKLINESTART.match(two)

            if blankLine1 or blankLine2:
                # Five points for blank lines.
                return 5
            elif lineBreak1 or lineBreak2:
                # Four points for line breaks.
                return 4
            elif nonAlphaNumeric1 and not whitespace1 and whitespace2:
                # Three points for end of sentences.
                return 3
            elif whitespace1 or whitespace2:
                # Two points for whitespace.
                return 2
            elif nonAlphaNumeric1 or nonAlphaNumeric2:
                # One point for non-alphanumeric.
                return 1
            return 0

        pointer = 1
        # Intentionally ignore the first and last element (don't need checking).
        while pointer < len(diffs) - 1:
            if (diffs[pointer - 1][0] == self.DIFF_EQUAL and
                    diffs[pointer + 1][0] == self.DIFF_EQUAL):
                # This is a single edit surrounded by equalities.
                equality1 = diffs[pointer - 1][1]
                edit = diffs[pointer][1]
                equality2 = diffs[pointer + 1][1]

                # First, shift the edit as far left as possible.
                commonOffset = self.diff_commonSuffix(equality1, edit)
                if commonOffset:
                    commonString = edit[-commonOffset:]
                    equality1 = equality1[:-commonOffset]
                    edit = commonString + edit[:-commonOffset]
                    equality2 = commonString + equality2

                # Second, step character by character right, looking for the best fit.
                bestEquality1 = equality1
                bestEdit = edit
                bestEquality2 = equality2
                bestScore = (diff_cleanupSemanticScore(equality1, edit) +
                             diff_cleanupSemanticScore(edit, equality2))
                while edit and equality2 and edit[0] == equality2[0]:
                    equality1 += edit[0]
                    edit = edit[1:] + equality2[0]
                    equality2 = equality2[1:]
                    score = (diff_cleanupSemanticScore(equality1, edit) +
                             diff_cleanupSemanticScore(edit, equality2))
                    # The >= encourages trailing rather than leading whitespace on edits.
                    if score >= bestScore:
                        bestScore = score
                        bestEquality1 = equality1
                        bestEdit = edit
                        bestEquality2 = equality2

                if diffs[pointer - 1][1] != bestEquality1:
                    # We have an improvement, save it back to the diff.
                    if bestEquality1:
                        diffs[pointer - 1] = (diffs[pointer - 1][0], bestEquality1)
                    else:
                        del diffs[pointer - 1]
                        pointer -= 1
                    diffs[pointer] = (diffs[pointer][0], bestEdit)
                    if bestEquality2:
                        diffs[pointer + 1] = (diffs[pointer + 1][0], bestEquality2)
                    else:
                        del diffs[pointer + 1]
                        pointer -= 1
            pointer += 1

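    # Usage sketch: the insertion is slid sideways to start on a word boundary
    # (mutates `diffs` in place):
    #
    #     diffs = [(0, "The c"), (1, "ow and the c"), (0, "at.")]
    #     dmp.diff_cleanupSemanticLossless(diffs)
    #     # diffs == [(0, "The "), (1, "cow and the "), (0, "cat.")]
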
    # Define some regex patterns for matching boundaries.
    BLANKLINEEND = re.compile(r"\n\r?\n$")
    BLANKLINESTART = re.compile(r"^\r?\n\r?\n")

    def diff_cleanupEfficiency(self, diffs):
        """Reduce the number of edits by eliminating operationally trivial
        equalities.

        Args:
            diffs: Array of diff tuples.
        """
        changes = False
        equalities = []  # Stack of indices where equalities are found.
        lastequality = None  # Always equal to diffs[equalities[-1]][1]
        pointer = 0  # Index of current position.
        pre_ins = False  # Is there an insertion operation before the last equality.
        pre_del = False  # Is there a deletion operation before the last equality.
        post_ins = False  # Is there an insertion operation after the last equality.
        post_del = False  # Is there a deletion operation after the last equality.
        while pointer < len(diffs):
            if diffs[pointer][0] == self.DIFF_EQUAL:  # Equality found.
                if (len(diffs[pointer][1]) < self.Diff_EditCost and
                        (post_ins or post_del)):
                    # Candidate found.
                    equalities.append(pointer)
                    pre_ins = post_ins
                    pre_del = post_del
                    lastequality = diffs[pointer][1]
                else:
                    # Not a candidate, and can never become one.
                    equalities = []
                    lastequality = None

                post_ins = post_del = False
            else:  # An insertion or deletion.
                if diffs[pointer][0] == self.DIFF_DELETE:
                    post_del = True
                else:
                    post_ins = True

                # Five types to be split:
                # <ins>A</ins><del>B</del>XY<ins>C</ins><del>D</del>
                # <ins>A</ins>X<ins>C</ins><del>D</del>
                # <ins>A</ins><del>B</del>X<ins>C</ins>
                # <del>A</del>X<ins>C</ins><del>D</del>
                # <ins>A</ins><del>B</del>X<del>C</del>

                if lastequality and ((pre_ins and pre_del and post_ins and post_del) or
                                     ((len(lastequality) < self.Diff_EditCost / 2) and
                                      (pre_ins + pre_del + post_ins + post_del) == 3)):
                    # Duplicate record.
                    diffs.insert(equalities[-1], (self.DIFF_DELETE, lastequality))
                    # Change second copy to insert.
                    diffs[equalities[-1] + 1] = (self.DIFF_INSERT,
                                                 diffs[equalities[-1] + 1][1])
                    equalities.pop()  # Throw away the equality we just deleted.
                    lastequality = None
                    if pre_ins and pre_del:
                        # No changes made which could affect previous entry, keep going.
                        post_ins = post_del = True
                        equalities = []
                    else:
                        if len(equalities):
                            equalities.pop()  # Throw away the previous equality.
                        if len(equalities):
                            pointer = equalities[-1]
                        else:
                            pointer = -1
                        post_ins = post_del = False
                    changes = True
            pointer += 1

        if changes:
            self.diff_cleanupMerge(diffs)

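    # Usage sketch: with the default Diff_EditCost of 4, the short equality
    # "xyz" costs more to keep than to absorb into the surrounding edits:
    #
    #     diffs = [(-1, "ab"), (1, "12"), (0, "xyz"), (-1, "cd"), (1, "34")]
    #     dmp.diff_cleanupEfficiency(diffs)
    #     # diffs == [(-1, "abxyzcd"), (1, "12xyz34")]
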
    def diff_cleanupMerge(self, diffs):
        """Reorder and merge like edit sections.  Merge equalities.
        Any edit section can move as long as it doesn't cross an equality.

        Args:
            diffs: Array of diff tuples.
        """
        diffs.append((self.DIFF_EQUAL, ''))  # Add a dummy entry at the end.
        pointer = 0
        count_delete = 0
        count_insert = 0
        text_delete = ''
        text_insert = ''
        while pointer < len(diffs):
            if diffs[pointer][0] == self.DIFF_INSERT:
                count_insert += 1
                text_insert += diffs[pointer][1]
                pointer += 1
            elif diffs[pointer][0] == self.DIFF_DELETE:
                count_delete += 1
                text_delete += diffs[pointer][1]
                pointer += 1
            elif diffs[pointer][0] == self.DIFF_EQUAL:
                # Upon reaching an equality, check for prior redundancies.
                if count_delete + count_insert > 1:
                    if count_delete != 0 and count_insert != 0:
                        # Factor out any common prefixes.
                        commonlength = self.diff_commonPrefix(text_insert, text_delete)
                        if commonlength != 0:
                            x = pointer - count_delete - count_insert - 1
                            if x >= 0 and diffs[x][0] == self.DIFF_EQUAL:
                                diffs[x] = (diffs[x][0], diffs[x][1] +
                                            text_insert[:commonlength])
                            else:
                                diffs.insert(0, (self.DIFF_EQUAL, text_insert[:commonlength]))
                                pointer += 1
                            text_insert = text_insert[commonlength:]
                            text_delete = text_delete[commonlength:]
                        # Factor out any common suffixes.
                        commonlength = self.diff_commonSuffix(text_insert, text_delete)
                        if commonlength != 0:
                            diffs[pointer] = (diffs[pointer][0], text_insert[-commonlength:] +
                                              diffs[pointer][1])
                            text_insert = text_insert[:-commonlength]
                            text_delete = text_delete[:-commonlength]
                    # Delete the offending records and add the merged ones.
                    if count_delete == 0:
                        diffs[pointer - count_insert : pointer] = [
                            (self.DIFF_INSERT, text_insert)]
                    elif count_insert == 0:
                        diffs[pointer - count_delete : pointer] = [
                            (self.DIFF_DELETE, text_delete)]
                    else:
                        diffs[pointer - count_delete - count_insert : pointer] = [
                            (self.DIFF_DELETE, text_delete),
                            (self.DIFF_INSERT, text_insert)]
                    pointer = pointer - count_delete - count_insert + 1
                    if count_delete != 0:
                        pointer += 1
                    if count_insert != 0:
                        pointer += 1
                elif pointer != 0 and diffs[pointer - 1][0] == self.DIFF_EQUAL:
                    # Merge this equality with the previous one.
                    diffs[pointer - 1] = (diffs[pointer - 1][0],
                                          diffs[pointer - 1][1] + diffs[pointer][1])
                    del diffs[pointer]
                else:
                    pointer += 1

                count_insert = 0
                count_delete = 0
                text_delete = ''
                text_insert = ''

        if diffs[-1][1] == '':
            diffs.pop()  # Remove the dummy entry at the end.

        # Second pass: look for single edits surrounded on both sides by equalities
        # which can be shifted sideways to eliminate an equality.
        # e.g: A<ins>BA</ins>C -> <ins>AB</ins>AC
        changes = False
        pointer = 1
        # Intentionally ignore the first and last element (don't need checking).
        while pointer < len(diffs) - 1:
            if (diffs[pointer - 1][0] == self.DIFF_EQUAL and
                    diffs[pointer + 1][0] == self.DIFF_EQUAL):
                # This is a single edit surrounded by equalities.
                if diffs[pointer][1].endswith(diffs[pointer - 1][1]):
                    # Shift the edit over the previous equality.
                    diffs[pointer] = (diffs[pointer][0],
                                      diffs[pointer - 1][1] +
                                      diffs[pointer][1][:-len(diffs[pointer - 1][1])])
                    diffs[pointer + 1] = (diffs[pointer + 1][0],
                                          diffs[pointer - 1][1] + diffs[pointer + 1][1])
                    del diffs[pointer - 1]
                    changes = True
                elif diffs[pointer][1].startswith(diffs[pointer + 1][1]):
                    # Shift the edit over the next equality.
                    diffs[pointer - 1] = (diffs[pointer - 1][0],
                                          diffs[pointer - 1][1] + diffs[pointer + 1][1])
                    diffs[pointer] = (diffs[pointer][0],
                                      diffs[pointer][1][len(diffs[pointer + 1][1]):] +
                                      diffs[pointer + 1][1])
                    del diffs[pointer + 1]
                    changes = True
            pointer += 1

        # If shifts were made, the diff needs reordering and another shift sweep.
        if changes:
            self.diff_cleanupMerge(diffs)

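    # Usage sketch: the shared prefix "a" and suffix "c" of the delete/insert
    # pair are factored out into equalities (mutates `diffs` in place):
    #
    #     diffs = [(-1, "a"), (1, "abc"), (-1, "dc")]
    #     dmp.diff_cleanupMerge(diffs)
    #     # diffs == [(0, "a"), (-1, "d"), (1, "b"), (0, "c")]
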
    def diff_xIndex(self, diffs, loc):
        """loc is a location in text1, compute and return the equivalent location
        in text2.  e.g. "The cat" vs "The big cat", 1->1, 5->8

        Args:
            diffs: Array of diff tuples.
            loc: Location within text1.

        Returns:
            Location within text2.
        """
        chars1 = 0
        chars2 = 0
        last_chars1 = 0
        last_chars2 = 0
        for x in range(len(diffs)):
            (op, text) = diffs[x]
            if op != self.DIFF_INSERT:  # Equality or deletion.
                chars1 += len(text)
            if op != self.DIFF_DELETE:  # Equality or insertion.
                chars2 += len(text)
            if chars1 > loc:  # Overshot the location.
                break
            last_chars1 = chars1
            last_chars2 = chars2

        if len(diffs) != x and diffs[x][0] == self.DIFF_DELETE:
            # The location was deleted.
            return last_chars2
        # Add the remaining character length.
        return last_chars2 + (loc - last_chars1)

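    # Usage sketch (dmp as above); the second call lands inside a deletion,
    # so the location snaps back to the preceding survivor:
    #
    #     dmp.diff_xIndex([(-1, "a"), (1, "1234"), (0, "xyz")], 2)  # -> 5
    #     dmp.diff_xIndex([(0, "a"), (-1, "1234"), (0, "xyz")], 3)  # -> 1
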
    def diff_prettyHtml(self, diffs):
        """Convert a diff array into a pretty HTML report.

        Args:
            diffs: Array of diff tuples.

        Returns:
            HTML representation.
        """
        html = []
        for (op, data) in diffs:
            text = (data.replace("&", "&amp;").replace("<", "&lt;")
                    .replace(">", "&gt;").replace("\n", "&para;<br>"))
            if op == self.DIFF_INSERT:
                html.append("<ins style=\"background:#e6ffe6;\">%s</ins>" % text)
            elif op == self.DIFF_DELETE:
                html.append("<del style=\"background:#ffe6e6;\">%s</del>" % text)
            elif op == self.DIFF_EQUAL:
                html.append("<span>%s</span>" % text)
        return "".join(html)

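    # Usage sketch: HTML metacharacters are escaped and newlines rendered
    # as pilcrows:
    #
    #     dmp.diff_prettyHtml([(0, "a\n"), (-1, "<B>b</B>"), (1, "c&d")])
    #     # -> '<span>a&para;<br></span>'
    #     #    '<del style="background:#ffe6e6;">&lt;B&gt;b&lt;/B&gt;</del>'
    #     #    '<ins style="background:#e6ffe6;">c&amp;d</ins>'
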
1082 def diff_text1(self, diffs):
1082 def diff_text1(self, diffs):
1083 """Compute and return the source text (all equalities and deletions).
1083 """Compute and return the source text (all equalities and deletions).
1084
1084
1085 Args:
1085 Args:
1086 diffs: Array of diff tuples.
1086 diffs: Array of diff tuples.
1087
1087
1088 Returns:
1088 Returns:
1089 Source text.
1089 Source text.
1090 """
1090 """
1091 text = []
1091 text = []
1092 for (op, data) in diffs:
1092 for (op, data) in diffs:
1093 if op != self.DIFF_INSERT:
1093 if op != self.DIFF_INSERT:
1094 text.append(data)
1094 text.append(data)
1095 return "".join(text)
1095 return "".join(text)
1096
1096
  def diff_text2(self, diffs):
    """Compute and return the destination text (all equalities and insertions).

    Args:
      diffs: Array of diff tuples.

    Returns:
      Destination text.
    """
    text = []
    for (op, data) in diffs:
      if op != self.DIFF_DELETE:
        text.append(data)
    return "".join(text)

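  # Worked example (hedged; assumes `dmp = diff_match_patch()`): diff_text1
  # rebuilds the source text, diff_text2 the destination text.
  #
  #   diffs = [(dmp.DIFF_EQUAL, "ab"), (dmp.DIFF_DELETE, "c"),
  #            (dmp.DIFF_INSERT, "xy")]
  #   dmp.diff_text1(diffs)  # -> "abc"
  #   dmp.diff_text2(diffs)  # -> "abxy"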
  def diff_levenshtein(self, diffs):
    """Compute the Levenshtein distance; the number of inserted, deleted or
    substituted characters.

    Args:
      diffs: Array of diff tuples.

    Returns:
      Number of changes.
    """
    levenshtein = 0
    insertions = 0
    deletions = 0
    for (op, data) in diffs:
      if op == self.DIFF_INSERT:
        insertions += len(data)
      elif op == self.DIFF_DELETE:
        deletions += len(data)
      elif op == self.DIFF_EQUAL:
        # A deletion and an insertion is one substitution.
        levenshtein += max(insertions, deletions)
        insertions = 0
        deletions = 0
    levenshtein += max(insertions, deletions)
    return levenshtein

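  # Illustration (hedged, not from the original source): for the classic
  # "kitten" -> "sitting" pair the result is typically 3, because each
  # equality flushes max(insertions, deletions), so paired delete/insert runs
  # count as substitutions.
  #
  #   diffs = dmp.diff_main("kitten", "sitting")
  #   dmp.diff_levenshtein(diffs)  # -> 3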
  def diff_toDelta(self, diffs):
    """Crush the diff into an encoded string which describes the operations
    required to transform text1 into text2.
    E.g. =3\t-2\t+ing  -> Keep 3 chars, delete 2 chars, insert 'ing'.
    Operations are tab-separated. Inserted text is escaped using %xx notation.

    Args:
      diffs: Array of diff tuples.

    Returns:
      Delta text.
    """
    text = []
    for (op, data) in diffs:
      if op == self.DIFF_INSERT:
        # High ascii will raise UnicodeDecodeError. Use Unicode instead.
        data = data.encode("utf-8")
        text.append("+" + urllib.quote(data, "!~*'();/?:@&=+$,# "))
      elif op == self.DIFF_DELETE:
        text.append("-%d" % len(data))
      elif op == self.DIFF_EQUAL:
        text.append("=%d" % len(data))
    return "\t".join(text)

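  # Encoding sketch (hedged): keep 3 characters, delete 2, insert "ped".
  #
  #   diffs = [(dmp.DIFF_EQUAL, "jum"), (dmp.DIFF_DELETE, "ps"),
  #            (dmp.DIFF_INSERT, "ped")]
  #   dmp.diff_toDelta(diffs)  # -> "=3\t-2\t+ped"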
  def diff_fromDelta(self, text1, delta):
    """Given the original text1, and an encoded string which describes the
    operations required to transform text1 into text2, compute the full diff.

    Args:
      text1: Source string for the diff.
      delta: Delta text.

    Returns:
      Array of diff tuples.

    Raises:
      ValueError: If invalid input.
    """
    if type(delta) == unicode:
      # Deltas should be composed of a subset of ascii chars, Unicode not
      # required. If this encode raises UnicodeEncodeError, delta is invalid.
      delta = delta.encode("ascii")
    diffs = []
    pointer = 0  # Cursor in text1
    tokens = delta.split("\t")
    for token in tokens:
      if token == "":
        # Blank tokens are ok (from a trailing \t).
        continue
      # Each token begins with a one character parameter which specifies the
      # operation of this token (delete, insert, equality).
      param = token[1:]
      if token[0] == "+":
        param = urllib.unquote(param).decode("utf-8")
        diffs.append((self.DIFF_INSERT, param))
      elif token[0] == "-" or token[0] == "=":
        try:
          n = int(param)
        except ValueError:
          raise ValueError("Invalid number in diff_fromDelta: " + param)
        if n < 0:
          raise ValueError("Negative number in diff_fromDelta: " + param)
        text = text1[pointer : pointer + n]
        pointer += n
        if token[0] == "=":
          diffs.append((self.DIFF_EQUAL, text))
        else:
          diffs.append((self.DIFF_DELETE, text))
      else:
        # Anything else is an error.
        raise ValueError("Invalid diff operation in diff_fromDelta: " +
            token[0])
    if pointer != len(text1):
      raise ValueError(
          "Delta length (%d) does not equal source text length (%d)." %
          (pointer, len(text1)))
    return diffs

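  # Round-trip sketch (hedged): the delta plus the original text1 suffices to
  # rebuild the full diff, which is what makes deltas cheap to send on a wire.
  #
  #   dmp.diff_fromDelta("jumps", "=3\t-2\t+ped")
  #   # -> [(dmp.DIFF_EQUAL, "jum"), (dmp.DIFF_DELETE, "ps"),
  #   #     (dmp.DIFF_INSERT, "ped")]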
  # MATCH FUNCTIONS

  def match_main(self, text, pattern, loc):
    """Locate the best instance of 'pattern' in 'text' near 'loc'.

    Args:
      text: The text to search.
      pattern: The pattern to search for.
      loc: The location to search around.

    Returns:
      Best match index or -1.
    """
    # Check for null inputs.
    if text is None or pattern is None:
      raise ValueError("Null inputs. (match_main)")

    loc = max(0, min(loc, len(text)))
    if text == pattern:
      # Shortcut (potentially not guaranteed by the algorithm)
      return 0
    elif not text:
      # Nothing to match.
      return -1
    elif text[loc:loc + len(pattern)] == pattern:
      # Perfect match at the perfect spot!  (Includes case of null pattern)
      return loc
    else:
      # Do a fuzzy compare.
      match = self.match_bitap(text, pattern, loc)
      return match

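  # Usage sketch (hedged): an exact hit short-circuits; otherwise the Bitap
  # search tolerates errors near `loc` and returns -1 when nothing scores
  # under Match_Threshold.
  #
  #   dmp.match_main("The quick brown fox", "quick", 3)  # -> 4 (exact match)
  #   dmp.match_main("The quick brown fox", "quikc", 3)  # -> 4 with default
  #                                                      #    thresholds (fuzzy)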
  def match_bitap(self, text, pattern, loc):
    """Locate the best instance of 'pattern' in 'text' near 'loc' using the
    Bitap algorithm.

    Args:
      text: The text to search.
      pattern: The pattern to search for.
      loc: The location to search around.

    Returns:
      Best match index or -1.
    """
    # Python doesn't have a maxint limit, so ignore this check.
    #if self.Match_MaxBits != 0 and len(pattern) > self.Match_MaxBits:
    #  raise ValueError("Pattern too long for this application.")

    # Initialise the alphabet.
    s = self.match_alphabet(pattern)

    def match_bitapScore(e, x):
      """Compute and return the score for a match with e errors and x location.
      Accesses loc and pattern through being a closure.

      Args:
        e: Number of errors in match.
        x: Location of match.

      Returns:
        Overall score for match (0.0 = good, 1.0 = bad).
      """
      accuracy = float(e) / len(pattern)
      proximity = abs(loc - x)
      if not self.Match_Distance:
        # Dodge divide by zero error.
        return proximity and 1.0 or accuracy
      return accuracy + (proximity / float(self.Match_Distance))

    # Highest score beyond which we give up.
    score_threshold = self.Match_Threshold
    # Is there a nearby exact match? (speedup)
    best_loc = text.find(pattern, loc)
    if best_loc != -1:
      score_threshold = min(match_bitapScore(0, best_loc), score_threshold)
      # What about in the other direction? (speedup)
      best_loc = text.rfind(pattern, loc + len(pattern))
      if best_loc != -1:
        score_threshold = min(match_bitapScore(0, best_loc), score_threshold)

    # Initialise the bit arrays.
    matchmask = 1 << (len(pattern) - 1)
    best_loc = -1

    bin_max = len(pattern) + len(text)
    # Empty initialization added to appease pychecker.
    last_rd = None
    for d in range(len(pattern)):
      # Scan for the best match; each iteration allows for one more error.
      # Run a binary search to determine how far from 'loc' we can stray at
      # this error level.
      bin_min = 0
      bin_mid = bin_max
      while bin_min < bin_mid:
        if match_bitapScore(d, loc + bin_mid) <= score_threshold:
          bin_min = bin_mid
        else:
          bin_max = bin_mid
        bin_mid = (bin_max - bin_min) // 2 + bin_min

      # Use the result from this iteration as the maximum for the next.
      bin_max = bin_mid
      start = max(1, loc - bin_mid + 1)
      finish = min(loc + bin_mid, len(text)) + len(pattern)

      rd = [0] * (finish + 2)
      rd[finish + 1] = (1 << d) - 1
      for j in range(finish, start - 1, -1):
        if len(text) <= j - 1:
          # Out of range.
          charMatch = 0
        else:
          charMatch = s.get(text[j - 1], 0)
        if d == 0:  # First pass: exact match.
          rd[j] = ((rd[j + 1] << 1) | 1) & charMatch
        else:  # Subsequent passes: fuzzy match.
          rd[j] = (((rd[j + 1] << 1) | 1) & charMatch) | (
              ((last_rd[j + 1] | last_rd[j]) << 1) | 1) | last_rd[j + 1]
        if rd[j] & matchmask:
          score = match_bitapScore(d, j - 1)
          # This match will almost certainly be better than any existing match.
          # But check anyway.
          if score <= score_threshold:
            # Told you so.
            score_threshold = score
            best_loc = j - 1
            if best_loc > loc:
              # When passing loc, don't exceed our current distance from loc.
              start = max(1, 2 * loc - best_loc)
            else:
              # Already passed loc, downhill from here on in.
              break
      # No hope for a (better) match at greater error levels.
      if match_bitapScore(d + 1, loc) > score_threshold:
        break
      last_rd = rd
    return best_loc

  def match_alphabet(self, pattern):
    """Initialise the alphabet for the Bitap algorithm.

    Args:
      pattern: The text to encode.

    Returns:
      Hash of character locations.
    """
    s = {}
    for char in pattern:
      s[char] = 0
    for i in range(len(pattern)):
      s[pattern[i]] |= 1 << (len(pattern) - i - 1)
    return s

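  # Worked example (hedged): each character maps to a bitmask of the positions
  # where it occurs, with the leftmost pattern position in the highest bit.
  #
  #   dmp.match_alphabet("abc")  # -> {'a': 0b100, 'b': 0b010, 'c': 0b001}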
  # PATCH FUNCTIONS

  def patch_addContext(self, patch, text):
    """Increase the context until it is unique,
    but don't let the pattern expand beyond Match_MaxBits.

    Args:
      patch: The patch to grow.
      text: Source text.
    """
    if len(text) == 0:
      return
    pattern = text[patch.start2 : patch.start2 + patch.length1]
    padding = 0

    # Look for the first and last matches of pattern in text. If two different
    # matches are found, increase the pattern length.
    while (text.find(pattern) != text.rfind(pattern) and (self.Match_MaxBits ==
        0 or len(pattern) < self.Match_MaxBits - self.Patch_Margin -
        self.Patch_Margin)):
      padding += self.Patch_Margin
      pattern = text[max(0, patch.start2 - padding) :
                     patch.start2 + patch.length1 + padding]
    # Add one chunk for good luck.
    padding += self.Patch_Margin

    # Add the prefix.
    prefix = text[max(0, patch.start2 - padding) : patch.start2]
    if prefix:
      patch.diffs[:0] = [(self.DIFF_EQUAL, prefix)]
    # Add the suffix.
    suffix = text[patch.start2 + patch.length1 :
                  patch.start2 + patch.length1 + padding]
    if suffix:
      patch.diffs.append((self.DIFF_EQUAL, suffix))

    # Roll back the start points.
    patch.start1 -= len(prefix)
    patch.start2 -= len(prefix)
    # Extend lengths.
    patch.length1 += len(prefix) + len(suffix)
    patch.length2 += len(prefix) + len(suffix)

  def patch_make(self, a, b=None, c=None):
    """Compute a list of patches to turn text1 into text2.
    Use diffs if provided, otherwise compute it ourselves.
    There are four ways to call this function, depending on what data is
    available to the caller:
    Method 1:
    a = text1, b = text2
    Method 2:
    a = diffs
    Method 3 (optimal):
    a = text1, b = diffs
    Method 4 (deprecated, use method 3):
    a = text1, b = text2, c = diffs

    Args:
      a: text1 (methods 1,3,4) or Array of diff tuples for text1 to
          text2 (method 2).
      b: text2 (methods 1,4) or Array of diff tuples for text1 to
          text2 (method 3) or undefined (method 2).
      c: Array of diff tuples for text1 to text2 (method 4) or
          undefined (methods 1,2,3).

    Returns:
      Array of Patch objects.
    """
    text1 = None
    diffs = None
    # Note that texts may arrive as 'str' or 'unicode'.
    if isinstance(a, str) and isinstance(b, str) and c is None:
      # Method 1: text1, text2
      # Compute diffs from text1 and text2.
      text1 = a
      diffs = self.diff_main(text1, b, True)
      if len(diffs) > 2:
        self.diff_cleanupSemantic(diffs)
        self.diff_cleanupEfficiency(diffs)
    elif isinstance(a, list) and b is None and c is None:
      # Method 2: diffs
      # Compute text1 from diffs.
      diffs = a
      text1 = self.diff_text1(diffs)
    elif isinstance(a, str) and isinstance(b, list) and c is None:
      # Method 3: text1, diffs
      text1 = a
      diffs = b
    elif (isinstance(a, str) and isinstance(b, str) and
          isinstance(c, list)):
      # Method 4: text1, text2, diffs
      # text2 is not used.
      text1 = a
      diffs = c
    else:
      raise ValueError("Unknown call format to patch_make.")

    if not diffs:
      return []  # Get rid of the None case.
    patches = []
    patch = patch_obj()
    char_count1 = 0  # Number of characters into the text1 string.
    char_count2 = 0  # Number of characters into the text2 string.
    prepatch_text = text1  # Recreate the patches to determine context info.
    postpatch_text = text1
    for x in range(len(diffs)):
      (diff_type, diff_text) = diffs[x]
      if len(patch.diffs) == 0 and diff_type != self.DIFF_EQUAL:
        # A new patch starts here.
        patch.start1 = char_count1
        patch.start2 = char_count2
      if diff_type == self.DIFF_INSERT:
        # Insertion
        patch.diffs.append(diffs[x])
        patch.length2 += len(diff_text)
        postpatch_text = (postpatch_text[:char_count2] + diff_text +
                          postpatch_text[char_count2:])
      elif diff_type == self.DIFF_DELETE:
        # Deletion.
        patch.length1 += len(diff_text)
        patch.diffs.append(diffs[x])
        postpatch_text = (postpatch_text[:char_count2] +
                          postpatch_text[char_count2 + len(diff_text):])
      elif (diff_type == self.DIFF_EQUAL and
            len(diff_text) <= 2 * self.Patch_Margin and
            len(patch.diffs) != 0 and len(diffs) != x + 1):
        # Small equality inside a patch.
        patch.diffs.append(diffs[x])
        patch.length1 += len(diff_text)
        patch.length2 += len(diff_text)

      if (diff_type == self.DIFF_EQUAL and
          len(diff_text) >= 2 * self.Patch_Margin):
        # Time for a new patch.
        if len(patch.diffs) != 0:
          self.patch_addContext(patch, prepatch_text)
          patches.append(patch)
          patch = patch_obj()
          # Unlike Unidiff, our patch lists have a rolling context.
          # http://code.google.com/p/google-diff-match-patch/wiki/Unidiff
          # Update prepatch text & pos to reflect the application of the
          # just completed patch.
          prepatch_text = postpatch_text
          char_count1 = char_count2

      # Update the current character count.
      if diff_type != self.DIFF_INSERT:
        char_count1 += len(diff_text)
      if diff_type != self.DIFF_DELETE:
        char_count2 += len(diff_text)

    # Pick up the leftover patch if not empty.
    if len(patch.diffs) != 0:
      self.patch_addContext(patch, prepatch_text)
      patches.append(patch)
    return patches

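  # Usage sketch (hedged): Method 1, two texts, is the common entry point.
  #
  #   patches = dmp.patch_make("The quick brown fox", "The quick red fox")
  #   len(patches)  # -> typically 1: a single patch covering brown -> red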
  def patch_deepCopy(self, patches):
    """Given an array of patches, return another array that is identical.

    Args:
      patches: Array of Patch objects.

    Returns:
      Array of Patch objects.
    """
    patchesCopy = []
    for patch in patches:
      patchCopy = patch_obj()
      # No need to deep copy the tuples since they are immutable.
      patchCopy.diffs = patch.diffs[:]
      patchCopy.start1 = patch.start1
      patchCopy.start2 = patch.start2
      patchCopy.length1 = patch.length1
      patchCopy.length2 = patch.length2
      patchesCopy.append(patchCopy)
    return patchesCopy

  def patch_apply(self, patches, text):
    """Merge a set of patches onto the text. Return a patched text, as well
    as a list of true/false values indicating which patches were applied.

    Args:
      patches: Array of Patch objects.
      text: Old text.

    Returns:
      Two element Array, containing the new text and an array of boolean values.
    """
    if not patches:
      return (text, [])

    # Deep copy the patches so that no changes are made to originals.
    patches = self.patch_deepCopy(patches)

    nullPadding = self.patch_addPadding(patches)
    text = nullPadding + text + nullPadding
    self.patch_splitMax(patches)

    # delta keeps track of the offset between the expected and actual location
    # of the previous patch. If there are patches expected at positions 10 and
    # 20, but the first patch was found at 12, delta is 2 and the second patch
    # has an effective expected position of 22.
    delta = 0
    results = []
    for patch in patches:
      expected_loc = patch.start2 + delta
      text1 = self.diff_text1(patch.diffs)
      end_loc = -1
      if len(text1) > self.Match_MaxBits:
        # patch_splitMax will only provide an oversized pattern in the case of
        # a monster delete.
        start_loc = self.match_main(text, text1[:self.Match_MaxBits],
                                    expected_loc)
        if start_loc != -1:
          end_loc = self.match_main(text, text1[-self.Match_MaxBits:],
              expected_loc + len(text1) - self.Match_MaxBits)
          if end_loc == -1 or start_loc >= end_loc:
            # Can't find valid trailing context. Drop this patch.
            start_loc = -1
      else:
        start_loc = self.match_main(text, text1, expected_loc)
      if start_loc == -1:
        # No match found. :(
        results.append(False)
        # Subtract the delta for this failed patch from subsequent patches.
        delta -= patch.length2 - patch.length1
      else:
        # Found a match. :)
        results.append(True)
        delta = start_loc - expected_loc
        if end_loc == -1:
          text2 = text[start_loc : start_loc + len(text1)]
        else:
          text2 = text[start_loc : end_loc + self.Match_MaxBits]
        if text1 == text2:
          # Perfect match, just shove the replacement text in.
          text = (text[:start_loc] + self.diff_text2(patch.diffs) +
                  text[start_loc + len(text1):])
        else:
          # Imperfect match.
          # Run a diff to get a framework of equivalent indices.
          diffs = self.diff_main(text1, text2, False)
          if (len(text1) > self.Match_MaxBits and
              self.diff_levenshtein(diffs) / float(len(text1)) >
              self.Patch_DeleteThreshold):
            # The end points match, but the content is unacceptably bad.
            results[-1] = False
          else:
            self.diff_cleanupSemanticLossless(diffs)
            index1 = 0
            for (op, data) in patch.diffs:
              if op != self.DIFF_EQUAL:
                index2 = self.diff_xIndex(diffs, index1)
              if op == self.DIFF_INSERT:  # Insertion
                text = text[:start_loc + index2] + data + text[start_loc +
                                                               index2:]
              elif op == self.DIFF_DELETE:  # Deletion
                text = text[:start_loc + index2] + text[start_loc +
                    self.diff_xIndex(diffs, index1 + len(data)):]
              if op != self.DIFF_DELETE:
                index1 += len(data)
    # Strip the padding off.
    text = text[len(nullPadding):-len(nullPadding)]
    return (text, results)

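  # End-to-end sketch (hedged): patches built from one pair of texts can be
  # applied to a third, drifted text; the boolean list reports per-patch
  # success.
  #
  #   patches = dmp.patch_make("The quick brown fox", "The quick red fox")
  #   new_text, results = dmp.patch_apply(patches, "The quick brown fox jumps")
  #   # -> ("The quick red fox jumps", [True])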
  def patch_addPadding(self, patches):
    """Add some padding on text start and end so that edges can match
    something. Intended to be called only from within patch_apply.

    Args:
      patches: Array of Patch objects.

    Returns:
      The padding string added to each side.
    """
    paddingLength = self.Patch_Margin
    nullPadding = ""
    for x in range(1, paddingLength + 1):
      nullPadding += chr(x)

    # Bump all the patches forward.
    for patch in patches:
      patch.start1 += paddingLength
      patch.start2 += paddingLength

    # Add some padding on start of first diff.
    patch = patches[0]
    diffs = patch.diffs
    if not diffs or diffs[0][0] != self.DIFF_EQUAL:
      # Add nullPadding equality.
      diffs.insert(0, (self.DIFF_EQUAL, nullPadding))
      patch.start1 -= paddingLength  # Should be 0.
      patch.start2 -= paddingLength  # Should be 0.
      patch.length1 += paddingLength
      patch.length2 += paddingLength
    elif paddingLength > len(diffs[0][1]):
      # Grow first equality.
      extraLength = paddingLength - len(diffs[0][1])
      newText = nullPadding[len(diffs[0][1]):] + diffs[0][1]
      diffs[0] = (diffs[0][0], newText)
      patch.start1 -= extraLength
      patch.start2 -= extraLength
      patch.length1 += extraLength
      patch.length2 += extraLength

    # Add some padding on end of last diff.
    patch = patches[-1]
    diffs = patch.diffs
    if not diffs or diffs[-1][0] != self.DIFF_EQUAL:
      # Add nullPadding equality.
      diffs.append((self.DIFF_EQUAL, nullPadding))
      patch.length1 += paddingLength
      patch.length2 += paddingLength
    elif paddingLength > len(diffs[-1][1]):
      # Grow last equality.
      extraLength = paddingLength - len(diffs[-1][1])
      newText = diffs[-1][1] + nullPadding[:extraLength]
      diffs[-1] = (diffs[-1][0], newText)
      patch.length1 += extraLength
      patch.length2 += extraLength

    return nullPadding

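  # Note (hedged): with the default Patch_Margin of 4 the returned padding is
  # '\x01\x02\x03\x04'; such control characters are vanishingly unlikely in
  # real text, so patches touching the very start or end of the document still
  # have context to match against.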
  def patch_splitMax(self, patches):
    """Look through the patches and break up any which are longer than the
    maximum limit of the match algorithm.
    Intended to be called only from within patch_apply.

    Args:
      patches: Array of Patch objects.
    """
    patch_size = self.Match_MaxBits
    if patch_size == 0:
      # Python has the option of not splitting strings due to its ability
      # to handle integers of arbitrary precision.
      return
    for x in range(len(patches)):
      if patches[x].length1 <= patch_size:
        continue
      bigpatch = patches[x]
      # Remove the big old patch.
      del patches[x]
      x -= 1
      start1 = bigpatch.start1
      start2 = bigpatch.start2
      precontext = ''
      while len(bigpatch.diffs) != 0:
        # Create one of several smaller patches.
        patch = patch_obj()
        empty = True
        patch.start1 = start1 - len(precontext)
        patch.start2 = start2 - len(precontext)
        if precontext:
          patch.length1 = patch.length2 = len(precontext)
          patch.diffs.append((self.DIFF_EQUAL, precontext))

        while (len(bigpatch.diffs) != 0 and
               patch.length1 < patch_size - self.Patch_Margin):
          (diff_type, diff_text) = bigpatch.diffs[0]
          if diff_type == self.DIFF_INSERT:
            # Insertions are harmless.
            patch.length2 += len(diff_text)
            start2 += len(diff_text)
            patch.diffs.append(bigpatch.diffs.pop(0))
            empty = False
          elif (diff_type == self.DIFF_DELETE and len(patch.diffs) == 1 and
              patch.diffs[0][0] == self.DIFF_EQUAL and
              len(diff_text) > 2 * patch_size):
            # This is a large deletion. Let it pass in one chunk.
            patch.length1 += len(diff_text)
            start1 += len(diff_text)
            empty = False
            patch.diffs.append((diff_type, diff_text))
            del bigpatch.diffs[0]
          else:
            # Deletion or equality. Only take as much as we can stomach.
            diff_text = diff_text[:patch_size - patch.length1 -
                                  self.Patch_Margin]
            patch.length1 += len(diff_text)
            start1 += len(diff_text)
            if diff_type == self.DIFF_EQUAL:
              patch.length2 += len(diff_text)
              start2 += len(diff_text)
            else:
              empty = False

            patch.diffs.append((diff_type, diff_text))
            if diff_text == bigpatch.diffs[0][1]:
              del bigpatch.diffs[0]
            else:
              bigpatch.diffs[0] = (bigpatch.diffs[0][0],
                                   bigpatch.diffs[0][1][len(diff_text):])

        # Compute the head context for the next patch.
        precontext = self.diff_text2(patch.diffs)
        precontext = precontext[-self.Patch_Margin:]
        # Append the end context for this patch.
        postcontext = self.diff_text1(bigpatch.diffs)[:self.Patch_Margin]
        if postcontext:
          patch.length1 += len(postcontext)
          patch.length2 += len(postcontext)
          if len(patch.diffs) != 0 and patch.diffs[-1][0] == self.DIFF_EQUAL:
            patch.diffs[-1] = (self.DIFF_EQUAL, patch.diffs[-1][1] +
                               postcontext)
          else:
            patch.diffs.append((self.DIFF_EQUAL, postcontext))

        if not empty:
          x += 1
          patches.insert(x, patch)

  def patch_toText(self, patches):
    """Take a list of patches and return a textual representation.

    Args:
      patches: Array of Patch objects.

    Returns:
      Text representation of patches.
    """
    text = []
    for patch in patches:
      text.append(str(patch))
    return "".join(text)

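  # Serialization sketch (hedged): patch_toText emits one GNU-diff-style block
  # per patch and patch_fromText (below) parses it back, so the pair
  # round-trips.
  #
  #   text = dmp.patch_toText(patches)  # "@@ -start,len +start,len @@" blocks
  #   dmp.patch_fromText(text)          # -> an equivalent list of patch_obj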
  def patch_fromText(self, textline):
    """Parse a textual representation of patches and return a list of patch
    objects.

    Args:
      textline: Text representation of patches.

    Returns:
      Array of Patch objects.

    Raises:
      ValueError: If invalid input.
    """
    if type(textline) == unicode:
      # Patches should be composed of a subset of ascii chars, Unicode not
      # required. If this encode raises UnicodeEncodeError, patch is invalid.
      textline = textline.encode("ascii")
    patches = []
    if not textline:
      return patches
    text = textline.split('\n')
    while len(text) != 0:
      m = re.match(r"^@@ -(\d+),?(\d*) \+(\d+),?(\d*) @@$", text[0])
      if not m:
        raise ValueError("Invalid patch string: " + text[0])
      patch = patch_obj()
      patches.append(patch)
      patch.start1 = int(m.group(1))
      if m.group(2) == '':
        patch.start1 -= 1
        patch.length1 = 1
      elif m.group(2) == '0':
        patch.length1 = 0
      else:
        patch.start1 -= 1
        patch.length1 = int(m.group(2))

      patch.start2 = int(m.group(3))
      if m.group(4) == '':
        patch.start2 -= 1
        patch.length2 = 1
      elif m.group(4) == '0':
        patch.length2 = 0
      else:
        patch.start2 -= 1
        patch.length2 = int(m.group(4))

      del text[0]

      while len(text) != 0:
        if text[0]:
          sign = text[0][0]
        else:
          sign = ''
        line = urllib.unquote(text[0][1:])
        line = line.decode("utf-8")
        if sign == '+':
          # Insertion.
          patch.diffs.append((self.DIFF_INSERT, line))
        elif sign == '-':
          # Deletion.
          patch.diffs.append((self.DIFF_DELETE, line))
        elif sign == ' ':
          # Minor equality.
          patch.diffs.append((self.DIFF_EQUAL, line))
        elif sign == '@':
          # Start of next patch.
          break
        elif sign == '':
          # Blank line? Whatever.
          pass
        else:
          # Anything else is an error.
          raise ValueError("Invalid patch mode: '%s'\n%s" % (sign, line))
        del text[0]
    return patches


class patch_obj:
  """Class representing one patch operation.
  """

  def __init__(self):
    """Initializes with an empty list of diffs.
    """
    self.diffs = []
    self.start1 = None
    self.start2 = None
    self.length1 = 0
    self.length2 = 0

  def __str__(self):
    """Emulate GNU diff's format.
    Header: @@ -382,8 +481,9 @@
    Indices are printed as 1-based, not 0-based.

    Returns:
      The GNU diff string.
    """
    if self.length1 == 0:
      coords1 = str(self.start1) + ",0"
    elif self.length1 == 1:
      coords1 = str(self.start1 + 1)
    else:
      coords1 = str(self.start1 + 1) + "," + str(self.length1)
    if self.length2 == 0:
      coords2 = str(self.start2) + ",0"
    elif self.length2 == 1:
      coords2 = str(self.start2 + 1)
    else:
      coords2 = str(self.start2 + 1) + "," + str(self.length2)
    text = ["@@ -", coords1, " +", coords2, " @@\n"]
    # Escape the body of the patch with %xx notation.
    for (op, data) in self.diffs:
      if op == diff_match_patch.DIFF_INSERT:
        text.append("+")
      elif op == diff_match_patch.DIFF_DELETE:
        text.append("-")
      elif op == diff_match_patch.DIFF_EQUAL:
        text.append(" ")
      # High ascii will raise UnicodeDecodeError. Use Unicode instead.
      data = data.encode("utf-8")
      text.append(urllib.quote(data, "!~*'();/?:@&=+$,# ") + "\n")
    return "".join(text)
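
# Formatting sketch (hedged, not part of the original source): str(patch) on a
# patch replacing 5 characters at 0-based offset 20 with 7 characters yields a
# "@@ -21,5 +21,7 @@" header, followed by one %xx-escaped body line per diff
# tuple, each prefixed with '+', '-' or ' '.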
@@ -1,446 +1,446 b''
# Copyright (c) Django Software Foundation and individual contributors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
#     1. Redistributions of source code must retain the above copyright notice,
#        this list of conditions and the following disclaimer.
#
#     2. Redistributions in binary form must reproduce the above copyright
#        notice, this list of conditions and the following disclaimer in the
#        documentation and/or other materials provided with the distribution.
#
#     3. Neither the name of Django nor the names of its contributors may be used
#        to endorse or promote products derived from this software without
#        specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""
For definitions of the different versions of RSS, see:
http://web.archive.org/web/20110718035220/http://diveintomark.org/archives/2004/02/04/incompatible-rss
"""
33 from __future__ import unicode_literals
33
34
34
35 import datetime
35 import datetime
36 from StringIO import StringIO
36 from StringIO import StringIO
37
37
38 import pytz
38 import pytz
39 from six.moves.urllib import parse as urlparse
39 from six.moves.urllib import parse as urlparse
40
40
41 from rhodecode.lib.feedgenerator import datetime_safe
41 from rhodecode.lib.feedgenerator import datetime_safe
42 from rhodecode.lib.feedgenerator.utils import SimplerXMLGenerator, iri_to_uri, force_text
42 from rhodecode.lib.feedgenerator.utils import SimplerXMLGenerator, iri_to_uri, force_text
43
43
44
44
45 #### The following code comes from ``django.utils.feedgenerator`` ####
45 #### The following code comes from ``django.utils.feedgenerator`` ####


def rfc2822_date(date):
    # We can't use strftime() because it produces locale-dependent results, so
    # we have to map english month and day names manually
    months = ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec',)
    days = ('Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun')
    # Support datetime objects older than 1900
    date = datetime_safe.new_datetime(date)
    # We do this ourselves to be timezone aware, email.Utils is not tz aware.
    dow = days[date.weekday()]
    month = months[date.month - 1]
    time_str = date.strftime('%s, %%d %s %%Y %%H:%%M:%%S ' % (dow, month))

    time_str = time_str.decode('utf-8')
    offset = date.utcoffset()
    # Historically, this function assumes that naive datetimes are in UTC.
    if offset is None:
        return time_str + '-0000'
    else:
        timezone = (offset.days * 24 * 60) + (offset.seconds // 60)
        hour, minute = divmod(timezone, 60)
        return time_str + '%+03d%02d' % (hour, minute)
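

# Example usage (added for illustration; the datetime is hypothetical):
#
#   >>> import datetime
#   >>> rfc2822_date(datetime.datetime(2020, 5, 17, 12, 30))
#   u'Sun, 17 May 2020 12:30:00 -0000'
#
# Naive datetimes are assumed to be in UTC and rendered with the '-0000'
# offset marker, per the historical behaviour noted in the function.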


def rfc3339_date(date):
    # Support datetime objects older than 1900
    date = datetime_safe.new_datetime(date)
    time_str = date.strftime('%Y-%m-%dT%H:%M:%S')

    time_str = time_str.decode('utf-8')
    offset = date.utcoffset()
    # Historically, this function assumes that naive datetimes are in UTC.
    if offset is None:
        return time_str + 'Z'
    else:
        timezone = (offset.days * 24 * 60) + (offset.seconds // 60)
        hour, minute = divmod(timezone, 60)
        return time_str + '%+03d:%02d' % (hour, minute)
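

# Example usage (added for illustration; the datetime is hypothetical):
#
#   >>> import datetime
#   >>> rfc3339_date(datetime.datetime(2020, 5, 17, 12, 30))
#   u'2020-05-17T12:30:00Z'
#
# An aware datetime gets an explicit offset instead of 'Z', e.g.
# u'2020-05-17T12:30:00+00:00' for pytz.utc.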


def get_tag_uri(url, date):
    """
    Creates a TagURI.

    See http://web.archive.org/web/20110514113830/http://diveintomark.org/archives/2004/05/28/howto-atom-id
    """
    # NOTE: ``urlparse`` is the ``six.moves.urllib.parse`` module here, so we
    # must call its ``urlparse`` function rather than the module itself.
    bits = urlparse.urlparse(url)
    d = ''
    if date is not None:
        d = ',%s' % datetime_safe.new_datetime(date).strftime('%Y-%m-%d')
    return 'tag:%s%s:%s/%s' % (bits.hostname, d, bits.path, bits.fragment)
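

# Example usage (added for illustration; the URL is hypothetical):
#
#   >>> import datetime
#   >>> get_tag_uri('https://example.com/feed/1#frag', datetime.datetime(2020, 5, 17))
#   'tag:example.com,2020-05-17:/feed/1/frag'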


class SyndicationFeed(object):
    """Base class for all syndication feeds. Subclasses should provide write()"""

    def __init__(self, title, link, description, language=None, author_email=None,
                 author_name=None, author_link=None, subtitle=None, categories=None,
                 feed_url=None, feed_copyright=None, feed_guid=None, ttl=None, **kwargs):
        def to_unicode(s):
            return force_text(s, strings_only=True)
        if categories:
            categories = [force_text(c) for c in categories]
        if ttl is not None:
            # Force ints to unicode
            ttl = force_text(ttl)
        self.feed = {
            'title': to_unicode(title),
            'link': iri_to_uri(link),
            'description': to_unicode(description),
            'language': to_unicode(language),
            'author_email': to_unicode(author_email),
            'author_name': to_unicode(author_name),
            'author_link': iri_to_uri(author_link),
            'subtitle': to_unicode(subtitle),
            'categories': categories or (),
            'feed_url': iri_to_uri(feed_url),
            'feed_copyright': to_unicode(feed_copyright),
            'id': feed_guid or link,
            'ttl': ttl,
        }
        self.feed.update(kwargs)
        self.items = []

    def add_item(self, title, link, description, author_email=None,
                 author_name=None, author_link=None, pubdate=None, comments=None,
                 unique_id=None, unique_id_is_permalink=None, enclosure=None,
                 categories=(), item_copyright=None, ttl=None, updateddate=None,
                 enclosures=None, **kwargs):
        """
        Adds an item to the feed. All args are expected to be Python Unicode
        objects except pubdate and updateddate, which are datetime.datetime
        objects, and enclosures, which is an iterable of instances of the
        Enclosure class.
        """
        def to_unicode(s):
            return force_text(s, strings_only=True)
        if categories:
            categories = [to_unicode(c) for c in categories]
        if ttl is not None:
            # Force ints to unicode
            ttl = force_text(ttl)
        if enclosure is None:
            enclosures = [] if enclosures is None else enclosures
        else:
            # the single ``enclosure`` argument is the legacy spelling; fold
            # it into ``enclosures`` so it is not silently dropped
            enclosures = [enclosure]

        item = {
            'title': to_unicode(title),
            'link': iri_to_uri(link),
            'description': to_unicode(description),
            'author_email': to_unicode(author_email),
            'author_name': to_unicode(author_name),
            'author_link': iri_to_uri(author_link),
            'pubdate': pubdate,
            'updateddate': updateddate,
            'comments': to_unicode(comments),
            'unique_id': to_unicode(unique_id),
            'unique_id_is_permalink': unique_id_is_permalink,
            'enclosures': enclosures,
            'categories': categories or (),
            'item_copyright': to_unicode(item_copyright),
            'ttl': ttl,
        }
        item.update(kwargs)
        self.items.append(item)

    def num_items(self):
        return len(self.items)

    def root_attributes(self):
        """
        Return extra attributes to place on the root (i.e. feed/channel) element.
        Called from write().
        """
        return {}

    def add_root_elements(self, handler):
        """
        Add elements in the root (i.e. feed/channel) element. Called
        from write().
        """
        pass

    def item_attributes(self, item):
        """
        Return extra attributes to place on each item (i.e. item/entry) element.
        """
        return {}

    def add_item_elements(self, handler, item):
        """
        Add elements on each item (i.e. item/entry) element.
        """
        pass

    def write(self, outfile, encoding):
        """
        Outputs the feed in the given encoding to outfile, which is a file-like
        object. Subclasses should override this.
        """
        raise NotImplementedError('subclasses of SyndicationFeed must provide a write() method')

    def writeString(self, encoding):
        """
        Returns the feed in the given encoding as a string.
        """
        s = StringIO()
        self.write(s, encoding)
        return s.getvalue()

    def latest_post_date(self):
        """
        Returns the latest item's pubdate or updateddate. If no items
        have either of these attributes this returns the current UTC date/time.
        """
        latest_date = None
        date_keys = ('updateddate', 'pubdate')

        for item in self.items:
            for date_key in date_keys:
                item_date = item.get(date_key)
                if item_date:
                    if latest_date is None or item_date > latest_date:
                        latest_date = item_date

        # datetime.now(tz=utc) is slower, as documented in django.utils.timezone.now
        return latest_date or datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
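

# Illustrative sketch (added for documentation; ``MinimalFeed`` is
# hypothetical): a concrete subclass only needs to emit XML through the
# SAX-style handler in its write() method.
#
#   class MinimalFeed(SyndicationFeed):
#       def write(self, outfile, encoding):
#           handler = SimplerXMLGenerator(outfile, encoding)
#           handler.startDocument()
#           handler.startElement('feed', self.root_attributes())
#           handler.addQuickElement('title', self.feed['title'])
#           handler.endElement('feed')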


class Enclosure(object):
    """Represents an RSS enclosure"""
    def __init__(self, url, length, mime_type):
        """All args are expected to be Python Unicode objects"""
        self.length, self.mime_type = length, mime_type
        self.url = iri_to_uri(url)
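

# Example usage (added for illustration; values are hypothetical). Note that
# the length is passed as text, matching the "Unicode objects" expectation:
#
#   >>> enc = Enclosure(u'https://example.com/a.mp3', u'123456', u'audio/mpeg')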


class RssFeed(SyndicationFeed):
    content_type = 'application/rss+xml; charset=utf-8'

    def write(self, outfile, encoding):
        handler = SimplerXMLGenerator(outfile, encoding)
        handler.startDocument()
        handler.startElement("rss", self.rss_attributes())
        handler.startElement("channel", self.root_attributes())
        self.add_root_elements(handler)
        self.write_items(handler)
        self.endChannelElement(handler)
        handler.endElement("rss")

    def rss_attributes(self):
        return {"version": self._version,
                "xmlns:atom": "http://www.w3.org/2005/Atom"}

    def write_items(self, handler):
        for item in self.items:
            handler.startElement('item', self.item_attributes(item))
            self.add_item_elements(handler, item)
            handler.endElement("item")

    def add_root_elements(self, handler):
        handler.addQuickElement("title", self.feed['title'])
        handler.addQuickElement("link", self.feed['link'])
        handler.addQuickElement("description", self.feed['description'])
        if self.feed['feed_url'] is not None:
            handler.addQuickElement("atom:link", None, {"rel": "self", "href": self.feed['feed_url']})
        if self.feed['language'] is not None:
            handler.addQuickElement("language", self.feed['language'])
        for cat in self.feed['categories']:
            handler.addQuickElement("category", cat)
        if self.feed['feed_copyright'] is not None:
            handler.addQuickElement("copyright", self.feed['feed_copyright'])
        handler.addQuickElement("lastBuildDate", rfc2822_date(self.latest_post_date()))
        if self.feed['ttl'] is not None:
            handler.addQuickElement("ttl", self.feed['ttl'])

    def endChannelElement(self, handler):
        handler.endElement("channel")


class RssUserland091Feed(RssFeed):
    _version = "0.91"

    def add_item_elements(self, handler, item):
        handler.addQuickElement("title", item['title'])
        handler.addQuickElement("link", item['link'])
        if item['description'] is not None:
            handler.addQuickElement("description", item['description'])


class Rss201rev2Feed(RssFeed):
    # Spec: http://blogs.law.harvard.edu/tech/rss
    _version = "2.0"

    def add_item_elements(self, handler, item):
        handler.addQuickElement("title", item['title'])
        handler.addQuickElement("link", item['link'])
        if item['description'] is not None:
            handler.addQuickElement("description", item['description'])

        # Author information.
        if item["author_name"] and item["author_email"]:
            handler.addQuickElement("author", "%s (%s)" % (item['author_email'], item['author_name']))
        elif item["author_email"]:
            handler.addQuickElement("author", item["author_email"])
        elif item["author_name"]:
            handler.addQuickElement(
                "dc:creator", item["author_name"], {"xmlns:dc": "http://purl.org/dc/elements/1.1/"}
            )

        if item['pubdate'] is not None:
            handler.addQuickElement("pubDate", rfc2822_date(item['pubdate']))
        if item['comments'] is not None:
            handler.addQuickElement("comments", item['comments'])
        if item['unique_id'] is not None:
            guid_attrs = {}
            if isinstance(item.get('unique_id_is_permalink'), bool):
                guid_attrs['isPermaLink'] = str(item['unique_id_is_permalink']).lower()
            handler.addQuickElement("guid", item['unique_id'], guid_attrs)
        if item['ttl'] is not None:
            handler.addQuickElement("ttl", item['ttl'])

        # Enclosure.
        if item['enclosures']:
            enclosures = list(item['enclosures'])
            if len(enclosures) > 1:
                raise ValueError(
                    "RSS feed items may only have one enclosure, see "
                    "http://www.rssboard.org/rss-profile#element-channel-item-enclosure"
                )
            enclosure = enclosures[0]
            handler.addQuickElement('enclosure', '', {
                'url': enclosure.url,
                'length': enclosure.length,
                'type': enclosure.mime_type,
            })

        # Categories.
        for cat in item['categories']:
            handler.addQuickElement("category", cat)


class Atom1Feed(SyndicationFeed):
    # Spec: https://tools.ietf.org/html/rfc4287
    content_type = 'application/atom+xml; charset=utf-8'
    ns = "http://www.w3.org/2005/Atom"

    def write(self, outfile, encoding):
        handler = SimplerXMLGenerator(outfile, encoding)
        handler.startDocument()
        handler.startElement('feed', self.root_attributes())
        self.add_root_elements(handler)
        self.write_items(handler)
        handler.endElement("feed")

    def root_attributes(self):
        if self.feed['language'] is not None:
            return {"xmlns": self.ns, "xml:lang": self.feed['language']}
        else:
            return {"xmlns": self.ns}

    def add_root_elements(self, handler):
        handler.addQuickElement("title", self.feed['title'])
        handler.addQuickElement("link", "", {"rel": "alternate", "href": self.feed['link']})
        if self.feed['feed_url'] is not None:
            handler.addQuickElement("link", "", {"rel": "self", "href": self.feed['feed_url']})
        handler.addQuickElement("id", self.feed['id'])
        handler.addQuickElement("updated", rfc3339_date(self.latest_post_date()))
        if self.feed['author_name'] is not None:
            handler.startElement("author", {})
            handler.addQuickElement("name", self.feed['author_name'])
            if self.feed['author_email'] is not None:
                handler.addQuickElement("email", self.feed['author_email'])
            if self.feed['author_link'] is not None:
                handler.addQuickElement("uri", self.feed['author_link'])
            handler.endElement("author")
        if self.feed['subtitle'] is not None:
            handler.addQuickElement("subtitle", self.feed['subtitle'])
        for cat in self.feed['categories']:
            handler.addQuickElement("category", "", {"term": cat})
        if self.feed['feed_copyright'] is not None:
            handler.addQuickElement("rights", self.feed['feed_copyright'])

    def write_items(self, handler):
        for item in self.items:
            handler.startElement("entry", self.item_attributes(item))
            self.add_item_elements(handler, item)
            handler.endElement("entry")

    def add_item_elements(self, handler, item):
        handler.addQuickElement("title", item['title'])
        handler.addQuickElement("link", "", {"href": item['link'], "rel": "alternate"})

        if item['pubdate'] is not None:
            handler.addQuickElement('published', rfc3339_date(item['pubdate']))

        if item['updateddate'] is not None:
            handler.addQuickElement('updated', rfc3339_date(item['updateddate']))

        # Author information.
        if item['author_name'] is not None:
            handler.startElement("author", {})
            handler.addQuickElement("name", item['author_name'])
            if item['author_email'] is not None:
                handler.addQuickElement("email", item['author_email'])
            if item['author_link'] is not None:
                handler.addQuickElement("uri", item['author_link'])
            handler.endElement("author")

        # Unique ID.
        if item['unique_id'] is not None:
            unique_id = item['unique_id']
        else:
            unique_id = get_tag_uri(item['link'], item['pubdate'])
        handler.addQuickElement("id", unique_id)

        # Summary.
        if item['description'] is not None:
            handler.addQuickElement("summary", item['description'], {"type": "html"})

        # Enclosures.
        for enclosure in item['enclosures']:
            handler.addQuickElement('link', '', {
                'rel': 'enclosure',
                'href': enclosure.url,
                'length': enclosure.length,
                'type': enclosure.mime_type,
            })

        # Categories.
        for cat in item['categories']:
            handler.addQuickElement("category", "", {"term": cat})

        # Rights.
        if item['item_copyright'] is not None:
            handler.addQuickElement("rights", item['item_copyright'])


# This isolates the decision of what the system default is, so calling code can
# do "feedgenerator.DefaultFeed" instead of "feedgenerator.Rss201rev2Feed".
DefaultFeed = Rss201rev2Feed
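

# Example usage (added for illustration; all values are hypothetical):
#
#   >>> import datetime
#   >>> feed = DefaultFeed(
#   ...     title=u'Example feed', link=u'https://example.com/',
#   ...     description=u'Demo feed', language=u'en')
#   >>> feed.add_item(
#   ...     title=u'First post', link=u'https://example.com/posts/1',
#   ...     description=u'Hello world', pubdate=datetime.datetime(2020, 5, 17))
#   >>> xml = feed.writeString('utf-8')  # serialized RSS 2.0 document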
@@ -1,311 +1,311 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2012-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
Index schema for RhodeCode
"""


import os
import re
import logging

from whoosh import query as query_lib
from whoosh.highlight import HtmlFormatter, ContextFragmenter
from whoosh.index import create_in, open_dir, exists_in, EmptyIndexError
from whoosh.qparser import QueryParser, QueryParserError

import rhodecode.lib.helpers as h
from rhodecode.lib.index import BaseSearcher
from rhodecode.lib.utils2 import safe_unicode

log = logging.getLogger(__name__)


try:
    # we first try to import from rhodecode tools, and fall back to the
    # bundled copies if we're unable to
    from rhodecode_tools.lib.fts_index.whoosh_schema import (
        ANALYZER, FILE_INDEX_NAME, FILE_SCHEMA, COMMIT_INDEX_NAME,
        COMMIT_SCHEMA)
except ImportError:
    log.warning('rhodecode_tools schema not available, doing a fallback '
                'import from `rhodecode.lib.index.whoosh_fallback_schema`')
    from rhodecode.lib.index.whoosh_fallback_schema import (
        ANALYZER, FILE_INDEX_NAME, FILE_SCHEMA, COMMIT_INDEX_NAME,
        COMMIT_SCHEMA)


FORMATTER = HtmlFormatter('span', between='\n<span class="break">...</span>\n')
FRAGMENTER = ContextFragmenter(200)


class WhooshSearcher(BaseSearcher):
    # this also shows in UI
    query_lang_doc = 'http://whoosh.readthedocs.io/en/latest/querylang.html'
    name = 'whoosh'

    def __init__(self, config):
        # refer to this class explicitly; ``Searcher`` is only assigned as an
        # alias of it at the bottom of the module
        super(WhooshSearcher, self).__init__()
        self.config = config
        if not os.path.isdir(self.config['location']):
            os.makedirs(self.config['location'])

        opener = create_in
        if exists_in(self.config['location'], indexname=FILE_INDEX_NAME):
            opener = open_dir
        file_index = opener(self.config['location'], schema=FILE_SCHEMA,
                            indexname=FILE_INDEX_NAME)

        opener = create_in
        if exists_in(self.config['location'], indexname=COMMIT_INDEX_NAME):
            opener = open_dir
        changeset_index = opener(self.config['location'], schema=COMMIT_SCHEMA,
                                 indexname=COMMIT_INDEX_NAME)

        self.commit_schema = COMMIT_SCHEMA
        self.commit_index = changeset_index
        self.file_schema = FILE_SCHEMA
        self.file_index = file_index
        self.searcher = None

    def cleanup(self):
        if self.searcher:
            self.searcher.close()

    def _extend_query(self, query):
        hashes = re.compile('([0-9a-f]{5,40})').findall(query)
        if hashes:
            hashes_or_query = ' OR '.join('commit_id:%s*' % h for h in hashes)
            query = u'(%s) OR %s' % (query, hashes_or_query)
        return query
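
    # Example (added for illustration; the hash is hypothetical): a bare
    # commit hash found in the query is OR-ed with a prefix lookup, e.g.
    #
    #   >>> searcher._extend_query(u'deadbeef1')
    #   u'(deadbeef1) OR commit_id:deadbeef1*'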

    def sort_def(self, search_type, direction, sort_field):

        if search_type == 'commit':
            field_defs = {
                'message': 'message',
                'date': 'date',
                'author_email': 'author',
            }
        elif search_type == 'path':
            field_defs = {
                'file': 'path',
                'size': 'size',
                'lines': 'lines',
            }
        elif search_type == 'content':
            # NOTE(dan): content doesn't support any sorting
            field_defs = {}
        else:
            return ''

        if sort_field in field_defs:
            return field_defs[sort_field]
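
    # Example (added for illustration): the user-facing sort field is mapped
    # to the underlying index field, e.g.
    #
    #   >>> searcher.sort_def('commit', 'desc', 'author_email')
    #   'author'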

    def search(self, query, document_type, search_user,
               repo_name=None, repo_group_name=None,
               requested_page=1, page_limit=10, sort=None, raise_on_exc=True):

        original_query = query
        query = self._extend_query(query)

        log.debug(u'QUERY: %s on %s', query, document_type)
        result = {
            'results': [],
            'count': 0,
            'error': None,
            'runtime': 0
        }
        search_type, index_name, schema_defn = self._prepare_for_search(
            document_type)
        self._init_searcher(index_name)
        try:
            qp = QueryParser(search_type, schema=schema_defn)
            allowed_repos_filter = self._get_repo_filter(
                search_user, repo_name)
            try:
                query = qp.parse(safe_unicode(query))
                log.debug('query: %s (%s)', query, repr(query))

                reverse, sorted_by = False, None
                direction, sort_field = self.get_sort(search_type, sort)
                if sort_field:
                    sort_definition = self.sort_def(search_type, direction, sort_field)
                    if sort_definition:
                        sorted_by = sort_definition
                        if direction == Searcher.DIRECTION_DESC:
                            reverse = True
                        if direction == Searcher.DIRECTION_ASC:
                            reverse = False

                whoosh_results = self.searcher.search(
                    query, filter=allowed_repos_filter, limit=None,
                    sortedby=sorted_by, reverse=reverse)

                # fixes for 32k limit that whoosh uses for highlight
                whoosh_results.fragmenter.charlimit = None
                res_ln = whoosh_results.scored_length()
                result['runtime'] = whoosh_results.runtime
                result['count'] = res_ln
                result['results'] = WhooshResultWrapper(
                    search_type, res_ln, whoosh_results)

            except QueryParserError:
                result['error'] = 'Invalid search query. Try quoting it.'
        except (EmptyIndexError, IOError, OSError):
            msg = 'There is no index to search in. Please run whoosh indexer'
            log.exception(msg)
            result['error'] = msg
        except Exception:
            msg = 'An error occurred during this search operation'
            log.exception(msg)
            result['error'] = msg

        return result
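
    # Example (added for illustration; names are hypothetical): a typical
    # call from the search view looks like
    #
    #   >>> res = searcher.search(u'def main', 'content', auth_user)
    #   >>> res['count'], res['error']
    #
    # where ``auth_user`` carries the repository permissions used to build
    # the whoosh filter below.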

    def statistics(self, translator):
        _ = translator
        stats = [
            {'key': _('Index Type'), 'value': 'Whoosh'},
            {'sep': True},

            {'key': _('File Index'), 'value': str(self.file_index)},
            {'key': _('Indexed documents'), 'value': self.file_index.doc_count()},
            {'key': _('Last update'), 'value': h.time_to_datetime(self.file_index.last_modified())},

            {'sep': True},

            {'key': _('Commit index'), 'value': str(self.commit_index)},
            {'key': _('Indexed documents'), 'value': str(self.commit_index.doc_count())},
            {'key': _('Last update'), 'value': h.time_to_datetime(self.commit_index.last_modified())}
        ]
        return stats

    def _get_repo_filter(self, auth_user, repo_name):

        allowed_to_search = [
            repo for repo, perm in
            auth_user.permissions['repositories'].items()
            if perm != 'repository.none']

        if repo_name:
            repo_filter = [query_lib.Term('repository', repo_name)]

        elif 'hg.admin' in auth_user.permissions.get('global', []):
            return None

        else:
            repo_filter = [query_lib.Term('repository', _rn)
                           for _rn in allowed_to_search]
            # in case we're not allowed to search anywhere, it's a trick
            # to tell whoosh we're filtering on ALL results
            repo_filter = repo_filter or [query_lib.Term('repository', '')]

        return query_lib.Or(repo_filter)

    def _prepare_for_search(self, cur_type):
        search_type = {
            'content': 'content',
            'commit': 'message',
            'path': 'path',
            'repository': 'repository'
        }.get(cur_type, 'content')

        index_name = {
            'content': FILE_INDEX_NAME,
            'commit': COMMIT_INDEX_NAME,
            'path': FILE_INDEX_NAME
        }.get(cur_type, FILE_INDEX_NAME)

        schema_defn = {
            'content': self.file_schema,
            'commit': self.commit_schema,
            'path': self.file_schema
        }.get(cur_type, self.file_schema)

        log.debug('IDX: %s', index_name)
        log.debug('SCHEMA: %s', schema_defn)
        return search_type, index_name, schema_defn

    def _init_searcher(self, index_name):
        idx = open_dir(self.config['location'], indexname=index_name)
        self.searcher = idx.searcher()
        return self.searcher


Searcher = WhooshSearcher


class WhooshResultWrapper(object):
    def __init__(self, search_type, total_hits, results):
        self.search_type = search_type
        self.results = results
        self.total_hits = total_hits

    def __str__(self):
        return '<%s at %s>' % (self.__class__.__name__, len(self))

    def __repr__(self):
        return self.__str__()

    def __len__(self):
        return self.total_hits

    def __iter__(self):
        """
        Allows iteration over results and lazily generates content.

        *Requires* implementation of ``__getitem__`` method.
        """
        for hit in self.results:
            yield self.get_full_content(hit)

    def __getitem__(self, key):
        """
        Slicing of resultWrapper; ``key`` is expected to be a slice object.
        """
        i, j = key.start, key.stop
        for hit in self.results[i:j]:
            yield self.get_full_content(hit)

    def get_full_content(self, hit):
        # TODO: marcink: this feels like an overkill, there's a lot of data
        # inside hit object, and we don't need all
        res = dict(hit)
        # elastic search uses that, we set it empty so it falls back to regular HL logic
        res['content_highlight'] = ''

        f_path = ''  # pragma: no cover
        if self.search_type in ['content', 'path']:
            f_path = res['path'][len(res['repository']):]
            f_path = f_path.lstrip(os.sep)

        if self.search_type == 'content':
            res.update({'content_short_hl': hit.highlights('content'),
                        'f_path': f_path})
        elif self.search_type == 'path':
            res.update({'f_path': f_path})
        elif self.search_type == 'message':
            res.update({'message_hl': hit.highlights('message')})

        return res
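

# Example (added for illustration; names are hypothetical): the wrapper is
# typically sliced lazily by a paginator, e.g.
#
#   >>> wrapper = WhooshResultWrapper('content', res_ln, whoosh_results)
#   >>> first_page = list(wrapper[0:10])  # content is generated per hit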
@@ -1,75 +1,75 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2012-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
Whoosh fallback schema for RhodeCode, used in case the schema defined in
rhodecode_tools is not available
"""


from whoosh.analysis import RegexTokenizer, LowercaseFilter
from whoosh.formats import Characters
from whoosh.fields import (
    TEXT, ID, STORED, NUMERIC, BOOLEAN, Schema, FieldType, DATETIME)

# CUSTOM ANALYZER wordsplit + lowercase filter for case insensitive search
ANALYZER = RegexTokenizer(expression=r"\w+") | LowercaseFilter()

# FILE INDEX SCHEMA DEFINITION
FILE_INDEX_NAME = 'FILE_INDEX'
FILE_SCHEMA = Schema(
    fileid=ID(unique=True),  # Path
    repository=ID(stored=True),
    repository_id=NUMERIC(unique=True, stored=True),  # Numeric id of repo
    repo_name=TEXT(stored=True),
    owner=TEXT(),
    path=TEXT(stored=True),
    content=FieldType(format=Characters(), analyzer=ANALYZER,
                      scorable=True, stored=True),
    modtime=STORED(),
    md5=STORED(),
    extension=ID(stored=True),
    commit_id=TEXT(stored=True),

    size=NUMERIC(int, 64, signed=False, stored=True),
    mimetype=TEXT(stored=True),
    lines=NUMERIC(int, 64, signed=False, stored=True),
)


# COMMIT INDEX SCHEMA
COMMIT_INDEX_NAME = 'COMMIT_INDEX'
COMMIT_SCHEMA = Schema(
    commit_id=ID(unique=True, stored=True),
    repository=ID(unique=True, stored=True),
    repository_id=NUMERIC(unique=True, stored=True),
    commit_idx=NUMERIC(stored=True, sortable=True),
    commit_idx_sort=ID(),
    date=NUMERIC(int, 64, signed=False, stored=True, sortable=True),
    owner=TEXT(stored=True),
    author=TEXT(stored=True),
    message=FieldType(format=Characters(), analyzer=ANALYZER,
                      scorable=True, stored=True),
    parents=TEXT(stored=True),
    added=TEXT(stored=True),  # space separated names of added files
    removed=TEXT(stored=True),  # space separated names of removed files
    changed=TEXT(stored=True),  # space separated names of changed files
)
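

# Example (added for illustration; the directory path is hypothetical and is
# assumed to already exist): creating and populating a file index with this
# schema.
#
#   >>> from whoosh.index import create_in
#   >>> ix = create_in('/tmp/rc-index', schema=FILE_SCHEMA, indexname=FILE_INDEX_NAME)
#   >>> writer = ix.writer()
#   >>> writer.add_document(fileid=u'repo/a.py', repository=u'repo',
#   ...                     path=u'repo/a.py', content=u'def foo(): pass',
#   ...                     extension=u'py')
#   >>> writer.commit()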
@@ -1,161 +1,161 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2014-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
Utilities aimed at helping to achieve mostly basic tasks.
"""


import re
import os
import time
import datetime
import logging

from rhodecode.lib.vcs.conf import settings
from rhodecode.lib.vcs.exceptions import VCSError, VCSBackendNotSupportedError


log = logging.getLogger(__name__)
def get_scm(path):
    """
    Returns one of the aliases from ``ALIASES`` (in the same order of
    precedence as the shortcuts given in ``ALIASES``) together with the
    working dir path for the given argument. If no scm-specific directory
    is found, or more than one scm is found at that directory, ``VCSError``
    is raised.
    """
    if not os.path.isdir(path):
        raise VCSError("Given path %s is not a directory" % path)

    found_scms = [(scm, path) for scm in get_scms_for_path(path)]

    if len(found_scms) > 1:
        found = ', '.join(x[0] for x in found_scms)
        raise VCSError(
            'More than one [%s] scm found at given path %s' % (found, path))

    if len(found_scms) == 0:
        raise VCSError('No scm found at given path %s' % path)

    return found_scms[0]
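
A short usage sketch (the repository path below is hypothetical, and the call assumes the directory holds exactly one recognized working copy):

    scm, workdir = get_scm('/srv/repos/my-project')  # hypothetical path
    print('detected %s checkout at %s' % (scm, workdir))  # e.g. 'git'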


def get_scm_backend(backend_type):
    from rhodecode.lib.vcs.backends import get_backend
    return get_backend(backend_type)


def get_scms_for_path(path):
    """
    Returns all scms found at the given path. If no scm is recognized,
    an empty list is returned.

    :param path: path to the directory which should be checked. May be
      a callable returning that path.

    :raises VCSError: if the given ``path`` is not a directory
    """
    from rhodecode.lib.vcs.backends import get_backend
    if callable(path):
        path = path()
    if not os.path.isdir(path):
        raise VCSError("Given path %r is not a directory" % path)

    result = []
    for key in settings.available_aliases():
        try:
            backend = get_backend(key)
        except VCSBackendNotSupportedError:
            log.warning('VCSBackendNotSupportedError: %s not supported', key)
            continue
        if backend.is_valid_repository(path):
            result.append(key)
    return result
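
Because ``path`` may also be a callable, its resolution can be deferred until the check actually runs; a small sketch with a hypothetical path:

    # the lambda is only invoked inside get_scms_for_path
    aliases = get_scms_for_path(lambda: '/srv/repos/my-project')
    if not aliases:
        print('no scm recognized at that path')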


def parse_datetime(text):
    """
    Parses the given text and returns a ``datetime.datetime`` instance or
    raises ``ValueError``.

    :param text: string of the desired date/datetime, or something more
      verbose like *yesterday*, *2weeks 3days*, etc.
    """
    if not text:
        raise ValueError('Wrong date: "%s"' % text)

    if isinstance(text, datetime.datetime):
        return text

    # we limit the format to exclude microseconds, e.g. 2017-10-17t17:48:23.XXXX
    text = text.strip().lower()[:19]

    input_formats = (
        '%Y-%m-%d %H:%M:%S',
        '%Y-%m-%dt%H:%M:%S',
        '%Y-%m-%d %H:%M',
        '%Y-%m-%dt%H:%M',
        '%Y-%m-%d',
        '%m/%d/%Y %H:%M:%S',
        '%m/%d/%Yt%H:%M:%S',
        '%m/%d/%Y %H:%M',
        '%m/%d/%Yt%H:%M',
        '%m/%d/%Y',
        '%m/%d/%y %H:%M:%S',
        '%m/%d/%yt%H:%M:%S',
        '%m/%d/%y %H:%M',
        '%m/%d/%yt%H:%M',
        '%m/%d/%y',
    )
    for format_def in input_formats:
        try:
            return datetime.datetime(*time.strptime(text, format_def)[:6])
        except ValueError:
            pass

    # Try descriptive texts
    if text == 'tomorrow':
        future = datetime.datetime.now() + datetime.timedelta(days=1)
        args = future.timetuple()[:3] + (23, 59, 59)
        return datetime.datetime(*args)
    elif text == 'today':
        return datetime.datetime(*datetime.datetime.today().timetuple()[:3])
    elif text == 'now':
        return datetime.datetime.now()
    elif text == 'yesterday':
        past = datetime.datetime.now() - datetime.timedelta(days=1)
        return datetime.datetime(*past.timetuple()[:3])
    else:
        days = 0
        matched = re.match(
            r'^((?P<weeks>\d+) ?w(eeks?)?)? ?((?P<days>\d+) ?d(ays?)?)?$', text)
        if matched:
            groupdict = matched.groupdict()
            if groupdict['days']:
                days += int(groupdict['days'])
            if groupdict['weeks']:
                days += int(groupdict['weeks']) * 7
            past = datetime.datetime.now() - datetime.timedelta(days=days)
            return datetime.datetime(*past.timetuple()[:3])

    raise ValueError('Wrong date: "%s"' % text)
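
A few illustrative calls (the relative forms depend on the current date, so only their shape is shown):

    parse_datetime('2017-10-17 17:48:23')  # datetime.datetime(2017, 10, 17, 17, 48, 23)
    parse_datetime('10/17/2017')           # datetime.datetime(2017, 10, 17, 0, 0)
    parse_datetime('yesterday')            # midnight of the previous day
    parse_datetime('2weeks 3days')         # midnight, 17 days ago
    parse_datetime('nonsense')             # raises ValueError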
@@ -1,84 +1,84 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

-from __future__ import unicode_literals

import time

from whoosh import index
import mock
import pytest

import rhodecode
from rhodecode.lib.auth import AuthUser
from rhodecode.lib.index import whoosh, searcher_from_config


@pytest.mark.parametrize("name_suffix", [
    "",
    "UpPeRcAsE",
])
def test_search_finds_results(
        tmpdir, backend_random, user_regular, name_suffix):
    repo = backend_random.create_repo(name_suffix=name_suffix)

    search_location = tmpdir.strpath
    create_commit_index_with_one_document(
        search_location, repo_name=repo.repo_name)

    auth_user = AuthUser(user_id=user_regular.user_id)
    with mock.patch.dict(rhodecode.CONFIG,
                         {'search.location': search_location}):
        searcher = searcher_from_config(rhodecode.CONFIG)

    search_result = searcher.search(
        "Test", document_type='commit', search_user=auth_user, repo_name=None)
    results = list(search_result['results'])
    assert len(results) == 1
    assert results[0]['repository'] == repo.repo_name


def create_commit_index_with_one_document(search_location, repo_name):
    """
    Provides a test index based on our search schema.

    The full details of index creation are found in `rhodecode-tools`.
    The intention of this function is to provide just enough so that the
    search works.
    """
    test_index = index.create_in(
        search_location, whoosh.COMMIT_SCHEMA,
        indexname=whoosh.COMMIT_INDEX_NAME)
    writer = test_index.writer()

    writer.add_document(
        commit_id="fake_commit_id",
        commit_idx=1,
        owner="Test Owner",
        date=time.time(),
        repository=repo_name,
        author="Test Author",
        message="Test Message",
        added="added_1.txt added_2.txt",
        removed="removed_1.txt removed_2.txt",
        changed="changed_1.txt changed_2.txt",
        parents="fake_parent_1_id fake_parent_2_id",
    )
    writer.commit()