@@ -1,330 +1,329 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2012-2020 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/

 from __future__ import unicode_literals
 import logging

 import colander
 import deform.widget
 from mako.template import Template

 from rhodecode import events
 from rhodecode.model.validation_schema.widgets import CheckboxChoiceWidgetDesc
 from rhodecode.translation import _
 from rhodecode.lib.celerylib import run_task
 from rhodecode.lib.celerylib import tasks
 from rhodecode.integrations.types.base import (
     IntegrationTypeBase, render_with_traceback)


 log = logging.getLogger(__name__)

 REPO_PUSH_TEMPLATE_PLAINTEXT = Template('''
 Commits:

 % for commit in data['push']['commits']:
 ${commit['url']} by ${commit['author']} at ${commit['date']}
 ${commit['message']}
 ----

 % endfor
 ''')

 REPO_PUSH_TEMPLATE_HTML = Template('''
 <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
 <html xmlns="http://www.w3.org/1999/xhtml">
 <head>
 <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
 <meta name="viewport" content="width=device-width, initial-scale=1.0"/>
 <title>${subject}</title>
 <style type="text/css">
 /* Based on The MailChimp Reset INLINE: Yes. */
 #outlook a {padding:0;} /* Force Outlook to provide a "view in browser" menu link. */
 body{width:100% !important; -webkit-text-size-adjust:100%; -ms-text-size-adjust:100%; margin:0; padding:0;}
 /* Prevent Webkit and Windows Mobile platforms from changing default font sizes.*/
 .ExternalClass {width:100%;} /* Force Hotmail to display emails at full width */
 .ExternalClass, .ExternalClass p, .ExternalClass span, .ExternalClass font, .ExternalClass td, .ExternalClass div {line-height: 100%;}
 /* Forces Hotmail to display normal line spacing. More on that: http://www.emailonacid.com/forum/viewthread/43/ */
 #backgroundTable {margin:0; padding:0; line-height: 100% !important;}
 /* End reset */

 /* defaults for images*/
 img {outline:none; text-decoration:none; -ms-interpolation-mode: bicubic;}
 a img {border:none;}
 .image_fix {display:block;}

 body {line-height:1.2em;}
 p {margin: 0 0 20px;}
 h1, h2, h3, h4, h5, h6 {color:#323232!important;}
 a {color:#427cc9;text-decoration:none;outline:none;cursor:pointer;}
 a:focus {outline:none;}
 a:hover {color: #305b91;}
 h1 a, h2 a, h3 a, h4 a, h5 a, h6 a {color:#427cc9!important;text-decoration:none!important;}
 h1 a:active, h2 a:active, h3 a:active, h4 a:active, h5 a:active, h6 a:active {color: #305b91!important;}
 h1 a:visited, h2 a:visited, h3 a:visited, h4 a:visited, h5 a:visited, h6 a:visited {color: #305b91!important;}
 table {font-size:13px;border-collapse:collapse;mso-table-lspace:0pt;mso-table-rspace:0pt;}
 table td {padding:.65em 1em .65em 0;border-collapse:collapse;vertical-align:top;text-align:left;}
 input {display:inline;border-radius:2px;border-style:solid;border: 1px solid #dbd9da;padding:.5em;}
 input:focus {outline: 1px solid #979797}
 @media only screen and (-webkit-min-device-pixel-ratio: 2) {
 /* Put your iPhone 4g styles in here */
 }

 /* Android targeting */
 @media only screen and (-webkit-device-pixel-ratio:.75){
 /* Put CSS for low density (ldpi) Android layouts in here */
 }
 @media only screen and (-webkit-device-pixel-ratio:1){
 /* Put CSS for medium density (mdpi) Android layouts in here */
 }
 @media only screen and (-webkit-device-pixel-ratio:1.5){
 /* Put CSS for high density (hdpi) Android layouts in here */
 }
 /* end Android targeting */

 </style>

 <!-- Targeting Windows Mobile -->
 <!--[if IEMobile 7]>
 <style type="text/css">

 </style>
 <![endif]-->

 <!--[if gte mso 9]>
 <style>
 /* Target Outlook 2007 and 2010 */
 </style>
 <![endif]-->
 </head>
 <body>
 <!-- Wrapper/Container Table: Use a wrapper table to control the width and the background color consistently of your email. Use this approach instead of setting attributes on the body tag. -->
 <table cellpadding="0" cellspacing="0" border="0" id="backgroundTable" align="left" style="margin:1%;width:97%;padding:0;font-family:sans-serif;font-weight:100;border:1px solid #dbd9da">
 <tr>
 <td valign="top" style="padding:0;">
 <table cellpadding="0" cellspacing="0" border="0" align="left" width="100%">
 <tr><td style="width:100%;padding:7px;background-color:#202020" valign="top">
 <a style="color:#eeeeee;text-decoration:none;" href="${instance_url}">
 ${'RhodeCode'}
 </a>
 </td></tr>
 <tr>
 <td style="padding:15px;" valign="top">
 % if data['push']['commits']:
 % for commit in data['push']['commits']:
 <a href="${commit['url']}">${commit['short_id']}</a> by ${commit['author']} at ${commit['date']} <br/>
 ${commit['message_html']} <br/>
 <br/>
 % endfor
 % else:
 No commit data
 % endif
 </td>
 </tr>
 </table>
 </td>
 </tr>
 </table>
 <!-- End of wrapper table -->
 <p><a style="margin-top:15px;margin-left:1%;font-family:sans-serif;font-weight:100;font-size:11px;color:#666666;text-decoration:none;" href="${instance_url}">
 ${'This is a notification from RhodeCode. %(instance_url)s' % {'instance_url': instance_url}}
 </a></p>
 </body>
 </html>
 ''')


 class EmailSettingsSchema(colander.Schema):
     @colander.instantiate(validator=colander.Length(min=1))
     class recipients(colander.SequenceSchema):
         title = _('Recipients')
         description = _('Email addresses to send push events to')
         widget = deform.widget.SequenceWidget(min_len=1)

         recipient = colander.SchemaNode(
             colander.String(),
             title=_('Email address'),
             description=_('Email address'),
             default='',
             validator=colander.Email(),
             widget=deform.widget.TextInputWidget(
                 placeholder='user@domain.com',
             ),
         )


 class EmailIntegrationType(IntegrationTypeBase):
     key = 'email'
     display_name = _('Email')
     description = _('Send repo push summaries to a list of recipients via email')

     valid_events = [
         events.RepoPushEvent
     ]

     @classmethod
     def icon(cls):
         return '''
 <?xml version="1.0" encoding="UTF-8" standalone="no"?>
 <svg
 xmlns:dc="http://purl.org/dc/elements/1.1/"
 xmlns:cc="http://creativecommons.org/ns#"
 xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
 xmlns:svg="http://www.w3.org/2000/svg"
 xmlns="http://www.w3.org/2000/svg"
 xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
 xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
 viewBox="0 -256 1850 1850"
 id="svg2989"
 version="1.1"
 inkscape:version="0.48.3.1 r9886"
 width="100%"
 height="100%"
 sodipodi:docname="envelope_font_awesome.svg">
 <metadata
 id="metadata2999">
 <rdf:RDF>
 <cc:Work
 rdf:about="">
 <dc:format>image/svg+xml</dc:format>
 <dc:type
 rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
 </cc:Work>
 </rdf:RDF>
 </metadata>
 <defs
 id="defs2997" />
 <sodipodi:namedview
 pagecolor="#ffffff"
 bordercolor="#666666"
 borderopacity="1"
 objecttolerance="10"
 gridtolerance="10"
 guidetolerance="10"
 inkscape:pageopacity="0"
 inkscape:pageshadow="2"
 inkscape:window-width="640"
 inkscape:window-height="480"
 id="namedview2995"
 showgrid="false"
 inkscape:zoom="0.13169643"
 inkscape:cx="896"
 inkscape:cy="896"
 inkscape:window-x="0"
 inkscape:window-y="25"
 inkscape:window-maximized="0"
 inkscape:current-layer="svg2989" />
 <g
 transform="matrix(1,0,0,-1,37.966102,1282.678)"
 id="g2991">
 <path
 d="m 1664,32 v 768 q -32,-36 -69,-66 -268,-206 -426,-338 -51,-43 -83,-67 -32,-24 -86.5,-48.5 Q 945,256 897,256 h -1 -1 Q 847,256 792.5,280.5 738,305 706,329 674,353 623,396 465,528 197,734 160,764 128,800 V 32 Q 128,19 137.5,9.5 147,0 160,0 h 1472 q 13,0 22.5,9.5 9.5,9.5 9.5,22.5 z m 0,1051 v 11 13.5 q 0,0 -0.5,13 -0.5,13 -3,12.5 -2.5,-0.5 -5.5,9 -3,9.5 -9,7.5 -6,-2 -14,2.5 H 160 q -13,0 -22.5,-9.5 Q 128,1133 128,1120 128,952 275,836 468,684 676,519 682,514 711,489.5 740,465 757,452 774,439 801.5,420.5 829,402 852,393 q 23,-9 43,-9 h 1 1 q 20,0 43,9 23,9 50.5,27.5 27.5,18.5 44.5,31.5 17,13 46,37.5 29,24.5 35,29.5 208,165 401,317 54,43 100.5,115.5 46.5,72.5 46.5,131.5 z m 128,37 V 32 q 0,-66 -47,-113 -47,-47 -113,-47 H 160 Q 94,-128 47,-81 0,-34 0,32 v 1088 q 0,66 47,113 47,47 113,47 h 1472 q 66,0 113,-47 47,-47 47,-113 z"
 id="path2993"
 inkscape:connector-curvature="0"
 style="fill:currentColor" />
 </g>
 </svg>
 '''

     def settings_schema(self):
         schema = EmailSettingsSchema()
         schema.add(colander.SchemaNode(
             colander.Set(),
             widget=CheckboxChoiceWidgetDesc(
                 values=sorted(
                     [(e.name, e.display_name, e.description) for e in self.valid_events]
                 ),
             ),
             description="List of events activated for this integration",
             name='events'
         ))
         return schema

     def send_event(self, event):
         log.debug('handling event %s with integration %s', event.name, self)

         if event.__class__ not in self.valid_events:
             log.debug('event %r not present in valid event list (%s)', event, self.valid_events)
             return

         if not self.event_enabled(event):
             # NOTE(marcink): for legacy reasons we're skipping this check...
             # since the email event haven't had any settings...
             pass

         handler = EmailEventHandler(self.settings)
         handler(event, event_data=event.as_dict())


 class EmailEventHandler(object):
     def __init__(self, integration_settings):
         self.integration_settings = integration_settings

     def __call__(self, event, event_data):
         if isinstance(event, events.RepoPushEvent):
             self.repo_push_handler(event, event_data)
         else:
             log.debug('ignoring event: %r', event)

     def repo_push_handler(self, event, data):
         commit_num = len(data['push']['commits'])
         server_url = data['server_url']

         if commit_num == 1:
             if data['push']['branches']:
                 _subject = '[{repo_name}] {author} pushed {commit_num} commit on branches: {branches}'
             else:
                 _subject = '[{repo_name}] {author} pushed {commit_num} commit'
             subject = _subject.format(
                 author=data['actor']['username'],
                 repo_name=data['repo']['repo_name'],
                 commit_num=commit_num,
                 branches=', '.join(
                     branch['name'] for branch in data['push']['branches'])
             )
         else:
             if data['push']['branches']:
                 _subject = '[{repo_name}] {author} pushed {commit_num} commits on branches: {branches}'
             else:
                 _subject = '[{repo_name}] {author} pushed {commit_num} commits'
             subject = _subject.format(
                 author=data['actor']['username'],
                 repo_name=data['repo']['repo_name'],
                 commit_num=commit_num,
                 branches=', '.join(
                     branch['name'] for branch in data['push']['branches']))

         email_body_plaintext = render_with_traceback(
             REPO_PUSH_TEMPLATE_PLAINTEXT,
             data=data,
             subject=subject,
             instance_url=server_url)

         email_body_html = render_with_traceback(
             REPO_PUSH_TEMPLATE_HTML,
             data=data,
             subject=subject,
             instance_url=server_url)

         recipients = self.integration_settings['recipients']
         for email_address in recipients:
-            run_task(
-                tasks.send_email, email_address, subject,
-                email_body_plaintext, email_body_html)
+            run_task(tasks.send_email, email_address, subject,
+                     email_body_plaintext, email_body_html)
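
For orientation: the only functional logic in the hunk above is the push-summary subject formatting in repo_push_handler(); the change itself is a purely stylistic reflow of the trailing run_task(...) call. Below is a condensed, self-contained sketch of that subject logic in plain Python (no RhodeCode imports; the `data` dict is a hypothetical stand-in for the event payload):

    # Hypothetical stand-in for the push event payload consumed by repo_push_handler().
    data = {
        'actor': {'username': 'jdoe'},
        'repo': {'repo_name': 'my-repo'},
        'push': {
            'branches': [{'name': 'default'}, {'name': 'stable'}],
            'commits': [{'author': 'jdoe', 'date': '2020-01-01', 'message': 'fix'}],
        },
    }

    commit_num = len(data['push']['commits'])
    branches = data['push']['branches']
    subject = '[{repo_name}] {author} pushed {commit_num} commit{plural}{branch_info}'.format(
        repo_name=data['repo']['repo_name'],
        author=data['actor']['username'],
        commit_num=commit_num,
        plural='' if commit_num == 1 else 's',
        branch_info=' on branches: ' + ', '.join(
            b['name'] for b in branches) if branches else '')
    print(subject)  # [my-repo] jdoe pushed 1 commit on branches: default, stable

The handler then renders the plaintext and HTML templates with the same payload and queues one send_email task per configured recipient.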
@@ -1,324 +1,321 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2010-2020 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
 """
 Celery loader, run with::

     celery worker \
         --task-events \
         --beat \
         --autoscale=20,2 \
         --max-tasks-per-child 1 \
         --app rhodecode.lib.celerylib.loader \
         --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler \
         --loglevel DEBUG --ini=.dev/dev.ini
 """
 import os
 import logging
 import importlib

 from celery import Celery
 from celery import signals
 from celery import Task
 from celery import exceptions  # pragma: no cover
 from kombu.serialization import register
-from pyramid.threadlocal import get_current_request

 import rhodecode

-from rhodecode.lib.auth import AuthUser
 from rhodecode.lib.celerylib.utils import parse_ini_vars, ping_db
 from rhodecode.lib.ext_json import json
 from rhodecode.lib.pyramid_utils import bootstrap, setup_logging
 from rhodecode.lib.utils2 import str2bool
 from rhodecode.model import meta


 register('json_ext', json.dumps, json.loads,
          content_type='application/x-json-ext',
          content_encoding='utf-8')

 log = logging.getLogger('celery.rhodecode.loader')


 def add_preload_arguments(parser):
     parser.add_argument(
         '--ini', default=None,
         help='Path to ini configuration file.'
     )
     parser.add_argument(
         '--ini-var', default=None,
         help='Comma separated list of key=value to pass to ini.'
     )


 def get_logger(obj):
     custom_log = logging.getLogger(
         'rhodecode.task.{}'.format(obj.__class__.__name__))

     if rhodecode.CELERY_ENABLED:
         try:
             custom_log = obj.get_logger()
         except Exception:
             pass

     return custom_log


 imports = ['rhodecode.lib.celerylib.tasks']

 try:
     # try if we have EE tasks available
     importlib.import_module('rc_ee')
     imports.append('rc_ee.lib.celerylib.tasks')
 except ImportError:
     pass


 base_celery_config = {
     'result_backend': 'rpc://',
     'result_expires': 60 * 60 * 24,
     'result_persistent': True,
     'imports': imports,
     'worker_max_tasks_per_child': 100,
     'accept_content': ['json_ext'],
     'task_serializer': 'json_ext',
     'result_serializer': 'json_ext',
     'worker_hijack_root_logger': False,
     'database_table_names': {
         'task': 'beat_taskmeta',
         'group': 'beat_groupmeta',
     }
 }
 # init main celery app
 celery_app = Celery()
 celery_app.user_options['preload'].add(add_preload_arguments)
 ini_file_glob = None


 @signals.setup_logging.connect
 def setup_logging_callback(**kwargs):
     setup_logging(ini_file_glob)


 @signals.user_preload_options.connect
 def on_preload_parsed(options, **kwargs):
     from rhodecode.config.middleware import get_celery_config

     ini_location = options['ini']
     ini_vars = options['ini_var']
     celery_app.conf['INI_PYRAMID'] = options['ini']

     if ini_location is None:
         print('You must provide the paste --ini argument')
         exit(-1)

     options = None
     if ini_vars is not None:
         options = parse_ini_vars(ini_vars)

     global ini_file_glob
     ini_file_glob = ini_location

     log.debug('Bootstrapping RhodeCode application...')

     try:
         env = bootstrap(ini_location, options=options)
     except Exception:
         log.exception('Failed to bootstrap RhodeCode APP')
         raise

     log.debug('Got Pyramid ENV: %s', env)

     celery_settings = get_celery_config(env['registry'].settings)

     setup_celery_app(
         app=env['app'], root=env['root'], request=env['request'],
         registry=env['registry'], closer=env['closer'],
         celery_settings=celery_settings)

     # fix the global flag even if it's disabled via .ini file because this
     # is a worker code that doesn't need this to be disabled.
     rhodecode.CELERY_ENABLED = True


 @signals.task_prerun.connect
 def task_prerun_signal(task_id, task, args, **kwargs):
     ping_db()


 @signals.task_success.connect
 def task_success_signal(result, **kwargs):
     meta.Session.commit()
     closer = celery_app.conf['PYRAMID_CLOSER']
     if closer:
         closer()


 @signals.task_retry.connect
 def task_retry_signal(
         request, reason, einfo, **kwargs):
     meta.Session.remove()
     closer = celery_app.conf['PYRAMID_CLOSER']
     if closer:
         closer()


 @signals.task_failure.connect
 def task_failure_signal(
         task_id, exception, args, kwargs, traceback, einfo, **kargs):

     log.error('Task: %s failed !! exc_info: %s', task_id, einfo)
     from rhodecode.lib.exc_tracking import store_exception
     from rhodecode.lib.statsd_client import StatsdClient

     meta.Session.remove()

     # simulate sys.exc_info()
     exc_info = (einfo.type, einfo.exception, einfo.tb)
     store_exception(id(exc_info), exc_info, prefix='rhodecode-celery')
     statsd = StatsdClient.statsd
     if statsd:
         exc_type = "{}.{}".format(einfo.__class__.__module__, einfo.__class__.__name__)
         statsd.incr('rhodecode_exception_total',
                     tags=["exc_source:celery", "type:{}".format(exc_type)])

     closer = celery_app.conf['PYRAMID_CLOSER']
     if closer:
         closer()


 @signals.task_revoked.connect
 def task_revoked_signal(
         request, terminated, signum, expired, **kwargs):
     closer = celery_app.conf['PYRAMID_CLOSER']
     if closer:
         closer()


+class UNSET(object):
+    pass
+
+
+def set_celery_conf(app=UNSET(), root=UNSET(), request=UNSET(), registry=UNSET(), closer=UNSET()):
+
+    if request is not UNSET:
+        celery_app.conf.update({'PYRAMID_REQUEST': request})
+
+    if registry is not UNSET:
+        celery_app.conf.update({'PYRAMID_REGISTRY': registry})
+
+
 def setup_celery_app(app, root, request, registry, closer, celery_settings):
     log.debug('Got custom celery conf: %s', celery_settings)
     celery_config = base_celery_config
     celery_config.update({
         # store celerybeat scheduler db where the .ini file is
         'beat_schedule_filename': registry.settings['celerybeat-schedule.path'],
     })

     celery_config.update(celery_settings)
     celery_app.config_from_object(celery_config)

     celery_app.conf.update({'PYRAMID_APP': app})
     celery_app.conf.update({'PYRAMID_ROOT': root})
     celery_app.conf.update({'PYRAMID_REQUEST': request})
     celery_app.conf.update({'PYRAMID_REGISTRY': registry})
     celery_app.conf.update({'PYRAMID_CLOSER': closer})


 def configure_celery(config, celery_settings):
     """
     Helper that is called from our application creation logic. It gives
     connection info into running webapp and allows execution of tasks from
     RhodeCode itself
     """
     # store some globals into rhodecode
     rhodecode.CELERY_ENABLED = str2bool(
         config.registry.settings.get('use_celery'))
     if rhodecode.CELERY_ENABLED:
         log.info('Configuring celery based on `%s` settings', celery_settings)
         setup_celery_app(
             app=None, root=None, request=None, registry=config.registry,
             closer=None, celery_settings=celery_settings)


 def maybe_prepare_env(req):
     environ = {}
     try:
         environ.update({
             'PATH_INFO': req.environ['PATH_INFO'],
             'SCRIPT_NAME': req.environ['SCRIPT_NAME'],
             'HTTP_HOST': req.environ.get('HTTP_HOST', req.environ['SERVER_NAME']),
             'SERVER_NAME': req.environ['SERVER_NAME'],
             'SERVER_PORT': req.environ['SERVER_PORT'],
             'wsgi.url_scheme': req.environ['wsgi.url_scheme'],
         })
     except Exception:
         pass

     return environ


 class RequestContextTask(Task):
     """
     This is a celery task which will create a rhodecode app instance context
     for the task, patch pyramid with the original request
     that created the task and also add the user to the context.
     """

     def apply_async(self, args=None, kwargs=None, task_id=None, producer=None,
                     link=None, link_error=None, shadow=None, **options):
         """ queue the job to run (we are in web request context here) """
+        from rhodecode.lib.base import get_ip_addr

         req = self.app.conf['PYRAMID_REQUEST']
+        if not req:
+            raise ValueError('celery_app.conf is having empty PYRAMID_REQUEST key')

         log.debug('Running Task with class: %s. Request Class: %s',
                   self.__class__, req.__class__)

-        user_id =
-        ip_addr = None
+        user_id = 0

         # web case
         if hasattr(req, 'user'):
-            ip_addr = req.user.ip_addr
             user_id = req.user.user_id

         # api case
         elif hasattr(req, 'rpc_user'):
-            ip_addr = req.rpc_user.ip_addr
             user_id = req.rpc_user.user_id
-        else:
-            if user_id and ip_addr:
-                log.debug('Using data from celery proxy user')

-            else:
-                raise Exception(
-                    'Unable to fetch required data from request: {}. \n'
-                    'This task is required to be executed from context of '
-                    'request in a webapp. Task: {}'.format(
-                        repr(req),
-                        self.__class__
-                    )
-                )
-
-        if req:
-            # we hook into kwargs since it is the only way to pass our data to
-            # the celery worker
-            environ = maybe_prepare_env(req)
-            options['headers'] = options.get('headers', {})
-            options['headers'].update({
-                'rhodecode_proxy_data': {
-                    'environ': environ,
-                    'auth_user': {
-                        'ip_addr': ip_addr,
-                        'user_id': user_id
-                    },
-                }
-            })
+        # we hook into kwargs since it is the only way to pass our data to
+        # the celery worker
+        environ = maybe_prepare_env(req)
+        options['headers'] = options.get('headers', {})
+        options['headers'].update({
+            'rhodecode_proxy_data': {
+                'environ': environ,
+                'auth_user': {
+                    'ip_addr': get_ip_addr(req.environ),
+                    'user_id': user_id
+                },
+            }
+        })

         return super(RequestContextTask, self).apply_async(
             args, kwargs, task_id, producer, link, link_error, shadow, **options)
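
The new set_celery_conf() helper in this hunk relies on a sentinel default so that "argument not supplied" can be told apart from an explicit None. A small standalone illustration of the pattern (a sketch only; the names here are illustrative, not part of RhodeCode):

    class _Unset(object):
        """Sentinel type; a shared instance marks 'caller did not pass this argument'."""

    _UNSET = _Unset()

    def update_conf(conf, request=_UNSET, registry=_UNSET):
        # Only touch keys the caller explicitly provided; an explicit None is
        # still stored, which a plain truthiness check could not distinguish.
        if request is not _UNSET:
            conf['PYRAMID_REQUEST'] = request
        if registry is not _UNSET:
            conf['PYRAMID_REGISTRY'] = registry
        return conf

    conf = {}
    update_conf(conf, registry=None)   # sets only PYRAMID_REGISTRY (to None)
    update_conf(conf, request='req')   # sets only PYRAMID_REQUEST
    print(conf)  # {'PYRAMID_REGISTRY': None, 'PYRAMID_REQUEST': 'req'}

Note that the version in the hunk defaults each parameter to a fresh UNSET() instance but then tests `is not UNSET` against the class itself, so those guards are effectively always true; the sketch above compares against a single shared sentinel instance instead.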
@@ -1,347 +1,352 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import os |
|
21 | import os | |
22 | import time |
|
22 | import time | |
23 | import logging |
|
23 | import logging | |
24 | import tempfile |
|
24 | import tempfile | |
25 | import traceback |
|
25 | import traceback | |
26 | import threading |
|
26 | import threading | |
27 | import socket |
|
27 | import socket | |
28 | import random |
|
28 | import random | |
29 |
|
29 | |||
30 | from BaseHTTPServer import BaseHTTPRequestHandler |
|
30 | from BaseHTTPServer import BaseHTTPRequestHandler | |
31 | from SocketServer import TCPServer |
|
31 | from SocketServer import TCPServer | |
32 |
|
32 | |||
33 | import rhodecode |
|
33 | import rhodecode | |
34 | from rhodecode.lib.exceptions import HTTPLockedRC, HTTPBranchProtected |
|
34 | from rhodecode.lib.exceptions import HTTPLockedRC, HTTPBranchProtected | |
35 | from rhodecode.model import meta |
|
35 | from rhodecode.model import meta | |
36 | from rhodecode.lib.base import bootstrap_request, bootstrap_config |
|
36 | from rhodecode.lib.base import bootstrap_request, bootstrap_config | |
37 | from rhodecode.lib import hooks_base |
|
37 | from rhodecode.lib import hooks_base | |
38 | from rhodecode.lib.utils2 import AttributeDict |
|
38 | from rhodecode.lib.utils2 import AttributeDict | |
39 | from rhodecode.lib.ext_json import json |
|
39 | from rhodecode.lib.ext_json import json | |
40 | from rhodecode.lib import rc_cache |
|
40 | from rhodecode.lib import rc_cache | |
41 |
|
41 | |||
42 | log = logging.getLogger(__name__) |
|
42 | log = logging.getLogger(__name__) | |
43 |
|
43 | |||
44 |
|
44 | |||
45 | class HooksHttpHandler(BaseHTTPRequestHandler): |
|
45 | class HooksHttpHandler(BaseHTTPRequestHandler): | |
46 |
|
46 | |||
47 | def do_POST(self): |
|
47 | def do_POST(self): | |
48 | method, extras = self._read_request() |
|
48 | method, extras = self._read_request() | |
49 | txn_id = getattr(self.server, 'txn_id', None) |
|
49 | txn_id = getattr(self.server, 'txn_id', None) | |
50 | if txn_id: |
|
50 | if txn_id: | |
51 | log.debug('Computing TXN_ID based on `%s`:`%s`', |
|
51 | log.debug('Computing TXN_ID based on `%s`:`%s`', | |
52 | extras['repository'], extras['txn_id']) |
|
52 | extras['repository'], extras['txn_id']) | |
53 | computed_txn_id = rc_cache.utils.compute_key_from_params( |
|
53 | computed_txn_id = rc_cache.utils.compute_key_from_params( | |
54 | extras['repository'], extras['txn_id']) |
|
54 | extras['repository'], extras['txn_id']) | |
55 | if txn_id != computed_txn_id: |
|
55 | if txn_id != computed_txn_id: | |
56 | raise Exception( |
|
56 | raise Exception( | |
57 | 'TXN ID fail: expected {} got {} instead'.format( |
|
57 | 'TXN ID fail: expected {} got {} instead'.format( | |
58 | txn_id, computed_txn_id)) |
|
58 | txn_id, computed_txn_id)) | |
59 |
|
59 | |||
|
60 | request = getattr(self.server, 'request', None) | |||
60 | try: |
|
61 | try: | |
61 | result = self._call_hook(method, extras) |
|
62 | hooks = Hooks(request=request, log_prefix='HOOKS: {} '.format(self.server.server_address)) | |
|
63 | result = self._call_hook_method(hooks, method, extras) | |||
62 | except Exception as e: |
|
64 | except Exception as e: | |
63 | exc_tb = traceback.format_exc() |
|
65 | exc_tb = traceback.format_exc() | |
64 | result = { |
|
66 | result = { | |
65 | 'exception': e.__class__.__name__, |
|
67 | 'exception': e.__class__.__name__, | |
66 | 'exception_traceback': exc_tb, |
|
68 | 'exception_traceback': exc_tb, | |
67 | 'exception_args': e.args |
|
69 | 'exception_args': e.args | |
68 | } |
|
70 | } | |
69 | self._write_response(result) |
|
71 | self._write_response(result) | |
70 |
|
72 | |||
71 | def _read_request(self): |
|
73 | def _read_request(self): | |
72 | length = int(self.headers['Content-Length']) |
|
74 | length = int(self.headers['Content-Length']) | |
73 | body = self.rfile.read(length).decode('utf-8') |
|
75 | body = self.rfile.read(length).decode('utf-8') | |
74 | data = json.loads(body) |
|
76 | data = json.loads(body) | |
75 | return data['method'], data['extras'] |
|
77 | return data['method'], data['extras'] | |
76 |
|
78 | |||
77 | def _write_response(self, result): |
|
79 | def _write_response(self, result): | |
78 | self.send_response(200) |
|
80 | self.send_response(200) | |
79 | self.send_header("Content-type", "text/json") |
|
81 | self.send_header("Content-type", "text/json") | |
80 | self.end_headers() |
|
82 | self.end_headers() | |
81 | self.wfile.write(json.dumps(result)) |
|
83 | self.wfile.write(json.dumps(result)) | |
82 |
|
84 | |||
83 | def _call_hook(self, method, extras): |
|
85 | def _call_hook_method(self, hooks, method, extras): | |
84 | hooks = Hooks() |
|
|||
85 | try: |
|
86 | try: | |
86 | result = getattr(hooks, method)(extras) |
|
87 | result = getattr(hooks, method)(extras) | |
87 | finally: |
|
88 | finally: | |
88 | meta.Session.remove() |
|
89 | meta.Session.remove() | |
89 | return result |
|
90 | return result | |
90 |
|
91 | |||
91 | def log_message(self, format, *args): |
|
92 | def log_message(self, format, *args): | |
92 | """ |
|
93 | """ | |
93 | This is an overridden method of BaseHTTPRequestHandler which logs using |
|
94 | This is an overridden method of BaseHTTPRequestHandler which logs using | |
94 | logging library instead of writing directly to stderr. |
|
95 | logging library instead of writing directly to stderr. | |
95 | """ |
|
96 | """ | |
96 |
|
97 | |||
97 | message = format % args |
|
98 | message = format % args | |
98 |
|
99 | |||
99 | log.debug( |
|
100 | log.debug( | |
100 | "%s - - [%s] %s", self.client_address, |
|
101 | "HOOKS: %s - - [%s] %s", self.client_address, | |
101 | self.log_date_time_string(), message) |
|
102 | self.log_date_time_string(), message) | |
102 |
|
103 | |||
103 |
|
104 | |||
104 | class DummyHooksCallbackDaemon(object): |
|
105 | class DummyHooksCallbackDaemon(object): | |
105 | hooks_uri = '' |
|
106 | hooks_uri = '' | |
106 |
|
107 | |||
107 | def __init__(self): |
|
108 | def __init__(self): | |
108 | self.hooks_module = Hooks.__module__ |
|
109 | self.hooks_module = Hooks.__module__ | |
109 |
|
110 | |||
110 | def __enter__(self): |
|
111 | def __enter__(self): | |
111 | log.debug('Running `%s` callback daemon', self.__class__.__name__) |
|
112 | log.debug('Running `%s` callback daemon', self.__class__.__name__) | |
112 | return self |
|
113 | return self | |
113 |
|
114 | |||
114 | def __exit__(self, exc_type, exc_val, exc_tb): |
|
115 | def __exit__(self, exc_type, exc_val, exc_tb): | |
115 | log.debug('Exiting `%s` callback daemon', self.__class__.__name__) |
|
116 | log.debug('Exiting `%s` callback daemon', self.__class__.__name__) | |
116 |
|
117 | |||
117 |
|
118 | |||
118 | class ThreadedHookCallbackDaemon(object): |
|
119 | class ThreadedHookCallbackDaemon(object): | |
119 |
|
120 | |||
120 | _callback_thread = None |
|
121 | _callback_thread = None | |
121 | _daemon = None |
|
122 | _daemon = None | |
122 | _done = False |
|
123 | _done = False | |
123 |
|
124 | |||
124 | def __init__(self, txn_id=None, host=None, port=None): |
|
125 | def __init__(self, txn_id=None, host=None, port=None): | |
125 | self._prepare(txn_id=txn_id, host=host, port=port) |
|
126 | self._prepare(txn_id=txn_id, host=host, port=port) | |
126 |
|
127 | |||
127 | def __enter__(self): |
|
128 | def __enter__(self): | |
128 | log.debug('Running `%s` callback daemon', self.__class__.__name__) |
|
129 | log.debug('Running `%s` callback daemon', self.__class__.__name__) | |
129 | self._run() |
|
130 | self._run() | |
130 | return self |
|
131 | return self | |
131 |
|
132 | |||
132 | def __exit__(self, exc_type, exc_val, exc_tb): |
|
133 | def __exit__(self, exc_type, exc_val, exc_tb): | |
133 | log.debug('Exiting `%s` callback daemon', self.__class__.__name__) |
|
134 | log.debug('Exiting `%s` callback daemon', self.__class__.__name__) | |
134 | self._stop() |
|
135 | self._stop() | |
135 |
|
136 | |||
136 | def _prepare(self, txn_id=None, host=None, port=None): |
|
137 | def _prepare(self, txn_id=None, host=None, port=None): | |
137 | raise NotImplementedError() |
|
138 | raise NotImplementedError() | |
138 |
|
139 | |||
139 | def _run(self): |
|
140 | def _run(self): | |
140 | raise NotImplementedError() |
|
141 | raise NotImplementedError() | |
141 |
|
142 | |||
142 | def _stop(self): |
|
143 | def _stop(self): | |
143 | raise NotImplementedError() |
|
144 | raise NotImplementedError() | |
144 |
|
145 | |||
145 |
|
146 | |||
146 | class HttpHooksCallbackDaemon(ThreadedHookCallbackDaemon): |
|
147 | class HttpHooksCallbackDaemon(ThreadedHookCallbackDaemon): | |
147 | """ |
|
148 | """ | |
148 | Context manager which will run a callback daemon in a background thread. |
|
149 | Context manager which will run a callback daemon in a background thread. | |
149 | """ |
|
150 | """ | |
150 |
|
151 | |||
151 | hooks_uri = None |
|
152 | hooks_uri = None | |
152 |
|
153 | |||
153 | # From Python docs: Polling reduces our responsiveness to a shutdown |
|
154 | # From Python docs: Polling reduces our responsiveness to a shutdown | |
154 | # request and wastes cpu at all other times. |
|
155 | # request and wastes cpu at all other times. | |
155 | POLL_INTERVAL = 0.01 |
|
156 | POLL_INTERVAL = 0.01 | |
156 |
|
157 | |||
|
158 | @property | |||
|
159 | def _hook_prefix(self): | |||
|
160 | return 'HOOKS: {} '.format(self.hooks_uri) | |||
|
161 | ||||
157 | def get_hostname(self): |
|
162 | def get_hostname(self): | |
158 | return socket.gethostname() or '127.0.0.1' |
|
163 | return socket.gethostname() or '127.0.0.1' | |
159 |
|
164 | |||
160 | def get_available_port(self, min_port=20000, max_port=65535): |
|
165 | def get_available_port(self, min_port=20000, max_port=65535): | |
161 | from rhodecode.lib.utils2 import get_available_port as _get_port |
|
166 | from rhodecode.lib.utils2 import get_available_port as _get_port | |
162 | return _get_port(min_port, max_port) |
|
167 | return _get_port(min_port, max_port) | |
163 |
|
168 | |||
164 | def _prepare(self, txn_id=None, host=None, port=None): |
|
169 | def _prepare(self, txn_id=None, host=None, port=None): | |
|
170 | from pyramid.threadlocal import get_current_request | |||
|
171 | ||||
165 | if not host or host == "*": |
|
172 | if not host or host == "*": | |
166 | host = self.get_hostname() |
|
173 | host = self.get_hostname() | |
167 | if not port: |
|
174 | if not port: | |
168 | port = self.get_available_port() |
|
175 | port = self.get_available_port() | |
169 |
|
176 | |||
170 | server_address = (host, port) |
|
177 | server_address = (host, port) | |
171 | self.hooks_uri = '{}:{}'.format(host, port) |
|
178 | self.hooks_uri = '{}:{}'.format(host, port) | |
172 | self.txn_id = txn_id |
|
179 | self.txn_id = txn_id | |
173 | self._done = False |
|
180 | self._done = False | |
174 |
|
181 | |||
175 | log.debug( |
|
182 | log.debug( | |
176 | " |
|
183 | "%s Preparing HTTP callback daemon registering hook object: %s", | |
177 | self. |
|
184 | self._hook_prefix, HooksHttpHandler) | |
178 |
|
185 | |||
179 | self._daemon = TCPServer(server_address, HooksHttpHandler) |
|
186 | self._daemon = TCPServer(server_address, HooksHttpHandler) | |
180 | # inject transaction_id for later verification |
|
187 | # inject transaction_id for later verification | |
181 | self._daemon.txn_id = self.txn_id |
|
188 | self._daemon.txn_id = self.txn_id | |
182 |
|
189 | |||
|
190 | # pass the WEB app request into daemon | |||
|
191 | self._daemon.request = get_current_request() | |||
|
192 | ||||
183 | def _run(self): |
|
193 | def _run(self): | |
184 | log.debug("Running event loop of callback daemon in background thread") |
|
194 | log.debug("Running event loop of callback daemon in background thread") | |
185 | callback_thread = threading.Thread( |
|
195 | callback_thread = threading.Thread( | |
186 | target=self._daemon.serve_forever, |
|
196 | target=self._daemon.serve_forever, | |
187 | kwargs={'poll_interval': self.POLL_INTERVAL}) |
|
197 | kwargs={'poll_interval': self.POLL_INTERVAL}) | |
188 | callback_thread.daemon = True |
|
198 | callback_thread.daemon = True | |
189 | callback_thread.start() |
|
199 | callback_thread.start() | |
190 | self._callback_thread = callback_thread |
|
200 | self._callback_thread = callback_thread | |
191 |
|
201 | |||
192 | def _stop(self): |
|
202 | def _stop(self): | |
193 | log.debug("Waiting for background thread to finish.") |
|
203 | log.debug("Waiting for background thread to finish.") | |
194 | self._daemon.shutdown() |
|
204 | self._daemon.shutdown() | |
195 | self._callback_thread.join() |
|
205 | self._callback_thread.join() | |
196 | self._daemon = None |
|
206 | self._daemon = None | |
197 | self._callback_thread = None |
|
207 | self._callback_thread = None | |
198 | if self.txn_id: |
|
208 | if self.txn_id: | |
199 | txn_id_file = get_txn_id_data_path(self.txn_id) |
|
209 | txn_id_file = get_txn_id_data_path(self.txn_id) | |
200 | log.debug('Cleaning up TXN ID %s', txn_id_file) |
|
210 | log.debug('Cleaning up TXN ID %s', txn_id_file) | |
201 | if os.path.isfile(txn_id_file): |
|
211 | if os.path.isfile(txn_id_file): | |
202 | os.remove(txn_id_file) |
|
212 | os.remove(txn_id_file) | |
203 |
|
213 | |||
204 | log.debug("Background thread done.") |
|
214 | log.debug("Background thread done.") | |
205 |
|
215 | |||
206 |
|
216 | |||
207 | def get_txn_id_data_path(txn_id): |
|
217 | def get_txn_id_data_path(txn_id): | |
208 | import rhodecode |
|
218 | import rhodecode | |
209 |
|
219 | |||
210 | root = rhodecode.CONFIG.get('cache_dir') or tempfile.gettempdir() |
|
220 | root = rhodecode.CONFIG.get('cache_dir') or tempfile.gettempdir() | |
211 | final_dir = os.path.join(root, 'svn_txn_id') |
|
221 | final_dir = os.path.join(root, 'svn_txn_id') | |
212 |
|
222 | |||
213 | if not os.path.isdir(final_dir): |
|
223 | if not os.path.isdir(final_dir): | |
214 | os.makedirs(final_dir) |
|
224 | os.makedirs(final_dir) | |
215 | return os.path.join(final_dir, 'rc_txn_id_{}'.format(txn_id)) |
|
225 | return os.path.join(final_dir, 'rc_txn_id_{}'.format(txn_id)) | |
216 |
|
226 | |||
217 |
|
227 | |||
218 | def store_txn_id_data(txn_id, data_dict): |
|
228 | def store_txn_id_data(txn_id, data_dict): | |
219 | if not txn_id: |
|
229 | if not txn_id: | |
220 | log.warning('Cannot store txn_id because it is empty') |
|
230 | log.warning('Cannot store txn_id because it is empty') | |
221 | return |
|
231 | return | |
222 |
|
232 | |||
223 | path = get_txn_id_data_path(txn_id) |
|
233 | path = get_txn_id_data_path(txn_id) | |
224 | try: |
|
234 | try: | |
225 | with open(path, 'wb') as f: |
|
235 | with open(path, 'wb') as f: | |
226 | f.write(json.dumps(data_dict)) |
|
236 | f.write(json.dumps(data_dict)) | |
227 | except Exception: |
|
237 | except Exception: | |
228 | log.exception('Failed to write txn_id metadata') |
|
238 | log.exception('Failed to write txn_id metadata') | |
229 |
|
239 | |||
230 |
|
240 | |||
231 | def get_txn_id_from_store(txn_id): |
|
241 | def get_txn_id_from_store(txn_id): | |
232 | """ |
|
242 | """ | |
233 | Reads txn_id from store and if present returns the data for callback manager |
|
243 | Reads txn_id from store and if present returns the data for callback manager | |
234 | """ |
|
244 | """ | |
235 | path = get_txn_id_data_path(txn_id) |
|
245 | path = get_txn_id_data_path(txn_id) | |
236 | try: |
|
246 | try: | |
237 | with open(path, 'rb') as f: |
|
247 | with open(path, 'rb') as f: | |
238 | return json.loads(f.read()) |
|
248 | return json.loads(f.read()) | |
239 | except Exception: |
|
249 | except Exception: | |
240 | return {} |
|
250 | return {} | |
241 |
|
251 | |||
242 |
|
252 | |||
243 | def prepare_callback_daemon(extras, protocol, host, use_direct_calls, txn_id=None): |
|
253 | def prepare_callback_daemon(extras, protocol, host, use_direct_calls, txn_id=None): | |
244 | txn_details = get_txn_id_from_store(txn_id) |
|
254 | txn_details = get_txn_id_from_store(txn_id) | |
245 | port = txn_details.get('port', 0) |
|
255 | port = txn_details.get('port', 0) | |
246 | if use_direct_calls: |
|
256 | if use_direct_calls: | |
247 | callback_daemon = DummyHooksCallbackDaemon() |
|
257 | callback_daemon = DummyHooksCallbackDaemon() | |
248 | extras['hooks_module'] = callback_daemon.hooks_module |
|
258 | extras['hooks_module'] = callback_daemon.hooks_module | |
249 | else: |
|
259 | else: | |
250 | if protocol == 'http': |
|
260 | if protocol == 'http': | |
251 | callback_daemon = HttpHooksCallbackDaemon( |
|
261 | callback_daemon = HttpHooksCallbackDaemon( | |
252 | txn_id=txn_id, host=host, port=port) |
|
262 | txn_id=txn_id, host=host, port=port) | |
253 | else: |
|
263 | else: | |
254 | log.error('Unsupported callback daemon protocol "%s"', protocol) |
|
264 | log.error('Unsupported callback daemon protocol "%s"', protocol) | |
255 | raise Exception('Unsupported callback daemon protocol.') |
|
265 | raise Exception('Unsupported callback daemon protocol.') | |
256 |
|
266 | |||
257 | extras['hooks_uri'] = callback_daemon.hooks_uri |
|
267 | extras['hooks_uri'] = callback_daemon.hooks_uri | |
258 | extras['hooks_protocol'] = protocol |
|
268 | extras['hooks_protocol'] = protocol | |
259 | extras['time'] = time.time() |
|
269 | extras['time'] = time.time() | |
260 |
|
270 | |||
261 | # register txn_id |
|
271 | # register txn_id | |
262 | extras['txn_id'] = txn_id |
|
272 | extras['txn_id'] = txn_id | |
263 | log.debug('Prepared a callback daemon: %s at url `%s`', |
|
273 | log.debug('Prepared a callback daemon: %s at url `%s`', | |
264 | callback_daemon.__class__.__name__, callback_daemon.hooks_uri) |
|
274 | callback_daemon.__class__.__name__, callback_daemon.hooks_uri) | |
265 | return callback_daemon, extras |
|
275 | return callback_daemon, extras | |
266 |
|
276 | |||
267 |
|
277 | |||
268 | class Hooks(object): |
|
278 | class Hooks(object): | |
269 | """ |
|
279 | """ | |
270 | Exposes the hooks for remote call backs |
|
280 | Exposes the hooks for remote call backs | |
271 | """ |
|
281 | """ | |
|
282 | def __init__(self, request=None, log_prefix=''): | |||
|
283 | self.log_prefix = log_prefix | |||
|
284 | self.request = request | |||
272 |
|
285 | |||
273 | def repo_size(self, extras): |
|
286 | def repo_size(self, extras): | |
274 | log.debug("Called repo_size of %s object", self) |
|
287 | log.debug("%sCalled repo_size of %s object", self.log_prefix, self) | |
275 | return self._call_hook(hooks_base.repo_size, extras) |
|
288 | return self._call_hook(hooks_base.repo_size, extras) | |
276 |
|
289 | |||
277 | def pre_pull(self, extras): |
|
290 | def pre_pull(self, extras): | |
278 | log.debug("Called pre_pull of %s object", self) |
|
291 | log.debug("%sCalled pre_pull of %s object", self.log_prefix, self) | |
279 | return self._call_hook(hooks_base.pre_pull, extras) |
|
292 | return self._call_hook(hooks_base.pre_pull, extras) | |
280 |
|
293 | |||
281 | def post_pull(self, extras): |
|
294 | def post_pull(self, extras): | |
282 | log.debug("Called post_pull of %s object", self) |
|
295 | log.debug("%sCalled post_pull of %s object", self.log_prefix, self) | |
283 | return self._call_hook(hooks_base.post_pull, extras) |
|
296 | return self._call_hook(hooks_base.post_pull, extras) | |
284 |
|
297 | |||
285 | def pre_push(self, extras): |
|
298 | def pre_push(self, extras): | |
286 | log.debug("Called pre_push of %s object", self) |
|
299 | log.debug("%sCalled pre_push of %s object", self.log_prefix, self) | |
287 | return self._call_hook(hooks_base.pre_push, extras) |
|
300 | return self._call_hook(hooks_base.pre_push, extras) | |
288 |
|
301 | |||
289 | def post_push(self, extras): |
|
302 | def post_push(self, extras): | |
290 | log.debug("Called post_push of %s object", self) |
|
303 | log.debug("%sCalled post_push of %s object", self.log_prefix, self) | |
291 | return self._call_hook(hooks_base.post_push, extras) |
|
304 | return self._call_hook(hooks_base.post_push, extras) | |
292 |
|
305 | |||
293 | def _call_hook(self, hook, extras): |
|
306 | def _call_hook(self, hook, extras): | |
294 | extras = AttributeDict(extras) |
|
307 | extras = AttributeDict(extras) | |
295 | server_url = extras['server_url'] |
|
308 | server_url = extras['server_url'] | |
296 | request = bootstrap_request(application_url=server_url) |
|
|||
297 |
|
309 | |||
298 | bootstrap_config(request) # inject routes and other interfaces |
|
310 | extras.request = self.request | |
299 |
|
||||
300 | # inject the user for usage in hooks |
|
|||
301 | request.user = AttributeDict({'username': extras.username, |
|
|||
302 | 'ip_addr': extras.ip, |
|
|||
303 | 'user_id': extras.user_id}) |
|
|||
304 |
|
||||
305 | extras.request = request |
|
|||
306 |
|
311 | |||
307 | try: |
|
312 | try: | |
308 | result = hook(extras) |
|
313 | result = hook(extras) | |
309 | if result is None: |
|
314 | if result is None: | |
310 | raise Exception( |
|
315 | raise Exception( | |
311 | 'Failed to obtain hook result from func: {}'.format(hook)) |
|
316 | 'Failed to obtain hook result from func: {}'.format(hook)) | |
312 | except HTTPBranchProtected as handled_error: |
|
317 | except HTTPBranchProtected as handled_error: | |
313 | # Those special cases doesn't need error reporting. It's a case of |
|
318 | # Those special cases doesn't need error reporting. It's a case of | |
314 | # locked repo or protected branch |
|
319 | # locked repo or protected branch | |
315 | result = AttributeDict({ |
|
320 | result = AttributeDict({ | |
316 | 'status': handled_error.code, |
|
321 | 'status': handled_error.code, | |
317 | 'output': handled_error.explanation |
|
322 | 'output': handled_error.explanation | |
318 | }) |
|
323 | }) | |
319 | except (HTTPLockedRC, Exception) as error: |
|
324 | except (HTTPLockedRC, Exception) as error: | |
320 | # locked needs different handling since we need to also |
|
325 | # locked needs different handling since we need to also | |
321 | # handle PULL operations |
|
326 | # handle PULL operations | |
322 | exc_tb = '' |
|
327 | exc_tb = '' | |
323 | if not isinstance(error, HTTPLockedRC): |
|
328 | if not isinstance(error, HTTPLockedRC): | |
324 | exc_tb = traceback.format_exc() |
|
329 | exc_tb = traceback.format_exc() | |
325 | log.exception('Exception when handling hook %s', hook) |
|
330 | log.exception('%sException when handling hook %s', self.log_prefix, hook) | |
326 | error_args = error.args |
|
331 | error_args = error.args | |
327 | return { |
|
332 | return { | |
328 | 'status': 128, |
|
333 | 'status': 128, | |
329 | 'output': '', |
|
334 | 'output': '', | |
330 | 'exception': type(error).__name__, |
|
335 | 'exception': type(error).__name__, | |
331 | 'exception_traceback': exc_tb, |
|
336 | 'exception_traceback': exc_tb, | |
332 | 'exception_args': error_args, |
|
337 | 'exception_args': error_args, | |
333 | } |
|
338 | } | |
334 | finally: |
|
339 | finally: | |
335 | meta.Session.remove() |
|
340 | meta.Session.remove() | |
336 |
|
341 | |||
337 | log.debug('Got hook call response %s', result) |
|
342 | log.debug('%sGot hook call response %s', self.log_prefix, result) | |
338 | return { |
|
343 | return { | |
339 | 'status': result.status, |
|
344 | 'status': result.status, | |
340 | 'output': result.output, |
|
345 | 'output': result.output, | |
341 | } |
|
346 | } | |
342 |
|
347 | |||
343 | def __enter__(self): |
|
348 | def __enter__(self): | |
344 | return self |
|
349 | return self | |
345 |
|
350 | |||
346 | def __exit__(self, exc_type, exc_val, exc_tb): |
|
351 | def __exit__(self, exc_type, exc_val, exc_tb): | |
347 | pass |
|
352 | pass |
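
For context on the hunk above: it passes the active Pyramid request into the HTTP hooks callback daemon and prefixes its log output with `HOOKS:`. Below is a minimal usage sketch of the machinery being changed, assuming the diffed module is rhodecode.lib.hooks_daemon; the extras values, repository name and host are illustrative placeholders, not data taken from this changeset.

    # Minimal sketch (Python 2, matching the module above). Assumes the code in
    # the diff lives in rhodecode.lib.hooks_daemon; all extras values below are
    # placeholders for what the VCS middleware normally fills in.
    import json
    import urllib2

    from rhodecode.lib.hooks_daemon import prepare_callback_daemon

    extras = {
        'server_url': 'http://localhost:5000',   # placeholder
        'repository': 'example-repo',            # placeholder
        'username': 'admin', 'ip': '127.0.0.1', 'user_id': 2,  # placeholders
    }

    # protocol='http' with use_direct_calls=False selects HttpHooksCallbackDaemon;
    # extras comes back extended with hooks_uri, hooks_protocol, time and txn_id.
    callback_daemon, extras = prepare_callback_daemon(
        extras, protocol='http', host='127.0.0.1', use_direct_calls=False)

    with callback_daemon:
        # While the daemon thread runs, a client (normally the VCS process)
        # POSTs a JSON body {"method": ..., "extras": ...} to hooks_uri, and
        # HooksHttpHandler dispatches it to Hooks.<method>(extras), returning
        # a JSON result dict.
        payload = json.dumps({'method': 'repo_size', 'extras': extras})
        req = urllib2.Request('http://{}'.format(extras['hooks_uri']), data=payload)
        result = json.loads(urllib2.urlopen(req).read())
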
@@ -1,454 +1,453 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2011-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2011-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 |
|
21 | |||
22 | """ |
|
22 | """ | |
23 | Model for notifications |
|
23 | Model for notifications | |
24 | """ |
|
24 | """ | |
25 |
|
25 | |||
26 | import logging |
|
26 | import logging | |
27 | import traceback |
|
27 | import traceback | |
28 |
|
28 | |||
29 | import premailer |
|
29 | import premailer | |
30 | from pyramid.threadlocal import get_current_request |
|
30 | from pyramid.threadlocal import get_current_request | |
31 | from sqlalchemy.sql.expression import false, true |
|
31 | from sqlalchemy.sql.expression import false, true | |
32 |
|
32 | |||
33 | import rhodecode |
|
33 | import rhodecode | |
34 | from rhodecode.lib import helpers as h |
|
34 | from rhodecode.lib import helpers as h | |
35 | from rhodecode.model import BaseModel |
|
35 | from rhodecode.model import BaseModel | |
36 | from rhodecode.model.db import Notification, User, UserNotification |
|
36 | from rhodecode.model.db import Notification, User, UserNotification | |
37 | from rhodecode.model.meta import Session |
|
37 | from rhodecode.model.meta import Session | |
38 | from rhodecode.translation import TranslationString |
|
38 | from rhodecode.translation import TranslationString | |
39 |
|
39 | |||
40 | log = logging.getLogger(__name__) |
|
40 | log = logging.getLogger(__name__) | |
41 |
|
41 | |||
42 |
|
42 | |||
43 | class NotificationModel(BaseModel): |
|
43 | class NotificationModel(BaseModel): | |
44 |
|
44 | |||
45 | cls = Notification |
|
45 | cls = Notification | |
46 |
|
46 | |||
47 | def __get_notification(self, notification): |
|
47 | def __get_notification(self, notification): | |
48 | if isinstance(notification, Notification): |
|
48 | if isinstance(notification, Notification): | |
49 | return notification |
|
49 | return notification | |
50 | elif isinstance(notification, (int, long)): |
|
50 | elif isinstance(notification, (int, long)): | |
51 | return Notification.get(notification) |
|
51 | return Notification.get(notification) | |
52 | else: |
|
52 | else: | |
53 | if notification: |
|
53 | if notification: | |
54 | raise Exception('notification must be int, long or Instance' |
|
54 | raise Exception('notification must be int, long or Instance' | |
55 | ' of Notification got %s' % type(notification)) |
|
55 | ' of Notification got %s' % type(notification)) | |
56 |
|
56 | |||
57 | def create( |
|
57 | def create( | |
58 | self, created_by, notification_subject='', notification_body='', |
|
58 | self, created_by, notification_subject='', notification_body='', | |
59 | notification_type=Notification.TYPE_MESSAGE, recipients=None, |
|
59 | notification_type=Notification.TYPE_MESSAGE, recipients=None, | |
60 | mention_recipients=None, with_email=True, email_kwargs=None): |
|
60 | mention_recipients=None, with_email=True, email_kwargs=None): | |
61 | """ |
|
61 | """ | |
62 |
|
62 | |||
63 | Creates notification of given type |
|
63 | Creates notification of given type | |
64 |
|
64 | |||
65 | :param created_by: int, str or User instance. User who created this |
|
65 | :param created_by: int, str or User instance. User who created this | |
66 | notification |
|
66 | notification | |
67 | :param notification_subject: subject of notification itself, |
|
67 | :param notification_subject: subject of notification itself, | |
68 | it will be generated automatically from notification_type if not specified |
|
68 | it will be generated automatically from notification_type if not specified | |
69 | :param notification_body: body of notification text |
|
69 | :param notification_body: body of notification text | |
70 | it will be generated automatically from notification_type if not specified |
|
70 | it will be generated automatically from notification_type if not specified | |
71 | :param notification_type: type of notification, based on that we |
|
71 | :param notification_type: type of notification, based on that we | |
72 | pick templates |
|
72 | pick templates | |
73 | :param recipients: list of int, str or User objects, when None |
|
73 | :param recipients: list of int, str or User objects, when None | |
74 | is given send to all admins |
|
74 | is given send to all admins | |
75 | :param mention_recipients: list of int, str or User objects, |
|
75 | :param mention_recipients: list of int, str or User objects, | |
76 | that were mentioned |
|
76 | that were mentioned | |
77 | :param with_email: send email with this notification |
|
77 | :param with_email: send email with this notification | |
78 | :param email_kwargs: dict with arguments to generate email |
|
78 | :param email_kwargs: dict with arguments to generate email | |
79 | """ |
|
79 | """ | |
80 |
|
80 | |||
81 | from rhodecode.lib.celerylib import tasks, run_task |
|
81 | from rhodecode.lib.celerylib import tasks, run_task | |
82 |
|
82 | |||
83 | if recipients and not getattr(recipients, '__iter__', False): |
|
83 | if recipients and not getattr(recipients, '__iter__', False): | |
84 | raise Exception('recipients must be an iterable object') |
|
84 | raise Exception('recipients must be an iterable object') | |
85 |
|
85 | |||
86 | if not (notification_subject and notification_body) and not notification_type: |
|
86 | if not (notification_subject and notification_body) and not notification_type: | |
87 | raise ValueError('notification_subject, and notification_body ' |
|
87 | raise ValueError('notification_subject, and notification_body ' | |
88 | 'cannot be empty when notification_type is not specified') |
|
88 | 'cannot be empty when notification_type is not specified') | |
89 |
|
89 | |||
90 | created_by_obj = self._get_user(created_by) |
|
90 | created_by_obj = self._get_user(created_by) | |
91 |
|
91 | |||
92 | if not created_by_obj: |
|
92 | if not created_by_obj: | |
93 | raise Exception('unknown user %s' % created_by) |
|
93 | raise Exception('unknown user %s' % created_by) | |
94 |
|
94 | |||
95 | # default MAIN body if not given |
|
95 | # default MAIN body if not given | |
96 | email_kwargs = email_kwargs or {'body': notification_body} |
|
96 | email_kwargs = email_kwargs or {'body': notification_body} | |
97 | mention_recipients = mention_recipients or set() |
|
97 | mention_recipients = mention_recipients or set() | |
98 |
|
98 | |||
99 | if recipients is None: |
|
99 | if recipients is None: | |
100 | # recipients is None means to all admins |
|
100 | # recipients is None means to all admins | |
101 | recipients_objs = User.query().filter(User.admin == true()).all() |
|
101 | recipients_objs = User.query().filter(User.admin == true()).all() | |
102 | log.debug('sending notifications %s to admins: %s', |
|
102 | log.debug('sending notifications %s to admins: %s', | |
103 | notification_type, recipients_objs) |
|
103 | notification_type, recipients_objs) | |
104 | else: |
|
104 | else: | |
105 | recipients_objs = set() |
|
105 | recipients_objs = set() | |
106 | for u in recipients: |
|
106 | for u in recipients: | |
107 | obj = self._get_user(u) |
|
107 | obj = self._get_user(u) | |
108 | if obj: |
|
108 | if obj: | |
109 | recipients_objs.add(obj) |
|
109 | recipients_objs.add(obj) | |
110 | else: # we didn't find this user, log the error and carry on |
|
110 | else: # we didn't find this user, log the error and carry on | |
111 | log.error('cannot notify unknown user %r', u) |
|
111 | log.error('cannot notify unknown user %r', u) | |
112 |
|
112 | |||
113 | if not recipients_objs: |
|
113 | if not recipients_objs: | |
114 | raise Exception('no valid recipients specified') |
|
114 | raise Exception('no valid recipients specified') | |
115 |
|
115 | |||
116 | log.debug('sending notifications %s to %s', |
|
116 | log.debug('sending notifications %s to %s', | |
117 | notification_type, recipients_objs) |
|
117 | notification_type, recipients_objs) | |
118 |
|
118 | |||
119 | # add mentioned users into recipients |
|
119 | # add mentioned users into recipients | |
120 | final_recipients = set(recipients_objs).union(mention_recipients) |
|
120 | final_recipients = set(recipients_objs).union(mention_recipients) | |
121 |
|
121 | |||
122 | (subject, email_body, email_body_plaintext) = \ |
|
122 | (subject, email_body, email_body_plaintext) = \ | |
123 | EmailNotificationModel().render_email(notification_type, **email_kwargs) |
|
123 | EmailNotificationModel().render_email(notification_type, **email_kwargs) | |
124 |
|
124 | |||
125 | if not notification_subject: |
|
125 | if not notification_subject: | |
126 | notification_subject = subject |
|
126 | notification_subject = subject | |
127 |
|
127 | |||
128 | if not notification_body: |
|
128 | if not notification_body: | |
129 | notification_body = email_body_plaintext |
|
129 | notification_body = email_body_plaintext | |
130 |
|
130 | |||
131 | notification = Notification.create( |
|
131 | notification = Notification.create( | |
132 | created_by=created_by_obj, subject=notification_subject, |
|
132 | created_by=created_by_obj, subject=notification_subject, | |
133 | body=notification_body, recipients=final_recipients, |
|
133 | body=notification_body, recipients=final_recipients, | |
134 | type_=notification_type |
|
134 | type_=notification_type | |
135 | ) |
|
135 | ) | |
136 |
|
136 | |||
137 | if not with_email: # skip sending email, and just create notification |
|
137 | if not with_email: # skip sending email, and just create notification | |
138 | return notification |
|
138 | return notification | |
139 |
|
139 | |||
140 | # don't send email to person who created this comment |
|
140 | # don't send email to person who created this comment | |
141 | rec_objs = set(recipients_objs).difference({created_by_obj}) |
|
141 | rec_objs = set(recipients_objs).difference({created_by_obj}) | |
142 |
|
142 | |||
143 | # now notify all recipients in question |
|
143 | # now notify all recipients in question | |
144 |
|
144 | |||
145 | for recipient in rec_objs.union(mention_recipients): |
|
145 | for recipient in rec_objs.union(mention_recipients): | |
146 | # inject current recipient |
|
146 | # inject current recipient | |
147 | email_kwargs['recipient'] = recipient |
|
147 | email_kwargs['recipient'] = recipient | |
148 | email_kwargs['mention'] = recipient in mention_recipients |
|
148 | email_kwargs['mention'] = recipient in mention_recipients | |
149 | (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email( |
|
149 | (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email( | |
150 | notification_type, **email_kwargs) |
|
150 | notification_type, **email_kwargs) | |
151 |
|
151 | |||
152 | extra_headers = None |
|
152 | extra_headers = None | |
153 | if 'thread_ids' in email_kwargs: |
|
153 | if 'thread_ids' in email_kwargs: | |
154 | extra_headers = {'thread_ids': email_kwargs.pop('thread_ids')} |
|
154 | extra_headers = {'thread_ids': email_kwargs.pop('thread_ids')} | |
155 |
|
155 | |||
156 | log.debug('Creating notification email task for user:`%s`', recipient) |
|
156 | log.debug('Creating notification email task for user:`%s`', recipient) | |
157 | task = run_task( |
|
157 | task = run_task(tasks.send_email, recipient.email, subject, | |
158 | tasks.send_email, recipient.email, subject, |
|
158 | email_body_plaintext, email_body, extra_headers=extra_headers) | |
159 | email_body_plaintext, email_body, extra_headers=extra_headers) |
|
|||
160 | log.debug('Created email task: %s', task) |
|
159 | log.debug('Created email task: %s', task) | |
161 |
|
160 | |||
162 | return notification |
|
161 | return notification | |
163 |
|
162 | |||
164 | def delete(self, user, notification): |
|
163 | def delete(self, user, notification): | |
165 | # we don't want to remove actual notification just the assignment |
|
164 | # we don't want to remove actual notification just the assignment | |
166 | try: |
|
165 | try: | |
167 | notification = self.__get_notification(notification) |
|
166 | notification = self.__get_notification(notification) | |
168 | user = self._get_user(user) |
|
167 | user = self._get_user(user) | |
169 | if notification and user: |
|
168 | if notification and user: | |
170 | obj = UserNotification.query()\ |
|
169 | obj = UserNotification.query()\ | |
171 | .filter(UserNotification.user == user)\ |
|
170 | .filter(UserNotification.user == user)\ | |
172 | .filter(UserNotification.notification == notification)\ |
|
171 | .filter(UserNotification.notification == notification)\ | |
173 | .one() |
|
172 | .one() | |
174 | Session().delete(obj) |
|
173 | Session().delete(obj) | |
175 | return True |
|
174 | return True | |
176 | except Exception: |
|
175 | except Exception: | |
177 | log.error(traceback.format_exc()) |
|
176 | log.error(traceback.format_exc()) | |
178 | raise |
|
177 | raise | |
179 |
|
178 | |||
180 | def get_for_user(self, user, filter_=None): |
|
179 | def get_for_user(self, user, filter_=None): | |
181 | """ |
|
180 | """ | |
182 | Get mentions for given user, filter them if filter dict is given |
|
181 | Get mentions for given user, filter them if filter dict is given | |
183 | """ |
|
182 | """ | |
184 | user = self._get_user(user) |
|
183 | user = self._get_user(user) | |
185 |
|
184 | |||
186 | q = UserNotification.query()\ |
|
185 | q = UserNotification.query()\ | |
187 | .filter(UserNotification.user == user)\ |
|
186 | .filter(UserNotification.user == user)\ | |
188 | .join(( |
|
187 | .join(( | |
189 | Notification, UserNotification.notification_id == |
|
188 | Notification, UserNotification.notification_id == | |
190 | Notification.notification_id)) |
|
189 | Notification.notification_id)) | |
191 | if filter_ == ['all']: |
|
190 | if filter_ == ['all']: | |
192 | q = q # no filter |
|
191 | q = q # no filter | |
193 | elif filter_ == ['unread']: |
|
192 | elif filter_ == ['unread']: | |
194 | q = q.filter(UserNotification.read == false()) |
|
193 | q = q.filter(UserNotification.read == false()) | |
195 | elif filter_: |
|
194 | elif filter_: | |
196 | q = q.filter(Notification.type_.in_(filter_)) |
|
195 | q = q.filter(Notification.type_.in_(filter_)) | |
197 |
|
196 | |||
198 | return q |
|
197 | return q | |
199 |
|
198 | |||
200 | def mark_read(self, user, notification): |
|
199 | def mark_read(self, user, notification): | |
201 | try: |
|
200 | try: | |
202 | notification = self.__get_notification(notification) |
|
201 | notification = self.__get_notification(notification) | |
203 | user = self._get_user(user) |
|
202 | user = self._get_user(user) | |
204 | if notification and user: |
|
203 | if notification and user: | |
205 | obj = UserNotification.query()\ |
|
204 | obj = UserNotification.query()\ | |
206 | .filter(UserNotification.user == user)\ |
|
205 | .filter(UserNotification.user == user)\ | |
207 | .filter(UserNotification.notification == notification)\ |
|
206 | .filter(UserNotification.notification == notification)\ | |
208 | .one() |
|
207 | .one() | |
209 | obj.read = True |
|
208 | obj.read = True | |
210 | Session().add(obj) |
|
209 | Session().add(obj) | |
211 | return True |
|
210 | return True | |
212 | except Exception: |
|
211 | except Exception: | |
213 | log.error(traceback.format_exc()) |
|
212 | log.error(traceback.format_exc()) | |
214 | raise |
|
213 | raise | |
215 |
|
214 | |||
216 | def mark_all_read_for_user(self, user, filter_=None): |
|
215 | def mark_all_read_for_user(self, user, filter_=None): | |
217 | user = self._get_user(user) |
|
216 | user = self._get_user(user) | |
218 | q = UserNotification.query()\ |
|
217 | q = UserNotification.query()\ | |
219 | .filter(UserNotification.user == user)\ |
|
218 | .filter(UserNotification.user == user)\ | |
220 | .filter(UserNotification.read == false())\ |
|
219 | .filter(UserNotification.read == false())\ | |
221 | .join(( |
|
220 | .join(( | |
222 | Notification, UserNotification.notification_id == |
|
221 | Notification, UserNotification.notification_id == | |
223 | Notification.notification_id)) |
|
222 | Notification.notification_id)) | |
224 | if filter_ == ['unread']: |
|
223 | if filter_ == ['unread']: | |
225 | q = q.filter(UserNotification.read == false()) |
|
224 | q = q.filter(UserNotification.read == false()) | |
226 | elif filter_: |
|
225 | elif filter_: | |
227 | q = q.filter(Notification.type_.in_(filter_)) |
|
226 | q = q.filter(Notification.type_.in_(filter_)) | |
228 |
|
227 | |||
229 | # this is a little inefficient but sqlalchemy doesn't support |
|
228 | # this is a little inefficient but sqlalchemy doesn't support | |
230 | # update on joined tables :( |
|
229 | # update on joined tables :( | |
231 | for obj in q.all(): |
|
230 | for obj in q.all(): | |
232 | obj.read = True |
|
231 | obj.read = True | |
233 | Session().add(obj) |
|
232 | Session().add(obj) | |
234 |
|
233 | |||
235 | def get_unread_cnt_for_user(self, user): |
|
234 | def get_unread_cnt_for_user(self, user): | |
236 | user = self._get_user(user) |
|
235 | user = self._get_user(user) | |
237 | return UserNotification.query()\ |
|
236 | return UserNotification.query()\ | |
238 | .filter(UserNotification.read == false())\ |
|
237 | .filter(UserNotification.read == false())\ | |
239 | .filter(UserNotification.user == user).count() |
|
238 | .filter(UserNotification.user == user).count() | |
240 |
|
239 | |||
241 | def get_unread_for_user(self, user): |
|
240 | def get_unread_for_user(self, user): | |
242 | user = self._get_user(user) |
|
241 | user = self._get_user(user) | |
243 | return [x.notification for x in UserNotification.query() |
|
242 | return [x.notification for x in UserNotification.query() | |
244 | .filter(UserNotification.read == false()) |
|
243 | .filter(UserNotification.read == false()) | |
245 | .filter(UserNotification.user == user).all()] |
|
244 | .filter(UserNotification.user == user).all()] | |
246 |
|
245 | |||
247 | def get_user_notification(self, user, notification): |
|
246 | def get_user_notification(self, user, notification): | |
248 | user = self._get_user(user) |
|
247 | user = self._get_user(user) | |
249 | notification = self.__get_notification(notification) |
|
248 | notification = self.__get_notification(notification) | |
250 |
|
249 | |||
251 | return UserNotification.query()\ |
|
250 | return UserNotification.query()\ | |
252 | .filter(UserNotification.notification == notification)\ |
|
251 | .filter(UserNotification.notification == notification)\ | |
253 | .filter(UserNotification.user == user).scalar() |
|
252 | .filter(UserNotification.user == user).scalar() | |
254 |
|
253 | |||
255 | def make_description(self, notification, translate, show_age=True): |
|
254 | def make_description(self, notification, translate, show_age=True): | |
256 | """ |
|
255 | """ | |
257 | Creates a human readable description based on properties |
|
256 | Creates a human readable description based on properties | |
258 | of notification object |
|
257 | of notification object | |
259 | """ |
|
258 | """ | |
260 | _ = translate |
|
259 | _ = translate | |
261 | _map = { |
|
260 | _map = { | |
262 | notification.TYPE_CHANGESET_COMMENT: [ |
|
261 | notification.TYPE_CHANGESET_COMMENT: [ | |
263 | _('%(user)s commented on commit %(date_or_age)s'), |
|
262 | _('%(user)s commented on commit %(date_or_age)s'), | |
264 | _('%(user)s commented on commit at %(date_or_age)s'), |
|
263 | _('%(user)s commented on commit at %(date_or_age)s'), | |
265 | ], |
|
264 | ], | |
266 | notification.TYPE_MESSAGE: [ |
|
265 | notification.TYPE_MESSAGE: [ | |
267 | _('%(user)s sent message %(date_or_age)s'), |
|
266 | _('%(user)s sent message %(date_or_age)s'), | |
268 | _('%(user)s sent message at %(date_or_age)s'), |
|
267 | _('%(user)s sent message at %(date_or_age)s'), | |
269 | ], |
|
268 | ], | |
270 | notification.TYPE_MENTION: [ |
|
269 | notification.TYPE_MENTION: [ | |
271 | _('%(user)s mentioned you %(date_or_age)s'), |
|
270 | _('%(user)s mentioned you %(date_or_age)s'), | |
272 | _('%(user)s mentioned you at %(date_or_age)s'), |
|
271 | _('%(user)s mentioned you at %(date_or_age)s'), | |
273 | ], |
|
272 | ], | |
274 | notification.TYPE_REGISTRATION: [ |
|
273 | notification.TYPE_REGISTRATION: [ | |
275 | _('%(user)s registered in RhodeCode %(date_or_age)s'), |
|
274 | _('%(user)s registered in RhodeCode %(date_or_age)s'), | |
276 | _('%(user)s registered in RhodeCode at %(date_or_age)s'), |
|
275 | _('%(user)s registered in RhodeCode at %(date_or_age)s'), | |
277 | ], |
|
276 | ], | |
278 | notification.TYPE_PULL_REQUEST: [ |
|
277 | notification.TYPE_PULL_REQUEST: [ | |
279 | _('%(user)s opened new pull request %(date_or_age)s'), |
|
278 | _('%(user)s opened new pull request %(date_or_age)s'), | |
280 | _('%(user)s opened new pull request at %(date_or_age)s'), |
|
279 | _('%(user)s opened new pull request at %(date_or_age)s'), | |
281 | ], |
|
280 | ], | |
282 | notification.TYPE_PULL_REQUEST_UPDATE: [ |
|
281 | notification.TYPE_PULL_REQUEST_UPDATE: [ | |
283 | _('%(user)s updated pull request %(date_or_age)s'), |
|
282 | _('%(user)s updated pull request %(date_or_age)s'), | |
284 | _('%(user)s updated pull request at %(date_or_age)s'), |
|
283 | _('%(user)s updated pull request at %(date_or_age)s'), | |
285 | ], |
|
284 | ], | |
286 | notification.TYPE_PULL_REQUEST_COMMENT: [ |
|
285 | notification.TYPE_PULL_REQUEST_COMMENT: [ | |
287 | _('%(user)s commented on pull request %(date_or_age)s'), |
|
286 | _('%(user)s commented on pull request %(date_or_age)s'), | |
288 | _('%(user)s commented on pull request at %(date_or_age)s'), |
|
287 | _('%(user)s commented on pull request at %(date_or_age)s'), | |
289 | ], |
|
288 | ], | |
290 | } |
|
289 | } | |
291 |
|
290 | |||
292 | templates = _map[notification.type_] |
|
291 | templates = _map[notification.type_] | |
293 |
|
292 | |||
294 | if show_age: |
|
293 | if show_age: | |
295 | template = templates[0] |
|
294 | template = templates[0] | |
296 | date_or_age = h.age(notification.created_on) |
|
295 | date_or_age = h.age(notification.created_on) | |
297 | if translate: |
|
296 | if translate: | |
298 | date_or_age = translate(date_or_age) |
|
297 | date_or_age = translate(date_or_age) | |
299 |
|
298 | |||
300 | if isinstance(date_or_age, TranslationString): |
|
299 | if isinstance(date_or_age, TranslationString): | |
301 | date_or_age = date_or_age.interpolate() |
|
300 | date_or_age = date_or_age.interpolate() | |
302 |
|
301 | |||
303 | else: |
|
302 | else: | |
304 | template = templates[1] |
|
303 | template = templates[1] | |
305 | date_or_age = h.format_date(notification.created_on) |
|
304 | date_or_age = h.format_date(notification.created_on) | |
306 |
|
305 | |||
307 | return template % { |
|
306 | return template % { | |
308 | 'user': notification.created_by_user.username, |
|
307 | 'user': notification.created_by_user.username, | |
309 | 'date_or_age': date_or_age, |
|
308 | 'date_or_age': date_or_age, | |
310 | } |
|
309 | } | |
311 |
|
310 | |||
312 |
|
311 | |||
313 | # Templates for Titles, that could be overwritten by rcextensions |
|
312 | # Templates for Titles, that could be overwritten by rcextensions | |
314 | # Title of email for pull-request update |
|
313 | # Title of email for pull-request update | |
315 | EMAIL_PR_UPDATE_SUBJECT_TEMPLATE = '' |
|
314 | EMAIL_PR_UPDATE_SUBJECT_TEMPLATE = '' | |
316 | # Title of email for request for pull request review |
|
315 | # Title of email for request for pull request review | |
317 | EMAIL_PR_REVIEW_SUBJECT_TEMPLATE = '' |
|
316 | EMAIL_PR_REVIEW_SUBJECT_TEMPLATE = '' | |
318 |
|
317 | |||
319 | # Title of email for general comment on pull request |
|
318 | # Title of email for general comment on pull request | |
320 | EMAIL_PR_COMMENT_SUBJECT_TEMPLATE = '' |
|
319 | EMAIL_PR_COMMENT_SUBJECT_TEMPLATE = '' | |
321 | # Title of email for general comment which includes status change on pull request |
|
320 | # Title of email for general comment which includes status change on pull request | |
322 | EMAIL_PR_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE = '' |
|
321 | EMAIL_PR_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE = '' | |
323 | # Title of email for inline comment on a file in pull request |
|
322 | # Title of email for inline comment on a file in pull request | |
324 | EMAIL_PR_COMMENT_FILE_SUBJECT_TEMPLATE = '' |
|
323 | EMAIL_PR_COMMENT_FILE_SUBJECT_TEMPLATE = '' | |
325 |
|
324 | |||
326 | # Title of email for general comment on commit |
|
325 | # Title of email for general comment on commit | |
327 | EMAIL_COMMENT_SUBJECT_TEMPLATE = '' |
|
326 | EMAIL_COMMENT_SUBJECT_TEMPLATE = '' | |
328 | # Title of email for general comment which includes status change on commit |
|
327 | # Title of email for general comment which includes status change on commit | |
329 | EMAIL_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE = '' |
|
328 | EMAIL_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE = '' | |
330 | # Title of email for inline comment on a file in commit |
|
329 | # Title of email for inline comment on a file in commit | |
331 | EMAIL_COMMENT_FILE_SUBJECT_TEMPLATE = '' |
|
330 | EMAIL_COMMENT_FILE_SUBJECT_TEMPLATE = '' | |
332 |
|
331 | |||
333 | import cssutils |
|
332 | import cssutils | |
334 | # hijack css utils logger and replace with ours |
|
333 | # hijack css utils logger and replace with ours | |
335 | log = logging.getLogger('rhodecode.cssutils.premailer') |
|
334 | log = logging.getLogger('rhodecode.cssutils.premailer') | |
336 | cssutils.log.setLog(log) |
|
335 | cssutils.log.setLog(log) | |
337 |
|
336 | |||
338 |
|
337 | |||
339 | class EmailNotificationModel(BaseModel): |
|
338 | class EmailNotificationModel(BaseModel): | |
340 | TYPE_COMMIT_COMMENT = Notification.TYPE_CHANGESET_COMMENT |
|
339 | TYPE_COMMIT_COMMENT = Notification.TYPE_CHANGESET_COMMENT | |
341 | TYPE_REGISTRATION = Notification.TYPE_REGISTRATION |
|
340 | TYPE_REGISTRATION = Notification.TYPE_REGISTRATION | |
342 | TYPE_PULL_REQUEST = Notification.TYPE_PULL_REQUEST |
|
341 | TYPE_PULL_REQUEST = Notification.TYPE_PULL_REQUEST | |
343 | TYPE_PULL_REQUEST_COMMENT = Notification.TYPE_PULL_REQUEST_COMMENT |
|
342 | TYPE_PULL_REQUEST_COMMENT = Notification.TYPE_PULL_REQUEST_COMMENT | |
344 | TYPE_PULL_REQUEST_UPDATE = Notification.TYPE_PULL_REQUEST_UPDATE |
|
343 | TYPE_PULL_REQUEST_UPDATE = Notification.TYPE_PULL_REQUEST_UPDATE | |
345 | TYPE_MAIN = Notification.TYPE_MESSAGE |
|
344 | TYPE_MAIN = Notification.TYPE_MESSAGE | |
346 |
|
345 | |||
347 | TYPE_PASSWORD_RESET = 'password_reset' |
|
346 | TYPE_PASSWORD_RESET = 'password_reset' | |
348 | TYPE_PASSWORD_RESET_CONFIRMATION = 'password_reset_confirmation' |
|
347 | TYPE_PASSWORD_RESET_CONFIRMATION = 'password_reset_confirmation' | |
349 | TYPE_EMAIL_TEST = 'email_test' |
|
348 | TYPE_EMAIL_TEST = 'email_test' | |
350 | TYPE_EMAIL_EXCEPTION = 'exception' |
|
349 | TYPE_EMAIL_EXCEPTION = 'exception' | |
351 | TYPE_UPDATE_AVAILABLE = 'update_available' |
|
350 | TYPE_UPDATE_AVAILABLE = 'update_available' | |
352 | TYPE_TEST = 'test' |
|
351 | TYPE_TEST = 'test' | |
353 |
|
352 | |||
354 | email_types = { |
|
353 | email_types = { | |
355 | TYPE_MAIN: |
|
354 | TYPE_MAIN: | |
356 | 'rhodecode:templates/email_templates/main.mako', |
|
355 | 'rhodecode:templates/email_templates/main.mako', | |
357 | TYPE_TEST: |
|
356 | TYPE_TEST: | |
358 | 'rhodecode:templates/email_templates/test.mako', |
|
357 | 'rhodecode:templates/email_templates/test.mako', | |
359 | TYPE_EMAIL_EXCEPTION: |
|
358 | TYPE_EMAIL_EXCEPTION: | |
360 | 'rhodecode:templates/email_templates/exception_tracker.mako', |
|
359 | 'rhodecode:templates/email_templates/exception_tracker.mako', | |
361 | TYPE_UPDATE_AVAILABLE: |
|
360 | TYPE_UPDATE_AVAILABLE: | |
362 | 'rhodecode:templates/email_templates/update_available.mako', |
|
361 | 'rhodecode:templates/email_templates/update_available.mako', | |
363 | TYPE_EMAIL_TEST: |
|
362 | TYPE_EMAIL_TEST: | |
364 | 'rhodecode:templates/email_templates/email_test.mako', |
|
363 | 'rhodecode:templates/email_templates/email_test.mako', | |
365 | TYPE_REGISTRATION: |
|
364 | TYPE_REGISTRATION: | |
366 | 'rhodecode:templates/email_templates/user_registration.mako', |
|
365 | 'rhodecode:templates/email_templates/user_registration.mako', | |
367 | TYPE_PASSWORD_RESET: |
|
366 | TYPE_PASSWORD_RESET: | |
368 | 'rhodecode:templates/email_templates/password_reset.mako', |
|
367 | 'rhodecode:templates/email_templates/password_reset.mako', | |
369 | TYPE_PASSWORD_RESET_CONFIRMATION: |
|
368 | TYPE_PASSWORD_RESET_CONFIRMATION: | |
370 | 'rhodecode:templates/email_templates/password_reset_confirmation.mako', |
|
369 | 'rhodecode:templates/email_templates/password_reset_confirmation.mako', | |
371 | TYPE_COMMIT_COMMENT: |
|
370 | TYPE_COMMIT_COMMENT: | |
372 | 'rhodecode:templates/email_templates/commit_comment.mako', |
|
371 | 'rhodecode:templates/email_templates/commit_comment.mako', | |
373 | TYPE_PULL_REQUEST: |
|
372 | TYPE_PULL_REQUEST: | |
374 | 'rhodecode:templates/email_templates/pull_request_review.mako', |
|
373 | 'rhodecode:templates/email_templates/pull_request_review.mako', | |
375 | TYPE_PULL_REQUEST_COMMENT: |
|
374 | TYPE_PULL_REQUEST_COMMENT: | |
376 | 'rhodecode:templates/email_templates/pull_request_comment.mako', |
|
375 | 'rhodecode:templates/email_templates/pull_request_comment.mako', | |
377 | TYPE_PULL_REQUEST_UPDATE: |
|
376 | TYPE_PULL_REQUEST_UPDATE: | |
378 | 'rhodecode:templates/email_templates/pull_request_update.mako', |
|
377 | 'rhodecode:templates/email_templates/pull_request_update.mako', | |
379 | } |
|
378 | } | |
380 |
|
379 | |||
381 | premailer_instance = premailer.Premailer() |
|
380 | premailer_instance = premailer.Premailer() | |
382 |
|
381 | |||
383 | def __init__(self): |
|
382 | def __init__(self): | |
384 | """ |
|
383 | """ | |
385 | Example usage:: |
|
384 | Example usage:: | |
386 |
|
385 | |||
387 | (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email( |
|
386 | (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email( | |
388 | EmailNotificationModel.TYPE_TEST, **email_kwargs) |
|
387 | EmailNotificationModel.TYPE_TEST, **email_kwargs) | |
389 |
|
388 | |||
390 | """ |
|
389 | """ | |
391 | super(EmailNotificationModel, self).__init__() |
|
390 | super(EmailNotificationModel, self).__init__() | |
392 | self.rhodecode_instance_name = rhodecode.CONFIG.get('rhodecode_title') |
|
391 | self.rhodecode_instance_name = rhodecode.CONFIG.get('rhodecode_title') | |
393 |
|
392 | |||
394 | def _update_kwargs_for_render(self, kwargs): |
|
393 | def _update_kwargs_for_render(self, kwargs): | |
395 | """ |
|
394 | """ | |
396 | Inject params required for Mako rendering |
|
395 | Inject params required for Mako rendering | |
397 |
|
396 | |||
398 | :param kwargs: |
|
397 | :param kwargs: | |
399 | """ |
|
398 | """ | |
400 |
|
399 | |||
401 | kwargs['rhodecode_instance_name'] = self.rhodecode_instance_name |
|
400 | kwargs['rhodecode_instance_name'] = self.rhodecode_instance_name | |
402 | kwargs['rhodecode_version'] = rhodecode.__version__ |
|
401 | kwargs['rhodecode_version'] = rhodecode.__version__ | |
403 | instance_url = h.route_url('home') |
|
402 | instance_url = h.route_url('home') | |
404 | _kwargs = { |
|
403 | _kwargs = { | |
405 | 'instance_url': instance_url, |
|
404 | 'instance_url': instance_url, | |
406 | 'whitespace_filter': self.whitespace_filter, |
|
405 | 'whitespace_filter': self.whitespace_filter, | |
407 | 'email_pr_update_subject_template': EMAIL_PR_UPDATE_SUBJECT_TEMPLATE, |
|
406 | 'email_pr_update_subject_template': EMAIL_PR_UPDATE_SUBJECT_TEMPLATE, | |
408 | 'email_pr_review_subject_template': EMAIL_PR_REVIEW_SUBJECT_TEMPLATE, |
|
407 | 'email_pr_review_subject_template': EMAIL_PR_REVIEW_SUBJECT_TEMPLATE, | |
409 | 'email_pr_comment_subject_template': EMAIL_PR_COMMENT_SUBJECT_TEMPLATE, |
|
408 | 'email_pr_comment_subject_template': EMAIL_PR_COMMENT_SUBJECT_TEMPLATE, | |
410 | 'email_pr_comment_status_change_subject_template': EMAIL_PR_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE, |
|
409 | 'email_pr_comment_status_change_subject_template': EMAIL_PR_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE, | |
411 | 'email_pr_comment_file_subject_template': EMAIL_PR_COMMENT_FILE_SUBJECT_TEMPLATE, |
|
410 | 'email_pr_comment_file_subject_template': EMAIL_PR_COMMENT_FILE_SUBJECT_TEMPLATE, | |
412 | 'email_comment_subject_template': EMAIL_COMMENT_SUBJECT_TEMPLATE, |
|
411 | 'email_comment_subject_template': EMAIL_COMMENT_SUBJECT_TEMPLATE, | |
413 | 'email_comment_status_change_subject_template': EMAIL_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE, |
|
412 | 'email_comment_status_change_subject_template': EMAIL_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE, | |
414 | 'email_comment_file_subject_template': EMAIL_COMMENT_FILE_SUBJECT_TEMPLATE, |
|
413 | 'email_comment_file_subject_template': EMAIL_COMMENT_FILE_SUBJECT_TEMPLATE, | |
415 | } |
|
414 | } | |
416 | _kwargs.update(kwargs) |
|
415 | _kwargs.update(kwargs) | |
417 | return _kwargs |
|
416 | return _kwargs | |
418 |
|
417 | |||
419 | def whitespace_filter(self, text): |
|
418 | def whitespace_filter(self, text): | |
420 | return text.replace('\n', '').replace('\t', '') |
|
419 | return text.replace('\n', '').replace('\t', '') | |
421 |
|
420 | |||
422 | def get_renderer(self, type_, request): |
|
421 | def get_renderer(self, type_, request): | |
423 | template_name = self.email_types[type_] |
|
422 | template_name = self.email_types[type_] | |
424 | return request.get_partial_renderer(template_name) |
|
423 | return request.get_partial_renderer(template_name) | |
425 |
|
424 | |||
426 | def render_email(self, type_, **kwargs): |
|
425 | def render_email(self, type_, **kwargs): | |
427 | """ |
|
426 | """ | |
428 | renders template for email, and returns a tuple of |
|
427 | renders template for email, and returns a tuple of | |
429 | (subject, email_headers, email_html_body, email_plaintext_body) |
|
428 | (subject, email_headers, email_html_body, email_plaintext_body) | |
430 | """ |
|
429 | """ | |
431 | request = get_current_request() |
|
430 | request = get_current_request() | |
432 |
|
431 | |||
433 | # translator and helpers inject |
|
432 | # translator and helpers inject | |
434 | _kwargs = self._update_kwargs_for_render(kwargs) |
|
433 | _kwargs = self._update_kwargs_for_render(kwargs) | |
435 | email_template = self.get_renderer(type_, request=request) |
|
434 | email_template = self.get_renderer(type_, request=request) | |
436 | subject = email_template.render('subject', **_kwargs) |
|
435 | subject = email_template.render('subject', **_kwargs) | |
437 |
|
436 | |||
438 | try: |
|
437 | try: | |
439 | body_plaintext = email_template.render('body_plaintext', **_kwargs) |
|
438 | body_plaintext = email_template.render('body_plaintext', **_kwargs) | |
440 | except AttributeError: |
|
439 | except AttributeError: | |
441 | # it's not defined in template, ok we can skip it |
|
440 | # it's not defined in template, ok we can skip it | |
442 | body_plaintext = '' |
|
441 | body_plaintext = '' | |
443 |
|
442 | |||
444 | # render WHOLE template |
|
443 | # render WHOLE template | |
445 | body = email_template.render(None, **_kwargs) |
|
444 | body = email_template.render(None, **_kwargs) | |
446 |
|
445 | |||
447 | try: |
|
446 | try: | |
448 | # Inline CSS styles and conversion |
|
447 | # Inline CSS styles and conversion | |
449 | body = self.premailer_instance.transform(body) |
|
448 | body = self.premailer_instance.transform(body) | |
450 | except Exception: |
|
449 | except Exception: | |
451 | log.exception('Failed to parse body with premailer') |
|
450 | log.exception('Failed to parse body with premailer') | |
452 | pass |
|
451 | pass | |
453 |
|
452 | |||
454 | return subject, body, body_plaintext |
|
453 | return subject, body, body_plaintext |
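Note: render_email returns (subject, body, body_plaintext), treating the plaintext part as optional (a missing body_plaintext def falls back to an empty string) and the premailer CSS inlining as best-effort. Below is a minimal stand-alone sketch of the same pattern with plain Mako; the template text, def names and kwargs are illustrative rather than the actual RhodeCode email templates, the real method also injects instance metadata and subject templates before rendering, and the sketch assumes Mako raises AttributeError for a missing def, as the except clause above expects.

from mako.template import Template

EXAMPLE_EMAIL_TEMPLATE = Template('''
<%def name="subject()">Test notification for ${user}</%def>
<%def name="body_plaintext()">Hello ${user}, plaintext part.</%def>
<html><body><p>Hello ${user}, HTML part.</p></body></html>
''')

def render_email_like(template, **kwargs):
    # named defs provide the subject and the optional plaintext body
    subject = template.get_def('subject').render(**kwargs).strip()
    try:
        body_plaintext = template.get_def('body_plaintext').render(**kwargs).strip()
    except AttributeError:
        # the def is not present in the template, skip the plaintext part
        body_plaintext = ''
    # rendering the whole template yields the HTML body
    body = template.render(**kwargs)
    return subject, body, body_plaintext

print(render_email_like(EXAMPLE_EMAIL_TEMPLATE, user='admin'))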
@@ -1,392 +1,398 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 | import io |
|
20 | import io | |
21 | import shlex |
|
21 | import shlex | |
22 |
|
22 | |||
23 | import math |
|
23 | import math | |
24 | import re |
|
24 | import re | |
25 | import os |
|
25 | import os | |
26 | import datetime |
|
26 | import datetime | |
27 | import logging |
|
27 | import logging | |
28 | import Queue |
|
28 | import Queue | |
29 | import subprocess32 |
|
29 | import subprocess32 | |
30 |
|
30 | |||
31 |
|
31 | |||
32 | from dateutil.parser import parse |
|
32 | from dateutil.parser import parse | |
33 | from pyramid.threadlocal import get_current_request |
|
33 | from pyramid.threadlocal import get_current_request | |
34 | from pyramid.interfaces import IRoutesMapper |
|
34 | from pyramid.interfaces import IRoutesMapper | |
35 | from pyramid.settings import asbool |
|
35 | from pyramid.settings import asbool | |
36 | from pyramid.path import AssetResolver |
|
36 | from pyramid.path import AssetResolver | |
37 | from threading import Thread |
|
37 | from threading import Thread | |
38 |
|
38 | |||
39 | from rhodecode.config.jsroutes import generate_jsroutes_content |
|
39 | from rhodecode.config.jsroutes import generate_jsroutes_content | |
40 | from rhodecode.lib.base import get_auth_user |
|
40 | from rhodecode.lib.base import get_auth_user | |
41 |
|
41 | |||
42 | import rhodecode |
|
42 | import rhodecode | |
43 |
|
43 | |||
44 |
|
44 | |||
45 | log = logging.getLogger(__name__) |
|
45 | log = logging.getLogger(__name__) | |
46 |
|
46 | |||
47 |
|
47 | |||
48 | def add_renderer_globals(event): |
|
48 | def add_renderer_globals(event): | |
49 | from rhodecode.lib import helpers |
|
49 | from rhodecode.lib import helpers | |
50 |
|
50 | |||
51 | # TODO: When executed in pyramid view context the request is not available |
|
51 | # TODO: When executed in pyramid view context the request is not available | |
52 | # in the event. Find a better solution to get the request. |
|
52 | # in the event. Find a better solution to get the request. | |
53 | request = event['request'] or get_current_request() |
|
53 | request = event['request'] or get_current_request() | |
54 |
|
54 | |||
55 | # Add Pyramid translation as '_' to context |
|
55 | # Add Pyramid translation as '_' to context | |
56 | event['_'] = request.translate |
|
56 | event['_'] = request.translate | |
57 | event['_ungettext'] = request.plularize |
|
57 | event['_ungettext'] = request.plularize | |
58 | event['h'] = helpers |
|
58 | event['h'] = helpers | |
59 |
|
59 | |||
60 |
|
60 | |||
61 | def set_user_lang(event): |
|
61 | def set_user_lang(event): | |
62 | request = event.request |
|
62 | request = event.request | |
63 | cur_user = getattr(request, 'user', None) |
|
63 | cur_user = getattr(request, 'user', None) | |
64 |
|
64 | |||
65 | if cur_user: |
|
65 | if cur_user: | |
66 | user_lang = cur_user.get_instance().user_data.get('language') |
|
66 | user_lang = cur_user.get_instance().user_data.get('language') | |
67 | if user_lang: |
|
67 | if user_lang: | |
68 | log.debug('lang: setting current user:%s language to: %s', cur_user, user_lang) |
|
68 | log.debug('lang: setting current user:%s language to: %s', cur_user, user_lang) | |
69 | event.request._LOCALE_ = user_lang |
|
69 | event.request._LOCALE_ = user_lang | |
70 |
|
70 | |||
71 |
|
71 | |||
|
72 | def update_celery_conf(event): | |||
|
73 | from rhodecode.lib.celerylib.loader import set_celery_conf | |||
|
74 | log.debug('Setting celery config from new request') | |||
|
75 | set_celery_conf(request=event.request, registry=event.request.registry) | |||
|
76 | ||||
|
77 | ||||
72 | def add_request_user_context(event): |
|
78 | def add_request_user_context(event): | |
73 | """ |
|
79 | """ | |
74 | Adds auth user into request context |
|
80 | Adds auth user into request context | |
75 | """ |
|
81 | """ | |
76 | request = event.request |
|
82 | request = event.request | |
77 | # access req_id as soon as possible |
|
83 | # access req_id as soon as possible | |
78 | req_id = request.req_id |
|
84 | req_id = request.req_id | |
79 |
|
85 | |||
80 | if hasattr(request, 'vcs_call'): |
|
86 | if hasattr(request, 'vcs_call'): | |
81 | # skip vcs calls |
|
87 | # skip vcs calls | |
82 | return |
|
88 | return | |
83 |
|
89 | |||
84 | if hasattr(request, 'rpc_method'): |
|
90 | if hasattr(request, 'rpc_method'): | |
85 | # skip api calls |
|
91 | # skip api calls | |
86 | return |
|
92 | return | |
87 |
|
93 | |||
88 | auth_user, auth_token = get_auth_user(request) |
|
94 | auth_user, auth_token = get_auth_user(request) | |
89 | request.user = auth_user |
|
95 | request.user = auth_user | |
90 | request.user_auth_token = auth_token |
|
96 | request.user_auth_token = auth_token | |
91 | request.environ['rc_auth_user'] = auth_user |
|
97 | request.environ['rc_auth_user'] = auth_user | |
92 | request.environ['rc_auth_user_id'] = auth_user.user_id |
|
98 | request.environ['rc_auth_user_id'] = auth_user.user_id | |
93 | request.environ['rc_req_id'] = req_id |
|
99 | request.environ['rc_req_id'] = req_id | |
94 |
|
100 | |||
95 |
|
101 | |||
96 | def reset_log_bucket(event): |
|
102 | def reset_log_bucket(event): | |
97 | """ |
|
103 | """ | |
98 | reset the log bucket on new request |
|
104 | reset the log bucket on new request | |
99 | """ |
|
105 | """ | |
100 | request = event.request |
|
106 | request = event.request | |
101 | request.req_id_records_init() |
|
107 | request.req_id_records_init() | |
102 |
|
108 | |||
103 |
|
109 | |||
104 | def scan_repositories_if_enabled(event): |
|
110 | def scan_repositories_if_enabled(event): | |
105 | """ |
|
111 | """ | |
106 | This is subscribed to the `pyramid.events.ApplicationCreated` event. It |
|
112 | This is subscribed to the `pyramid.events.ApplicationCreated` event. It | |
107 | does a repository scan if enabled in the settings. |
|
113 | does a repository scan if enabled in the settings. | |
108 | """ |
|
114 | """ | |
109 | settings = event.app.registry.settings |
|
115 | settings = event.app.registry.settings | |
110 | vcs_server_enabled = settings['vcs.server.enable'] |
|
116 | vcs_server_enabled = settings['vcs.server.enable'] | |
111 | import_on_startup = settings['startup.import_repos'] |
|
117 | import_on_startup = settings['startup.import_repos'] | |
112 | if vcs_server_enabled and import_on_startup: |
|
118 | if vcs_server_enabled and import_on_startup: | |
113 | from rhodecode.model.scm import ScmModel |
|
119 | from rhodecode.model.scm import ScmModel | |
114 | from rhodecode.lib.utils import repo2db_mapper, get_rhodecode_base_path |
|
120 | from rhodecode.lib.utils import repo2db_mapper, get_rhodecode_base_path | |
115 | repositories = ScmModel().repo_scan(get_rhodecode_base_path()) |
|
121 | repositories = ScmModel().repo_scan(get_rhodecode_base_path()) | |
116 | repo2db_mapper(repositories, remove_obsolete=False) |
|
122 | repo2db_mapper(repositories, remove_obsolete=False) | |
117 |
|
123 | |||
118 |
|
124 | |||
119 | def write_metadata_if_needed(event): |
|
125 | def write_metadata_if_needed(event): | |
120 | """ |
|
126 | """ | |
121 | Writes upgrade metadata |
|
127 | Writes upgrade metadata | |
122 | """ |
|
128 | """ | |
123 | import rhodecode |
|
129 | import rhodecode | |
124 | from rhodecode.lib import system_info |
|
130 | from rhodecode.lib import system_info | |
125 | from rhodecode.lib import ext_json |
|
131 | from rhodecode.lib import ext_json | |
126 |
|
132 | |||
127 | fname = '.rcmetadata.json' |
|
133 | fname = '.rcmetadata.json' | |
128 | ini_loc = os.path.dirname(rhodecode.CONFIG.get('__file__')) |
|
134 | ini_loc = os.path.dirname(rhodecode.CONFIG.get('__file__')) | |
129 | metadata_destination = os.path.join(ini_loc, fname) |
|
135 | metadata_destination = os.path.join(ini_loc, fname) | |
130 |
|
136 | |||
131 | def get_update_age(): |
|
137 | def get_update_age(): | |
132 | now = datetime.datetime.utcnow() |
|
138 | now = datetime.datetime.utcnow() | |
133 |
|
139 | |||
134 | with open(metadata_destination, 'rb') as f: |
|
140 | with open(metadata_destination, 'rb') as f: | |
135 | data = ext_json.json.loads(f.read()) |
|
141 | data = ext_json.json.loads(f.read()) | |
136 | if 'created_on' in data: |
|
142 | if 'created_on' in data: | |
137 | update_date = parse(data['created_on']) |
|
143 | update_date = parse(data['created_on']) | |
138 | diff = now - update_date |
|
144 | diff = now - update_date | |
139 | return diff.total_seconds() / 60.0 |
|
145 | return diff.total_seconds() / 60.0 | |
140 |
|
146 | |||
141 | return 0 |
|
147 | return 0 | |
142 |
|
148 | |||
143 | def write(): |
|
149 | def write(): | |
144 | configuration = system_info.SysInfo( |
|
150 | configuration = system_info.SysInfo( | |
145 | system_info.rhodecode_config)()['value'] |
|
151 | system_info.rhodecode_config)()['value'] | |
146 | license_token = configuration['config']['license_token'] |
|
152 | license_token = configuration['config']['license_token'] | |
147 |
|
153 | |||
148 | setup = dict( |
|
154 | setup = dict( | |
149 | workers=configuration['config']['server:main'].get( |
|
155 | workers=configuration['config']['server:main'].get( | |
150 | 'workers', '?'), |
|
156 | 'workers', '?'), | |
151 | worker_type=configuration['config']['server:main'].get( |
|
157 | worker_type=configuration['config']['server:main'].get( | |
152 | 'worker_class', 'sync'), |
|
158 | 'worker_class', 'sync'), | |
153 | ) |
|
159 | ) | |
154 | dbinfo = system_info.SysInfo(system_info.database_info)()['value'] |
|
160 | dbinfo = system_info.SysInfo(system_info.database_info)()['value'] | |
155 | del dbinfo['url'] |
|
161 | del dbinfo['url'] | |
156 |
|
162 | |||
157 | metadata = dict( |
|
163 | metadata = dict( | |
158 | desc='upgrade metadata info', |
|
164 | desc='upgrade metadata info', | |
159 | license_token=license_token, |
|
165 | license_token=license_token, | |
160 | created_on=datetime.datetime.utcnow().isoformat(), |
|
166 | created_on=datetime.datetime.utcnow().isoformat(), | |
161 | usage=system_info.SysInfo(system_info.usage_info)()['value'], |
|
167 | usage=system_info.SysInfo(system_info.usage_info)()['value'], | |
162 | platform=system_info.SysInfo(system_info.platform_type)()['value'], |
|
168 | platform=system_info.SysInfo(system_info.platform_type)()['value'], | |
163 | database=dbinfo, |
|
169 | database=dbinfo, | |
164 | cpu=system_info.SysInfo(system_info.cpu)()['value'], |
|
170 | cpu=system_info.SysInfo(system_info.cpu)()['value'], | |
165 | memory=system_info.SysInfo(system_info.memory)()['value'], |
|
171 | memory=system_info.SysInfo(system_info.memory)()['value'], | |
166 | setup=setup |
|
172 | setup=setup | |
167 | ) |
|
173 | ) | |
168 |
|
174 | |||
169 | with open(metadata_destination, 'wb') as f: |
|
175 | with open(metadata_destination, 'wb') as f: | |
170 | f.write(ext_json.json.dumps(metadata)) |
|
176 | f.write(ext_json.json.dumps(metadata)) | |
171 |
|
177 | |||
172 | settings = event.app.registry.settings |
|
178 | settings = event.app.registry.settings | |
173 | if settings.get('metadata.skip'): |
|
179 | if settings.get('metadata.skip'): | |
174 | return |
|
180 | return | |
175 |
|
181 | |||
176 | # only write this every 24h, workers restart caused unwanted delays |
|
182 | # only write this every 24h, workers restart caused unwanted delays | |
177 | try: |
|
183 | try: | |
178 | age_in_min = get_update_age() |
|
184 | age_in_min = get_update_age() | |
179 | except Exception: |
|
185 | except Exception: | |
180 | age_in_min = 0 |
|
186 | age_in_min = 0 | |
181 |
|
187 | |||
182 | if age_in_min > 60 * 60 * 24: |
|
188 | if age_in_min > 60 * 60 * 24: | |
183 | return |
|
189 | return | |
184 |
|
190 | |||
185 | try: |
|
191 | try: | |
186 | write() |
|
192 | write() | |
187 | except Exception: |
|
193 | except Exception: | |
188 | pass |
|
194 | pass | |
189 |
|
195 | |||
190 |
|
196 | |||
191 | def write_usage_data(event): |
|
197 | def write_usage_data(event): | |
192 | import rhodecode |
|
198 | import rhodecode | |
193 | from rhodecode.lib import system_info |
|
199 | from rhodecode.lib import system_info | |
194 | from rhodecode.lib import ext_json |
|
200 | from rhodecode.lib import ext_json | |
195 |
|
201 | |||
196 | settings = event.app.registry.settings |
|
202 | settings = event.app.registry.settings | |
197 | instance_tag = settings.get('metadata.write_usage_tag') |
|
203 | instance_tag = settings.get('metadata.write_usage_tag') | |
198 | if not settings.get('metadata.write_usage'): |
|
204 | if not settings.get('metadata.write_usage'): | |
199 | return |
|
205 | return | |
200 |
|
206 | |||
201 | def get_update_age(dest_file): |
|
207 | def get_update_age(dest_file): | |
202 | now = datetime.datetime.utcnow() |
|
208 | now = datetime.datetime.utcnow() | |
203 |
|
209 | |||
204 | with open(dest_file, 'rb') as f: |
|
210 | with open(dest_file, 'rb') as f: | |
205 | data = ext_json.json.loads(f.read()) |
|
211 | data = ext_json.json.loads(f.read()) | |
206 | if 'created_on' in data: |
|
212 | if 'created_on' in data: | |
207 | update_date = parse(data['created_on']) |
|
213 | update_date = parse(data['created_on']) | |
208 | diff = now - update_date |
|
214 | diff = now - update_date | |
209 | return math.ceil(diff.total_seconds() / 60.0) |
|
215 | return math.ceil(diff.total_seconds() / 60.0) | |
210 |
|
216 | |||
211 | return 0 |
|
217 | return 0 | |
212 |
|
218 | |||
213 | utc_date = datetime.datetime.utcnow() |
|
219 | utc_date = datetime.datetime.utcnow() | |
214 | hour_quarter = int(math.ceil((utc_date.hour + utc_date.minute/60.0) / 6.)) |
|
220 | hour_quarter = int(math.ceil((utc_date.hour + utc_date.minute/60.0) / 6.)) | |
215 | fname = '.rc_usage_{date.year}{date.month:02d}{date.day:02d}_{hour}.json'.format( |
|
221 | fname = '.rc_usage_{date.year}{date.month:02d}{date.day:02d}_{hour}.json'.format( | |
216 | date=utc_date, hour=hour_quarter) |
|
222 | date=utc_date, hour=hour_quarter) | |
217 | ini_loc = os.path.dirname(rhodecode.CONFIG.get('__file__')) |
|
223 | ini_loc = os.path.dirname(rhodecode.CONFIG.get('__file__')) | |
218 |
|
224 | |||
219 | usage_dir = os.path.join(ini_loc, '.rcusage') |
|
225 | usage_dir = os.path.join(ini_loc, '.rcusage') | |
220 | if not os.path.isdir(usage_dir): |
|
226 | if not os.path.isdir(usage_dir): | |
221 | os.makedirs(usage_dir) |
|
227 | os.makedirs(usage_dir) | |
222 | usage_metadata_destination = os.path.join(usage_dir, fname) |
|
228 | usage_metadata_destination = os.path.join(usage_dir, fname) | |
223 |
|
229 | |||
224 | try: |
|
230 | try: | |
225 | age_in_min = get_update_age(usage_metadata_destination) |
|
231 | age_in_min = get_update_age(usage_metadata_destination) | |
226 | except Exception: |
|
232 | except Exception: | |
227 | age_in_min = 0 |
|
233 | age_in_min = 0 | |
228 |
|
234 | |||
229 | # write every 6th hour |
|
235 | # write every 6th hour | |
230 | if age_in_min and age_in_min < 60 * 6: |
|
236 | if age_in_min and age_in_min < 60 * 6: | |
231 | log.debug('Usage file created %s minutes ago, skipping (threshold: %s minutes)...', |
|
237 | log.debug('Usage file created %s minutes ago, skipping (threshold: %s minutes)...', | |
232 | age_in_min, 60 * 6) |
|
238 | age_in_min, 60 * 6) | |
233 | return |
|
239 | return | |
234 |
|
240 | |||
235 | def write(dest_file): |
|
241 | def write(dest_file): | |
236 | configuration = system_info.SysInfo(system_info.rhodecode_config)()['value'] |
|
242 | configuration = system_info.SysInfo(system_info.rhodecode_config)()['value'] | |
237 | license_token = configuration['config']['license_token'] |
|
243 | license_token = configuration['config']['license_token'] | |
238 |
|
244 | |||
239 | metadata = dict( |
|
245 | metadata = dict( | |
240 | desc='Usage data', |
|
246 | desc='Usage data', | |
241 | instance_tag=instance_tag, |
|
247 | instance_tag=instance_tag, | |
242 | license_token=license_token, |
|
248 | license_token=license_token, | |
243 | created_on=datetime.datetime.utcnow().isoformat(), |
|
249 | created_on=datetime.datetime.utcnow().isoformat(), | |
244 | usage=system_info.SysInfo(system_info.usage_info)()['value'], |
|
250 | usage=system_info.SysInfo(system_info.usage_info)()['value'], | |
245 | ) |
|
251 | ) | |
246 |
|
252 | |||
247 | with open(dest_file, 'wb') as f: |
|
253 | with open(dest_file, 'wb') as f: | |
248 | f.write(ext_json.json.dumps(metadata, indent=2, sort_keys=True)) |
|
254 | f.write(ext_json.json.dumps(metadata, indent=2, sort_keys=True)) | |
249 |
|
255 | |||
250 | try: |
|
256 | try: | |
251 | log.debug('Writing usage file at: %s', usage_metadata_destination) |
|
257 | log.debug('Writing usage file at: %s', usage_metadata_destination) | |
252 | write(usage_metadata_destination) |
|
258 | write(usage_metadata_destination) | |
253 | except Exception: |
|
259 | except Exception: | |
254 | pass |
|
260 | pass | |
255 |
|
261 | |||
256 |
|
262 | |||
257 | def write_js_routes_if_enabled(event): |
|
263 | def write_js_routes_if_enabled(event): | |
258 | registry = event.app.registry |
|
264 | registry = event.app.registry | |
259 |
|
265 | |||
260 | mapper = registry.queryUtility(IRoutesMapper) |
|
266 | mapper = registry.queryUtility(IRoutesMapper) | |
261 | _argument_prog = re.compile('\{(.*?)\}|:\((.*)\)') |
|
267 | _argument_prog = re.compile('\{(.*?)\}|:\((.*)\)') | |
262 |
|
268 | |||
263 | def _extract_route_information(route): |
|
269 | def _extract_route_information(route): | |
264 | """ |
|
270 | """ | |
265 | Convert a route into tuple(name, path, args), eg: |
|
271 | Convert a route into tuple(name, path, args), eg: | |
266 | ('show_user', '/profile/%(username)s', ['username']) |
|
272 | ('show_user', '/profile/%(username)s', ['username']) | |
267 | """ |
|
273 | """ | |
268 |
|
274 | |||
269 | routepath = route.pattern |
|
275 | routepath = route.pattern | |
270 | pattern = route.pattern |
|
276 | pattern = route.pattern | |
271 |
|
277 | |||
272 | def replace(matchobj): |
|
278 | def replace(matchobj): | |
273 | if matchobj.group(1): |
|
279 | if matchobj.group(1): | |
274 | return "%%(%s)s" % matchobj.group(1).split(':')[0] |
|
280 | return "%%(%s)s" % matchobj.group(1).split(':')[0] | |
275 | else: |
|
281 | else: | |
276 | return "%%(%s)s" % matchobj.group(2) |
|
282 | return "%%(%s)s" % matchobj.group(2) | |
277 |
|
283 | |||
278 | routepath = _argument_prog.sub(replace, routepath) |
|
284 | routepath = _argument_prog.sub(replace, routepath) | |
279 |
|
285 | |||
280 | if not routepath.startswith('/'): |
|
286 | if not routepath.startswith('/'): | |
281 | routepath = '/'+routepath |
|
287 | routepath = '/'+routepath | |
282 |
|
288 | |||
283 | return ( |
|
289 | return ( | |
284 | route.name, |
|
290 | route.name, | |
285 | routepath, |
|
291 | routepath, | |
286 | [(arg[0].split(':')[0] if arg[0] != '' else arg[1]) |
|
292 | [(arg[0].split(':')[0] if arg[0] != '' else arg[1]) | |
287 | for arg in _argument_prog.findall(pattern)] |
|
293 | for arg in _argument_prog.findall(pattern)] | |
288 | ) |
|
294 | ) | |
289 |
|
295 | |||
290 | def get_routes(): |
|
296 | def get_routes(): | |
291 | # pyramid routes |
|
297 | # pyramid routes | |
292 | for route in mapper.get_routes(): |
|
298 | for route in mapper.get_routes(): | |
293 | if not route.name.startswith('__'): |
|
299 | if not route.name.startswith('__'): | |
294 | yield _extract_route_information(route) |
|
300 | yield _extract_route_information(route) | |
295 |
|
301 | |||
296 | if asbool(registry.settings.get('generate_js_files', 'false')): |
|
302 | if asbool(registry.settings.get('generate_js_files', 'false')): | |
297 | static_path = AssetResolver().resolve('rhodecode:public').abspath() |
|
303 | static_path = AssetResolver().resolve('rhodecode:public').abspath() | |
298 | jsroutes = get_routes() |
|
304 | jsroutes = get_routes() | |
299 | jsroutes_file_content = generate_jsroutes_content(jsroutes) |
|
305 | jsroutes_file_content = generate_jsroutes_content(jsroutes) | |
300 | jsroutes_file_path = os.path.join( |
|
306 | jsroutes_file_path = os.path.join( | |
301 | static_path, 'js', 'rhodecode', 'routes.js') |
|
307 | static_path, 'js', 'rhodecode', 'routes.js') | |
302 |
|
308 | |||
303 | try: |
|
309 | try: | |
304 | with io.open(jsroutes_file_path, 'w', encoding='utf-8') as f: |
|
310 | with io.open(jsroutes_file_path, 'w', encoding='utf-8') as f: | |
305 | f.write(jsroutes_file_content) |
|
311 | f.write(jsroutes_file_content) | |
306 | except Exception: |
|
312 | except Exception: | |
307 | log.exception('Failed to write routes.js into %s', jsroutes_file_path) |
|
313 | log.exception('Failed to write routes.js into %s', jsroutes_file_path) | |
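The {arg} to %(arg)s rewriting used for the generated JS routes file can be exercised in isolation; the route pattern below is illustrative, not an actual RhodeCode route.

import re

_argument_prog = re.compile(r'\{(.*?)\}|:\((.*)\)')

def replace(matchobj):
    # keep only the argument name, dropping any regex constraint after ':'
    if matchobj.group(1):
        return "%%(%s)s" % matchobj.group(1).split(':')[0]
    return "%%(%s)s" % matchobj.group(2)

pattern = r'/{repo_name}/settings/{setting_id:\d+}'
print(_argument_prog.sub(replace, pattern))
# -> /%(repo_name)s/settings/%(setting_id)s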
308 |
|
314 | |||
309 |
|
315 | |||
310 | class Subscriber(object): |
|
316 | class Subscriber(object): | |
311 | """ |
|
317 | """ | |
312 | Base class for subscribers to the pyramid event system. |
|
318 | Base class for subscribers to the pyramid event system. | |
313 | """ |
|
319 | """ | |
314 | def __call__(self, event): |
|
320 | def __call__(self, event): | |
315 | self.run(event) |
|
321 | self.run(event) | |
316 |
|
322 | |||
317 | def run(self, event): |
|
323 | def run(self, event): | |
318 | raise NotImplementedError('Subclass has to implement this.') |
|
324 | raise NotImplementedError('Subclass has to implement this.') | |
319 |
|
325 | |||
320 |
|
326 | |||
321 | class AsyncSubscriber(Subscriber): |
|
327 | class AsyncSubscriber(Subscriber): | |
322 | """ |
|
328 | """ | |
323 | Subscriber that handles the execution of events in a separate task to not |
|
329 | Subscriber that handles the execution of events in a separate task to not | |
324 | block the execution of the code which triggers the event. It puts the |
|
330 | block the execution of the code which triggers the event. It puts the | |
325 | received events into a queue from which the worker process takes them in |
|
331 | received events into a queue from which the worker process takes them in | |
326 | order. |
|
332 | order. | |
327 | """ |
|
333 | """ | |
328 | def __init__(self): |
|
334 | def __init__(self): | |
329 | self._stop = False |
|
335 | self._stop = False | |
330 | self._eventq = Queue.Queue() |
|
336 | self._eventq = Queue.Queue() | |
331 | self._worker = self.create_worker() |
|
337 | self._worker = self.create_worker() | |
332 | self._worker.start() |
|
338 | self._worker.start() | |
333 |
|
339 | |||
334 | def __call__(self, event): |
|
340 | def __call__(self, event): | |
335 | self._eventq.put(event) |
|
341 | self._eventq.put(event) | |
336 |
|
342 | |||
337 | def create_worker(self): |
|
343 | def create_worker(self): | |
338 | worker = Thread(target=self.do_work) |
|
344 | worker = Thread(target=self.do_work) | |
339 | worker.daemon = True |
|
345 | worker.daemon = True | |
340 | return worker |
|
346 | return worker | |
341 |
|
347 | |||
342 | def stop_worker(self): |
|
348 | def stop_worker(self): | |
343 | self._stop = False |
|
349 | self._stop = False | |
344 | self._eventq.put(None) |
|
350 | self._eventq.put(None) | |
345 | self._worker.join() |
|
351 | self._worker.join() | |
346 |
|
352 | |||
347 | def do_work(self): |
|
353 | def do_work(self): | |
348 | while not self._stop: |
|
354 | while not self._stop: | |
349 | event = self._eventq.get() |
|
355 | event = self._eventq.get() | |
350 | if event is not None: |
|
356 | if event is not None: | |
351 | self.run(event) |
|
357 | self.run(event) | |
352 |
|
358 | |||
353 |
|
359 | |||
354 | class AsyncSubprocessSubscriber(AsyncSubscriber): |
|
360 | class AsyncSubprocessSubscriber(AsyncSubscriber): | |
355 | """ |
|
361 | """ | |
356 | Subscriber that uses the subprocess32 module to execute a command if an |
|
362 | Subscriber that uses the subprocess32 module to execute a command if an | |
357 | event is received. Events are handled asynchronously:: |
|
363 | event is received. Events are handled asynchronously:: | |
358 |
|
364 | |||
359 | subscriber = AsyncSubprocessSubscriber('ls -la', timeout=10) |
|
365 | subscriber = AsyncSubprocessSubscriber('ls -la', timeout=10) | |
360 | subscriber(dummyEvent) # running __call__(event) |
|
366 | subscriber(dummyEvent) # running __call__(event) | |
361 |
|
367 | |||
362 | """ |
|
368 | """ | |
363 |
|
369 | |||
364 | def __init__(self, cmd, timeout=None): |
|
370 | def __init__(self, cmd, timeout=None): | |
365 | if not isinstance(cmd, (list, tuple)): |
|
371 | if not isinstance(cmd, (list, tuple)): | |
366 | cmd = shlex.split(cmd) |
|
372 | cmd = shlex.split(cmd) | |
367 | super(AsyncSubprocessSubscriber, self).__init__() |
|
373 | super(AsyncSubprocessSubscriber, self).__init__() | |
368 | self._cmd = cmd |
|
374 | self._cmd = cmd | |
369 | self._timeout = timeout |
|
375 | self._timeout = timeout | |
370 |
|
376 | |||
371 | def run(self, event): |
|
377 | def run(self, event): | |
372 | cmd = self._cmd |
|
378 | cmd = self._cmd | |
373 | timeout = self._timeout |
|
379 | timeout = self._timeout | |
374 | log.debug('Executing command %s.', cmd) |
|
380 | log.debug('Executing command %s.', cmd) | |
375 |
|
381 | |||
376 | try: |
|
382 | try: | |
377 | output = subprocess32.check_output( |
|
383 | output = subprocess32.check_output( | |
378 | cmd, timeout=timeout, stderr=subprocess32.STDOUT) |
|
384 | cmd, timeout=timeout, stderr=subprocess32.STDOUT) | |
379 | log.debug('Command finished %s', cmd) |
|
385 | log.debug('Command finished %s', cmd) | |
380 | if output: |
|
386 | if output: | |
381 | log.debug('Command output: %s', output) |
|
387 | log.debug('Command output: %s', output) | |
382 | except subprocess32.TimeoutExpired as e: |
|
388 | except subprocess32.TimeoutExpired as e: | |
383 | log.exception('Timeout while executing command.') |
|
389 | log.exception('Timeout while executing command.') | |
384 | if e.output: |
|
390 | if e.output: | |
385 | log.error('Command output: %s', e.output) |
|
391 | log.error('Command output: %s', e.output) | |
386 | except subprocess32.CalledProcessError as e: |
|
392 | except subprocess32.CalledProcessError as e: | |
387 | log.exception('Error while executing command.') |
|
393 | log.exception('Error while executing command.') | |
388 | if e.output: |
|
394 | if e.output: | |
389 | log.error('Command output: %s', e.output) |
|
395 | log.error('Command output: %s', e.output) | |
390 | except Exception: |
|
396 | except Exception: | |
391 | log.exception( |
|
397 | log.exception( | |
392 | 'Exception while executing command %s.', cmd) |
|
398 | 'Exception while executing command %s.', cmd) |
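For reference, a short usage sketch of AsyncSubprocessSubscriber matching its docstring; it assumes this module is importable as rhodecode.subscribers (the dotted names registered in the tweens includeme below point there), and the command and timeout values are illustrative.

import time

from rhodecode.subscribers import AsyncSubprocessSubscriber

# every event enqueued via __call__ runs `ls -la` on the worker thread,
# aborted after 10 seconds if it has not finished
subscriber = AsyncSubprocessSubscriber('ls -la', timeout=10)
subscriber({'dummy': 'event'})

# the worker is a daemon thread; give it a moment before the script exits
time.sleep(1)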
@@ -1,122 +1,124 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 |
|
21 | |||
22 | import logging |
|
22 | import logging | |
23 | from pyramid.httpexceptions import HTTPException, HTTPBadRequest |
|
23 | from pyramid.httpexceptions import HTTPException, HTTPBadRequest | |
24 |
|
24 | |||
25 | from rhodecode.lib.middleware.vcs import ( |
|
25 | from rhodecode.lib.middleware.vcs import ( | |
26 | detect_vcs_request, VCS_TYPE_KEY, VCS_TYPE_SKIP) |
|
26 | detect_vcs_request, VCS_TYPE_KEY, VCS_TYPE_SKIP) | |
27 |
|
27 | |||
28 |
|
28 | |||
29 | log = logging.getLogger(__name__) |
|
29 | log = logging.getLogger(__name__) | |
30 |
|
30 | |||
31 |
|
31 | |||
32 | def vcs_detection_tween_factory(handler, registry): |
|
32 | def vcs_detection_tween_factory(handler, registry): | |
33 |
|
33 | |||
34 | def vcs_detection_tween(request): |
|
34 | def vcs_detection_tween(request): | |
35 | """ |
|
35 | """ | |
36 | Do detection of vcs type, and save results for other layers to re-use |
|
36 | Do detection of vcs type, and save results for other layers to re-use | |
37 | this information |
|
37 | this information | |
38 | """ |
|
38 | """ | |
39 | vcs_server_enabled = request.registry.settings.get('vcs.server.enable') |
|
39 | vcs_server_enabled = request.registry.settings.get('vcs.server.enable') | |
40 | vcs_handler = vcs_server_enabled and detect_vcs_request( |
|
40 | vcs_handler = vcs_server_enabled and detect_vcs_request( | |
41 | request.environ, request.registry.settings.get('vcs.backends')) |
|
41 | request.environ, request.registry.settings.get('vcs.backends')) | |
42 |
|
42 | |||
43 | if vcs_handler: |
|
43 | if vcs_handler: | |
44 | # save detected VCS type for later re-use |
|
44 | # save detected VCS type for later re-use | |
45 | request.environ[VCS_TYPE_KEY] = vcs_handler.SCM |
|
45 | request.environ[VCS_TYPE_KEY] = vcs_handler.SCM | |
46 | request.vcs_call = vcs_handler.SCM |
|
46 | request.vcs_call = vcs_handler.SCM | |
47 |
|
47 | |||
48 | log.debug('Processing request with `%s` handler', handler) |
|
48 | log.debug('Processing request with `%s` handler', handler) | |
49 | return handler(request) |
|
49 | return handler(request) | |
50 |
|
50 | |||
51 | # mark that we didn't detect an VCS, and we can skip detection later on |
|
51 | # mark that we didn't detect an VCS, and we can skip detection later on | |
52 | request.environ[VCS_TYPE_KEY] = VCS_TYPE_SKIP |
|
52 | request.environ[VCS_TYPE_KEY] = VCS_TYPE_SKIP | |
53 |
|
53 | |||
54 | log.debug('Processing request with `%s` handler', handler) |
|
54 | log.debug('Processing request with `%s` handler', handler) | |
55 | return handler(request) |
|
55 | return handler(request) | |
56 |
|
56 | |||
57 | return vcs_detection_tween |
|
57 | return vcs_detection_tween | |
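The factory shape above is the standard Pyramid tween protocol: the factory receives the downstream handler and the registry, and returns a callable invoked once per request. As a stand-alone illustration, here is a minimal, hypothetical timing tween that is not part of RhodeCode; it would be registered with config.add_tween() the same way as the factories in includeme below.

import logging
import time

log = logging.getLogger(__name__)


def timing_tween_factory(handler, registry):
    def timing_tween(request):
        # wrap the downstream handler and measure how long it takes
        start = time.time()
        try:
            return handler(request)
        finally:
            log.debug('request handled in %.3fs', time.time() - start)
    return timing_tween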
58 |
|
58 | |||
59 |
|
59 | |||
60 | def junk_encoding_detector(request): |
|
60 | def junk_encoding_detector(request): | |
61 | """ |
|
61 | """ | |
62 | Detect bad encoded GET params, and fail immediately with BadRequest |
|
62 | Detect bad encoded GET params, and fail immediately with BadRequest | |
63 | """ |
|
63 | """ | |
64 |
|
64 | |||
65 | try: |
|
65 | try: | |
66 | request.GET.get("", None) |
|
66 | request.GET.get("", None) | |
67 | except UnicodeDecodeError: |
|
67 | except UnicodeDecodeError: | |
68 | raise HTTPBadRequest("Invalid bytes in query string.") |
|
68 | raise HTTPBadRequest("Invalid bytes in query string.") | |
69 |
|
69 | |||
70 |
|
70 | |||
71 | def bad_url_data_detector(request): |
|
71 | def bad_url_data_detector(request): | |
72 | """ |
|
72 | """ | |
73 | Detect invalid bytes in a path. |
|
73 | Detect invalid bytes in a path. | |
74 | """ |
|
74 | """ | |
75 | try: |
|
75 | try: | |
76 | request.path_info |
|
76 | request.path_info | |
77 | except UnicodeDecodeError: |
|
77 | except UnicodeDecodeError: | |
78 | raise HTTPBadRequest("Invalid bytes in URL.") |
|
78 | raise HTTPBadRequest("Invalid bytes in URL.") | |
79 |
|
79 | |||
80 |
|
80 | |||
81 | def junk_form_data_detector(request): |
|
81 | def junk_form_data_detector(request): | |
82 | """ |
|
82 | """ | |
83 | Detect bad encoded POST params, and fail immediately with BadRequest |
|
83 | Detect bad encoded POST params, and fail immediately with BadRequest | |
84 | """ |
|
84 | """ | |
85 |
|
85 | |||
86 | if request.method == "POST": |
|
86 | if request.method == "POST": | |
87 | try: |
|
87 | try: | |
88 | request.POST.get("", None) |
|
88 | request.POST.get("", None) | |
89 | except ValueError: |
|
89 | except ValueError: | |
90 | raise HTTPBadRequest("Invalid bytes in form data.") |
|
90 | raise HTTPBadRequest("Invalid bytes in form data.") | |
91 |
|
91 | |||
92 |
|
92 | |||
93 | def sanity_check_factory(handler, registry): |
|
93 | def sanity_check_factory(handler, registry): | |
94 | def sanity_check(request): |
|
94 | def sanity_check(request): | |
95 | log.debug('Checking current URL sanity for bad data') |
|
95 | log.debug('Checking current URL sanity for bad data') | |
96 | try: |
|
96 | try: | |
97 | junk_encoding_detector(request) |
|
97 | junk_encoding_detector(request) | |
98 | bad_url_data_detector(request) |
|
98 | bad_url_data_detector(request) | |
99 | junk_form_data_detector(request) |
|
99 | junk_form_data_detector(request) | |
100 | except HTTPException as exc: |
|
100 | except HTTPException as exc: | |
101 | return exc |
|
101 | return exc | |
102 |
|
102 | |||
103 | return handler(request) |
|
103 | return handler(request) | |
104 |
|
104 | |||
105 | return sanity_check |
|
105 | return sanity_check | |
106 |
|
106 | |||
107 |
|
107 | |||
108 | def includeme(config): |
|
108 | def includeme(config): | |
109 | config.add_subscriber('rhodecode.subscribers.add_renderer_globals', |
|
109 | config.add_subscriber('rhodecode.subscribers.add_renderer_globals', | |
110 | 'pyramid.events.BeforeRender') |
|
110 | 'pyramid.events.BeforeRender') | |
|
111 | config.add_subscriber('rhodecode.subscribers.update_celery_conf', | |||
|
112 | 'pyramid.events.NewRequest') | |||
111 | config.add_subscriber('rhodecode.subscribers.set_user_lang', |
|
113 | config.add_subscriber('rhodecode.subscribers.set_user_lang', | |
112 | 'pyramid.events.NewRequest') |
|
114 | 'pyramid.events.NewRequest') | |
113 | config.add_subscriber('rhodecode.subscribers.reset_log_bucket', |
|
115 | config.add_subscriber('rhodecode.subscribers.reset_log_bucket', | |
114 | 'pyramid.events.NewRequest') |
|
116 | 'pyramid.events.NewRequest') | |
115 | config.add_subscriber('rhodecode.subscribers.add_request_user_context', |
|
117 | config.add_subscriber('rhodecode.subscribers.add_request_user_context', | |
116 | 'pyramid.events.ContextFound') |
|
118 | 'pyramid.events.ContextFound') | |
117 | config.add_tween('rhodecode.tweens.vcs_detection_tween_factory') |
|
119 | config.add_tween('rhodecode.tweens.vcs_detection_tween_factory') | |
118 | config.add_tween('rhodecode.tweens.sanity_check_factory') |
|
120 | config.add_tween('rhodecode.tweens.sanity_check_factory') | |
119 |
|
121 | |||
120 | # This needs to be the LAST item |
|
122 | # This needs to be the LAST item | |
121 | config.add_tween('rhodecode.lib.middleware.request_wrapper.RequestWrapperTween') |
|
123 | config.add_tween('rhodecode.lib.middleware.request_wrapper.RequestWrapperTween') | |
122 | log.debug('configured all tweens') |
|
124 | log.debug('configured all tweens') |
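Wiring note: including this module runs the includeme above, which now also registers the new update_celery_conf subscriber on pyramid.events.NewRequest, so every incoming request refreshes the Celery configuration from the current registry. A minimal sketch of pulling this in from an app factory follows; make_app is hypothetical, RhodeCode's real factory configures much more.

from pyramid.config import Configurator


def make_app(global_config, **settings):
    config = Configurator(settings=settings)
    # registers the subscribers and the three tweens, keeping
    # RequestWrapperTween as the last tween as the comment above requires
    config.include('rhodecode.tweens')
    return config.make_wsgi_app()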