@@ -1,110 +1,113 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2012-2018 RhodeCode GmbH |
|
3 | # Copyright (C) 2012-2018 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import colander |
|
21 | import colander | |
22 | from rhodecode.translation import _ |
|
22 | from rhodecode.translation import _ | |
23 |
|
23 | |||
24 |
|
24 | |||
25 | class IntegrationTypeBase(object): |
|
25 | class IntegrationTypeBase(object): | |
26 | """ Base class for IntegrationType plugins """ |
|
26 | """ Base class for IntegrationType plugins """ | |
27 | is_dummy = False |
|
27 | is_dummy = False | |
28 | description = '' |
|
28 | description = '' | |
29 | icon = ''' |
|
29 | ||
30 | <?xml version="1.0" encoding="UTF-8" standalone="no"?> |
|
30 | @classmethod | |
31 | <svg |
|
31 | def icon(cls): | |
32 | xmlns:dc="http://purl.org/dc/elements/1.1/" |
|
32 | return ''' | |
33 | xmlns:cc="http://creativecommons.org/ns#" |
|
33 | <?xml version="1.0" encoding="UTF-8" standalone="no"?> | |
34 | xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" |
|
34 | <svg | |
35 | xmlns:svg="http://www.w3.org/2000/svg" |
|
35 | xmlns:dc="http://purl.org/dc/elements/1.1/" | |
36 | xmlns="http://www.w3.org/2000/svg" |
|
36 | xmlns:cc="http://creativecommons.org/ns#" | |
37 | xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" |
|
37 | xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" | |
38 | xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" |
|
38 | xmlns:svg="http://www.w3.org/2000/svg" | |
39 | viewBox="0 -256 1792 1792" |
|
39 | xmlns="http://www.w3.org/2000/svg" | |
40 | id="svg3025" |
|
40 | xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" | |
41 | version="1.1" |
|
41 | xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" | 
42 | inkscape:version="0.48.3.1 r9886" |
|
42 | viewBox="0 -256 1792 1792" | |
43 | width="100%" |
|
43 | id="svg3025" | |
44 | height="100%" |
|
44 | version="1.1" | |
45 | sodipodi:docname="cog_font_awesome.svg"> |
|
45 | inkscape:version="0.48.3.1 r9886" | |
46 | <metadata |
|
46 | width="100%" | |
47 | id="metadata3035"> |
|
47 | height="100%" | |
48 | <rdf:RDF> |
|
48 | sodipodi:docname="cog_font_awesome.svg"> | |
49 | <cc:Work |
|
49 | <metadata | |
50 | rdf:about=""> |
|
50 | id="metadata3035"> | |
51 | <dc:format>image/svg+xml</dc:format> |
|
51 | <rdf:RDF> | |
52 | <dc:type |
|
52 | <cc:Work | |
53 | rdf:resource="http://purl.org/dc/dcmitype/StillImage" /> |
|
53 | rdf:about=""> | |
54 | </cc:Work> |
|
54 | <dc:format>image/svg+xml</dc:format> | |
55 | </rdf:RDF> |
|
55 | <dc:type | |
56 | </metadata> |
|
56 | rdf:resource="http://purl.org/dc/dcmitype/StillImage" /> | |
57 | <defs |
|
57 | </cc:Work> | |
58 | id="defs3033" /> |
|
58 | </rdf:RDF> | |
59 | <sodipodi:namedview |
|
59 | </metadata> | |
60 | pagecolor="#ffffff" |
|
60 | <defs | |
61 | bordercolor="#666666" |
|
61 | id="defs3033" /> | |
62 | borderopacity="1" |
|
62 | <sodipodi:namedview | |
63 | objecttolerance="10" |
|
63 | pagecolor="#ffffff" | |
64 | gridtolerance="10" |
|
64 | bordercolor="#666666" | |
65 | guidetolerance="10" |
|
65 | borderopacity="1" | |
66 | inkscape:pageopacity="0" |
|
66 | objecttolerance="10" | |
67 | inkscape:pageshadow="2" |
|
67 | gridtolerance="10" | |
68 | inkscape:window-width="640" |
|
68 | guidetolerance="10" | |
69 | inkscape:window-height="480" |
|
69 | inkscape:pageopacity="0" | |
70 | id="namedview3031" |
|
70 | inkscape:pageshadow="2" | |
71 | showgrid="false" |
|
71 | inkscape:window-width="640" | |
72 | inkscape:zoom="0.13169643" |
|
72 | inkscape:window-height="480" | |
73 | inkscape:cx="896" |
|
73 | id="namedview3031" | |
74 | inkscape:cy="896" |
|
74 | showgrid="false" | |
75 | inkscape:window-x="0" |
|
75 | inkscape:zoom="0.13169643" | |
76 | inkscape:window-y="25" |
|
76 | inkscape:cx="896" | |
77 | inkscape:window-maximized="0" |
|
77 | inkscape:cy="896" | |
78 | inkscape:current-layer="svg3025" /> |
|
78 | inkscape:window-x="0" | |
79 | <g |
|
79 | inkscape:window-y="25" | |
80 | transform="matrix(1,0,0,-1,121.49153,1285.4237)" |
|
80 | inkscape:window-maximized="0" | |
81 | id="g3027"> |
|
81 | inkscape:current-layer="svg3025" /> | |
82 | <path |
|
82 | <g | |
83 | d="m 1024,640 q 0,106 -75,181 -75,75 -181,75 -106,0 -181,-75 -75,-75 -75,-181 0,-106 75,-181 75,-75 181,-75 106,0 181,75 75,75 75,181 z m 512,109 V 527 q 0,-12 -8,-23 -8,-11 -20,-13 l -185,-28 q -19,-54 -39,-91 35,-50 107,-138 10,-12 10,-25 0,-13 -9,-23 -27,-37 -99,-108 -72,-71 -94,-71 -12,0 -26,9 l -138,108 q -44,-23 -91,-38 -16,-136 -29,-186 -7,-28 -36,-28 H 657 q -14,0 -24.5,8.5 Q 622,-111 621,-98 L 593,86 q -49,16 -90,37 L 362,16 Q 352,7 337,7 323,7 312,18 186,132 147,186 q -7,10 -7,23 0,12 8,23 15,21 51,66.5 36,45.5 54,70.5 -27,50 -41,99 L 29,495 Q 16,497 8,507.5 0,518 0,531 v 222 q 0,12 8,23 8,11 19,13 l 186,28 q 14,46 39,92 -40,57 -107,138 -10,12 -10,24 0,10 9,23 26,36 98.5,107.5 72.5,71.5 94.5,71.5 13,0 26,-10 l 138,-107 q 44,23 91,38 16,136 29,186 7,28 36,28 h 222 q 14,0 24.5,-8.5 Q 914,1391 915,1378 l 28,-184 q 49,-16 90,-37 l 142,107 q 9,9 24,9 13,0 25,-10 129,-119 165,-170 7,-8 7,-22 0,-12 -8,-23 -15,-21 -51,-66.5 -36,-45.5 -54,-70.5 26,-50 41,-98 l 183,-28 q 13,-2 21,-12.5 8,-10.5 8,-23.5 z" |
|
83 | transform="matrix(1,0,0,-1,121.49153,1285.4237)" | |
84 | id="path3029" | 
|
84 | id="g3027"> | |
85 | inkscape:connector-curvature="0" |
|
85 | <path | |
86 | style="fill:currentColor" /> |
|
86 | d="m 1024,640 q 0,106 -75,181 -75,75 -181,75 -106,0 -181,-75 -75,-75 -75,-181 0,-106 75,-181 75,-75 181,-75 106,0 181,75 75,75 75,181 z m 512,109 V 527 q 0,-12 -8,-23 -8,-11 -20,-13 l -185,-28 q -19,-54 -39,-91 35,-50 107,-138 10,-12 10,-25 0,-13 -9,-23 -27,-37 -99,-108 -72,-71 -94,-71 -12,0 -26,9 l -138,108 q -44,-23 -91,-38 -16,-136 -29,-186 -7,-28 -36,-28 H 657 q -14,0 -24.5,8.5 Q 622,-111 621,-98 L 593,86 q -49,16 -90,37 L 362,16 Q 352,7 337,7 323,7 312,18 186,132 147,186 q -7,10 -7,23 0,12 8,23 15,21 51,66.5 36,45.5 54,70.5 -27,50 -41,99 L 29,495 Q 16,497 8,507.5 0,518 0,531 v 222 q 0,12 8,23 8,11 19,13 l 186,28 q 14,46 39,92 -40,57 -107,138 -10,12 -10,24 0,10 9,23 26,36 98.5,107.5 72.5,71.5 94.5,71.5 13,0 26,-10 l 138,-107 q 44,23 91,38 16,136 29,186 7,28 36,28 h 222 q 14,0 24.5,-8.5 Q 914,1391 915,1378 l 28,-184 q 49,-16 90,-37 l 142,107 q 9,9 24,9 13,0 25,-10 129,-119 165,-170 7,-8 7,-22 0,-12 -8,-23 -15,-21 -51,-66.5 -36,-45.5 -54,-70.5 26,-50 41,-98 l 183,-28 q 13,-2 21,-12.5 8,-10.5 8,-23.5 z" | |
87 | </g> |
|
87 | id="path3029" | |
88 | </svg> |
|
88 | inkscape:connector-curvature="0" | |
89 | ''' |
|
89 | style="fill:currentColor" /> | |
|
90 | </g> | |||
|
91 | </svg> | |||
|
92 | ''' | |||
90 |
|
93 | |||
91 | def __init__(self, settings): |
|
94 | def __init__(self, settings): | |
92 | """ |
|
95 | """ | |
93 | :param settings: dict of settings to be used for the integration |
|
96 | :param settings: dict of settings to be used for the integration | |
94 | """ |
|
97 | """ | |
95 | self.settings = settings |
|
98 | self.settings = settings | |
96 |
|
99 | |||
97 | def settings_schema(self): |
|
100 | def settings_schema(self): | |
98 | """ |
|
101 | """ | |
99 | A colander schema of settings for the integration type |
|
102 | A colander schema of settings for the integration type | |
100 | """ |
|
103 | """ | |
101 | return colander.Schema() |
|
104 | return colander.Schema() | |
102 |
|
105 | |||
103 |
|
106 | |||
104 | class EEIntegration(IntegrationTypeBase): |
|
107 | class EEIntegration(IntegrationTypeBase): | |
105 | description = 'Integration available in RhodeCode EE edition.' |
|
108 | description = 'Integration available in RhodeCode EE edition.' | |
106 | is_dummy = True |
|
109 | is_dummy = True | |
107 |
|
110 | |||
108 | def __init__(self, name, key, settings=None): |
|
111 | def __init__(self, name, key, settings=None): | |
109 | self.display_name = name |
|
112 | self.display_name = name | |
110 | self.key = key |
|
113 | self.key = key |
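The hunk above converts the `icon` class attribute of `IntegrationTypeBase` into an `icon()` classmethod. A minimal sketch of what that change means for callers follows; the caller lines are illustrative only and are not taken from RhodeCode itself:

```python
# Minimal sketch of the attribute-to-classmethod change shown above;
# the classes and calls below are illustrative, not part of the changeset.

class OldIntegrationType(object):
    icon = '<svg>...</svg>'           # before: SVG markup stored as a class attribute


class NewIntegrationType(object):
    @classmethod
    def icon(cls):
        # after: the markup is produced on demand, so subclasses can
        # override the method instead of shadowing a large string attribute
        return '<svg>...</svg>'


print(OldIntegrationType.icon)     # attribute access
print(NewIntegrationType.icon())   # the classmethod must now be called
```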
@@ -1,292 +1,295 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2012-2018 RhodeCode GmbH |
|
3 | # Copyright (C) 2012-2018 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | from __future__ import unicode_literals |
|
21 | from __future__ import unicode_literals | |
22 | import deform |
|
22 | import deform | |
23 | import logging |
|
23 | import logging | |
24 | import colander |
|
24 | import colander | |
25 |
|
25 | |||
26 | from mako.template import Template |
|
26 | from mako.template import Template | |
27 |
|
27 | |||
28 | from rhodecode import events |
|
28 | from rhodecode import events | |
29 | from rhodecode.translation import _ |
|
29 | from rhodecode.translation import _ | |
30 | from rhodecode.lib.celerylib import run_task |
|
30 | from rhodecode.lib.celerylib import run_task | |
31 | from rhodecode.lib.celerylib import tasks |
|
31 | from rhodecode.lib.celerylib import tasks | |
32 | from rhodecode.integrations.types.base import IntegrationTypeBase |
|
32 | from rhodecode.integrations.types.base import IntegrationTypeBase | |
33 |
|
33 | |||
34 |
|
34 | |||
35 | log = logging.getLogger(__name__) |
|
35 | log = logging.getLogger(__name__) | |
36 |
|
36 | |||
37 | repo_push_template_plaintext = Template(''' |
|
37 | repo_push_template_plaintext = Template(''' | |
38 | Commits: |
|
38 | Commits: | |
39 |
|
39 | |||
40 | % for commit in data['push']['commits']: |
|
40 | % for commit in data['push']['commits']: | |
41 | ${commit['url']} by ${commit['author']} at ${commit['date']} |
|
41 | ${commit['url']} by ${commit['author']} at ${commit['date']} | |
42 | ${commit['message']} |
|
42 | ${commit['message']} | |
43 | ---- |
|
43 | ---- | |
44 |
|
44 | |||
45 | % endfor |
|
45 | % endfor | |
46 | ''') |
|
46 | ''') | |
47 |
|
47 | |||
48 | ## TODO (marcink): think about putting this into a file, or use base.mako email template |
|
48 | ## TODO (marcink): think about putting this into a file, or use base.mako email template | |
49 |
|
49 | |||
50 | repo_push_template_html = Template(''' |
|
50 | repo_push_template_html = Template(''' | |
51 | <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> |
|
51 | <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> | |
52 | <html xmlns="http://www.w3.org/1999/xhtml"> |
|
52 | <html xmlns="http://www.w3.org/1999/xhtml"> | |
53 | <head> |
|
53 | <head> | |
54 | <meta http-equiv="Content-Type" content="text/html; charset=utf-8" /> |
|
54 | <meta http-equiv="Content-Type" content="text/html; charset=utf-8" /> | |
55 | <meta name="viewport" content="width=device-width, initial-scale=1.0"/> |
|
55 | <meta name="viewport" content="width=device-width, initial-scale=1.0"/> | |
56 | <title>${subject}</title> |
|
56 | <title>${subject}</title> | |
57 | <style type="text/css"> |
|
57 | <style type="text/css"> | |
58 | /* Based on The MailChimp Reset INLINE: Yes. */ |
|
58 | /* Based on The MailChimp Reset INLINE: Yes. */ | |
59 | #outlook a {padding:0;} /* Force Outlook to provide a "view in browser" menu link. */ |
|
59 | #outlook a {padding:0;} /* Force Outlook to provide a "view in browser" menu link. */ | |
60 | body{width:100% !important; -webkit-text-size-adjust:100%; -ms-text-size-adjust:100%; margin:0; padding:0;} |
|
60 | body{width:100% !important; -webkit-text-size-adjust:100%; -ms-text-size-adjust:100%; margin:0; padding:0;} | |
61 | /* Prevent Webkit and Windows Mobile platforms from changing default font sizes.*/ |
|
61 | /* Prevent Webkit and Windows Mobile platforms from changing default font sizes.*/ | |
62 | .ExternalClass {width:100%;} /* Force Hotmail to display emails at full width */ |
|
62 | .ExternalClass {width:100%;} /* Force Hotmail to display emails at full width */ | |
63 | .ExternalClass, .ExternalClass p, .ExternalClass span, .ExternalClass font, .ExternalClass td, .ExternalClass div {line-height: 100%;} |
|
63 | .ExternalClass, .ExternalClass p, .ExternalClass span, .ExternalClass font, .ExternalClass td, .ExternalClass div {line-height: 100%;} | |
64 | /* Forces Hotmail to display normal line spacing. More on that: http://www.emailonacid.com/forum/viewthread/43/ */ |
|
64 | /* Forces Hotmail to display normal line spacing. More on that: http://www.emailonacid.com/forum/viewthread/43/ */ | |
65 | #backgroundTable {margin:0; padding:0; line-height: 100% !important;} |
|
65 | #backgroundTable {margin:0; padding:0; line-height: 100% !important;} | |
66 | /* End reset */ |
|
66 | /* End reset */ | |
67 |
|
67 | |||
68 | /* defaults for images*/ |
|
68 | /* defaults for images*/ | |
69 | img {outline:none; text-decoration:none; -ms-interpolation-mode: bicubic;} |
|
69 | img {outline:none; text-decoration:none; -ms-interpolation-mode: bicubic;} | |
70 | a img {border:none;} |
|
70 | a img {border:none;} | |
71 | .image_fix {display:block;} |
|
71 | .image_fix {display:block;} | |
72 |
|
72 | |||
73 | body {line-height:1.2em;} |
|
73 | body {line-height:1.2em;} | |
74 | p {margin: 0 0 20px;} |
|
74 | p {margin: 0 0 20px;} | |
75 | h1, h2, h3, h4, h5, h6 {color:#323232!important;} |
|
75 | h1, h2, h3, h4, h5, h6 {color:#323232!important;} | |
76 | a {color:#427cc9;text-decoration:none;outline:none;cursor:pointer;} |
|
76 | a {color:#427cc9;text-decoration:none;outline:none;cursor:pointer;} | |
77 | a:focus {outline:none;} |
|
77 | a:focus {outline:none;} | |
78 | a:hover {color: #305b91;} |
|
78 | a:hover {color: #305b91;} | |
79 | h1 a, h2 a, h3 a, h4 a, h5 a, h6 a {color:#427cc9!important;text-decoration:none!important;} |
|
79 | h1 a, h2 a, h3 a, h4 a, h5 a, h6 a {color:#427cc9!important;text-decoration:none!important;} | |
80 | h1 a:active, h2 a:active, h3 a:active, h4 a:active, h5 a:active, h6 a:active {color: #305b91!important;} |
|
80 | h1 a:active, h2 a:active, h3 a:active, h4 a:active, h5 a:active, h6 a:active {color: #305b91!important;} | |
81 | h1 a:visited, h2 a:visited, h3 a:visited, h4 a:visited, h5 a:visited, h6 a:visited {color: #305b91!important;} |
|
81 | h1 a:visited, h2 a:visited, h3 a:visited, h4 a:visited, h5 a:visited, h6 a:visited {color: #305b91!important;} | |
82 | table {font-size:13px;border-collapse:collapse;mso-table-lspace:0pt;mso-table-rspace:0pt;} |
|
82 | table {font-size:13px;border-collapse:collapse;mso-table-lspace:0pt;mso-table-rspace:0pt;} | |
83 | table td {padding:.65em 1em .65em 0;border-collapse:collapse;vertical-align:top;text-align:left;} |
|
83 | table td {padding:.65em 1em .65em 0;border-collapse:collapse;vertical-align:top;text-align:left;} | |
84 | input {display:inline;border-radius:2px;border-style:solid;border: 1px solid #dbd9da;padding:.5em;} |
|
84 | input {display:inline;border-radius:2px;border-style:solid;border: 1px solid #dbd9da;padding:.5em;} | |
85 | input:focus {outline: 1px solid #979797} |
|
85 | input:focus {outline: 1px solid #979797} | |
86 | @media only screen and (-webkit-min-device-pixel-ratio: 2) { |
|
86 | @media only screen and (-webkit-min-device-pixel-ratio: 2) { | |
87 | /* Put your iPhone 4g styles in here */ |
|
87 | /* Put your iPhone 4g styles in here */ | |
88 | } |
|
88 | } | |
89 |
|
89 | |||
90 | /* Android targeting */ |
|
90 | /* Android targeting */ | |
91 | @media only screen and (-webkit-device-pixel-ratio:.75){ |
|
91 | @media only screen and (-webkit-device-pixel-ratio:.75){ | |
92 | /* Put CSS for low density (ldpi) Android layouts in here */ |
|
92 | /* Put CSS for low density (ldpi) Android layouts in here */ | |
93 | } |
|
93 | } | |
94 | @media only screen and (-webkit-device-pixel-ratio:1){ |
|
94 | @media only screen and (-webkit-device-pixel-ratio:1){ | |
95 | /* Put CSS for medium density (mdpi) Android layouts in here */ |
|
95 | /* Put CSS for medium density (mdpi) Android layouts in here */ | |
96 | } |
|
96 | } | |
97 | @media only screen and (-webkit-device-pixel-ratio:1.5){ |
|
97 | @media only screen and (-webkit-device-pixel-ratio:1.5){ | |
98 | /* Put CSS for high density (hdpi) Android layouts in here */ |
|
98 | /* Put CSS for high density (hdpi) Android layouts in here */ | |
99 | } |
|
99 | } | |
100 | /* end Android targeting */ |
|
100 | /* end Android targeting */ | |
101 |
|
101 | |||
102 | </style> |
|
102 | </style> | |
103 |
|
103 | |||
104 | <!-- Targeting Windows Mobile --> |
|
104 | <!-- Targeting Windows Mobile --> | |
105 | <!--[if IEMobile 7]> |
|
105 | <!--[if IEMobile 7]> | |
106 | <style type="text/css"> |
|
106 | <style type="text/css"> | |
107 |
|
107 | |||
108 | </style> |
|
108 | </style> | |
109 | <![endif]--> |
|
109 | <![endif]--> | |
110 |
|
110 | |||
111 | <!--[if gte mso 9]> |
|
111 | <!--[if gte mso 9]> | |
112 | <style> |
|
112 | <style> | |
113 | /* Target Outlook 2007 and 2010 */ |
|
113 | /* Target Outlook 2007 and 2010 */ | |
114 | </style> |
|
114 | </style> | |
115 | <![endif]--> |
|
115 | <![endif]--> | |
116 | </head> |
|
116 | </head> | |
117 | <body> |
|
117 | <body> | |
118 | <!-- Wrapper/Container Table: Use a wrapper table to control the width and the background color consistently of your email. Use this approach instead of setting attributes on the body tag. --> |
|
118 | <!-- Wrapper/Container Table: Use a wrapper table to control the width and the background color consistently of your email. Use this approach instead of setting attributes on the body tag. --> | |
119 | <table cellpadding="0" cellspacing="0" border="0" id="backgroundTable" align="left" style="margin:1%;width:97%;padding:0;font-family:sans-serif;font-weight:100;border:1px solid #dbd9da"> |
|
119 | <table cellpadding="0" cellspacing="0" border="0" id="backgroundTable" align="left" style="margin:1%;width:97%;padding:0;font-family:sans-serif;font-weight:100;border:1px solid #dbd9da"> | |
120 | <tr> |
|
120 | <tr> | |
121 | <td valign="top" style="padding:0;"> |
|
121 | <td valign="top" style="padding:0;"> | |
122 | <table cellpadding="0" cellspacing="0" border="0" align="left" width="100%"> |
|
122 | <table cellpadding="0" cellspacing="0" border="0" align="left" width="100%"> | |
123 | <tr><td style="width:100%;padding:7px;background-color:#202020" valign="top"> |
|
123 | <tr><td style="width:100%;padding:7px;background-color:#202020" valign="top"> | |
124 | <a style="color:#eeeeee;text-decoration:none;" href="${instance_url}"> |
|
124 | <a style="color:#eeeeee;text-decoration:none;" href="${instance_url}"> | |
125 | ${'RhodeCode'} |
|
125 | ${'RhodeCode'} | |
126 | </a> |
|
126 | </a> | |
127 | </td></tr> |
|
127 | </td></tr> | |
128 | <tr> |
|
128 | <tr> | |
129 | <td style="padding:15px;" valign="top"> |
|
129 | <td style="padding:15px;" valign="top"> | |
130 | % for commit in data['push']['commits']: |
|
130 | % for commit in data['push']['commits']: | |
131 | <a href="${commit['url']}">${commit['short_id']}</a> by ${commit['author']} at ${commit['date']} <br/> |
|
131 | <a href="${commit['url']}">${commit['short_id']}</a> by ${commit['author']} at ${commit['date']} <br/> | |
132 | ${commit['message_html']} <br/> |
|
132 | ${commit['message_html']} <br/> | |
133 | <br/> |
|
133 | <br/> | |
134 | % endfor |
|
134 | % endfor | |
135 | </td> |
|
135 | </td> | |
136 | </tr> |
|
136 | </tr> | |
137 | </table> |
|
137 | </table> | |
138 | </td> |
|
138 | </td> | |
139 | </tr> |
|
139 | </tr> | |
140 | </table> |
|
140 | </table> | |
141 | <!-- End of wrapper table --> |
|
141 | <!-- End of wrapper table --> | |
142 | <p><a style="margin-top:15px;margin-left:1%;font-family:sans-serif;font-weight:100;font-size:11px;color:#666666;text-decoration:none;" href="${instance_url}"> |
|
142 | <p><a style="margin-top:15px;margin-left:1%;font-family:sans-serif;font-weight:100;font-size:11px;color:#666666;text-decoration:none;" href="${instance_url}"> | |
143 | ${'This is a notification from RhodeCode. %(instance_url)s' % {'instance_url': instance_url}} |
|
143 | ${'This is a notification from RhodeCode. %(instance_url)s' % {'instance_url': instance_url}} | |
144 | </a></p> |
|
144 | </a></p> | |
145 | </body> |
|
145 | </body> | |
146 | </html> |
|
146 | </html> | |
147 | ''') |
|
147 | ''') | |
148 |
|
148 | |||
149 | email_icon = ''' |
|
149 | ||
|
150 | ||||
|
151 | ||||
|
152 | class EmailSettingsSchema(colander.Schema): | |||
|
153 | @colander.instantiate(validator=colander.Length(min=1)) | |||
|
154 | class recipients(colander.SequenceSchema): | |||
|
155 | title = _('Recipients') | |||
|
156 | description = _('Email addresses to send push events to') | |||
|
157 | widget = deform.widget.SequenceWidget(min_len=1) | |||
|
158 | ||||
|
159 | recipient = colander.SchemaNode( | |||
|
160 | colander.String(), | |||
|
161 | title=_('Email address'), | |||
|
162 | description=_('Email address'), | |||
|
163 | default='', | |||
|
164 | validator=colander.Email(), | |||
|
165 | widget=deform.widget.TextInputWidget( | |||
|
166 | placeholder='user@domain.com', | |||
|
167 | ), | |||
|
168 | ) | |||
|
169 | ||||
|
170 | ||||
|
171 | class EmailIntegrationType(IntegrationTypeBase): | |||
|
172 | key = 'email' | |||
|
173 | display_name = _('Email') | |||
|
174 | description = _('Send repo push summaries to a list of recipients via email') | |||
|
175 | ||||
|
176 | @classmethod | |||
|
177 | def icon(cls): | |||
|
178 | return ''' | |||
150 | <?xml version="1.0" encoding="UTF-8" standalone="no"?> |
|
179 | <?xml version="1.0" encoding="UTF-8" standalone="no"?> | |
151 | <svg |
|
180 | <svg | |
152 | xmlns:dc="http://purl.org/dc/elements/1.1/" |
|
181 | xmlns:dc="http://purl.org/dc/elements/1.1/" | |
153 | xmlns:cc="http://creativecommons.org/ns#" |
|
182 | xmlns:cc="http://creativecommons.org/ns#" | |
154 | xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" |
|
183 | xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" | |
155 | xmlns:svg="http://www.w3.org/2000/svg" |
|
184 | xmlns:svg="http://www.w3.org/2000/svg" | |
156 | xmlns="http://www.w3.org/2000/svg" |
|
185 | xmlns="http://www.w3.org/2000/svg" | |
157 | xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" |
|
186 | xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" | |
158 | xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" |
|
187 | xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" | |
159 | viewBox="0 -256 1850 1850" |
|
188 | viewBox="0 -256 1850 1850" | |
160 | id="svg2989" |
|
189 | id="svg2989" | |
161 | version="1.1" |
|
190 | version="1.1" | |
162 | inkscape:version="0.48.3.1 r9886" |
|
191 | inkscape:version="0.48.3.1 r9886" | |
163 | width="100%" |
|
192 | width="100%" | |
164 | height="100%" |
|
193 | height="100%" | |
165 | sodipodi:docname="envelope_font_awesome.svg"> |
|
194 | sodipodi:docname="envelope_font_awesome.svg"> | |
166 | <metadata |
|
195 | <metadata | |
167 | id="metadata2999"> |
|
196 | id="metadata2999"> | |
168 | <rdf:RDF> |
|
197 | <rdf:RDF> | |
169 | <cc:Work |
|
198 | <cc:Work | |
170 | rdf:about=""> |
|
199 | rdf:about=""> | |
171 | <dc:format>image/svg+xml</dc:format> |
|
200 | <dc:format>image/svg+xml</dc:format> | |
172 | <dc:type |
|
201 | <dc:type | |
173 | rdf:resource="http://purl.org/dc/dcmitype/StillImage" /> |
|
202 | rdf:resource="http://purl.org/dc/dcmitype/StillImage" /> | |
174 | </cc:Work> |
|
203 | </cc:Work> | |
175 | </rdf:RDF> |
|
204 | </rdf:RDF> | |
176 | </metadata> |
|
205 | </metadata> | |
177 | <defs |
|
206 | <defs | |
178 | id="defs2997" /> |
|
207 | id="defs2997" /> | |
179 | <sodipodi:namedview |
|
208 | <sodipodi:namedview | |
180 | pagecolor="#ffffff" |
|
209 | pagecolor="#ffffff" | |
181 | bordercolor="#666666" |
|
210 | bordercolor="#666666" | |
182 | borderopacity="1" |
|
211 | borderopacity="1" | |
183 | objecttolerance="10" |
|
212 | objecttolerance="10" | |
184 | gridtolerance="10" |
|
213 | gridtolerance="10" | |
185 | guidetolerance="10" |
|
214 | guidetolerance="10" | |
186 | inkscape:pageopacity="0" |
|
215 | inkscape:pageopacity="0" | |
187 | inkscape:pageshadow="2" |
|
216 | inkscape:pageshadow="2" | |
188 | inkscape:window-width="640" |
|
217 | inkscape:window-width="640" | |
189 | inkscape:window-height="480" |
|
218 | inkscape:window-height="480" | |
190 | id="namedview2995" |
|
219 | id="namedview2995" | |
191 | showgrid="false" |
|
220 | showgrid="false" | |
192 | inkscape:zoom="0.13169643" |
|
221 | inkscape:zoom="0.13169643" | |
193 | inkscape:cx="896" |
|
222 | inkscape:cx="896" | |
194 | inkscape:cy="896" |
|
223 | inkscape:cy="896" | |
195 | inkscape:window-x="0" |
|
224 | inkscape:window-x="0" | |
196 | inkscape:window-y="25" |
|
225 | inkscape:window-y="25" | |
197 | inkscape:window-maximized="0" |
|
226 | inkscape:window-maximized="0" | |
198 | inkscape:current-layer="svg2989" /> |
|
227 | inkscape:current-layer="svg2989" /> | |
199 | <g |
|
228 | <g | |
200 | transform="matrix(1,0,0,-1,37.966102,1282.678)" |
|
229 | transform="matrix(1,0,0,-1,37.966102,1282.678)" | |
201 | id="g2991"> |
|
230 | id="g2991"> | |
202 | <path |
|
231 | <path | |
203 | d="m 1664,32 v 768 q -32,-36 -69,-66 -268,-206 -426,-338 -51,-43 -83,-67 -32,-24 -86.5,-48.5 Q 945,256 897,256 h -1 -1 Q 847,256 792.5,280.5 738,305 706,329 674,353 623,396 465,528 197,734 160,764 128,800 V 32 Q 128,19 137.5,9.5 147,0 160,0 h 1472 q 13,0 22.5,9.5 9.5,9.5 9.5,22.5 z m 0,1051 v 11 13.5 q 0,0 -0.5,13 -0.5,13 -3,12.5 -2.5,-0.5 -5.5,9 -3,9.5 -9,7.5 -6,-2 -14,2.5 H 160 q -13,0 -22.5,-9.5 Q 128,1133 128,1120 128,952 275,836 468,684 676,519 682,514 711,489.5 740,465 757,452 774,439 801.5,420.5 829,402 852,393 q 23,-9 43,-9 h 1 1 q 20,0 43,9 23,9 50.5,27.5 27.5,18.5 44.5,31.5 17,13 46,37.5 29,24.5 35,29.5 208,165 401,317 54,43 100.5,115.5 46.5,72.5 46.5,131.5 z m 128,37 V 32 q 0,-66 -47,-113 -47,-47 -113,-47 H 160 Q 94,-128 47,-81 0,-34 0,32 v 1088 q 0,66 47,113 47,47 113,47 h 1472 q 66,0 113,-47 47,-47 47,-113 z" |
|
232 | d="m 1664,32 v 768 q -32,-36 -69,-66 -268,-206 -426,-338 -51,-43 -83,-67 -32,-24 -86.5,-48.5 Q 945,256 897,256 h -1 -1 Q 847,256 792.5,280.5 738,305 706,329 674,353 623,396 465,528 197,734 160,764 128,800 V 32 Q 128,19 137.5,9.5 147,0 160,0 h 1472 q 13,0 22.5,9.5 9.5,9.5 9.5,22.5 z m 0,1051 v 11 13.5 q 0,0 -0.5,13 -0.5,13 -3,12.5 -2.5,-0.5 -5.5,9 -3,9.5 -9,7.5 -6,-2 -14,2.5 H 160 q -13,0 -22.5,-9.5 Q 128,1133 128,1120 128,952 275,836 468,684 676,519 682,514 711,489.5 740,465 757,452 774,439 801.5,420.5 829,402 852,393 q 23,-9 43,-9 h 1 1 q 20,0 43,9 23,9 50.5,27.5 27.5,18.5 44.5,31.5 17,13 46,37.5 29,24.5 35,29.5 208,165 401,317 54,43 100.5,115.5 46.5,72.5 46.5,131.5 z m 128,37 V 32 q 0,-66 -47,-113 -47,-47 -113,-47 H 160 Q 94,-128 47,-81 0,-34 0,32 v 1088 q 0,66 47,113 47,47 113,47 h 1472 q 66,0 113,-47 47,-47 47,-113 z" | |
204 | id="path2993" |
|
233 | id="path2993" | |
205 | inkscape:connector-curvature="0" |
|
234 | inkscape:connector-curvature="0" | |
206 | style="fill:currentColor" /> |
|
235 | style="fill:currentColor" /> | |
207 | </g> |
|
236 | </g> | |
208 | </svg> |
|
237 | </svg> | |
209 | ''' |
|
238 | ''' | |
210 |
|
239 | |||
211 |
|
||||
212 | class EmailSettingsSchema(colander.Schema): |
|
|||
213 | @colander.instantiate(validator=colander.Length(min=1)) |
|
|||
214 | class recipients(colander.SequenceSchema): |
|
|||
215 | title = _('Recipients') |
|
|||
216 | description = _('Email addresses to send push events to') |
|
|||
217 | widget = deform.widget.SequenceWidget(min_len=1) |
|
|||
218 |
|
||||
219 | recipient = colander.SchemaNode( |
|
|||
220 | colander.String(), |
|
|||
221 | title=_('Email address'), |
|
|||
222 | description=_('Email address'), |
|
|||
223 | default='', |
|
|||
224 | validator=colander.Email(), |
|
|||
225 | widget=deform.widget.TextInputWidget( |
|
|||
226 | placeholder='user@domain.com', |
|
|||
227 | ), |
|
|||
228 | ) |
|
|||
229 |
|
||||
230 |
|
||||
231 | class EmailIntegrationType(IntegrationTypeBase): |
|
|||
232 | key = 'email' |
|
|||
233 | display_name = _('Email') |
|
|||
234 | description = _('Send repo push summaries to a list of recipients via email') |
|
|||
235 | icon = email_icon |
|
|||
236 |
|
||||
237 | def settings_schema(self): |
|
240 | def settings_schema(self): | |
238 | schema = EmailSettingsSchema() |
|
241 | schema = EmailSettingsSchema() | |
239 | return schema |
|
242 | return schema | |
240 |
|
243 | |||
241 | def send_event(self, event): |
|
244 | def send_event(self, event): | |
242 | data = event.as_dict() |
|
245 | data = event.as_dict() | |
243 | log.debug('got event: %r', event) |
|
246 | log.debug('got event: %r', event) | |
244 |
|
247 | |||
245 | if isinstance(event, events.RepoPushEvent): |
|
248 | if isinstance(event, events.RepoPushEvent): | |
246 | repo_push_handler(data, self.settings) |
|
249 | repo_push_handler(data, self.settings) | |
247 | else: |
|
250 | else: | |
248 | log.debug('ignoring event: %r', event) |
|
251 | log.debug('ignoring event: %r', event) | |
249 |
|
252 | |||
250 |
|
253 | |||
251 | def repo_push_handler(data, settings): |
|
254 | def repo_push_handler(data, settings): | |
252 | commit_num = len(data['push']['commits']) |
|
255 | commit_num = len(data['push']['commits']) | |
253 | server_url = data['server_url'] |
|
256 | server_url = data['server_url'] | |
254 |
|
257 | |||
255 | if commit_num == 1: |
|
258 | if commit_num == 1: | |
256 | if data['push']['branches']: |
|
259 | if data['push']['branches']: | |
257 | _subject = '[{repo_name}] {author} pushed {commit_num} commit on branches: {branches}' |
|
260 | _subject = '[{repo_name}] {author} pushed {commit_num} commit on branches: {branches}' | |
258 | else: |
|
261 | else: | |
259 | _subject = '[{repo_name}] {author} pushed {commit_num} commit' |
|
262 | _subject = '[{repo_name}] {author} pushed {commit_num} commit' | |
260 | subject = _subject.format( |
|
263 | subject = _subject.format( | |
261 | author=data['actor']['username'], |
|
264 | author=data['actor']['username'], | |
262 | repo_name=data['repo']['repo_name'], |
|
265 | repo_name=data['repo']['repo_name'], | |
263 | commit_num=commit_num, |
|
266 | commit_num=commit_num, | |
264 | branches=', '.join( |
|
267 | branches=', '.join( | |
265 | branch['name'] for branch in data['push']['branches']) |
|
268 | branch['name'] for branch in data['push']['branches']) | |
266 | ) |
|
269 | ) | |
267 | else: |
|
270 | else: | |
268 | if data['push']['branches']: |
|
271 | if data['push']['branches']: | |
269 | _subject = '[{repo_name}] {author} pushed {commit_num} commits on branches: {branches}' |
|
272 | _subject = '[{repo_name}] {author} pushed {commit_num} commits on branches: {branches}' | |
270 | else: |
|
273 | else: | |
271 | _subject = '[{repo_name}] {author} pushed {commit_num} commits' |
|
274 | _subject = '[{repo_name}] {author} pushed {commit_num} commits' | |
272 | subject = _subject.format( |
|
275 | subject = _subject.format( | |
273 | author=data['actor']['username'], |
|
276 | author=data['actor']['username'], | |
274 | repo_name=data['repo']['repo_name'], |
|
277 | repo_name=data['repo']['repo_name'], | |
275 | commit_num=commit_num, |
|
278 | commit_num=commit_num, | |
276 | branches=', '.join( |
|
279 | branches=', '.join( | |
277 | branch['name'] for branch in data['push']['branches'])) |
|
280 | branch['name'] for branch in data['push']['branches'])) | |
278 |
|
281 | |||
279 | email_body_plaintext = repo_push_template_plaintext.render( |
|
282 | email_body_plaintext = repo_push_template_plaintext.render( | |
280 | data=data, |
|
283 | data=data, | |
281 | subject=subject, |
|
284 | subject=subject, | |
282 | instance_url=server_url) |
|
285 | instance_url=server_url) | |
283 |
|
286 | |||
284 | email_body_html = repo_push_template_html.render( |
|
287 | email_body_html = repo_push_template_html.render( | |
285 | data=data, |
|
288 | data=data, | |
286 | subject=subject, |
|
289 | subject=subject, | |
287 | instance_url=server_url) |
|
290 | instance_url=server_url) | |
288 |
|
291 | |||
289 | for email_address in settings['recipients']: |
|
292 | for email_address in settings['recipients']: | |
290 | run_task( |
|
293 | run_task( | |
291 | tasks.send_email, email_address, subject, |
|
294 | tasks.send_email, email_address, subject, | |
292 | email_body_plaintext, email_body_html) |
|
295 | email_body_plaintext, email_body_html) |
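For context on how a `settings_schema()` such as `EmailSettingsSchema` is typically consumed, here is a hedged, self-contained colander sketch; the standalone schema and the sample addresses are invented for illustration and are not part of the changeset:

```python
# Self-contained sketch mirroring the recipients schema above; sample data is invented.
import colander


class RecipientsSchema(colander.Schema):
    @colander.instantiate(validator=colander.Length(min=1))
    class recipients(colander.SequenceSchema):
        recipient = colander.SchemaNode(
            colander.String(), validator=colander.Email())


schema = RecipientsSchema()

# A valid payload deserializes cleanly...
print(schema.deserialize({'recipients': ['user@example.com']}))

# ...while an invalid address raises colander.Invalid with per-node errors.
try:
    schema.deserialize({'recipients': ['not-an-email']})
except colander.Invalid as exc:
    print(exc.asdict())
```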
@@ -1,252 +1,256 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2012-2018 RhodeCode GmbH |
|
3 | # Copyright (C) 2012-2018 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | from __future__ import unicode_literals |
|
21 | from __future__ import unicode_literals | |
22 | import deform |
|
22 | import deform | |
23 | import logging |
|
23 | import logging | |
24 | import requests |
|
24 | import requests | |
25 | import colander |
|
25 | import colander | |
26 | import textwrap |
|
26 | import textwrap | |
27 | from collections import OrderedDict |
|
27 | from collections import OrderedDict | |
28 | from mako.template import Template |
|
28 | from mako.template import Template | |
29 | from rhodecode import events |
|
29 | from rhodecode import events | |
30 | from rhodecode.translation import _ |
|
30 | from rhodecode.translation import _ | |
31 | from rhodecode.lib import helpers as h |
|
31 | from rhodecode.lib import helpers as h | |
32 | from rhodecode.lib.celerylib import run_task, async_task, RequestContextTask |
|
32 | from rhodecode.lib.celerylib import run_task, async_task, RequestContextTask | |
33 | from rhodecode.lib.colander_utils import strip_whitespace |
|
33 | from rhodecode.lib.colander_utils import strip_whitespace | |
34 | from rhodecode.integrations.types.base import IntegrationTypeBase |
|
34 | from rhodecode.integrations.types.base import IntegrationTypeBase | |
35 |
|
35 | |||
36 | log = logging.getLogger(__name__) |
|
36 | log = logging.getLogger(__name__) | |
37 |
|
37 | |||
38 |
|
38 | |||
39 | class HipchatSettingsSchema(colander.Schema): |
|
39 | class HipchatSettingsSchema(colander.Schema): | |
40 | color_choices = [ |
|
40 | color_choices = [ | |
41 | ('yellow', _('Yellow')), |
|
41 | ('yellow', _('Yellow')), | |
42 | ('red', _('Red')), |
|
42 | ('red', _('Red')), | |
43 | ('green', _('Green')), |
|
43 | ('green', _('Green')), | |
44 | ('purple', _('Purple')), |
|
44 | ('purple', _('Purple')), | |
45 | ('gray', _('Gray')), |
|
45 | ('gray', _('Gray')), | |
46 | ] |
|
46 | ] | |
47 |
|
47 | |||
48 | server_url = colander.SchemaNode( |
|
48 | server_url = colander.SchemaNode( | |
49 | colander.String(), |
|
49 | colander.String(), | |
50 | title=_('Hipchat server URL'), |
|
50 | title=_('Hipchat server URL'), | |
51 | description=_('Hipchat integration url.'), |
|
51 | description=_('Hipchat integration url.'), | |
52 | default='', |
|
52 | default='', | |
53 | preparer=strip_whitespace, |
|
53 | preparer=strip_whitespace, | |
54 | validator=colander.url, |
|
54 | validator=colander.url, | |
55 | widget=deform.widget.TextInputWidget( |
|
55 | widget=deform.widget.TextInputWidget( | |
56 | placeholder='https://?.hipchat.com/v2/room/?/notification?auth_token=?', |
|
56 | placeholder='https://?.hipchat.com/v2/room/?/notification?auth_token=?', | |
57 | ), |
|
57 | ), | |
58 | ) |
|
58 | ) | |
59 | notify = colander.SchemaNode( |
|
59 | notify = colander.SchemaNode( | |
60 | colander.Bool(), |
|
60 | colander.Bool(), | |
61 | title=_('Notify'), |
|
61 | title=_('Notify'), | |
62 | description=_('Make a notification to the users in room.'), |
|
62 | description=_('Make a notification to the users in room.'), | |
63 | missing=False, |
|
63 | missing=False, | |
64 | default=False, |
|
64 | default=False, | |
65 | ) |
|
65 | ) | |
66 | color = colander.SchemaNode( |
|
66 | color = colander.SchemaNode( | |
67 | colander.String(), |
|
67 | colander.String(), | |
68 | title=_('Color'), |
|
68 | title=_('Color'), | |
69 | description=_('Background color of message.'), |
|
69 | description=_('Background color of message.'), | |
70 | missing='', |
|
70 | missing='', | |
71 | validator=colander.OneOf([x[0] for x in color_choices]), |
|
71 | validator=colander.OneOf([x[0] for x in color_choices]), | |
72 | widget=deform.widget.Select2Widget( |
|
72 | widget=deform.widget.Select2Widget( | |
73 | values=color_choices, |
|
73 | values=color_choices, | |
74 | ), |
|
74 | ), | |
75 | ) |
|
75 | ) | |
76 |
|
76 | |||
77 |
|
77 | |||
78 | repo_push_template = Template(''' |
|
78 | repo_push_template = Template(''' | |
79 | <b>${data['actor']['username']}</b> pushed to repo <a href="${data['repo']['url']}">${data['repo']['repo_name']}</a>: |
|
79 | <b>${data['actor']['username']}</b> pushed to repo <a href="${data['repo']['url']}">${data['repo']['repo_name']}</a>: | |
80 | <br> |
|
80 | <br> | |
81 | <ul> |
|
81 | <ul> | |
82 | %for branch, branch_commits in branches_commits.items(): |
|
82 | %for branch, branch_commits in branches_commits.items(): | |
83 | <li> |
|
83 | <li> | |
84 | <a href="${branch_commits['branch']['url']}">branch: ${branch_commits['branch']['name']}</a> |
|
84 | <a href="${branch_commits['branch']['url']}">branch: ${branch_commits['branch']['name']}</a> | |
85 | <ul> |
|
85 | <ul> | |
86 | %for commit in branch_commits['commits']: |
|
86 | %for commit in branch_commits['commits']: | |
87 | <li><a href="${commit['url']}">${commit['short_id']}</a> - ${commit['message_html']}</li> |
|
87 | <li><a href="${commit['url']}">${commit['short_id']}</a> - ${commit['message_html']}</li> | |
88 | %endfor |
|
88 | %endfor | |
89 | </ul> |
|
89 | </ul> | |
90 | </li> |
|
90 | </li> | |
91 | %endfor |
|
91 | %endfor | |
92 | ''') |
|
92 | ''') | |
93 |
|
93 | |||
94 |
|
94 | |||
95 | class HipchatIntegrationType(IntegrationTypeBase): |
|
95 | class HipchatIntegrationType(IntegrationTypeBase): | |
96 | key = 'hipchat' |
|
96 | key = 'hipchat' | |
97 | display_name = _('Hipchat') |
|
97 | display_name = _('Hipchat') | |
98 | description = _('Send events such as repo pushes and pull requests to ' |
|
98 | description = _('Send events such as repo pushes and pull requests to ' | |
99 | 'your hipchat channel.') |
|
99 | 'your hipchat channel.') | |
100 | icon = '''<?xml version="1.0" encoding="utf-8"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 1000 1000" enable-background="new 0 0 1000 1000" xml:space="preserve"><g><g transform="translate(0.000000,511.000000) scale(0.100000,-0.100000)"><path fill="#205281" d="M4197.1,4662.4c-1661.5-260.4-3018-1171.6-3682.6-2473.3C219.9,1613.6,100,1120.3,100,462.6c0-1014,376.8-1918.4,1127-2699.4C2326.7-3377.6,3878.5-3898.3,5701-3730.5l486.5,44.5l208.9-123.3c637.2-373.4,1551.8-640.6,2240.4-650.9c304.9-6.9,335.7,0,417.9,75.4c185,174.7,147.3,411.1-89.1,548.1c-315.2,181.6-620,544.7-733.1,870.1l-51.4,157.6l472.7,472.7c349.4,349.4,520.7,551.5,657.7,774.2c784.5,1281.2,784.5,2788.5,0,4052.6c-236.4,376.8-794.8,966-1178.4,1236.7c-572.1,407.7-1264.1,709.1-1993.7,870.1c-267.2,58.2-479.6,75.4-1038,82.2C4714.4,4686.4,4310.2,4679.6,4197.1,4662.4z M5947.6,3740.9c1856.7-380.3,3127.6-1709.4,3127.6-3275c0-1000.3-534.4-1949.2-1466.2-2600.1c-188.4-133.6-287.8-226.1-301.5-284.4c-41.1-157.6,263.8-938.6,397.4-1020.8c20.5-10.3,34.3-44.5,34.3-75.4c0-167.8-811.9,195.3-1363.4,609.8l-181.6,137l-332.3-58.2c-445.3-78.8-1281.2-78.8-1702.6,0C2796-2569.2,1734.1-1832.6,1220.2-801.5C983.8-318.5,905,51.5,929,613.3c27.4,640.6,243.2,1192.1,685.1,1740.3c620,770.8,1661.5,1305.2,2822.8,1452.5C4806.9,3854,5553.7,3819.7,5947.6,3740.9z"/><path fill="#205281" d="M2381.5-345.9c-75.4-106.2-68.5-167.8,34.3-322c332.3-500.2,1010.6-928.4,1760.8-1120.2c417.9-106.2,1226.4-106.2,1644.3,0c712.5,181.6,1270.9,517.3,1685.4,1014C7681-561.7,7715.3-424.7,7616-325.4c-89.1,89.1-167.9,65.1-431.7-133.6c-835.8-630.3-2028-856.4-3086.5-585.8C3683.3-938.6,3142-685,2830.3-448.7C2576.8-253.4,2463.7-229.4,2381.5-345.9z"/></g></g><!-- Svg Vector Icons : http://www.onlinewebfonts.com/icon --></svg>''' |
|
100 | ||
|
101 | @classmethod | |||
|
102 | def icon(cls): | |||
|
103 | return '''<?xml version="1.0" encoding="utf-8"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 1000 1000" enable-background="new 0 0 1000 1000" xml:space="preserve"><g><g transform="translate(0.000000,511.000000) scale(0.100000,-0.100000)"><path fill="#205281" d="M4197.1,4662.4c-1661.5-260.4-3018-1171.6-3682.6-2473.3C219.9,1613.6,100,1120.3,100,462.6c0-1014,376.8-1918.4,1127-2699.4C2326.7-3377.6,3878.5-3898.3,5701-3730.5l486.5,44.5l208.9-123.3c637.2-373.4,1551.8-640.6,2240.4-650.9c304.9-6.9,335.7,0,417.9,75.4c185,174.7,147.3,411.1-89.1,548.1c-315.2,181.6-620,544.7-733.1,870.1l-51.4,157.6l472.7,472.7c349.4,349.4,520.7,551.5,657.7,774.2c784.5,1281.2,784.5,2788.5,0,4052.6c-236.4,376.8-794.8,966-1178.4,1236.7c-572.1,407.7-1264.1,709.1-1993.7,870.1c-267.2,58.2-479.6,75.4-1038,82.2C4714.4,4686.4,4310.2,4679.6,4197.1,4662.4z M5947.6,3740.9c1856.7-380.3,3127.6-1709.4,3127.6-3275c0-1000.3-534.4-1949.2-1466.2-2600.1c-188.4-133.6-287.8-226.1-301.5-284.4c-41.1-157.6,263.8-938.6,397.4-1020.8c20.5-10.3,34.3-44.5,34.3-75.4c0-167.8-811.9,195.3-1363.4,609.8l-181.6,137l-332.3-58.2c-445.3-78.8-1281.2-78.8-1702.6,0C2796-2569.2,1734.1-1832.6,1220.2-801.5C983.8-318.5,905,51.5,929,613.3c27.4,640.6,243.2,1192.1,685.1,1740.3c620,770.8,1661.5,1305.2,2822.8,1452.5C4806.9,3854,5553.7,3819.7,5947.6,3740.9z"/><path fill="#205281" d="M2381.5-345.9c-75.4-106.2-68.5-167.8,34.3-322c332.3-500.2,1010.6-928.4,1760.8-1120.2c417.9-106.2,1226.4-106.2,1644.3,0c712.5,181.6,1270.9,517.3,1685.4,1014C7681-561.7,7715.3-424.7,7616-325.4c-89.1,89.1-167.9,65.1-431.7-133.6c-835.8-630.3-2028-856.4-3086.5-585.8C3683.3-938.6,3142-685,2830.3-448.7C2576.8-253.4,2463.7-229.4,2381.5-345.9z"/></g></g><!-- Svg Vector Icons : http://www.onlinewebfonts.com/icon --></svg>''' | |||
|
104 | ||||
101 | valid_events = [ |
|
105 | valid_events = [ | |
102 | events.PullRequestCloseEvent, |
|
106 | events.PullRequestCloseEvent, | |
103 | events.PullRequestMergeEvent, |
|
107 | events.PullRequestMergeEvent, | |
104 | events.PullRequestUpdateEvent, |
|
108 | events.PullRequestUpdateEvent, | |
105 | events.PullRequestCommentEvent, |
|
109 | events.PullRequestCommentEvent, | |
106 | events.PullRequestReviewEvent, |
|
110 | events.PullRequestReviewEvent, | |
107 | events.PullRequestCreateEvent, |
|
111 | events.PullRequestCreateEvent, | |
108 | events.RepoPushEvent, |
|
112 | events.RepoPushEvent, | |
109 | events.RepoCreateEvent, |
|
113 | events.RepoCreateEvent, | |
110 | ] |
|
114 | ] | |
111 |
|
115 | |||
112 | def send_event(self, event): |
|
116 | def send_event(self, event): | |
113 | if event.__class__ not in self.valid_events: |
|
117 | if event.__class__ not in self.valid_events: | |
114 | log.debug('event not valid: %r' % event) |
|
118 | log.debug('event not valid: %r' % event) | |
115 | return |
|
119 | return | |
116 |
|
120 | |||
117 | if event.name not in self.settings['events']: |
|
121 | if event.name not in self.settings['events']: | |
118 | log.debug('event ignored: %r' % event) |
|
122 | log.debug('event ignored: %r' % event) | |
119 | return |
|
123 | return | |
120 |
|
124 | |||
121 | data = event.as_dict() |
|
125 | data = event.as_dict() | |
122 |
|
126 | |||
123 | text = '<b>%s<b> caused a <b>%s</b> event' % ( |
|
127 | text = '<b>%s<b> caused a <b>%s</b> event' % ( | |
124 | data['actor']['username'], event.name) |
|
128 | data['actor']['username'], event.name) | |
125 |
|
129 | |||
126 | log.debug('handling hipchat event for %s' % event.name) |
|
130 | log.debug('handling hipchat event for %s' % event.name) | |
127 |
|
131 | |||
128 | if isinstance(event, events.PullRequestCommentEvent): |
|
132 | if isinstance(event, events.PullRequestCommentEvent): | |
129 | text = self.format_pull_request_comment_event(event, data) |
|
133 | text = self.format_pull_request_comment_event(event, data) | |
130 | elif isinstance(event, events.PullRequestReviewEvent): |
|
134 | elif isinstance(event, events.PullRequestReviewEvent): | |
131 | text = self.format_pull_request_review_event(event, data) |
|
135 | text = self.format_pull_request_review_event(event, data) | |
132 | elif isinstance(event, events.PullRequestEvent): |
|
136 | elif isinstance(event, events.PullRequestEvent): | |
133 | text = self.format_pull_request_event(event, data) |
|
137 | text = self.format_pull_request_event(event, data) | |
134 | elif isinstance(event, events.RepoPushEvent): |
|
138 | elif isinstance(event, events.RepoPushEvent): | |
135 | text = self.format_repo_push_event(data) |
|
139 | text = self.format_repo_push_event(data) | |
136 | elif isinstance(event, events.RepoCreateEvent): |
|
140 | elif isinstance(event, events.RepoCreateEvent): | |
137 | text = self.format_repo_create_event(data) |
|
141 | text = self.format_repo_create_event(data) | |
138 | else: |
|
142 | else: | |
139 | log.error('unhandled event type: %r' % event) |
|
143 | log.error('unhandled event type: %r' % event) | |
140 |
|
144 | |||
141 | run_task(post_text_to_hipchat, self.settings, text) |
|
145 | run_task(post_text_to_hipchat, self.settings, text) | |
142 |
|
146 | |||
143 | def settings_schema(self): |
|
147 | def settings_schema(self): | |
144 | schema = HipchatSettingsSchema() |
|
148 | schema = HipchatSettingsSchema() | |
145 | schema.add(colander.SchemaNode( |
|
149 | schema.add(colander.SchemaNode( | |
146 | colander.Set(), |
|
150 | colander.Set(), | |
147 | widget=deform.widget.CheckboxChoiceWidget( |
|
151 | widget=deform.widget.CheckboxChoiceWidget( | |
148 | values=sorted( |
|
152 | values=sorted( | |
149 | [(e.name, e.display_name) for e in self.valid_events] |
|
153 | [(e.name, e.display_name) for e in self.valid_events] | |
150 | ) |
|
154 | ) | |
151 | ), |
|
155 | ), | |
152 | description="Events activated for this integration", |
|
156 | description="Events activated for this integration", | |
153 | name='events' |
|
157 | name='events' | |
154 | )) |
|
158 | )) | |
155 |
|
159 | |||
156 | return schema |
|
160 | return schema | |
157 |
|
161 | |||
158 | def format_pull_request_comment_event(self, event, data): |
|
162 | def format_pull_request_comment_event(self, event, data): | |
159 | comment_text = data['comment']['text'] |
|
163 | comment_text = data['comment']['text'] | |
160 | if len(comment_text) > 200: |
|
164 | if len(comment_text) > 200: | |
161 | comment_text = '{comment_text}<a href="{comment_url}">...<a/>'.format( |
|
165 | comment_text = '{comment_text}<a href="{comment_url}">...<a/>'.format( | |
162 | comment_text=h.html_escape(comment_text[:200]), |
|
166 | comment_text=h.html_escape(comment_text[:200]), | |
163 | comment_url=data['comment']['url'], |
|
167 | comment_url=data['comment']['url'], | |
164 | ) |
|
168 | ) | |
165 |
|
169 | |||
166 | comment_status = '' |
|
170 | comment_status = '' | |
167 | if data['comment']['status']: |
|
171 | if data['comment']['status']: | |
168 | comment_status = '[{}]: '.format(data['comment']['status']) |
|
172 | comment_status = '[{}]: '.format(data['comment']['status']) | |
169 |
|
173 | |||
170 | return (textwrap.dedent( |
|
174 | return (textwrap.dedent( | |
171 | ''' |
|
175 | ''' | |
172 | {user} commented on pull request <a href="{pr_url}">{number}</a> - {pr_title}: |
|
176 | {user} commented on pull request <a href="{pr_url}">{number}</a> - {pr_title}: | |
173 | >>> {comment_status}{comment_text} |
|
177 | >>> {comment_status}{comment_text} | |
174 | ''').format( |
|
178 | ''').format( | |
175 | comment_status=comment_status, |
|
179 | comment_status=comment_status, | |
176 | user=data['actor']['username'], |
|
180 | user=data['actor']['username'], | |
177 | number=data['pullrequest']['pull_request_id'], |
|
181 | number=data['pullrequest']['pull_request_id'], | |
178 | pr_url=data['pullrequest']['url'], |
|
182 | pr_url=data['pullrequest']['url'], | |
179 | pr_status=data['pullrequest']['status'], |
|
183 | pr_status=data['pullrequest']['status'], | |
180 | pr_title=h.html_escape(data['pullrequest']['title']), |
|
184 | pr_title=h.html_escape(data['pullrequest']['title']), | |
181 | comment_text=h.html_escape(comment_text) |
|
185 | comment_text=h.html_escape(comment_text) | |
182 | ) |
|
186 | ) | |
183 | ) |
|
187 | ) | |
184 |
|
188 | |||
185 | def format_pull_request_review_event(self, event, data): |
|
189 | def format_pull_request_review_event(self, event, data): | |
186 | return (textwrap.dedent( |
|
190 | return (textwrap.dedent( | |
187 | ''' |
|
191 | ''' | |
188 | Status changed to {pr_status} for pull request <a href="{pr_url}">#{number}</a> - {pr_title} |
|
192 | Status changed to {pr_status} for pull request <a href="{pr_url}">#{number}</a> - {pr_title} | |
189 | ''').format( |
|
193 | ''').format( | |
190 | user=data['actor']['username'], |
|
194 | user=data['actor']['username'], | |
191 | number=data['pullrequest']['pull_request_id'], |
|
195 | number=data['pullrequest']['pull_request_id'], | |
192 | pr_url=data['pullrequest']['url'], |
|
196 | pr_url=data['pullrequest']['url'], | |
193 | pr_status=data['pullrequest']['status'], |
|
197 | pr_status=data['pullrequest']['status'], | |
194 | pr_title=h.html_escape(data['pullrequest']['title']), |
|
198 | pr_title=h.html_escape(data['pullrequest']['title']), | |
195 | ) |
|
199 | ) | |
196 | ) |
|
200 | ) | |
197 |
|
201 | |||
198 | def format_pull_request_event(self, event, data): |
|
202 | def format_pull_request_event(self, event, data): | |
199 | action = { |
|
203 | action = { | |
200 | events.PullRequestCloseEvent: 'closed', |
|
204 | events.PullRequestCloseEvent: 'closed', | |
201 | events.PullRequestMergeEvent: 'merged', |
|
205 | events.PullRequestMergeEvent: 'merged', | |
202 | events.PullRequestUpdateEvent: 'updated', |
|
206 | events.PullRequestUpdateEvent: 'updated', | |
203 | events.PullRequestCreateEvent: 'created', |
|
207 | events.PullRequestCreateEvent: 'created', | |
204 | }.get(event.__class__, str(event.__class__)) |
|
208 | }.get(event.__class__, str(event.__class__)) | |
205 |
|
209 | |||
206 | return ('Pull request <a href="{url}">#{number}</a> - {title} ' |
|
210 | return ('Pull request <a href="{url}">#{number}</a> - {title} ' | |
207 | '{action} by <b>{user}</b>').format( |
|
211 | '{action} by <b>{user}</b>').format( | |
208 | user=data['actor']['username'], |
|
212 | user=data['actor']['username'], | |
209 | number=data['pullrequest']['pull_request_id'], |
|
213 | number=data['pullrequest']['pull_request_id'], | |
210 | url=data['pullrequest']['url'], |
|
214 | url=data['pullrequest']['url'], | |
211 | title=h.html_escape(data['pullrequest']['title']), |
|
215 | title=h.html_escape(data['pullrequest']['title']), | |
212 | action=action |
|
216 | action=action | |
213 | ) |
|
217 | ) | |
214 |
|
218 | |||
215 | def format_repo_push_event(self, data): |
|
219 | def format_repo_push_event(self, data): | |
216 | branch_data = {branch['name']: branch |
|
220 | branch_data = {branch['name']: branch | |
217 | for branch in data['push']['branches']} |
|
221 | for branch in data['push']['branches']} | |
218 |
|
222 | |||
219 | branches_commits = OrderedDict() |
|
223 | branches_commits = OrderedDict() | |
220 | for commit in data['push']['commits']: |
|
224 | for commit in data['push']['commits']: | |
221 | if commit['branch'] not in branches_commits: |
|
225 | if commit['branch'] not in branches_commits: | |
222 | branch_commits = {'branch': branch_data[commit['branch']], |
|
226 | branch_commits = {'branch': branch_data[commit['branch']], | |
223 | 'commits': []} |
|
227 | 'commits': []} | |
224 | branches_commits[commit['branch']] = branch_commits |
|
228 | branches_commits[commit['branch']] = branch_commits | |
225 |
|
229 | |||
226 | branch_commits = branches_commits[commit['branch']] |
|
230 | branch_commits = branches_commits[commit['branch']] | |
227 | branch_commits['commits'].append(commit) |
|
231 | branch_commits['commits'].append(commit) | |
228 |
|
232 | |||
229 | result = repo_push_template.render( |
|
233 | result = repo_push_template.render( | |
230 | data=data, |
|
234 | data=data, | |
231 | branches_commits=branches_commits, |
|
235 | branches_commits=branches_commits, | |
232 | ) |
|
236 | ) | |
233 | return result |
|
237 | return result | |
234 |
|
238 | |||
235 | def format_repo_create_event(self, data): |
|
239 | def format_repo_create_event(self, data): | |
236 | return '<a href="{}">{}</a> ({}) repository created by <b>{}</b>'.format( |
|
240 | return '<a href="{}">{}</a> ({}) repository created by <b>{}</b>'.format( | |
237 | data['repo']['url'], |
|
241 | data['repo']['url'], | |
238 | h.html_escape(data['repo']['repo_name']), |
|
242 | h.html_escape(data['repo']['repo_name']), | |
239 | data['repo']['repo_type'], |
|
243 | data['repo']['repo_type'], | |
240 | data['actor']['username'], |
|
244 | data['actor']['username'], | |
241 | ) |
|
245 | ) | |
242 |
|
246 | |||
243 |
|
247 | |||
244 | @async_task(ignore_result=True, base=RequestContextTask) |
|
248 | @async_task(ignore_result=True, base=RequestContextTask) | |
245 | def post_text_to_hipchat(settings, text): |
|
249 | def post_text_to_hipchat(settings, text): | |
246 | log.debug('sending %s to hipchat %s' % (text, settings['server_url'])) |
|
250 | log.debug('sending %s to hipchat %s' % (text, settings['server_url'])) | |
247 | resp = requests.post(settings['server_url'], json={ |
|
251 | resp = requests.post(settings['server_url'], json={ | |
248 | "message": text, |
|
252 | "message": text, | |
249 | "color": settings.get('color', 'yellow'), |
|
253 | "color": settings.get('color', 'yellow'), | |
250 | "notify": settings.get('notify', False), |
|
254 | "notify": settings.get('notify', False), | |
251 | }) |
|
255 | }) | |
252 | resp.raise_for_status() # raise exception on a failed request |
|
256 | resp.raise_for_status() # raise exception on a failed request |
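Note (illustration only, not part of this changeset): the task above simply POSTs a small JSON body to the configured HipChat room-notification URL. A minimal sketch of the equivalent request, with a hypothetical server_url, room id and token, and with the same 'color' and 'notify' fallbacks the task uses:

    # illustrative only -- the settings values below are hypothetical
    import requests

    settings = {
        'server_url': 'https://hipchat.example.com/v2/room/42/notification?auth_token=TOKEN',
        'color': 'green',   # post_text_to_hipchat falls back to 'yellow' when absent
        'notify': True,     # falls back to False when absent
    }

    # equivalent to the request issued by post_text_to_hipchat(settings, text)
    resp = requests.post(settings['server_url'], json={
        'message': 'Pull request <a href="https://example.com/pr/1">#1</a> merged by <b>admin</b>',
        'color': settings.get('color', 'yellow'),
        'notify': settings.get('notify', False),
    })
    resp.raise_for_status()  # raise exception on a failed request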
@@ -1,334 +1,338 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2012-2018 RhodeCode GmbH |
|
3 | # Copyright (C) 2012-2018 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | from __future__ import unicode_literals |
|
21 | from __future__ import unicode_literals | |
22 | import re |
|
22 | import re | |
23 | import time |
|
23 | import time | |
24 | import textwrap |
|
24 | import textwrap | |
25 | import logging |
|
25 | import logging | |
26 |
|
26 | |||
27 | import deform |
|
27 | import deform | |
28 | import requests |
|
28 | import requests | |
29 | import colander |
|
29 | import colander | |
30 | from mako.template import Template |
|
30 | from mako.template import Template | |
31 | from collections import OrderedDict |
|
31 | from collections import OrderedDict | |
32 |
|
32 | |||
33 | from rhodecode import events |
|
33 | from rhodecode import events | |
34 | from rhodecode.translation import _ |
|
34 | from rhodecode.translation import _ | |
35 | from rhodecode.lib import helpers as h |
|
35 | from rhodecode.lib import helpers as h | |
36 | from rhodecode.lib.celerylib import run_task, async_task, RequestContextTask |
|
36 | from rhodecode.lib.celerylib import run_task, async_task, RequestContextTask | |
37 | from rhodecode.lib.colander_utils import strip_whitespace |
|
37 | from rhodecode.lib.colander_utils import strip_whitespace | |
38 | from rhodecode.integrations.types.base import IntegrationTypeBase |
|
38 | from rhodecode.integrations.types.base import IntegrationTypeBase | |
39 |
|
39 | |||
40 | log = logging.getLogger(__name__) |
|
40 | log = logging.getLogger(__name__) | |
41 |
|
41 | |||
42 |
|
42 | |||
43 | class SlackSettingsSchema(colander.Schema): |
|
43 | class SlackSettingsSchema(colander.Schema): | |
44 | service = colander.SchemaNode( |
|
44 | service = colander.SchemaNode( | |
45 | colander.String(), |
|
45 | colander.String(), | |
46 | title=_('Slack service URL'), |
|
46 | title=_('Slack service URL'), | |
47 | description=h.literal(_( |
|
47 | description=h.literal(_( | |
48 | 'This can be setup at the ' |
|
48 | 'This can be setup at the ' | |
49 | '<a href="https://my.slack.com/services/new/incoming-webhook/">' |
|
49 | '<a href="https://my.slack.com/services/new/incoming-webhook/">' | |
50 | 'slack app manager</a>')), |
|
50 | 'slack app manager</a>')), | |
51 | default='', |
|
51 | default='', | |
52 | preparer=strip_whitespace, |
|
52 | preparer=strip_whitespace, | |
53 | validator=colander.url, |
|
53 | validator=colander.url, | |
54 | widget=deform.widget.TextInputWidget( |
|
54 | widget=deform.widget.TextInputWidget( | |
55 | placeholder='https://hooks.slack.com/services/...', |
|
55 | placeholder='https://hooks.slack.com/services/...', | |
56 | ), |
|
56 | ), | |
57 | ) |
|
57 | ) | |
58 | username = colander.SchemaNode( |
|
58 | username = colander.SchemaNode( | |
59 | colander.String(), |
|
59 | colander.String(), | |
60 | title=_('Username'), |
|
60 | title=_('Username'), | |
61 | description=_('Username to show notifications coming from.'), |
|
61 | description=_('Username to show notifications coming from.'), | |
62 | missing='Rhodecode', |
|
62 | missing='Rhodecode', | |
63 | preparer=strip_whitespace, |
|
63 | preparer=strip_whitespace, | |
64 | widget=deform.widget.TextInputWidget( |
|
64 | widget=deform.widget.TextInputWidget( | |
65 | placeholder='Rhodecode' |
|
65 | placeholder='Rhodecode' | |
66 | ), |
|
66 | ), | |
67 | ) |
|
67 | ) | |
68 | channel = colander.SchemaNode( |
|
68 | channel = colander.SchemaNode( | |
69 | colander.String(), |
|
69 | colander.String(), | |
70 | title=_('Channel'), |
|
70 | title=_('Channel'), | |
71 | description=_('Channel to send notifications to.'), |
|
71 | description=_('Channel to send notifications to.'), | |
72 | missing='', |
|
72 | missing='', | |
73 | preparer=strip_whitespace, |
|
73 | preparer=strip_whitespace, | |
74 | widget=deform.widget.TextInputWidget( |
|
74 | widget=deform.widget.TextInputWidget( | |
75 | placeholder='#general' |
|
75 | placeholder='#general' | |
76 | ), |
|
76 | ), | |
77 | ) |
|
77 | ) | |
78 | icon_emoji = colander.SchemaNode( |
|
78 | icon_emoji = colander.SchemaNode( | |
79 | colander.String(), |
|
79 | colander.String(), | |
80 | title=_('Emoji'), |
|
80 | title=_('Emoji'), | |
81 | description=_('Emoji to use e.g. :studio_microphone:'), |
|
81 | description=_('Emoji to use e.g. :studio_microphone:'), | |
82 | missing='', |
|
82 | missing='', | |
83 | preparer=strip_whitespace, |
|
83 | preparer=strip_whitespace, | |
84 | widget=deform.widget.TextInputWidget( |
|
84 | widget=deform.widget.TextInputWidget( | |
85 | placeholder=':studio_microphone:' |
|
85 | placeholder=':studio_microphone:' | |
86 | ), |
|
86 | ), | |
87 | ) |
|
87 | ) | |
88 |
|
88 | |||
89 |
|
89 | |||
90 | class SlackIntegrationType(IntegrationTypeBase): |
|
90 | class SlackIntegrationType(IntegrationTypeBase): | |
91 | key = 'slack' |
|
91 | key = 'slack' | |
92 | display_name = _('Slack') |
|
92 | display_name = _('Slack') | |
93 | description = _('Send events such as repo pushes and pull requests to ' |
|
93 | description = _('Send events such as repo pushes and pull requests to ' | |
94 | 'your slack channel.') |
|
94 | 'your slack channel.') | |
95 | icon = '''<?xml version="1.0" encoding="UTF-8" standalone="no"?><svg viewBox="0 0 256 256" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid"><g><path d="M165.963541,15.8384262 C162.07318,3.86308197 149.212328,-2.69009836 137.239082,1.20236066 C125.263738,5.09272131 118.710557,17.9535738 122.603016,29.9268197 L181.550164,211.292328 C185.597902,222.478689 197.682361,228.765377 209.282098,225.426885 C221.381246,221.943607 228.756984,209.093246 224.896,197.21023 C224.749115,196.756984 165.963541,15.8384262 165.963541,15.8384262" fill="#DFA22F"></path><path d="M74.6260984,45.515541 C70.7336393,33.5422951 57.8727869,26.9891148 45.899541,30.8794754 C33.9241967,34.7698361 27.3710164,47.6306885 31.2634754,59.6060328 L90.210623,240.971541 C94.2583607,252.157902 106.34282,258.44459 117.942557,255.104 C130.041705,251.62282 137.417443,238.772459 133.556459,226.887344 C133.409574,226.436197 74.6260984,45.515541 74.6260984,45.515541" fill="#3CB187"></path><path d="M240.161574,166.045377 C252.136918,162.155016 258.688,149.294164 254.797639,137.31882 C250.907279,125.345574 238.046426,118.792393 226.07318,122.682754 L44.7076721,181.632 C33.5213115,185.677639 27.234623,197.762098 30.5731148,209.361836 C34.0563934,221.460984 46.9067541,228.836721 58.7897705,224.975738 C59.2430164,224.828852 240.161574,166.045377 240.161574,166.045377" fill="#CE1E5B"></path><path d="M82.507541,217.270557 C94.312918,213.434754 109.528131,208.491016 125.855475,203.186361 C122.019672,191.380984 117.075934,176.163672 111.76918,159.83423 L68.4191475,173.924721 L82.507541,217.270557" fill="#392538"></path><path d="M173.847082,187.591344 C190.235279,182.267803 205.467279,177.31777 217.195016,173.507148 C213.359213,161.70177 208.413377,146.480262 203.106623,130.146623 L159.75659,144.237115 L173.847082,187.591344" fill="#BB242A"></path><path d="M210.484459,74.7058361 C222.457705,70.8154754 229.010885,57.954623 225.120525,45.9792787 C221.230164,34.0060328 208.369311,27.4528525 196.393967,31.3432131 L15.028459,90.292459 C3.84209836,94.3380984 -2.44459016,106.422557 0.896,118.022295 C4.37718033,130.121443 17.227541,137.49718 29.1126557,133.636197 C29.5638033,133.489311 210.484459,74.7058361 210.484459,74.7058361" fill="#72C5CD"></path><path d="M52.8220328,125.933115 C64.6274098,122.097311 79.8468197,117.151475 96.1762623,111.84682 C90.8527213,95.4565246 85.9026885,80.2245246 82.0920656,68.4946885 L38.731541,82.5872787 L52.8220328,125.933115" fill="#248C73"></path><path d="M144.159475,96.256 C160.551869,90.9303607 175.785967,85.9803279 187.515803,82.1676066 C182.190164,65.7752131 177.240131,50.5390164 173.42741,38.807082 L130.068984,52.8996721 L144.159475,96.256" fill="#62803A"></path></g></svg>''' |
|
95 | ||
|
96 | @classmethod | |||
|
97 | def icon(cls): | |||
|
98 | return '''<?xml version="1.0" encoding="UTF-8" standalone="no"?><svg viewBox="0 0 256 256" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid"><g><path d="M165.963541,15.8384262 C162.07318,3.86308197 149.212328,-2.69009836 137.239082,1.20236066 C125.263738,5.09272131 118.710557,17.9535738 122.603016,29.9268197 L181.550164,211.292328 C185.597902,222.478689 197.682361,228.765377 209.282098,225.426885 C221.381246,221.943607 228.756984,209.093246 224.896,197.21023 C224.749115,196.756984 165.963541,15.8384262 165.963541,15.8384262" fill="#DFA22F"></path><path d="M74.6260984,45.515541 C70.7336393,33.5422951 57.8727869,26.9891148 45.899541,30.8794754 C33.9241967,34.7698361 27.3710164,47.6306885 31.2634754,59.6060328 L90.210623,240.971541 C94.2583607,252.157902 106.34282,258.44459 117.942557,255.104 C130.041705,251.62282 137.417443,238.772459 133.556459,226.887344 C133.409574,226.436197 74.6260984,45.515541 74.6260984,45.515541" fill="#3CB187"></path><path d="M240.161574,166.045377 C252.136918,162.155016 258.688,149.294164 254.797639,137.31882 C250.907279,125.345574 238.046426,118.792393 226.07318,122.682754 L44.7076721,181.632 C33.5213115,185.677639 27.234623,197.762098 30.5731148,209.361836 C34.0563934,221.460984 46.9067541,228.836721 58.7897705,224.975738 C59.2430164,224.828852 240.161574,166.045377 240.161574,166.045377" fill="#CE1E5B"></path><path d="M82.507541,217.270557 C94.312918,213.434754 109.528131,208.491016 125.855475,203.186361 C122.019672,191.380984 117.075934,176.163672 111.76918,159.83423 L68.4191475,173.924721 L82.507541,217.270557" fill="#392538"></path><path d="M173.847082,187.591344 C190.235279,182.267803 205.467279,177.31777 217.195016,173.507148 C213.359213,161.70177 208.413377,146.480262 203.106623,130.146623 L159.75659,144.237115 L173.847082,187.591344" fill="#BB242A"></path><path d="M210.484459,74.7058361 C222.457705,70.8154754 229.010885,57.954623 225.120525,45.9792787 C221.230164,34.0060328 208.369311,27.4528525 196.393967,31.3432131 L15.028459,90.292459 C3.84209836,94.3380984 -2.44459016,106.422557 0.896,118.022295 C4.37718033,130.121443 17.227541,137.49718 29.1126557,133.636197 C29.5638033,133.489311 210.484459,74.7058361 210.484459,74.7058361" fill="#72C5CD"></path><path d="M52.8220328,125.933115 C64.6274098,122.097311 79.8468197,117.151475 96.1762623,111.84682 C90.8527213,95.4565246 85.9026885,80.2245246 82.0920656,68.4946885 L38.731541,82.5872787 L52.8220328,125.933115" fill="#248C73"></path><path d="M144.159475,96.256 C160.551869,90.9303607 175.785967,85.9803279 187.515803,82.1676066 C182.190164,65.7752131 177.240131,50.5390164 173.42741,38.807082 L130.068984,52.8996721 L144.159475,96.256" fill="#62803A"></path></g></svg>''' | |||
|
99 | ||||
96 | valid_events = [ |
|
100 | valid_events = [ | |
97 | events.PullRequestCloseEvent, |
|
101 | events.PullRequestCloseEvent, | |
98 | events.PullRequestMergeEvent, |
|
102 | events.PullRequestMergeEvent, | |
99 | events.PullRequestUpdateEvent, |
|
103 | events.PullRequestUpdateEvent, | |
100 | events.PullRequestCommentEvent, |
|
104 | events.PullRequestCommentEvent, | |
101 | events.PullRequestReviewEvent, |
|
105 | events.PullRequestReviewEvent, | |
102 | events.PullRequestCreateEvent, |
|
106 | events.PullRequestCreateEvent, | |
103 | events.RepoPushEvent, |
|
107 | events.RepoPushEvent, | |
104 | events.RepoCreateEvent, |
|
108 | events.RepoCreateEvent, | |
105 | ] |
|
109 | ] | |
106 |
|
110 | |||
107 | def send_event(self, event): |
|
111 | def send_event(self, event): | |
108 | if event.__class__ not in self.valid_events: |
|
112 | if event.__class__ not in self.valid_events: | |
109 | log.debug('event not valid: %r' % event) |
|
113 | log.debug('event not valid: %r' % event) | |
110 | return |
|
114 | return | |
111 |
|
115 | |||
112 | if event.name not in self.settings['events']: |
|
116 | if event.name not in self.settings['events']: | |
113 | log.debug('event ignored: %r' % event) |
|
117 | log.debug('event ignored: %r' % event) | |
114 | return |
|
118 | return | |
115 |
|
119 | |||
116 | data = event.as_dict() |
|
120 | data = event.as_dict() | |
117 |
|
121 | |||
118 | # defaults |
|
122 | # defaults | |
119 | title = '*%s* caused a *%s* event' % ( |
|
123 | title = '*%s* caused a *%s* event' % ( | |
120 | data['actor']['username'], event.name) |
|
124 | data['actor']['username'], event.name) | |
121 | text = '*%s* caused a *%s* event' % ( |
|
125 | text = '*%s* caused a *%s* event' % ( | |
122 | data['actor']['username'], event.name) |
|
126 | data['actor']['username'], event.name) | |
123 | fields = None |
|
127 | fields = None | |
124 | overrides = None |
|
128 | overrides = None | |
125 |
|
129 | |||
126 | log.debug('handling slack event for %s' % event.name) |
|
130 | log.debug('handling slack event for %s' % event.name) | |
127 |
|
131 | |||
128 | if isinstance(event, events.PullRequestCommentEvent): |
|
132 | if isinstance(event, events.PullRequestCommentEvent): | |
129 | (title, text, fields, overrides) \ |
|
133 | (title, text, fields, overrides) \ | |
130 | = self.format_pull_request_comment_event(event, data) |
|
134 | = self.format_pull_request_comment_event(event, data) | |
131 | elif isinstance(event, events.PullRequestReviewEvent): |
|
135 | elif isinstance(event, events.PullRequestReviewEvent): | |
132 | title, text = self.format_pull_request_review_event(event, data) |
|
136 | title, text = self.format_pull_request_review_event(event, data) | |
133 | elif isinstance(event, events.PullRequestEvent): |
|
137 | elif isinstance(event, events.PullRequestEvent): | |
134 | title, text = self.format_pull_request_event(event, data) |
|
138 | title, text = self.format_pull_request_event(event, data) | |
135 | elif isinstance(event, events.RepoPushEvent): |
|
139 | elif isinstance(event, events.RepoPushEvent): | |
136 | title, text = self.format_repo_push_event(data) |
|
140 | title, text = self.format_repo_push_event(data) | |
137 | elif isinstance(event, events.RepoCreateEvent): |
|
141 | elif isinstance(event, events.RepoCreateEvent): | |
138 | title, text = self.format_repo_create_event(data) |
|
142 | title, text = self.format_repo_create_event(data) | |
139 | else: |
|
143 | else: | |
140 | log.error('unhandled event type: %r' % event) |
|
144 | log.error('unhandled event type: %r' % event) | |
141 |
|
145 | |||
142 | run_task(post_text_to_slack, self.settings, title, text, fields, overrides) |
|
146 | run_task(post_text_to_slack, self.settings, title, text, fields, overrides) | |
143 |
|
147 | |||
144 | def settings_schema(self): |
|
148 | def settings_schema(self): | |
145 | schema = SlackSettingsSchema() |
|
149 | schema = SlackSettingsSchema() | |
146 | schema.add(colander.SchemaNode( |
|
150 | schema.add(colander.SchemaNode( | |
147 | colander.Set(), |
|
151 | colander.Set(), | |
148 | widget=deform.widget.CheckboxChoiceWidget( |
|
152 | widget=deform.widget.CheckboxChoiceWidget( | |
149 | values=sorted( |
|
153 | values=sorted( | |
150 | [(e.name, e.display_name) for e in self.valid_events] |
|
154 | [(e.name, e.display_name) for e in self.valid_events] | |
151 | ) |
|
155 | ) | |
152 | ), |
|
156 | ), | |
153 | description="Events activated for this integration", |
|
157 | description="Events activated for this integration", | |
154 | name='events' |
|
158 | name='events' | |
155 | )) |
|
159 | )) | |
156 |
|
160 | |||
157 | return schema |
|
161 | return schema | |
158 |
|
162 | |||
159 | def format_pull_request_comment_event(self, event, data): |
|
163 | def format_pull_request_comment_event(self, event, data): | |
160 | comment_text = data['comment']['text'] |
|
164 | comment_text = data['comment']['text'] | |
161 | if len(comment_text) > 200: |
|
165 | if len(comment_text) > 200: | |
162 | comment_text = '<{comment_url}|{comment_text}...>'.format( |
|
166 | comment_text = '<{comment_url}|{comment_text}...>'.format( | |
163 | comment_text=comment_text[:200], |
|
167 | comment_text=comment_text[:200], | |
164 | comment_url=data['comment']['url'], |
|
168 | comment_url=data['comment']['url'], | |
165 | ) |
|
169 | ) | |
166 |
|
170 | |||
167 | fields = None |
|
171 | fields = None | |
168 | overrides = None |
|
172 | overrides = None | |
169 | status_text = None |
|
173 | status_text = None | |
170 |
|
174 | |||
171 | if data['comment']['status']: |
|
175 | if data['comment']['status']: | |
172 | status_color = { |
|
176 | status_color = { | |
173 | 'approved': '#0ac878', |
|
177 | 'approved': '#0ac878', | |
174 | 'rejected': '#e85e4d'}.get(data['comment']['status']) |
|
178 | 'rejected': '#e85e4d'}.get(data['comment']['status']) | |
175 |
|
179 | |||
176 | if status_color: |
|
180 | if status_color: | |
177 | overrides = {"color": status_color} |
|
181 | overrides = {"color": status_color} | |
178 |
|
182 | |||
179 | status_text = data['comment']['status'] |
|
183 | status_text = data['comment']['status'] | |
180 |
|
184 | |||
181 | if data['comment']['file']: |
|
185 | if data['comment']['file']: | |
182 | fields = [ |
|
186 | fields = [ | |
183 | { |
|
187 | { | |
184 | "title": "file", |
|
188 | "title": "file", | |
185 | "value": data['comment']['file'] |
|
189 | "value": data['comment']['file'] | |
186 | }, |
|
190 | }, | |
187 | { |
|
191 | { | |
188 | "title": "line", |
|
192 | "title": "line", | |
189 | "value": data['comment']['line'] |
|
193 | "value": data['comment']['line'] | |
190 | } |
|
194 | } | |
191 | ] |
|
195 | ] | |
192 |
|
196 | |||
193 | title = Template(textwrap.dedent(r''' |
|
197 | title = Template(textwrap.dedent(r''' | |
194 | *${data['actor']['username']}* left ${data['comment']['type']} on pull request <${data['pullrequest']['url']}|#${data['pullrequest']['pull_request_id']}>: |
|
198 | *${data['actor']['username']}* left ${data['comment']['type']} on pull request <${data['pullrequest']['url']}|#${data['pullrequest']['pull_request_id']}>: | |
195 | ''')).render(data=data, comment=event.comment) |
|
199 | ''')).render(data=data, comment=event.comment) | |
196 |
|
200 | |||
197 | text = Template(textwrap.dedent(r''' |
|
201 | text = Template(textwrap.dedent(r''' | |
198 | *pull request title*: ${pr_title} |
|
202 | *pull request title*: ${pr_title} | |
199 | % if status_text: |
|
203 | % if status_text: | |
200 | *submitted status*: `${status_text}` |
|
204 | *submitted status*: `${status_text}` | |
201 | % endif |
|
205 | % endif | |
202 | >>> ${comment_text} |
|
206 | >>> ${comment_text} | |
203 | ''')).render(comment_text=comment_text, |
|
207 | ''')).render(comment_text=comment_text, | |
204 | pr_title=data['pullrequest']['title'], |
|
208 | pr_title=data['pullrequest']['title'], | |
205 | status_text=status_text) |
|
209 | status_text=status_text) | |
206 |
|
210 | |||
207 | return title, text, fields, overrides |
|
211 | return title, text, fields, overrides | |
208 |
|
212 | |||
209 | def format_pull_request_review_event(self, event, data): |
|
213 | def format_pull_request_review_event(self, event, data): | |
210 | title = Template(textwrap.dedent(r''' |
|
214 | title = Template(textwrap.dedent(r''' | |
211 | *${data['actor']['username']}* changed status of pull request <${data['pullrequest']['url']}|#${data['pullrequest']['pull_request_id']} to `${data['pullrequest']['status']}`>: |
|
215 | *${data['actor']['username']}* changed status of pull request <${data['pullrequest']['url']}|#${data['pullrequest']['pull_request_id']} to `${data['pullrequest']['status']}`>: | |
212 | ''')).render(data=data) |
|
216 | ''')).render(data=data) | |
213 |
|
217 | |||
214 | text = Template(textwrap.dedent(r''' |
|
218 | text = Template(textwrap.dedent(r''' | |
215 | *pull request title*: ${pr_title} |
|
219 | *pull request title*: ${pr_title} | |
216 | ''')).render( |
|
220 | ''')).render( | |
217 | pr_title=data['pullrequest']['title'], |
|
221 | pr_title=data['pullrequest']['title'], | |
218 | ) |
|
222 | ) | |
219 |
|
223 | |||
220 | return title, text |
|
224 | return title, text | |
221 |
|
225 | |||
222 | def format_pull_request_event(self, event, data): |
|
226 | def format_pull_request_event(self, event, data): | |
223 | action = { |
|
227 | action = { | |
224 | events.PullRequestCloseEvent: 'closed', |
|
228 | events.PullRequestCloseEvent: 'closed', | |
225 | events.PullRequestMergeEvent: 'merged', |
|
229 | events.PullRequestMergeEvent: 'merged', | |
226 | events.PullRequestUpdateEvent: 'updated', |
|
230 | events.PullRequestUpdateEvent: 'updated', | |
227 | events.PullRequestCreateEvent: 'created', |
|
231 | events.PullRequestCreateEvent: 'created', | |
228 | }.get(event.__class__, str(event.__class__)) |
|
232 | }.get(event.__class__, str(event.__class__)) | |
229 |
|
233 | |||
230 | title = Template(textwrap.dedent(r''' |
|
234 | title = Template(textwrap.dedent(r''' | |
231 | *${data['actor']['username']}* `${action}` pull request <${data['pullrequest']['url']}|#${data['pullrequest']['pull_request_id']}>: |
|
235 | *${data['actor']['username']}* `${action}` pull request <${data['pullrequest']['url']}|#${data['pullrequest']['pull_request_id']}>: | |
232 | ''')).render(data=data, action=action) |
|
236 | ''')).render(data=data, action=action) | |
233 |
|
237 | |||
234 | text = Template(textwrap.dedent(r''' |
|
238 | text = Template(textwrap.dedent(r''' | |
235 | *pull request title*: ${pr_title} |
|
239 | *pull request title*: ${pr_title} | |
236 | %if data['pullrequest']['commits']: |
|
240 | %if data['pullrequest']['commits']: | |
237 | *commits*: ${len(data['pullrequest']['commits'])} |
|
241 | *commits*: ${len(data['pullrequest']['commits'])} | |
238 | %endif |
|
242 | %endif | |
239 | ''')).render( |
|
243 | ''')).render( | |
240 | pr_title=data['pullrequest']['title'], |
|
244 | pr_title=data['pullrequest']['title'], | |
241 | data=data |
|
245 | data=data | |
242 | ) |
|
246 | ) | |
243 |
|
247 | |||
244 | return title, text |
|
248 | return title, text | |
245 |
|
249 | |||
246 | def format_repo_push_event(self, data): |
|
250 | def format_repo_push_event(self, data): | |
247 | branch_data = {branch['name']: branch |
|
251 | branch_data = {branch['name']: branch | |
248 | for branch in data['push']['branches']} |
|
252 | for branch in data['push']['branches']} | |
249 |
|
253 | |||
250 | branches_commits = OrderedDict() |
|
254 | branches_commits = OrderedDict() | |
251 | for commit in data['push']['commits']: |
|
255 | for commit in data['push']['commits']: | |
252 | if commit['branch'] not in branches_commits: |
|
256 | if commit['branch'] not in branches_commits: | |
253 | branch_commits = {'branch': branch_data[commit['branch']], |
|
257 | branch_commits = {'branch': branch_data[commit['branch']], | |
254 | 'commits': []} |
|
258 | 'commits': []} | |
255 | branches_commits[commit['branch']] = branch_commits |
|
259 | branches_commits[commit['branch']] = branch_commits | |
256 |
|
260 | |||
257 | branch_commits = branches_commits[commit['branch']] |
|
261 | branch_commits = branches_commits[commit['branch']] | |
258 | branch_commits['commits'].append(commit) |
|
262 | branch_commits['commits'].append(commit) | |
259 |
|
263 | |||
260 | title = Template(r''' |
|
264 | title = Template(r''' | |
261 | *${data['actor']['username']}* pushed to repo <${data['repo']['url']}|${data['repo']['repo_name']}>: |
|
265 | *${data['actor']['username']}* pushed to repo <${data['repo']['url']}|${data['repo']['repo_name']}>: | |
262 | ''').render(data=data) |
|
266 | ''').render(data=data) | |
263 |
|
267 | |||
264 | repo_push_template = Template(textwrap.dedent(r''' |
|
268 | repo_push_template = Template(textwrap.dedent(r''' | |
265 | %for branch, branch_commits in branches_commits.items(): |
|
269 | %for branch, branch_commits in branches_commits.items(): | |
266 | ${len(branch_commits['commits'])} ${'commit' if len(branch_commits['commits']) == 1 else 'commits'} on branch: <${branch_commits['branch']['url']}|${branch_commits['branch']['name']}> |
|
270 | ${len(branch_commits['commits'])} ${'commit' if len(branch_commits['commits']) == 1 else 'commits'} on branch: <${branch_commits['branch']['url']}|${branch_commits['branch']['name']}> | |
267 | %for commit in branch_commits['commits']: |
|
271 | %for commit in branch_commits['commits']: | |
268 | `<${commit['url']}|${commit['short_id']}>` - ${commit['message_html']|html_to_slack_links} |
|
272 | `<${commit['url']}|${commit['short_id']}>` - ${commit['message_html']|html_to_slack_links} | |
269 | %endfor |
|
273 | %endfor | |
270 | %endfor |
|
274 | %endfor | |
271 | ''')) |
|
275 | ''')) | |
272 |
|
276 | |||
273 | text = repo_push_template.render( |
|
277 | text = repo_push_template.render( | |
274 | data=data, |
|
278 | data=data, | |
275 | branches_commits=branches_commits, |
|
279 | branches_commits=branches_commits, | |
276 | html_to_slack_links=html_to_slack_links, |
|
280 | html_to_slack_links=html_to_slack_links, | |
277 | ) |
|
281 | ) | |
278 |
|
282 | |||
279 | return title, text |
|
283 | return title, text | |
280 |
|
284 | |||
281 | def format_repo_create_event(self, data): |
|
285 | def format_repo_create_event(self, data): | |
282 | title = Template(r''' |
|
286 | title = Template(r''' | |
283 | *${data['actor']['username']}* created new repository ${data['repo']['repo_name']}: |
|
287 | *${data['actor']['username']}* created new repository ${data['repo']['repo_name']}: | |
284 | ''').render(data=data) |
|
288 | ''').render(data=data) | |
285 |
|
289 | |||
286 | text = Template(textwrap.dedent(r''' |
|
290 | text = Template(textwrap.dedent(r''' | |
287 | repo_url: ${data['repo']['url']} |
|
291 | repo_url: ${data['repo']['url']} | |
288 | repo_type: ${data['repo']['repo_type']} |
|
292 | repo_type: ${data['repo']['repo_type']} | |
289 | ''')).render(data=data) |
|
293 | ''')).render(data=data) | |
290 |
|
294 | |||
291 | return title, text |
|
295 | return title, text | |
292 |
|
296 | |||
293 |
|
297 | |||
294 | def html_to_slack_links(message): |
|
298 | def html_to_slack_links(message): | |
295 | return re.compile(r'<a .*?href=["\'](.+?)".*?>(.+?)</a>').sub( |
|
299 | return re.compile(r'<a .*?href=["\'](.+?)".*?>(.+?)</a>').sub( | |
296 | r'<\1|\2>', message) |
|
300 | r'<\1|\2>', message) | |
297 |
|
301 | |||
298 |
|
302 | |||
299 | @async_task(ignore_result=True, base=RequestContextTask) |
|
303 | @async_task(ignore_result=True, base=RequestContextTask) | |
300 | def post_text_to_slack(settings, title, text, fields=None, overrides=None): |
|
304 | def post_text_to_slack(settings, title, text, fields=None, overrides=None): | |
301 | log.debug('sending %s (%s) to slack %s' % ( |
|
305 | log.debug('sending %s (%s) to slack %s' % ( | |
302 | title, text, settings['service'])) |
|
306 | title, text, settings['service'])) | |
303 |
|
307 | |||
304 | fields = fields or [] |
|
308 | fields = fields or [] | |
305 | overrides = overrides or {} |
|
309 | overrides = overrides or {} | |
306 |
|
310 | |||
307 | message_data = { |
|
311 | message_data = { | |
308 | "fallback": text, |
|
312 | "fallback": text, | |
309 | "color": "#427cc9", |
|
313 | "color": "#427cc9", | |
310 | "pretext": title, |
|
314 | "pretext": title, | |
311 | #"author_name": "Bobby Tables", |
|
315 | #"author_name": "Bobby Tables", | |
312 | #"author_link": "http://flickr.com/bobby/", |
|
316 | #"author_link": "http://flickr.com/bobby/", | |
313 | #"author_icon": "http://flickr.com/icons/bobby.jpg", |
|
317 | #"author_icon": "http://flickr.com/icons/bobby.jpg", | |
314 | #"title": "Slack API Documentation", |
|
318 | #"title": "Slack API Documentation", | |
315 | #"title_link": "https://api.slack.com/", |
|
319 | #"title_link": "https://api.slack.com/", | |
316 | "text": text, |
|
320 | "text": text, | |
317 | "fields": fields, |
|
321 | "fields": fields, | |
318 | #"image_url": "http://my-website.com/path/to/image.jpg", |
|
322 | #"image_url": "http://my-website.com/path/to/image.jpg", | |
319 | #"thumb_url": "http://example.com/path/to/thumb.png", |
|
323 | #"thumb_url": "http://example.com/path/to/thumb.png", | |
320 | "footer": "RhodeCode", |
|
324 | "footer": "RhodeCode", | |
321 | #"footer_icon": "", |
|
325 | #"footer_icon": "", | |
322 | "ts": time.time(), |
|
326 | "ts": time.time(), | |
323 | "mrkdwn_in": ["pretext", "text"] |
|
327 | "mrkdwn_in": ["pretext", "text"] | |
324 | } |
|
328 | } | |
325 | message_data.update(overrides) |
|
329 | message_data.update(overrides) | |
326 | json_message = { |
|
330 | json_message = { | |
327 | "icon_emoji": settings.get('icon_emoji', ':studio_microphone:'), |
|
331 | "icon_emoji": settings.get('icon_emoji', ':studio_microphone:'), | |
328 | "channel": settings.get('channel', ''), |
|
332 | "channel": settings.get('channel', ''), | |
329 | "username": settings.get('username', 'Rhodecode'), |
|
333 | "username": settings.get('username', 'Rhodecode'), | |
330 | "attachments": [message_data] |
|
334 | "attachments": [message_data] | |
331 | } |
|
335 | } | |
332 |
|
336 | |||
333 | resp = requests.post(settings['service'], json=json_message) |
|
337 | resp = requests.post(settings['service'], json=json_message) | |
334 | resp.raise_for_status() # raise exception on a failed request |
|
338 | resp.raise_for_status() # raise exception on a failed request |
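For reference, the incoming-webhook body assembled by post_text_to_slack above has the following shape; the values are purely illustrative (not taken from this changeset), but the keys mirror the message_data/json_message dictionaries built in the function:

    # illustrative payload only -- same keys as json_message above
    example_json_message = {
        'icon_emoji': ':studio_microphone:',
        'channel': '#general',
        'username': 'Rhodecode',
        'attachments': [{
            'fallback': '*admin* caused a *repo-push* event',
            'color': '#427cc9',
            'pretext': '*admin* pushed to repo <https://code.example.com/myrepo|myrepo>:',
            'text': '1 commit on branch: <https://code.example.com/myrepo/branches/master|master>',
            'fields': [],
            'footer': 'RhodeCode',
            'ts': 1514764800,
            'mrkdwn_in': ['pretext', 'text'],
        }],
    }
    # posted with: requests.post(settings['service'], json=example_json_message)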
@@ -1,395 +1,398 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2012-2018 RhodeCode GmbH |
|
3 | # Copyright (C) 2012-2018 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | from __future__ import unicode_literals |
|
21 | from __future__ import unicode_literals | |
22 | import string |
|
22 | import string | |
23 | from collections import OrderedDict |
|
23 | from collections import OrderedDict | |
24 |
|
24 | |||
25 | import deform |
|
25 | import deform | |
26 | import deform.widget |
|
26 | import deform.widget | |
27 | import logging |
|
27 | import logging | |
28 | import requests |
|
28 | import requests | |
29 | import requests.adapters |
|
29 | import requests.adapters | |
30 | import colander |
|
30 | import colander | |
31 | from requests.packages.urllib3.util.retry import Retry |
|
31 | from requests.packages.urllib3.util.retry import Retry | |
32 |
|
32 | |||
33 | import rhodecode |
|
33 | import rhodecode | |
34 | from rhodecode import events |
|
34 | from rhodecode import events | |
35 | from rhodecode.translation import _ |
|
35 | from rhodecode.translation import _ | |
36 | from rhodecode.integrations.types.base import IntegrationTypeBase |
|
36 | from rhodecode.integrations.types.base import IntegrationTypeBase | |
37 | from rhodecode.lib.celerylib import run_task, async_task, RequestContextTask |
|
37 | from rhodecode.lib.celerylib import run_task, async_task, RequestContextTask | |
38 |
|
38 | |||
39 | log = logging.getLogger(__name__) |
|
39 | log = logging.getLogger(__name__) | |
40 |
|
40 | |||
41 |
|
41 | |||
42 | # updating this required to update the `common_vars` passed in url calling func |
|
42 | # updating this required to update the `common_vars` passed in url calling func | |
43 | WEBHOOK_URL_VARS = [ |
|
43 | WEBHOOK_URL_VARS = [ | |
44 | 'repo_name', |
|
44 | 'repo_name', | |
45 | 'repo_type', |
|
45 | 'repo_type', | |
46 | 'repo_id', |
|
46 | 'repo_id', | |
47 | 'repo_url', |
|
47 | 'repo_url', | |
48 | # extra repo fields |
|
48 | # extra repo fields | |
49 | 'extra:<extra_key_name>', |
|
49 | 'extra:<extra_key_name>', | |
50 |
|
50 | |||
51 | # special attrs below that we handle, using multi-call |
|
51 | # special attrs below that we handle, using multi-call | |
52 | 'branch', |
|
52 | 'branch', | |
53 | 'commit_id', |
|
53 | 'commit_id', | |
54 |
|
54 | |||
55 | # pr events vars |
|
55 | # pr events vars | |
56 | 'pull_request_id', |
|
56 | 'pull_request_id', | |
57 | 'pull_request_url', |
|
57 | 'pull_request_url', | |
58 |
|
58 | |||
59 | # user who triggers the call |
|
59 | # user who triggers the call | |
60 | 'username', |
|
60 | 'username', | |
61 | 'user_id', |
|
61 | 'user_id', | |
62 |
|
62 | |||
63 | ] |
|
63 | ] | |
64 | URL_VARS = ', '.join('${' + x + '}' for x in WEBHOOK_URL_VARS) |
|
64 | URL_VARS = ', '.join('${' + x + '}' for x in WEBHOOK_URL_VARS) | |
65 |
|
65 | |||
66 |
|
66 | |||
67 | def get_auth(settings): |
|
67 | def get_auth(settings): | |
68 | from requests.auth import HTTPBasicAuth |
|
68 | from requests.auth import HTTPBasicAuth | |
69 | username = settings.get('username') |
|
69 | username = settings.get('username') | |
70 | password = settings.get('password') |
|
70 | password = settings.get('password') | |
71 | if username and password: |
|
71 | if username and password: | |
72 | return HTTPBasicAuth(username, password) |
|
72 | return HTTPBasicAuth(username, password) | |
73 | return None |
|
73 | return None | |
74 |
|
74 | |||
75 |
|
75 | |||
76 | class WebhookHandler(object): |
|
76 | class WebhookHandler(object): | |
77 | def __init__(self, template_url, secret_token, headers): |
|
77 | def __init__(self, template_url, secret_token, headers): | |
78 | self.template_url = template_url |
|
78 | self.template_url = template_url | |
79 | self.secret_token = secret_token |
|
79 | self.secret_token = secret_token | |
80 | self.headers = headers |
|
80 | self.headers = headers | |
81 |
|
81 | |||
82 | def get_base_parsed_template(self, data): |
|
82 | def get_base_parsed_template(self, data): | |
83 | """ |
|
83 | """ | |
84 | initially parses the passed in template with some common variables |
|
84 | initially parses the passed in template with some common variables | |
85 | available on ALL calls |
|
85 | available on ALL calls | |
86 | """ |
|
86 | """ | |
87 | # note: make sure to update the `WEBHOOK_URL_VARS` if this changes |
|
87 | # note: make sure to update the `WEBHOOK_URL_VARS` if this changes | |
88 | common_vars = { |
|
88 | common_vars = { | |
89 | 'repo_name': data['repo']['repo_name'], |
|
89 | 'repo_name': data['repo']['repo_name'], | |
90 | 'repo_type': data['repo']['repo_type'], |
|
90 | 'repo_type': data['repo']['repo_type'], | |
91 | 'repo_id': data['repo']['repo_id'], |
|
91 | 'repo_id': data['repo']['repo_id'], | |
92 | 'repo_url': data['repo']['url'], |
|
92 | 'repo_url': data['repo']['url'], | |
93 | 'username': data['actor']['username'], |
|
93 | 'username': data['actor']['username'], | |
94 | 'user_id': data['actor']['user_id'] |
|
94 | 'user_id': data['actor']['user_id'] | |
95 | } |
|
95 | } | |
96 |
|
96 | |||
97 | extra_vars = {} |
|
97 | extra_vars = {} | |
98 | for extra_key, extra_val in data['repo']['extra_fields'].items(): |
|
98 | for extra_key, extra_val in data['repo']['extra_fields'].items(): | |
99 | extra_vars['extra__{}'.format(extra_key)] = extra_val |
|
99 | extra_vars['extra__{}'.format(extra_key)] = extra_val | |
100 | common_vars.update(extra_vars) |
|
100 | common_vars.update(extra_vars) | |
101 |
|
101 | |||
102 | template_url = self.template_url.replace('${extra:', '${extra__') |
|
102 | template_url = self.template_url.replace('${extra:', '${extra__') | |
103 | return string.Template(template_url).safe_substitute(**common_vars) |
|
103 | return string.Template(template_url).safe_substitute(**common_vars) | |
104 |
|
104 | |||
105 | def repo_push_event_handler(self, event, data): |
|
105 | def repo_push_event_handler(self, event, data): | |
106 | url = self.get_base_parsed_template(data) |
|
106 | url = self.get_base_parsed_template(data) | |
107 | url_cals = [] |
|
107 | url_cals = [] | |
108 | branch_data = OrderedDict() |
|
108 | branch_data = OrderedDict() | |
109 | for obj in data['push']['branches']: |
|
109 | for obj in data['push']['branches']: | |
110 | branch_data[obj['name']] = obj |
|
110 | branch_data[obj['name']] = obj | |
111 |
|
111 | |||
112 | branches_commits = OrderedDict() |
|
112 | branches_commits = OrderedDict() | |
113 | for commit in data['push']['commits']: |
|
113 | for commit in data['push']['commits']: | |
114 | if commit.get('git_ref_change'): |
|
114 | if commit.get('git_ref_change'): | |
115 | # special case for GIT that allows creating tags, |
|
115 | # special case for GIT that allows creating tags, | |
116 | # deleting branches without associated commit |
|
116 | # deleting branches without associated commit | |
117 | continue |
|
117 | continue | |
118 |
|
118 | |||
119 | if commit['branch'] not in branches_commits: |
|
119 | if commit['branch'] not in branches_commits: | |
120 | branch_commits = {'branch': branch_data[commit['branch']], |
|
120 | branch_commits = {'branch': branch_data[commit['branch']], | |
121 | 'commits': []} |
|
121 | 'commits': []} | |
122 | branches_commits[commit['branch']] = branch_commits |
|
122 | branches_commits[commit['branch']] = branch_commits | |
123 |
|
123 | |||
124 | branch_commits = branches_commits[commit['branch']] |
|
124 | branch_commits = branches_commits[commit['branch']] | |
125 | branch_commits['commits'].append(commit) |
|
125 | branch_commits['commits'].append(commit) | |
126 |
|
126 | |||
127 | if '${branch}' in url: |
|
127 | if '${branch}' in url: | |
128 | # call it multiple times, for each branch if used in variables |
|
128 | # call it multiple times, for each branch if used in variables | |
129 | for branch, commit_ids in branches_commits.items(): |
|
129 | for branch, commit_ids in branches_commits.items(): | |
130 | branch_url = string.Template(url).safe_substitute(branch=branch) |
|
130 | branch_url = string.Template(url).safe_substitute(branch=branch) | |
131 | # call further down for each commit if used |
|
131 | # call further down for each commit if used | |
132 | if '${commit_id}' in branch_url: |
|
132 | if '${commit_id}' in branch_url: | |
133 | for commit_data in commit_ids['commits']: |
|
133 | for commit_data in commit_ids['commits']: | |
134 | commit_id = commit_data['raw_id'] |
|
134 | commit_id = commit_data['raw_id'] | |
135 | commit_url = string.Template(branch_url).safe_substitute( |
|
135 | commit_url = string.Template(branch_url).safe_substitute( | |
136 | commit_id=commit_id) |
|
136 | commit_id=commit_id) | |
137 | # register per-commit call |
|
137 | # register per-commit call | |
138 | log.debug( |
|
138 | log.debug( | |
139 | 'register webhook call(%s) to url %s', event, commit_url) |
|
139 | 'register webhook call(%s) to url %s', event, commit_url) | |
140 | url_cals.append((commit_url, self.secret_token, self.headers, data)) |
|
140 | url_cals.append((commit_url, self.secret_token, self.headers, data)) | |
141 |
|
141 | |||
142 | else: |
|
142 | else: | |
143 | # register per-branch call |
|
143 | # register per-branch call | |
144 | log.debug( |
|
144 | log.debug( | |
145 | 'register webhook call(%s) to url %s', event, branch_url) |
|
145 | 'register webhook call(%s) to url %s', event, branch_url) | |
146 | url_cals.append((branch_url, self.secret_token, self.headers, data)) |
|
146 | url_cals.append((branch_url, self.secret_token, self.headers, data)) | |
147 |
|
147 | |||
148 | else: |
|
148 | else: | |
149 | log.debug( |
|
149 | log.debug( | |
150 | 'register webhook call(%s) to url %s', event, url) |
|
150 | 'register webhook call(%s) to url %s', event, url) | |
151 | url_cals.append((url, self.secret_token, self.headers, data)) |
|
151 | url_cals.append((url, self.secret_token, self.headers, data)) | |
152 |
|
152 | |||
153 | return url_cals |
|
153 | return url_cals | |
154 |
|
154 | |||
155 | def repo_create_event_handler(self, event, data): |
|
155 | def repo_create_event_handler(self, event, data): | |
156 | url = self.get_base_parsed_template(data) |
|
156 | url = self.get_base_parsed_template(data) | |
157 | log.debug( |
|
157 | log.debug( | |
158 | 'register webhook call(%s) to url %s', event, url) |
|
158 | 'register webhook call(%s) to url %s', event, url) | |
159 | return [(url, self.secret_token, self.headers, data)] |
|
159 | return [(url, self.secret_token, self.headers, data)] | |
160 |
|
160 | |||
161 | def pull_request_event_handler(self, event, data): |
|
161 | def pull_request_event_handler(self, event, data): | |
162 | url = self.get_base_parsed_template(data) |
|
162 | url = self.get_base_parsed_template(data) | |
163 | log.debug( |
|
163 | log.debug( | |
164 | 'register webhook call(%s) to url %s', event, url) |
|
164 | 'register webhook call(%s) to url %s', event, url) | |
165 | url = string.Template(url).safe_substitute( |
|
165 | url = string.Template(url).safe_substitute( | |
166 | pull_request_id=data['pullrequest']['pull_request_id'], |
|
166 | pull_request_id=data['pullrequest']['pull_request_id'], | |
167 | pull_request_url=data['pullrequest']['url']) |
|
167 | pull_request_url=data['pullrequest']['url']) | |
168 | return [(url, self.secret_token, self.headers, data)] |
|
168 | return [(url, self.secret_token, self.headers, data)] | |
169 |
|
169 | |||
170 | def __call__(self, event, data): |
|
170 | def __call__(self, event, data): | |
171 | if isinstance(event, events.RepoPushEvent): |
|
171 | if isinstance(event, events.RepoPushEvent): | |
172 | return self.repo_push_event_handler(event, data) |
|
172 | return self.repo_push_event_handler(event, data) | |
173 | elif isinstance(event, events.RepoCreateEvent): |
|
173 | elif isinstance(event, events.RepoCreateEvent): | |
174 | return self.repo_create_event_handler(event, data) |
|
174 | return self.repo_create_event_handler(event, data) | |
175 | elif isinstance(event, events.PullRequestEvent): |
|
175 | elif isinstance(event, events.PullRequestEvent): | |
176 | return self.pull_request_event_handler(event, data) |
|
176 | return self.pull_request_event_handler(event, data) | |
177 | else: |
|
177 | else: | |
178 | raise ValueError('event type not supported: %s' % event) |
|
178 | raise ValueError('event type not supported: %s' % event) | |
179 |
|
179 | |||
180 |
|
180 | |||
181 | class WebhookSettingsSchema(colander.Schema): |
|
181 | class WebhookSettingsSchema(colander.Schema): | |
182 | url = colander.SchemaNode( |
|
182 | url = colander.SchemaNode( | |
183 | colander.String(), |
|
183 | colander.String(), | |
184 | title=_('Webhook URL'), |
|
184 | title=_('Webhook URL'), | |
185 | description= |
|
185 | description= | |
186 | _('URL to which Webhook should submit data. Following variables ' |
|
186 | _('URL to which Webhook should submit data. Following variables ' | |
187 | 'are allowed to be used: {vars}. Some of the variables would ' |
|
187 | 'are allowed to be used: {vars}. Some of the variables would ' | |
188 | 'trigger multiple calls, like ${{branch}} or ${{commit_id}}. ' |
|
188 | 'trigger multiple calls, like ${{branch}} or ${{commit_id}}. ' | |
189 | 'Webhook will be called as many times as unique objects in ' |
|
189 | 'Webhook will be called as many times as unique objects in ' | |
190 | 'data in such cases.').format(vars=URL_VARS), |
|
190 | 'data in such cases.').format(vars=URL_VARS), | |
191 | missing=colander.required, |
|
191 | missing=colander.required, | |
192 | required=True, |
|
192 | required=True, | |
193 | validator=colander.url, |
|
193 | validator=colander.url, | |
194 | widget=deform.widget.TextInputWidget( |
|
194 | widget=deform.widget.TextInputWidget( | |
195 | placeholder='https://www.example.com/webhook' |
|
195 | placeholder='https://www.example.com/webhook' | |
196 | ), |
|
196 | ), | |
197 | ) |
|
197 | ) | |
198 | secret_token = colander.SchemaNode( |
|
198 | secret_token = colander.SchemaNode( | |
199 | colander.String(), |
|
199 | colander.String(), | |
200 | title=_('Secret Token'), |
|
200 | title=_('Secret Token'), | |
201 | description=_('Optional string used to validate received payloads. ' |
|
201 | description=_('Optional string used to validate received payloads. ' | |
202 | 'It will be sent together with event data in JSON'), |
|
202 | 'It will be sent together with event data in JSON'), | |
203 | default='', |
|
203 | default='', | |
204 | missing='', |
|
204 | missing='', | |
205 | widget=deform.widget.TextInputWidget( |
|
205 | widget=deform.widget.TextInputWidget( | |
206 | placeholder='e.g. secret_token' |
|
206 | placeholder='e.g. secret_token' | |
207 | ), |
|
207 | ), | |
208 | ) |
|
208 | ) | |
209 | username = colander.SchemaNode( |
|
209 | username = colander.SchemaNode( | |
210 | colander.String(), |
|
210 | colander.String(), | |
211 | title=_('Username'), |
|
211 | title=_('Username'), | |
212 | description=_('Optional username to authenticate the call.'), |
|
212 | description=_('Optional username to authenticate the call.'), | |
213 | default='', |
|
213 | default='', | |
214 | missing='', |
|
214 | missing='', | |
215 | widget=deform.widget.TextInputWidget( |
|
215 | widget=deform.widget.TextInputWidget( | |
216 | placeholder='e.g. admin' |
|
216 | placeholder='e.g. admin' | |
217 | ), |
|
217 | ), | |
218 | ) |
|
218 | ) | |
219 | password = colander.SchemaNode( |
|
219 | password = colander.SchemaNode( | |
220 | colander.String(), |
|
220 | colander.String(), | |
221 | title=_('Password'), |
|
221 | title=_('Password'), | |
222 | description=_('Optional password to authenticate the call.'), |
|
222 | description=_('Optional password to authenticate the call.'), | |
223 | default='', |
|
223 | default='', | |
224 | missing='', |
|
224 | missing='', | |
225 | widget=deform.widget.PasswordWidget( |
|
225 | widget=deform.widget.PasswordWidget( | |
226 | placeholder='e.g. secret.', |
|
226 | placeholder='e.g. secret.', | |
227 | redisplay=True, |
|
227 | redisplay=True, | |
228 | ), |
|
228 | ), | |
229 | ) |
|
229 | ) | |
230 | custom_header_key = colander.SchemaNode( |
|
230 | custom_header_key = colander.SchemaNode( | |
231 | colander.String(), |
|
231 | colander.String(), | |
232 | title=_('Custom Header Key'), |
|
232 | title=_('Custom Header Key'), | |
233 | description=_('Custom Header name to be set when calling endpoint.'), |
|
233 | description=_('Custom Header name to be set when calling endpoint.'), | |
234 | default='', |
|
234 | default='', | |
235 | missing='', |
|
235 | missing='', | |
236 | widget=deform.widget.TextInputWidget( |
|
236 | widget=deform.widget.TextInputWidget( | |
237 | placeholder='e.g. Authorization' |
|
237 | placeholder='e.g. Authorization' | |
238 | ), |
|
238 | ), | |
239 | ) |
|
239 | ) | |
240 | custom_header_val = colander.SchemaNode( |
|
240 | custom_header_val = colander.SchemaNode( | |
241 | colander.String(), |
|
241 | colander.String(), | |
242 | title=_('Custom Header Value'), |
|
242 | title=_('Custom Header Value'), | |
243 | description=_('Custom Header value to be set when calling endpoint.'), |
|
243 | description=_('Custom Header value to be set when calling endpoint.'), | |
244 | default='', |
|
244 | default='', | |
245 | missing='', |
|
245 | missing='', | |
246 | widget=deform.widget.TextInputWidget( |
|
246 | widget=deform.widget.TextInputWidget( | |
247 | placeholder='e.g. RcLogin auth=xxxx' |
|
247 | placeholder='e.g. RcLogin auth=xxxx' | |
248 | ), |
|
248 | ), | |
249 | ) |
|
249 | ) | |
250 | method_type = colander.SchemaNode( |
|
250 | method_type = colander.SchemaNode( | |
251 | colander.String(), |
|
251 | colander.String(), | |
252 | title=_('Call Method'), |
|
252 | title=_('Call Method'), | |
253 | description=_('Select if the Webhook call should be made ' |
|
253 | description=_('Select if the Webhook call should be made ' | |
254 | 'with POST or GET.'), |
|
254 | 'with POST or GET.'), | |
255 | default='post', |
|
255 | default='post', | |
256 | missing='', |
|
256 | missing='', | |
257 | widget=deform.widget.RadioChoiceWidget( |
|
257 | widget=deform.widget.RadioChoiceWidget( | |
258 | values=[('get', 'GET'), ('post', 'POST')], |
|
258 | values=[('get', 'GET'), ('post', 'POST')], | |
259 | inline=True |
|
259 | inline=True | |
260 | ), |
|
260 | ), | |
261 | ) |
|
261 | ) | |
262 |
|
262 | |||
263 |
|
263 | |||
264 | class WebhookIntegrationType(IntegrationTypeBase): |
|
264 | class WebhookIntegrationType(IntegrationTypeBase): | |
265 | key = 'webhook' |
|
265 | key = 'webhook' | |
266 | display_name = _('Webhook') |
|
266 | display_name = _('Webhook') | |
267 | description = _('Post json events to a Webhook endpoint') |
|
267 | description = _('Post json events to a Webhook endpoint') | |
268 | icon = '''<?xml version="1.0" encoding="UTF-8" standalone="no"?><svg viewBox="0 0 256 239" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid"><g><path d="M119.540432,100.502743 C108.930124,118.338815 98.7646301,135.611455 88.3876025,152.753617 C85.7226696,157.154315 84.4040417,160.738531 86.5332204,166.333309 C92.4107024,181.787152 84.1193605,196.825836 68.5350381,200.908244 C53.8383677,204.759349 39.5192953,195.099955 36.6032893,179.365384 C34.0194114,165.437749 44.8274148,151.78491 60.1824106,149.608284 C61.4694072,149.424428 62.7821041,149.402681 64.944891,149.240571 C72.469175,136.623655 80.1773157,123.700312 88.3025935,110.073173 C73.611854,95.4654658 64.8677898,78.3885437 66.803227,57.2292132 C68.1712787,42.2715849 74.0527146,29.3462646 84.8033863,18.7517722 C105.393354,-1.53572199 136.805164,-4.82141828 161.048542,10.7510424 C184.333097,25.7086706 194.996783,54.8450075 185.906752,79.7822957 C179.052655,77.9239597 172.151111,76.049808 164.563565,73.9917997 C167.418285,60.1274266 165.306899,47.6765751 155.95591,37.0109123 C149.777932,29.9690049 141.850349,26.2780332 132.835442,24.9178894 C114.764113,22.1877169 97.0209573,33.7983633 91.7563309,51.5355878 C85.7800012,71.6669027 94.8245623,88.1111998 119.540432,100.502743 L119.540432,100.502743 Z" fill="#C73A63"></path><path d="M149.841194,79.4106285 C157.316054,92.5969067 164.905578,105.982857 172.427885,119.246236 C210.44865,107.483365 239.114472,128.530009 249.398582,151.063322 C261.81978,178.282014 253.328765,210.520191 228.933162,227.312431 C203.893073,244.551464 172.226236,241.605803 150.040866,219.46195 C155.694953,214.729124 161.376716,209.974552 167.44794,204.895759 C189.360489,219.088306 208.525074,218.420096 222.753207,201.614016 C234.885769,187.277151 234.622834,165.900356 222.138374,151.863988 C207.730339,135.66681 188.431321,135.172572 165.103273,150.721309 C155.426087,133.553447 145.58086,116.521995 136.210101,99.2295848 C133.05093,93.4015266 129.561608,90.0209366 122.440622,88.7873178 C110.547271,86.7253555 102.868785,76.5124151 102.408155,65.0698097 C101.955433,53.7537294 108.621719,43.5249733 119.04224,39.5394355 C129.363912,35.5914599 141.476705,38.7783085 148.419765,47.554004 C154.093621,54.7244134 155.896602,62.7943365 152.911402,71.6372484 C152.081082,74.1025091 151.00562,76.4886916 149.841194,79.4106285 L149.841194,79.4106285 Z" fill="#4B4B4B"></path><path d="M167.706921,187.209935 L121.936499,187.209935 C117.54964,205.253587 108.074103,219.821756 91.7464461,229.085759 C79.0544063,236.285822 65.3738898,238.72736 50.8136292,236.376762 C24.0061432,232.053165 2.08568567,207.920497 0.156179306,180.745298 C-2.02835403,149.962159 19.1309765,122.599149 47.3341915,116.452801 C49.2814904,123.524363 51.2485589,130.663141 53.1958579,137.716911 C27.3195169,150.919004 18.3639187,167.553089 25.6054984,188.352614 C31.9811726,206.657224 50.0900643,216.690262 69.7528413,212.809503 C89.8327554,208.847688 99.9567329,192.160226 98.7211371,165.37844 C117.75722,165.37844 136.809118,165.180745 155.847178,165.475311 C163.280522,165.591951 169.019617,164.820939 174.620326,158.267339 C183.840836,147.48306 200.811003,148.455721 210.741239,158.640984 C220.88894,169.049642 220.402609,185.79839 209.663799,195.768166 C199.302587,205.38802 182.933414,204.874012 173.240413,194.508846 C171.247644,192.37176 169.677943,189.835329 167.706921,187.209935 L167.706921,187.209935 Z" fill="#4A4A4A"></path></g></svg>''' |
|
268 | ||
|
269 | @classmethod | |||
|
270 | def icon(cls): | |||
|
271 | return '''<?xml version="1.0" encoding="UTF-8" standalone="no"?><svg viewBox="0 0 256 239" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid"><g><path d="M119.540432,100.502743 C108.930124,118.338815 98.7646301,135.611455 88.3876025,152.753617 C85.7226696,157.154315 84.4040417,160.738531 86.5332204,166.333309 C92.4107024,181.787152 84.1193605,196.825836 68.5350381,200.908244 C53.8383677,204.759349 39.5192953,195.099955 36.6032893,179.365384 C34.0194114,165.437749 44.8274148,151.78491 60.1824106,149.608284 C61.4694072,149.424428 62.7821041,149.402681 64.944891,149.240571 C72.469175,136.623655 80.1773157,123.700312 88.3025935,110.073173 C73.611854,95.4654658 64.8677898,78.3885437 66.803227,57.2292132 C68.1712787,42.2715849 74.0527146,29.3462646 84.8033863,18.7517722 C105.393354,-1.53572199 136.805164,-4.82141828 161.048542,10.7510424 C184.333097,25.7086706 194.996783,54.8450075 185.906752,79.7822957 C179.052655,77.9239597 172.151111,76.049808 164.563565,73.9917997 C167.418285,60.1274266 165.306899,47.6765751 155.95591,37.0109123 C149.777932,29.9690049 141.850349,26.2780332 132.835442,24.9178894 C114.764113,22.1877169 97.0209573,33.7983633 91.7563309,51.5355878 C85.7800012,71.6669027 94.8245623,88.1111998 119.540432,100.502743 L119.540432,100.502743 Z" fill="#C73A63"></path><path d="M149.841194,79.4106285 C157.316054,92.5969067 164.905578,105.982857 172.427885,119.246236 C210.44865,107.483365 239.114472,128.530009 249.398582,151.063322 C261.81978,178.282014 253.328765,210.520191 228.933162,227.312431 C203.893073,244.551464 172.226236,241.605803 150.040866,219.46195 C155.694953,214.729124 161.376716,209.974552 167.44794,204.895759 C189.360489,219.088306 208.525074,218.420096 222.753207,201.614016 C234.885769,187.277151 234.622834,165.900356 222.138374,151.863988 C207.730339,135.66681 188.431321,135.172572 165.103273,150.721309 C155.426087,133.553447 145.58086,116.521995 136.210101,99.2295848 C133.05093,93.4015266 129.561608,90.0209366 122.440622,88.7873178 C110.547271,86.7253555 102.868785,76.5124151 102.408155,65.0698097 C101.955433,53.7537294 108.621719,43.5249733 119.04224,39.5394355 C129.363912,35.5914599 141.476705,38.7783085 148.419765,47.554004 C154.093621,54.7244134 155.896602,62.7943365 152.911402,71.6372484 C152.081082,74.1025091 151.00562,76.4886916 149.841194,79.4106285 L149.841194,79.4106285 Z" fill="#4B4B4B"></path><path d="M167.706921,187.209935 L121.936499,187.209935 C117.54964,205.253587 108.074103,219.821756 91.7464461,229.085759 C79.0544063,236.285822 65.3738898,238.72736 50.8136292,236.376762 C24.0061432,232.053165 2.08568567,207.920497 0.156179306,180.745298 C-2.02835403,149.962159 19.1309765,122.599149 47.3341915,116.452801 C49.2814904,123.524363 51.2485589,130.663141 53.1958579,137.716911 C27.3195169,150.919004 18.3639187,167.553089 25.6054984,188.352614 C31.9811726,206.657224 50.0900643,216.690262 69.7528413,212.809503 C89.8327554,208.847688 99.9567329,192.160226 98.7211371,165.37844 C117.75722,165.37844 136.809118,165.180745 155.847178,165.475311 C163.280522,165.591951 169.019617,164.820939 174.620326,158.267339 C183.840836,147.48306 200.811003,148.455721 210.741239,158.640984 C220.88894,169.049642 220.402609,185.79839 209.663799,195.768166 C199.302587,205.38802 182.933414,204.874012 173.240413,194.508846 C171.247644,192.37176 169.677943,189.835329 167.706921,187.209935 L167.706921,187.209935 Z" fill="#4A4A4A"></path></g></svg>''' | |||
269 |
|
272 | |||
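The rows above convert the webhook type's icon from a class attribute holding the SVG markup into a classmethod that returns it, which is why the Mako templates later in this changeset switch from icon|n to icon()|n. A minimal sketch of the pattern, illustrative only and not part of the changeset:

    class ExampleIntegrationType(object):
        # Before: icon was a plain class attribute with the SVG string.
        # After: a classmethod returns the markup, so callers must invoke it.
        @classmethod
        def icon(cls):
            return '<svg>...</svg>'

    # Consumers now call the method instead of reading an attribute:
    assert ExampleIntegrationType.icon().startswith('<svg')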
270 | valid_events = [ |
|
273 | valid_events = [ | |
271 | events.PullRequestCloseEvent, |
|
274 | events.PullRequestCloseEvent, | |
272 | events.PullRequestMergeEvent, |
|
275 | events.PullRequestMergeEvent, | |
273 | events.PullRequestUpdateEvent, |
|
276 | events.PullRequestUpdateEvent, | |
274 | events.PullRequestCommentEvent, |
|
277 | events.PullRequestCommentEvent, | |
275 | events.PullRequestReviewEvent, |
|
278 | events.PullRequestReviewEvent, | |
276 | events.PullRequestCreateEvent, |
|
279 | events.PullRequestCreateEvent, | |
277 | events.RepoPushEvent, |
|
280 | events.RepoPushEvent, | |
278 | events.RepoCreateEvent, |
|
281 | events.RepoCreateEvent, | |
279 | ] |
|
282 | ] | |
280 |
|
283 | |||
281 | def settings_schema(self): |
|
284 | def settings_schema(self): | |
282 | schema = WebhookSettingsSchema() |
|
285 | schema = WebhookSettingsSchema() | |
283 | schema.add(colander.SchemaNode( |
|
286 | schema.add(colander.SchemaNode( | |
284 | colander.Set(), |
|
287 | colander.Set(), | |
285 | widget=deform.widget.CheckboxChoiceWidget( |
|
288 | widget=deform.widget.CheckboxChoiceWidget( | |
286 | values=sorted( |
|
289 | values=sorted( | |
287 | [(e.name, e.display_name) for e in self.valid_events] |
|
290 | [(e.name, e.display_name) for e in self.valid_events] | |
288 | ) |
|
291 | ) | |
289 | ), |
|
292 | ), | |
290 | description="Events activated for this integration", |
|
293 | description="Events activated for this integration", | |
291 | name='events' |
|
294 | name='events' | |
292 | )) |
|
295 | )) | |
293 | return schema |
|
296 | return schema | |
294 |
|
297 | |||
295 | def send_event(self, event): |
|
298 | def send_event(self, event): | |
296 | log.debug('handling event %s with Webhook integration %s', |
|
299 | log.debug('handling event %s with Webhook integration %s', | |
297 | event.name, self) |
|
300 | event.name, self) | |
298 |
|
301 | |||
299 | if event.__class__ not in self.valid_events: |
|
302 | if event.__class__ not in self.valid_events: | |
300 | log.debug('event not valid: %r' % event) |
|
303 | log.debug('event not valid: %r' % event) | |
301 | return |
|
304 | return | |
302 |
|
305 | |||
303 | if event.name not in self.settings['events']: |
|
306 | if event.name not in self.settings['events']: | |
304 | log.debug('event ignored: %r' % event) |
|
307 | log.debug('event ignored: %r' % event) | |
305 | return |
|
308 | return | |
306 |
|
309 | |||
307 | data = event.as_dict() |
|
310 | data = event.as_dict() | |
308 | template_url = self.settings['url'] |
|
311 | template_url = self.settings['url'] | |
309 |
|
312 | |||
310 | headers = {} |
|
313 | headers = {} | |
311 | head_key = self.settings.get('custom_header_key') |
|
314 | head_key = self.settings.get('custom_header_key') | |
312 | head_val = self.settings.get('custom_header_val') |
|
315 | head_val = self.settings.get('custom_header_val') | |
313 | if head_key and head_val: |
|
316 | if head_key and head_val: | |
314 | headers = {head_key: head_val} |
|
317 | headers = {head_key: head_val} | |
315 |
|
318 | |||
316 | handler = WebhookHandler( |
|
319 | handler = WebhookHandler( | |
317 | template_url, self.settings['secret_token'], headers) |
|
320 | template_url, self.settings['secret_token'], headers) | |
318 |
|
321 | |||
319 | url_calls = handler(event, data) |
|
322 | url_calls = handler(event, data) | |
320 | log.debug('webhook: calling following urls: %s', |
|
323 | log.debug('webhook: calling following urls: %s', | |
321 | [x[0] for x in url_calls]) |
|
324 | [x[0] for x in url_calls]) | |
322 |
|
325 | |||
323 | run_task(post_to_webhook, url_calls, self.settings) |
|
326 | run_task(post_to_webhook, url_calls, self.settings) | |
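For reference, the settings dict consulted by send_event() and post_to_webhook() carries the keys read above; a hedged sketch of its shape, where every value is a made-up placeholder and only the key names come from the code:

    example_settings = {
        'url': 'http://rc.local:8080/example-hook',  # template_url; placeholder address
        'secret_token': 'shared-secret',             # sent as 'token' in the JSON body
        'custom_header_key': 'X-Example-Header',     # optional extra header, placeholder name
        'custom_header_val': 'example-value',
        'method_type': 'post',                       # post_to_webhook() falls back to 'post'
        'log_data': True,                            # when set, the payload is logged before sending
        'events': ['repo-push'],                     # enabled event names, cf. the example data below
    }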
324 |
|
327 | |||
325 |
|
328 | |||
326 | @async_task(ignore_result=True, base=RequestContextTask) |
|
329 | @async_task(ignore_result=True, base=RequestContextTask) | |
327 | def post_to_webhook(url_calls, settings): |
|
330 | def post_to_webhook(url_calls, settings): | |
328 | """ |
|
331 | """ | |
329 | Example data:: |
|
332 | Example data:: | |
330 |
|
333 | |||
331 | {'actor': {'user_id': 2, 'username': u'admin'}, |
|
334 | {'actor': {'user_id': 2, 'username': u'admin'}, | |
332 | 'actor_ip': u'192.168.157.1', |
|
335 | 'actor_ip': u'192.168.157.1', | |
333 | 'name': 'repo-push', |
|
336 | 'name': 'repo-push', | |
334 | 'push': {'branches': [{'name': u'default', |
|
337 | 'push': {'branches': [{'name': u'default', | |
335 | 'url': 'http://rc.local:8080/hg-repo/changelog?branch=default'}], |
|
338 | 'url': 'http://rc.local:8080/hg-repo/changelog?branch=default'}], | |
336 | 'commits': [{'author': u'Marcin Kuzminski <marcin@rhodecode.com>', |
|
339 | 'commits': [{'author': u'Marcin Kuzminski <marcin@rhodecode.com>', | |
337 | 'branch': u'default', |
|
340 | 'branch': u'default', | |
338 | 'date': datetime.datetime(2017, 11, 30, 12, 59, 48), |
|
341 | 'date': datetime.datetime(2017, 11, 30, 12, 59, 48), | |
339 | 'issues': [], |
|
342 | 'issues': [], | |
340 | 'mentions': [], |
|
343 | 'mentions': [], | |
341 | 'message': u'commit Thu 30 Nov 2017 13:59:48 CET', |
|
344 | 'message': u'commit Thu 30 Nov 2017 13:59:48 CET', | |
342 | 'message_html': u'commit Thu 30 Nov 2017 13:59:48 CET', |
|
345 | 'message_html': u'commit Thu 30 Nov 2017 13:59:48 CET', | |
343 | 'message_html_title': u'commit Thu 30 Nov 2017 13:59:48 CET', |
|
346 | 'message_html_title': u'commit Thu 30 Nov 2017 13:59:48 CET', | |
344 | 'parents': [{'raw_id': '431b772a5353dad9974b810dd3707d79e3a7f6e0'}], |
|
347 | 'parents': [{'raw_id': '431b772a5353dad9974b810dd3707d79e3a7f6e0'}], | |
345 | 'permalink_url': u'http://rc.local:8080/_7/changeset/a815cc738b9651eb5ffbcfb1ce6ccd7c701a5ddf', |
|
348 | 'permalink_url': u'http://rc.local:8080/_7/changeset/a815cc738b9651eb5ffbcfb1ce6ccd7c701a5ddf', | |
346 | 'raw_id': 'a815cc738b9651eb5ffbcfb1ce6ccd7c701a5ddf', |
|
349 | 'raw_id': 'a815cc738b9651eb5ffbcfb1ce6ccd7c701a5ddf', | |
347 | 'refs': {'bookmarks': [], 'branches': [u'default'], 'tags': [u'tip']}, |
|
350 | 'refs': {'bookmarks': [], 'branches': [u'default'], 'tags': [u'tip']}, | |
348 | 'reviewers': [], |
|
351 | 'reviewers': [], | |
349 | 'revision': 9L, |
|
352 | 'revision': 9L, | |
350 | 'short_id': 'a815cc738b96', |
|
353 | 'short_id': 'a815cc738b96', | |
351 | 'url': u'http://rc.local:8080/hg-repo/changeset/a815cc738b9651eb5ffbcfb1ce6ccd7c701a5ddf'}], |
|
354 | 'url': u'http://rc.local:8080/hg-repo/changeset/a815cc738b9651eb5ffbcfb1ce6ccd7c701a5ddf'}], | |
352 | 'issues': {}}, |
|
355 | 'issues': {}}, | |
353 | 'repo': {'extra_fields': '', |
|
356 | 'repo': {'extra_fields': '', | |
354 | 'permalink_url': u'http://rc.local:8080/_7', |
|
357 | 'permalink_url': u'http://rc.local:8080/_7', | |
355 | 'repo_id': 7, |
|
358 | 'repo_id': 7, | |
356 | 'repo_name': u'hg-repo', |
|
359 | 'repo_name': u'hg-repo', | |
357 | 'repo_type': u'hg', |
|
360 | 'repo_type': u'hg', | |
358 | 'url': u'http://rc.local:8080/hg-repo'}, |
|
361 | 'url': u'http://rc.local:8080/hg-repo'}, | |
359 | 'server_url': u'http://rc.local:8080', |
|
362 | 'server_url': u'http://rc.local:8080', | |
360 | 'utc_timestamp': datetime.datetime(2017, 11, 30, 13, 0, 1, 569276) |
|
363 | 'utc_timestamp': datetime.datetime(2017, 11, 30, 13, 0, 1, 569276) | |
361 |
|
364 | |||
362 | """ |
|
365 | """ | |
363 | max_retries = 3 |
|
366 | max_retries = 3 | |
364 | retries = Retry( |
|
367 | retries = Retry( | |
365 | total=max_retries, |
|
368 | total=max_retries, | |
366 | backoff_factor=0.15, |
|
369 | backoff_factor=0.15, | |
367 | status_forcelist=[500, 502, 503, 504]) |
|
370 | status_forcelist=[500, 502, 503, 504]) | |
368 | call_headers = { |
|
371 | call_headers = { | |
369 | 'User-Agent': 'RhodeCode-webhook-caller/{}'.format( |
|
372 | 'User-Agent': 'RhodeCode-webhook-caller/{}'.format( | |
370 | rhodecode.__version__) |
|
373 | rhodecode.__version__) | |
371 | } # updated below with custom ones, allows override |
|
374 | } # updated below with custom ones, allows override | |
372 |
|
375 | |||
373 | for url, token, headers, data in url_calls: |
|
376 | for url, token, headers, data in url_calls: | |
374 | req_session = requests.Session() |
|
377 | req_session = requests.Session() | |
375 | req_session.mount( # retry max N times |
|
378 | req_session.mount( # retry max N times | |
376 | 'http://', requests.adapters.HTTPAdapter(max_retries=retries)) |
|
379 | 'http://', requests.adapters.HTTPAdapter(max_retries=retries)) | |
377 |
|
380 | |||
378 | method = settings.get('method_type') or 'post' |
|
381 | method = settings.get('method_type') or 'post' | |
379 | call_method = getattr(req_session, method) |
|
382 | call_method = getattr(req_session, method) | |
380 |
|
383 | |||
381 | headers = headers or {} |
|
384 | headers = headers or {} | |
382 | call_headers.update(headers) |
|
385 | call_headers.update(headers) | |
383 | auth = get_auth(settings) |
|
386 | auth = get_auth(settings) | |
384 |
|
387 | |||
385 | log.debug('calling Webhook with method: %s, and auth:%s', |
|
388 | log.debug('calling Webhook with method: %s, and auth:%s', | |
386 | call_method, auth) |
|
389 | call_method, auth) | |
387 | if settings.get('log_data'): |
|
390 | if settings.get('log_data'): | |
388 | log.debug('calling webhook with data: %s', data) |
|
391 | log.debug('calling webhook with data: %s', data) | |
389 | resp = call_method(url, json={ |
|
392 | resp = call_method(url, json={ | |
390 | 'token': token, |
|
393 | 'token': token, | |
391 | 'event': data |
|
394 | 'event': data | |
392 | }, headers=call_headers, auth=auth) |
|
395 | }, headers=call_headers, auth=auth) | |
393 | log.debug('Got Webhook response: %s', resp) |
|
396 | log.debug('Got Webhook response: %s', resp) | |
394 |
|
397 | |||
395 | resp.raise_for_status() # raise exception on a failed request |
|
398 | resp.raise_for_status() # raise exception on a failed request |
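To make the calling side concrete: post_to_webhook() sends the configured HTTP method (POST by default) with a JSON body of the form {'token': <secret_token>, 'event': <event dict>}, the RhodeCode-webhook-caller User-Agent, and any custom header. A minimal illustrative receiver written against Python 3's standard library follows; the port and expected secret are made-up values, not part of this changeset:

    import json
    from http.server import BaseHTTPRequestHandler, HTTPServer

    EXPECTED_SECRET = 'shared-secret'  # placeholder; should match the integration's secret_token

    class WebhookReceiver(BaseHTTPRequestHandler):
        def do_POST(self):
            length = int(self.headers.get('Content-Length', 0))
            payload = json.loads(self.rfile.read(length) or b'{}')
            # Reject calls that do not carry the expected shared secret.
            if payload.get('token') != EXPECTED_SECRET:
                self.send_response(403)
                self.end_headers()
                return
            event = payload.get('event', {})
            # 'name' and 'repo' follow the example data documented in the docstring above.
            print('event:', event.get('name'), 'repo:', event.get('repo', {}).get('repo_name'))
            self.send_response(200)
            self.end_headers()

    if __name__ == '__main__':
        HTTPServer(('127.0.0.1', 8000), WebhookReceiver).serve_forever()

Returning a non-2xx status makes resp.raise_for_status() above fail, and 500, 502, 503 and 504 responses are retried up to three times according to the Retry configuration.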
@@ -1,256 +1,256 b'' | |||||
1 | ## -*- coding: utf-8 -*- |
|
1 | ## -*- coding: utf-8 -*- | |
2 | <%inherit file="base.mako"/> |
|
2 | <%inherit file="base.mako"/> | |
3 |
|
3 | |||
4 | <%def name="breadcrumbs_links()"> |
|
4 | <%def name="breadcrumbs_links()"> | |
5 | %if c.repo: |
|
5 | %if c.repo: | |
6 | ${h.link_to('Settings',h.route_path('edit_repo', repo_name=c.repo.repo_name))} |
|
6 | ${h.link_to('Settings',h.route_path('edit_repo', repo_name=c.repo.repo_name))} | |
7 | %elif c.repo_group: |
|
7 | %elif c.repo_group: | |
8 | ${h.link_to(_('Admin'),h.route_path('admin_home'))} |
|
8 | ${h.link_to(_('Admin'),h.route_path('admin_home'))} | |
9 | » |
|
9 | » | |
10 | ${h.link_to(_('Repository Groups'),h.route_path('repo_groups'))} |
|
10 | ${h.link_to(_('Repository Groups'),h.route_path('repo_groups'))} | |
11 | » |
|
11 | » | |
12 | ${h.link_to(c.repo_group.group_name,h.route_path('edit_repo_group', repo_group_name=c.repo_group.group_name))} |
|
12 | ${h.link_to(c.repo_group.group_name,h.route_path('edit_repo_group', repo_group_name=c.repo_group.group_name))} | |
13 | %else: |
|
13 | %else: | |
14 | ${h.link_to(_('Admin'),h.route_path('admin_home'))} |
|
14 | ${h.link_to(_('Admin'),h.route_path('admin_home'))} | |
15 | » |
|
15 | » | |
16 | ${h.link_to(_('Settings'),h.route_path('admin_settings'))} |
|
16 | ${h.link_to(_('Settings'),h.route_path('admin_settings'))} | |
17 | %endif |
|
17 | %endif | |
18 | %if c.current_IntegrationType: |
|
18 | %if c.current_IntegrationType: | |
19 | » |
|
19 | » | |
20 | %if c.repo: |
|
20 | %if c.repo: | |
21 | ${h.link_to(_('Integrations'), |
|
21 | ${h.link_to(_('Integrations'), | |
22 | request.route_path(route_name='repo_integrations_home', |
|
22 | request.route_path(route_name='repo_integrations_home', | |
23 | repo_name=c.repo.repo_name))} |
|
23 | repo_name=c.repo.repo_name))} | |
24 | %elif c.repo_group: |
|
24 | %elif c.repo_group: | |
25 | ${h.link_to(_('Integrations'), |
|
25 | ${h.link_to(_('Integrations'), | |
26 | request.route_path(route_name='repo_group_integrations_home', |
|
26 | request.route_path(route_name='repo_group_integrations_home', | |
27 | repo_group_name=c.repo_group.group_name))} |
|
27 | repo_group_name=c.repo_group.group_name))} | |
28 | %else: |
|
28 | %else: | |
29 | ${h.link_to(_('Integrations'), |
|
29 | ${h.link_to(_('Integrations'), | |
30 | request.route_path(route_name='global_integrations_home'))} |
|
30 | request.route_path(route_name='global_integrations_home'))} | |
31 | %endif |
|
31 | %endif | |
32 | » |
|
32 | » | |
33 | ${c.current_IntegrationType.display_name} |
|
33 | ${c.current_IntegrationType.display_name} | |
34 | %else: |
|
34 | %else: | |
35 | » |
|
35 | » | |
36 | ${_('Integrations')} |
|
36 | ${_('Integrations')} | |
37 | %endif |
|
37 | %endif | |
38 | </%def> |
|
38 | </%def> | |
39 |
|
39 | |||
40 | <div class="panel panel-default"> |
|
40 | <div class="panel panel-default"> | |
41 | <div class="panel-heading"> |
|
41 | <div class="panel-heading"> | |
42 | <h3 class="panel-title"> |
|
42 | <h3 class="panel-title"> | |
43 | %if c.repo: |
|
43 | %if c.repo: | |
44 | ${_('Current Integrations for Repository: {repo_name}').format(repo_name=c.repo.repo_name)} |
|
44 | ${_('Current Integrations for Repository: {repo_name}').format(repo_name=c.repo.repo_name)} | |
45 | %elif c.repo_group: |
|
45 | %elif c.repo_group: | |
46 | ${_('Current Integrations for repository group: {repo_group_name}').format(repo_group_name=c.repo_group.group_name)} |
|
46 | ${_('Current Integrations for repository group: {repo_group_name}').format(repo_group_name=c.repo_group.group_name)} | |
47 | %else: |
|
47 | %else: | |
48 | ${_('Current Integrations')} |
|
48 | ${_('Current Integrations')} | |
49 | %endif |
|
49 | %endif | |
50 | </h3> |
|
50 | </h3> | |
51 | </div> |
|
51 | </div> | |
52 | <div class="panel-body"> |
|
52 | <div class="panel-body"> | |
53 | <% |
|
53 | <% | |
54 | if c.repo: |
|
54 | if c.repo: | |
55 | home_url = request.route_path('repo_integrations_home', |
|
55 | home_url = request.route_path('repo_integrations_home', | |
56 | repo_name=c.repo.repo_name) |
|
56 | repo_name=c.repo.repo_name) | |
57 | elif c.repo_group: |
|
57 | elif c.repo_group: | |
58 | home_url = request.route_path('repo_group_integrations_home', |
|
58 | home_url = request.route_path('repo_group_integrations_home', | |
59 | repo_group_name=c.repo_group.group_name) |
|
59 | repo_group_name=c.repo_group.group_name) | |
60 | else: |
|
60 | else: | |
61 | home_url = request.route_path('global_integrations_home') |
|
61 | home_url = request.route_path('global_integrations_home') | |
62 | %> |
|
62 | %> | |
63 |
|
63 | |||
64 | <a href="${home_url}" class="btn ${not c.current_IntegrationType and 'btn-primary' or ''}">${_('All')}</a> |
|
64 | <a href="${home_url}" class="btn ${not c.current_IntegrationType and 'btn-primary' or ''}">${_('All')}</a> | |
65 |
|
65 | |||
66 | %for integration_key, IntegrationType in c.available_integrations.items(): |
|
66 | %for integration_key, IntegrationType in c.available_integrations.items(): | |
67 | % if not IntegrationType.is_dummy: |
|
67 | % if not IntegrationType.is_dummy: | |
68 | <% |
|
68 | <% | |
69 | if c.repo: |
|
69 | if c.repo: | |
70 | list_url = request.route_path('repo_integrations_list', |
|
70 | list_url = request.route_path('repo_integrations_list', | |
71 | repo_name=c.repo.repo_name, |
|
71 | repo_name=c.repo.repo_name, | |
72 | integration=integration_key) |
|
72 | integration=integration_key) | |
73 | elif c.repo_group: |
|
73 | elif c.repo_group: | |
74 | list_url = request.route_path('repo_group_integrations_list', |
|
74 | list_url = request.route_path('repo_group_integrations_list', | |
75 | repo_group_name=c.repo_group.group_name, |
|
75 | repo_group_name=c.repo_group.group_name, | |
76 | integration=integration_key) |
|
76 | integration=integration_key) | |
77 | else: |
|
77 | else: | |
78 | list_url = request.route_path('global_integrations_list', |
|
78 | list_url = request.route_path('global_integrations_list', | |
79 | integration=integration_key) |
|
79 | integration=integration_key) | |
80 | %> |
|
80 | %> | |
81 | <a href="${list_url}" |
|
81 | <a href="${list_url}" | |
82 | class="btn ${c.current_IntegrationType and integration_key == c.current_IntegrationType.key and 'btn-primary' or ''}"> |
|
82 | class="btn ${c.current_IntegrationType and integration_key == c.current_IntegrationType.key and 'btn-primary' or ''}"> | |
83 | ${IntegrationType.display_name} |
|
83 | ${IntegrationType.display_name} | |
84 | </a> |
|
84 | </a> | |
85 | % endif |
|
85 | % endif | |
86 | %endfor |
|
86 | %endfor | |
87 |
|
87 | |||
88 | <% |
|
88 | <% | |
89 | integration_type = c.current_IntegrationType and c.current_IntegrationType.display_name or '' |
|
89 | integration_type = c.current_IntegrationType and c.current_IntegrationType.display_name or '' | |
90 |
|
90 | |||
91 | if c.repo: |
|
91 | if c.repo: | |
92 | create_url = h.route_path('repo_integrations_new', repo_name=c.repo.repo_name) |
|
92 | create_url = h.route_path('repo_integrations_new', repo_name=c.repo.repo_name) | |
93 | elif c.repo_group: |
|
93 | elif c.repo_group: | |
94 | create_url = h.route_path('repo_group_integrations_new', repo_group_name=c.repo_group.group_name) |
|
94 | create_url = h.route_path('repo_group_integrations_new', repo_group_name=c.repo_group.group_name) | |
95 | else: |
|
95 | else: | |
96 | create_url = h.route_path('global_integrations_new') |
|
96 | create_url = h.route_path('global_integrations_new') | |
97 | %> |
|
97 | %> | |
98 | <p class="pull-right"> |
|
98 | <p class="pull-right"> | |
99 | <a href="${create_url}" class="btn btn-small btn-success">${_(u'Create new integration')}</a> |
|
99 | <a href="${create_url}" class="btn btn-small btn-success">${_(u'Create new integration')}</a> | |
100 | </p> |
|
100 | </p> | |
101 |
|
101 | |||
102 | <table class="rctable integrations"> |
|
102 | <table class="rctable integrations"> | |
103 | <thead> |
|
103 | <thead> | |
104 | <tr> |
|
104 | <tr> | |
105 | <th><a href="?sort=enabled:${c.rev_sort_dir}">${_('Enabled')}</a></th> |
|
105 | <th><a href="?sort=enabled:${c.rev_sort_dir}">${_('Enabled')}</a></th> | |
106 | <th><a href="?sort=name:${c.rev_sort_dir}">${_('Name')}</a></th> |
|
106 | <th><a href="?sort=name:${c.rev_sort_dir}">${_('Name')}</a></th> | |
107 | <th colspan="2"><a href="?sort=integration_type:${c.rev_sort_dir}">${_('Type')}</a></th> |
|
107 | <th colspan="2"><a href="?sort=integration_type:${c.rev_sort_dir}">${_('Type')}</a></th> | |
108 | <th><a href="?sort=scope:${c.rev_sort_dir}">${_('Scope')}</a></th> |
|
108 | <th><a href="?sort=scope:${c.rev_sort_dir}">${_('Scope')}</a></th> | |
109 | <th>${_('Actions')}</th> |
|
109 | <th>${_('Actions')}</th> | |
110 | <th></th> |
|
110 | <th></th> | |
111 | </tr> |
|
111 | </tr> | |
112 | </thead> |
|
112 | </thead> | |
113 | <tbody> |
|
113 | <tbody> | |
114 | %if not c.integrations_list: |
|
114 | %if not c.integrations_list: | |
115 | <tr> |
|
115 | <tr> | |
116 | <td colspan="7"> |
|
116 | <td colspan="7"> | |
117 |
|
117 | |||
118 | %if c.repo: |
|
118 | %if c.repo: | |
119 | ${_('No {type} integrations for repo {repo} exist yet.').format(type=integration_type, repo=c.repo.repo_name)} |
|
119 | ${_('No {type} integrations for repo {repo} exist yet.').format(type=integration_type, repo=c.repo.repo_name)} | |
120 | %elif c.repo_group: |
|
120 | %elif c.repo_group: | |
121 | ${_('No {type} integrations for repogroup {repogroup} exist yet.').format(type=integration_type, repogroup=c.repo_group.group_name)} |
|
121 | ${_('No {type} integrations for repogroup {repogroup} exist yet.').format(type=integration_type, repogroup=c.repo_group.group_name)} | |
122 | %else: |
|
122 | %else: | |
123 | ${_('No {type} integrations exist yet.').format(type=integration_type)} |
|
123 | ${_('No {type} integrations exist yet.').format(type=integration_type)} | |
124 | %endif |
|
124 | %endif | |
125 |
|
125 | |||
126 | %if c.current_IntegrationType: |
|
126 | %if c.current_IntegrationType: | |
127 | <% |
|
127 | <% | |
128 | if c.repo: |
|
128 | if c.repo: | |
129 | create_url = h.route_path('repo_integrations_create', repo_name=c.repo.repo_name, integration=c.current_IntegrationType.key) |
|
129 | create_url = h.route_path('repo_integrations_create', repo_name=c.repo.repo_name, integration=c.current_IntegrationType.key) | |
130 | elif c.repo_group: |
|
130 | elif c.repo_group: | |
131 | create_url = h.route_path('repo_group_integrations_create', repo_group_name=c.repo_group.group_name, integration=c.current_IntegrationType.key) |
|
131 | create_url = h.route_path('repo_group_integrations_create', repo_group_name=c.repo_group.group_name, integration=c.current_IntegrationType.key) | |
132 | else: |
|
132 | else: | |
133 | create_url = h.route_path('global_integrations_create', integration=c.current_IntegrationType.key) |
|
133 | create_url = h.route_path('global_integrations_create', integration=c.current_IntegrationType.key) | |
134 | %> |
|
134 | %> | |
135 | %endif |
|
135 | %endif | |
136 |
|
136 | |||
137 | <a href="${create_url}">${_(u'Create one')}</a> |
|
137 | <a href="${create_url}">${_(u'Create one')}</a> | |
138 | </td> |
|
138 | </td> | |
139 | </tr> |
|
139 | </tr> | |
140 | %endif |
|
140 | %endif | |
141 | %for IntegrationType, integration in c.integrations_list: |
|
141 | %for IntegrationType, integration in c.integrations_list: | |
142 | <tr id="integration_${integration.integration_id}"> |
|
142 | <tr id="integration_${integration.integration_id}"> | |
143 | <td class="td-enabled"> |
|
143 | <td class="td-enabled"> | |
144 | %if integration.enabled: |
|
144 | %if integration.enabled: | |
145 | <div class="flag_status approved pull-left"></div> |
|
145 | <div class="flag_status approved pull-left"></div> | |
146 | %else: |
|
146 | %else: | |
147 | <div class="flag_status rejected pull-left"></div> |
|
147 | <div class="flag_status rejected pull-left"></div> | |
148 | %endif |
|
148 | %endif | |
149 | </td> |
|
149 | </td> | |
150 | <td class="td-description"> |
|
150 | <td class="td-description"> | |
151 | ${integration.name} |
|
151 | ${integration.name} | |
152 | </td> |
|
152 | </td> | |
153 | <td class="td-icon"> |
|
153 | <td class="td-icon"> | |
154 | %if integration.integration_type in c.available_integrations: |
|
154 | %if integration.integration_type in c.available_integrations: | |
155 | <div class="integration-icon"> |
|
155 | <div class="integration-icon"> | |
156 | ${c.available_integrations[integration.integration_type].icon|n} |
|
156 | ${c.available_integrations[integration.integration_type].icon()|n} | |
157 | </div> |
|
157 | </div> | |
158 | %else: |
|
158 | %else: | |
159 | ? |
|
159 | ? | |
160 | %endif |
|
160 | %endif | |
161 | </td> |
|
161 | </td> | |
162 | <td class="td-type"> |
|
162 | <td class="td-type"> | |
163 | ${integration.integration_type} |
|
163 | ${integration.integration_type} | |
164 | </td> |
|
164 | </td> | |
165 | <td class="td-scope"> |
|
165 | <td class="td-scope"> | |
166 | %if integration.repo: |
|
166 | %if integration.repo: | |
167 | <a href="${h.route_path('repo_summary', repo_name=integration.repo.repo_name)}"> |
|
167 | <a href="${h.route_path('repo_summary', repo_name=integration.repo.repo_name)}"> | |
168 | ${_('repo')}:${integration.repo.repo_name} |
|
168 | ${_('repo')}:${integration.repo.repo_name} | |
169 | </a> |
|
169 | </a> | |
170 | %elif integration.repo_group: |
|
170 | %elif integration.repo_group: | |
171 | <a href="${h.route_path('repo_group_home', repo_group_name=integration.repo_group.group_name)}"> |
|
171 | <a href="${h.route_path('repo_group_home', repo_group_name=integration.repo_group.group_name)}"> | |
172 | ${_('repogroup')}:${integration.repo_group.group_name} |
|
172 | ${_('repogroup')}:${integration.repo_group.group_name} | |
173 | %if integration.child_repos_only: |
|
173 | %if integration.child_repos_only: | |
174 | ${_('child repos only')} |
|
174 | ${_('child repos only')} | |
175 | %else: |
|
175 | %else: | |
176 | ${_('cascade to all')} |
|
176 | ${_('cascade to all')} | |
177 | %endif |
|
177 | %endif | |
178 | </a> |
|
178 | </a> | |
179 | %else: |
|
179 | %else: | |
180 | %if integration.child_repos_only: |
|
180 | %if integration.child_repos_only: | |
181 | ${_('top level repos only')} |
|
181 | ${_('top level repos only')} | |
182 | %else: |
|
182 | %else: | |
183 | ${_('global')} |
|
183 | ${_('global')} | |
184 | %endif |
|
184 | %endif | |
185 | </td> |
|
185 | </td> | |
186 | %endif |
|
186 | %endif | |
187 | <td class="td-action"> |
|
187 | <td class="td-action"> | |
188 | %if not IntegrationType: |
|
188 | %if not IntegrationType: | |
189 | ${_('unknown integration')} |
|
189 | ${_('unknown integration')} | |
190 | %else: |
|
190 | %else: | |
191 | <% |
|
191 | <% | |
192 | if c.repo: |
|
192 | if c.repo: | |
193 | edit_url = request.route_path('repo_integrations_edit', |
|
193 | edit_url = request.route_path('repo_integrations_edit', | |
194 | repo_name=c.repo.repo_name, |
|
194 | repo_name=c.repo.repo_name, | |
195 | integration=integration.integration_type, |
|
195 | integration=integration.integration_type, | |
196 | integration_id=integration.integration_id) |
|
196 | integration_id=integration.integration_id) | |
197 | elif c.repo_group: |
|
197 | elif c.repo_group: | |
198 | edit_url = request.route_path('repo_group_integrations_edit', |
|
198 | edit_url = request.route_path('repo_group_integrations_edit', | |
199 | repo_group_name=c.repo_group.group_name, |
|
199 | repo_group_name=c.repo_group.group_name, | |
200 | integration=integration.integration_type, |
|
200 | integration=integration.integration_type, | |
201 | integration_id=integration.integration_id) |
|
201 | integration_id=integration.integration_id) | |
202 | else: |
|
202 | else: | |
203 | edit_url = request.route_path('global_integrations_edit', |
|
203 | edit_url = request.route_path('global_integrations_edit', | |
204 | integration=integration.integration_type, |
|
204 | integration=integration.integration_type, | |
205 | integration_id=integration.integration_id) |
|
205 | integration_id=integration.integration_id) | |
206 | %> |
|
206 | %> | |
207 | <div class="grid_edit"> |
|
207 | <div class="grid_edit"> | |
208 | <a href="${edit_url}">${_('Edit')}</a> |
|
208 | <a href="${edit_url}">${_('Edit')}</a> | |
209 | </div> |
|
209 | </div> | |
210 | <div class="grid_delete"> |
|
210 | <div class="grid_delete"> | |
211 | <a href="${edit_url}" |
|
211 | <a href="${edit_url}" | |
212 | class="btn btn-link btn-danger delete_integration_entry" |
|
212 | class="btn btn-link btn-danger delete_integration_entry" | |
213 | data-desc="${integration.name}" |
|
213 | data-desc="${integration.name}" | |
214 | data-uid="${integration.integration_id}"> |
|
214 | data-uid="${integration.integration_id}"> | |
215 | ${_('Delete')} |
|
215 | ${_('Delete')} | |
216 | </a> |
|
216 | </a> | |
217 | </div> |
|
217 | </div> | |
218 | %endif |
|
218 | %endif | |
219 | </td> |
|
219 | </td> | |
220 | </tr> |
|
220 | </tr> | |
221 | %endfor |
|
221 | %endfor | |
222 | <tr id="last-row"></tr> |
|
222 | <tr id="last-row"></tr> | |
223 | </tbody> |
|
223 | </tbody> | |
224 | </table> |
|
224 | </table> | |
225 | <div class="integrations-paginator"> |
|
225 | <div class="integrations-paginator"> | |
226 | <div class="pagination-wh pagination-left"> |
|
226 | <div class="pagination-wh pagination-left"> | |
227 | ${c.integrations_list.pager('$link_previous ~2~ $link_next')} |
|
227 | ${c.integrations_list.pager('$link_previous ~2~ $link_next')} | |
228 | </div> |
|
228 | </div> | |
229 | </div> |
|
229 | </div> | |
230 | </div> |
|
230 | </div> | |
231 | </div> |
|
231 | </div> | |
232 | <script type="text/javascript"> |
|
232 | <script type="text/javascript"> | |
233 | var delete_integration = function(entry) { |
|
233 | var delete_integration = function(entry) { | |
234 | if (confirm("Confirm to remove this integration: "+$(entry).data('desc'))) { |
|
234 | if (confirm("Confirm to remove this integration: "+$(entry).data('desc'))) { | |
235 | var request = $.ajax({ |
|
235 | var request = $.ajax({ | |
236 | type: "POST", |
|
236 | type: "POST", | |
237 | url: $(entry).attr('href'), |
|
237 | url: $(entry).attr('href'), | |
238 | data: { |
|
238 | data: { | |
239 | 'delete': 'delete', |
|
239 | 'delete': 'delete', | |
240 | 'csrf_token': CSRF_TOKEN |
|
240 | 'csrf_token': CSRF_TOKEN | |
241 | }, |
|
241 | }, | |
242 | success: function(){ |
|
242 | success: function(){ | |
243 | location.reload(); |
|
243 | location.reload(); | |
244 | }, |
|
244 | }, | |
245 | error: function(data, textStatus, errorThrown){ |
|
245 | error: function(data, textStatus, errorThrown){ | |
246 | alert("Error while deleting entry.\nError code {0} ({1}). URL: {2}".format(data.status,data.statusText,$(entry)[0].url)); |
|
246 | alert("Error while deleting entry.\nError code {0} ({1}). URL: {2}".format(data.status,data.statusText,$(entry)[0].url)); | |
247 | } |
|
247 | } | |
248 | }); |
|
248 | }); | |
249 | }; |
|
249 | }; | |
250 | }; |
|
250 | }; | |
251 |
|
251 | |||
252 | $('.delete_integration_entry').on('click', function(e){ |
|
252 | $('.delete_integration_entry').on('click', function(e){ | |
253 | e.preventDefault(); |
|
253 | e.preventDefault(); | |
254 | delete_integration(this); |
|
254 | delete_integration(this); | |
255 | }); |
|
255 | }); | |
256 | </script> No newline at end of file |
|
256 | </script> |
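The delete_integration() handler above posts a form-encoded body carrying a 'delete' field and the session's CSRF token back to the integration's edit URL. A hedged sketch of the same request issued from Python; the URL and token are placeholders, and an authenticated session is required in practice:

    import requests

    # Placeholders: substitute the integration's real edit URL and the CSRF
    # token of an authenticated session, otherwise the server rejects the call.
    edit_url = 'http://rc.local:8080/example/integration/edit-url'
    resp = requests.post(edit_url, data={'delete': 'delete', 'csrf_token': '<csrf token>'})
    resp.raise_for_status()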
@@ -1,68 +1,68 b'' | |||||
1 | ## -*- coding: utf-8 -*- |
|
1 | ## -*- coding: utf-8 -*- | |
2 | <%inherit file="base.mako"/> |
|
2 | <%inherit file="base.mako"/> | |
3 | <%namespace name="widgets" file="/widgets.mako"/> |
|
3 | <%namespace name="widgets" file="/widgets.mako"/> | |
4 |
|
4 | |||
5 | <%def name="breadcrumbs_links()"> |
|
5 | <%def name="breadcrumbs_links()"> | |
6 | %if c.repo: |
|
6 | %if c.repo: | |
7 | ${h.link_to('Settings',h.route_path('edit_repo', repo_name=c.repo.repo_name))} |
|
7 | ${h.link_to('Settings',h.route_path('edit_repo', repo_name=c.repo.repo_name))} | |
8 | » |
|
8 | » | |
9 | ${h.link_to(_('Integrations'),request.route_url(route_name='repo_integrations_home', repo_name=c.repo.repo_name))} |
|
9 | ${h.link_to(_('Integrations'),request.route_url(route_name='repo_integrations_home', repo_name=c.repo.repo_name))} | |
10 | %elif c.repo_group: |
|
10 | %elif c.repo_group: | |
11 | ${h.link_to(_('Admin'),h.route_path('admin_home'))} |
|
11 | ${h.link_to(_('Admin'),h.route_path('admin_home'))} | |
12 | » |
|
12 | » | |
13 | ${h.link_to(_('Repository Groups'),h.route_path('repo_groups'))} |
|
13 | ${h.link_to(_('Repository Groups'),h.route_path('repo_groups'))} | |
14 | » |
|
14 | » | |
15 | ${h.link_to(c.repo_group.group_name,h.route_path('edit_repo_group', repo_group_name=c.repo_group.group_name))} |
|
15 | ${h.link_to(c.repo_group.group_name,h.route_path('edit_repo_group', repo_group_name=c.repo_group.group_name))} | |
16 | » |
|
16 | » | |
17 | ${h.link_to(_('Integrations'),request.route_url(route_name='repo_group_integrations_home', repo_group_name=c.repo_group.group_name))} |
|
17 | ${h.link_to(_('Integrations'),request.route_url(route_name='repo_group_integrations_home', repo_group_name=c.repo_group.group_name))} | |
18 | %else: |
|
18 | %else: | |
19 | ${h.link_to(_('Admin'),h.route_path('admin_home'))} |
|
19 | ${h.link_to(_('Admin'),h.route_path('admin_home'))} | |
20 | » |
|
20 | » | |
21 | ${h.link_to(_('Settings'),h.route_path('admin_settings'))} |
|
21 | ${h.link_to(_('Settings'),h.route_path('admin_settings'))} | |
22 | » |
|
22 | » | |
23 | ${h.link_to(_('Integrations'),request.route_url(route_name='global_integrations_home'))} |
|
23 | ${h.link_to(_('Integrations'),request.route_url(route_name='global_integrations_home'))} | |
24 | %endif |
|
24 | %endif | |
25 | » |
|
25 | » | |
26 | ${_('Create new integration')} |
|
26 | ${_('Create new integration')} | |
27 | </%def> |
|
27 | </%def> | |
28 | <%widgets:panel class_='integrations'> |
|
28 | <%widgets:panel class_='integrations'> | |
29 | <%def name="title()"> |
|
29 | <%def name="title()"> | |
30 | %if c.repo: |
|
30 | %if c.repo: | |
31 | ${_('Create New Integration for repository: {repo_name}').format(repo_name=c.repo.repo_name)} |
|
31 | ${_('Create New Integration for repository: {repo_name}').format(repo_name=c.repo.repo_name)} | |
32 | %elif c.repo_group: |
|
32 | %elif c.repo_group: | |
33 | ${_('Create New Integration for repository group: {repo_group_name}').format(repo_group_name=c.repo_group.group_name)} |
|
33 | ${_('Create New Integration for repository group: {repo_group_name}').format(repo_group_name=c.repo_group.group_name)} | |
34 | %else: |
|
34 | %else: | |
35 | ${_('Create New Global Integration')} |
|
35 | ${_('Create New Global Integration')} | |
36 | %endif |
|
36 | %endif | |
37 | </%def> |
|
37 | </%def> | |
38 |
|
38 | |||
39 | %for integration, IntegrationObject in c.available_integrations.items(): |
|
39 | %for integration, IntegrationObject in c.available_integrations.items(): | |
40 | <% |
|
40 | <% | |
41 | if c.repo: |
|
41 | if c.repo: | |
42 | create_url = request.route_path('repo_integrations_create', |
|
42 | create_url = request.route_path('repo_integrations_create', | |
43 | repo_name=c.repo.repo_name, |
|
43 | repo_name=c.repo.repo_name, | |
44 | integration=integration) |
|
44 | integration=integration) | |
45 | elif c.repo_group: |
|
45 | elif c.repo_group: | |
46 | create_url = request.route_path('repo_group_integrations_create', |
|
46 | create_url = request.route_path('repo_group_integrations_create', | |
47 | repo_group_name=c.repo_group.group_name, |
|
47 | repo_group_name=c.repo_group.group_name, | |
48 | integration=integration) |
|
48 | integration=integration) | |
49 | else: |
|
49 | else: | |
50 | create_url = request.route_path('global_integrations_create', |
|
50 | create_url = request.route_path('global_integrations_create', | |
51 | integration=integration) |
|
51 | integration=integration) | |
52 | if IntegrationObject.is_dummy: |
|
52 | if IntegrationObject.is_dummy: | |
53 | create_url = request.current_route_path() |
|
53 | create_url = request.current_route_path() | |
54 | %> |
|
54 | %> | |
55 | <a href="${create_url}" class="integration-box ${'dummy-integration' if IntegrationObject.is_dummy else ''}"> |
|
55 | <a href="${create_url}" class="integration-box ${'dummy-integration' if IntegrationObject.is_dummy else ''}"> | |
56 | <%widgets:panel> |
|
56 | <%widgets:panel> | |
57 | <h2> |
|
57 | <h2> | |
58 | <div class="integration-icon"> |
|
58 | <div class="integration-icon"> | |
59 | ${IntegrationObject.icon|n} |
|
59 | ${IntegrationObject.icon()|n} | |
60 | </div> |
|
60 | </div> | |
61 | ${IntegrationObject.display_name} |
|
61 | ${IntegrationObject.display_name} | |
62 | </h2> |
|
62 | </h2> | |
63 | ${IntegrationObject.description or _('No description available')} |
|
63 | ${IntegrationObject.description or _('No description available')} | |
64 | </%widgets:panel> |
|
64 | </%widgets:panel> | |
65 | </a> |
|
65 | </a> | |
66 | %endfor |
|
66 | %endfor | |
67 | <div style="clear:both"></div> |
|
67 | <div style="clear:both"></div> | |
68 | </%widgets:panel> |
|
68 | </%widgets:panel> |
@@ -1,1858 +1,1861 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2018 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2018 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import collections |
|
21 | import collections | |
22 | import datetime |
|
22 | import datetime | |
23 | import hashlib |
|
23 | import hashlib | |
24 | import os |
|
24 | import os | |
25 | import re |
|
25 | import re | |
26 | import pprint |
|
26 | import pprint | |
27 | import shutil |
|
27 | import shutil | |
28 | import socket |
|
28 | import socket | |
29 | import subprocess32 |
|
29 | import subprocess32 | |
30 | import time |
|
30 | import time | |
31 | import uuid |
|
31 | import uuid | |
32 | import dateutil.tz |
|
32 | import dateutil.tz | |
33 | import functools |
|
33 | import functools | |
34 |
|
34 | |||
35 | import mock |
|
35 | import mock | |
36 | import pyramid.testing |
|
36 | import pyramid.testing | |
37 | import pytest |
|
37 | import pytest | |
38 | import colander |
|
38 | import colander | |
39 | import requests |
|
39 | import requests | |
40 | import pyramid.paster |
|
40 | import pyramid.paster | |
41 |
|
41 | |||
42 | import rhodecode |
|
42 | import rhodecode | |
43 | from rhodecode.lib.utils2 import AttributeDict |
|
43 | from rhodecode.lib.utils2 import AttributeDict | |
44 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
44 | from rhodecode.model.changeset_status import ChangesetStatusModel | |
45 | from rhodecode.model.comment import CommentsModel |
|
45 | from rhodecode.model.comment import CommentsModel | |
46 | from rhodecode.model.db import ( |
|
46 | from rhodecode.model.db import ( | |
47 | PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup, |
|
47 | PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup, | |
48 | UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi) |
|
48 | UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi) | |
49 | from rhodecode.model.meta import Session |
|
49 | from rhodecode.model.meta import Session | |
50 | from rhodecode.model.pull_request import PullRequestModel |
|
50 | from rhodecode.model.pull_request import PullRequestModel | |
51 | from rhodecode.model.repo import RepoModel |
|
51 | from rhodecode.model.repo import RepoModel | |
52 | from rhodecode.model.repo_group import RepoGroupModel |
|
52 | from rhodecode.model.repo_group import RepoGroupModel | |
53 | from rhodecode.model.user import UserModel |
|
53 | from rhodecode.model.user import UserModel | |
54 | from rhodecode.model.settings import VcsSettingsModel |
|
54 | from rhodecode.model.settings import VcsSettingsModel | |
55 | from rhodecode.model.user_group import UserGroupModel |
|
55 | from rhodecode.model.user_group import UserGroupModel | |
56 | from rhodecode.model.integration import IntegrationModel |
|
56 | from rhodecode.model.integration import IntegrationModel | |
57 | from rhodecode.integrations import integration_type_registry |
|
57 | from rhodecode.integrations import integration_type_registry | |
58 | from rhodecode.integrations.types.base import IntegrationTypeBase |
|
58 | from rhodecode.integrations.types.base import IntegrationTypeBase | |
59 | from rhodecode.lib.utils import repo2db_mapper |
|
59 | from rhodecode.lib.utils import repo2db_mapper | |
60 | from rhodecode.lib.vcs import create_vcsserver_proxy |
|
60 | from rhodecode.lib.vcs import create_vcsserver_proxy | |
61 | from rhodecode.lib.vcs.backends import get_backend |
|
61 | from rhodecode.lib.vcs.backends import get_backend | |
62 | from rhodecode.lib.vcs.nodes import FileNode |
|
62 | from rhodecode.lib.vcs.nodes import FileNode | |
63 | from rhodecode.tests import ( |
|
63 | from rhodecode.tests import ( | |
64 | login_user_session, get_new_dir, utils, TESTS_TMP_PATH, |
|
64 | login_user_session, get_new_dir, utils, TESTS_TMP_PATH, | |
65 | TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN, |
|
65 | TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN, | |
66 | TEST_USER_REGULAR_PASS) |
|
66 | TEST_USER_REGULAR_PASS) | |
67 | from rhodecode.tests.utils import CustomTestApp, set_anonymous_access |
|
67 | from rhodecode.tests.utils import CustomTestApp, set_anonymous_access | |
68 | from rhodecode.tests.fixture import Fixture |
|
68 | from rhodecode.tests.fixture import Fixture | |
69 | from rhodecode.config import utils as config_utils |
|
69 | from rhodecode.config import utils as config_utils | |
70 |
|
70 | |||
71 | def _split_comma(value): |
|
71 | def _split_comma(value): | |
72 | return value.split(',') |
|
72 | return value.split(',') | |
73 |
|
73 | |||
74 |
|
74 | |||
75 | def pytest_addoption(parser): |
|
75 | def pytest_addoption(parser): | |
76 | parser.addoption( |
|
76 | parser.addoption( | |
77 | '--keep-tmp-path', action='store_true', |
|
77 | '--keep-tmp-path', action='store_true', | |
78 | help="Keep the test temporary directories") |
|
78 | help="Keep the test temporary directories") | |
79 | parser.addoption( |
|
79 | parser.addoption( | |
80 | '--backends', action='store', type=_split_comma, |
|
80 | '--backends', action='store', type=_split_comma, | |
81 | default=['git', 'hg', 'svn'], |
|
81 | default=['git', 'hg', 'svn'], | |
82 | help="Select which backends to test for backend specific tests.") |
|
82 | help="Select which backends to test for backend specific tests.") | |
83 | parser.addoption( |
|
83 | parser.addoption( | |
84 | '--dbs', action='store', type=_split_comma, |
|
84 | '--dbs', action='store', type=_split_comma, | |
85 | default=['sqlite'], |
|
85 | default=['sqlite'], | |
86 | help="Select which database to test for database specific tests. " |
|
86 | help="Select which database to test for database specific tests. " | |
87 | "Possible options are sqlite,postgres,mysql") |
|
87 | "Possible options are sqlite,postgres,mysql") | |
88 | parser.addoption( |
|
88 | parser.addoption( | |
89 | '--appenlight', '--ae', action='store_true', |
|
89 | '--appenlight', '--ae', action='store_true', | |
90 | help="Track statistics in appenlight.") |
|
90 | help="Track statistics in appenlight.") | |
91 | parser.addoption( |
|
91 | parser.addoption( | |
92 | '--appenlight-api-key', '--ae-key', |
|
92 | '--appenlight-api-key', '--ae-key', | |
93 | help="API key for Appenlight.") |
|
93 | help="API key for Appenlight.") | |
94 | parser.addoption( |
|
94 | parser.addoption( | |
95 | '--appenlight-url', '--ae-url', |
|
95 | '--appenlight-url', '--ae-url', | |
96 | default="https://ae.rhodecode.com", |
|
96 | default="https://ae.rhodecode.com", | |
97 | help="Appenlight service URL, defaults to https://ae.rhodecode.com") |
|
97 | help="Appenlight service URL, defaults to https://ae.rhodecode.com") | |
98 | parser.addoption( |
|
98 | parser.addoption( | |
99 | '--sqlite-connection-string', action='store', |
|
99 | '--sqlite-connection-string', action='store', | |
100 | default='', help="Connection string for the dbs tests with SQLite") |
|
100 | default='', help="Connection string for the dbs tests with SQLite") | |
101 | parser.addoption( |
|
101 | parser.addoption( | |
102 | '--postgres-connection-string', action='store', |
|
102 | '--postgres-connection-string', action='store', | |
103 | default='', help="Connection string for the dbs tests with Postgres") |
|
103 | default='', help="Connection string for the dbs tests with Postgres") | |
104 | parser.addoption( |
|
104 | parser.addoption( | |
105 | '--mysql-connection-string', action='store', |
|
105 | '--mysql-connection-string', action='store', | |
106 | default='', help="Connection string for the dbs tests with MySQL") |
|
106 | default='', help="Connection string for the dbs tests with MySQL") | |
107 | parser.addoption( |
|
107 | parser.addoption( | |
108 | '--repeat', type=int, default=100, |
|
108 | '--repeat', type=int, default=100, | |
109 | help="Number of repetitions in performance tests.") |
|
109 | help="Number of repetitions in performance tests.") | |
110 |
|
110 | |||
111 |
|
111 | |||
112 | def pytest_configure(config): |
|
112 | def pytest_configure(config): | |
113 | from rhodecode.config import patches |
|
113 | from rhodecode.config import patches | |
114 |
|
114 | |||
115 |
|
115 | |||
116 | def pytest_collection_modifyitems(session, config, items): |
|
116 | def pytest_collection_modifyitems(session, config, items): | |
117 | # nottest marked, compare nose, used for transition from nose to pytest |
|
117 | # nottest marked, compare nose, used for transition from nose to pytest | |
118 | remaining = [ |
|
118 | remaining = [ | |
119 | i for i in items if getattr(i.obj, '__test__', True)] |
|
119 | i for i in items if getattr(i.obj, '__test__', True)] | |
120 | items[:] = remaining |
|
120 | items[:] = remaining | |
121 |
|
121 | |||
122 |
|
122 | |||
123 | def pytest_generate_tests(metafunc): |
|
123 | def pytest_generate_tests(metafunc): | |
124 | # Support test generation based on --backend parameter |
|
124 | # Support test generation based on --backend parameter | |
125 | if 'backend_alias' in metafunc.fixturenames: |
|
125 | if 'backend_alias' in metafunc.fixturenames: | |
126 | backends = get_backends_from_metafunc(metafunc) |
|
126 | backends = get_backends_from_metafunc(metafunc) | |
127 | scope = None |
|
127 | scope = None | |
128 | if not backends: |
|
128 | if not backends: | |
129 | pytest.skip("Not enabled for any of selected backends") |
|
129 | pytest.skip("Not enabled for any of selected backends") | |
130 | metafunc.parametrize('backend_alias', backends, scope=scope) |
|
130 | metafunc.parametrize('backend_alias', backends, scope=scope) | |
131 | elif hasattr(metafunc.function, 'backends'): |
|
131 | elif hasattr(metafunc.function, 'backends'): | |
132 | backends = get_backends_from_metafunc(metafunc) |
|
132 | backends = get_backends_from_metafunc(metafunc) | |
133 | if not backends: |
|
133 | if not backends: | |
134 | pytest.skip("Not enabled for any of selected backends") |
|
134 | pytest.skip("Not enabled for any of selected backends") | |
135 |
|
135 | |||
136 |
|
136 | |||
137 | def get_backends_from_metafunc(metafunc): |
|
137 | def get_backends_from_metafunc(metafunc): | |
138 | requested_backends = set(metafunc.config.getoption('--backends')) |
|
138 | requested_backends = set(metafunc.config.getoption('--backends')) | |
139 | if hasattr(metafunc.function, 'backends'): |
|
139 | if hasattr(metafunc.function, 'backends'): | |
140 | # Supported backends by this test function, created from |
|
140 | # Supported backends by this test function, created from | |
141 | # pytest.mark.backends |
|
141 | # pytest.mark.backends | |
142 | backends = metafunc.function.backends.args |
|
142 | backends = metafunc.function.backends.args | |
143 | elif hasattr(metafunc.cls, 'backend_alias'): |
|
143 | elif hasattr(metafunc.cls, 'backend_alias'): | |
144 | # Support class attribute "backend_alias", this is mainly |
|
144 | # Support class attribute "backend_alias", this is mainly | |
145 | # for legacy reasons for tests not yet using pytest.mark.backends |
|
145 | # for legacy reasons for tests not yet using pytest.mark.backends | |
146 | backends = [metafunc.cls.backend_alias] |
|
146 | backends = [metafunc.cls.backend_alias] | |
147 | else: |
|
147 | else: | |
148 | backends = metafunc.config.getoption('--backends') |
|
148 | backends = metafunc.config.getoption('--backends') | |
149 | return requested_backends.intersection(backends) |
|
149 | return requested_backends.intersection(backends) | |
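A hedged usage sketch of the two hooks above, not part of this changeset: pytest_generate_tests() parametrizes the backend_alias argument with the intersection of the aliases named in a pytest.mark.backends marker and those passed on the command line through --backends, and it skips the test when that intersection is empty:

    import pytest

    @pytest.mark.backends('git', 'hg')      # intersected with the --backends option
    def test_only_runs_for_git_and_hg(backend_alias):
        # backend_alias receives one of the surviving aliases per parametrized run.
        assert backend_alias in ('git', 'hg')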
150 |
|
150 | |||
151 |
|
151 | |||
152 | @pytest.fixture(scope='session', autouse=True) |
|
152 | @pytest.fixture(scope='session', autouse=True) | |
153 | def activate_example_rcextensions(request): |
|
153 | def activate_example_rcextensions(request): | |
154 | """ |
|
154 | """ | |
155 | Patch in an example rcextensions module which verifies passed in kwargs. |
|
155 | Patch in an example rcextensions module which verifies passed in kwargs. | |
156 | """ |
|
156 | """ | |
157 | from rhodecode.tests.other import example_rcextensions |
|
157 | from rhodecode.tests.other import example_rcextensions | |
158 |
|
158 | |||
159 | old_extensions = rhodecode.EXTENSIONS |
|
159 | old_extensions = rhodecode.EXTENSIONS | |
160 | rhodecode.EXTENSIONS = example_rcextensions |
|
160 | rhodecode.EXTENSIONS = example_rcextensions | |
161 |
|
161 | |||
162 | @request.addfinalizer |
|
162 | @request.addfinalizer | |
163 | def cleanup(): |
|
163 | def cleanup(): | |
164 | rhodecode.EXTENSIONS = old_extensions |
|
164 | rhodecode.EXTENSIONS = old_extensions | |
165 |
|
165 | |||
166 |
|
166 | |||
167 | @pytest.fixture |
|
167 | @pytest.fixture | |
168 | def capture_rcextensions(): |
|
168 | def capture_rcextensions(): | |
169 | """ |
|
169 | """ | |
170 | Returns the recorded calls to entry points in rcextensions. |
|
170 | Returns the recorded calls to entry points in rcextensions. | |
171 | """ |
|
171 | """ | |
172 | calls = rhodecode.EXTENSIONS.calls |
|
172 | calls = rhodecode.EXTENSIONS.calls | |
173 | calls.clear() |
|
173 | calls.clear() | |
174 | # Note: At this moment, it is still the empty dict, but that will |
|
174 | # Note: At this moment, it is still the empty dict, but that will | |
175 | # be filled during the test run and since it is a reference this |
|
175 | # be filled during the test run and since it is a reference this | |
176 | # is enough to make it work. |
|
176 | # is enough to make it work. | |
177 | return calls |
|
177 | return calls | |
178 |
|
178 | |||
179 |
|
179 | |||
180 | @pytest.fixture(scope='session') |
|
180 | @pytest.fixture(scope='session') | |
181 | def http_environ_session(): |
|
181 | def http_environ_session(): | |
182 | """ |
|
182 | """ | |
183 | Allows using "http_environ" in session scope. |
|
183 | Allows using "http_environ" in session scope. | |
184 | """ |
|
184 | """ | |
185 | return http_environ( |
|
185 | return http_environ( | |
186 | http_host_stub=http_host_stub()) |
|
186 | http_host_stub=http_host_stub()) | |
187 |
|
187 | |||
188 |
|
188 | |||
189 | @pytest.fixture |
|
189 | @pytest.fixture | |
190 | def http_host_stub(): |
|
190 | def http_host_stub(): | |
191 | """ |
|
191 | """ | |
192 | Value of HTTP_HOST in the test run. |
|
192 | Value of HTTP_HOST in the test run. | |
193 | """ |
|
193 | """ | |
194 | return 'example.com:80' |
|
194 | return 'example.com:80' | |
195 |
|
195 | |||
196 |
|
196 | |||
197 | @pytest.fixture |
|
197 | @pytest.fixture | |
198 | def http_host_only_stub(): |
|
198 | def http_host_only_stub(): | |
199 | """ |
|
199 | """ | |
200 | Value of HTTP_HOST in the test run. |
|
200 | Value of HTTP_HOST in the test run. | |
201 | """ |
|
201 | """ | |
202 | return http_host_stub().split(':')[0] |
|
202 | return http_host_stub().split(':')[0] | |
203 |
|
203 | |||
204 |
|
204 | |||
205 | @pytest.fixture |
|
205 | @pytest.fixture | |
206 | def http_environ(http_host_stub): |
|
206 | def http_environ(http_host_stub): | |
207 | """ |
|
207 | """ | |
208 | HTTP extra environ keys. |
|
208 | HTTP extra environ keys. | |
209 |
|
209 | |||
210 | Used by the test application as well as for setting up the pylons |
|
210 | Used by the test application as well as for setting up the pylons | |
211 | environment. In the case of the fixture "app" it should be possible |
|
211 | environment. In the case of the fixture "app" it should be possible | |
212 | to override this for a specific test case. |
|
212 | to override this for a specific test case. | |
213 | """ |
|
213 | """ | |
214 | return { |
|
214 | return { | |
215 | 'SERVER_NAME': http_host_only_stub(), |
|
215 | 'SERVER_NAME': http_host_only_stub(), | |
216 | 'SERVER_PORT': http_host_stub.split(':')[1], |
|
216 | 'SERVER_PORT': http_host_stub.split(':')[1], | |
217 | 'HTTP_HOST': http_host_stub, |
|
217 | 'HTTP_HOST': http_host_stub, | |
218 | 'HTTP_USER_AGENT': 'rc-test-agent', |
|
218 | 'HTTP_USER_AGENT': 'rc-test-agent', | |
219 | 'REQUEST_METHOD': 'GET' |
|
219 | 'REQUEST_METHOD': 'GET' | |
220 | } |
|
220 | } | |
221 |
|
221 | |||
222 |
|
222 | |||
223 | @pytest.fixture(scope='session') |
|
223 | @pytest.fixture(scope='session') | |
224 | def baseapp(ini_config, vcsserver, http_environ_session): |
|
224 | def baseapp(ini_config, vcsserver, http_environ_session): | |
225 | from rhodecode.lib.pyramid_utils import get_app_config |
|
225 | from rhodecode.lib.pyramid_utils import get_app_config | |
226 | from rhodecode.config.middleware import make_pyramid_app |
|
226 | from rhodecode.config.middleware import make_pyramid_app | |
227 |
|
227 | |||
228 | print("Using the RhodeCode configuration:{}".format(ini_config)) |
|
228 | print("Using the RhodeCode configuration:{}".format(ini_config)) | |
229 | pyramid.paster.setup_logging(ini_config) |
|
229 | pyramid.paster.setup_logging(ini_config) | |
230 |
|
230 | |||
231 | settings = get_app_config(ini_config) |
|
231 | settings = get_app_config(ini_config) | |
232 | app = make_pyramid_app({'__file__': ini_config}, **settings) |
|
232 | app = make_pyramid_app({'__file__': ini_config}, **settings) | |
233 |
|
233 | |||
234 | return app |
|
234 | return app | |
235 |
|
235 | |||
236 |
|
236 | |||
237 | @pytest.fixture(scope='function') |
|
237 | @pytest.fixture(scope='function') | |
238 | def app(request, config_stub, baseapp, http_environ): |
|
238 | def app(request, config_stub, baseapp, http_environ): | |
239 | app = CustomTestApp( |
|
239 | app = CustomTestApp( | |
240 | baseapp, |
|
240 | baseapp, | |
241 | extra_environ=http_environ) |
|
241 | extra_environ=http_environ) | |
242 | if request.cls: |
|
242 | if request.cls: | |
243 | request.cls.app = app |
|
243 | request.cls.app = app | |
244 | return app |
|
244 | return app | |
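A sketch of a functional test on top of this fixture; CustomTestApp follows the WebTest interface, and the requested URL and expected status here are assumptions::

    def test_home_is_reachable(app):
        # issues a GET through the WSGI stack with the extra environ applied
        app.get('/', status=200)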
245 |
|
245 | |||
246 |
|
246 | |||
247 | @pytest.fixture(scope='session') |
|
247 | @pytest.fixture(scope='session') | |
248 | def app_settings(baseapp, ini_config): |
|
248 | def app_settings(baseapp, ini_config): | |
249 | """ |
|
249 | """ | |
250 | Settings dictionary used to create the app. |
|
250 | Settings dictionary used to create the app. | |
251 |
|
251 | |||
252 | Parses the ini file and passes the result through the sanitize and apply |
|
252 | Parses the ini file and passes the result through the sanitize and apply | |
253 | defaults mechanism in `rhodecode.config.middleware`. |
|
253 | defaults mechanism in `rhodecode.config.middleware`. | |
254 | """ |
|
254 | """ | |
255 | return baseapp.config.get_settings() |
|
255 | return baseapp.config.get_settings() | |
256 |
|
256 | |||
257 |
|
257 | |||
258 | @pytest.fixture(scope='session') |
|
258 | @pytest.fixture(scope='session') | |
259 | def db_connection(ini_settings): |
|
259 | def db_connection(ini_settings): | |
260 | # Initialize the database connection. |
|
260 | # Initialize the database connection. | |
261 | config_utils.initialize_database(ini_settings) |
|
261 | config_utils.initialize_database(ini_settings) | |
262 |
|
262 | |||
263 |
|
263 | |||
264 | LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user')) |
|
264 | LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user')) | |
265 |
|
265 | |||
266 |
|
266 | |||
267 | def _autologin_user(app, *args): |
|
267 | def _autologin_user(app, *args): | |
268 | session = login_user_session(app, *args) |
|
268 | session = login_user_session(app, *args) | |
269 | csrf_token = rhodecode.lib.auth.get_csrf_token(session) |
|
269 | csrf_token = rhodecode.lib.auth.get_csrf_token(session) | |
270 | return LoginData(csrf_token, session['rhodecode_user']) |
|
270 | return LoginData(csrf_token, session['rhodecode_user']) | |
271 |
|
271 | |||
272 |
|
272 | |||
273 | @pytest.fixture |
|
273 | @pytest.fixture | |
274 | def autologin_user(app): |
|
274 | def autologin_user(app): | |
275 | """ |
|
275 | """ | |
276 | Utility fixture which makes sure that the admin user is logged in |
|
276 | Utility fixture which makes sure that the admin user is logged in | |
277 | """ |
|
277 | """ | |
278 | return _autologin_user(app) |
|
278 | return _autologin_user(app) | |
279 |
|
279 | |||
280 |
|
280 | |||
281 | @pytest.fixture |
|
281 | @pytest.fixture | |
282 | def autologin_regular_user(app): |
|
282 | def autologin_regular_user(app): | |
283 | """ |
|
283 | """ | |
284 | Utility fixture which makes sure that the regular user is logged in |
|
284 | Utility fixture which makes sure that the regular user is logged in | |
285 | """ |
|
285 | """ | |
286 | return _autologin_user( |
|
286 | return _autologin_user( | |
287 | app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) |
|
287 | app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) | |
288 |
|
288 | |||
289 |
|
289 | |||
290 | @pytest.fixture(scope='function') |
|
290 | @pytest.fixture(scope='function') | |
291 | def csrf_token(request, autologin_user): |
|
291 | def csrf_token(request, autologin_user): | |
292 | return autologin_user.csrf_token |
|
292 | return autologin_user.csrf_token | |
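A sketch combining `autologin_user` and `csrf_token` for a POST request; the route and form field name used here are assumptions::

    def test_post_with_csrf(app, csrf_token):
        # autologin_user is activated transitively through csrf_token
        app.post('/some/route', params={'csrf_token': csrf_token})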
293 |
|
293 | |||
294 |
|
294 | |||
295 | @pytest.fixture(scope='function') |
|
295 | @pytest.fixture(scope='function') | |
296 | def xhr_header(request): |
|
296 | def xhr_header(request): | |
297 | return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'} |
|
297 | return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'} | |
298 |
|
298 | |||
299 |
|
299 | |||
300 | @pytest.fixture |
|
300 | @pytest.fixture | |
301 | def real_crypto_backend(monkeypatch): |
|
301 | def real_crypto_backend(monkeypatch): | |
302 | """ |
|
302 | """ | |
303 | Switch the production crypto backend on for this test. |
|
303 | Switch the production crypto backend on for this test. | |
304 |
|
304 | |||
305 | During the test run the crypto backend is replaced with a faster |
|
305 | During the test run the crypto backend is replaced with a faster | |
306 | implementation based on the MD5 algorithm. |
|
306 | implementation based on the MD5 algorithm. | |
307 | """ |
|
307 | """ | |
308 | monkeypatch.setattr(rhodecode, 'is_test', False) |
|
308 | monkeypatch.setattr(rhodecode, 'is_test', False) | |
309 |
|
309 | |||
310 |
|
310 | |||
311 | @pytest.fixture(scope='class') |
|
311 | @pytest.fixture(scope='class') | |
312 | def index_location(request, baseapp): |
|
312 | def index_location(request, baseapp): | |
313 | index_location = baseapp.config.get_settings()['search.location'] |
|
313 | index_location = baseapp.config.get_settings()['search.location'] | |
314 | if request.cls: |
|
314 | if request.cls: | |
315 | request.cls.index_location = index_location |
|
315 | request.cls.index_location = index_location | |
316 | return index_location |
|
316 | return index_location | |
317 |
|
317 | |||
318 |
|
318 | |||
319 | @pytest.fixture(scope='session', autouse=True) |
|
319 | @pytest.fixture(scope='session', autouse=True) | |
320 | def tests_tmp_path(request): |
|
320 | def tests_tmp_path(request): | |
321 | """ |
|
321 | """ | |
322 | Create a temporary directory to be used during the test session. |
|
322 | Create a temporary directory to be used during the test session. | |
323 | """ |
|
323 | """ | |
324 | if not os.path.exists(TESTS_TMP_PATH): |
|
324 | if not os.path.exists(TESTS_TMP_PATH): | |
325 | os.makedirs(TESTS_TMP_PATH) |
|
325 | os.makedirs(TESTS_TMP_PATH) | |
326 |
|
326 | |||
327 | if not request.config.getoption('--keep-tmp-path'): |
|
327 | if not request.config.getoption('--keep-tmp-path'): | |
328 | @request.addfinalizer |
|
328 | @request.addfinalizer | |
329 | def remove_tmp_path(): |
|
329 | def remove_tmp_path(): | |
330 | shutil.rmtree(TESTS_TMP_PATH) |
|
330 | shutil.rmtree(TESTS_TMP_PATH) | |
331 |
|
331 | |||
332 | return TESTS_TMP_PATH |
|
332 | return TESTS_TMP_PATH | |
333 |
|
333 | |||
334 |
|
334 | |||
335 | @pytest.fixture |
|
335 | @pytest.fixture | |
336 | def test_repo_group(request): |
|
336 | def test_repo_group(request): | |
337 | """ |
|
337 | """ | |
338 | Create a temporary repository group, and destroy it |
|
338 | Create a temporary repository group, and destroy it | |
339 | automatically after use |
|
339 | automatically after use | |
340 | """ |
|
340 | """ | |
341 | fixture = Fixture() |
|
341 | fixture = Fixture() | |
342 | repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '') |
|
342 | repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '') | |
343 | repo_group = fixture.create_repo_group(repogroupid) |
|
343 | repo_group = fixture.create_repo_group(repogroupid) | |
344 |
|
344 | |||
345 | def _cleanup(): |
|
345 | def _cleanup(): | |
346 | fixture.destroy_repo_group(repogroupid) |
|
346 | fixture.destroy_repo_group(repogroupid) | |
347 |
|
347 | |||
348 | request.addfinalizer(_cleanup) |
|
348 | request.addfinalizer(_cleanup) | |
349 | return repo_group |
|
349 | return repo_group | |
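A usage sketch; the group exists for the duration of the test and is destroyed by the finalizer, assuming the returned object exposes the usual `group_name` attribute::

    def test_group_is_temporary(test_repo_group):
        assert test_repo_group.group_name.startswith('test_repo_group_')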
350 |
|
350 | |||
351 |
|
351 | |||
352 | @pytest.fixture |
|
352 | @pytest.fixture | |
353 | def test_user_group(request): |
|
353 | def test_user_group(request): | |
354 | """ |
|
354 | """ | |
355 | Create a temporary user group, and destroy it |
|
355 | Create a temporary user group, and destroy it | |
356 | automatically after use |
|
356 | automatically after use | |
357 | """ |
|
357 | """ | |
358 | fixture = Fixture() |
|
358 | fixture = Fixture() | |
359 | usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '') |
|
359 | usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '') | |
360 | user_group = fixture.create_user_group(usergroupid) |
|
360 | user_group = fixture.create_user_group(usergroupid) | |
361 |
|
361 | |||
362 | def _cleanup(): |
|
362 | def _cleanup(): | |
363 | fixture.destroy_user_group(user_group) |
|
363 | fixture.destroy_user_group(user_group) | |
364 |
|
364 | |||
365 | request.addfinalizer(_cleanup) |
|
365 | request.addfinalizer(_cleanup) | |
366 | return user_group |
|
366 | return user_group | |
367 |
|
367 | |||
368 |
|
368 | |||
369 | @pytest.fixture(scope='session') |
|
369 | @pytest.fixture(scope='session') | |
370 | def test_repo(request): |
|
370 | def test_repo(request): | |
371 | container = TestRepoContainer() |
|
371 | container = TestRepoContainer() | |
372 | request.addfinalizer(container._cleanup) |
|
372 | request.addfinalizer(container._cleanup) | |
373 | return container |
|
373 | return container | |
374 |
|
374 | |||
375 |
|
375 | |||
376 | class TestRepoContainer(object): |
|
376 | class TestRepoContainer(object): | |
377 | """ |
|
377 | """ | |
378 | Container for test repositories which are used read-only. |
|
378 | Container for test repositories which are used read-only. | |
379 |
|
379 | |||
380 | Repositories will be created on demand and re-used during the lifetime |
|
380 | Repositories will be created on demand and re-used during the lifetime | |
381 | of this object. |
|
381 | of this object. | |
382 |
|
382 | |||
383 | Usage to get the svn test repository "minimal":: |
|
383 | Usage to get the svn test repository "minimal":: | |
384 |
|
384 | |||
385 | test_repo = TestContainer() |
|
385 | test_repo = TestContainer() | |
386 | repo = test_repo('minimal', 'svn') |
|
386 | repo = test_repo('minimal', 'svn') | |
387 |
|
387 | |||
388 | """ |
|
388 | """ | |
389 |
|
389 | |||
390 | dump_extractors = { |
|
390 | dump_extractors = { | |
391 | 'git': utils.extract_git_repo_from_dump, |
|
391 | 'git': utils.extract_git_repo_from_dump, | |
392 | 'hg': utils.extract_hg_repo_from_dump, |
|
392 | 'hg': utils.extract_hg_repo_from_dump, | |
393 | 'svn': utils.extract_svn_repo_from_dump, |
|
393 | 'svn': utils.extract_svn_repo_from_dump, | |
394 | } |
|
394 | } | |
395 |
|
395 | |||
396 | def __init__(self): |
|
396 | def __init__(self): | |
397 | self._cleanup_repos = [] |
|
397 | self._cleanup_repos = [] | |
398 | self._fixture = Fixture() |
|
398 | self._fixture = Fixture() | |
399 | self._repos = {} |
|
399 | self._repos = {} | |
400 |
|
400 | |||
401 | def __call__(self, dump_name, backend_alias, config=None): |
|
401 | def __call__(self, dump_name, backend_alias, config=None): | |
402 | key = (dump_name, backend_alias) |
|
402 | key = (dump_name, backend_alias) | |
403 | if key not in self._repos: |
|
403 | if key not in self._repos: | |
404 | repo = self._create_repo(dump_name, backend_alias, config) |
|
404 | repo = self._create_repo(dump_name, backend_alias, config) | |
405 | self._repos[key] = repo.repo_id |
|
405 | self._repos[key] = repo.repo_id | |
406 | return Repository.get(self._repos[key]) |
|
406 | return Repository.get(self._repos[key]) | |
407 |
|
407 | |||
408 | def _create_repo(self, dump_name, backend_alias, config): |
|
408 | def _create_repo(self, dump_name, backend_alias, config): | |
409 | repo_name = '%s-%s' % (backend_alias, dump_name) |
|
409 | repo_name = '%s-%s' % (backend_alias, dump_name) | |
410 | backend_class = get_backend(backend_alias) |
|
410 | backend_class = get_backend(backend_alias) | |
411 | dump_extractor = self.dump_extractors[backend_alias] |
|
411 | dump_extractor = self.dump_extractors[backend_alias] | |
412 | repo_path = dump_extractor(dump_name, repo_name) |
|
412 | repo_path = dump_extractor(dump_name, repo_name) | |
413 |
|
413 | |||
414 | vcs_repo = backend_class(repo_path, config=config) |
|
414 | vcs_repo = backend_class(repo_path, config=config) | |
415 | repo2db_mapper({repo_name: vcs_repo}) |
|
415 | repo2db_mapper({repo_name: vcs_repo}) | |
416 |
|
416 | |||
417 | repo = RepoModel().get_by_repo_name(repo_name) |
|
417 | repo = RepoModel().get_by_repo_name(repo_name) | |
418 | self._cleanup_repos.append(repo_name) |
|
418 | self._cleanup_repos.append(repo_name) | |
419 | return repo |
|
419 | return repo | |
420 |
|
420 | |||
421 | def _cleanup(self): |
|
421 | def _cleanup(self): | |
422 | for repo_name in reversed(self._cleanup_repos): |
|
422 | for repo_name in reversed(self._cleanup_repos): | |
423 | self._fixture.destroy_repo(repo_name) |
|
423 | self._fixture.destroy_repo(repo_name) | |
424 |
|
424 | |||
425 |
|
425 | |||
426 | @pytest.fixture |
|
426 | @pytest.fixture | |
427 | def backend(request, backend_alias, baseapp, test_repo): |
|
427 | def backend(request, backend_alias, baseapp, test_repo): | |
428 | """ |
|
428 | """ | |
429 | Parametrized fixture which represents a single backend implementation. |
|
429 | Parametrized fixture which represents a single backend implementation. | |
430 |
|
430 | |||
431 | It respects the option `--backends` to focus the test run on specific |
|
431 | It respects the option `--backends` to focus the test run on specific | |
432 | backend implementations. |
|
432 | backend implementations. | |
433 |
|
433 | |||
434 | It also supports `pytest.mark.xfail_backends` to mark tests as failing |
|
434 | It also supports `pytest.mark.xfail_backends` to mark tests as failing | |
435 | for specific backends. This is intended as a utility for incremental |
|
435 | for specific backends. This is intended as a utility for incremental | |
436 | development of a new backend implementation. |
|
436 | development of a new backend implementation. | |
437 | """ |
|
437 | """ | |
438 | if backend_alias not in request.config.getoption('--backends'): |
|
438 | if backend_alias not in request.config.getoption('--backends'): | |
439 | pytest.skip("Backend %s not selected." % (backend_alias, )) |
|
439 | pytest.skip("Backend %s not selected." % (backend_alias, )) | |
440 |
|
440 | |||
441 | utils.check_xfail_backends(request.node, backend_alias) |
|
441 | utils.check_xfail_backends(request.node, backend_alias) | |
442 | utils.check_skip_backends(request.node, backend_alias) |
|
442 | utils.check_skip_backends(request.node, backend_alias) | |
443 |
|
443 | |||
444 | repo_name = 'vcs_test_%s' % (backend_alias, ) |
|
444 | repo_name = 'vcs_test_%s' % (backend_alias, ) | |
445 | backend = Backend( |
|
445 | backend = Backend( | |
446 | alias=backend_alias, |
|
446 | alias=backend_alias, | |
447 | repo_name=repo_name, |
|
447 | repo_name=repo_name, | |
448 | test_name=request.node.name, |
|
448 | test_name=request.node.name, | |
449 | test_repo_container=test_repo) |
|
449 | test_repo_container=test_repo) | |
450 | request.addfinalizer(backend.cleanup) |
|
450 | request.addfinalizer(backend.cleanup) | |
451 | return backend |
|
451 | return backend | |
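A sketch of a test parametrized through this fixture; it runs once per backend selected via `--backends`, e.g. `py.test --backends=git,hg` (invocation shown as an assumption)::

    def test_repo_is_set_up(backend):
        # executed once for each selected alias: 'git', 'hg', 'svn'
        assert backend.repo_name == 'vcs_test_%s' % backend.alias
        assert backend.repo is not None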
452 |
|
452 | |||
453 |
|
453 | |||
454 | @pytest.fixture |
|
454 | @pytest.fixture | |
455 | def backend_git(request, baseapp, test_repo): |
|
455 | def backend_git(request, baseapp, test_repo): | |
456 | return backend(request, 'git', baseapp, test_repo) |
|
456 | return backend(request, 'git', baseapp, test_repo) | |
457 |
|
457 | |||
458 |
|
458 | |||
459 | @pytest.fixture |
|
459 | @pytest.fixture | |
460 | def backend_hg(request, baseapp, test_repo): |
|
460 | def backend_hg(request, baseapp, test_repo): | |
461 | return backend(request, 'hg', baseapp, test_repo) |
|
461 | return backend(request, 'hg', baseapp, test_repo) | |
462 |
|
462 | |||
463 |
|
463 | |||
464 | @pytest.fixture |
|
464 | @pytest.fixture | |
465 | def backend_svn(request, baseapp, test_repo): |
|
465 | def backend_svn(request, baseapp, test_repo): | |
466 | return backend(request, 'svn', baseapp, test_repo) |
|
466 | return backend(request, 'svn', baseapp, test_repo) | |
467 |
|
467 | |||
468 |
|
468 | |||
469 | @pytest.fixture |
|
469 | @pytest.fixture | |
470 | def backend_random(backend_git): |
|
470 | def backend_random(backend_git): | |
471 | """ |
|
471 | """ | |
472 | Use this to express that your tests need "a backend". |
|
472 | Use this to express that your tests need "a backend". | |
473 |
|
473 | |||
474 | A few of our tests need a backend, so that we can run the code. This |
|
474 | A few of our tests need a backend, so that we can run the code. This | |
475 | fixture is intended to be used for such cases. It will pick one of the |
|
475 | fixture is intended to be used for such cases. It will pick one of the | |
476 | backends and run the tests. |
|
476 | backends and run the tests. | |
477 |
|
477 | |||
478 | The fixture `backend` would run the test multiple times for each |
|
478 | The fixture `backend` would run the test multiple times for each | |
479 | available backend which is a pure waste of time if the test is |
|
479 | available backend which is a pure waste of time if the test is | |
480 | independent of the backend type. |
|
480 | independent of the backend type. | |
481 | """ |
|
481 | """ | |
482 | # TODO: johbo: Change this to pick a random backend |
|
482 | # TODO: johbo: Change this to pick a random backend | |
483 | return backend_git |
|
483 | return backend_git | |
484 |
|
484 | |||
485 |
|
485 | |||
486 | @pytest.fixture |
|
486 | @pytest.fixture | |
487 | def backend_stub(backend_git): |
|
487 | def backend_stub(backend_git): | |
488 | """ |
|
488 | """ | |
489 | Use this to express that your tests need a backend stub |
|
489 | Use this to express that your tests need a backend stub | |
490 |
|
490 | |||
491 | TODO: mikhail: Implement a real stub logic instead of returning |
|
491 | TODO: mikhail: Implement a real stub logic instead of returning | |
492 | a git backend |
|
492 | a git backend | |
493 | """ |
|
493 | """ | |
494 | return backend_git |
|
494 | return backend_git | |
495 |
|
495 | |||
496 |
|
496 | |||
497 | @pytest.fixture |
|
497 | @pytest.fixture | |
498 | def repo_stub(backend_stub): |
|
498 | def repo_stub(backend_stub): | |
499 | """ |
|
499 | """ | |
500 | Use this to express that your tests need a repository stub |
|
500 | Use this to express that your tests need a repository stub | |
501 | """ |
|
501 | """ | |
502 | return backend_stub.create_repo() |
|
502 | return backend_stub.create_repo() | |
503 |
|
503 | |||
504 |
|
504 | |||
505 | class Backend(object): |
|
505 | class Backend(object): | |
506 | """ |
|
506 | """ | |
507 | Represents the test configuration for one supported backend |
|
507 | Represents the test configuration for one supported backend | |
508 |
|
508 | |||
509 | Provides easy access to different test repositories based on |
|
509 | Provides easy access to different test repositories based on | |
510 | `__getitem__`. Such repositories will only be created once per test |
|
510 | `__getitem__`. Such repositories will only be created once per test | |
511 | session. |
|
511 | session. | |
512 | """ |
|
512 | """ | |
513 |
|
513 | |||
514 | invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+') |
|
514 | invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+') | |
515 | _master_repo = None |
|
515 | _master_repo = None | |
516 | _commit_ids = {} |
|
516 | _commit_ids = {} | |
517 |
|
517 | |||
518 | def __init__(self, alias, repo_name, test_name, test_repo_container): |
|
518 | def __init__(self, alias, repo_name, test_name, test_repo_container): | |
519 | self.alias = alias |
|
519 | self.alias = alias | |
520 | self.repo_name = repo_name |
|
520 | self.repo_name = repo_name | |
521 | self._cleanup_repos = [] |
|
521 | self._cleanup_repos = [] | |
522 | self._test_name = test_name |
|
522 | self._test_name = test_name | |
523 | self._test_repo_container = test_repo_container |
|
523 | self._test_repo_container = test_repo_container | |
524 | # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or |
|
524 | # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or | |
525 | # Fixture will survive in the end. |
|
525 | # Fixture will survive in the end. | |
526 | self._fixture = Fixture() |
|
526 | self._fixture = Fixture() | |
527 |
|
527 | |||
528 | def __getitem__(self, key): |
|
528 | def __getitem__(self, key): | |
529 | return self._test_repo_container(key, self.alias) |
|
529 | return self._test_repo_container(key, self.alias) | |
530 |
|
530 | |||
531 | def create_test_repo(self, key, config=None): |
|
531 | def create_test_repo(self, key, config=None): | |
532 | return self._test_repo_container(key, self.alias, config) |
|
532 | return self._test_repo_container(key, self.alias, config) | |
533 |
|
533 | |||
534 | @property |
|
534 | @property | |
535 | def repo(self): |
|
535 | def repo(self): | |
536 | """ |
|
536 | """ | |
537 | Returns the "current" repository. This is the vcs_test repo or the |
|
537 | Returns the "current" repository. This is the vcs_test repo or the | |
538 | last repo which has been created with `create_repo`. |
|
538 | last repo which has been created with `create_repo`. | |
539 | """ |
|
539 | """ | |
540 | from rhodecode.model.db import Repository |
|
540 | from rhodecode.model.db import Repository | |
541 | return Repository.get_by_repo_name(self.repo_name) |
|
541 | return Repository.get_by_repo_name(self.repo_name) | |
542 |
|
542 | |||
543 | @property |
|
543 | @property | |
544 | def default_branch_name(self): |
|
544 | def default_branch_name(self): | |
545 | VcsRepository = get_backend(self.alias) |
|
545 | VcsRepository = get_backend(self.alias) | |
546 | return VcsRepository.DEFAULT_BRANCH_NAME |
|
546 | return VcsRepository.DEFAULT_BRANCH_NAME | |
547 |
|
547 | |||
548 | @property |
|
548 | @property | |
549 | def default_head_id(self): |
|
549 | def default_head_id(self): | |
550 | """ |
|
550 | """ | |
551 | Returns the default head id of the underlying backend. |
|
551 | Returns the default head id of the underlying backend. | |
552 |
|
552 | |||
553 | This will be the default branch name in case the backend does have a |
|
553 | This will be the default branch name in case the backend does have a | |
554 | default branch. In the other cases it will point to a valid head |
|
554 | default branch. In the other cases it will point to a valid head | |
555 | which can serve as the base to create a new commit on top of it. |
|
555 | which can serve as the base to create a new commit on top of it. | |
556 | """ |
|
556 | """ | |
557 | vcsrepo = self.repo.scm_instance() |
|
557 | vcsrepo = self.repo.scm_instance() | |
558 | head_id = ( |
|
558 | head_id = ( | |
559 | vcsrepo.DEFAULT_BRANCH_NAME or |
|
559 | vcsrepo.DEFAULT_BRANCH_NAME or | |
560 | vcsrepo.commit_ids[-1]) |
|
560 | vcsrepo.commit_ids[-1]) | |
561 | return head_id |
|
561 | return head_id | |
562 |
|
562 | |||
563 | @property |
|
563 | @property | |
564 | def commit_ids(self): |
|
564 | def commit_ids(self): | |
565 | """ |
|
565 | """ | |
566 | Returns the list of commits for the last created repository |
|
566 | Returns the list of commits for the last created repository | |
567 | """ |
|
567 | """ | |
568 | return self._commit_ids |
|
568 | return self._commit_ids | |
569 |
|
569 | |||
570 | def create_master_repo(self, commits): |
|
570 | def create_master_repo(self, commits): | |
571 | """ |
|
571 | """ | |
572 | Create a repository and remember it as a template. |
|
572 | Create a repository and remember it as a template. | |
573 |
|
573 | |||
574 | This makes it easy to create derived repositories to construct |
|
574 | This makes it easy to create derived repositories to construct | |
575 | more complex scenarios for diff, compare and pull requests. |
|
575 | more complex scenarios for diff, compare and pull requests. | |
576 |
|
576 | |||
577 | Returns a commit map which maps from commit message to raw_id. |
|
577 | Returns a commit map which maps from commit message to raw_id. | |
578 | """ |
|
578 | """ | |
579 | self._master_repo = self.create_repo(commits=commits) |
|
579 | self._master_repo = self.create_repo(commits=commits) | |
580 | return self._commit_ids |
|
580 | return self._commit_ids | |
581 |
|
581 | |||
582 | def create_repo( |
|
582 | def create_repo( | |
583 | self, commits=None, number_of_commits=0, heads=None, |
|
583 | self, commits=None, number_of_commits=0, heads=None, | |
584 | name_suffix=u'', **kwargs): |
|
584 | name_suffix=u'', **kwargs): | |
585 | """ |
|
585 | """ | |
586 | Create a repository and record it for later cleanup. |
|
586 | Create a repository and record it for later cleanup. | |
587 |
|
587 | |||
588 | :param commits: Optional. A sequence of dict instances. |
|
588 | :param commits: Optional. A sequence of dict instances. | |
589 | Will add a commit per entry to the new repository. |
|
589 | Will add a commit per entry to the new repository. | |
590 | :param number_of_commits: Optional. If set to a number, this number of |
|
590 | :param number_of_commits: Optional. If set to a number, this number of | |
591 | commits will be added to the new repository. |
|
591 | commits will be added to the new repository. | |
592 | :param heads: Optional. Can be set to a sequence of commit |
|
592 | :param heads: Optional. Can be set to a sequence of commit | |
593 | names which shall be pulled in from the master repository. |
|
593 | names which shall be pulled in from the master repository. | |
594 |
|
594 | |||
595 | """ |
|
595 | """ | |
596 | self.repo_name = self._next_repo_name() + name_suffix |
|
596 | self.repo_name = self._next_repo_name() + name_suffix | |
597 | repo = self._fixture.create_repo( |
|
597 | repo = self._fixture.create_repo( | |
598 | self.repo_name, repo_type=self.alias, **kwargs) |
|
598 | self.repo_name, repo_type=self.alias, **kwargs) | |
599 | self._cleanup_repos.append(repo.repo_name) |
|
599 | self._cleanup_repos.append(repo.repo_name) | |
600 |
|
600 | |||
601 | commits = commits or [ |
|
601 | commits = commits or [ | |
602 | {'message': 'Commit %s of %s' % (x, self.repo_name)} |
|
602 | {'message': 'Commit %s of %s' % (x, self.repo_name)} | |
603 | for x in xrange(number_of_commits)] |
|
603 | for x in xrange(number_of_commits)] | |
604 | self._add_commits_to_repo(repo.scm_instance(), commits) |
|
604 | self._add_commits_to_repo(repo.scm_instance(), commits) | |
605 | if heads: |
|
605 | if heads: | |
606 | self.pull_heads(repo, heads) |
|
606 | self.pull_heads(repo, heads) | |
607 |
|
607 | |||
608 | return repo |
|
608 | return repo | |
609 |
|
609 | |||
610 | def pull_heads(self, repo, heads): |
|
610 | def pull_heads(self, repo, heads): | |
611 | """ |
|
611 | """ | |
612 | Make sure that repo contains all commits mentioned in `heads` |
|
612 | Make sure that repo contains all commits mentioned in `heads` | |
613 | """ |
|
613 | """ | |
614 | vcsmaster = self._master_repo.scm_instance() |
|
614 | vcsmaster = self._master_repo.scm_instance() | |
615 | vcsrepo = repo.scm_instance() |
|
615 | vcsrepo = repo.scm_instance() | |
616 | vcsrepo.config.clear_section('hooks') |
|
616 | vcsrepo.config.clear_section('hooks') | |
617 | commit_ids = [self._commit_ids[h] for h in heads] |
|
617 | commit_ids = [self._commit_ids[h] for h in heads] | |
618 | vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids) |
|
618 | vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids) | |
619 |
|
619 | |||
620 | def create_fork(self): |
|
620 | def create_fork(self): | |
621 | repo_to_fork = self.repo_name |
|
621 | repo_to_fork = self.repo_name | |
622 | self.repo_name = self._next_repo_name() |
|
622 | self.repo_name = self._next_repo_name() | |
623 | repo = self._fixture.create_fork(repo_to_fork, self.repo_name) |
|
623 | repo = self._fixture.create_fork(repo_to_fork, self.repo_name) | |
624 | self._cleanup_repos.append(self.repo_name) |
|
624 | self._cleanup_repos.append(self.repo_name) | |
625 | return repo |
|
625 | return repo | |
626 |
|
626 | |||
627 | def new_repo_name(self, suffix=u''): |
|
627 | def new_repo_name(self, suffix=u''): | |
628 | self.repo_name = self._next_repo_name() + suffix |
|
628 | self.repo_name = self._next_repo_name() + suffix | |
629 | self._cleanup_repos.append(self.repo_name) |
|
629 | self._cleanup_repos.append(self.repo_name) | |
630 | return self.repo_name |
|
630 | return self.repo_name | |
631 |
|
631 | |||
632 | def _next_repo_name(self): |
|
632 | def _next_repo_name(self): | |
633 | return u"%s_%s" % ( |
|
633 | return u"%s_%s" % ( | |
634 | self.invalid_repo_name.sub(u'_', self._test_name), |
|
634 | self.invalid_repo_name.sub(u'_', self._test_name), | |
635 | len(self._cleanup_repos)) |
|
635 | len(self._cleanup_repos)) | |
636 |
|
636 | |||
637 | def ensure_file(self, filename, content='Test content\n'): |
|
637 | def ensure_file(self, filename, content='Test content\n'): | |
638 | assert self._cleanup_repos, "Avoid writing into vcs_test repos" |
|
638 | assert self._cleanup_repos, "Avoid writing into vcs_test repos" | |
639 | commits = [ |
|
639 | commits = [ | |
640 | {'added': [ |
|
640 | {'added': [ | |
641 | FileNode(filename, content=content), |
|
641 | FileNode(filename, content=content), | |
642 | ]}, |
|
642 | ]}, | |
643 | ] |
|
643 | ] | |
644 | self._add_commits_to_repo(self.repo.scm_instance(), commits) |
|
644 | self._add_commits_to_repo(self.repo.scm_instance(), commits) | |
645 |
|
645 | |||
646 | def enable_downloads(self): |
|
646 | def enable_downloads(self): | |
647 | repo = self.repo |
|
647 | repo = self.repo | |
648 | repo.enable_downloads = True |
|
648 | repo.enable_downloads = True | |
649 | Session().add(repo) |
|
649 | Session().add(repo) | |
650 | Session().commit() |
|
650 | Session().commit() | |
651 |
|
651 | |||
652 | def cleanup(self): |
|
652 | def cleanup(self): | |
653 | for repo_name in reversed(self._cleanup_repos): |
|
653 | for repo_name in reversed(self._cleanup_repos): | |
654 | self._fixture.destroy_repo(repo_name) |
|
654 | self._fixture.destroy_repo(repo_name) | |
655 |
|
655 | |||
656 | def _add_commits_to_repo(self, repo, commits): |
|
656 | def _add_commits_to_repo(self, repo, commits): | |
657 | commit_ids = _add_commits_to_repo(repo, commits) |
|
657 | commit_ids = _add_commits_to_repo(repo, commits) | |
658 | if not commit_ids: |
|
658 | if not commit_ids: | |
659 | return |
|
659 | return | |
660 | self._commit_ids = commit_ids |
|
660 | self._commit_ids = commit_ids | |
661 |
|
661 | |||
662 | # Creating refs for Git to allow fetching them from remote repository |
|
662 | # Creating refs for Git to allow fetching them from remote repository | |
663 | if self.alias == 'git': |
|
663 | if self.alias == 'git': | |
664 | refs = {} |
|
664 | refs = {} | |
665 | for message in self._commit_ids: |
|
665 | for message in self._commit_ids: | |
666 | # TODO: mikhail: do more special chars replacements |
|
666 | # TODO: mikhail: do more special chars replacements | |
667 | ref_name = 'refs/test-refs/{}'.format( |
|
667 | ref_name = 'refs/test-refs/{}'.format( | |
668 | message.replace(' ', '')) |
|
668 | message.replace(' ', '')) | |
669 | refs[ref_name] = self._commit_ids[message] |
|
669 | refs[ref_name] = self._commit_ids[message] | |
670 | self._create_refs(repo, refs) |
|
670 | self._create_refs(repo, refs) | |
671 |
|
671 | |||
672 | def _create_refs(self, repo, refs): |
|
672 | def _create_refs(self, repo, refs): | |
673 | for ref_name in refs: |
|
673 | for ref_name in refs: | |
674 | repo.set_refs(ref_name, refs[ref_name]) |
|
674 | repo.set_refs(ref_name, refs[ref_name]) | |
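A sketch of the commit description accepted by `create_repo` and `create_master_repo` above; file names, contents and messages are arbitrary examples::

    commits = [
        {'message': 'Initial commit',
         'added': [FileNode('setup.py', content='print("hello")\n')]},
        {'message': 'Tweak setup.py',
         'changed': [FileNode('setup.py', content='print("bye")\n')]},
    ]
    commit_map = backend.create_master_repo(commits)  # message -> raw_id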
675 |
|
675 | |||
676 |
|
676 | |||
677 | @pytest.fixture |
|
677 | @pytest.fixture | |
678 | def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo): |
|
678 | def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo): | |
679 | """ |
|
679 | """ | |
680 | Parametrized fixture which represents a single vcs backend implementation. |
|
680 | Parametrized fixture which represents a single vcs backend implementation. | |
681 |
|
681 | |||
682 | See the fixture `backend` for more details. This one implements the same |
|
682 | See the fixture `backend` for more details. This one implements the same | |
683 | concept, but on the vcs level. So it does not provide model instances etc. |
|
683 | concept, but on the vcs level. So it does not provide model instances etc. | |
684 |
|
684 | |||
685 | Parameters are generated dynamically, see :func:`pytest_generate_tests` |
|
685 | Parameters are generated dynamically, see :func:`pytest_generate_tests` | |
686 | for how this works. |
|
686 | for how this works. | |
687 | """ |
|
687 | """ | |
688 | if backend_alias not in request.config.getoption('--backends'): |
|
688 | if backend_alias not in request.config.getoption('--backends'): | |
689 | pytest.skip("Backend %s not selected." % (backend_alias, )) |
|
689 | pytest.skip("Backend %s not selected." % (backend_alias, )) | |
690 |
|
690 | |||
691 | utils.check_xfail_backends(request.node, backend_alias) |
|
691 | utils.check_xfail_backends(request.node, backend_alias) | |
692 | utils.check_skip_backends(request.node, backend_alias) |
|
692 | utils.check_skip_backends(request.node, backend_alias) | |
693 |
|
693 | |||
694 | repo_name = 'vcs_test_%s' % (backend_alias, ) |
|
694 | repo_name = 'vcs_test_%s' % (backend_alias, ) | |
695 | repo_path = os.path.join(tests_tmp_path, repo_name) |
|
695 | repo_path = os.path.join(tests_tmp_path, repo_name) | |
696 | backend = VcsBackend( |
|
696 | backend = VcsBackend( | |
697 | alias=backend_alias, |
|
697 | alias=backend_alias, | |
698 | repo_path=repo_path, |
|
698 | repo_path=repo_path, | |
699 | test_name=request.node.name, |
|
699 | test_name=request.node.name, | |
700 | test_repo_container=test_repo) |
|
700 | test_repo_container=test_repo) | |
701 | request.addfinalizer(backend.cleanup) |
|
701 | request.addfinalizer(backend.cleanup) | |
702 | return backend |
|
702 | return backend | |
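A sketch of a vcs-level test using this fixture; it operates on vcs repository objects directly, which expose `commit_ids` as used elsewhere in this module::

    def test_commits_are_created(vcsbackend):
        repo = vcsbackend.create_repo(number_of_commits=3)
        assert len(repo.commit_ids) == 3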
703 |
|
703 | |||
704 |
|
704 | |||
705 | @pytest.fixture |
|
705 | @pytest.fixture | |
706 | def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo): |
|
706 | def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo): | |
707 | return vcsbackend(request, 'git', tests_tmp_path, baseapp, test_repo) |
|
707 | return vcsbackend(request, 'git', tests_tmp_path, baseapp, test_repo) | |
708 |
|
708 | |||
709 |
|
709 | |||
710 | @pytest.fixture |
|
710 | @pytest.fixture | |
711 | def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo): |
|
711 | def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo): | |
712 | return vcsbackend(request, 'hg', tests_tmp_path, baseapp, test_repo) |
|
712 | return vcsbackend(request, 'hg', tests_tmp_path, baseapp, test_repo) | |
713 |
|
713 | |||
714 |
|
714 | |||
715 | @pytest.fixture |
|
715 | @pytest.fixture | |
716 | def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo): |
|
716 | def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo): | |
717 | return vcsbackend(request, 'svn', tests_tmp_path, baseapp, test_repo) |
|
717 | return vcsbackend(request, 'svn', tests_tmp_path, baseapp, test_repo) | |
718 |
|
718 | |||
719 |
|
719 | |||
720 | @pytest.fixture |
|
720 | @pytest.fixture | |
721 | def vcsbackend_random(vcsbackend_git): |
|
721 | def vcsbackend_random(vcsbackend_git): | |
722 | """ |
|
722 | """ | |
723 | Use this to express that your tests need "a vcsbackend". |
|
723 | Use this to express that your tests need "a vcsbackend". | |
724 |
|
724 | |||
725 | The fixture `vcsbackend` would run the test multiple times for each |
|
725 | The fixture `vcsbackend` would run the test multiple times for each | |
726 | available vcs backend which is a pure waste of time if the test is |
|
726 | available vcs backend which is a pure waste of time if the test is | |
727 | independent of the vcs backend type. |
|
727 | independent of the vcs backend type. | |
728 | """ |
|
728 | """ | |
729 | # TODO: johbo: Change this to pick a random backend |
|
729 | # TODO: johbo: Change this to pick a random backend | |
730 | return vcsbackend_git |
|
730 | return vcsbackend_git | |
731 |
|
731 | |||
732 |
|
732 | |||
733 | @pytest.fixture |
|
733 | @pytest.fixture | |
734 | def vcsbackend_stub(vcsbackend_git): |
|
734 | def vcsbackend_stub(vcsbackend_git): | |
735 | """ |
|
735 | """ | |
736 | Use this to express that your test just needs a stub of a vcsbackend. |
|
736 | Use this to express that your test just needs a stub of a vcsbackend. | |
737 |
|
737 | |||
738 | Plan is to eventually implement an in-memory stub to speed tests up. |
|
738 | Plan is to eventually implement an in-memory stub to speed tests up. | |
739 | """ |
|
739 | """ | |
740 | return vcsbackend_git |
|
740 | return vcsbackend_git | |
741 |
|
741 | |||
742 |
|
742 | |||
743 | class VcsBackend(object): |
|
743 | class VcsBackend(object): | |
744 | """ |
|
744 | """ | |
745 | Represents the test configuration for one supported vcs backend. |
|
745 | Represents the test configuration for one supported vcs backend. | |
746 | """ |
|
746 | """ | |
747 |
|
747 | |||
748 | invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+') |
|
748 | invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+') | |
749 |
|
749 | |||
750 | def __init__(self, alias, repo_path, test_name, test_repo_container): |
|
750 | def __init__(self, alias, repo_path, test_name, test_repo_container): | |
751 | self.alias = alias |
|
751 | self.alias = alias | |
752 | self._repo_path = repo_path |
|
752 | self._repo_path = repo_path | |
753 | self._cleanup_repos = [] |
|
753 | self._cleanup_repos = [] | |
754 | self._test_name = test_name |
|
754 | self._test_name = test_name | |
755 | self._test_repo_container = test_repo_container |
|
755 | self._test_repo_container = test_repo_container | |
756 |
|
756 | |||
757 | def __getitem__(self, key): |
|
757 | def __getitem__(self, key): | |
758 | return self._test_repo_container(key, self.alias).scm_instance() |
|
758 | return self._test_repo_container(key, self.alias).scm_instance() | |
759 |
|
759 | |||
760 | @property |
|
760 | @property | |
761 | def repo(self): |
|
761 | def repo(self): | |
762 | """ |
|
762 | """ | |
763 | Returns the "current" repository. This is the vcs_test repo of the last |
|
763 | Returns the "current" repository. This is the vcs_test repo of the last | |
764 | repo which has been created. |
|
764 | repo which has been created. | |
765 | """ |
|
765 | """ | |
766 | Repository = get_backend(self.alias) |
|
766 | Repository = get_backend(self.alias) | |
767 | return Repository(self._repo_path) |
|
767 | return Repository(self._repo_path) | |
768 |
|
768 | |||
769 | @property |
|
769 | @property | |
770 | def backend(self): |
|
770 | def backend(self): | |
771 | """ |
|
771 | """ | |
772 | Returns the backend implementation class. |
|
772 | Returns the backend implementation class. | |
773 | """ |
|
773 | """ | |
774 | return get_backend(self.alias) |
|
774 | return get_backend(self.alias) | |
775 |
|
775 | |||
776 | def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None): |
|
776 | def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None): | |
777 | repo_name = self._next_repo_name() |
|
777 | repo_name = self._next_repo_name() | |
778 | self._repo_path = get_new_dir(repo_name) |
|
778 | self._repo_path = get_new_dir(repo_name) | |
779 | repo_class = get_backend(self.alias) |
|
779 | repo_class = get_backend(self.alias) | |
780 | src_url = None |
|
780 | src_url = None | |
781 | if _clone_repo: |
|
781 | if _clone_repo: | |
782 | src_url = _clone_repo.path |
|
782 | src_url = _clone_repo.path | |
783 | repo = repo_class(self._repo_path, create=True, src_url=src_url) |
|
783 | repo = repo_class(self._repo_path, create=True, src_url=src_url) | |
784 | self._cleanup_repos.append(repo) |
|
784 | self._cleanup_repos.append(repo) | |
785 |
|
785 | |||
786 | commits = commits or [ |
|
786 | commits = commits or [ | |
787 | {'message': 'Commit %s of %s' % (x, repo_name)} |
|
787 | {'message': 'Commit %s of %s' % (x, repo_name)} | |
788 | for x in xrange(number_of_commits)] |
|
788 | for x in xrange(number_of_commits)] | |
789 | _add_commits_to_repo(repo, commits) |
|
789 | _add_commits_to_repo(repo, commits) | |
790 | return repo |
|
790 | return repo | |
791 |
|
791 | |||
792 | def clone_repo(self, repo): |
|
792 | def clone_repo(self, repo): | |
793 | return self.create_repo(_clone_repo=repo) |
|
793 | return self.create_repo(_clone_repo=repo) | |
794 |
|
794 | |||
795 | def cleanup(self): |
|
795 | def cleanup(self): | |
796 | for repo in self._cleanup_repos: |
|
796 | for repo in self._cleanup_repos: | |
797 | shutil.rmtree(repo.path) |
|
797 | shutil.rmtree(repo.path) | |
798 |
|
798 | |||
799 | def new_repo_path(self): |
|
799 | def new_repo_path(self): | |
800 | repo_name = self._next_repo_name() |
|
800 | repo_name = self._next_repo_name() | |
801 | self._repo_path = get_new_dir(repo_name) |
|
801 | self._repo_path = get_new_dir(repo_name) | |
802 | return self._repo_path |
|
802 | return self._repo_path | |
803 |
|
803 | |||
804 | def _next_repo_name(self): |
|
804 | def _next_repo_name(self): | |
805 | return "%s_%s" % ( |
|
805 | return "%s_%s" % ( | |
806 | self.invalid_repo_name.sub('_', self._test_name), |
|
806 | self.invalid_repo_name.sub('_', self._test_name), | |
807 | len(self._cleanup_repos)) |
|
807 | len(self._cleanup_repos)) | |
808 |
|
808 | |||
809 | def add_file(self, repo, filename, content='Test content\n'): |
|
809 | def add_file(self, repo, filename, content='Test content\n'): | |
810 | imc = repo.in_memory_commit |
|
810 | imc = repo.in_memory_commit | |
811 | imc.add(FileNode(filename, content=content)) |
|
811 | imc.add(FileNode(filename, content=content)) | |
812 | imc.commit( |
|
812 | imc.commit( | |
813 | message=u'Automatic commit from vcsbackend fixture', |
|
813 | message=u'Automatic commit from vcsbackend fixture', | |
814 | author=u'Automatic') |
|
814 | author=u'Automatic') | |
815 |
|
815 | |||
816 | def ensure_file(self, filename, content='Test content\n'): |
|
816 | def ensure_file(self, filename, content='Test content\n'): | |
817 | assert self._cleanup_repos, "Avoid writing into vcs_test repos" |
|
817 | assert self._cleanup_repos, "Avoid writing into vcs_test repos" | |
818 | self.add_file(self.repo, filename, content) |
|
818 | self.add_file(self.repo, filename, content) | |
819 |
|
819 | |||
820 |
|
820 | |||
821 | def _add_commits_to_repo(vcs_repo, commits): |
|
821 | def _add_commits_to_repo(vcs_repo, commits): | |
822 | commit_ids = {} |
|
822 | commit_ids = {} | |
823 | if not commits: |
|
823 | if not commits: | |
824 | return commit_ids |
|
824 | return commit_ids | |
825 |
|
825 | |||
826 | imc = vcs_repo.in_memory_commit |
|
826 | imc = vcs_repo.in_memory_commit | |
827 | commit = None |
|
827 | commit = None | |
828 |
|
828 | |||
829 | for idx, commit in enumerate(commits): |
|
829 | for idx, commit in enumerate(commits): | |
830 | message = unicode(commit.get('message', 'Commit %s' % idx)) |
|
830 | message = unicode(commit.get('message', 'Commit %s' % idx)) | |
831 |
|
831 | |||
832 | for node in commit.get('added', []): |
|
832 | for node in commit.get('added', []): | |
833 | imc.add(FileNode(node.path, content=node.content)) |
|
833 | imc.add(FileNode(node.path, content=node.content)) | |
834 | for node in commit.get('changed', []): |
|
834 | for node in commit.get('changed', []): | |
835 | imc.change(FileNode(node.path, content=node.content)) |
|
835 | imc.change(FileNode(node.path, content=node.content)) | |
836 | for node in commit.get('removed', []): |
|
836 | for node in commit.get('removed', []): | |
837 | imc.remove(FileNode(node.path)) |
|
837 | imc.remove(FileNode(node.path)) | |
838 |
|
838 | |||
839 | parents = [ |
|
839 | parents = [ | |
840 | vcs_repo.get_commit(commit_id=commit_ids[p]) |
|
840 | vcs_repo.get_commit(commit_id=commit_ids[p]) | |
841 | for p in commit.get('parents', [])] |
|
841 | for p in commit.get('parents', [])] | |
842 |
|
842 | |||
843 | operations = ('added', 'changed', 'removed') |
|
843 | operations = ('added', 'changed', 'removed') | |
844 | if not any((commit.get(o) for o in operations)): |
|
844 | if not any((commit.get(o) for o in operations)): | |
845 | imc.add(FileNode('file_%s' % idx, content=message)) |
|
845 | imc.add(FileNode('file_%s' % idx, content=message)) | |
846 |
|
846 | |||
847 | commit = imc.commit( |
|
847 | commit = imc.commit( | |
848 | message=message, |
|
848 | message=message, | |
849 | author=unicode(commit.get('author', 'Automatic')), |
|
849 | author=unicode(commit.get('author', 'Automatic')), | |
850 | date=commit.get('date'), |
|
850 | date=commit.get('date'), | |
851 | branch=commit.get('branch'), |
|
851 | branch=commit.get('branch'), | |
852 | parents=parents) |
|
852 | parents=parents) | |
853 |
|
853 | |||
854 | commit_ids[commit.message] = commit.raw_id |
|
854 | commit_ids[commit.message] = commit.raw_id | |
855 |
|
855 | |||
856 | return commit_ids |
|
856 | return commit_ids | |
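A sketch of this helper's input and output; `parents` entries refer back to earlier commit messages, and the raw ids shown are placeholders::

    commit_ids = _add_commits_to_repo(vcs_repo, [
        {'message': 'first'},
        {'message': 'second', 'parents': ['first']},
    ])
    # commits without added/changed/removed nodes get a generated file_<idx>
    # result: {u'first': '<raw_id 1>', u'second': '<raw_id 2>'}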
857 |
|
857 | |||
858 |
|
858 | |||
859 | @pytest.fixture |
|
859 | @pytest.fixture | |
860 | def reposerver(request): |
|
860 | def reposerver(request): | |
861 | """ |
|
861 | """ | |
862 | Allows serving a backend repository |
|
862 | Allows serving a backend repository | |
863 | """ |
|
863 | """ | |
864 |
|
864 | |||
865 | repo_server = RepoServer() |
|
865 | repo_server = RepoServer() | |
866 | request.addfinalizer(repo_server.cleanup) |
|
866 | request.addfinalizer(repo_server.cleanup) | |
867 | return repo_server |
|
867 | return repo_server | |
868 |
|
868 | |||
869 |
|
869 | |||
870 | class RepoServer(object): |
|
870 | class RepoServer(object): | |
871 | """ |
|
871 | """ | |
872 | Utility to serve a local repository for the duration of a test case. |
|
872 | Utility to serve a local repository for the duration of a test case. | |
873 |
|
873 | |||
874 | Supports only Subversion so far. |
|
874 | Supports only Subversion so far. | |
875 | """ |
|
875 | """ | |
876 |
|
876 | |||
877 | url = None |
|
877 | url = None | |
878 |
|
878 | |||
879 | def __init__(self): |
|
879 | def __init__(self): | |
880 | self._cleanup_servers = [] |
|
880 | self._cleanup_servers = [] | |
881 |
|
881 | |||
882 | def serve(self, vcsrepo): |
|
882 | def serve(self, vcsrepo): | |
883 | if vcsrepo.alias != 'svn': |
|
883 | if vcsrepo.alias != 'svn': | |
884 | raise TypeError("Backend %s not supported" % vcsrepo.alias) |
|
884 | raise TypeError("Backend %s not supported" % vcsrepo.alias) | |
885 |
|
885 | |||
886 | proc = subprocess32.Popen( |
|
886 | proc = subprocess32.Popen( | |
887 | ['svnserve', '-d', '--foreground', '--listen-host', 'localhost', |
|
887 | ['svnserve', '-d', '--foreground', '--listen-host', 'localhost', | |
888 | '--root', vcsrepo.path]) |
|
888 | '--root', vcsrepo.path]) | |
889 | self._cleanup_servers.append(proc) |
|
889 | self._cleanup_servers.append(proc) | |
890 | self.url = 'svn://localhost' |
|
890 | self.url = 'svn://localhost' | |
891 |
|
891 | |||
892 | def cleanup(self): |
|
892 | def cleanup(self): | |
893 | for proc in self._cleanup_servers: |
|
893 | for proc in self._cleanup_servers: | |
894 | proc.terminate() |
|
894 | proc.terminate() | |
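A sketch of serving a Subversion test repository; the dump name 'minimal' comes from the TestRepoContainer example above, and the checkout step is indicative only::

    def test_serve_svn(reposerver, vcsbackend_svn):
        reposerver.serve(vcsbackend_svn['minimal'])
        assert reposerver.url == 'svn://localhost'
        # a client could now check out from reposerver.url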
895 |
|
895 | |||
896 |
|
896 | |||
897 | @pytest.fixture |
|
897 | @pytest.fixture | |
898 | def pr_util(backend, request, config_stub): |
|
898 | def pr_util(backend, request, config_stub): | |
899 | """ |
|
899 | """ | |
900 | Utility for tests of models and for functional tests around pull requests. |
|
900 | Utility for tests of models and for functional tests around pull requests. | |
901 |
|
901 | |||
902 | It gives an instance of :class:`PRTestUtility` which provides various |
|
902 | It gives an instance of :class:`PRTestUtility` which provides various | |
903 | utility methods around one pull request. |
|
903 | utility methods around one pull request. | |
904 |
|
904 | |||
905 | This fixture uses `backend` and inherits its parameterization. |
|
905 | This fixture uses `backend` and inherits its parameterization. | |
906 | """ |
|
906 | """ | |
907 |
|
907 | |||
908 | util = PRTestUtility(backend) |
|
908 | util = PRTestUtility(backend) | |
909 | request.addfinalizer(util.cleanup) |
|
909 | request.addfinalizer(util.cleanup) | |
910 |
|
910 | |||
911 | return util |
|
911 | return util | |
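A sketch of a pull-request test built on this fixture, relying on the default commits c1..c3 created by PRTestUtility below::

    def test_pull_request_creation(pr_util):
        pull_request = pr_util.create_pull_request(mergeable=True)
        assert pull_request.revisions == [pr_util.commit_ids['c2']]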
912 |
|
912 | |||
913 |
|
913 | |||
914 | class PRTestUtility(object): |
|
914 | class PRTestUtility(object): | |
915 |
|
915 | |||
916 | pull_request = None |
|
916 | pull_request = None | |
917 | pull_request_id = None |
|
917 | pull_request_id = None | |
918 | mergeable_patcher = None |
|
918 | mergeable_patcher = None | |
919 | mergeable_mock = None |
|
919 | mergeable_mock = None | |
920 | notification_patcher = None |
|
920 | notification_patcher = None | |
921 |
|
921 | |||
922 | def __init__(self, backend): |
|
922 | def __init__(self, backend): | |
923 | self.backend = backend |
|
923 | self.backend = backend | |
924 |
|
924 | |||
925 | def create_pull_request( |
|
925 | def create_pull_request( | |
926 | self, commits=None, target_head=None, source_head=None, |
|
926 | self, commits=None, target_head=None, source_head=None, | |
927 | revisions=None, approved=False, author=None, mergeable=False, |
|
927 | revisions=None, approved=False, author=None, mergeable=False, | |
928 | enable_notifications=True, name_suffix=u'', reviewers=None, |
|
928 | enable_notifications=True, name_suffix=u'', reviewers=None, | |
929 | title=u"Test", description=u"Description"): |
|
929 | title=u"Test", description=u"Description"): | |
930 | self.set_mergeable(mergeable) |
|
930 | self.set_mergeable(mergeable) | |
931 | if not enable_notifications: |
|
931 | if not enable_notifications: | |
932 | # mock notification side effect |
|
932 | # mock notification side effect | |
933 | self.notification_patcher = mock.patch( |
|
933 | self.notification_patcher = mock.patch( | |
934 | 'rhodecode.model.notification.NotificationModel.create') |
|
934 | 'rhodecode.model.notification.NotificationModel.create') | |
935 | self.notification_patcher.start() |
|
935 | self.notification_patcher.start() | |
936 |
|
936 | |||
937 | if not self.pull_request: |
|
937 | if not self.pull_request: | |
938 | if not commits: |
|
938 | if not commits: | |
939 | commits = [ |
|
939 | commits = [ | |
940 | {'message': 'c1'}, |
|
940 | {'message': 'c1'}, | |
941 | {'message': 'c2'}, |
|
941 | {'message': 'c2'}, | |
942 | {'message': 'c3'}, |
|
942 | {'message': 'c3'}, | |
943 | ] |
|
943 | ] | |
944 | target_head = 'c1' |
|
944 | target_head = 'c1' | |
945 | source_head = 'c2' |
|
945 | source_head = 'c2' | |
946 | revisions = ['c2'] |
|
946 | revisions = ['c2'] | |
947 |
|
947 | |||
948 | self.commit_ids = self.backend.create_master_repo(commits) |
|
948 | self.commit_ids = self.backend.create_master_repo(commits) | |
949 | self.target_repository = self.backend.create_repo( |
|
949 | self.target_repository = self.backend.create_repo( | |
950 | heads=[target_head], name_suffix=name_suffix) |
|
950 | heads=[target_head], name_suffix=name_suffix) | |
951 | self.source_repository = self.backend.create_repo( |
|
951 | self.source_repository = self.backend.create_repo( | |
952 | heads=[source_head], name_suffix=name_suffix) |
|
952 | heads=[source_head], name_suffix=name_suffix) | |
953 | self.author = author or UserModel().get_by_username( |
|
953 | self.author = author or UserModel().get_by_username( | |
954 | TEST_USER_ADMIN_LOGIN) |
|
954 | TEST_USER_ADMIN_LOGIN) | |
955 |
|
955 | |||
956 | model = PullRequestModel() |
|
956 | model = PullRequestModel() | |
957 | self.create_parameters = { |
|
957 | self.create_parameters = { | |
958 | 'created_by': self.author, |
|
958 | 'created_by': self.author, | |
959 | 'source_repo': self.source_repository.repo_name, |
|
959 | 'source_repo': self.source_repository.repo_name, | |
960 | 'source_ref': self._default_branch_reference(source_head), |
|
960 | 'source_ref': self._default_branch_reference(source_head), | |
961 | 'target_repo': self.target_repository.repo_name, |
|
961 | 'target_repo': self.target_repository.repo_name, | |
962 | 'target_ref': self._default_branch_reference(target_head), |
|
962 | 'target_ref': self._default_branch_reference(target_head), | |
963 | 'revisions': [self.commit_ids[r] for r in revisions], |
|
963 | 'revisions': [self.commit_ids[r] for r in revisions], | |
964 | 'reviewers': reviewers or self._get_reviewers(), |
|
964 | 'reviewers': reviewers or self._get_reviewers(), | |
965 | 'title': title, |
|
965 | 'title': title, | |
966 | 'description': description, |
|
966 | 'description': description, | |
967 | } |
|
967 | } | |
968 | self.pull_request = model.create(**self.create_parameters) |
|
968 | self.pull_request = model.create(**self.create_parameters) | |
969 | assert model.get_versions(self.pull_request) == [] |
|
969 | assert model.get_versions(self.pull_request) == [] | |
970 |
|
970 | |||
971 | self.pull_request_id = self.pull_request.pull_request_id |
|
971 | self.pull_request_id = self.pull_request.pull_request_id | |
972 |
|
972 | |||
973 | if approved: |
|
973 | if approved: | |
974 | self.approve() |
|
974 | self.approve() | |
975 |
|
975 | |||
976 | Session().add(self.pull_request) |
|
976 | Session().add(self.pull_request) | |
977 | Session().commit() |
|
977 | Session().commit() | |
978 |
|
978 | |||
979 | return self.pull_request |
|
979 | return self.pull_request | |
980 |
|
980 | |||
981 | def approve(self): |
|
981 | def approve(self): | |
982 | self.create_status_votes( |
|
982 | self.create_status_votes( | |
983 | ChangesetStatus.STATUS_APPROVED, |
|
983 | ChangesetStatus.STATUS_APPROVED, | |
984 | *self.pull_request.reviewers) |
|
984 | *self.pull_request.reviewers) | |
985 |
|
985 | |||
986 | def close(self): |
|
986 | def close(self): | |
987 | PullRequestModel().close_pull_request(self.pull_request, self.author) |
|
987 | PullRequestModel().close_pull_request(self.pull_request, self.author) | |
988 |
|
988 | |||
989 | def _default_branch_reference(self, commit_message): |
|
989 | def _default_branch_reference(self, commit_message): | |
990 | reference = '%s:%s:%s' % ( |
|
990 | reference = '%s:%s:%s' % ( | |
991 | 'branch', |
|
991 | 'branch', | |
992 | self.backend.default_branch_name, |
|
992 | self.backend.default_branch_name, | |
993 | self.commit_ids[commit_message]) |
|
993 | self.commit_ids[commit_message]) | |
994 | return reference |
|
994 | return reference | |
995 |
|
995 | |||
996 | def _get_reviewers(self): |
|
996 | def _get_reviewers(self): | |
997 | return [ |
|
997 | return [ | |
998 | (TEST_USER_REGULAR_LOGIN, ['default1'], False, []), |
|
998 | (TEST_USER_REGULAR_LOGIN, ['default1'], False, []), | |
999 | (TEST_USER_REGULAR2_LOGIN, ['default2'], False, []), |
|
999 | (TEST_USER_REGULAR2_LOGIN, ['default2'], False, []), | |
1000 | ] |
|
1000 | ] | |
1001 |
|
1001 | |||
1002 | def update_source_repository(self, head=None): |
|
1002 | def update_source_repository(self, head=None): | |
1003 | heads = [head or 'c3'] |
|
1003 | heads = [head or 'c3'] | |
1004 | self.backend.pull_heads(self.source_repository, heads=heads) |
|
1004 | self.backend.pull_heads(self.source_repository, heads=heads) | |
1005 |
|
1005 | |||
1006 | def add_one_commit(self, head=None): |
|
1006 | def add_one_commit(self, head=None): | |
1007 | self.update_source_repository(head=head) |
|
1007 | self.update_source_repository(head=head) | |
1008 | old_commit_ids = set(self.pull_request.revisions) |
|
1008 | old_commit_ids = set(self.pull_request.revisions) | |
1009 | PullRequestModel().update_commits(self.pull_request) |
|
1009 | PullRequestModel().update_commits(self.pull_request) | |
1010 | commit_ids = set(self.pull_request.revisions) |
|
1010 | commit_ids = set(self.pull_request.revisions) | |
1011 | new_commit_ids = commit_ids - old_commit_ids |
|
1011 | new_commit_ids = commit_ids - old_commit_ids | |
1012 | assert len(new_commit_ids) == 1 |
|
1012 | assert len(new_commit_ids) == 1 | |
1013 | return new_commit_ids.pop() |
|
1013 | return new_commit_ids.pop() | |
1014 |
|
1014 | |||
1015 | def remove_one_commit(self): |
|
1015 | def remove_one_commit(self): | |
1016 | assert len(self.pull_request.revisions) == 2 |
|
1016 | assert len(self.pull_request.revisions) == 2 | |
1017 | source_vcs = self.source_repository.scm_instance() |
|
1017 | source_vcs = self.source_repository.scm_instance() | |
1018 | removed_commit_id = source_vcs.commit_ids[-1] |
|
1018 | removed_commit_id = source_vcs.commit_ids[-1] | |
1019 |
|
1019 | |||
1020 | # TODO: johbo: Git and Mercurial have an inconsistent vcs api here, |
|
1020 | # TODO: johbo: Git and Mercurial have an inconsistent vcs api here, | |
1021 | # remove the if once that's sorted out. |
|
1021 | # remove the if once that's sorted out. | |
1022 | if self.backend.alias == "git": |
|
1022 | if self.backend.alias == "git": | |
1023 | kwargs = {'branch_name': self.backend.default_branch_name} |
|
1023 | kwargs = {'branch_name': self.backend.default_branch_name} | |
1024 | else: |
|
1024 | else: | |
1025 | kwargs = {} |
|
1025 | kwargs = {} | |
1026 | source_vcs.strip(removed_commit_id, **kwargs) |
|
1026 | source_vcs.strip(removed_commit_id, **kwargs) | |
1027 |
|
1027 | |||
1028 | PullRequestModel().update_commits(self.pull_request) |
|
1028 | PullRequestModel().update_commits(self.pull_request) | |
1029 | assert len(self.pull_request.revisions) == 1 |
|
1029 | assert len(self.pull_request.revisions) == 1 | |
1030 | return removed_commit_id |
|
1030 | return removed_commit_id | |
1031 |
|
1031 | |||
1032 | def create_comment(self, linked_to=None): |
|
1032 | def create_comment(self, linked_to=None): | |
1033 | comment = CommentsModel().create( |
|
1033 | comment = CommentsModel().create( | |
1034 | text=u"Test comment", |
|
1034 | text=u"Test comment", | |
1035 | repo=self.target_repository.repo_name, |
|
1035 | repo=self.target_repository.repo_name, | |
1036 | user=self.author, |
|
1036 | user=self.author, | |
1037 | pull_request=self.pull_request) |
|
1037 | pull_request=self.pull_request) | |
1038 | assert comment.pull_request_version_id is None |
|
1038 | assert comment.pull_request_version_id is None | |
1039 |
|
1039 | |||
1040 | if linked_to: |
|
1040 | if linked_to: | |
1041 | PullRequestModel()._link_comments_to_version(linked_to) |
|
1041 | PullRequestModel()._link_comments_to_version(linked_to) | |
1042 |
|
1042 | |||
1043 | return comment |
|
1043 | return comment | |
1044 |
|
1044 | |||
1045 | def create_inline_comment( |
|
1045 | def create_inline_comment( | |
1046 | self, linked_to=None, line_no=u'n1', file_path='file_1'): |
|
1046 | self, linked_to=None, line_no=u'n1', file_path='file_1'): | |
1047 | comment = CommentsModel().create( |
|
1047 | comment = CommentsModel().create( | |
1048 | text=u"Test comment", |
|
1048 | text=u"Test comment", | |
1049 | repo=self.target_repository.repo_name, |
|
1049 | repo=self.target_repository.repo_name, | |
1050 | user=self.author, |
|
1050 | user=self.author, | |
1051 | line_no=line_no, |
|
1051 | line_no=line_no, | |
1052 | f_path=file_path, |
|
1052 | f_path=file_path, | |
1053 | pull_request=self.pull_request) |
|
1053 | pull_request=self.pull_request) | |
1054 | assert comment.pull_request_version_id is None |
|
1054 | assert comment.pull_request_version_id is None | |
1055 |
|
1055 | |||
1056 | if linked_to: |
|
1056 | if linked_to: | |
1057 | PullRequestModel()._link_comments_to_version(linked_to) |
|
1057 | PullRequestModel()._link_comments_to_version(linked_to) | |
1058 |
|
1058 | |||
1059 | return comment |
|
1059 | return comment | |
1060 |
|
1060 | |||
1061 | def create_version_of_pull_request(self): |
|
1061 | def create_version_of_pull_request(self): | |
1062 | pull_request = self.create_pull_request() |
|
1062 | pull_request = self.create_pull_request() | |
1063 | version = PullRequestModel()._create_version_from_snapshot( |
|
1063 | version = PullRequestModel()._create_version_from_snapshot( | |
1064 | pull_request) |
|
1064 | pull_request) | |
1065 | return version |
|
1065 | return version | |
1066 |
|
1066 | |||
1067 | def create_status_votes(self, status, *reviewers): |
|
1067 | def create_status_votes(self, status, *reviewers): | |
1068 | for reviewer in reviewers: |
|
1068 | for reviewer in reviewers: | |
1069 | ChangesetStatusModel().set_status( |
|
1069 | ChangesetStatusModel().set_status( | |
1070 | repo=self.pull_request.target_repo, |
|
1070 | repo=self.pull_request.target_repo, | |
1071 | status=status, |
|
1071 | status=status, | |
1072 | user=reviewer.user_id, |
|
1072 | user=reviewer.user_id, | |
1073 | pull_request=self.pull_request) |
|
1073 | pull_request=self.pull_request) | |
1074 |
|
1074 | |||
1075 | def set_mergeable(self, value): |
|
1075 | def set_mergeable(self, value): | |
1076 | if not self.mergeable_patcher: |
|
1076 | if not self.mergeable_patcher: | |
1077 | self.mergeable_patcher = mock.patch.object( |
|
1077 | self.mergeable_patcher = mock.patch.object( | |
1078 | VcsSettingsModel, 'get_general_settings') |
|
1078 | VcsSettingsModel, 'get_general_settings') | |
1079 | self.mergeable_mock = self.mergeable_patcher.start() |
|
1079 | self.mergeable_mock = self.mergeable_patcher.start() | |
1080 | self.mergeable_mock.return_value = { |
|
1080 | self.mergeable_mock.return_value = { | |
1081 | 'rhodecode_pr_merge_enabled': value} |
|
1081 | 'rhodecode_pr_merge_enabled': value} | |
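The patching above is the standard `mock.patch.object` start/stop pattern: patch the settings lookup once, then steer its return value. A self-contained sketch of that pattern, assuming a stand-in `Settings` class (not RhodeCode code):

    import mock

    class Settings(object):
        def get_general_settings(self):
            return {'rhodecode_pr_merge_enabled': False}

    patcher = mock.patch.object(Settings, 'get_general_settings')
    mocked = patcher.start()
    mocked.return_value = {'rhodecode_pr_merge_enabled': True}
    assert Settings().get_general_settings()['rhodecode_pr_merge_enabled'] is True
    patcher.stop()  # the cleanup() method below stops the real patcher the same way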
1082 |
|
1082 | |||
1083 | def cleanup(self): |
|
1083 | def cleanup(self): | |
1084 | # In case the source repository is already cleaned up, the pull |
|
1084 | # In case the source repository is already cleaned up, the pull | |
1085 | # request will already be deleted. |
|
1085 | # request will already be deleted. | |
1086 | pull_request = PullRequest().get(self.pull_request_id) |
|
1086 | pull_request = PullRequest().get(self.pull_request_id) | |
1087 | if pull_request: |
|
1087 | if pull_request: | |
1088 | PullRequestModel().delete(pull_request, pull_request.author) |
|
1088 | PullRequestModel().delete(pull_request, pull_request.author) | |
1089 | Session().commit() |
|
1089 | Session().commit() | |
1090 |
|
1090 | |||
1091 | if self.notification_patcher: |
|
1091 | if self.notification_patcher: | |
1092 | self.notification_patcher.stop() |
|
1092 | self.notification_patcher.stop() | |
1093 |
|
1093 | |||
1094 | if self.mergeable_patcher: |
|
1094 | if self.mergeable_patcher: | |
1095 | self.mergeable_patcher.stop() |
|
1095 | self.mergeable_patcher.stop() | |
1096 |
|
1096 | |||
1097 |
|
1097 | |||
1098 | @pytest.fixture |
|
1098 | @pytest.fixture | |
1099 | def user_admin(baseapp): |
|
1099 | def user_admin(baseapp): | |
1100 | """ |
|
1100 | """ | |
1101 | Provides the default admin test user as an instance of `db.User`. |
|
1101 | Provides the default admin test user as an instance of `db.User`. | |
1102 | """ |
|
1102 | """ | |
1103 | user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) |
|
1103 | user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) | |
1104 | return user |
|
1104 | return user | |
1105 |
|
1105 | |||
1106 |
|
1106 | |||
1107 | @pytest.fixture |
|
1107 | @pytest.fixture | |
1108 | def user_regular(baseapp): |
|
1108 | def user_regular(baseapp): | |
1109 | """ |
|
1109 | """ | |
1110 | Provides the default regular test user as an instance of `db.User`. |
|
1110 | Provides the default regular test user as an instance of `db.User`. | |
1111 | """ |
|
1111 | """ | |
1112 | user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN) |
|
1112 | user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN) | |
1113 | return user |
|
1113 | return user | |
1114 |
|
1114 | |||
1115 |
|
1115 | |||
1116 | @pytest.fixture |
|
1116 | @pytest.fixture | |
1117 | def user_util(request, db_connection): |
|
1117 | def user_util(request, db_connection): | |
1118 | """ |
|
1118 | """ | |
1119 | Provides a wired instance of `UserUtility` with integrated cleanup. |
|
1119 | Provides a wired instance of `UserUtility` with integrated cleanup. | |
1120 | """ |
|
1120 | """ | |
1121 | utility = UserUtility(test_name=request.node.name) |
|
1121 | utility = UserUtility(test_name=request.node.name) | |
1122 | request.addfinalizer(utility.cleanup) |
|
1122 | request.addfinalizer(utility.cleanup) | |
1123 | return utility |
|
1123 | return utility | |
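A hypothetical test using this fixture could look like the sketch below; the methods are those of `UserUtility` defined next, and the permission name is only an illustrative example:

    def test_user_can_read_repo(user_util):
        user = user_util.create_user()
        repo = user_util.create_repo()
        user_util.grant_user_permission_to_repo(repo, user, 'repository.read')
        # everything created through user_util is removed by its cleanup() finalizer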
1124 |
|
1124 | |||
1125 |
|
1125 | |||
1126 | # TODO: johbo: Split this up into utilities per domain or something similar |
|
1126 | # TODO: johbo: Split this up into utilities per domain or something similar | |
1127 | class UserUtility(object): |
|
1127 | class UserUtility(object): | |
1128 |
|
1128 | |||
1129 | def __init__(self, test_name="test"): |
|
1129 | def __init__(self, test_name="test"): | |
1130 | self._test_name = self._sanitize_name(test_name) |
|
1130 | self._test_name = self._sanitize_name(test_name) | |
1131 | self.fixture = Fixture() |
|
1131 | self.fixture = Fixture() | |
1132 | self.repo_group_ids = [] |
|
1132 | self.repo_group_ids = [] | |
1133 | self.repos_ids = [] |
|
1133 | self.repos_ids = [] | |
1134 | self.user_ids = [] |
|
1134 | self.user_ids = [] | |
1135 | self.user_group_ids = [] |
|
1135 | self.user_group_ids = [] | |
1136 | self.user_repo_permission_ids = [] |
|
1136 | self.user_repo_permission_ids = [] | |
1137 | self.user_group_repo_permission_ids = [] |
|
1137 | self.user_group_repo_permission_ids = [] | |
1138 | self.user_repo_group_permission_ids = [] |
|
1138 | self.user_repo_group_permission_ids = [] | |
1139 | self.user_group_repo_group_permission_ids = [] |
|
1139 | self.user_group_repo_group_permission_ids = [] | |
1140 | self.user_user_group_permission_ids = [] |
|
1140 | self.user_user_group_permission_ids = [] | |
1141 | self.user_group_user_group_permission_ids = [] |
|
1141 | self.user_group_user_group_permission_ids = [] | |
1142 | self.user_permissions = [] |
|
1142 | self.user_permissions = [] | |
1143 |
|
1143 | |||
1144 | def _sanitize_name(self, name): |
|
1144 | def _sanitize_name(self, name): | |
1145 | for char in ['[', ']']: |
|
1145 | for char in ['[', ']']: | |
1146 | name = name.replace(char, '_') |
|
1146 | name = name.replace(char, '_') | |
1147 | return name |
|
1147 | return name | |
1148 |
|
1148 | |||
1149 | def create_repo_group( |
|
1149 | def create_repo_group( | |
1150 | self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True): |
|
1150 | self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True): | |
1151 | group_name = "{prefix}_repogroup_{count}".format( |
|
1151 | group_name = "{prefix}_repogroup_{count}".format( | |
1152 | prefix=self._test_name, |
|
1152 | prefix=self._test_name, | |
1153 | count=len(self.repo_group_ids)) |
|
1153 | count=len(self.repo_group_ids)) | |
1154 | repo_group = self.fixture.create_repo_group( |
|
1154 | repo_group = self.fixture.create_repo_group( | |
1155 | group_name, cur_user=owner) |
|
1155 | group_name, cur_user=owner) | |
1156 | if auto_cleanup: |
|
1156 | if auto_cleanup: | |
1157 | self.repo_group_ids.append(repo_group.group_id) |
|
1157 | self.repo_group_ids.append(repo_group.group_id) | |
1158 | return repo_group |
|
1158 | return repo_group | |
1159 |
|
1159 | |||
1160 | def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None, |
|
1160 | def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None, | |
1161 | auto_cleanup=True, repo_type='hg'): |
|
1161 | auto_cleanup=True, repo_type='hg'): | |
1162 | repo_name = "{prefix}_repository_{count}".format( |
|
1162 | repo_name = "{prefix}_repository_{count}".format( | |
1163 | prefix=self._test_name, |
|
1163 | prefix=self._test_name, | |
1164 | count=len(self.repos_ids)) |
|
1164 | count=len(self.repos_ids)) | |
1165 |
|
1165 | |||
1166 | repository = self.fixture.create_repo( |
|
1166 | repository = self.fixture.create_repo( | |
1167 | repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type) |
|
1167 | repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type) | |
1168 | if auto_cleanup: |
|
1168 | if auto_cleanup: | |
1169 | self.repos_ids.append(repository.repo_id) |
|
1169 | self.repos_ids.append(repository.repo_id) | |
1170 | return repository |
|
1170 | return repository | |
1171 |
|
1171 | |||
1172 | def create_user(self, auto_cleanup=True, **kwargs): |
|
1172 | def create_user(self, auto_cleanup=True, **kwargs): | |
1173 | user_name = "{prefix}_user_{count}".format( |
|
1173 | user_name = "{prefix}_user_{count}".format( | |
1174 | prefix=self._test_name, |
|
1174 | prefix=self._test_name, | |
1175 | count=len(self.user_ids)) |
|
1175 | count=len(self.user_ids)) | |
1176 | user = self.fixture.create_user(user_name, **kwargs) |
|
1176 | user = self.fixture.create_user(user_name, **kwargs) | |
1177 | if auto_cleanup: |
|
1177 | if auto_cleanup: | |
1178 | self.user_ids.append(user.user_id) |
|
1178 | self.user_ids.append(user.user_id) | |
1179 | return user |
|
1179 | return user | |
1180 |
|
1180 | |||
1181 | def create_user_with_group(self): |
|
1181 | def create_user_with_group(self): | |
1182 | user = self.create_user() |
|
1182 | user = self.create_user() | |
1183 | user_group = self.create_user_group(members=[user]) |
|
1183 | user_group = self.create_user_group(members=[user]) | |
1184 | return user, user_group |
|
1184 | return user, user_group | |
1185 |
|
1185 | |||
1186 | def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None, |
|
1186 | def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None, | |
1187 | auto_cleanup=True, **kwargs): |
|
1187 | auto_cleanup=True, **kwargs): | |
1188 | group_name = "{prefix}_usergroup_{count}".format( |
|
1188 | group_name = "{prefix}_usergroup_{count}".format( | |
1189 | prefix=self._test_name, |
|
1189 | prefix=self._test_name, | |
1190 | count=len(self.user_group_ids)) |
|
1190 | count=len(self.user_group_ids)) | |
1191 | user_group = self.fixture.create_user_group( |
|
1191 | user_group = self.fixture.create_user_group( | |
1192 | group_name, cur_user=owner, **kwargs) |
|
1192 | group_name, cur_user=owner, **kwargs) | |
1193 |
|
1193 | |||
1194 | if auto_cleanup: |
|
1194 | if auto_cleanup: | |
1195 | self.user_group_ids.append(user_group.users_group_id) |
|
1195 | self.user_group_ids.append(user_group.users_group_id) | |
1196 | if members: |
|
1196 | if members: | |
1197 | for user in members: |
|
1197 | for user in members: | |
1198 | UserGroupModel().add_user_to_group(user_group, user) |
|
1198 | UserGroupModel().add_user_to_group(user_group, user) | |
1199 | return user_group |
|
1199 | return user_group | |
1200 |
|
1200 | |||
1201 | def grant_user_permission(self, user_name, permission_name): |
|
1201 | def grant_user_permission(self, user_name, permission_name): | |
1202 | self._inherit_default_user_permissions(user_name, False) |
|
1202 | self._inherit_default_user_permissions(user_name, False) | |
1203 | self.user_permissions.append((user_name, permission_name)) |
|
1203 | self.user_permissions.append((user_name, permission_name)) | |
1204 |
|
1204 | |||
1205 | def grant_user_permission_to_repo_group( |
|
1205 | def grant_user_permission_to_repo_group( | |
1206 | self, repo_group, user, permission_name): |
|
1206 | self, repo_group, user, permission_name): | |
1207 | permission = RepoGroupModel().grant_user_permission( |
|
1207 | permission = RepoGroupModel().grant_user_permission( | |
1208 | repo_group, user, permission_name) |
|
1208 | repo_group, user, permission_name) | |
1209 | self.user_repo_group_permission_ids.append( |
|
1209 | self.user_repo_group_permission_ids.append( | |
1210 | (repo_group.group_id, user.user_id)) |
|
1210 | (repo_group.group_id, user.user_id)) | |
1211 | return permission |
|
1211 | return permission | |
1212 |
|
1212 | |||
1213 | def grant_user_group_permission_to_repo_group( |
|
1213 | def grant_user_group_permission_to_repo_group( | |
1214 | self, repo_group, user_group, permission_name): |
|
1214 | self, repo_group, user_group, permission_name): | |
1215 | permission = RepoGroupModel().grant_user_group_permission( |
|
1215 | permission = RepoGroupModel().grant_user_group_permission( | |
1216 | repo_group, user_group, permission_name) |
|
1216 | repo_group, user_group, permission_name) | |
1217 | self.user_group_repo_group_permission_ids.append( |
|
1217 | self.user_group_repo_group_permission_ids.append( | |
1218 | (repo_group.group_id, user_group.users_group_id)) |
|
1218 | (repo_group.group_id, user_group.users_group_id)) | |
1219 | return permission |
|
1219 | return permission | |
1220 |
|
1220 | |||
1221 | def grant_user_permission_to_repo( |
|
1221 | def grant_user_permission_to_repo( | |
1222 | self, repo, user, permission_name): |
|
1222 | self, repo, user, permission_name): | |
1223 | permission = RepoModel().grant_user_permission( |
|
1223 | permission = RepoModel().grant_user_permission( | |
1224 | repo, user, permission_name) |
|
1224 | repo, user, permission_name) | |
1225 | self.user_repo_permission_ids.append( |
|
1225 | self.user_repo_permission_ids.append( | |
1226 | (repo.repo_id, user.user_id)) |
|
1226 | (repo.repo_id, user.user_id)) | |
1227 | return permission |
|
1227 | return permission | |
1228 |
|
1228 | |||
1229 | def grant_user_group_permission_to_repo( |
|
1229 | def grant_user_group_permission_to_repo( | |
1230 | self, repo, user_group, permission_name): |
|
1230 | self, repo, user_group, permission_name): | |
1231 | permission = RepoModel().grant_user_group_permission( |
|
1231 | permission = RepoModel().grant_user_group_permission( | |
1232 | repo, user_group, permission_name) |
|
1232 | repo, user_group, permission_name) | |
1233 | self.user_group_repo_permission_ids.append( |
|
1233 | self.user_group_repo_permission_ids.append( | |
1234 | (repo.repo_id, user_group.users_group_id)) |
|
1234 | (repo.repo_id, user_group.users_group_id)) | |
1235 | return permission |
|
1235 | return permission | |
1236 |
|
1236 | |||
1237 | def grant_user_permission_to_user_group( |
|
1237 | def grant_user_permission_to_user_group( | |
1238 | self, target_user_group, user, permission_name): |
|
1238 | self, target_user_group, user, permission_name): | |
1239 | permission = UserGroupModel().grant_user_permission( |
|
1239 | permission = UserGroupModel().grant_user_permission( | |
1240 | target_user_group, user, permission_name) |
|
1240 | target_user_group, user, permission_name) | |
1241 | self.user_user_group_permission_ids.append( |
|
1241 | self.user_user_group_permission_ids.append( | |
1242 | (target_user_group.users_group_id, user.user_id)) |
|
1242 | (target_user_group.users_group_id, user.user_id)) | |
1243 | return permission |
|
1243 | return permission | |
1244 |
|
1244 | |||
1245 | def grant_user_group_permission_to_user_group( |
|
1245 | def grant_user_group_permission_to_user_group( | |
1246 | self, target_user_group, user_group, permission_name): |
|
1246 | self, target_user_group, user_group, permission_name): | |
1247 | permission = UserGroupModel().grant_user_group_permission( |
|
1247 | permission = UserGroupModel().grant_user_group_permission( | |
1248 | target_user_group, user_group, permission_name) |
|
1248 | target_user_group, user_group, permission_name) | |
1249 | self.user_group_user_group_permission_ids.append( |
|
1249 | self.user_group_user_group_permission_ids.append( | |
1250 | (target_user_group.users_group_id, user_group.users_group_id)) |
|
1250 | (target_user_group.users_group_id, user_group.users_group_id)) | |
1251 | return permission |
|
1251 | return permission | |
1252 |
|
1252 | |||
1253 | def revoke_user_permission(self, user_name, permission_name): |
|
1253 | def revoke_user_permission(self, user_name, permission_name): | |
1254 | self._inherit_default_user_permissions(user_name, True) |
|
1254 | self._inherit_default_user_permissions(user_name, True) | |
1255 | UserModel().revoke_perm(user_name, permission_name) |
|
1255 | UserModel().revoke_perm(user_name, permission_name) | |
1256 |
|
1256 | |||
1257 | def _inherit_default_user_permissions(self, user_name, value): |
|
1257 | def _inherit_default_user_permissions(self, user_name, value): | |
1258 | user = UserModel().get_by_username(user_name) |
|
1258 | user = UserModel().get_by_username(user_name) | |
1259 | user.inherit_default_permissions = value |
|
1259 | user.inherit_default_permissions = value | |
1260 | Session().add(user) |
|
1260 | Session().add(user) | |
1261 | Session().commit() |
|
1261 | Session().commit() | |
1262 |
|
1262 | |||
1263 | def cleanup(self): |
|
1263 | def cleanup(self): | |
1264 | self._cleanup_permissions() |
|
1264 | self._cleanup_permissions() | |
1265 | self._cleanup_repos() |
|
1265 | self._cleanup_repos() | |
1266 | self._cleanup_repo_groups() |
|
1266 | self._cleanup_repo_groups() | |
1267 | self._cleanup_user_groups() |
|
1267 | self._cleanup_user_groups() | |
1268 | self._cleanup_users() |
|
1268 | self._cleanup_users() | |
1269 |
|
1269 | |||
1270 | def _cleanup_permissions(self): |
|
1270 | def _cleanup_permissions(self): | |
1271 | if self.user_permissions: |
|
1271 | if self.user_permissions: | |
1272 | for user_name, permission_name in self.user_permissions: |
|
1272 | for user_name, permission_name in self.user_permissions: | |
1273 | self.revoke_user_permission(user_name, permission_name) |
|
1273 | self.revoke_user_permission(user_name, permission_name) | |
1274 |
|
1274 | |||
1275 | for permission in self.user_repo_permission_ids: |
|
1275 | for permission in self.user_repo_permission_ids: | |
1276 | RepoModel().revoke_user_permission(*permission) |
|
1276 | RepoModel().revoke_user_permission(*permission) | |
1277 |
|
1277 | |||
1278 | for permission in self.user_group_repo_permission_ids: |
|
1278 | for permission in self.user_group_repo_permission_ids: | |
1279 | RepoModel().revoke_user_group_permission(*permission) |
|
1279 | RepoModel().revoke_user_group_permission(*permission) | |
1280 |
|
1280 | |||
1281 | for permission in self.user_repo_group_permission_ids: |
|
1281 | for permission in self.user_repo_group_permission_ids: | |
1282 | RepoGroupModel().revoke_user_permission(*permission) |
|
1282 | RepoGroupModel().revoke_user_permission(*permission) | |
1283 |
|
1283 | |||
1284 | for permission in self.user_group_repo_group_permission_ids: |
|
1284 | for permission in self.user_group_repo_group_permission_ids: | |
1285 | RepoGroupModel().revoke_user_group_permission(*permission) |
|
1285 | RepoGroupModel().revoke_user_group_permission(*permission) | |
1286 |
|
1286 | |||
1287 | for permission in self.user_user_group_permission_ids: |
|
1287 | for permission in self.user_user_group_permission_ids: | |
1288 | UserGroupModel().revoke_user_permission(*permission) |
|
1288 | UserGroupModel().revoke_user_permission(*permission) | |
1289 |
|
1289 | |||
1290 | for permission in self.user_group_user_group_permission_ids: |
|
1290 | for permission in self.user_group_user_group_permission_ids: | |
1291 | UserGroupModel().revoke_user_group_permission(*permission) |
|
1291 | UserGroupModel().revoke_user_group_permission(*permission) | |
1292 |
|
1292 | |||
1293 | def _cleanup_repo_groups(self): |
|
1293 | def _cleanup_repo_groups(self): | |
1294 | def _repo_group_compare(first_group_id, second_group_id): |
|
1294 | def _repo_group_compare(first_group_id, second_group_id): | |
1295 | """ |
|
1295 | """ | |
1296 | Gives higher priority to the groups with the most complex paths |
|
1296 | Gives higher priority to the groups with the most complex paths | |
1297 | """ |
|
1297 | """ | |
1298 | first_group = RepoGroup.get(first_group_id) |
|
1298 | first_group = RepoGroup.get(first_group_id) | |
1299 | second_group = RepoGroup.get(second_group_id) |
|
1299 | second_group = RepoGroup.get(second_group_id) | |
1300 | first_group_parts = ( |
|
1300 | first_group_parts = ( | |
1301 | len(first_group.group_name.split('/')) if first_group else 0) |
|
1301 | len(first_group.group_name.split('/')) if first_group else 0) | |
1302 | second_group_parts = ( |
|
1302 | second_group_parts = ( | |
1303 | len(second_group.group_name.split('/')) if second_group else 0) |
|
1303 | len(second_group.group_name.split('/')) if second_group else 0) | |
1304 | return cmp(second_group_parts, first_group_parts) |
|
1304 | return cmp(second_group_parts, first_group_parts) | |
1305 |
|
1305 | |||
1306 | sorted_repo_group_ids = sorted( |
|
1306 | sorted_repo_group_ids = sorted( | |
1307 | self.repo_group_ids, cmp=_repo_group_compare) |
|
1307 | self.repo_group_ids, cmp=_repo_group_compare) | |
1308 | for repo_group_id in sorted_repo_group_ids: |
|
1308 | for repo_group_id in sorted_repo_group_ids: | |
1309 | self.fixture.destroy_repo_group(repo_group_id) |
|
1309 | self.fixture.destroy_repo_group(repo_group_id) | |
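The `cmp=` argument used above exists only in Python 2; the same ordering (deepest group paths destroyed first, so children go before their parents) can be sketched with a key function, assuming it replaces the comparator inside `_cleanup_repo_groups`:

    def _group_depth(group_id):
        group = RepoGroup.get(group_id)
        return len(group.group_name.split('/')) if group else 0

    sorted_repo_group_ids = sorted(
        self.repo_group_ids, key=_group_depth, reverse=True)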
1310 |
|
1310 | |||
1311 | def _cleanup_repos(self): |
|
1311 | def _cleanup_repos(self): | |
1312 | sorted_repos_ids = sorted(self.repos_ids) |
|
1312 | sorted_repos_ids = sorted(self.repos_ids) | |
1313 | for repo_id in sorted_repos_ids: |
|
1313 | for repo_id in sorted_repos_ids: | |
1314 | self.fixture.destroy_repo(repo_id) |
|
1314 | self.fixture.destroy_repo(repo_id) | |
1315 |
|
1315 | |||
1316 | def _cleanup_user_groups(self): |
|
1316 | def _cleanup_user_groups(self): | |
1317 | def _user_group_compare(first_group_id, second_group_id): |
|
1317 | def _user_group_compare(first_group_id, second_group_id): | |
1318 | """ |
|
1318 | """ | |
1319 | Gives higher priority to the groups with the most complex paths |
|
1319 | Gives higher priority to the groups with the most complex paths | |
1320 | """ |
|
1320 | """ | |
1321 | first_group = UserGroup.get(first_group_id) |
|
1321 | first_group = UserGroup.get(first_group_id) | |
1322 | second_group = UserGroup.get(second_group_id) |
|
1322 | second_group = UserGroup.get(second_group_id) | |
1323 | first_group_parts = ( |
|
1323 | first_group_parts = ( | |
1324 | len(first_group.users_group_name.split('/')) |
|
1324 | len(first_group.users_group_name.split('/')) | |
1325 | if first_group else 0) |
|
1325 | if first_group else 0) | |
1326 | second_group_parts = ( |
|
1326 | second_group_parts = ( | |
1327 | len(second_group.users_group_name.split('/')) |
|
1327 | len(second_group.users_group_name.split('/')) | |
1328 | if second_group else 0) |
|
1328 | if second_group else 0) | |
1329 | return cmp(second_group_parts, first_group_parts) |
|
1329 | return cmp(second_group_parts, first_group_parts) | |
1330 |
|
1330 | |||
1331 | sorted_user_group_ids = sorted( |
|
1331 | sorted_user_group_ids = sorted( | |
1332 | self.user_group_ids, cmp=_user_group_compare) |
|
1332 | self.user_group_ids, cmp=_user_group_compare) | |
1333 | for user_group_id in sorted_user_group_ids: |
|
1333 | for user_group_id in sorted_user_group_ids: | |
1334 | self.fixture.destroy_user_group(user_group_id) |
|
1334 | self.fixture.destroy_user_group(user_group_id) | |
1335 |
|
1335 | |||
1336 | def _cleanup_users(self): |
|
1336 | def _cleanup_users(self): | |
1337 | for user_id in self.user_ids: |
|
1337 | for user_id in self.user_ids: | |
1338 | self.fixture.destroy_user(user_id) |
|
1338 | self.fixture.destroy_user(user_id) | |
1339 |
|
1339 | |||
1340 |
|
1340 | |||
1341 | # TODO: Think about moving this into a pytest-pyro package and make it a |
|
1341 | # TODO: Think about moving this into a pytest-pyro package and make it a | |
1342 | # pytest plugin |
|
1342 | # pytest plugin | |
1343 | @pytest.hookimpl(tryfirst=True, hookwrapper=True) |
|
1343 | @pytest.hookimpl(tryfirst=True, hookwrapper=True) | |
1344 | def pytest_runtest_makereport(item, call): |
|
1344 | def pytest_runtest_makereport(item, call): | |
1345 | """ |
|
1345 | """ | |
1346 | Adds the remote traceback if the exception carries this information. |
|
1346 | Adds the remote traceback if the exception carries this information. | |
1347 |
|
1347 | |||
1348 | VCSServer attaches this information as the attribute `_vcs_server_traceback` |
|
1348 | VCSServer attaches this information as the attribute `_vcs_server_traceback` | |
1349 | to the exception instance. |
|
1349 | to the exception instance. | |
1350 | """ |
|
1350 | """ | |
1351 | outcome = yield |
|
1351 | outcome = yield | |
1352 | report = outcome.get_result() |
|
1352 | report = outcome.get_result() | |
1353 | if call.excinfo: |
|
1353 | if call.excinfo: | |
1354 | _add_vcsserver_remote_traceback(report, call.excinfo.value) |
|
1354 | _add_vcsserver_remote_traceback(report, call.excinfo.value) | |
1355 |
|
1355 | |||
1356 |
|
1356 | |||
1357 | def _add_vcsserver_remote_traceback(report, exc): |
|
1357 | def _add_vcsserver_remote_traceback(report, exc): | |
1358 | vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None) |
|
1358 | vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None) | |
1359 |
|
1359 | |||
1360 | if vcsserver_traceback: |
|
1360 | if vcsserver_traceback: | |
1361 | section = 'VCSServer remote traceback ' + report.when |
|
1361 | section = 'VCSServer remote traceback ' + report.when | |
1362 | report.sections.append((section, vcsserver_traceback)) |
|
1362 | report.sections.append((section, vcsserver_traceback)) | |
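The hook above only looks for an optional attribute on the raised exception; a tiny illustration of that contract (the exception class and traceback text are made up):

    class RemoteError(Exception):
        pass

    exc = RemoteError('boom')
    exc._vcs_server_traceback = 'Traceback (most recent call last): ...'
    assert getattr(exc, '_vcs_server_traceback', None) is not None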
1363 |
|
1363 | |||
1364 |
|
1364 | |||
1365 | @pytest.fixture(scope='session') |
|
1365 | @pytest.fixture(scope='session') | |
1366 | def testrun(): |
|
1366 | def testrun(): | |
1367 | return { |
|
1367 | return { | |
1368 | 'uuid': uuid.uuid4(), |
|
1368 | 'uuid': uuid.uuid4(), | |
1369 | 'start': datetime.datetime.utcnow().isoformat(), |
|
1369 | 'start': datetime.datetime.utcnow().isoformat(), | |
1370 | 'timestamp': int(time.time()), |
|
1370 | 'timestamp': int(time.time()), | |
1371 | } |
|
1371 | } | |
1372 |
|
1372 | |||
1373 |
|
1373 | |||
1374 | @pytest.fixture(autouse=True) |
|
1374 | @pytest.fixture(autouse=True) | |
1375 | def collect_appenlight_stats(request, testrun): |
|
1375 | def collect_appenlight_stats(request, testrun): | |
1376 | """ |
|
1376 | """ | |
1377 | This fixture reports memory consumption of single tests. |
|
1377 | This fixture reports memory consumption of single tests. | |
1378 |
|
1378 | |||
1379 | It gathers data based on `psutil` and sends them to Appenlight. The option |
|
1379 | It gathers data based on `psutil` and sends them to Appenlight. The option | |
1380 | ``--ae`` has to be used to enable this fixture and the API key for your |
|
1380 | ``--ae`` has to be used to enable this fixture and the API key for your | |
1381 | application has to be provided in ``--ae-key``. |
|
1381 | application has to be provided in ``--ae-key``. | |
1382 | """ |
|
1382 | """ | |
1383 | try: |
|
1383 | try: | |
1384 | # cygwin does not yet have psutil support. |
|
1384 | # cygwin does not yet have psutil support. | |
1385 | import psutil |
|
1385 | import psutil | |
1386 | except ImportError: |
|
1386 | except ImportError: | |
1387 | return |
|
1387 | return | |
1388 |
|
1388 | |||
1389 | if not request.config.getoption('--appenlight'): |
|
1389 | if not request.config.getoption('--appenlight'): | |
1390 | return |
|
1390 | return | |
1391 | else: |
|
1391 | else: | |
1392 | # Only request the baseapp fixture if appenlight tracking is |
|
1392 | # Only request the baseapp fixture if appenlight tracking is | |
1393 | # enabled. This will speed up a test run of unit tests by 2 to 3 |
|
1393 | # enabled. This will speed up a test run of unit tests by 2 to 3 | |
1394 | # seconds if appenlight is not enabled. |
|
1394 | # seconds if appenlight is not enabled. | |
1395 | baseapp = request.getfuncargvalue("baseapp") |
|
1395 | baseapp = request.getfuncargvalue("baseapp") | |
1396 | url = '{}/api/logs'.format(request.config.getoption('--appenlight-url')) |
|
1396 | url = '{}/api/logs'.format(request.config.getoption('--appenlight-url')) | |
1397 | client = AppenlightClient( |
|
1397 | client = AppenlightClient( | |
1398 | url=url, |
|
1398 | url=url, | |
1399 | api_key=request.config.getoption('--appenlight-api-key'), |
|
1399 | api_key=request.config.getoption('--appenlight-api-key'), | |
1400 | namespace=request.node.nodeid, |
|
1400 | namespace=request.node.nodeid, | |
1401 | request=str(testrun['uuid']), |
|
1401 | request=str(testrun['uuid']), | |
1402 | testrun=testrun) |
|
1402 | testrun=testrun) | |
1403 |
|
1403 | |||
1404 | client.collect({ |
|
1404 | client.collect({ | |
1405 | 'message': "Starting", |
|
1405 | 'message': "Starting", | |
1406 | }) |
|
1406 | }) | |
1407 |
|
1407 | |||
1408 | server_and_port = baseapp.config.get_settings()['vcs.server'] |
|
1408 | server_and_port = baseapp.config.get_settings()['vcs.server'] | |
1409 | protocol = baseapp.config.get_settings()['vcs.server.protocol'] |
|
1409 | protocol = baseapp.config.get_settings()['vcs.server.protocol'] | |
1410 | server = create_vcsserver_proxy(server_and_port, protocol) |
|
1410 | server = create_vcsserver_proxy(server_and_port, protocol) | |
1411 | with server: |
|
1411 | with server: | |
1412 | vcs_pid = server.get_pid() |
|
1412 | vcs_pid = server.get_pid() | |
1413 | server.run_gc() |
|
1413 | server.run_gc() | |
1414 | vcs_process = psutil.Process(vcs_pid) |
|
1414 | vcs_process = psutil.Process(vcs_pid) | |
1415 | mem = vcs_process.memory_info() |
|
1415 | mem = vcs_process.memory_info() | |
1416 | client.tag_before('vcsserver.rss', mem.rss) |
|
1416 | client.tag_before('vcsserver.rss', mem.rss) | |
1417 | client.tag_before('vcsserver.vms', mem.vms) |
|
1417 | client.tag_before('vcsserver.vms', mem.vms) | |
1418 |
|
1418 | |||
1419 | test_process = psutil.Process() |
|
1419 | test_process = psutil.Process() | |
1420 | mem = test_process.memory_info() |
|
1420 | mem = test_process.memory_info() | |
1421 | client.tag_before('test.rss', mem.rss) |
|
1421 | client.tag_before('test.rss', mem.rss) | |
1422 | client.tag_before('test.vms', mem.vms) |
|
1422 | client.tag_before('test.vms', mem.vms) | |
1423 |
|
1423 | |||
1424 | client.tag_before('time', time.time()) |
|
1424 | client.tag_before('time', time.time()) | |
1425 |
|
1425 | |||
1426 | @request.addfinalizer |
|
1426 | @request.addfinalizer | |
1427 | def send_stats(): |
|
1427 | def send_stats(): | |
1428 | client.tag_after('time', time.time()) |
|
1428 | client.tag_after('time', time.time()) | |
1429 | with server: |
|
1429 | with server: | |
1430 | gc_stats = server.run_gc() |
|
1430 | gc_stats = server.run_gc() | |
1431 | for tag, value in gc_stats.items(): |
|
1431 | for tag, value in gc_stats.items(): | |
1432 | client.tag_after(tag, value) |
|
1432 | client.tag_after(tag, value) | |
1433 | mem = vcs_process.memory_info() |
|
1433 | mem = vcs_process.memory_info() | |
1434 | client.tag_after('vcsserver.rss', mem.rss) |
|
1434 | client.tag_after('vcsserver.rss', mem.rss) | |
1435 | client.tag_after('vcsserver.vms', mem.vms) |
|
1435 | client.tag_after('vcsserver.vms', mem.vms) | |
1436 |
|
1436 | |||
1437 | mem = test_process.memory_info() |
|
1437 | mem = test_process.memory_info() | |
1438 | client.tag_after('test.rss', mem.rss) |
|
1438 | client.tag_after('test.rss', mem.rss) | |
1439 | client.tag_after('test.vms', mem.vms) |
|
1439 | client.tag_after('test.vms', mem.vms) | |
1440 |
|
1440 | |||
1441 | client.collect({ |
|
1441 | client.collect({ | |
1442 | 'message': "Finished", |
|
1442 | 'message': "Finished", | |
1443 | }) |
|
1443 | }) | |
1444 | client.send_stats() |
|
1444 | client.send_stats() | |
1445 |
|
1445 | |||
1446 | return client |
|
1446 | return client | |
1447 |
|
1447 | |||
1448 |
|
1448 | |||
1449 | class AppenlightClient(): |
|
1449 | class AppenlightClient(): | |
1450 |
|
1450 | |||
1451 | url_template = '{url}?protocol_version=0.5' |
|
1451 | url_template = '{url}?protocol_version=0.5' | |
1452 |
|
1452 | |||
1453 | def __init__( |
|
1453 | def __init__( | |
1454 | self, url, api_key, add_server=True, add_timestamp=True, |
|
1454 | self, url, api_key, add_server=True, add_timestamp=True, | |
1455 | namespace=None, request=None, testrun=None): |
|
1455 | namespace=None, request=None, testrun=None): | |
1456 | self.url = self.url_template.format(url=url) |
|
1456 | self.url = self.url_template.format(url=url) | |
1457 | self.api_key = api_key |
|
1457 | self.api_key = api_key | |
1458 | self.add_server = add_server |
|
1458 | self.add_server = add_server | |
1459 | self.add_timestamp = add_timestamp |
|
1459 | self.add_timestamp = add_timestamp | |
1460 | self.namespace = namespace |
|
1460 | self.namespace = namespace | |
1461 | self.request = request |
|
1461 | self.request = request | |
1462 | self.server = socket.getfqdn(socket.gethostname()) |
|
1462 | self.server = socket.getfqdn(socket.gethostname()) | |
1463 | self.tags_before = {} |
|
1463 | self.tags_before = {} | |
1464 | self.tags_after = {} |
|
1464 | self.tags_after = {} | |
1465 | self.stats = [] |
|
1465 | self.stats = [] | |
1466 | self.testrun = testrun or {} |
|
1466 | self.testrun = testrun or {} | |
1467 |
|
1467 | |||
1468 | def tag_before(self, tag, value): |
|
1468 | def tag_before(self, tag, value): | |
1469 | self.tags_before[tag] = value |
|
1469 | self.tags_before[tag] = value | |
1470 |
|
1470 | |||
1471 | def tag_after(self, tag, value): |
|
1471 | def tag_after(self, tag, value): | |
1472 | self.tags_after[tag] = value |
|
1472 | self.tags_after[tag] = value | |
1473 |
|
1473 | |||
1474 | def collect(self, data): |
|
1474 | def collect(self, data): | |
1475 | if self.add_server: |
|
1475 | if self.add_server: | |
1476 | data.setdefault('server', self.server) |
|
1476 | data.setdefault('server', self.server) | |
1477 | if self.add_timestamp: |
|
1477 | if self.add_timestamp: | |
1478 | data.setdefault('date', datetime.datetime.utcnow().isoformat()) |
|
1478 | data.setdefault('date', datetime.datetime.utcnow().isoformat()) | |
1479 | if self.namespace: |
|
1479 | if self.namespace: | |
1480 | data.setdefault('namespace', self.namespace) |
|
1480 | data.setdefault('namespace', self.namespace) | |
1481 | if self.request: |
|
1481 | if self.request: | |
1482 | data.setdefault('request', self.request) |
|
1482 | data.setdefault('request', self.request) | |
1483 | self.stats.append(data) |
|
1483 | self.stats.append(data) | |
1484 |
|
1484 | |||
1485 | def send_stats(self): |
|
1485 | def send_stats(self): | |
1486 | tags = [ |
|
1486 | tags = [ | |
1487 | ('testrun', self.request), |
|
1487 | ('testrun', self.request), | |
1488 | ('testrun.start', self.testrun['start']), |
|
1488 | ('testrun.start', self.testrun['start']), | |
1489 | ('testrun.timestamp', self.testrun['timestamp']), |
|
1489 | ('testrun.timestamp', self.testrun['timestamp']), | |
1490 | ('test', self.namespace), |
|
1490 | ('test', self.namespace), | |
1491 | ] |
|
1491 | ] | |
1492 | for key, value in self.tags_before.items(): |
|
1492 | for key, value in self.tags_before.items(): | |
1493 | tags.append((key + '.before', value)) |
|
1493 | tags.append((key + '.before', value)) | |
1494 | try: |
|
1494 | try: | |
1495 | delta = self.tags_after[key] - value |
|
1495 | delta = self.tags_after[key] - value | |
1496 | tags.append((key + '.delta', delta)) |
|
1496 | tags.append((key + '.delta', delta)) | |
1497 | except Exception: |
|
1497 | except Exception: | |
1498 | pass |
|
1498 | pass | |
1499 | for key, value in self.tags_after.items(): |
|
1499 | for key, value in self.tags_after.items(): | |
1500 | tags.append((key + '.after', value)) |
|
1500 | tags.append((key + '.after', value)) | |
1501 | self.collect({ |
|
1501 | self.collect({ | |
1502 | 'message': "Collected tags", |
|
1502 | 'message': "Collected tags", | |
1503 | 'tags': tags, |
|
1503 | 'tags': tags, | |
1504 | }) |
|
1504 | }) | |
1505 |
|
1505 | |||
1506 | response = requests.post( |
|
1506 | response = requests.post( | |
1507 | self.url, |
|
1507 | self.url, | |
1508 | headers={ |
|
1508 | headers={ | |
1509 | 'X-appenlight-api-key': self.api_key}, |
|
1509 | 'X-appenlight-api-key': self.api_key}, | |
1510 | json=self.stats, |
|
1510 | json=self.stats, | |
1511 | ) |
|
1511 | ) | |
1512 |
|
1512 | |||
1513 | if not response.status_code == 200: |
|
1513 | if not response.status_code == 200: | |
1514 | pprint.pprint(self.stats) |
|
1514 | pprint.pprint(self.stats) | |
1515 | print(response.headers) |
|
1515 | print(response.headers) | |
1516 | print(response.text) |
|
1516 | print(response.text) | |
1517 | raise Exception('Sending to appenlight failed') |
|
1517 | raise Exception('Sending to appenlight failed') | |
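To make the tag handling above concrete: each key tagged both before and after yields a `.before`, `.delta` and `.after` entry. A small worked example with invented numbers:

    tags_before = {'test.rss': 1000}
    tags_after = {'test.rss': 1500}
    tags = []
    for key, value in tags_before.items():
        tags.append((key + '.before', value))
        tags.append((key + '.delta', tags_after[key] - value))
    for key, value in tags_after.items():
        tags.append((key + '.after', value))
    assert ('test.rss.delta', 500) in tags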
1518 |
|
1518 | |||
1519 |
|
1519 | |||
1520 | @pytest.fixture |
|
1520 | @pytest.fixture | |
1521 | def gist_util(request, db_connection): |
|
1521 | def gist_util(request, db_connection): | |
1522 | """ |
|
1522 | """ | |
1523 | Provides a wired instance of `GistUtility` with integrated cleanup. |
|
1523 | Provides a wired instance of `GistUtility` with integrated cleanup. | |
1524 | """ |
|
1524 | """ | |
1525 | utility = GistUtility() |
|
1525 | utility = GistUtility() | |
1526 | request.addfinalizer(utility.cleanup) |
|
1526 | request.addfinalizer(utility.cleanup) | |
1527 | return utility |
|
1527 | return utility | |
1528 |
|
1528 | |||
1529 |
|
1529 | |||
1530 | class GistUtility(object): |
|
1530 | class GistUtility(object): | |
1531 | def __init__(self): |
|
1531 | def __init__(self): | |
1532 | self.fixture = Fixture() |
|
1532 | self.fixture = Fixture() | |
1533 | self.gist_ids = [] |
|
1533 | self.gist_ids = [] | |
1534 |
|
1534 | |||
1535 | def create_gist(self, **kwargs): |
|
1535 | def create_gist(self, **kwargs): | |
1536 | gist = self.fixture.create_gist(**kwargs) |
|
1536 | gist = self.fixture.create_gist(**kwargs) | |
1537 | self.gist_ids.append(gist.gist_id) |
|
1537 | self.gist_ids.append(gist.gist_id) | |
1538 | return gist |
|
1538 | return gist | |
1539 |
|
1539 | |||
1540 | def cleanup(self): |
|
1540 | def cleanup(self): | |
1541 | for id_ in self.gist_ids: |
|
1541 | for id_ in self.gist_ids: | |
1542 | self.fixture.destroy_gists(str(id_)) |
|
1542 | self.fixture.destroy_gists(str(id_)) | |
1543 |
|
1543 | |||
1544 |
|
1544 | |||
1545 | @pytest.fixture |
|
1545 | @pytest.fixture | |
1546 | def enabled_backends(request): |
|
1546 | def enabled_backends(request): | |
1547 | backends = request.config.option.backends |
|
1547 | backends = request.config.option.backends | |
1548 | return backends[:] |
|
1548 | return backends[:] | |
1549 |
|
1549 | |||
1550 |
|
1550 | |||
1551 | @pytest.fixture |
|
1551 | @pytest.fixture | |
1552 | def settings_util(request, db_connection): |
|
1552 | def settings_util(request, db_connection): | |
1553 | """ |
|
1553 | """ | |
1554 | Provides a wired instance of `SettingsUtility` with integrated cleanup. |
|
1554 | Provides a wired instance of `SettingsUtility` with integrated cleanup. | |
1555 | """ |
|
1555 | """ | |
1556 | utility = SettingsUtility() |
|
1556 | utility = SettingsUtility() | |
1557 | request.addfinalizer(utility.cleanup) |
|
1557 | request.addfinalizer(utility.cleanup) | |
1558 | return utility |
|
1558 | return utility | |
1559 |
|
1559 | |||
1560 |
|
1560 | |||
1561 | class SettingsUtility(object): |
|
1561 | class SettingsUtility(object): | |
1562 | def __init__(self): |
|
1562 | def __init__(self): | |
1563 | self.rhodecode_ui_ids = [] |
|
1563 | self.rhodecode_ui_ids = [] | |
1564 | self.rhodecode_setting_ids = [] |
|
1564 | self.rhodecode_setting_ids = [] | |
1565 | self.repo_rhodecode_ui_ids = [] |
|
1565 | self.repo_rhodecode_ui_ids = [] | |
1566 | self.repo_rhodecode_setting_ids = [] |
|
1566 | self.repo_rhodecode_setting_ids = [] | |
1567 |
|
1567 | |||
1568 | def create_repo_rhodecode_ui( |
|
1568 | def create_repo_rhodecode_ui( | |
1569 | self, repo, section, value, key=None, active=True, cleanup=True): |
|
1569 | self, repo, section, value, key=None, active=True, cleanup=True): | |
1570 | key = key or hashlib.sha1( |
|
1570 | key = key or hashlib.sha1( | |
1571 | '{}{}{}'.format(section, value, repo.repo_id)).hexdigest() |
|
1571 | '{}{}{}'.format(section, value, repo.repo_id)).hexdigest() | |
1572 |
|
1572 | |||
1573 | setting = RepoRhodeCodeUi() |
|
1573 | setting = RepoRhodeCodeUi() | |
1574 | setting.repository_id = repo.repo_id |
|
1574 | setting.repository_id = repo.repo_id | |
1575 | setting.ui_section = section |
|
1575 | setting.ui_section = section | |
1576 | setting.ui_value = value |
|
1576 | setting.ui_value = value | |
1577 | setting.ui_key = key |
|
1577 | setting.ui_key = key | |
1578 | setting.ui_active = active |
|
1578 | setting.ui_active = active | |
1579 | Session().add(setting) |
|
1579 | Session().add(setting) | |
1580 | Session().commit() |
|
1580 | Session().commit() | |
1581 |
|
1581 | |||
1582 | if cleanup: |
|
1582 | if cleanup: | |
1583 | self.repo_rhodecode_ui_ids.append(setting.ui_id) |
|
1583 | self.repo_rhodecode_ui_ids.append(setting.ui_id) | |
1584 | return setting |
|
1584 | return setting | |
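The default `ui_key` above is a SHA1 hex digest over the concatenated section, value and repository id (Python 2 string semantics). A sketch with made-up inputs:

    import hashlib

    key = hashlib.sha1('{}{}{}'.format('hooks', 'python:example_hook', 42)).hexdigest()
    # a 40-character hex string used as ui_key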
1585 |
|
1585 | |||
1586 | def create_rhodecode_ui( |
|
1586 | def create_rhodecode_ui( | |
1587 | self, section, value, key=None, active=True, cleanup=True): |
|
1587 | self, section, value, key=None, active=True, cleanup=True): | |
1588 | key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest() |
|
1588 | key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest() | |
1589 |
|
1589 | |||
1590 | setting = RhodeCodeUi() |
|
1590 | setting = RhodeCodeUi() | |
1591 | setting.ui_section = section |
|
1591 | setting.ui_section = section | |
1592 | setting.ui_value = value |
|
1592 | setting.ui_value = value | |
1593 | setting.ui_key = key |
|
1593 | setting.ui_key = key | |
1594 | setting.ui_active = active |
|
1594 | setting.ui_active = active | |
1595 | Session().add(setting) |
|
1595 | Session().add(setting) | |
1596 | Session().commit() |
|
1596 | Session().commit() | |
1597 |
|
1597 | |||
1598 | if cleanup: |
|
1598 | if cleanup: | |
1599 | self.rhodecode_ui_ids.append(setting.ui_id) |
|
1599 | self.rhodecode_ui_ids.append(setting.ui_id) | |
1600 | return setting |
|
1600 | return setting | |
1601 |
|
1601 | |||
1602 | def create_repo_rhodecode_setting( |
|
1602 | def create_repo_rhodecode_setting( | |
1603 | self, repo, name, value, type_, cleanup=True): |
|
1603 | self, repo, name, value, type_, cleanup=True): | |
1604 | setting = RepoRhodeCodeSetting( |
|
1604 | setting = RepoRhodeCodeSetting( | |
1605 | repo.repo_id, key=name, val=value, type=type_) |
|
1605 | repo.repo_id, key=name, val=value, type=type_) | |
1606 | Session().add(setting) |
|
1606 | Session().add(setting) | |
1607 | Session().commit() |
|
1607 | Session().commit() | |
1608 |
|
1608 | |||
1609 | if cleanup: |
|
1609 | if cleanup: | |
1610 | self.repo_rhodecode_setting_ids.append(setting.app_settings_id) |
|
1610 | self.repo_rhodecode_setting_ids.append(setting.app_settings_id) | |
1611 | return setting |
|
1611 | return setting | |
1612 |
|
1612 | |||
1613 | def create_rhodecode_setting(self, name, value, type_, cleanup=True): |
|
1613 | def create_rhodecode_setting(self, name, value, type_, cleanup=True): | |
1614 | setting = RhodeCodeSetting(key=name, val=value, type=type_) |
|
1614 | setting = RhodeCodeSetting(key=name, val=value, type=type_) | |
1615 | Session().add(setting) |
|
1615 | Session().add(setting) | |
1616 | Session().commit() |
|
1616 | Session().commit() | |
1617 |
|
1617 | |||
1618 | if cleanup: |
|
1618 | if cleanup: | |
1619 | self.rhodecode_setting_ids.append(setting.app_settings_id) |
|
1619 | self.rhodecode_setting_ids.append(setting.app_settings_id) | |
1620 |
|
1620 | |||
1621 | return setting |
|
1621 | return setting | |
1622 |
|
1622 | |||
1623 | def cleanup(self): |
|
1623 | def cleanup(self): | |
1624 | for id_ in self.rhodecode_ui_ids: |
|
1624 | for id_ in self.rhodecode_ui_ids: | |
1625 | setting = RhodeCodeUi.get(id_) |
|
1625 | setting = RhodeCodeUi.get(id_) | |
1626 | Session().delete(setting) |
|
1626 | Session().delete(setting) | |
1627 |
|
1627 | |||
1628 | for id_ in self.rhodecode_setting_ids: |
|
1628 | for id_ in self.rhodecode_setting_ids: | |
1629 | setting = RhodeCodeSetting.get(id_) |
|
1629 | setting = RhodeCodeSetting.get(id_) | |
1630 | Session().delete(setting) |
|
1630 | Session().delete(setting) | |
1631 |
|
1631 | |||
1632 | for id_ in self.repo_rhodecode_ui_ids: |
|
1632 | for id_ in self.repo_rhodecode_ui_ids: | |
1633 | setting = RepoRhodeCodeUi.get(id_) |
|
1633 | setting = RepoRhodeCodeUi.get(id_) | |
1634 | Session().delete(setting) |
|
1634 | Session().delete(setting) | |
1635 |
|
1635 | |||
1636 | for id_ in self.repo_rhodecode_setting_ids: |
|
1636 | for id_ in self.repo_rhodecode_setting_ids: | |
1637 | setting = RepoRhodeCodeSetting.get(id_) |
|
1637 | setting = RepoRhodeCodeSetting.get(id_) | |
1638 | Session().delete(setting) |
|
1638 | Session().delete(setting) | |
1639 |
|
1639 | |||
1640 | Session().commit() |
|
1640 | Session().commit() | |
1641 |
|
1641 | |||
1642 |
|
1642 | |||
1643 | @pytest.fixture |
|
1643 | @pytest.fixture | |
1644 | def no_notifications(request): |
|
1644 | def no_notifications(request): | |
1645 | notification_patcher = mock.patch( |
|
1645 | notification_patcher = mock.patch( | |
1646 | 'rhodecode.model.notification.NotificationModel.create') |
|
1646 | 'rhodecode.model.notification.NotificationModel.create') | |
1647 | notification_patcher.start() |
|
1647 | notification_patcher.start() | |
1648 | request.addfinalizer(notification_patcher.stop) |
|
1648 | request.addfinalizer(notification_patcher.stop) | |
1649 |
|
1649 | |||
1650 |
|
1650 | |||
1651 | @pytest.fixture(scope='session') |
|
1651 | @pytest.fixture(scope='session') | |
1652 | def repeat(request): |
|
1652 | def repeat(request): | |
1653 | """ |
|
1653 | """ | |
1654 | The number of repetitions is based on this fixture. |
|
1654 | The number of repetitions is based on this fixture. | |
1655 |
|
1655 | |||
1656 | Slower calls may divide it by 10 or 100. It is chosen so that the |
|
1656 | Slower calls may divide it by 10 or 100. It is chosen so that the | |
1657 | tests are not too slow in our default test suite. |
|
1657 | tests are not too slow in our default test suite. | |
1658 | """ |
|
1658 | """ | |
1659 | return request.config.getoption('--repeat') |
|
1659 | return request.config.getoption('--repeat') | |
1660 |
|
1660 | |||
1661 |
|
1661 | |||
1662 | @pytest.fixture |
|
1662 | @pytest.fixture | |
1663 | def rhodecode_fixtures(): |
|
1663 | def rhodecode_fixtures(): | |
1664 | return Fixture() |
|
1664 | return Fixture() | |
1665 |
|
1665 | |||
1666 |
|
1666 | |||
1667 | @pytest.fixture |
|
1667 | @pytest.fixture | |
1668 | def context_stub(): |
|
1668 | def context_stub(): | |
1669 | """ |
|
1669 | """ | |
1670 | Stub context object. |
|
1670 | Stub context object. | |
1671 | """ |
|
1671 | """ | |
1672 | context = pyramid.testing.DummyResource() |
|
1672 | context = pyramid.testing.DummyResource() | |
1673 | return context |
|
1673 | return context | |
1674 |
|
1674 | |||
1675 |
|
1675 | |||
1676 | @pytest.fixture |
|
1676 | @pytest.fixture | |
1677 | def request_stub(): |
|
1677 | def request_stub(): | |
1678 | """ |
|
1678 | """ | |
1679 | Stub request object. |
|
1679 | Stub request object. | |
1680 | """ |
|
1680 | """ | |
1681 | from rhodecode.lib.base import bootstrap_request |
|
1681 | from rhodecode.lib.base import bootstrap_request | |
1682 | request = bootstrap_request(scheme='https') |
|
1682 | request = bootstrap_request(scheme='https') | |
1683 | return request |
|
1683 | return request | |
1684 |
|
1684 | |||
1685 |
|
1685 | |||
1686 | @pytest.fixture |
|
1686 | @pytest.fixture | |
1687 | def config_stub(request, request_stub): |
|
1687 | def config_stub(request, request_stub): | |
1688 | """ |
|
1688 | """ | |
1689 | Set up pyramid.testing and return the Configurator. |
|
1689 | Set up pyramid.testing and return the Configurator. | |
1690 | """ |
|
1690 | """ | |
1691 | from rhodecode.lib.base import bootstrap_config |
|
1691 | from rhodecode.lib.base import bootstrap_config | |
1692 | config = bootstrap_config(request=request_stub) |
|
1692 | config = bootstrap_config(request=request_stub) | |
1693 |
|
1693 | |||
1694 | @request.addfinalizer |
|
1694 | @request.addfinalizer | |
1695 | def cleanup(): |
|
1695 | def cleanup(): | |
1696 | pyramid.testing.tearDown() |
|
1696 | pyramid.testing.tearDown() | |
1697 |
|
1697 | |||
1698 | return config |
|
1698 | return config | |
1699 |
|
1699 | |||
1700 |
|
1700 | |||
1701 | @pytest.fixture |
|
1701 | @pytest.fixture | |
1702 | def StubIntegrationType(): |
|
1702 | def StubIntegrationType(): | |
1703 | class _StubIntegrationType(IntegrationTypeBase): |
|
1703 | class _StubIntegrationType(IntegrationTypeBase): | |
1704 | """ Test integration type class """ |
|
1704 | """ Test integration type class """ | |
1705 |
|
1705 | |||
1706 | key = 'test' |
|
1706 | key = 'test' | |
1707 | display_name = 'Test integration type' |
|
1707 | display_name = 'Test integration type' | |
1708 | description = 'A test integration type for testing' |
|
1708 | description = 'A test integration type for testing' | |
1709 | icon = 'test_icon_html_image' |
|
1709 | ||
|
1710 | @classmethod | |||
|
1711 | def icon(cls): | |||
|
1712 | return 'test_icon_html_image' | |||
1710 |
|
1713 | |||
1711 | def __init__(self, settings): |
|
1714 | def __init__(self, settings): | |
1712 | super(_StubIntegrationType, self).__init__(settings) |
|
1715 | super(_StubIntegrationType, self).__init__(settings) | |
1713 | self.sent_events = [] # for testing |
|
1716 | self.sent_events = [] # for testing | |
1714 |
|
1717 | |||
1715 | def send_event(self, event): |
|
1718 | def send_event(self, event): | |
1716 | self.sent_events.append(event) |
|
1719 | self.sent_events.append(event) | |
1717 |
|
1720 | |||
1718 | def settings_schema(self): |
|
1721 | def settings_schema(self): | |
1719 | class SettingsSchema(colander.Schema): |
|
1722 | class SettingsSchema(colander.Schema): | |
1720 | test_string_field = colander.SchemaNode( |
|
1723 | test_string_field = colander.SchemaNode( | |
1721 | colander.String(), |
|
1724 | colander.String(), | |
1722 | missing=colander.required, |
|
1725 | missing=colander.required, | |
1723 | title='test string field', |
|
1726 | title='test string field', | |
1724 | ) |
|
1727 | ) | |
1725 | test_int_field = colander.SchemaNode( |
|
1728 | test_int_field = colander.SchemaNode( | |
1726 | colander.Int(), |
|
1729 | colander.Int(), | |
1727 | title='some integer setting', |
|
1730 | title='some integer setting', | |
1728 | ) |
|
1731 | ) | |
1729 | return SettingsSchema() |
|
1732 | return SettingsSchema() | |
1730 |
|
1733 | |||
1731 |
|
1734 | |||
1732 | integration_type_registry.register_integration_type(_StubIntegrationType) |
|
1735 | integration_type_registry.register_integration_type(_StubIntegrationType) | |
1733 | return _StubIntegrationType |
|
1736 | return _StubIntegrationType | |
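With `icon` exposed as a classmethod instead of a plain attribute, callers obtain the markup by invoking it; a hypothetical test using the fixture above:

    def test_stub_icon(StubIntegrationType):
        assert StubIntegrationType.icon() == 'test_icon_html_image'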
1734 |
|
1737 | |||
1735 | @pytest.fixture |
|
1738 | @pytest.fixture | |
1736 | def stub_integration_settings(): |
|
1739 | def stub_integration_settings(): | |
1737 | return { |
|
1740 | return { | |
1738 | 'test_string_field': 'some data', |
|
1741 | 'test_string_field': 'some data', | |
1739 | 'test_int_field': 100, |
|
1742 | 'test_int_field': 100, | |
1740 | } |
|
1743 | } | |
1741 |
|
1744 | |||
1742 |
|
1745 | |||
1743 | @pytest.fixture |
|
1746 | @pytest.fixture | |
1744 | def repo_integration_stub(request, repo_stub, StubIntegrationType, |
|
1747 | def repo_integration_stub(request, repo_stub, StubIntegrationType, | |
1745 | stub_integration_settings): |
|
1748 | stub_integration_settings): | |
1746 | integration = IntegrationModel().create( |
|
1749 | integration = IntegrationModel().create( | |
1747 | StubIntegrationType, settings=stub_integration_settings, enabled=True, |
|
1750 | StubIntegrationType, settings=stub_integration_settings, enabled=True, | |
1748 | name='test repo integration', |
|
1751 | name='test repo integration', | |
1749 | repo=repo_stub, repo_group=None, child_repos_only=None) |
|
1752 | repo=repo_stub, repo_group=None, child_repos_only=None) | |
1750 |
|
1753 | |||
1751 | @request.addfinalizer |
|
1754 | @request.addfinalizer | |
1752 | def cleanup(): |
|
1755 | def cleanup(): | |
1753 | IntegrationModel().delete(integration) |
|
1756 | IntegrationModel().delete(integration) | |
1754 |
|
1757 | |||
1755 | return integration |
|
1758 | return integration | |
1756 |
|
1759 | |||
1757 |
|
1760 | |||

@pytest.fixture
def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
                               stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=True)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration


@pytest.fixture
def repogroup_recursive_integration_stub(request, test_repo_group,
                                         StubIntegrationType,
                                         stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test recursive repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=False)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration


@pytest.fixture
def global_integration_stub(request, StubIntegrationType,
                            stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=None)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration


@pytest.fixture
def root_repos_integration_stub(request, StubIntegrationType,
                                stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=True)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration

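# Hedged sketch (illustrative only): one way to run the same assertions against every
# integration scope defined above, by resolving each stub fixture by name through
# pytest's request.getfixturevalue(). The fixture name used here is hypothetical.
@pytest.fixture(params=[
    'repo_integration_stub',
    'repogroup_integration_stub',
    'repogroup_recursive_integration_stub',
    'global_integration_stub',
    'root_repos_integration_stub',
])
def _any_integration_stub_example(request):
    # resolve whichever stub fixture the current param names
    return request.getfixturevalue(request.param)
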

@pytest.fixture
def local_dt_to_utc():
    def _factory(dt):
        # attach the local timezone, convert to UTC, then drop tzinfo again so the
        # result stays a naive datetime
        return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
            dateutil.tz.tzutc()).replace(tzinfo=None)
    return _factory

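# Hedged usage sketch (illustrative only): converting a naive local datetime into a
# naive UTC datetime with the factory above; assumes `datetime` is imported at the
# top of this file (not shown here) and uses an arbitrary example timestamp.
def _example_local_dt_to_utc_usage(local_dt_to_utc):
    utc_dt = local_dt_to_utc(datetime.datetime(2018, 1, 1, 12, 0, 0))
    # the result is expressed in UTC wall-clock time but carries no tzinfo
    assert utc_dt.tzinfo is None
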

@pytest.fixture
def disable_anonymous_user(request, baseapp):
    set_anonymous_access(False)

    @request.addfinalizer
    def cleanup():
        # restore anonymous access once the test is done
        set_anonymous_access(True)


@pytest.fixture(scope='module')
def rc_fixture(request):
    return Fixture()


@pytest.fixture
def repo_groups(request):
    fixture = Fixture()

    session = Session()
    zombie_group = fixture.create_repo_group('zombie')
    parent_group = fixture.create_repo_group('parent')
    child_group = fixture.create_repo_group('parent/child')
    groups_in_db = session.query(RepoGroup).all()
    assert len(groups_in_db) == 3
    assert child_group.group_parent_id == parent_group.group_id

    @request.addfinalizer
    def cleanup():
        fixture.destroy_repo_group(zombie_group)
        fixture.destroy_repo_group(child_group)
        fixture.destroy_repo_group(parent_group)

    return zombie_group, parent_group, child_group
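
# Hedged usage sketch (illustrative only): unpacking the three groups created by the
# fixture above; the hierarchy assertion restates what the fixture already verified.
def _example_test_repo_groups(repo_groups):
    zombie_group, parent_group, child_group = repo_groups
    assert child_group.group_parent_id == parent_group.group_id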