release: merge back stable branch into default
marcink
r3553:10cca1c5 merge default
@@ -0,0 +1,47 b''
1 |RCE| 4.16.1 |RNS|
2 ------------------
3
4 Release Date
5 ^^^^^^^^^^^^
6
7 - 2019-03-07
8
9
10 New Features
11 ^^^^^^^^^^^^
12
13
14
15 General
16 ^^^^^^^
17
18 - Docs: added missing reference for the user bookmarks feature.
19
20
21 Security
22 ^^^^^^^^
23
 24 - Comments: prevent resolving TODO comments across projects. Under certain
 25   conditions, users could resolve TODOs belonging to a different project.
26
27
28 Performance
29 ^^^^^^^^^^^
30
31
32
33 Fixes
34 ^^^^^
35
 36 - Downloads: fixed archive links generated from the file tree view.
 37 - Markdown: fixed checkbox extension sanitization that stripped the "checked" attribute.
 38 - Upgrade: fixed upgrades from older versions of RhodeCode.
 39 - Pull Requests: handle non-ascii branch names passed to the short branch selector via URL.
 40 - Hooks: fixed further unicode problems in the new pull request link generator.
41
42
43
44 Upgrade notes
45 ^^^^^^^^^^^^^
46
47 - Scheduled release addressing problems in 4.16.X releases.
@@ -1,52 +1,53 b''
1 1 1bd3e92b7e2e2d2024152b34bb88dff1db544a71 v4.0.0
2 2 170c5398320ea6cddd50955e88d408794c21d43a v4.0.1
3 3 c3fe200198f5aa34cf2e4066df2881a9cefe3704 v4.1.0
4 4 7fd5c850745e2ea821fb4406af5f4bff9b0a7526 v4.1.1
5 5 41c87da28a179953df86061d817bc35533c66dd2 v4.1.2
6 6 baaf9f5bcea3bae0ef12ae20c8b270482e62abb6 v4.2.0
7 7 32a70c7e56844a825f61df496ee5eaf8c3c4e189 v4.2.1
8 8 fa695cdb411d294679ac081d595ac654e5613b03 v4.3.0
9 9 0e4dc11b58cad833c513fe17bac39e6850edf959 v4.3.1
10 10 8a876f48f5cb1d018b837db28ff928500cb32cfb v4.4.0
11 11 8dd86b410b1aac086ffdfc524ef300f896af5047 v4.4.1
12 12 d2514226abc8d3b4f6fb57765f47d1b6fb360a05 v4.4.2
13 13 27d783325930af6dad2741476c0d0b1b7c8415c2 v4.5.0
14 14 7f2016f352abcbdba4a19d4039c386e9629449da v4.5.1
15 15 416fec799314c70a5c780fb28b3357b08869333a v4.5.2
16 16 27c3b85fafc83143e6678fbc3da69e1615bcac55 v4.6.0
17 17 5ad13deb9118c2a5243d4032d4d9cc174e5872db v4.6.1
18 18 2be921e01fa24bb102696ada596f87464c3666f6 v4.7.0
19 19 7198bdec29c2872c974431d55200d0398354cdb1 v4.7.1
20 20 bd1c8d230fe741c2dfd7100a0ef39fd0774fd581 v4.7.2
21 21 9731914f89765d9628dc4dddc84bc9402aa124c8 v4.8.0
22 22 c5a2b7d0e4bbdebc4a62d7b624befe375207b659 v4.9.0
23 23 d9aa3b27ac9f7e78359775c75fedf7bfece232f1 v4.9.1
24 24 4ba4d74981cec5d6b28b158f875a2540952c2f74 v4.10.0
25 25 0a6821cbd6b0b3c21503002f88800679fa35ab63 v4.10.1
26 26 434ad90ec8d621f4416074b84f6e9ce03964defb v4.10.2
27 27 68baee10e698da2724c6e0f698c03a6abb993bf2 v4.10.3
28 28 00821d3afd1dce3f4767cc353f84a17f7d5218a1 v4.10.4
29 29 22f6744ad8cc274311825f63f953e4dee2ea5cb9 v4.10.5
30 30 96eb24bea2f5f9258775245e3f09f6fa0a4dda01 v4.10.6
31 31 3121217a812c956d7dd5a5875821bd73e8002a32 v4.11.0
32 32 fa98b454715ac5b912f39e84af54345909a2a805 v4.11.1
33 33 3982abcfdcc229a723cebe52d3a9bcff10bba08e v4.11.2
34 34 33195f145db9172f0a8f1487e09207178a6ab065 v4.11.3
35 35 194c74f33e32bbae6fc4d71ec5a999cff3c13605 v4.11.4
36 36 8fbd8b0c3ddc2fa4ac9e4ca16942a03eb593df2d v4.11.5
37 37 f0609aa5d5d05a1ca2f97c3995542236131c9d8a v4.11.6
38 38 b5b30547d90d2e088472a70c84878f429ffbf40d v4.12.0
39 39 9072253aa8894d20c00b4a43dc61c2168c1eff94 v4.12.1
40 40 6a517543ea9ef9987d74371bd2a315eb0b232dc9 v4.12.2
41 41 7fc0731b024c3114be87865eda7ab621cc957e32 v4.12.3
42 42 6d531c0b068c6eda62dddceedc9f845ecb6feb6f v4.12.4
43 43 3d6bf2d81b1564830eb5e83396110d2a9a93eb1e v4.13.0
44 44 5468fc89e708bd90e413cd0d54350017abbdbc0e v4.13.1
45 45 610d621550521c314ee97b3d43473ac0bcf06fb8 v4.13.2
46 46 7dc62c090881fb5d03268141e71e0940d7c3295d v4.13.3
47 47 9151328c1c46b72ba6f00d7640d9141e75aa1ca2 v4.14.0
48 48 a47eeac5dfa41fa6779d90452affba4091c3ade8 v4.14.1
49 49 4b34ce0d2c3c10510626b3b65044939bb7a2cddf v4.15.0
50 50 14502561d22e6b70613674cd675ae9a604b7989f v4.15.1
51 51 4aaa40b605b01af78a9f6882eca561c54b525ef0 v4.15.2
52 52 797744642eca86640ed20bef2cd77445780abaec v4.16.0
53 6c3452c7c25ed35ff269690929e11960ed6ad7d3 v4.16.1
@@ -1,129 +1,130 b''
1 1 .. _rhodecode-release-notes-ref:
2 2
3 3 Release Notes
4 4 =============
5 5
6 6 |RCE| 4.x Versions
7 7 ------------------
8 8
9 9 .. toctree::
10 10 :maxdepth: 1
11 11
12 release-notes-4.16.1.rst
12 13 release-notes-4.16.0.rst
13 14 release-notes-4.15.2.rst
14 15 release-notes-4.15.1.rst
15 16 release-notes-4.15.0.rst
16 17 release-notes-4.14.1.rst
17 18 release-notes-4.14.0.rst
18 19 release-notes-4.13.3.rst
19 20 release-notes-4.13.2.rst
20 21 release-notes-4.13.1.rst
21 22 release-notes-4.13.0.rst
22 23 release-notes-4.12.4.rst
23 24 release-notes-4.12.3.rst
24 25 release-notes-4.12.2.rst
25 26 release-notes-4.12.1.rst
26 27 release-notes-4.12.0.rst
27 28 release-notes-4.11.6.rst
28 29 release-notes-4.11.5.rst
29 30 release-notes-4.11.4.rst
30 31 release-notes-4.11.3.rst
31 32 release-notes-4.11.2.rst
32 33 release-notes-4.11.1.rst
33 34 release-notes-4.11.0.rst
34 35 release-notes-4.10.6.rst
35 36 release-notes-4.10.5.rst
36 37 release-notes-4.10.4.rst
37 38 release-notes-4.10.3.rst
38 39 release-notes-4.10.2.rst
39 40 release-notes-4.10.1.rst
40 41 release-notes-4.10.0.rst
41 42 release-notes-4.9.1.rst
42 43 release-notes-4.9.0.rst
43 44 release-notes-4.8.0.rst
44 45 release-notes-4.7.2.rst
45 46 release-notes-4.7.1.rst
46 47 release-notes-4.7.0.rst
47 48 release-notes-4.6.1.rst
48 49 release-notes-4.6.0.rst
49 50 release-notes-4.5.2.rst
50 51 release-notes-4.5.1.rst
51 52 release-notes-4.5.0.rst
52 53 release-notes-4.4.2.rst
53 54 release-notes-4.4.1.rst
54 55 release-notes-4.4.0.rst
55 56 release-notes-4.3.1.rst
56 57 release-notes-4.3.0.rst
57 58 release-notes-4.2.1.rst
58 59 release-notes-4.2.0.rst
59 60 release-notes-4.1.2.rst
60 61 release-notes-4.1.1.rst
61 62 release-notes-4.1.0.rst
62 63 release-notes-4.0.1.rst
63 64 release-notes-4.0.0.rst
64 65
65 66 |RCE| 3.x Versions
66 67 ------------------
67 68
68 69 .. toctree::
69 70 :maxdepth: 1
70 71
71 72 release-notes-3.8.4.rst
72 73 release-notes-3.8.3.rst
73 74 release-notes-3.8.2.rst
74 75 release-notes-3.8.1.rst
75 76 release-notes-3.8.0.rst
76 77 release-notes-3.7.1.rst
77 78 release-notes-3.7.0.rst
78 79 release-notes-3.6.1.rst
79 80 release-notes-3.6.0.rst
80 81 release-notes-3.5.2.rst
81 82 release-notes-3.5.1.rst
82 83 release-notes-3.5.0.rst
83 84 release-notes-3.4.1.rst
84 85 release-notes-3.4.0.rst
85 86 release-notes-3.3.4.rst
86 87 release-notes-3.3.3.rst
87 88 release-notes-3.3.2.rst
88 89 release-notes-3.3.1.rst
89 90 release-notes-3.3.0.rst
90 91 release-notes-3.2.3.rst
91 92 release-notes-3.2.2.rst
92 93 release-notes-3.2.1.rst
93 94 release-notes-3.2.0.rst
94 95 release-notes-3.1.1.rst
95 96 release-notes-3.1.0.rst
96 97 release-notes-3.0.2.rst
97 98 release-notes-3.0.1.rst
98 99 release-notes-3.0.0.rst
99 100
100 101 |RCE| 2.x Versions
101 102 ------------------
102 103
103 104 .. toctree::
104 105 :maxdepth: 1
105 106
106 107 release-notes-2.2.8.rst
107 108 release-notes-2.2.7.rst
108 109 release-notes-2.2.6.rst
109 110 release-notes-2.2.5.rst
110 111 release-notes-2.2.4.rst
111 112 release-notes-2.2.3.rst
112 113 release-notes-2.2.2.rst
113 114 release-notes-2.2.1.rst
114 115 release-notes-2.2.0.rst
115 116 release-notes-2.1.0.rst
116 117 release-notes-2.0.2.rst
117 118 release-notes-2.0.1.rst
118 119 release-notes-2.0.0.rst
119 120
120 121 |RCE| 1.x Versions
121 122 ------------------
122 123
123 124 .. toctree::
124 125 :maxdepth: 1
125 126
126 127 release-notes-1.7.2.rst
127 128 release-notes-1.7.1.rst
128 129 release-notes-1.7.0.rst
129 130 release-notes-1.6.0.rst
@@ -1,405 +1,405 b''
1 1 all_tags = [
2 2 "a", "abbr", "acronym", "address", "applet", "area", "article", "aside", "audio",
3 3 "b", "base", "basefont", "bdi", "bdo", "bgsound", "big", "blink", "blockquote", "body", "br", "button",
4 4 "canvas", "caption", "center", "cite", "code", "col", "colgroup", "command", "content",
5 5 "data", "datalist", "dd", "del", "detals", "dfn", "dialog", "dir", "div", "dl", "dt",
6 6 "element", "em", "embed",
7 7 "fieldset", "figcaption", "figure", "font", "footer", "form", "frame", "frameset",
8 8 "h1", "h2", "h3", "h4", "h5", "h6", "head", "header", "hgroup", "hr", "html",
9 9 "i", "iframe", "image", "img", "input", "ins", "isindex",
10 10 "kbd", "keygen",
11 11 "label", "legend", "li", "link", "listing",
12 12 "main", "map", "mark", "marquee", "menu", "menuitem", "meta", "meter", "multicol",
13 13 "nav", "nobr", "noembed", "noframes", "noscript",
14 14 "object", "ol", "optgroup", "option", "output",
15 15 "p", "param", "picture", "plaintext", "pre", "progress",
16 16 "q",
17 17 "rp", "rt", "ruby",
18 18 "s", "samp", "script", "section", "select", "shadow", "small", "source", "spacer", "span", "strike", "strong", "style", "sub", "summary", "sup",
19 19 "table", "tbody", "td", "template", "textarea", "tfoot", "th", "thead", "time", "title", "tr", "track", "tt",
20 20 "u", "ul",
21 21 "var", "video",
22 22 "wbr",
23 23 "xmp",
24 24 ]
25 25
26 26 # List tags that, if included in a page, could break markup or open XSS.
27 27 generally_xss_unsafe = [
28 28 "applet", "audio",
29 29 "bgsound", "body",
30 30 "canvas",
31 31 "embed",
32 32 "frame", "frameset",
33 33 "head", "html",
34 34 "iframe",
35 35 "link",
36 36 "meta",
37 37 "object",
38 38 "param",
39 39 "source", "script",
40 40 "ruby", "rt",
41 41 "title", "track",
42 42 "video",
43 43 "xmp"
44 44 ]
45 45
46 46 # Tags that, if included on the page, will probably not break markup or open
47 47 # XSS. Note that these must be combined with attribute whitelisting, or things
48 48 # like <img> and <style> could still be unsafe.
49 49 generally_xss_safe = list(set(all_tags) - set(generally_xss_unsafe))
50 50 generally_xss_safe.sort()
51 51
52 52 # Tags suitable for rendering markdown
53 53 markdown_tags = [
54 54 "h1", "h2", "h3", "h4", "h5", "h6",
55 55 "b", "i", "strong", "em", "tt",
56 56 "p", "br",
57 57 "span", "div", "blockquote", "code", "hr", "pre", "del",
58 58 "ul", "ol", "li",
59 59 "dl", "dd", "dt",
60 60 "table", "thead", "tbody", "tfoot", "tr", "th", "td",
61 61 "img",
62 62 "a",
63 63 "input",
64 64 ]
65 65
66 66 markdown_attrs = {
67 67 "*": ["class", "style", "align"],
68 68 "img": ["src", "alt", "title"],
69 69 "a": ["href", "alt", "title", "name"],
70 70 "abbr": ["title"],
71 71 "acronym": ["title"],
72 72 "pre": ["lang"],
73 "input": ["type", "disabled"]
73 "input": ["type", "disabled", "checked"]
74 74 }
75 75
76 76 standard_styles = [
77 77 # Taken from https://developer.mozilla.org/en-US/docs/Web/CSS/Reference
78 78 # This includes pseudo-classes, pseudo-elements, @-rules, units, and
79 79 # selectors in addition to properties, but it doesn't matter for our
80 80 # purposes -- we don't need to filter styles..
81 81 ":active", "::after (:after)", "align-content", "align-items", "align-self",
82 82 "all", "<angle>", "animation", "animation-delay", "animation-direction",
83 83 "animation-duration", "animation-fill-mode", "animation-iteration-count",
84 84 "animation-name", "animation-play-state", "animation-timing-function",
85 85 "@annotation", "annotation()", "attr()", "::backdrop", "backface-visibility",
86 86 "background", "background-attachment", "background-blend-mode",
87 87 "background-clip", "background-color", "background-image", "background-origin",
88 88 "background-position", "background-repeat", "background-size", "<basic-shape>",
89 89 "::before (:before)", "<blend-mode>", "blur()", "border", "border-bottom",
90 90 "border-bottom-color", "border-bottom-left-radius",
91 91 "border-bottom-right-radius", "border-bottom-style", "border-bottom-width",
92 92 "border-collapse", "border-color", "border-image", "border-image-outset",
93 93 "border-image-repeat", "border-image-slice", "border-image-source",
94 94 "border-image-width", "border-left", "border-left-color", "border-left-style",
95 95 "border-left-width", "border-radius", "border-right", "border-right-color",
96 96 "border-right-style", "border-right-width", "border-spacing", "border-style",
97 97 "border-top", "border-top-color", "border-top-left-radius",
98 98 "border-top-right-radius", "border-top-style", "border-top-width",
99 99 "border-width", "bottom", "box-decoration-break", "box-shadow", "box-sizing",
100 100 "break-after", "break-before", "break-inside", "brightness()", "calc()",
101 101 "caption-side", "ch", "@character-variant", "character-variant()", "@charset",
102 102 ":checked", "circle()", "clear", "clip", "clip-path", "cm", "color", "<color>",
103 103 "columns", "column-count", "column-fill", "column-gap", "column-rule",
104 104 "column-rule-color", "column-rule-style", "column-rule-width", "column-span",
105 105 "column-width", "content", "contrast()", "<counter>", "counter-increment",
106 106 "counter-reset", "@counter-style", "cubic-bezier()", "cursor",
107 107 "<custom-ident>", ":default", "deg", ":dir()", "direction", ":disabled",
108 108 "display", "@document", "dpcm", "dpi", "dppx", "drop-shadow()", "element()",
109 109 "ellipse()", "em", ":empty", "empty-cells", ":enabled", "ex", "filter",
110 110 ":first", ":first-child", "::first-letter", "::first-line",
111 111 ":first-of-type", "flex", "flex-basis", "flex-direction",
112 112 "flex-flow", "flex-grow", "flex-shrink", "flex-wrap", "float", ":focus",
113 113 "font", "@font-face", "font-family", "font-feature-settings",
114 114 "@font-feature-values", "font-kerning", "font-language-override", "font-size",
115 115 "font-size-adjust", "font-stretch", "font-style", "font-synthesis",
116 116 "font-variant", "font-variant-alternates", "font-variant-caps",
117 117 "font-variant-east-asian", "font-variant-ligatures", "font-variant-numeric",
118 118 "font-variant-position", "font-weight", "<frequency>", ":fullscreen", "grad",
119 119 "<gradient>", "grayscale()", "grid", "grid-area", "grid-auto-columns",
120 120 "grid-auto-flow", "grid-auto-position", "grid-auto-rows", "grid-column",
121 121 "grid-column-start", "grid-column-end", "grid-row", "grid-row-start",
122 122 "grid-row-end", "grid-template", "grid-template-areas", "grid-template-rows",
123 123 "grid-template-columns", "height", ":hover", "hsl()", "hsla()", "hue-rotate()",
124 124 "hyphens", "hz", "<image>", "image()", "image-rendering", "image-resolution",
125 125 "image-orientation", "ime-mode", "@import", "in", ":indeterminate", "inherit",
126 126 "initial", ":in-range", "inset()", "<integer>", ":invalid", "invert()",
127 127 "isolation", "justify-content", "@keyframes", "khz", ":lang()", ":last-child",
128 128 ":last-of-type", "left", ":left", "<length>", "letter-spacing",
129 129 "linear-gradient()", "line-break", "line-height", ":link", "list-style",
130 130 "list-style-image", "list-style-position", "list-style-type", "margin",
131 131 "margin-bottom", "margin-left", "margin-right", "margin-top", "marks", "mask",
132 132 "mask-type", "matrix()", "matrix3d()", "max-height", "max-width", "@media",
133 133 "min-height", "minmax()", "min-width", "mix-blend-mode", "mm", "ms",
134 134 "@namespace", ":not()", ":nth-child()", ":nth-last-child()",
135 135 ":nth-last-of-type()", ":nth-of-type()", "<number>", "object-fit",
136 136 "object-position", ":only-child", ":only-of-type", "opacity", "opacity()",
137 137 ":optional", "order", "@ornaments", "ornaments()", "orphans", "outline",
138 138 "outline-color", "outline-offset", "outline-style", "outline-width",
139 139 ":out-of-range", "overflow", "overflow-wrap", "overflow-x", "overflow-y",
140 140 "padding", "padding-bottom", "padding-left", "padding-right", "padding-top",
141 141 "@page", "page-break-after", "page-break-before", "page-break-inside", "pc",
142 142 "<percentage>", "perspective", "perspective()", "perspective-origin",
143 143 "pointer-events", "polygon()", "position", "<position>", "pt", "px", "quotes",
144 144 "rad", "radial-gradient()", "<ratio>", ":read-only", ":read-write", "rect()",
145 145 "rem", "repeat()", "::repeat-index", "::repeat-item",
146 146 "repeating-linear-gradient()", "repeating-radial-gradient()", ":required",
147 147 "resize", "<resolution>", "rgb()", "rgba()", "right", ":right", ":root",
148 148 "rotate()", "rotatex()", "rotatey()", "rotatez()", "rotate3d()", "ruby-align",
149 149 "ruby-merge", "ruby-position", "s", "saturate()", "scale()", "scalex()",
150 150 "scaley()", "scalez()", "scale3d()", ":scope", "scroll-behavior",
151 151 "::selection", "sepia()", "<shape>", "shape-image-threshold", "shape-margin",
152 152 "shape-outside", "skew()", "skewx()", "skewy()", "steps()", "<string>",
153 153 "@styleset", "styleset()", "@stylistic", "stylistic()", "@supports", "@swash",
154 154 "swash()", "symbol()", "table-layout", "tab-size", ":target", "text-align",
155 155 "text-align-last", "text-combine-upright", "text-decoration",
156 156 "text-decoration-color", "text-decoration-line", "text-decoration-style",
157 157 "text-indent", "text-orientation", "text-overflow", "text-rendering",
158 158 "text-shadow", "text-transform", "text-underline-position", "<time>",
159 159 "<timing-function>", "top", "touch-action", "transform", "transform-origin",
160 160 "transform-style", "transition", "transition-delay", "transition-duration",
161 161 "transition-property", "transition-timing-function", "translate()",
162 162 "translatex()", "translatey()", "translatez()", "translate3d()", "turn",
163 163 "unicode-bidi", "unicode-range", "unset", "<uri>", "url()", "<user-ident>",
164 164 ":valid", "::value", "var()", "vertical-align", "vh", "@viewport",
165 165 "visibility", ":visited", "vmax", "vmin", "vw", "white-space", "widows",
166 166 "width", "will-change", "word-break", "word-spacing", "word-wrap",
167 167 "writing-mode", "z-index",
168 168
169 169 ]
170 170
171 171 webkit_prefixed_styles = [
172 172 # Webkit-prefixed styles
173 173 # https://developer.mozilla.org/en-US/docs/Web/CSS/Reference/Webkit_Extensions
174 174 "-webkit-animation", "-webkit-animation-delay", "-webkit-animation-direction",
175 175 "-webkit-animation-duration", "-webkit-animation-fill-mode",
176 176 "-webkit-animation-iteration-count", "-webkit-animation-name",
177 177 "-webkit-animation-play-state", "-webkit-animation-timing-function",
178 178 "-webkit-backface-visibility", "-webkit-border-image", "-webkit-column-count",
179 179 "-webkit-column-gap", "-webkit-column-width", "-webkit-column-rule",
180 180 "-webkit-column-rule-width", "-webkit-column-rule-style",
181 181 "-webkit-column-rule-color", "-webkit-columns", "-webkit-column-span",
182 182 "-webkit-font-feature-settings", "-webkit-font-kerning",
183 183 "-webkit-font-size-delta", "-webkit-font-variant-ligatures",
184 184 "-webkit-grid-column", "-webkit-grid-row", "-webkit-hyphens", "-webkit-mask",
185 185 "-webkit-mask-clip", "-webkit-mask-composite", "-webkit-mask-image",
186 186 "-webkit-mask-origin", "-webkit-mask-position", "-webkit-mask-repeat",
187 187 "-webkit-mask-size", "-webkit-perspective", "-webkit-perspective-origin",
188 188 "-webkit-region-fragment", "-webkit-shape-outside", "-webkit-text-emphasis",
189 189 "-webkit-text-emphasis-color", "-webkit-text-emphasis-position",
190 190 "-webkit-text-emphasis-style", "-webkit-transform", "-webkit-transform-origin",
191 191 "-webkit-transform-style", "-webkit-transition", "-webkit-transition-delay",
192 192 "-webkit-transition-duration", "-webkit-transition-property",
193 193 "-webkit-transition-timing-function", "-epub-word-break", "-epub-writing-mode",
194 194 # WebKit-prefixed properties with an unprefixed counterpart
195 195 "-webkit-background-clip", "-webkit-background-origin",
196 196 "-webkit-background-size", "-webkit-border-bottom-left-radius",
197 197 "-webkit-border-bottom-right-radius", "-webkit-border-radius",
198 198 "-webkit-border-top-left-radius", "-webkit-border-top-right-radius",
199 199 "-webkit-box-sizing", "-epub-caption-side", "-webkit-opacity",
200 200 "-epub-text-transform",
201 201 ]
202 202
203 203 mozilla_prefixed_styles = [
204 204 "-moz-column-count", "-moz-column-fill", "-moz-column-gap",
205 205 "-moz-column-width", "-moz-column-rule", "-moz-column-rule-width",
206 206 "-moz-column-rule-style", "-moz-column-rule-color",
207 207 "-moz-font-feature-settings", "-moz-font-language-override", "-moz-hyphens",
208 208 "-moz-text-align-last", "-moz-text-decoration-color",
209 209 "-moz-text-decoration-line", "-moz-text-decoration-style",
210 210 ]
211 211
212 212 all_prefixed_styles = [
213 213 # From http://peter.sh/experiments/vendor-prefixed-css-property-overview/
214 214 "-ms-accelerator", "-webkit-app-region", "-webkit-appearance",
215 215 "-webkit-appearance", "-moz-appearance", "-webkit-aspect-ratio",
216 216 "-webkit-backdrop-filter", "backface-visibility",
217 217 "-webkit-backface-visibility", "backface-visibility", "backface-visibility",
218 218 "-webkit-background-composite", "-webkit-background-composite", "-moz-binding",
219 219 "-ms-block-progression", "-webkit-border-after", "-webkit-border-after",
220 220 "-webkit-border-after-color", "-webkit-border-after-color",
221 221 "-webkit-border-after-style", "-webkit-border-after-style",
222 222 "-webkit-border-after-width", "-webkit-border-after-width",
223 223 "-webkit-border-before", "-webkit-border-before",
224 224 "-webkit-border-before-color", "-webkit-border-before-color",
225 225 "-webkit-border-before-style", "-webkit-border-before-style",
226 226 "-webkit-border-before-width", "-webkit-border-before-width",
227 227 "-moz-border-bottom-colors", "-webkit-border-end", "-webkit-border-end",
228 228 "-moz-border-end", "-webkit-border-end-color", "-webkit-border-end-color",
229 229 "-moz-border-end-color", "-webkit-border-end-style",
230 230 "-webkit-border-end-style", "-moz-border-end-style",
231 231 "-webkit-border-end-width", "-webkit-border-end-width",
232 232 "-moz-border-end-width", "-webkit-border-fit",
233 233 "-webkit-border-horizontal-spacing", "-webkit-border-horizontal-spacing",
234 234 "-moz-border-left-colors", "-moz-border-right-colors", "-webkit-border-start",
235 235 "-webkit-border-start", "-moz-border-start", "-webkit-border-start-color",
236 236 "-webkit-border-start-color", "-moz-border-start-color",
237 237 "-webkit-border-start-style", "-webkit-border-start-style",
238 238 "-moz-border-start-style", "-webkit-border-start-width",
239 239 "-webkit-border-start-width", "-moz-border-start-width",
240 240 "-moz-border-top-colors", "-webkit-border-vertical-spacing",
241 241 "-webkit-border-vertical-spacing", "-webkit-box-align", "-webkit-box-align",
242 242 "-moz-box-align", "-webkit-box-decoration-break",
243 243 "-webkit-box-decoration-break", "box-decoration-break",
244 244 "-webkit-box-direction", "-webkit-box-direction", "-moz-box-direction",
245 245 "-webkit-box-flex", "-webkit-box-flex", "-moz-box-flex",
246 246 "-webkit-box-flex-group", "-webkit-box-flex-group", "-webkit-box-lines",
247 247 "-webkit-box-lines", "-webkit-box-ordinal-group", "-webkit-box-ordinal-group",
248 248 "-moz-box-ordinal-group", "-webkit-box-orient", "-webkit-box-orient",
249 249 "-moz-box-orient", "-webkit-box-pack", "-webkit-box-pack", "-moz-box-pack",
250 250 "-webkit-box-reflect", "-webkit-box-reflect", "clip-path", "-webkit-clip-path",
251 251 "clip-path", "clip-path", "-webkit-color-correction", "-webkit-column-axis",
252 252 "-webkit-column-break-after", "-webkit-column-break-after",
253 253 "-webkit-column-break-before", "-webkit-column-break-before",
254 254 "-webkit-column-break-inside", "-webkit-column-break-inside",
255 255 "-webkit-column-count", "column-count", "-moz-column-count", "column-count",
256 256 "column-fill", "column-fill", "-moz-column-fill", "column-fill",
257 257 "-webkit-column-gap", "column-gap", "-moz-column-gap", "column-gap",
258 258 "-webkit-column-rule", "column-rule", "-moz-column-rule", "column-rule",
259 259 "-webkit-column-rule-color", "column-rule-color", "-moz-column-rule-color",
260 260 "column-rule-color", "-webkit-column-rule-style", "column-rule-style",
261 261 "-moz-column-rule-style", "column-rule-style", "-webkit-column-rule-width",
262 262 "column-rule-width", "-moz-column-rule-width", "column-rule-width",
263 263 "-webkit-column-span", "column-span", "column-span", "-webkit-column-width",
264 264 "column-width", "-moz-column-width", "column-width", "-webkit-columns",
265 265 "columns", "-moz-columns", "columns", "-ms-content-zoom-chaining",
266 266 "-ms-content-zoom-limit", "-ms-content-zoom-limit-max",
267 267 "-ms-content-zoom-limit-min", "-ms-content-zoom-snap",
268 268 "-ms-content-zoom-snap-points", "-ms-content-zoom-snap-type",
269 269 "-ms-content-zooming", "-moz-control-character-visibility",
270 270 "-webkit-cursor-visibility", "-webkit-dashboard-region", "filter",
271 271 "-webkit-filter", "filter", "filter", "-ms-flex-align", "-ms-flex-item-align",
272 272 "-ms-flex-line-pack", "-ms-flex-negative", "-ms-flex-order", "-ms-flex-pack",
273 273 "-ms-flex-positive", "-ms-flex-preferred-size", "-moz-float-edge",
274 274 "-webkit-flow-from", "-ms-flow-from", "-webkit-flow-into", "-ms-flow-into",
275 275 "-webkit-font-feature-settings", "-webkit-font-feature-settings",
276 276 "font-feature-settings", "font-feature-settings", "font-kerning",
277 277 "-webkit-font-kerning", "font-kerning", "-webkit-font-size-delta",
278 278 "-webkit-font-size-delta", "-webkit-font-smoothing", "-webkit-font-smoothing",
279 279 "font-variant-ligatures", "-webkit-font-variant-ligatures",
280 280 "font-variant-ligatures", "-moz-force-broken-image-icon", "grid",
281 281 "-webkit-grid", "grid", "grid-area", "-webkit-grid-area", "grid-area",
282 282 "grid-auto-columns", "-webkit-grid-auto-columns", "grid-auto-columns",
283 283 "grid-auto-flow", "-webkit-grid-auto-flow", "grid-auto-flow", "grid-auto-rows",
284 284 "-webkit-grid-auto-rows", "grid-auto-rows", "grid-column",
285 285 "-webkit-grid-column", "grid-column", "-ms-grid-column",
286 286 "-ms-grid-column-align", "grid-column-end", "-webkit-grid-column-end",
287 287 "grid-column-end", "-ms-grid-column-span", "grid-column-start",
288 288 "-webkit-grid-column-start", "grid-column-start", "-ms-grid-columns",
289 289 "grid-row", "-webkit-grid-row", "grid-row", "-ms-grid-row",
290 290 "-ms-grid-row-align", "grid-row-end", "-webkit-grid-row-end", "grid-row-end",
291 291 "-ms-grid-row-span", "grid-row-start", "-webkit-grid-row-start",
292 292 "grid-row-start", "-ms-grid-rows", "grid-template", "-webkit-grid-template",
293 293 "grid-template", "grid-template-areas", "-webkit-grid-template-areas",
294 294 "grid-template-areas", "grid-template-columns",
295 295 "-webkit-grid-template-columns", "grid-template-columns", "grid-template-rows",
296 296 "-webkit-grid-template-rows", "grid-template-rows", "-ms-high-contrast-adjust",
297 297 "-webkit-highlight", "-webkit-hyphenate-character",
298 298 "-webkit-hyphenate-character", "-webkit-hyphenate-limit-after",
299 299 "-webkit-hyphenate-limit-before", "-ms-hyphenate-limit-chars",
300 300 "-webkit-hyphenate-limit-lines", "-ms-hyphenate-limit-lines",
301 301 "-ms-hyphenate-limit-zone", "-webkit-hyphens", "-moz-hyphens", "-ms-hyphens",
302 302 "-moz-image-region", "-ms-ime-align", "-webkit-initial-letter",
303 303 "-ms-interpolation-mode", "justify-self", "-webkit-justify-self",
304 304 "-webkit-line-align", "-webkit-line-box-contain", "-webkit-line-box-contain",
305 305 "-webkit-line-break", "-webkit-line-break", "line-break", "-webkit-line-clamp",
306 306 "-webkit-line-clamp", "-webkit-line-grid", "-webkit-line-snap",
307 307 "-webkit-locale", "-webkit-locale", "-webkit-logical-height",
308 308 "-webkit-logical-height", "-webkit-logical-width", "-webkit-logical-width",
309 309 "-webkit-margin-after", "-webkit-margin-after",
310 310 "-webkit-margin-after-collapse", "-webkit-margin-after-collapse",
311 311 "-webkit-margin-before", "-webkit-margin-before",
312 312 "-webkit-margin-before-collapse", "-webkit-margin-before-collapse",
313 313 "-webkit-margin-bottom-collapse", "-webkit-margin-bottom-collapse",
314 314 "-webkit-margin-collapse", "-webkit-margin-collapse", "-webkit-margin-end",
315 315 "-webkit-margin-end", "-moz-margin-end", "-webkit-margin-start",
316 316 "-webkit-margin-start", "-moz-margin-start", "-webkit-margin-top-collapse",
317 317 "-webkit-margin-top-collapse", "-webkit-marquee", "-webkit-marquee-direction",
318 318 "-webkit-marquee-increment", "-webkit-marquee-repetition",
319 319 "-webkit-marquee-speed", "-webkit-marquee-style", "mask", "-webkit-mask",
320 320 "mask", "-webkit-mask-box-image", "-webkit-mask-box-image",
321 321 "-webkit-mask-box-image-outset", "-webkit-mask-box-image-outset",
322 322 "-webkit-mask-box-image-repeat", "-webkit-mask-box-image-repeat",
323 323 "-webkit-mask-box-image-slice", "-webkit-mask-box-image-slice",
324 324 "-webkit-mask-box-image-source", "-webkit-mask-box-image-source",
325 325 "-webkit-mask-box-image-width", "-webkit-mask-box-image-width",
326 326 "-webkit-mask-clip", "-webkit-mask-clip", "-webkit-mask-composite",
327 327 "-webkit-mask-composite", "-webkit-mask-image", "-webkit-mask-image",
328 328 "-webkit-mask-origin", "-webkit-mask-origin", "-webkit-mask-position",
329 329 "-webkit-mask-position", "-webkit-mask-position-x", "-webkit-mask-position-x",
330 330 "-webkit-mask-position-y", "-webkit-mask-position-y", "-webkit-mask-repeat",
331 331 "-webkit-mask-repeat", "-webkit-mask-repeat-x", "-webkit-mask-repeat-x",
332 332 "-webkit-mask-repeat-y", "-webkit-mask-repeat-y", "-webkit-mask-size",
333 333 "-webkit-mask-size", "mask-source-type", "-webkit-mask-source-type",
334 334 "-moz-math-display", "-moz-math-variant", "-webkit-max-logical-height",
335 335 "-webkit-max-logical-height", "-webkit-max-logical-width",
336 336 "-webkit-max-logical-width", "-webkit-min-logical-height",
337 337 "-webkit-min-logical-height", "-webkit-min-logical-width",
338 338 "-webkit-min-logical-width", "-webkit-nbsp-mode", "-moz-orient",
339 339 "-moz-osx-font-smoothing", "-moz-outline-radius",
340 340 "-moz-outline-radius-bottomleft", "-moz-outline-radius-bottomright",
341 341 "-moz-outline-radius-topleft", "-moz-outline-radius-topright",
342 342 "-webkit-overflow-scrolling", "-ms-overflow-style", "-webkit-padding-after",
343 343 "-webkit-padding-after", "-webkit-padding-before", "-webkit-padding-before",
344 344 "-webkit-padding-end", "-webkit-padding-end", "-moz-padding-end",
345 345 "-webkit-padding-start", "-webkit-padding-start", "-moz-padding-start",
346 346 "perspective", "-webkit-perspective", "perspective", "perspective",
347 347 "perspective-origin", "-webkit-perspective-origin", "perspective-origin",
348 348 "perspective-origin", "-webkit-perspective-origin-x",
349 349 "-webkit-perspective-origin-x", "perspective-origin-x",
350 350 "-webkit-perspective-origin-y", "-webkit-perspective-origin-y",
351 351 "perspective-origin-y", "-webkit-print-color-adjust",
352 352 "-webkit-print-color-adjust", "-webkit-region-break-after",
353 353 "-webkit-region-break-before", "-webkit-region-break-inside",
354 354 "-webkit-region-fragment", "-webkit-rtl-ordering", "-webkit-rtl-ordering",
355 355 "-webkit-ruby-position", "-webkit-ruby-position", "ruby-position",
356 356 "-moz-script-level", "-moz-script-min-size", "-moz-script-size-multiplier",
357 357 "-ms-scroll-chaining", "-ms-scroll-limit", "-ms-scroll-limit-x-max",
358 358 "-ms-scroll-limit-x-min", "-ms-scroll-limit-y-max", "-ms-scroll-limit-y-min",
359 359 "-ms-scroll-rails", "-webkit-scroll-snap-coordinate",
360 360 "-webkit-scroll-snap-destination", "-webkit-scroll-snap-points-x",
361 361 "-ms-scroll-snap-points-x", "-webkit-scroll-snap-points-y",
362 362 "-ms-scroll-snap-points-y", "-webkit-scroll-snap-type", "-ms-scroll-snap-type",
363 363 "-ms-scroll-snap-x", "-ms-scroll-snap-y", "-ms-scroll-translation",
364 364 "-ms-scrollbar-3dlight-color", "shape-image-threshold",
365 365 "-webkit-shape-image-threshold", "shape-margin", "-webkit-shape-margin",
366 366 "shape-outside", "-webkit-shape-outside", "-moz-stack-sizing", "tab-size",
367 367 "tab-size", "-moz-tab-size", "-webkit-tap-highlight-color",
368 368 "-webkit-tap-highlight-color", "text-align-last", "-webkit-text-align-last",
369 369 "-moz-text-align-last", "text-align-last", "-webkit-text-combine",
370 370 "-webkit-text-combine", "-ms-text-combine-horizontal", "text-decoration-color",
371 371 "-webkit-text-decoration-color", "text-decoration-color",
372 372 "text-decoration-color", "text-decoration-line",
373 373 "-webkit-text-decoration-line", "text-decoration-line",
374 374 "-webkit-text-decoration-skip", "text-decoration-style",
375 375 "-webkit-text-decoration-style", "text-decoration-style",
376 376 "-webkit-text-decorations-in-effect", "-webkit-text-decorations-in-effect",
377 377 "-webkit-text-emphasis", "text-emphasis", "-webkit-text-emphasis-color",
378 378 "text-emphasis-color", "-webkit-text-emphasis-position",
379 379 "text-emphasis-position", "-webkit-text-emphasis-style", "text-emphasis-style",
380 380 "-webkit-text-fill-color", "-webkit-text-fill-color", "text-justify",
381 381 "-webkit-text-justify", "text-justify", "-webkit-text-orientation",
382 382 "-webkit-text-orientation", "text-orientation", "-webkit-text-security",
383 383 "-webkit-text-security", "-webkit-text-size-adjust", "-moz-text-size-adjust",
384 384 "-ms-text-size-adjust", "-webkit-text-stroke", "-webkit-text-stroke",
385 385 "-webkit-text-stroke-color", "-webkit-text-stroke-color",
386 386 "-webkit-text-stroke-width", "-webkit-text-stroke-width",
387 387 "text-underline-position", "-webkit-text-underline-position",
388 388 "text-underline-position", "-webkit-touch-callout", "-ms-touch-select",
389 389 "transform", "-webkit-transform", "transform", "transform", "transform-origin",
390 390 "-webkit-transform-origin", "transform-origin", "transform-origin",
391 391 "-webkit-transform-origin-x", "-webkit-transform-origin-x",
392 392 "transform-origin-x", "-webkit-transform-origin-y",
393 393 "-webkit-transform-origin-y", "transform-origin-y",
394 394 "-webkit-transform-origin-z", "-webkit-transform-origin-z",
395 395 "transform-origin-z", "transform-style", "-webkit-transform-style",
396 396 "transform-style", "transform-style", "-webkit-user-drag", "-webkit-user-drag",
397 397 "-moz-user-focus", "-moz-user-input", "-webkit-user-modify",
398 398 "-webkit-user-modify", "-moz-user-modify", "-webkit-user-select",
399 399 "-webkit-user-select", "-moz-user-select", "-ms-user-select",
400 400 "-moz-window-dragging", "-moz-window-shadow", "-ms-wrap-flow",
401 401 "-ms-wrap-margin", "-ms-wrap-through", "writing-mode", "-webkit-writing-mode",
402 402 "writing-mode", "writing-mode",
403 403 ]
404 404
 405 405 all_styles = standard_styles + all_prefixed_styles
 \ No newline at end of file
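For context, here is a minimal sketch of how whitelists like the ones in this module can drive markdown HTML sanitization. The use of the `bleach` library, the sample HTML, and the `strip=True` choice are illustrative assumptions rather than RhodeCode's exact pipeline; the point is that adding "checked" to `markdown_attrs` lets rendered task-list checkboxes keep their state after sanitization.

```python
# Hedged sketch only: `bleach` is assumed for illustration and is not
# necessarily the sanitizer RhodeCode wires up. It reuses the markdown_tags /
# markdown_attrs whitelists defined in the module above.
import bleach

rendered = '<ul><li><input type="checkbox" checked disabled> task done</li></ul>'

cleaned = bleach.clean(
    rendered,
    tags=markdown_tags,         # allowed tags from this module
    attributes=markdown_attrs,  # per-tag attribute whitelist, now including "checked"
    strip=True,                 # drop disallowed markup instead of escaping it
)
print(cleaned)  # the checked/disabled attributes survive sanitization
```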
@@ -1,492 +1,492 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2013-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 Set of hooks run by RhodeCode Enterprise
24 24 """
25 25
26 26 import os
27 27 import collections
28 28 import logging
29 29
30 30 import rhodecode
31 31 from rhodecode import events
32 32 from rhodecode.lib import helpers as h
33 33 from rhodecode.lib import audit_logger
34 34 from rhodecode.lib.utils2 import safe_str
35 35 from rhodecode.lib.exceptions import (
36 36 HTTPLockedRC, HTTPBranchProtected, UserCreationError)
37 37 from rhodecode.model.db import Repository, User
38 38
39 39 log = logging.getLogger(__name__)
40 40
41 41
42 42 class HookResponse(object):
43 43 def __init__(self, status, output):
44 44 self.status = status
45 45 self.output = output
46 46
47 47 def __add__(self, other):
48 48 other_status = getattr(other, 'status', 0)
49 49 new_status = max(self.status, other_status)
50 50 other_output = getattr(other, 'output', '')
51 51 new_output = self.output + other_output
52 52
53 53 return HookResponse(new_status, new_output)
54 54
55 55 def __bool__(self):
56 56 return self.status == 0
57 57
58 58
59 59 def is_shadow_repo(extras):
60 60 """
61 61 Returns ``True`` if this is an action executed against a shadow repository.
62 62 """
63 63 return extras['is_shadow_repo']
64 64
65 65
66 66 def _get_scm_size(alias, root_path):
67 67
68 68 if not alias.startswith('.'):
69 69 alias += '.'
70 70
71 71 size_scm, size_root = 0, 0
72 72 for path, unused_dirs, files in os.walk(safe_str(root_path)):
73 73 if path.find(alias) != -1:
74 74 for f in files:
75 75 try:
76 76 size_scm += os.path.getsize(os.path.join(path, f))
77 77 except OSError:
78 78 pass
79 79 else:
80 80 for f in files:
81 81 try:
82 82 size_root += os.path.getsize(os.path.join(path, f))
83 83 except OSError:
84 84 pass
85 85
86 86 size_scm_f = h.format_byte_size_binary(size_scm)
87 87 size_root_f = h.format_byte_size_binary(size_root)
88 88 size_total_f = h.format_byte_size_binary(size_root + size_scm)
89 89
90 90 return size_scm_f, size_root_f, size_total_f
91 91
92 92
93 93 # actual hooks called by Mercurial internally, and GIT by our Python Hooks
94 94 def repo_size(extras):
95 95 """Present size of repository after push."""
96 96 repo = Repository.get_by_repo_name(extras.repository)
97 97 vcs_part = safe_str(u'.%s' % repo.repo_type)
98 98 size_vcs, size_root, size_total = _get_scm_size(vcs_part,
99 99 repo.repo_full_path)
100 100 msg = ('Repository `%s` size summary %s:%s repo:%s total:%s\n'
101 101 % (repo.repo_name, vcs_part, size_vcs, size_root, size_total))
102 102 return HookResponse(0, msg)
103 103
104 104
105 105 def pre_push(extras):
106 106 """
107 107 Hook executed before pushing code.
108 108
109 109 It bans pushing when the repository is locked.
110 110 """
111 111
112 112 user = User.get_by_username(extras.username)
113 113 output = ''
114 114 if extras.locked_by[0] and user.user_id != int(extras.locked_by[0]):
115 115 locked_by = User.get(extras.locked_by[0]).username
116 116 reason = extras.locked_by[2]
117 117 # this exception is interpreted in git/hg middlewares and based
 118 118 # on that the proper return code is served to the client
119 119 _http_ret = HTTPLockedRC(
120 120 _locked_by_explanation(extras.repository, locked_by, reason))
121 121 if str(_http_ret.code).startswith('2'):
122 122 # 2xx Codes don't raise exceptions
123 123 output = _http_ret.title
124 124 else:
125 125 raise _http_ret
126 126
127 127 hook_response = ''
128 128 if not is_shadow_repo(extras):
129 129 if extras.commit_ids and extras.check_branch_perms:
130 130
131 131 auth_user = user.AuthUser()
132 132 repo = Repository.get_by_repo_name(extras.repository)
133 133 affected_branches = []
134 134 if repo.repo_type == 'hg':
135 135 for entry in extras.commit_ids:
136 136 if entry['type'] == 'branch':
137 137 is_forced = bool(entry['multiple_heads'])
138 138 affected_branches.append([entry['name'], is_forced])
139 139 elif repo.repo_type == 'git':
140 140 for entry in extras.commit_ids:
141 141 if entry['type'] == 'heads':
142 142 is_forced = bool(entry['pruned_sha'])
143 143 affected_branches.append([entry['name'], is_forced])
144 144
145 145 for branch_name, is_forced in affected_branches:
146 146
147 147 rule, branch_perm = auth_user.get_rule_and_branch_permission(
148 148 extras.repository, branch_name)
149 149 if not branch_perm:
150 150 # no branch permission found for this branch, just keep checking
151 151 continue
152 152
153 153 if branch_perm == 'branch.push_force':
154 154 continue
155 155 elif branch_perm == 'branch.push' and is_forced is False:
156 156 continue
157 157 elif branch_perm == 'branch.push' and is_forced is True:
158 158 halt_message = 'Branch `{}` changes rejected by rule {}. ' \
159 159 'FORCE PUSH FORBIDDEN.'.format(branch_name, rule)
160 160 else:
161 161 halt_message = 'Branch `{}` changes rejected by rule {}.'.format(
162 162 branch_name, rule)
163 163
164 164 if halt_message:
165 165 _http_ret = HTTPBranchProtected(halt_message)
166 166 raise _http_ret
167 167
168 168 # Propagate to external components. This is done after checking the
169 169 # lock, for consistent behavior.
170 170 hook_response = pre_push_extension(
171 171 repo_store_path=Repository.base_path(), **extras)
172 172 events.trigger(events.RepoPrePushEvent(
173 173 repo_name=extras.repository, extras=extras))
174 174
175 175 return HookResponse(0, output) + hook_response
176 176
177 177
178 178 def pre_pull(extras):
179 179 """
180 180 Hook executed before pulling the code.
181 181
182 182 It bans pulling when the repository is locked.
183 183 """
184 184
185 185 output = ''
186 186 if extras.locked_by[0]:
187 187 locked_by = User.get(extras.locked_by[0]).username
188 188 reason = extras.locked_by[2]
189 189 # this exception is interpreted in git/hg middlewares and based
 191 191 # on that the proper return code is served to the client
191 191 _http_ret = HTTPLockedRC(
192 192 _locked_by_explanation(extras.repository, locked_by, reason))
193 193 if str(_http_ret.code).startswith('2'):
194 194 # 2xx Codes don't raise exceptions
195 195 output = _http_ret.title
196 196 else:
197 197 raise _http_ret
198 198
199 199 # Propagate to external components. This is done after checking the
200 200 # lock, for consistent behavior.
201 201 hook_response = ''
202 202 if not is_shadow_repo(extras):
203 203 extras.hook_type = extras.hook_type or 'pre_pull'
204 204 hook_response = pre_pull_extension(
205 205 repo_store_path=Repository.base_path(), **extras)
206 206 events.trigger(events.RepoPrePullEvent(
207 207 repo_name=extras.repository, extras=extras))
208 208
209 209 return HookResponse(0, output) + hook_response
210 210
211 211
212 212 def post_pull(extras):
213 213 """Hook executed after client pulls the code."""
214 214
215 215 audit_user = audit_logger.UserWrap(
216 216 username=extras.username,
217 217 ip_addr=extras.ip)
218 218 repo = audit_logger.RepoWrap(repo_name=extras.repository)
219 219 audit_logger.store(
220 220 'user.pull', action_data={'user_agent': extras.user_agent},
221 221 user=audit_user, repo=repo, commit=True)
222 222
223 223 output = ''
224 224 # make lock is a tri state False, True, None. We only make lock on True
225 225 if extras.make_lock is True and not is_shadow_repo(extras):
226 226 user = User.get_by_username(extras.username)
227 227 Repository.lock(Repository.get_by_repo_name(extras.repository),
228 228 user.user_id,
229 229 lock_reason=Repository.LOCK_PULL)
230 230 msg = 'Made lock on repo `%s`' % (extras.repository,)
231 231 output += msg
232 232
233 233 if extras.locked_by[0]:
234 234 locked_by = User.get(extras.locked_by[0]).username
235 235 reason = extras.locked_by[2]
236 236 _http_ret = HTTPLockedRC(
237 237 _locked_by_explanation(extras.repository, locked_by, reason))
238 238 if str(_http_ret.code).startswith('2'):
239 239 # 2xx Codes don't raise exceptions
240 240 output += _http_ret.title
241 241
242 242 # Propagate to external components.
243 243 hook_response = ''
244 244 if not is_shadow_repo(extras):
245 245 extras.hook_type = extras.hook_type or 'post_pull'
246 246 hook_response = post_pull_extension(
247 247 repo_store_path=Repository.base_path(), **extras)
248 248 events.trigger(events.RepoPullEvent(
249 249 repo_name=extras.repository, extras=extras))
250 250
251 251 return HookResponse(0, output) + hook_response
252 252
253 253
254 254 def post_push(extras):
255 255 """Hook executed after user pushes to the repository."""
256 256 commit_ids = extras.commit_ids
257 257
258 258 # log the push call
259 259 audit_user = audit_logger.UserWrap(
260 260 username=extras.username, ip_addr=extras.ip)
261 261 repo = audit_logger.RepoWrap(repo_name=extras.repository)
262 262 audit_logger.store(
263 263 'user.push', action_data={
264 264 'user_agent': extras.user_agent,
265 265 'commit_ids': commit_ids[:400]},
266 266 user=audit_user, repo=repo, commit=True)
267 267
268 268 # Propagate to external components.
269 269 output = ''
270 270 # make lock is a tri state False, True, None. We only release lock on False
271 271 if extras.make_lock is False and not is_shadow_repo(extras):
272 272 Repository.unlock(Repository.get_by_repo_name(extras.repository))
273 msg = 'Released lock on repo `%s`\n' % extras.repository
273 msg = 'Released lock on repo `{}`\n'.format(safe_str(extras.repository))
274 274 output += msg
275 275
276 276 if extras.locked_by[0]:
277 277 locked_by = User.get(extras.locked_by[0]).username
278 278 reason = extras.locked_by[2]
279 279 _http_ret = HTTPLockedRC(
280 280 _locked_by_explanation(extras.repository, locked_by, reason))
281 281 # TODO: johbo: if not?
282 282 if str(_http_ret.code).startswith('2'):
283 283 # 2xx Codes don't raise exceptions
284 284 output += _http_ret.title
285 285
286 286 if extras.new_refs:
287 tmpl = extras.server_url + '/' + extras.repository + \
288 "/pull-request/new?{ref_type}={ref_name}"
287 tmpl = '{}/{}/pull-request/new?{{ref_type}}={{ref_name}}'.format(
288 safe_str(extras.server_url), safe_str(extras.repository))
289 289
290 290 for branch_name in extras.new_refs['branches']:
291 291 output += 'RhodeCode: open pull request link: {}\n'.format(
292 292 tmpl.format(ref_type='branch', ref_name=safe_str(branch_name)))
293 293
294 294 for book_name in extras.new_refs['bookmarks']:
295 295 output += 'RhodeCode: open pull request link: {}\n'.format(
296 296 tmpl.format(ref_type='bookmark', ref_name=safe_str(book_name)))
297 297
298 298 hook_response = ''
299 299 if not is_shadow_repo(extras):
300 300 hook_response = post_push_extension(
301 301 repo_store_path=Repository.base_path(),
302 302 **extras)
303 303 events.trigger(events.RepoPushEvent(
304 304 repo_name=extras.repository, pushed_commit_ids=commit_ids, extras=extras))
305 305
306 306 output += 'RhodeCode: push completed\n'
307 307 return HookResponse(0, output) + hook_response
308 308
309 309
310 310 def _locked_by_explanation(repo_name, user_name, reason):
311 311 message = (
312 312 'Repository `%s` locked by user `%s`. Reason:`%s`'
313 313 % (repo_name, user_name, reason))
314 314 return message
315 315
316 316
317 317 def check_allowed_create_user(user_dict, created_by, **kwargs):
318 318 # pre create hooks
319 319 if pre_create_user.is_active():
320 320 hook_result = pre_create_user(created_by=created_by, **user_dict)
321 321 allowed = hook_result.status == 0
322 322 if not allowed:
323 323 reason = hook_result.output
324 324 raise UserCreationError(reason)
325 325
326 326
327 327 class ExtensionCallback(object):
328 328 """
329 329 Forwards a given call to rcextensions, sanitizes keyword arguments.
330 330
331 331 Does check if there is an extension active for that hook. If it is
332 332 there, it will forward all `kwargs_keys` keyword arguments to the
333 333 extension callback.
334 334 """
335 335
336 336 def __init__(self, hook_name, kwargs_keys):
337 337 self._hook_name = hook_name
338 338 self._kwargs_keys = set(kwargs_keys)
339 339
340 340 def __call__(self, *args, **kwargs):
341 341 log.debug('Calling extension callback for `%s`', self._hook_name)
342 342 callback = self._get_callback()
343 343 if not callback:
344 344 log.debug('extension callback `%s` not found, skipping...', self._hook_name)
345 345 return
346 346
347 347 kwargs_to_pass = {}
348 348 for key in self._kwargs_keys:
349 349 try:
350 350 kwargs_to_pass[key] = kwargs[key]
351 351 except KeyError:
352 352 log.error('Failed to fetch %s key. Expected keys: %s',
353 353 key, self._kwargs_keys)
354 354 raise
355 355
 356 356 # backward compat for removed api_key for old hooks. This way it works
357 357 # with older rcextensions that require api_key present
358 358 if self._hook_name in ['CREATE_USER_HOOK', 'DELETE_USER_HOOK']:
359 359 kwargs_to_pass['api_key'] = '_DEPRECATED_'
360 360 return callback(**kwargs_to_pass)
361 361
362 362 def is_active(self):
363 363 return hasattr(rhodecode.EXTENSIONS, self._hook_name)
364 364
365 365 def _get_callback(self):
366 366 return getattr(rhodecode.EXTENSIONS, self._hook_name, None)
367 367
368 368
369 369 pre_pull_extension = ExtensionCallback(
370 370 hook_name='PRE_PULL_HOOK',
371 371 kwargs_keys=(
372 372 'server_url', 'config', 'scm', 'username', 'ip', 'action',
373 373 'repository', 'hook_type', 'user_agent', 'repo_store_path',))
374 374
375 375
376 376 post_pull_extension = ExtensionCallback(
377 377 hook_name='PULL_HOOK',
378 378 kwargs_keys=(
379 379 'server_url', 'config', 'scm', 'username', 'ip', 'action',
380 380 'repository', 'hook_type', 'user_agent', 'repo_store_path',))
381 381
382 382
383 383 pre_push_extension = ExtensionCallback(
384 384 hook_name='PRE_PUSH_HOOK',
385 385 kwargs_keys=(
386 386 'server_url', 'config', 'scm', 'username', 'ip', 'action',
387 387 'repository', 'repo_store_path', 'commit_ids', 'hook_type', 'user_agent',))
388 388
389 389
390 390 post_push_extension = ExtensionCallback(
391 391 hook_name='PUSH_HOOK',
392 392 kwargs_keys=(
393 393 'server_url', 'config', 'scm', 'username', 'ip', 'action',
394 394 'repository', 'repo_store_path', 'commit_ids', 'hook_type', 'user_agent',))
395 395
396 396
397 397 pre_create_user = ExtensionCallback(
398 398 hook_name='PRE_CREATE_USER_HOOK',
399 399 kwargs_keys=(
400 400 'username', 'password', 'email', 'firstname', 'lastname', 'active',
401 401 'admin', 'created_by'))
402 402
403 403
404 404 log_create_pull_request = ExtensionCallback(
405 405 hook_name='CREATE_PULL_REQUEST',
406 406 kwargs_keys=(
407 407 'server_url', 'config', 'scm', 'username', 'ip', 'action',
408 408 'repository', 'pull_request_id', 'url', 'title', 'description',
409 409 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
410 410 'mergeable', 'source', 'target', 'author', 'reviewers'))
411 411
412 412
413 413 log_merge_pull_request = ExtensionCallback(
414 414 hook_name='MERGE_PULL_REQUEST',
415 415 kwargs_keys=(
416 416 'server_url', 'config', 'scm', 'username', 'ip', 'action',
417 417 'repository', 'pull_request_id', 'url', 'title', 'description',
418 418 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
419 419 'mergeable', 'source', 'target', 'author', 'reviewers'))
420 420
421 421
422 422 log_close_pull_request = ExtensionCallback(
423 423 hook_name='CLOSE_PULL_REQUEST',
424 424 kwargs_keys=(
425 425 'server_url', 'config', 'scm', 'username', 'ip', 'action',
426 426 'repository', 'pull_request_id', 'url', 'title', 'description',
427 427 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
428 428 'mergeable', 'source', 'target', 'author', 'reviewers'))
429 429
430 430
431 431 log_review_pull_request = ExtensionCallback(
432 432 hook_name='REVIEW_PULL_REQUEST',
433 433 kwargs_keys=(
434 434 'server_url', 'config', 'scm', 'username', 'ip', 'action',
435 435 'repository', 'pull_request_id', 'url', 'title', 'description',
436 436 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
437 437 'mergeable', 'source', 'target', 'author', 'reviewers'))
438 438
439 439
440 440 log_update_pull_request = ExtensionCallback(
441 441 hook_name='UPDATE_PULL_REQUEST',
442 442 kwargs_keys=(
443 443 'server_url', 'config', 'scm', 'username', 'ip', 'action',
444 444 'repository', 'pull_request_id', 'url', 'title', 'description',
445 445 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
446 446 'mergeable', 'source', 'target', 'author', 'reviewers'))
447 447
448 448
449 449 log_create_user = ExtensionCallback(
450 450 hook_name='CREATE_USER_HOOK',
451 451 kwargs_keys=(
452 452 'username', 'full_name_or_username', 'full_contact', 'user_id',
453 453 'name', 'firstname', 'short_contact', 'admin', 'lastname',
454 454 'ip_addresses', 'extern_type', 'extern_name',
455 455 'email', 'api_keys', 'last_login',
456 456 'full_name', 'active', 'password', 'emails',
457 457 'inherit_default_permissions', 'created_by', 'created_on'))
458 458
459 459
460 460 log_delete_user = ExtensionCallback(
461 461 hook_name='DELETE_USER_HOOK',
462 462 kwargs_keys=(
463 463 'username', 'full_name_or_username', 'full_contact', 'user_id',
464 464 'name', 'firstname', 'short_contact', 'admin', 'lastname',
465 465 'ip_addresses',
466 466 'email', 'last_login',
467 467 'full_name', 'active', 'password', 'emails',
468 468 'inherit_default_permissions', 'deleted_by'))
469 469
470 470
471 471 log_create_repository = ExtensionCallback(
472 472 hook_name='CREATE_REPO_HOOK',
473 473 kwargs_keys=(
474 474 'repo_name', 'repo_type', 'description', 'private', 'created_on',
475 475 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
476 476 'clone_uri', 'fork_id', 'group_id', 'created_by'))
477 477
478 478
479 479 log_delete_repository = ExtensionCallback(
480 480 hook_name='DELETE_REPO_HOOK',
481 481 kwargs_keys=(
482 482 'repo_name', 'repo_type', 'description', 'private', 'created_on',
483 483 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
484 484 'clone_uri', 'fork_id', 'group_id', 'deleted_by', 'deleted_on'))
485 485
486 486
487 487 log_create_repository_group = ExtensionCallback(
488 488 hook_name='CREATE_REPO_GROUP_HOOK',
489 489 kwargs_keys=(
490 490 'group_name', 'group_parent_id', 'group_description',
491 491 'group_id', 'user_id', 'created_by', 'created_on',
492 492 'enable_locking'))
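To make the post_push change above concrete, here is a hedged, standalone sketch of the link-template pattern it switches to: building the pull-request URL with str.format() over byte-safe strings instead of `+` concatenation, so non-ascii branch names no longer trip unicode errors. The `safe_str` below is a simplified stand-in for rhodecode.lib.utils2.safe_str, and the server URL, repository, and branch names are made up.

```python
# -*- coding: utf-8 -*-
# Simplified stand-in for rhodecode.lib.utils2.safe_str (Python 2, matching
# the codebase): coerce unicode to utf-8 bytes before string formatting.
def safe_str(value):
    if isinstance(value, unicode):
        return value.encode('utf-8')
    return value


def pull_request_links(server_url, repository, branch_names):
    # Same shape as the template built in post_push above.
    tmpl = '{}/{}/pull-request/new?{{ref_type}}={{ref_name}}'.format(
        safe_str(server_url), safe_str(repository))
    return [tmpl.format(ref_type='branch', ref_name=safe_str(name))
            for name in branch_names]


# A non-ascii branch name is handled without UnicodeDecodeError.
print(pull_request_links(
    u'https://code.example.com', u'group/repo', [u'main', u'gałąź-1']))
```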
@@ -1,77 +1,62 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 import re
22
23 21 import markdown
24 22
25 23 from mdx_gfm import GithubFlavoredMarkdownExtension # pragma: no cover
26 24
27 25
28 class FlavoredCheckboxPostprocessor(markdown.postprocessors.Postprocessor):
29 """
30 Adds `flavored_checkbox_list` class to list of checkboxes
31 """
32
33 pattern = re.compile(r'^([*-]) \[([ x])\]')
34
35 def run(self, html):
36 before = '<ul>\n<li><input type="checkbox"'
37 after = '<ul class="flavored_checkbox_list">\n<li><input type="checkbox"'
38 return html.replace(before, after)
39
40
41 26 # Global Vars
42 27 URLIZE_RE = '(%s)' % '|'.join([
43 28 r'<(?:f|ht)tps?://[^>]*>',
44 29 r'\b(?:f|ht)tps?://[^)<>\s]+[^.,)<>\s]',
45 30 r'\bwww\.[^)<>\s]+[^.,)<>\s]',
46 31 r'[^(<\s]+\.(?:com|net|org)\b',
47 32 ])
48 33
49 34
50 35 class UrlizePattern(markdown.inlinepatterns.Pattern):
51 36 """ Return a link Element given an autolink (`http://example/com`). """
52 37 def handleMatch(self, m):
53 38 url = m.group(2)
54 39
55 40 if url.startswith('<'):
56 41 url = url[1:-1]
57 42
58 43 text = url
59 44
60 45 if not url.split('://')[0] in ('http','https','ftp'):
61 46 if '@' in url and not '/' in url:
62 47 url = 'mailto:' + url
63 48 else:
64 49 url = 'http://' + url
65 50
66 51 el = markdown.util.etree.Element("a")
67 52 el.set('href', url)
68 53 el.text = markdown.util.AtomicString(text)
69 54 return el
70 55
71 56
72 57 class UrlizeExtension(markdown.Extension):
73 58 """ Urlize Extension for Python-Markdown. """
74 59
75 60 def extendMarkdown(self, md, md_globals):
76 61 """ Replace autolink with UrlizePattern """
77 62 md.inlinePatterns['autolink'] = UrlizePattern(URLIZE_RE, md)
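A short, hedged usage sketch of the UrlizeExtension defined above, wired into the python-markdown 2.x API (the same extendMarkdown(md, md_globals) signature this module targets). The input string is made up; the point is that bare URLs are auto-linked by UrlizePattern.

```python
import markdown

# Assumes python-markdown 2.x, where extension instances can be passed directly.
md = markdown.Markdown(extensions=[UrlizeExtension()])
html = md.convert('docs live at www.example.com and https://rhodecode.com')
print(html)  # both bare URLs come back wrapped in <a href="..."> elements
```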
@@ -1,63 +1,64 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22
23 23 from rhodecode.lib.paster_commands import BasePasterCommand, Command
24 24
25 25 log = logging.getLogger(__name__)
26 26
27 27
28 28 class UpgradeDb(BasePasterCommand):
29 """Command used for paster to upgrade our database to newer version
29 """
30 Command used for paster to upgrade our database to newer version
30 31 """
31 32
32 33 max_args = 1
33 34 min_args = 1
34 35
35 36 usage = "CONFIG_FILE"
36 37 summary = "Upgrades current db to newer version"
37 38 group_name = "RhodeCode"
38 39
39 40 parser = Command.standard_parser(verbose=True)
40 41
41 42 def command(self):
42 43 from rhodecode.lib.rc_commands import upgrade_db
43 44 upgrade_db.command(
44 self.path_to_ini_file, self.options.__dict__.get('force_ask'))
45 self.path_to_ini_file, self.options.__dict__.get('force_ask'), None)
45 46
46 47 def update_parser(self):
47 48 self.parser.add_option('--sql',
48 49 action='store_true',
49 50 dest='just_sql',
50 51 help="Prints upgrade sql for further investigation",
51 52 default=False)
52 53
53 54 self.parser.add_option('--force-yes',
54 55 action='store_true',
55 56 dest='force_ask',
56 57 default=None,
57 58 help='Force yes to every question')
58 59 self.parser.add_option('--force-no',
59 60 action='store_false',
60 61 dest='force_ask',
61 62 default=None,
62 63 help='Force no to every question')
63 64
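As a rough illustration (an assumption, not part of this changeset), the wrapped command can also be driven directly from Python, mirroring the call made in UpgradeDb.command() above:

    from rhodecode.lib.rc_commands import upgrade_db

    # path to the instance .ini file; force_ask=True answers "yes" to every
    # prompt (same effect as --force-yes), and the trailing None matches the
    # new third positional argument introduced in this diff
    upgrade_db.command('/etc/rhodecode/rhodecode.ini', True, None)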
@@ -1,1716 +1,1717 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31 import collections
32 32
33 33 from pyramid import compat
34 34 from pyramid.threadlocal import get_current_request
35 35
36 36 from rhodecode import events
37 37 from rhodecode.translation import lazy_ugettext
38 38 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 39 from rhodecode.lib import audit_logger
40 40 from rhodecode.lib.compat import OrderedDict
41 41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
42 42 from rhodecode.lib.markup_renderer import (
43 43 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
44 44 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
45 45 from rhodecode.lib.vcs.backends.base import (
46 46 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
47 47 from rhodecode.lib.vcs.conf import settings as vcs_settings
48 48 from rhodecode.lib.vcs.exceptions import (
49 49 CommitDoesNotExistError, EmptyRepositoryError)
50 50 from rhodecode.model import BaseModel
51 51 from rhodecode.model.changeset_status import ChangesetStatusModel
52 52 from rhodecode.model.comment import CommentsModel
53 53 from rhodecode.model.db import (
54 54 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
55 55 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
56 56 from rhodecode.model.meta import Session
57 57 from rhodecode.model.notification import NotificationModel, \
58 58 EmailNotificationModel
59 59 from rhodecode.model.scm import ScmModel
60 60 from rhodecode.model.settings import VcsSettingsModel
61 61
62 62
63 63 log = logging.getLogger(__name__)
64 64
65 65
66 66 # Data structure to hold the response data when updating commits during a pull
67 67 # request update.
68 68 UpdateResponse = collections.namedtuple('UpdateResponse', [
69 69 'executed', 'reason', 'new', 'old', 'changes',
70 70 'source_changed', 'target_changed'])
71 71
72 72
73 73 class PullRequestModel(BaseModel):
74 74
75 75 cls = PullRequest
76 76
77 77 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
78 78
79 79 UPDATE_STATUS_MESSAGES = {
80 80 UpdateFailureReason.NONE: lazy_ugettext(
81 81 'Pull request update successful.'),
82 82 UpdateFailureReason.UNKNOWN: lazy_ugettext(
83 83 'Pull request update failed because of an unknown error.'),
84 84 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
85 85 'No update needed because the source and target have not changed.'),
86 86 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
87 87 'Pull request cannot be updated because the reference type is '
88 88 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
89 89 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
90 90 'This pull request cannot be updated because the target '
91 91 'reference is missing.'),
92 92 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
93 93 'This pull request cannot be updated because the source '
94 94 'reference is missing.'),
95 95 }
96 96 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
97 97 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
98 98
99 99 def __get_pull_request(self, pull_request):
100 100 return self._get_instance((
101 101 PullRequest, PullRequestVersion), pull_request)
102 102
103 103 def _check_perms(self, perms, pull_request, user, api=False):
104 104 if not api:
105 105 return h.HasRepoPermissionAny(*perms)(
106 106 user=user, repo_name=pull_request.target_repo.repo_name)
107 107 else:
108 108 return h.HasRepoPermissionAnyApi(*perms)(
109 109 user=user, repo_name=pull_request.target_repo.repo_name)
110 110
111 111 def check_user_read(self, pull_request, user, api=False):
112 112 _perms = ('repository.admin', 'repository.write', 'repository.read',)
113 113 return self._check_perms(_perms, pull_request, user, api)
114 114
115 115 def check_user_merge(self, pull_request, user, api=False):
116 116 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
117 117 return self._check_perms(_perms, pull_request, user, api)
118 118
119 119 def check_user_update(self, pull_request, user, api=False):
120 120 owner = user.user_id == pull_request.user_id
121 121 return self.check_user_merge(pull_request, user, api) or owner
122 122
123 123 def check_user_delete(self, pull_request, user):
124 124 owner = user.user_id == pull_request.user_id
125 125 _perms = ('repository.admin',)
126 126 return self._check_perms(_perms, pull_request, user) or owner
127 127
128 128 def check_user_change_status(self, pull_request, user, api=False):
129 129 reviewer = user.user_id in [x.user_id for x in
130 130 pull_request.reviewers]
131 131 return self.check_user_update(pull_request, user, api) or reviewer
132 132
133 133 def check_user_comment(self, pull_request, user):
134 134 owner = user.user_id == pull_request.user_id
135 135 return self.check_user_read(pull_request, user) or owner
136 136
137 137 def get(self, pull_request):
138 138 return self.__get_pull_request(pull_request)
139 139
140 140 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
141 141 opened_by=None, order_by=None,
142 142 order_dir='desc', only_created=True):
143 143 repo = None
144 144 if repo_name:
145 145 repo = self._get_repo(repo_name)
146 146
147 147 q = PullRequest.query()
148 148
149 149 # source or target
150 150 if repo and source:
151 151 q = q.filter(PullRequest.source_repo == repo)
152 152 elif repo:
153 153 q = q.filter(PullRequest.target_repo == repo)
154 154
155 155 # closed,opened
156 156 if statuses:
157 157 q = q.filter(PullRequest.status.in_(statuses))
158 158
159 159 # opened by filter
160 160 if opened_by:
161 161 q = q.filter(PullRequest.user_id.in_(opened_by))
162 162
163 163 # only get those that are in "created" state
164 164 if only_created:
165 165 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
166 166
167 167 if order_by:
168 168 order_map = {
169 169 'name_raw': PullRequest.pull_request_id,
170 170 'id': PullRequest.pull_request_id,
171 171 'title': PullRequest.title,
172 172 'updated_on_raw': PullRequest.updated_on,
173 173 'target_repo': PullRequest.target_repo_id
174 174 }
175 175 if order_dir == 'asc':
176 176 q = q.order_by(order_map[order_by].asc())
177 177 else:
178 178 q = q.order_by(order_map[order_by].desc())
179 179
180 180 return q
181 181
182 182 def count_all(self, repo_name, source=False, statuses=None,
183 183 opened_by=None):
184 184 """
185 185 Count the number of pull requests for a specific repository.
186 186
187 187 :param repo_name: target or source repo
188 188 :param source: boolean flag to specify if repo_name refers to source
189 189 :param statuses: list of pull request statuses
190 190 :param opened_by: author user of the pull request
191 191 :returns: int number of pull requests
192 192 """
193 193 q = self._prepare_get_all_query(
194 194 repo_name, source=source, statuses=statuses, opened_by=opened_by)
195 195
196 196 return q.count()
197 197
198 198 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
199 199 offset=0, length=None, order_by=None, order_dir='desc'):
200 200 """
201 201 Get all pull requests for a specific repository.
202 202
203 203 :param repo_name: target or source repo
204 204 :param source: boolean flag to specify if repo_name refers to source
205 205 :param statuses: list of pull request statuses
206 206 :param opened_by: author user of the pull request
207 207 :param offset: pagination offset
208 208 :param length: length of returned list
209 209 :param order_by: order of the returned list
210 210 :param order_dir: 'asc' or 'desc' ordering direction
211 211 :returns: list of pull requests
212 212 """
213 213 q = self._prepare_get_all_query(
214 214 repo_name, source=source, statuses=statuses, opened_by=opened_by,
215 215 order_by=order_by, order_dir=order_dir)
216 216
217 217 if length:
218 218 pull_requests = q.limit(length).offset(offset).all()
219 219 else:
220 220 pull_requests = q.all()
221 221
222 222 return pull_requests
223 223
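A minimal usage sketch of the two query helpers above (repository name and paging values are illustrative only):

    model = PullRequestModel()
    # the 20 most recently updated pull requests targeting "some/repo"
    recent = model.get_all(
        'some/repo', offset=0, length=20,
        order_by='updated_on_raw', order_dir='desc')
    # total number of pull requests targeting the same repository
    total = model.count_all('some/repo')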
224 224 def count_awaiting_review(self, repo_name, source=False, statuses=None,
225 225 opened_by=None):
226 226 """
227 227 Count the number of pull requests for a specific repository that are
228 228 awaiting review.
229 229
230 230 :param repo_name: target or source repo
231 231 :param source: boolean flag to specify if repo_name refers to source
232 232 :param statuses: list of pull request statuses
233 233 :param opened_by: author user of the pull request
234 234 :returns: int number of pull requests
235 235 """
236 236 pull_requests = self.get_awaiting_review(
237 237 repo_name, source=source, statuses=statuses, opened_by=opened_by)
238 238
239 239 return len(pull_requests)
240 240
241 241 def get_awaiting_review(self, repo_name, source=False, statuses=None,
242 242 opened_by=None, offset=0, length=None,
243 243 order_by=None, order_dir='desc'):
244 244 """
245 245 Get all pull requests for a specific repository that are awaiting
246 246 review.
247 247
248 248 :param repo_name: target or source repo
249 249 :param source: boolean flag to specify if repo_name refers to source
250 250 :param statuses: list of pull request statuses
251 251 :param opened_by: author user of the pull request
252 252 :param offset: pagination offset
253 253 :param length: length of returned list
254 254 :param order_by: order of the returned list
255 255 :param order_dir: 'asc' or 'desc' ordering direction
256 256 :returns: list of pull requests
257 257 """
258 258 pull_requests = self.get_all(
259 259 repo_name, source=source, statuses=statuses, opened_by=opened_by,
260 260 order_by=order_by, order_dir=order_dir)
261 261
262 262 _filtered_pull_requests = []
263 263 for pr in pull_requests:
264 264 status = pr.calculated_review_status()
265 265 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
266 266 ChangesetStatus.STATUS_UNDER_REVIEW]:
267 267 _filtered_pull_requests.append(pr)
268 268 if length:
269 269 return _filtered_pull_requests[offset:offset+length]
270 270 else:
271 271 return _filtered_pull_requests
272 272
273 273 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
274 274 opened_by=None, user_id=None):
275 275 """
276 276 Count the number of pull requests for a specific repository that are
277 277 awaiting review from a specific user.
278 278
279 279 :param repo_name: target or source repo
280 280 :param source: boolean flag to specify if repo_name refers to source
281 281 :param statuses: list of pull request statuses
282 282 :param opened_by: author user of the pull request
283 283 :param user_id: reviewer user of the pull request
284 284 :returns: int number of pull requests
285 285 """
286 286 pull_requests = self.get_awaiting_my_review(
287 287 repo_name, source=source, statuses=statuses, opened_by=opened_by,
288 288 user_id=user_id)
289 289
290 290 return len(pull_requests)
291 291
292 292 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
293 293 opened_by=None, user_id=None, offset=0,
294 294 length=None, order_by=None, order_dir='desc'):
295 295 """
296 296 Get all pull requests for a specific repository that are awaiting
297 297 review from a specific user.
298 298
299 299 :param repo_name: target or source repo
300 300 :param source: boolean flag to specify if repo_name refers to source
301 301 :param statuses: list of pull request statuses
302 302 :param opened_by: author user of the pull request
303 303 :param user_id: reviewer user of the pull request
304 304 :param offset: pagination offset
305 305 :param length: length of returned list
306 306 :param order_by: order of the returned list
307 307 :param order_dir: 'asc' or 'desc' ordering direction
308 308 :returns: list of pull requests
309 309 """
310 310 pull_requests = self.get_all(
311 311 repo_name, source=source, statuses=statuses, opened_by=opened_by,
312 312 order_by=order_by, order_dir=order_dir)
313 313
314 314 _my = PullRequestModel().get_not_reviewed(user_id)
315 315 my_participation = []
316 316 for pr in pull_requests:
317 317 if pr in _my:
318 318 my_participation.append(pr)
319 319 _filtered_pull_requests = my_participation
320 320 if length:
321 321 return _filtered_pull_requests[offset:offset+length]
322 322 else:
323 323 return _filtered_pull_requests
324 324
325 325 def get_not_reviewed(self, user_id):
326 326 return [
327 327 x.pull_request for x in PullRequestReviewers.query().filter(
328 328 PullRequestReviewers.user_id == user_id).all()
329 329 ]
330 330
331 331 def _prepare_participating_query(self, user_id=None, statuses=None,
332 332 order_by=None, order_dir='desc'):
333 333 q = PullRequest.query()
334 334 if user_id:
335 335 reviewers_subquery = Session().query(
336 336 PullRequestReviewers.pull_request_id).filter(
337 337 PullRequestReviewers.user_id == user_id).subquery()
338 338 user_filter = or_(
339 339 PullRequest.user_id == user_id,
340 340 PullRequest.pull_request_id.in_(reviewers_subquery)
341 341 )
342 342 q = PullRequest.query().filter(user_filter)
343 343
344 344 # closed,opened
345 345 if statuses:
346 346 q = q.filter(PullRequest.status.in_(statuses))
347 347
348 348 if order_by:
349 349 order_map = {
350 350 'name_raw': PullRequest.pull_request_id,
351 351 'title': PullRequest.title,
352 352 'updated_on_raw': PullRequest.updated_on,
353 353 'target_repo': PullRequest.target_repo_id
354 354 }
355 355 if order_dir == 'asc':
356 356 q = q.order_by(order_map[order_by].asc())
357 357 else:
358 358 q = q.order_by(order_map[order_by].desc())
359 359
360 360 return q
361 361
362 362 def count_im_participating_in(self, user_id=None, statuses=None):
363 363 q = self._prepare_participating_query(user_id, statuses=statuses)
364 364 return q.count()
365 365
366 366 def get_im_participating_in(
367 367 self, user_id=None, statuses=None, offset=0,
368 368 length=None, order_by=None, order_dir='desc'):
369 369 """
370 370 Get all pull requests that I'm participating in, or have opened
371 371 """
372 372
373 373 q = self._prepare_participating_query(
374 374 user_id, statuses=statuses, order_by=order_by,
375 375 order_dir=order_dir)
376 376
377 377 if length:
378 378 pull_requests = q.limit(length).offset(offset).all()
379 379 else:
380 380 pull_requests = q.all()
381 381
382 382 return pull_requests
383 383
384 384 def get_versions(self, pull_request):
385 385 """
386 386 returns versions of the pull request, sorted by version ID ascending
387 387 """
388 388 return PullRequestVersion.query()\
389 389 .filter(PullRequestVersion.pull_request == pull_request)\
390 390 .order_by(PullRequestVersion.pull_request_version_id.asc())\
391 391 .all()
392 392
393 393 def get_pr_version(self, pull_request_id, version=None):
394 394 at_version = None
395 395
396 396 if version and version == 'latest':
397 397 pull_request_ver = PullRequest.get(pull_request_id)
398 398 pull_request_obj = pull_request_ver
399 399 _org_pull_request_obj = pull_request_obj
400 400 at_version = 'latest'
401 401 elif version:
402 402 pull_request_ver = PullRequestVersion.get_or_404(version)
403 403 pull_request_obj = pull_request_ver
404 404 _org_pull_request_obj = pull_request_ver.pull_request
405 405 at_version = pull_request_ver.pull_request_version_id
406 406 else:
407 407 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
408 408 pull_request_id)
409 409
410 410 pull_request_display_obj = PullRequest.get_pr_display_object(
411 411 pull_request_obj, _org_pull_request_obj)
412 412
413 413 return _org_pull_request_obj, pull_request_obj, \
414 414 pull_request_display_obj, at_version
415 415
416 416 def create(self, created_by, source_repo, source_ref, target_repo,
417 417 target_ref, revisions, reviewers, title, description=None,
418 418 description_renderer=None,
419 419 reviewer_data=None, translator=None, auth_user=None):
420 420 translator = translator or get_current_request().translate
421 421
422 422 created_by_user = self._get_user(created_by)
423 423 auth_user = auth_user or created_by_user.AuthUser()
424 424 source_repo = self._get_repo(source_repo)
425 425 target_repo = self._get_repo(target_repo)
426 426
427 427 pull_request = PullRequest()
428 428 pull_request.source_repo = source_repo
429 429 pull_request.source_ref = source_ref
430 430 pull_request.target_repo = target_repo
431 431 pull_request.target_ref = target_ref
432 432 pull_request.revisions = revisions
433 433 pull_request.title = title
434 434 pull_request.description = description
435 435 pull_request.description_renderer = description_renderer
436 436 pull_request.author = created_by_user
437 437 pull_request.reviewer_data = reviewer_data
438 438 pull_request.pull_request_state = pull_request.STATE_CREATING
439 439 Session().add(pull_request)
440 440 Session().flush()
441 441
442 442 reviewer_ids = set()
443 443 # members / reviewers
444 444 for reviewer_object in reviewers:
445 445 user_id, reasons, mandatory, rules = reviewer_object
446 446 user = self._get_user(user_id)
447 447
448 448 # skip duplicates
449 449 if user.user_id in reviewer_ids:
450 450 continue
451 451
452 452 reviewer_ids.add(user.user_id)
453 453
454 454 reviewer = PullRequestReviewers()
455 455 reviewer.user = user
456 456 reviewer.pull_request = pull_request
457 457 reviewer.reasons = reasons
458 458 reviewer.mandatory = mandatory
459 459
460 460 # NOTE(marcink): pick only first rule for now
461 461 rule_id = list(rules)[0] if rules else None
462 462 rule = RepoReviewRule.get(rule_id) if rule_id else None
463 463 if rule:
464 464 review_group = rule.user_group_vote_rule(user_id)
465 465 # we check if this particular reviewer is member of a voting group
466 466 if review_group:
467 467 # NOTE(marcink):
468 468 # it can be that the user is a member of more groups, but we pick the first one,
469 469 # the same as the default reviewers algorithm
470 470 review_group = review_group[0]
471 471
472 472 rule_data = {
473 473 'rule_name':
474 474 rule.review_rule_name,
475 475 'rule_user_group_entry_id':
476 476 review_group.repo_review_rule_users_group_id,
477 477 'rule_user_group_name':
478 478 review_group.users_group.users_group_name,
479 479 'rule_user_group_members':
480 480 [x.user.username for x in review_group.users_group.members],
481 481 'rule_user_group_members_id':
482 482 [x.user.user_id for x in review_group.users_group.members],
483 483 }
484 484 # e.g {'vote_rule': -1, 'mandatory': True}
485 485 rule_data.update(review_group.rule_data())
486 486
487 487 reviewer.rule_data = rule_data
488 488
489 489 Session().add(reviewer)
490 490 Session().flush()
491 491
492 492 # Set approval status to "Under Review" for all commits which are
493 493 # part of this pull request.
494 494 ChangesetStatusModel().set_status(
495 495 repo=target_repo,
496 496 status=ChangesetStatus.STATUS_UNDER_REVIEW,
497 497 user=created_by_user,
498 498 pull_request=pull_request
499 499 )
500 500 # we commit early at this point. This has to do with the fact
501 501 # that the queries above do some row-locking, and because of that
502 502 # we need to commit and finish the transaction before the validate call below,
503 503 # which for large repos could take long and result in long row locks
504 504 Session().commit()
505 505
506 506 # prepare workspace, and run initial merge simulation. Set state during that
507 507 # operation
508 508 pull_request = PullRequest.get(pull_request.pull_request_id)
509 509
510 510 # set as merging, for simulation, and if finished to created so we mark
511 511 # simulation is working fine
512 512 with pull_request.set_state(PullRequest.STATE_MERGING,
513 513 final_state=PullRequest.STATE_CREATED):
514 514 MergeCheck.validate(
515 515 pull_request, auth_user=auth_user, translator=translator)
516 516
517 517 self.notify_reviewers(pull_request, reviewer_ids)
518 518 self.trigger_pull_request_hook(
519 519 pull_request, created_by_user, 'create')
520 520
521 521 creation_data = pull_request.get_api_data(with_merge_state=False)
522 522 self._log_audit_action(
523 523 'repo.pull_request.create', {'data': creation_data},
524 524 auth_user, pull_request)
525 525
526 526 return pull_request
527 527
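The reviewers argument consumed by create() above is an iterable whose entries unpack as (user_id, reasons, mandatory, rules); a purely illustrative example of its shape:

    reviewers = [
        # mandatory reviewer added without any review rule
        (4, ['Default reviewer'], True, []),
        # optional reviewer attached to review rule id 21 (only the first
        # rule id is picked, as noted in the loop above)
        ('jane', ['Changed files match rule'], False, [21]),
    ]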
528 528 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
529 529 pull_request = self.__get_pull_request(pull_request)
530 530 target_scm = pull_request.target_repo.scm_instance()
531 531 if action == 'create':
532 532 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
533 533 elif action == 'merge':
534 534 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
535 535 elif action == 'close':
536 536 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
537 537 elif action == 'review_status_change':
538 538 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
539 539 elif action == 'update':
540 540 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
541 541 elif action == 'comment':
542 542 # dummy hook for comments. We want this function to handle all cases
543 543 def trigger_hook(*args, **kwargs):
544 544 pass
545 545 comment = data['comment']
546 546 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
547 547 else:
548 548 return
549 549
550 550 trigger_hook(
551 551 username=user.username,
552 552 repo_name=pull_request.target_repo.repo_name,
553 553 repo_alias=target_scm.alias,
554 554 pull_request=pull_request,
555 555 data=data)
556 556
557 557 def _get_commit_ids(self, pull_request):
558 558 """
559 559 Return the commit ids of the merged pull request.
560 560
561 561 This method does not yet deal correctly with the lack of autoupdates
562 562 nor with implicit target updates.
563 563 For example: if a commit in the source repo is already in the target, it
564 564 will be reported anyway.
565 565 """
566 566 merge_rev = pull_request.merge_rev
567 567 if merge_rev is None:
568 568 raise ValueError('This pull request was not merged yet')
569 569
570 570 commit_ids = list(pull_request.revisions)
571 571 if merge_rev not in commit_ids:
572 572 commit_ids.append(merge_rev)
573 573
574 574 return commit_ids
575 575
576 576 def merge_repo(self, pull_request, user, extras):
577 577 log.debug("Merging pull request %s", pull_request.pull_request_id)
578 578 extras['user_agent'] = 'internal-merge'
579 579 merge_state = self._merge_pull_request(pull_request, user, extras)
580 580 if merge_state.executed:
581 581 log.debug("Merge was successful, updating the pull request comments.")
582 582 self._comment_and_close_pr(pull_request, user, merge_state)
583 583
584 584 self._log_audit_action(
585 585 'repo.pull_request.merge',
586 586 {'merge_state': merge_state.__dict__},
587 587 user, pull_request)
588 588
589 589 else:
590 590 log.warn("Merge failed, not updating the pull request.")
591 591 return merge_state
592 592
593 593 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
594 594 target_vcs = pull_request.target_repo.scm_instance()
595 595 source_vcs = pull_request.source_repo.scm_instance()
596 596
597 597 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
598 598 pr_id=pull_request.pull_request_id,
599 599 pr_title=pull_request.title,
600 600 source_repo=source_vcs.name,
601 601 source_ref_name=pull_request.source_ref_parts.name,
602 602 target_repo=target_vcs.name,
603 603 target_ref_name=pull_request.target_ref_parts.name,
604 604 )
605 605
606 606 workspace_id = self._workspace_id(pull_request)
607 607 repo_id = pull_request.target_repo.repo_id
608 608 use_rebase = self._use_rebase_for_merging(pull_request)
609 609 close_branch = self._close_branch_before_merging(pull_request)
610 610
611 611 target_ref = self._refresh_reference(
612 612 pull_request.target_ref_parts, target_vcs)
613 613
614 614 callback_daemon, extras = prepare_callback_daemon(
615 615 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
616 616 host=vcs_settings.HOOKS_HOST,
617 617 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
618 618
619 619 with callback_daemon:
620 620 # TODO: johbo: Implement a clean way to run a config_override
621 621 # for a single call.
622 622 target_vcs.config.set(
623 623 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
624 624
625 625 user_name = user.short_contact
626 626 merge_state = target_vcs.merge(
627 627 repo_id, workspace_id, target_ref, source_vcs,
628 628 pull_request.source_ref_parts,
629 629 user_name=user_name, user_email=user.email,
630 630 message=message, use_rebase=use_rebase,
631 631 close_branch=close_branch)
632 632 return merge_state
633 633
634 634 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
635 635 pull_request.merge_rev = merge_state.merge_ref.commit_id
636 636 pull_request.updated_on = datetime.datetime.now()
637 637 close_msg = close_msg or 'Pull request merged and closed'
638 638
639 639 CommentsModel().create(
640 640 text=safe_unicode(close_msg),
641 641 repo=pull_request.target_repo.repo_id,
642 642 user=user.user_id,
643 643 pull_request=pull_request.pull_request_id,
644 644 f_path=None,
645 645 line_no=None,
646 646 closing_pr=True
647 647 )
648 648
649 649 Session().add(pull_request)
650 650 Session().flush()
651 651 # TODO: paris: replace invalidation with less radical solution
652 652 ScmModel().mark_for_invalidation(
653 653 pull_request.target_repo.repo_name)
654 654 self.trigger_pull_request_hook(pull_request, user, 'merge')
655 655
656 656 def has_valid_update_type(self, pull_request):
657 657 source_ref_type = pull_request.source_ref_parts.type
658 658 return source_ref_type in self.REF_TYPES
659 659
660 660 def update_commits(self, pull_request):
661 661 """
662 662 Get the updated list of commits for the pull request
663 663 and return the new pull request version and the list
664 664 of commits processed by this update action
665 665 """
666 666 pull_request = self.__get_pull_request(pull_request)
667 667 source_ref_type = pull_request.source_ref_parts.type
668 668 source_ref_name = pull_request.source_ref_parts.name
669 669 source_ref_id = pull_request.source_ref_parts.commit_id
670 670
671 671 target_ref_type = pull_request.target_ref_parts.type
672 672 target_ref_name = pull_request.target_ref_parts.name
673 673 target_ref_id = pull_request.target_ref_parts.commit_id
674 674
675 675 if not self.has_valid_update_type(pull_request):
676 676 log.debug("Skipping update of pull request %s due to ref type: %s",
677 677 pull_request, source_ref_type)
678 678 return UpdateResponse(
679 679 executed=False,
680 680 reason=UpdateFailureReason.WRONG_REF_TYPE,
681 681 old=pull_request, new=None, changes=None,
682 682 source_changed=False, target_changed=False)
683 683
684 684 # source repo
685 685 source_repo = pull_request.source_repo.scm_instance()
686 686 try:
687 687 source_commit = source_repo.get_commit(commit_id=source_ref_name)
688 688 except CommitDoesNotExistError:
689 689 return UpdateResponse(
690 690 executed=False,
691 691 reason=UpdateFailureReason.MISSING_SOURCE_REF,
692 692 old=pull_request, new=None, changes=None,
693 693 source_changed=False, target_changed=False)
694 694
695 695 source_changed = source_ref_id != source_commit.raw_id
696 696
697 697 # target repo
698 698 target_repo = pull_request.target_repo.scm_instance()
699 699 try:
700 700 target_commit = target_repo.get_commit(commit_id=target_ref_name)
701 701 except CommitDoesNotExistError:
702 702 return UpdateResponse(
703 703 executed=False,
704 704 reason=UpdateFailureReason.MISSING_TARGET_REF,
705 705 old=pull_request, new=None, changes=None,
706 706 source_changed=False, target_changed=False)
707 707 target_changed = target_ref_id != target_commit.raw_id
708 708
709 709 if not (source_changed or target_changed):
710 710 log.debug("Nothing changed in pull request %s", pull_request)
711 711 return UpdateResponse(
712 712 executed=False,
713 713 reason=UpdateFailureReason.NO_CHANGE,
714 714 old=pull_request, new=None, changes=None,
715 715 source_changed=target_changed, target_changed=source_changed)
716 716
717 717 change_in_found = 'target repo' if target_changed else 'source repo'
718 718 log.debug('Updating pull request because of change in %s detected',
719 719 change_in_found)
720 720
721 721 # Finally there is a need for an update, in case of source change
722 722 # we create a new version, else just an update
723 723 if source_changed:
724 724 pull_request_version = self._create_version_from_snapshot(pull_request)
725 725 self._link_comments_to_version(pull_request_version)
726 726 else:
727 727 try:
728 728 ver = pull_request.versions[-1]
729 729 except IndexError:
730 730 ver = None
731 731
732 732 pull_request.pull_request_version_id = \
733 733 ver.pull_request_version_id if ver else None
734 734 pull_request_version = pull_request
735 735
736 736 try:
737 737 if target_ref_type in self.REF_TYPES:
738 738 target_commit = target_repo.get_commit(target_ref_name)
739 739 else:
740 740 target_commit = target_repo.get_commit(target_ref_id)
741 741 except CommitDoesNotExistError:
742 742 return UpdateResponse(
743 743 executed=False,
744 744 reason=UpdateFailureReason.MISSING_TARGET_REF,
745 745 old=pull_request, new=None, changes=None,
746 746 source_changed=source_changed, target_changed=target_changed)
747 747
748 748 # re-compute commit ids
749 749 old_commit_ids = pull_request.revisions
750 750 pre_load = ["author", "branch", "date", "message"]
751 751 commit_ranges = target_repo.compare(
752 752 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
753 753 pre_load=pre_load)
754 754
755 755 ancestor = target_repo.get_common_ancestor(
756 756 target_commit.raw_id, source_commit.raw_id, source_repo)
757 757
758 758 pull_request.source_ref = '%s:%s:%s' % (
759 759 source_ref_type, source_ref_name, source_commit.raw_id)
760 760 pull_request.target_ref = '%s:%s:%s' % (
761 761 target_ref_type, target_ref_name, ancestor)
762 762
763 763 pull_request.revisions = [
764 764 commit.raw_id for commit in reversed(commit_ranges)]
765 765 pull_request.updated_on = datetime.datetime.now()
766 766 Session().add(pull_request)
767 767 new_commit_ids = pull_request.revisions
768 768
769 769 old_diff_data, new_diff_data = self._generate_update_diffs(
770 770 pull_request, pull_request_version)
771 771
772 772 # calculate commit and file changes
773 773 changes = self._calculate_commit_id_changes(
774 774 old_commit_ids, new_commit_ids)
775 775 file_changes = self._calculate_file_changes(
776 776 old_diff_data, new_diff_data)
777 777
778 778 # set comments as outdated if DIFFS changed
779 779 CommentsModel().outdate_comments(
780 780 pull_request, old_diff_data=old_diff_data,
781 781 new_diff_data=new_diff_data)
782 782
783 783 commit_changes = (changes.added or changes.removed)
784 784 file_node_changes = (
785 785 file_changes.added or file_changes.modified or file_changes.removed)
786 786 pr_has_changes = commit_changes or file_node_changes
787 787
788 788 # Add an automatic comment to the pull request, in case
789 789 # anything has changed
790 790 if pr_has_changes:
791 791 update_comment = CommentsModel().create(
792 792 text=self._render_update_message(changes, file_changes),
793 793 repo=pull_request.target_repo,
794 794 user=pull_request.author,
795 795 pull_request=pull_request,
796 796 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
797 797
798 798 # Update status to "Under Review" for added commits
799 799 for commit_id in changes.added:
800 800 ChangesetStatusModel().set_status(
801 801 repo=pull_request.source_repo,
802 802 status=ChangesetStatus.STATUS_UNDER_REVIEW,
803 803 comment=update_comment,
804 804 user=pull_request.author,
805 805 pull_request=pull_request,
806 806 revision=commit_id)
807 807
808 808 log.debug(
809 809 'Updated pull request %s, added_ids: %s, common_ids: %s, '
810 810 'removed_ids: %s', pull_request.pull_request_id,
811 811 changes.added, changes.common, changes.removed)
812 812 log.debug(
813 813 'Updated pull request with the following file changes: %s',
814 814 file_changes)
815 815
816 816 log.info(
817 817 "Updated pull request %s from commit %s to commit %s, "
818 818 "stored new version %s of this pull request.",
819 819 pull_request.pull_request_id, source_ref_id,
820 820 pull_request.source_ref_parts.commit_id,
821 821 pull_request_version.pull_request_version_id)
822 822 Session().commit()
823 823 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
824 824
825 825 return UpdateResponse(
826 826 executed=True, reason=UpdateFailureReason.NONE,
827 827 old=pull_request, new=pull_request_version, changes=changes,
828 828 source_changed=source_changed, target_changed=target_changed)
829 829
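A rough sketch of how a caller might interpret the UpdateResponse returned above (pull_request is assumed to be an existing PullRequest instance):

    resp = PullRequestModel().update_commits(pull_request)
    if resp.executed:
        # resp.new is the stored version (or the pull request itself when
        # only the target moved); resp.changes lists added/common/removed ids
        added_commits = resp.changes.added
    elif resp.reason == UpdateFailureReason.NO_CHANGE:
        pass  # neither the source nor the target reference moved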
830 830 def _create_version_from_snapshot(self, pull_request):
831 831 version = PullRequestVersion()
832 832 version.title = pull_request.title
833 833 version.description = pull_request.description
834 834 version.status = pull_request.status
835 835 version.pull_request_state = pull_request.pull_request_state
836 836 version.created_on = datetime.datetime.now()
837 837 version.updated_on = pull_request.updated_on
838 838 version.user_id = pull_request.user_id
839 839 version.source_repo = pull_request.source_repo
840 840 version.source_ref = pull_request.source_ref
841 841 version.target_repo = pull_request.target_repo
842 842 version.target_ref = pull_request.target_ref
843 843
844 844 version._last_merge_source_rev = pull_request._last_merge_source_rev
845 845 version._last_merge_target_rev = pull_request._last_merge_target_rev
846 846 version.last_merge_status = pull_request.last_merge_status
847 847 version.shadow_merge_ref = pull_request.shadow_merge_ref
848 848 version.merge_rev = pull_request.merge_rev
849 849 version.reviewer_data = pull_request.reviewer_data
850 850
851 851 version.revisions = pull_request.revisions
852 852 version.pull_request = pull_request
853 853 Session().add(version)
854 854 Session().flush()
855 855
856 856 return version
857 857
858 858 def _generate_update_diffs(self, pull_request, pull_request_version):
859 859
860 860 diff_context = (
861 861 self.DIFF_CONTEXT +
862 862 CommentsModel.needed_extra_diff_context())
863 863 hide_whitespace_changes = False
864 864 source_repo = pull_request_version.source_repo
865 865 source_ref_id = pull_request_version.source_ref_parts.commit_id
866 866 target_ref_id = pull_request_version.target_ref_parts.commit_id
867 867 old_diff = self._get_diff_from_pr_or_version(
868 868 source_repo, source_ref_id, target_ref_id,
869 869 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
870 870
871 871 source_repo = pull_request.source_repo
872 872 source_ref_id = pull_request.source_ref_parts.commit_id
873 873 target_ref_id = pull_request.target_ref_parts.commit_id
874 874
875 875 new_diff = self._get_diff_from_pr_or_version(
876 876 source_repo, source_ref_id, target_ref_id,
877 877 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
878 878
879 879 old_diff_data = diffs.DiffProcessor(old_diff)
880 880 old_diff_data.prepare()
881 881 new_diff_data = diffs.DiffProcessor(new_diff)
882 882 new_diff_data.prepare()
883 883
884 884 return old_diff_data, new_diff_data
885 885
886 886 def _link_comments_to_version(self, pull_request_version):
887 887 """
888 888 Link all unlinked comments of this pull request to the given version.
889 889
890 890 :param pull_request_version: The `PullRequestVersion` to which
891 891 the comments shall be linked.
892 892
893 893 """
894 894 pull_request = pull_request_version.pull_request
895 895 comments = ChangesetComment.query()\
896 896 .filter(
897 897 # TODO: johbo: Should we query for the repo at all here?
898 898 # Pending decision on how comments of PRs are to be related
899 899 # to either the source repo, the target repo or no repo at all.
900 900 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
901 901 ChangesetComment.pull_request == pull_request,
902 902 ChangesetComment.pull_request_version == None)\
903 903 .order_by(ChangesetComment.comment_id.asc())
904 904
905 905 # TODO: johbo: Find out why this breaks if it is done in a bulk
906 906 # operation.
907 907 for comment in comments:
908 908 comment.pull_request_version_id = (
909 909 pull_request_version.pull_request_version_id)
910 910 Session().add(comment)
911 911
912 912 def _calculate_commit_id_changes(self, old_ids, new_ids):
913 913 added = [x for x in new_ids if x not in old_ids]
914 914 common = [x for x in new_ids if x in old_ids]
915 915 removed = [x for x in old_ids if x not in new_ids]
916 916 total = new_ids
917 917 return ChangeTuple(added, common, removed, total)
918 918
919 919 def _calculate_file_changes(self, old_diff_data, new_diff_data):
920 920
921 921 old_files = OrderedDict()
922 922 for diff_data in old_diff_data.parsed_diff:
923 923 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
924 924
925 925 added_files = []
926 926 modified_files = []
927 927 removed_files = []
928 928 for diff_data in new_diff_data.parsed_diff:
929 929 new_filename = diff_data['filename']
930 930 new_hash = md5_safe(diff_data['raw_diff'])
931 931
932 932 old_hash = old_files.get(new_filename)
933 933 if not old_hash:
934 934 # file is not present in the old diff, which means it was added
935 935 added_files.append(new_filename)
936 936 else:
937 937 if new_hash != old_hash:
938 938 modified_files.append(new_filename)
939 939 # now remove a file from old, since we have seen it already
940 940 del old_files[new_filename]
941 941
942 942 # removed files are those present in old, but not in NEW; since we
943 943 # remove old files that are present in the new diff, any left-overs
944 944 # should be the removed files
945 945 removed_files.extend(old_files.keys())
946 946
947 947 return FileChangeTuple(added_files, modified_files, removed_files)
948 948
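A purely illustrative example of the file-change calculation above (the md5-of-raw-diff comparison decides modified vs unchanged; FileChangeTuple is a simple named tuple defined elsewhere in this module):

    # old diff touched a.py and b.py; new diff touches b.py (changed) and c.py
    # _calculate_file_changes(old_diff_data, new_diff_data) then returns
    # FileChangeTuple(['c.py'], ['b.py'], ['a.py'])
    #                  added     modified  removed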
949 949 def _render_update_message(self, changes, file_changes):
950 950 """
951 951 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
952 952 so it always looks the same regardless of which default
953 953 renderer the system is using.
954 954
955 955 :param changes: changes named tuple
956 956 :param file_changes: file changes named tuple
957 957
958 958 """
959 959 new_status = ChangesetStatus.get_status_lbl(
960 960 ChangesetStatus.STATUS_UNDER_REVIEW)
961 961
962 962 changed_files = (
963 963 file_changes.added + file_changes.modified + file_changes.removed)
964 964
965 965 params = {
966 966 'under_review_label': new_status,
967 967 'added_commits': changes.added,
968 968 'removed_commits': changes.removed,
969 969 'changed_files': changed_files,
970 970 'added_files': file_changes.added,
971 971 'modified_files': file_changes.modified,
972 972 'removed_files': file_changes.removed,
973 973 }
974 974 renderer = RstTemplateRenderer()
975 975 return renderer.render('pull_request_update.mako', **params)
976 976
977 977 def edit(self, pull_request, title, description, description_renderer, user):
978 978 pull_request = self.__get_pull_request(pull_request)
979 979 old_data = pull_request.get_api_data(with_merge_state=False)
980 980 if pull_request.is_closed():
981 981 raise ValueError('This pull request is closed')
982 982 if title:
983 983 pull_request.title = title
984 984 pull_request.description = description
985 985 pull_request.updated_on = datetime.datetime.now()
986 986 pull_request.description_renderer = description_renderer
987 987 Session().add(pull_request)
988 988 self._log_audit_action(
989 989 'repo.pull_request.edit', {'old_data': old_data},
990 990 user, pull_request)
991 991
992 992 def update_reviewers(self, pull_request, reviewer_data, user):
993 993 """
994 994 Update the reviewers in the pull request
995 995
996 996 :param pull_request: the pr to update
997 997 :param reviewer_data: list of tuples
998 998 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
999 999 """
1000 1000 pull_request = self.__get_pull_request(pull_request)
1001 1001 if pull_request.is_closed():
1002 1002 raise ValueError('This pull request is closed')
1003 1003
1004 1004 reviewers = {}
1005 1005 for user_id, reasons, mandatory, rules in reviewer_data:
1006 1006 if isinstance(user_id, (int, compat.string_types)):
1007 1007 user_id = self._get_user(user_id).user_id
1008 1008 reviewers[user_id] = {
1009 1009 'reasons': reasons, 'mandatory': mandatory}
1010 1010
1011 1011 reviewers_ids = set(reviewers.keys())
1012 1012 current_reviewers = PullRequestReviewers.query()\
1013 1013 .filter(PullRequestReviewers.pull_request ==
1014 1014 pull_request).all()
1015 1015 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1016 1016
1017 1017 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1018 1018 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1019 1019
1020 1020 log.debug("Adding %s reviewers", ids_to_add)
1021 1021 log.debug("Removing %s reviewers", ids_to_remove)
1022 1022 changed = False
1023 1023 for uid in ids_to_add:
1024 1024 changed = True
1025 1025 _usr = self._get_user(uid)
1026 1026 reviewer = PullRequestReviewers()
1027 1027 reviewer.user = _usr
1028 1028 reviewer.pull_request = pull_request
1029 1029 reviewer.reasons = reviewers[uid]['reasons']
1030 1030 # NOTE(marcink): mandatory shouldn't be changed now
1031 1031 # reviewer.mandatory = reviewers[uid]['reasons']
1032 1032 Session().add(reviewer)
1033 1033 self._log_audit_action(
1034 1034 'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()},
1035 1035 user, pull_request)
1036 1036
1037 1037 for uid in ids_to_remove:
1038 1038 changed = True
1039 1039 reviewers = PullRequestReviewers.query()\
1040 1040 .filter(PullRequestReviewers.user_id == uid,
1041 1041 PullRequestReviewers.pull_request == pull_request)\
1042 1042 .all()
1043 1043 # use .all() in case we accidentally added the same person twice
1044 1044 # this CAN happen due to the lack of DB checks
1045 1045 for obj in reviewers:
1046 1046 old_data = obj.get_dict()
1047 1047 Session().delete(obj)
1048 1048 self._log_audit_action(
1049 1049 'repo.pull_request.reviewer.delete',
1050 1050 {'old_data': old_data}, user, pull_request)
1051 1051
1052 1052 if changed:
1053 1053 pull_request.updated_on = datetime.datetime.now()
1054 1054 Session().add(pull_request)
1055 1055
1056 1056 self.notify_reviewers(pull_request, ids_to_add)
1057 1057 return ids_to_add, ids_to_remove
1058 1058
1059 1059 def get_url(self, pull_request, request=None, permalink=False):
1060 1060 if not request:
1061 1061 request = get_current_request()
1062 1062
1063 1063 if permalink:
1064 1064 return request.route_url(
1065 1065 'pull_requests_global',
1066 1066 pull_request_id=pull_request.pull_request_id,)
1067 1067 else:
1068 1068 return request.route_url('pullrequest_show',
1069 1069 repo_name=safe_str(pull_request.target_repo.repo_name),
1070 1070 pull_request_id=pull_request.pull_request_id,)
1071 1071
1072 1072 def get_shadow_clone_url(self, pull_request, request=None):
1073 1073 """
1074 1074 Returns qualified url pointing to the shadow repository. If this pull
1075 1075 request is closed there is no shadow repository and ``None`` will be
1076 1076 returned.
1077 1077 """
1078 1078 if pull_request.is_closed():
1079 1079 return None
1080 1080 else:
1081 1081 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1082 1082 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1083 1083
1084 1084 def notify_reviewers(self, pull_request, reviewers_ids):
1085 1085 # notification to reviewers
1086 1086 if not reviewers_ids:
1087 1087 return
1088 1088
1089 1089 pull_request_obj = pull_request
1090 1090 # get the current participants of this pull request
1091 1091 recipients = reviewers_ids
1092 1092 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1093 1093
1094 1094 pr_source_repo = pull_request_obj.source_repo
1095 1095 pr_target_repo = pull_request_obj.target_repo
1096 1096
1097 1097 pr_url = h.route_url('pullrequest_show',
1098 1098 repo_name=pr_target_repo.repo_name,
1099 1099 pull_request_id=pull_request_obj.pull_request_id,)
1100 1100
1101 1101 # set some variables for email notification
1102 1102 pr_target_repo_url = h.route_url(
1103 1103 'repo_summary', repo_name=pr_target_repo.repo_name)
1104 1104
1105 1105 pr_source_repo_url = h.route_url(
1106 1106 'repo_summary', repo_name=pr_source_repo.repo_name)
1107 1107
1108 1108 # pull request specifics
1109 1109 pull_request_commits = [
1110 1110 (x.raw_id, x.message)
1111 1111 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1112 1112
1113 1113 kwargs = {
1114 1114 'user': pull_request.author,
1115 1115 'pull_request': pull_request_obj,
1116 1116 'pull_request_commits': pull_request_commits,
1117 1117
1118 1118 'pull_request_target_repo': pr_target_repo,
1119 1119 'pull_request_target_repo_url': pr_target_repo_url,
1120 1120
1121 1121 'pull_request_source_repo': pr_source_repo,
1122 1122 'pull_request_source_repo_url': pr_source_repo_url,
1123 1123
1124 1124 'pull_request_url': pr_url,
1125 1125 }
1126 1126
1127 1127 # pre-generate the subject for notification itself
1128 1128 (subject,
1129 1129 _h, _e, # we don't care about those
1130 1130 body_plaintext) = EmailNotificationModel().render_email(
1131 1131 notification_type, **kwargs)
1132 1132
1133 1133 # create notification objects, and emails
1134 1134 NotificationModel().create(
1135 1135 created_by=pull_request.author,
1136 1136 notification_subject=subject,
1137 1137 notification_body=body_plaintext,
1138 1138 notification_type=notification_type,
1139 1139 recipients=recipients,
1140 1140 email_kwargs=kwargs,
1141 1141 )
1142 1142
1143 1143 def delete(self, pull_request, user):
1144 1144 pull_request = self.__get_pull_request(pull_request)
1145 1145 old_data = pull_request.get_api_data(with_merge_state=False)
1146 1146 self._cleanup_merge_workspace(pull_request)
1147 1147 self._log_audit_action(
1148 1148 'repo.pull_request.delete', {'old_data': old_data},
1149 1149 user, pull_request)
1150 1150 Session().delete(pull_request)
1151 1151
1152 1152 def close_pull_request(self, pull_request, user):
1153 1153 pull_request = self.__get_pull_request(pull_request)
1154 1154 self._cleanup_merge_workspace(pull_request)
1155 1155 pull_request.status = PullRequest.STATUS_CLOSED
1156 1156 pull_request.updated_on = datetime.datetime.now()
1157 1157 Session().add(pull_request)
1158 1158 self.trigger_pull_request_hook(
1159 1159 pull_request, pull_request.author, 'close')
1160 1160
1161 1161 pr_data = pull_request.get_api_data(with_merge_state=False)
1162 1162 self._log_audit_action(
1163 1163 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1164 1164
1165 1165 def close_pull_request_with_comment(
1166 1166 self, pull_request, user, repo, message=None, auth_user=None):
1167 1167
1168 1168 pull_request_review_status = pull_request.calculated_review_status()
1169 1169
1170 1170 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1171 1171 # approved only if we have voting consent
1172 1172 status = ChangesetStatus.STATUS_APPROVED
1173 1173 else:
1174 1174 status = ChangesetStatus.STATUS_REJECTED
1175 1175 status_lbl = ChangesetStatus.get_status_lbl(status)
1176 1176
1177 1177 default_message = (
1178 1178 'Closing with status change {transition_icon} {status}.'
1179 1179 ).format(transition_icon='>', status=status_lbl)
1180 1180 text = message or default_message
1181 1181
1182 1182 # create a comment, and link it to new status
1183 1183 comment = CommentsModel().create(
1184 1184 text=text,
1185 1185 repo=repo.repo_id,
1186 1186 user=user.user_id,
1187 1187 pull_request=pull_request.pull_request_id,
1188 1188 status_change=status_lbl,
1189 1189 status_change_type=status,
1190 1190 closing_pr=True,
1191 1191 auth_user=auth_user,
1192 1192 )
1193 1193
1194 1194 # calculate old status before we change it
1195 1195 old_calculated_status = pull_request.calculated_review_status()
1196 1196 ChangesetStatusModel().set_status(
1197 1197 repo.repo_id,
1198 1198 status,
1199 1199 user.user_id,
1200 1200 comment=comment,
1201 1201 pull_request=pull_request.pull_request_id
1202 1202 )
1203 1203
1204 1204 Session().flush()
1205 1205 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1206 1206 # we now calculate the status of the pull request again, and based on that
1207 1207 # calculation trigger a status change. This might happen in cases where a
1208 1208 # non-reviewer admin closes a pr, which means their vote doesn't
1209 1209 # change the status, while if they're a reviewer this might change it.
1210 1210 calculated_status = pull_request.calculated_review_status()
1211 1211 if old_calculated_status != calculated_status:
1212 1212 self.trigger_pull_request_hook(
1213 1213 pull_request, user, 'review_status_change',
1214 1214 data={'status': calculated_status})
1215 1215
1216 1216 # finally close the PR
1217 1217 PullRequestModel().close_pull_request(
1218 1218 pull_request.pull_request_id, user)
1219 1219
1220 1220 return comment, status
1221 1221
1222 1222 def merge_status(self, pull_request, translator=None,
1223 1223 force_shadow_repo_refresh=False):
1224 1224 _ = translator or get_current_request().translate
1225 1225
1226 1226 if not self._is_merge_enabled(pull_request):
1227 1227 return False, _('Server-side pull request merging is disabled.')
1228 1228 if pull_request.is_closed():
1229 1229 return False, _('This pull request is closed.')
1230 1230 merge_possible, msg = self._check_repo_requirements(
1231 1231 target=pull_request.target_repo, source=pull_request.source_repo,
1232 1232 translator=_)
1233 1233 if not merge_possible:
1234 1234 return merge_possible, msg
1235 1235
1236 1236 try:
1237 1237 resp = self._try_merge(
1238 1238 pull_request,
1239 1239 force_shadow_repo_refresh=force_shadow_repo_refresh)
1240 1240 log.debug("Merge response: %s", resp)
1241 1241 status = resp.possible, resp.merge_status_message
1242 1242 except NotImplementedError:
1243 1243 status = False, _('Pull request merging is not supported.')
1244 1244
1245 1245 return status
1246 1246
1247 1247 def _check_repo_requirements(self, target, source, translator):
1248 1248 """
1249 1249 Check if `target` and `source` have compatible requirements.
1250 1250
1251 1251 Currently this is just checking for largefiles.
1252 1252 """
1253 1253 _ = translator
1254 1254 target_has_largefiles = self._has_largefiles(target)
1255 1255 source_has_largefiles = self._has_largefiles(source)
1256 1256 merge_possible = True
1257 1257 message = u''
1258 1258
1259 1259 if target_has_largefiles != source_has_largefiles:
1260 1260 merge_possible = False
1261 1261 if source_has_largefiles:
1262 1262 message = _(
1263 1263 'Target repository large files support is disabled.')
1264 1264 else:
1265 1265 message = _(
1266 1266 'Source repository large files support is disabled.')
1267 1267
1268 1268 return merge_possible, message
1269 1269
1270 1270 def _has_largefiles(self, repo):
1271 1271 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1272 1272 'extensions', 'largefiles')
1273 1273 return largefiles_ui and largefiles_ui[0].active
1274 1274
1275 1275 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1276 1276 """
1277 1277 Try to merge the pull request and return the merge status.
1278 1278 """
1279 1279 log.debug(
1280 1280 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1281 1281 pull_request.pull_request_id, force_shadow_repo_refresh)
1282 1282 target_vcs = pull_request.target_repo.scm_instance()
1283 1283 # Refresh the target reference.
1284 1284 try:
1285 1285 target_ref = self._refresh_reference(
1286 1286 pull_request.target_ref_parts, target_vcs)
1287 1287 except CommitDoesNotExistError:
1288 1288 merge_state = MergeResponse(
1289 1289 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1290 1290 metadata={'target_ref': pull_request.target_ref_parts})
1291 1291 return merge_state
1292 1292
1293 1293 target_locked = pull_request.target_repo.locked
1294 1294 if target_locked and target_locked[0]:
1295 1295 locked_by = 'user:{}'.format(target_locked[0])
1296 1296 log.debug("The target repository is locked by %s.", locked_by)
1297 1297 merge_state = MergeResponse(
1298 1298 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1299 1299 metadata={'locked_by': locked_by})
1300 1300 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1301 1301 pull_request, target_ref):
1302 1302 log.debug("Refreshing the merge status of the repository.")
1303 1303 merge_state = self._refresh_merge_state(
1304 1304 pull_request, target_vcs, target_ref)
1305 1305 else:
1306 1306 possible = pull_request.\
1307 1307 last_merge_status == MergeFailureReason.NONE
1308 1308 merge_state = MergeResponse(
1309 1309 possible, False, None, pull_request.last_merge_status)
1310 1310
1311 1311 return merge_state
1312 1312
1313 1313 def _refresh_reference(self, reference, vcs_repository):
1314 1314 if reference.type in self.UPDATABLE_REF_TYPES:
1315 1315 name_or_id = reference.name
1316 1316 else:
1317 1317 name_or_id = reference.commit_id
1318 1318 refreshed_commit = vcs_repository.get_commit(name_or_id)
1319 1319 refreshed_reference = Reference(
1320 1320 reference.type, reference.name, refreshed_commit.raw_id)
1321 1321 return refreshed_reference
1322 1322
1323 1323 def _needs_merge_state_refresh(self, pull_request, target_reference):
1324 1324 return not(
1325 1325 pull_request.revisions and
1326 1326 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1327 1327 target_reference.commit_id == pull_request._last_merge_target_rev)
1328 1328
1329 1329 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1330 1330 workspace_id = self._workspace_id(pull_request)
1331 1331 source_vcs = pull_request.source_repo.scm_instance()
1332 1332 repo_id = pull_request.target_repo.repo_id
1333 1333 use_rebase = self._use_rebase_for_merging(pull_request)
1334 1334 close_branch = self._close_branch_before_merging(pull_request)
1335 1335 merge_state = target_vcs.merge(
1336 1336 repo_id, workspace_id,
1337 1337 target_reference, source_vcs, pull_request.source_ref_parts,
1338 1338 dry_run=True, use_rebase=use_rebase,
1339 1339 close_branch=close_branch)
1340 1340
1341 1341 # Do not store the response if there was an unknown error.
1342 1342 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1343 1343 pull_request._last_merge_source_rev = \
1344 1344 pull_request.source_ref_parts.commit_id
1345 1345 pull_request._last_merge_target_rev = target_reference.commit_id
1346 1346 pull_request.last_merge_status = merge_state.failure_reason
1347 1347 pull_request.shadow_merge_ref = merge_state.merge_ref
1348 1348 Session().add(pull_request)
1349 1349 Session().commit()
1350 1350
1351 1351 return merge_state
1352 1352
1353 1353 def _workspace_id(self, pull_request):
1354 1354 workspace_id = 'pr-%s' % pull_request.pull_request_id
1355 1355 return workspace_id
1356 1356
1357 1357 def generate_repo_data(self, repo, commit_id=None, branch=None,
1358 1358 bookmark=None, translator=None):
1359 1359 from rhodecode.model.repo import RepoModel
1360 1360
1361 1361 all_refs, selected_ref = \
1362 1362 self._get_repo_pullrequest_sources(
1363 1363 repo.scm_instance(), commit_id=commit_id,
1364 1364 branch=branch, bookmark=bookmark, translator=translator)
1365 1365
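# Flatten the grouped refs into the nested {text, children} structure
# expected by the select2 ref picker widgets.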
1366 1366 refs_select2 = []
1367 1367 for element in all_refs:
1368 1368 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1369 1369 refs_select2.append({'text': element[1], 'children': children})
1370 1370
1371 1371 return {
1372 1372 'user': {
1373 1373 'user_id': repo.user.user_id,
1374 1374 'username': repo.user.username,
1375 1375 'firstname': repo.user.first_name,
1376 1376 'lastname': repo.user.last_name,
1377 1377 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1378 1378 },
1379 1379 'name': repo.repo_name,
1380 1380 'link': RepoModel().get_url(repo),
1381 1381 'description': h.chop_at_smart(repo.description_safe, '\n'),
1382 1382 'refs': {
1383 1383 'all_refs': all_refs,
1384 1384 'selected_ref': selected_ref,
1385 1385 'select2_refs': refs_select2
1386 1386 }
1387 1387 }
1388 1388
1389 1389 def generate_pullrequest_title(self, source, source_ref, target):
1390 1390 return u'{source}#{at_ref} to {target}'.format(
1391 1391 source=source,
1392 1392 at_ref=source_ref,
1393 1393 target=target,
1394 1394 )
1395 1395
1396 1396 def _cleanup_merge_workspace(self, pull_request):
1397 1397 # Merge-related workspace cleanup
1398 1398 repo_id = pull_request.target_repo.repo_id
1399 1399 target_scm = pull_request.target_repo.scm_instance()
1400 1400 workspace_id = self._workspace_id(pull_request)
1401 1401
1402 1402 try:
1403 1403 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1404 1404 except NotImplementedError:
1405 1405 pass
1406 1406
1407 1407 def _get_repo_pullrequest_sources(
1408 1408 self, repo, commit_id=None, branch=None, bookmark=None,
1409 1409 translator=None):
1410 1410 """
1411 1411 Return a structure with the repo's interesting commits, suitable
1412 1412 for the ref selectors in the pull request controller.
1413 1413
1414 1414 :param commit_id: a commit that must be present in the list
1415 1415 and selected by default
1416 1416 :param branch: a branch that must be in the list and selected
1417 1417 by default - even if closed
1418 1418 :param bookmark: a bookmark that must be in the list and selected by default
1419 1419 """
1420 1420 _ = translator or get_current_request().translate
1421 1421
1422 1422 commit_id = safe_str(commit_id) if commit_id else None
1423 branch = safe_str(branch) if branch else None
1424 bookmark = safe_str(bookmark) if bookmark else None
1423 branch = safe_unicode(branch) if branch else None
1424 bookmark = safe_unicode(bookmark) if bookmark else None
1425 1425
1426 1426 selected = None
1427 1427
1428 1428 # order matters: first source that has commit_id in it will be selected
1429 1429 sources = []
1430 1430 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1431 1431 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1432 1432
1433 1433 if commit_id:
1434 1434 ref_commit = (h.short_id(commit_id), commit_id)
1435 1435 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1436 1436
1437 1437 sources.append(
1438 1438 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1439 1439 )
1440 1440
1441 1441 groups = []
1442
1442 1443 for group_key, ref_list, group_name, match in sources:
1443 1444 group_refs = []
1444 1445 for ref_name, ref_id in ref_list:
1445 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1446 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
1446 1447 group_refs.append((ref_key, ref_name))
1447 1448
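# select the first ref whose id or name matches the requested commit_id
# or this source's branch/bookmark value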
1448 1449 if not selected:
1449 1450 if set([commit_id, match]) & set([ref_id, ref_name]):
1450 1451 selected = ref_key
1451 1452
1452 1453 if group_refs:
1453 1454 groups.append((group_refs, group_name))
1454 1455
1455 1456 if not selected:
1456 1457 ref = commit_id or branch or bookmark
1457 1458 if ref:
1458 1459 raise CommitDoesNotExistError(
1459 'No commit refs could be found matching: %s' % ref)
1460 u'No commit refs could be found matching: {}'.format(ref))
1460 1461 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1461 selected = 'branch:%s:%s' % (
1462 repo.DEFAULT_BRANCH_NAME,
1463 repo.branches[repo.DEFAULT_BRANCH_NAME]
1462 selected = u'branch:{}:{}'.format(
1463 safe_unicode(repo.DEFAULT_BRANCH_NAME),
1464 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
1464 1465 )
1465 1466 elif repo.commit_ids:
1466 1467 # make the user select in this case
1467 1468 selected = None
1468 1469 else:
1469 1470 raise EmptyRepositoryError()
1470 1471 return groups, selected
1471 1472
1472 1473 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1473 1474 hide_whitespace_changes, diff_context):
1474 1475
1475 1476 return self._get_diff_from_pr_or_version(
1476 1477 source_repo, source_ref_id, target_ref_id,
1477 1478 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1478 1479
1479 1480 def _get_diff_from_pr_or_version(
1480 1481 self, source_repo, source_ref_id, target_ref_id,
1481 1482 hide_whitespace_changes, diff_context):
1482 1483
1483 1484 target_commit = source_repo.get_commit(
1484 1485 commit_id=safe_str(target_ref_id))
1485 1486 source_commit = source_repo.get_commit(
1486 1487 commit_id=safe_str(source_ref_id))
1487 1488 if isinstance(source_repo, Repository):
1488 1489 vcs_repo = source_repo.scm_instance()
1489 1490 else:
1490 1491 vcs_repo = source_repo
1491 1492
1492 1493 # TODO: johbo: In the context of an update, we cannot reach
1493 1494 # the old commit anymore with our normal mechanisms. It needs
1494 1495 # some sort of special support in the vcs layer to avoid this
1495 1496 # workaround.
1496 1497 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1497 1498 vcs_repo.alias == 'git'):
1498 1499 source_commit.raw_id = safe_str(source_ref_id)
1499 1500
1500 1501 log.debug('calculating diff between '
1501 1502 'source_ref:%s and target_ref:%s for repo `%s`',
1502 1503 target_ref_id, source_ref_id,
1503 1504 safe_unicode(vcs_repo.path))
1504 1505
1505 1506 vcs_diff = vcs_repo.get_diff(
1506 1507 commit1=target_commit, commit2=source_commit,
1507 1508 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1508 1509 return vcs_diff
1509 1510
1510 1511 def _is_merge_enabled(self, pull_request):
1511 1512 return self._get_general_setting(
1512 1513 pull_request, 'rhodecode_pr_merge_enabled')
1513 1514
1514 1515 def _use_rebase_for_merging(self, pull_request):
1515 1516 repo_type = pull_request.target_repo.repo_type
1516 1517 if repo_type == 'hg':
1517 1518 return self._get_general_setting(
1518 1519 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1519 1520 elif repo_type == 'git':
1520 1521 return self._get_general_setting(
1521 1522 pull_request, 'rhodecode_git_use_rebase_for_merging')
1522 1523
1523 1524 return False
1524 1525
1525 1526 def _close_branch_before_merging(self, pull_request):
1526 1527 repo_type = pull_request.target_repo.repo_type
1527 1528 if repo_type == 'hg':
1528 1529 return self._get_general_setting(
1529 1530 pull_request, 'rhodecode_hg_close_branch_before_merging')
1530 1531 elif repo_type == 'git':
1531 1532 return self._get_general_setting(
1532 1533 pull_request, 'rhodecode_git_close_branch_before_merging')
1533 1534
1534 1535 return False
1535 1536
1536 1537 def _get_general_setting(self, pull_request, settings_key, default=False):
1537 1538 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1538 1539 settings = settings_model.get_general_settings()
1539 1540 return settings.get(settings_key, default)
1540 1541
1541 1542 def _log_audit_action(self, action, action_data, user, pull_request):
1542 1543 audit_logger.store(
1543 1544 action=action,
1544 1545 action_data=action_data,
1545 1546 user=user,
1546 1547 repo=pull_request.target_repo)
1547 1548
1548 1549 def get_reviewer_functions(self):
1549 1550 """
1550 1551 Fetch the functions used to validate and compute default reviewers.
1551 1552 If available we use the EE package, otherwise we fall back to the CE
1552 1553 package functions.
1553 1554 """
1554 1555 try:
1555 1556 from rc_reviewers.utils import get_default_reviewers_data
1556 1557 from rc_reviewers.utils import validate_default_reviewers
1557 1558 except ImportError:
1558 1559 from rhodecode.apps.repository.utils import get_default_reviewers_data
1559 1560 from rhodecode.apps.repository.utils import validate_default_reviewers
1560 1561
1561 1562 return get_default_reviewers_data, validate_default_reviewers
1562 1563
1563 1564
1564 1565 class MergeCheck(object):
1565 1566 """
1566 1567 Perform merge checks and return a check object which stores
1567 1568 information about merge errors and merge conditions.
1568 1569 """
1569 1570 TODO_CHECK = 'todo'
1570 1571 PERM_CHECK = 'perm'
1571 1572 REVIEW_CHECK = 'review'
1572 1573 MERGE_CHECK = 'merge'
1573 1574
1574 1575 def __init__(self):
1575 1576 self.review_status = None
1576 1577 self.merge_possible = None
1577 1578 self.merge_msg = ''
1578 1579 self.failed = None
1579 1580 self.errors = []
1580 1581 self.error_details = OrderedDict()
1581 1582
1582 1583 def push_error(self, error_type, message, error_key, details):
1583 1584 self.failed = True
1584 1585 self.errors.append([error_type, message])
1585 1586 self.error_details[error_key] = dict(
1586 1587 details=details,
1587 1588 error_type=error_type,
1588 1589 message=message
1589 1590 )
1590 1591
1591 1592 @classmethod
1592 1593 def validate(cls, pull_request, auth_user, translator, fail_early=False,
1593 1594 force_shadow_repo_refresh=False):
1594 1595 _ = translator
1595 1596 merge_check = cls()
1596 1597
1597 1598 # permissions to merge
1598 1599 user_allowed_to_merge = PullRequestModel().check_user_merge(
1599 1600 pull_request, auth_user)
1600 1601 if not user_allowed_to_merge:
1601 1602 log.debug("MergeCheck: cannot merge, user has no permission to merge.")
1602 1603
1603 1604 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
1604 1605 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1605 1606 if fail_early:
1606 1607 return merge_check
1607 1608
1608 1609 # permission to merge into the target branch
1609 1610 target_commit_id = pull_request.target_ref_parts.commit_id
1610 1611 if pull_request.target_ref_parts.type == 'branch':
1611 1612 branch_name = pull_request.target_ref_parts.name
1612 1613 else:
1613 1614 # for mercurial we can always figure out the branch from the
1614 1615 # commit, e.g. when the target ref is a bookmark
1615 1616 target_commit = pull_request.target_repo.get_commit(target_commit_id)
1616 1617 branch_name = target_commit.branch
1617 1618
1618 1619 rule, branch_perm = auth_user.get_rule_and_branch_permission(
1619 1620 pull_request.target_repo.repo_name, branch_name)
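# a matching 'branch.none' rule forbids any changes to the target branch,
# including merging this pull request into it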
1620 1621 if branch_perm and branch_perm == 'branch.none':
1621 1622 msg = _('Target branch `{}` changes rejected by rule {}.').format(
1622 1623 branch_name, rule)
1623 1624 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1624 1625 if fail_early:
1625 1626 return merge_check
1626 1627
1627 1628 # review status, must be always present
1628 1629 review_status = pull_request.calculated_review_status()
1629 1630 merge_check.review_status = review_status
1630 1631
1631 1632 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1632 1633 if not status_approved:
1633 1634 log.debug("MergeCheck: cannot merge, approval is pending.")
1634 1635
1635 1636 msg = _('Pull request reviewer approval is pending.')
1636 1637
1637 1638 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
1638 1639
1639 1640 if fail_early:
1640 1641 return merge_check
1641 1642
1642 1643 # left over TODOs
1643 1644 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
1644 1645 if todos:
1645 1646 log.debug("MergeCheck: cannot merge, %s "
1646 1647 "unresolved TODOs left.", len(todos))
1647 1648
1648 1649 if len(todos) == 1:
1649 1650 msg = _('Cannot merge, {} TODO still not resolved.').format(
1650 1651 len(todos))
1651 1652 else:
1652 1653 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1653 1654 len(todos))
1654 1655
1655 1656 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1656 1657
1657 1658 if fail_early:
1658 1659 return merge_check
1659 1660
1660 1661 # merge possibility check: run the merge simulation against the shadow repo
1661 1662 merge_status, msg = PullRequestModel().merge_status(
1662 1663 pull_request, translator=translator,
1663 1664 force_shadow_repo_refresh=force_shadow_repo_refresh)
1664 1665 merge_check.merge_possible = merge_status
1665 1666 merge_check.merge_msg = msg
1666 1667 if not merge_status:
1667 1668 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
1668 1669 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1669 1670
1670 1671 if fail_early:
1671 1672 return merge_check
1672 1673
1673 1674 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1674 1675 return merge_check
1675 1676
1676 1677 @classmethod
1677 1678 def get_merge_conditions(cls, pull_request, translator):
1678 1679 _ = translator
1679 1680 merge_details = {}
1680 1681
1681 1682 model = PullRequestModel()
1682 1683 use_rebase = model._use_rebase_for_merging(pull_request)
1683 1684
1684 1685 if use_rebase:
1685 1686 merge_details['merge_strategy'] = dict(
1686 1687 details={},
1687 1688 message=_('Merge strategy: rebase')
1688 1689 )
1689 1690 else:
1690 1691 merge_details['merge_strategy'] = dict(
1691 1692 details={},
1692 1693 message=_('Merge strategy: explicit merge commit')
1693 1694 )
1694 1695
1695 1696 close_branch = model._close_branch_before_merging(pull_request)
1696 1697 if close_branch:
1697 1698 repo_type = pull_request.target_repo.repo_type
1698 1699 close_msg = ''
1699 1700 if repo_type == 'hg':
1700 1701 close_msg = _('Source branch will be closed after merge.')
1701 1702 elif repo_type == 'git':
1702 1703 close_msg = _('Source branch will be deleted after merge.')
1703 1704
1704 1705 merge_details['close_branch'] = dict(
1705 1706 details={},
1706 1707 message=close_msg
1707 1708 )
1708 1709
1709 1710 return merge_details
1710 1711
1711 1712
1712 1713 ChangeTuple = collections.namedtuple(
1713 1714 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1714 1715
1715 1716 FileChangeTuple = collections.namedtuple(
1716 1717 'FileChangeTuple', ['added', 'modified', 'removed'])
@@ -1,57 +1,57 b''
1 1
2 2 <div id="codeblock" class="browserblock">
3 3 <div class="browser-header">
4 4 <div class="browser-nav">
5 5 ${h.form(h.current_route_path(request), method='GET', id='at_rev_form')}
6 6 <div class="info_box">
7 7 ${h.hidden('refs_filter')}
8 8 <div class="info_box_elem previous">
9 9 <a id="prev_commit_link" data-commit-id="${c.prev_commit.raw_id}" class="pjax-link ${'disabled' if c.url_prev == '#' else ''}" href="${c.url_prev}" title="${_('Previous commit')}"><i class="icon-left"></i></a>
10 10 </div>
11 11 <div class="info_box_elem">${h.text('at_rev',value=c.commit.idx)}</div>
12 12 <div class="info_box_elem next">
13 13 <a id="next_commit_link" data-commit-id="${c.next_commit.raw_id}" class="pjax-link ${'disabled' if c.url_next == '#' else ''}" href="${c.url_next}" title="${_('Next commit')}"><i class="icon-right"></i></a>
14 14 </div>
15 15 </div>
16 16 ${h.end_form()}
17 17
18 18 <div id="search_activate_id" class="search_activate">
19 19 <a class="btn btn-default" id="filter_activate" href="javascript:void(0)">${_('Search File List')}</a>
20 20 </div>
21 21 <div id="search_deactivate_id" class="search_activate hidden">
22 22 <a class="btn btn-default" id="filter_deactivate" href="javascript:void(0)">${_('Close File List')}</a>
23 23 </div>
24 24 % if h.HasRepoPermissionAny('repository.write','repository.admin')(c.repo_name):
25 25 <div title="${_('Add New File')}" class="btn btn-primary new-file">
26 26 <a href="${h.route_path('repo_files_add_file',repo_name=c.repo_name,commit_id=c.commit.raw_id,f_path=c.f_path, _anchor='edit')}">
27 27 ${_('Add File')}</a>
28 28 </div>
29 29 % endif
30 30 % if c.enable_downloads:
31 31 <% at_path = '{}.zip'.format(request.GET.get('at') or c.commit.raw_id[:6]) %>
32 32 <div title="${_('Download tree at {}').format(at_path)}" class="btn btn-default new-file">
33 <a href="${h.route_path('repo_archivefile',repo_name=c.repo_name, fname=c.commit.raw_id)}">
33 <a href="${h.route_path('repo_archivefile',repo_name=c.repo_name, fname='{}.zip'.format(c.commit.raw_id))}">
34 34 ${_('Download tree at {}').format(at_path)}
35 35 </a>
36 36 </div>
37 37 % endif
38 38 </div>
39 39
40 40 <div class="browser-search">
41 41 <div class="node-filter">
42 42 <div class="node_filter_box hidden" id="node_filter_box_loading" >${_('Loading file list...')}</div>
43 43 <div class="node_filter_box hidden" id="node_filter_box" >
44 44 <div class="node-filter-path">${h.get_last_path_part(c.file)}/</div>
45 45 <div class="node-filter-input">
46 46 <input class="init" type="text" name="filter" size="25" id="node_filter" autocomplete="off">
47 47 </div>
48 48 </div>
49 49 </div>
50 50 </div>
51 51 </div>
52 52 ## file tree is computed from caches, and filled in
53 53 <div id="file-tree">
54 54 ${c.file_tree |n}
55 55 </div>
56 56
57 57 </div>