##// END OF EJS Templates
dependencies: bumped test libraries.
marcink -
r3951:041016db default
parent child Browse files
Show More
@@ -0,0 +1,47 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2019 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

# NOTE: the coding cookie above MUST stay on line 1 or 2 (PEP 263); the
# previous layout had `import collections` before it, which disables the
# declaration and breaks parsing of the non-ASCII literals below on Python 2.
import collections

import pytest

from rhodecode.lib import audit_logger
@pytest.mark.parametrize('store_type', ['store_web', 'store_api'])
@pytest.mark.parametrize('action, kwargs', [
    ('repo.edit', {
        'user': audit_logger.UserWrap(username='test-audit-log', ip_addr='8.8.8.8'),
        'action_data': {'data': {'hello': 'world'}},
    }),
    ('repo.edit', {
        'user': audit_logger.UserWrap(username=u'marcinkuΕΌmiΕ„', ip_addr='8.8.8.8'),
        'action_data': {'data': {'hello': u'Δ…Δ™ΕΌΔ…βˆ‘Δ™Δ«Β¨Β¨Δ·Β©'}},
    }),
    ('repo.edit', {
        'user': audit_logger.UserWrap(username='marcinkuΕΌmiΕ„', ip_addr='8.8.8.8'),
        'action_data': {'data': {'hello': 'Δ…Δ™ΕΌΔ…βˆ‘Δ™Δ«Β¨Β¨Δ·Β©'}},
    }),
])
def test_store_audit_log(app, store_type, action, kwargs):
    """Storing an audit-log entry succeeds via both the web and API stores.

    Exercises ASCII, explicit-unicode and byte-string (utf-8) payloads to
    make sure neither store path chokes on non-ASCII user names or data.
    """
    # Resolve the requested store function by name and invoke it directly.
    getattr(audit_logger, store_type)(action, **kwargs)
@@ -1,2378 +1,2385 b''
1 1 # Generated by pip2nix 0.8.0.dev1
2 2 # See https://github.com/johbo/pip2nix
3 3
4 4 { pkgs, fetchurl, fetchgit, fetchhg }:
5 5
6 6 self: super: {
7 7 "alembic" = super.buildPythonPackage {
8 8 name = "alembic-1.0.10";
9 9 doCheck = false;
10 10 propagatedBuildInputs = [
11 11 self."sqlalchemy"
12 12 self."mako"
13 13 self."python-editor"
14 14 self."python-dateutil"
15 15 ];
16 16 src = fetchurl {
17 17 url = "https://files.pythonhosted.org/packages/6e/8b/fa3bd058cccd5e9177fea4efa26bfb769228fdd3178436ad5e05830ef6ef/alembic-1.0.10.tar.gz";
18 18 sha256 = "1dwl0264r6ri2jyrjr68am04x538ab26xwy4crqjnnhm4alwm3c2";
19 19 };
20 20 meta = {
21 21 license = [ pkgs.lib.licenses.mit ];
22 22 };
23 23 };
24 24 "amqp" = super.buildPythonPackage {
25 25 name = "amqp-2.5.1";
26 26 doCheck = false;
27 27 propagatedBuildInputs = [
28 28 self."vine"
29 29 ];
30 30 src = fetchurl {
31 31 url = "https://files.pythonhosted.org/packages/b5/f5/70e364a1f5fbafc742c098ad88a064b801b0d69cf56bfad13be2c08be4e2/amqp-2.5.1.tar.gz";
32 32 sha256 = "0s2yxnnhhx9hww0n33yn22q6sgnbd6n2nw92050qv2qpc3i1ga8r";
33 33 };
34 34 meta = {
35 35 license = [ pkgs.lib.licenses.bsdOriginal ];
36 36 };
37 37 };
38 38 "appenlight-client" = super.buildPythonPackage {
39 39 name = "appenlight-client-0.6.26";
40 40 doCheck = false;
41 41 propagatedBuildInputs = [
42 42 self."webob"
43 43 self."requests"
44 44 self."six"
45 45 ];
46 46 src = fetchurl {
47 47 url = "https://files.pythonhosted.org/packages/2e/56/418fc10379b96e795ee39a15e69a730c222818af04c3821fa354eaa859ec/appenlight_client-0.6.26.tar.gz";
48 48 sha256 = "0s9xw3sb8s3pk73k78nnq4jil3q4mk6bczfa1fmgfx61kdxl2712";
49 49 };
50 50 meta = {
51 51 license = [ pkgs.lib.licenses.bsdOriginal ];
52 52 };
53 53 };
54 54 "asn1crypto" = super.buildPythonPackage {
55 55 name = "asn1crypto-0.24.0";
56 56 doCheck = false;
57 57 src = fetchurl {
58 58 url = "https://files.pythonhosted.org/packages/fc/f1/8db7daa71f414ddabfa056c4ef792e1461ff655c2ae2928a2b675bfed6b4/asn1crypto-0.24.0.tar.gz";
59 59 sha256 = "0jaf8rf9dx1lf23xfv2cdd5h52f1qr3w8k63985bc35g3d220p4x";
60 60 };
61 61 meta = {
62 62 license = [ pkgs.lib.licenses.mit ];
63 63 };
64 64 };
65 65 "atomicwrites" = super.buildPythonPackage {
66 66 name = "atomicwrites-1.2.1";
67 67 doCheck = false;
68 68 src = fetchurl {
69 69 url = "https://files.pythonhosted.org/packages/ac/ed/a311712ef6b4355035489f665e63e1a73f9eb371929e3c98e5efd451069e/atomicwrites-1.2.1.tar.gz";
70 70 sha256 = "1vmkbw9j0qammwxbxycrs39gvdg4lc2d4lk98kwf8ag2manyi6pc";
71 71 };
72 72 meta = {
73 73 license = [ pkgs.lib.licenses.mit ];
74 74 };
75 75 };
76 76 "attrs" = super.buildPythonPackage {
77 77 name = "attrs-19.1.0";
78 78 doCheck = false;
79 79 src = fetchurl {
80 80 url = "https://files.pythonhosted.org/packages/cc/d9/931a24cc5394f19383fbbe3e1147a0291276afa43a0dc3ed0d6cd9fda813/attrs-19.1.0.tar.gz";
81 81 sha256 = "16g33zr5f449lqc5wgvzpknxryfzrfsxcr6kpgxwn7l5fkv71f7h";
82 82 };
83 83 meta = {
84 84 license = [ pkgs.lib.licenses.mit ];
85 85 };
86 86 };
87 87 "babel" = super.buildPythonPackage {
88 88 name = "babel-1.3";
89 89 doCheck = false;
90 90 propagatedBuildInputs = [
91 91 self."pytz"
92 92 ];
93 93 src = fetchurl {
94 94 url = "https://files.pythonhosted.org/packages/33/27/e3978243a03a76398c384c83f7ca879bc6e8f1511233a621fcada135606e/Babel-1.3.tar.gz";
95 95 sha256 = "0bnin777lc53nxd1hp3apq410jj5wx92n08h7h4izpl4f4sx00lz";
96 96 };
97 97 meta = {
98 98 license = [ pkgs.lib.licenses.bsdOriginal ];
99 99 };
100 100 };
101 101 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
102 102 name = "backports.shutil-get-terminal-size-1.0.0";
103 103 doCheck = false;
104 104 src = fetchurl {
105 105 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
106 106 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
107 107 };
108 108 meta = {
109 109 license = [ pkgs.lib.licenses.mit ];
110 110 };
111 111 };
112 112 "beaker" = super.buildPythonPackage {
113 113 name = "beaker-1.9.1";
114 114 doCheck = false;
115 115 propagatedBuildInputs = [
116 116 self."funcsigs"
117 117 ];
118 118 src = fetchurl {
119 119 url = "https://files.pythonhosted.org/packages/ca/14/a626188d0d0c7b55dd7cf1902046c2743bd392a7078bb53073e13280eb1e/Beaker-1.9.1.tar.gz";
120 120 sha256 = "08arsn61r255lhz6hcpn2lsiqpg30clla805ysx06wmbhvb6w9rj";
121 121 };
122 122 meta = {
123 123 license = [ pkgs.lib.licenses.bsdOriginal ];
124 124 };
125 125 };
126 126 "beautifulsoup4" = super.buildPythonPackage {
127 127 name = "beautifulsoup4-4.6.3";
128 128 doCheck = false;
129 129 src = fetchurl {
130 130 url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz";
131 131 sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h";
132 132 };
133 133 meta = {
134 134 license = [ pkgs.lib.licenses.mit ];
135 135 };
136 136 };
137 137 "billiard" = super.buildPythonPackage {
138 138 name = "billiard-3.6.1.0";
139 139 doCheck = false;
140 140 src = fetchurl {
141 141 url = "https://files.pythonhosted.org/packages/68/1d/2aea8fbb0b1e1260a8a2e77352de2983d36d7ac01207cf14c2b9c6cc860e/billiard-3.6.1.0.tar.gz";
142 142 sha256 = "09hzy3aqi7visy4vmf4xiish61n0rq5nd3iwjydydps8yrs9r05q";
143 143 };
144 144 meta = {
145 145 license = [ pkgs.lib.licenses.bsdOriginal ];
146 146 };
147 147 };
148 148 "bleach" = super.buildPythonPackage {
149 149 name = "bleach-3.1.0";
150 150 doCheck = false;
151 151 propagatedBuildInputs = [
152 152 self."six"
153 153 self."webencodings"
154 154 ];
155 155 src = fetchurl {
156 156 url = "https://files.pythonhosted.org/packages/78/5a/0df03e8735cd9c75167528299c738702437589b9c71a849489d00ffa82e8/bleach-3.1.0.tar.gz";
157 157 sha256 = "1yhrgrhkln8bd6gn3imj69g1h4xqah9gaz9q26crqr6gmmvpzprz";
158 158 };
159 159 meta = {
160 160 license = [ pkgs.lib.licenses.asl20 ];
161 161 };
162 162 };
163 163 "bumpversion" = super.buildPythonPackage {
164 164 name = "bumpversion-0.5.3";
165 165 doCheck = false;
166 166 src = fetchurl {
167 167 url = "https://files.pythonhosted.org/packages/14/41/8c9da3549f8e00c84f0432c3a8cf8ed6898374714676aab91501d48760db/bumpversion-0.5.3.tar.gz";
168 168 sha256 = "0zn7694yfipxg35ikkfh7kvgl2fissha3dnqad2c5bvsvmrwhi37";
169 169 };
170 170 meta = {
171 171 license = [ pkgs.lib.licenses.mit ];
172 172 };
173 173 };
174 174 "celery" = super.buildPythonPackage {
175 175 name = "celery-4.3.0";
176 176 doCheck = false;
177 177 propagatedBuildInputs = [
178 178 self."pytz"
179 179 self."billiard"
180 180 self."kombu"
181 181 self."vine"
182 182 ];
183 183 src = fetchurl {
184 184 url = "https://files.pythonhosted.org/packages/a2/4b/d020836f751617e907e84753a41c92231cd4b673ff991b8ee9da52361323/celery-4.3.0.tar.gz";
185 185 sha256 = "1y8y0gbgkwimpxqnxq2rm5qz2vy01fvjiybnpm00y5rzd2m34iac";
186 186 };
187 187 meta = {
188 188 license = [ pkgs.lib.licenses.bsdOriginal ];
189 189 };
190 190 };
191 191 "cffi" = super.buildPythonPackage {
192 192 name = "cffi-1.12.2";
193 193 doCheck = false;
194 194 propagatedBuildInputs = [
195 195 self."pycparser"
196 196 ];
197 197 src = fetchurl {
198 198 url = "https://files.pythonhosted.org/packages/64/7c/27367b38e6cc3e1f49f193deb761fe75cda9f95da37b67b422e62281fcac/cffi-1.12.2.tar.gz";
199 199 sha256 = "19qfks2djya8vix95bmg3xzipjb8w9b8mbj4j5k2hqkc8j58f4z1";
200 200 };
201 201 meta = {
202 202 license = [ pkgs.lib.licenses.mit ];
203 203 };
204 204 };
205 205 "chameleon" = super.buildPythonPackage {
206 206 name = "chameleon-2.24";
207 207 doCheck = false;
208 208 src = fetchurl {
209 209 url = "https://files.pythonhosted.org/packages/5a/9e/637379ffa13c5172b5c0e704833ffea6bf51cec7567f93fd6e903d53ed74/Chameleon-2.24.tar.gz";
210 210 sha256 = "0ykqr7syxfa6h9adjfnsv1gdsca2xzm22vmic8859n0f0j09abj5";
211 211 };
212 212 meta = {
213 213 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
214 214 };
215 215 };
216 216 "channelstream" = super.buildPythonPackage {
217 217 name = "channelstream-0.5.2";
218 218 doCheck = false;
219 219 propagatedBuildInputs = [
220 220 self."gevent"
221 221 self."ws4py"
222 222 self."pyramid"
223 223 self."pyramid-jinja2"
224 224 self."itsdangerous"
225 225 self."requests"
226 226 self."six"
227 227 ];
228 228 src = fetchurl {
229 229 url = "https://files.pythonhosted.org/packages/2b/31/29a8e085cf5bf97fa88e7b947adabfc581a18a3463adf77fb6dada34a65f/channelstream-0.5.2.tar.gz";
230 230 sha256 = "1qbm4xdl5hfkja683x546bncg3rqq8qv79w1m1a1wd48cqqzb6rm";
231 231 };
232 232 meta = {
233 233 license = [ pkgs.lib.licenses.bsdOriginal ];
234 234 };
235 235 };
236 236 "click" = super.buildPythonPackage {
237 237 name = "click-7.0";
238 238 doCheck = false;
239 239 src = fetchurl {
240 240 url = "https://files.pythonhosted.org/packages/f8/5c/f60e9d8a1e77005f664b76ff8aeaee5bc05d0a91798afd7f53fc998dbc47/Click-7.0.tar.gz";
241 241 sha256 = "1mzjixd4vjbjvzb6vylki9w1556a9qmdh35kzmq6cign46av952v";
242 242 };
243 243 meta = {
244 244 license = [ pkgs.lib.licenses.bsdOriginal ];
245 245 };
246 246 };
247 247 "colander" = super.buildPythonPackage {
248 248 name = "colander-1.7.0";
249 249 doCheck = false;
250 250 propagatedBuildInputs = [
251 251 self."translationstring"
252 252 self."iso8601"
253 253 self."enum34"
254 254 ];
255 255 src = fetchurl {
256 256 url = "https://files.pythonhosted.org/packages/db/e4/74ab06f54211917b41865cafc987ce511e35503de48da9bfe9358a1bdc3e/colander-1.7.0.tar.gz";
257 257 sha256 = "1wl1bqab307lbbcjx81i28s3yl6dlm4rf15fxawkjb6j48x1cn6p";
258 258 };
259 259 meta = {
260 260 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
261 261 };
262 262 };
263 263 "configobj" = super.buildPythonPackage {
264 264 name = "configobj-5.0.6";
265 265 doCheck = false;
266 266 propagatedBuildInputs = [
267 267 self."six"
268 268 ];
269 269 src = fetchurl {
270 270 url = "https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626";
271 271 sha256 = "0kqfrdfr14mw8yd8qwq14dv2xghpkjmd3yjsy8dfcbvpcc17xnxp";
272 272 };
273 273 meta = {
274 274 license = [ pkgs.lib.licenses.bsdOriginal ];
275 275 };
276 276 };
277 277 "configparser" = super.buildPythonPackage {
278 278 name = "configparser-3.7.4";
279 279 doCheck = false;
280 280 src = fetchurl {
281 281 url = "https://files.pythonhosted.org/packages/e2/1c/83fd53748d8245cb9a3399f705c251d3fc0ce7df04450aac1cfc49dd6a0f/configparser-3.7.4.tar.gz";
282 282 sha256 = "0xac32886ihs2xg7w1gppcq2sgin5qsm8lqwijs5xifq9w0x0q6s";
283 283 };
284 284 meta = {
285 285 license = [ pkgs.lib.licenses.mit ];
286 286 };
287 287 };
288 288 "contextlib2" = super.buildPythonPackage {
289 289 name = "contextlib2-0.5.5";
290 290 doCheck = false;
291 291 src = fetchurl {
292 292 url = "https://files.pythonhosted.org/packages/6e/db/41233498c210b03ab8b072c8ee49b1cd63b3b0c76f8ea0a0e5d02df06898/contextlib2-0.5.5.tar.gz";
293 293 sha256 = "0j6ad6lwwyc9kv71skj098v5l7x5biyj2hs4lc5x1kcixqcr97sh";
294 294 };
295 295 meta = {
296 296 license = [ pkgs.lib.licenses.psfl ];
297 297 };
298 298 };
299 299 "cov-core" = super.buildPythonPackage {
300 300 name = "cov-core-1.15.0";
301 301 doCheck = false;
302 302 propagatedBuildInputs = [
303 303 self."coverage"
304 304 ];
305 305 src = fetchurl {
306 306 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
307 307 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
308 308 };
309 309 meta = {
310 310 license = [ pkgs.lib.licenses.mit ];
311 311 };
312 312 };
313 313 "coverage" = super.buildPythonPackage {
314 name = "coverage-4.5.3";
314 name = "coverage-4.5.4";
315 315 doCheck = false;
316 316 src = fetchurl {
317 url = "https://files.pythonhosted.org/packages/82/70/2280b5b29a0352519bb95ab0ef1ea942d40466ca71c53a2085bdeff7b0eb/coverage-4.5.3.tar.gz";
318 sha256 = "02f6m073qdispn96rc616hg0rnmw1pgqzw3bgxwiwza4zf9hirlx";
317 url = "https://files.pythonhosted.org/packages/85/d5/818d0e603685c4a613d56f065a721013e942088047ff1027a632948bdae6/coverage-4.5.4.tar.gz";
318 sha256 = "0p0j4di6h8k6ica7jwwj09azdcg4ycxq60i9qsskmsg94cd9yzg0";
319 319 };
320 320 meta = {
321 321 license = [ pkgs.lib.licenses.asl20 ];
322 322 };
323 323 };
324 324 "cryptography" = super.buildPythonPackage {
325 325 name = "cryptography-2.6.1";
326 326 doCheck = false;
327 327 propagatedBuildInputs = [
328 328 self."asn1crypto"
329 329 self."six"
330 330 self."cffi"
331 331 self."enum34"
332 332 self."ipaddress"
333 333 ];
334 334 src = fetchurl {
335 335 url = "https://files.pythonhosted.org/packages/07/ca/bc827c5e55918ad223d59d299fff92f3563476c3b00d0a9157d9c0217449/cryptography-2.6.1.tar.gz";
336 336 sha256 = "19iwz5avym5zl6jrrrkym1rdaa9h61j20ph4cswsqgv8xg5j3j16";
337 337 };
338 338 meta = {
339 339 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
340 340 };
341 341 };
342 342 "cssselect" = super.buildPythonPackage {
343 343 name = "cssselect-1.0.3";
344 344 doCheck = false;
345 345 src = fetchurl {
346 346 url = "https://files.pythonhosted.org/packages/52/ea/f31e1d2e9eb130fda2a631e22eac369dc644e8807345fbed5113f2d6f92b/cssselect-1.0.3.tar.gz";
347 347 sha256 = "011jqa2jhmydhi0iz4v1w3cr540z5zas8g2bw8brdw4s4b2qnv86";
348 348 };
349 349 meta = {
350 350 license = [ pkgs.lib.licenses.bsdOriginal ];
351 351 };
352 352 };
353 353 "decorator" = super.buildPythonPackage {
354 354 name = "decorator-4.1.2";
355 355 doCheck = false;
356 356 src = fetchurl {
357 357 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
358 358 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
359 359 };
360 360 meta = {
361 361 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
362 362 };
363 363 };
364 364 "deform" = super.buildPythonPackage {
365 365 name = "deform-2.0.7";
366 366 doCheck = false;
367 367 propagatedBuildInputs = [
368 368 self."chameleon"
369 369 self."colander"
370 370 self."iso8601"
371 371 self."peppercorn"
372 372 self."translationstring"
373 373 self."zope.deprecation"
374 374 ];
375 375 src = fetchurl {
376 376 url = "https://files.pythonhosted.org/packages/cf/a1/bc234527b8f181de9acd80e796483c00007658d1e32b7de78f1c2e004d9a/deform-2.0.7.tar.gz";
377 377 sha256 = "0jnpi0zr2hjvbmiz6nm33yqv976dn9lf51vhlzqc0i75xcr9rwig";
378 378 };
379 379 meta = {
380 380 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
381 381 };
382 382 };
383 383 "defusedxml" = super.buildPythonPackage {
384 384 name = "defusedxml-0.6.0";
385 385 doCheck = false;
386 386 src = fetchurl {
387 387 url = "https://files.pythonhosted.org/packages/a4/5f/f8aa58ca0cf01cbcee728abc9d88bfeb74e95e6cb4334cfd5bed5673ea77/defusedxml-0.6.0.tar.gz";
388 388 sha256 = "1xbp8fivl3wlbyg2jrvs4lalaqv1xp9a9f29p75wdx2s2d6h717n";
389 389 };
390 390 meta = {
391 391 license = [ pkgs.lib.licenses.psfl ];
392 392 };
393 393 };
394 394 "dm.xmlsec.binding" = super.buildPythonPackage {
395 395 name = "dm.xmlsec.binding-1.3.7";
396 396 doCheck = false;
397 397 propagatedBuildInputs = [
398 398 self."setuptools"
399 399 self."lxml"
400 400 ];
401 401 src = fetchurl {
402 402 url = "https://files.pythonhosted.org/packages/2c/9e/7651982d50252692991acdae614af821fd6c79bc8dcd598ad71d55be8fc7/dm.xmlsec.binding-1.3.7.tar.gz";
403 403 sha256 = "03jjjscx1pz2nc0dwiw9nia02qbz1c6f0f9zkyr8fmvys2n5jkb3";
404 404 };
405 405 meta = {
406 406 license = [ pkgs.lib.licenses.bsdOriginal ];
407 407 };
408 408 };
409 409 "docutils" = super.buildPythonPackage {
410 410 name = "docutils-0.14";
411 411 doCheck = false;
412 412 src = fetchurl {
413 413 url = "https://files.pythonhosted.org/packages/84/f4/5771e41fdf52aabebbadecc9381d11dea0fa34e4759b4071244fa094804c/docutils-0.14.tar.gz";
414 414 sha256 = "0x22fs3pdmr42kvz6c654756wja305qv6cx1zbhwlagvxgr4xrji";
415 415 };
416 416 meta = {
417 417 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.publicDomain pkgs.lib.licenses.gpl1 { fullName = "public domain, Python, 2-Clause BSD, GPL 3 (see COPYING.txt)"; } pkgs.lib.licenses.psfl ];
418 418 };
419 419 };
420 420 "dogpile.cache" = super.buildPythonPackage {
421 421 name = "dogpile.cache-0.7.1";
422 422 doCheck = false;
423 423 propagatedBuildInputs = [
424 424 self."decorator"
425 425 ];
426 426 src = fetchurl {
427 427 url = "https://files.pythonhosted.org/packages/84/3e/dbf1cfc5228f1d3dca80ef714db2c5aaec5cd9efaf54d7e3daef6bc48b19/dogpile.cache-0.7.1.tar.gz";
428 428 sha256 = "0caazmrzhnfqb5yrp8myhw61ny637jj69wcngrpbvi31jlcpy6v9";
429 429 };
430 430 meta = {
431 431 license = [ pkgs.lib.licenses.bsdOriginal ];
432 432 };
433 433 };
434 434 "dogpile.core" = super.buildPythonPackage {
435 435 name = "dogpile.core-0.4.1";
436 436 doCheck = false;
437 437 src = fetchurl {
438 438 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
439 439 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
440 440 };
441 441 meta = {
442 442 license = [ pkgs.lib.licenses.bsdOriginal ];
443 443 };
444 444 };
445 445 "ecdsa" = super.buildPythonPackage {
446 446 name = "ecdsa-0.13.2";
447 447 doCheck = false;
448 448 src = fetchurl {
449 449 url = "https://files.pythonhosted.org/packages/51/76/139bf6e9b7b6684d5891212cdbd9e0739f2bfc03f380a1a6ffa700f392ac/ecdsa-0.13.2.tar.gz";
450 450 sha256 = "116qaq7bh4lcynzi613960jhsnn19v0kmsqwahiwjfj14gx4y0sw";
451 451 };
452 452 meta = {
453 453 license = [ pkgs.lib.licenses.mit ];
454 454 };
455 455 };
456 456 "elasticsearch" = super.buildPythonPackage {
457 457 name = "elasticsearch-6.3.1";
458 458 doCheck = false;
459 459 propagatedBuildInputs = [
460 460 self."urllib3"
461 461 ];
462 462 src = fetchurl {
463 463 url = "https://files.pythonhosted.org/packages/9d/ce/c4664e8380e379a9402ecfbaf158e56396da90d520daba21cfa840e0eb71/elasticsearch-6.3.1.tar.gz";
464 464 sha256 = "12y93v0yn7a4xmf969239g8gb3l4cdkclfpbk1qc8hx5qkymrnma";
465 465 };
466 466 meta = {
467 467 license = [ pkgs.lib.licenses.asl20 ];
468 468 };
469 469 };
470 470 "elasticsearch-dsl" = super.buildPythonPackage {
471 471 name = "elasticsearch-dsl-6.3.1";
472 472 doCheck = false;
473 473 propagatedBuildInputs = [
474 474 self."six"
475 475 self."python-dateutil"
476 476 self."elasticsearch"
477 477 self."ipaddress"
478 478 ];
479 479 src = fetchurl {
480 480 url = "https://files.pythonhosted.org/packages/4c/0d/1549f50c591db6bb4e66cbcc8d34a6e537c3d89aa426b167c244fd46420a/elasticsearch-dsl-6.3.1.tar.gz";
481 481 sha256 = "1gh8a0shqi105k325hgwb9avrpdjh0mc6mxwfg9ba7g6lssb702z";
482 482 };
483 483 meta = {
484 484 license = [ pkgs.lib.licenses.asl20 ];
485 485 };
486 486 };
487 487 "elasticsearch1" = super.buildPythonPackage {
488 488 name = "elasticsearch1-1.10.0";
489 489 doCheck = false;
490 490 propagatedBuildInputs = [
491 491 self."urllib3"
492 492 ];
493 493 src = fetchurl {
494 494 url = "https://files.pythonhosted.org/packages/a6/eb/73e75f9681fa71e3157b8ee878534235d57f24ee64f0e77f8d995fb57076/elasticsearch1-1.10.0.tar.gz";
495 495 sha256 = "0g89444kd5zwql4vbvyrmi2m6l6dcj6ga98j4hqxyyyz6z20aki2";
496 496 };
497 497 meta = {
498 498 license = [ pkgs.lib.licenses.asl20 ];
499 499 };
500 500 };
501 501 "elasticsearch1-dsl" = super.buildPythonPackage {
502 502 name = "elasticsearch1-dsl-0.0.12";
503 503 doCheck = false;
504 504 propagatedBuildInputs = [
505 505 self."six"
506 506 self."python-dateutil"
507 507 self."elasticsearch1"
508 508 ];
509 509 src = fetchurl {
510 510 url = "https://files.pythonhosted.org/packages/eb/9d/785342775cb10eddc9b8d7457d618a423b4f0b89d8b2b2d1bc27190d71db/elasticsearch1-dsl-0.0.12.tar.gz";
511 511 sha256 = "0ig1ly39v93hba0z975wnhbmzwj28w6w1sqlr2g7cn5spp732bhk";
512 512 };
513 513 meta = {
514 514 license = [ pkgs.lib.licenses.asl20 ];
515 515 };
516 516 };
517 517 "elasticsearch2" = super.buildPythonPackage {
518 518 name = "elasticsearch2-2.5.0";
519 519 doCheck = false;
520 520 propagatedBuildInputs = [
521 521 self."urllib3"
522 522 ];
523 523 src = fetchurl {
524 524 url = "https://files.pythonhosted.org/packages/84/77/63cf63d4ba11d913b5278406f2a37b0712bec6fc85edfb6151a33eaeba25/elasticsearch2-2.5.0.tar.gz";
525 525 sha256 = "0ky0q16lbvz022yv6q3pix7aamf026p1y994537ccjf0p0dxnbxr";
526 526 };
527 527 meta = {
528 528 license = [ pkgs.lib.licenses.asl20 ];
529 529 };
530 530 };
531 531 "entrypoints" = super.buildPythonPackage {
532 532 name = "entrypoints-0.2.2";
533 533 doCheck = false;
534 534 propagatedBuildInputs = [
535 535 self."configparser"
536 536 ];
537 537 src = fetchurl {
538 538 url = "https://code.rhodecode.com/upstream/entrypoints/artifacts/download/0-8e9ee9e4-c4db-409c-b07e-81568fd1832d.tar.gz?md5=3a027b8ff1d257b91fe257de6c43357d";
539 539 sha256 = "0qih72n2myclanplqipqxpgpj9d2yhff1pz5d02zq1cfqyd173w5";
540 540 };
541 541 meta = {
542 542 license = [ pkgs.lib.licenses.mit ];
543 543 };
544 544 };
545 545 "enum34" = super.buildPythonPackage {
546 546 name = "enum34-1.1.6";
547 547 doCheck = false;
548 548 src = fetchurl {
549 549 url = "https://files.pythonhosted.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
550 550 sha256 = "1cgm5ng2gcfrkrm3hc22brl6chdmv67b9zvva9sfs7gn7dwc9n4a";
551 551 };
552 552 meta = {
553 553 license = [ pkgs.lib.licenses.bsdOriginal ];
554 554 };
555 555 };
556 556 "formencode" = super.buildPythonPackage {
557 557 name = "formencode-1.2.4";
558 558 doCheck = false;
559 559 src = fetchurl {
560 560 url = "https://files.pythonhosted.org/packages/8e/59/0174271a6f004512e0201188593e6d319db139d14cb7490e488bbb078015/FormEncode-1.2.4.tar.gz";
561 561 sha256 = "1fgy04sdy4yry5xcjls3x3xy30dqwj58ycnkndim819jx0788w42";
562 562 };
563 563 meta = {
564 564 license = [ pkgs.lib.licenses.psfl ];
565 565 };
566 566 };
567 567 "funcsigs" = super.buildPythonPackage {
568 568 name = "funcsigs-1.0.2";
569 569 doCheck = false;
570 570 src = fetchurl {
571 571 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
572 572 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
573 573 };
574 574 meta = {
575 575 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
576 576 };
577 577 };
578 578 "functools32" = super.buildPythonPackage {
579 579 name = "functools32-3.2.3.post2";
580 580 doCheck = false;
581 581 src = fetchurl {
582 582 url = "https://files.pythonhosted.org/packages/c5/60/6ac26ad05857c601308d8fb9e87fa36d0ebf889423f47c3502ef034365db/functools32-3.2.3-2.tar.gz";
583 583 sha256 = "0v8ya0b58x47wp216n1zamimv4iw57cxz3xxhzix52jkw3xks9gn";
584 584 };
585 585 meta = {
586 586 license = [ pkgs.lib.licenses.psfl ];
587 587 };
588 588 };
589 589 "future" = super.buildPythonPackage {
590 590 name = "future-0.14.3";
591 591 doCheck = false;
592 592 src = fetchurl {
593 593 url = "https://files.pythonhosted.org/packages/83/80/8ef3a11a15f8eaafafa0937b20c1b3f73527e69ab6b3fa1cf94a5a96aabb/future-0.14.3.tar.gz";
594 594 sha256 = "1savk7jx7hal032f522c5ajhh8fra6gmnadrj9adv5qxi18pv1b2";
595 595 };
596 596 meta = {
597 597 license = [ { fullName = "OSI Approved"; } pkgs.lib.licenses.mit ];
598 598 };
599 599 };
600 600 "futures" = super.buildPythonPackage {
601 601 name = "futures-3.0.2";
602 602 doCheck = false;
603 603 src = fetchurl {
604 604 url = "https://files.pythonhosted.org/packages/f8/e7/fc0fcbeb9193ba2d4de00b065e7fd5aecd0679e93ce95a07322b2b1434f4/futures-3.0.2.tar.gz";
605 605 sha256 = "0mz2pbgxbc2nbib1szifi07whjbfs4r02pv2z390z7p410awjgyw";
606 606 };
607 607 meta = {
608 608 license = [ pkgs.lib.licenses.bsdOriginal ];
609 609 };
610 610 };
611 611 "gevent" = super.buildPythonPackage {
612 612 name = "gevent-1.4.0";
613 613 doCheck = false;
614 614 propagatedBuildInputs = [
615 615 self."greenlet"
616 616 ];
617 617 src = fetchurl {
618 618 url = "https://files.pythonhosted.org/packages/ed/27/6c49b70808f569b66ec7fac2e78f076e9b204db9cf5768740cff3d5a07ae/gevent-1.4.0.tar.gz";
619 619 sha256 = "1lchr4akw2jkm5v4kz7bdm4wv3knkfhbfn9vkkz4s5yrkcxzmdqy";
620 620 };
621 621 meta = {
622 622 license = [ pkgs.lib.licenses.mit ];
623 623 };
624 624 };
625 625 "gnureadline" = super.buildPythonPackage {
626 626 name = "gnureadline-6.3.8";
627 627 doCheck = false;
628 628 src = fetchurl {
629 629 url = "https://files.pythonhosted.org/packages/50/64/86085c823cd78f9df9d8e33dce0baa71618016f8860460b82cf6610e1eb3/gnureadline-6.3.8.tar.gz";
630 630 sha256 = "0ddhj98x2nv45iz4aadk4b9m0b1kpsn1xhcbypn5cd556knhiqjq";
631 631 };
632 632 meta = {
633 633 license = [ { fullName = "GNU General Public License v3 (GPLv3)"; } pkgs.lib.licenses.gpl1 ];
634 634 };
635 635 };
636 636 "gprof2dot" = super.buildPythonPackage {
637 637 name = "gprof2dot-2017.9.19";
638 638 doCheck = false;
639 639 src = fetchurl {
640 640 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
641 641 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
642 642 };
643 643 meta = {
644 644 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
645 645 };
646 646 };
647 647 "greenlet" = super.buildPythonPackage {
648 648 name = "greenlet-0.4.15";
649 649 doCheck = false;
650 650 src = fetchurl {
651 651 url = "https://files.pythonhosted.org/packages/f8/e8/b30ae23b45f69aa3f024b46064c0ac8e5fcb4f22ace0dca8d6f9c8bbe5e7/greenlet-0.4.15.tar.gz";
652 652 sha256 = "1g4g1wwc472ds89zmqlpyan3fbnzpa8qm48z3z1y6mlk44z485ll";
653 653 };
654 654 meta = {
655 655 license = [ pkgs.lib.licenses.mit ];
656 656 };
657 657 };
658 658 "gunicorn" = super.buildPythonPackage {
659 659 name = "gunicorn-19.9.0";
660 660 doCheck = false;
661 661 src = fetchurl {
662 662 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
663 663 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
664 664 };
665 665 meta = {
666 666 license = [ pkgs.lib.licenses.mit ];
667 667 };
668 668 };
669 669 "hupper" = super.buildPythonPackage {
670 670 name = "hupper-1.6.1";
671 671 doCheck = false;
672 672 src = fetchurl {
673 673 url = "https://files.pythonhosted.org/packages/85/d9/e005d357b11249c5d70ddf5b7adab2e4c0da4e8b0531ff146917a04fe6c0/hupper-1.6.1.tar.gz";
674 674 sha256 = "0d3cvkc8ssgwk54wvhbifj56ry97qi10pfzwfk8vwzzcikbfp3zy";
675 675 };
676 676 meta = {
677 677 license = [ pkgs.lib.licenses.mit ];
678 678 };
679 679 };
680 680 "importlib-metadata" = super.buildPythonPackage {
681 681 name = "importlib-metadata-0.20";
682 682 doCheck = false;
683 683 propagatedBuildInputs = [
684 684 self."zipp"
685 685 self."contextlib2"
686 686 self."configparser"
687 687 self."pathlib2"
688 688 ];
689 689 src = fetchurl {
690 690 url = "https://files.pythonhosted.org/packages/05/41/7d339dd7b507e97f67be812fdf29c4ad991ddd34b1ed0f3c54e8f1c4e0b3/importlib_metadata-0.20.tar.gz";
691 691 sha256 = "13bshj8i98l9gxi6df4xbw1262phmawgr527as20brblwf93a55p";
692 692 };
693 693 meta = {
694 694 license = [ pkgs.lib.licenses.asl20 ];
695 695 };
696 696 };
697 697 "infrae.cache" = super.buildPythonPackage {
698 698 name = "infrae.cache-1.0.1";
699 699 doCheck = false;
700 700 propagatedBuildInputs = [
701 701 self."beaker"
702 702 self."repoze.lru"
703 703 ];
704 704 src = fetchurl {
705 705 url = "https://files.pythonhosted.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
706 706 sha256 = "1dvqsjn8vw253wz9d1pz17j79mf4bs53dvp2qxck2qdp1am1njw4";
707 707 };
708 708 meta = {
709 709 license = [ pkgs.lib.licenses.zpl21 ];
710 710 };
711 711 };
712 712 "invoke" = super.buildPythonPackage {
713 713 name = "invoke-0.13.0";
714 714 doCheck = false;
715 715 src = fetchurl {
716 716 url = "https://files.pythonhosted.org/packages/47/bf/d07ef52fa1ac645468858bbac7cb95b246a972a045e821493d17d89c81be/invoke-0.13.0.tar.gz";
717 717 sha256 = "0794vhgxfmkh0vzkkg5cfv1w82g3jc3xr18wim29far9qpx9468s";
718 718 };
719 719 meta = {
720 720 license = [ pkgs.lib.licenses.bsdOriginal ];
721 721 };
722 722 };
723 723 "ipaddress" = super.buildPythonPackage {
724 724 name = "ipaddress-1.0.22";
725 725 doCheck = false;
726 726 src = fetchurl {
727 727 url = "https://files.pythonhosted.org/packages/97/8d/77b8cedcfbf93676148518036c6b1ce7f8e14bf07e95d7fd4ddcb8cc052f/ipaddress-1.0.22.tar.gz";
728 728 sha256 = "0b570bm6xqpjwqis15pvdy6lyvvzfndjvkynilcddjj5x98wfimi";
729 729 };
730 730 meta = {
731 731 license = [ pkgs.lib.licenses.psfl ];
732 732 };
733 733 };
734 734 "ipdb" = super.buildPythonPackage {
735 735 name = "ipdb-0.12";
736 736 doCheck = false;
737 737 propagatedBuildInputs = [
738 738 self."setuptools"
739 739 self."ipython"
740 740 ];
741 741 src = fetchurl {
742 742 url = "https://files.pythonhosted.org/packages/6d/43/c3c2e866a8803e196d6209595020a4a6db1a3c5d07c01455669497ae23d0/ipdb-0.12.tar.gz";
743 743 sha256 = "1khr2n7xfy8hg65kj1bsrjq9g7656pp0ybfa8abpbzpdawji3qnw";
744 744 };
745 745 meta = {
746 746 license = [ pkgs.lib.licenses.bsdOriginal ];
747 747 };
748 748 };
749 749 "ipython" = super.buildPythonPackage {
750 750 name = "ipython-5.1.0";
751 751 doCheck = false;
752 752 propagatedBuildInputs = [
753 753 self."setuptools"
754 754 self."decorator"
755 755 self."pickleshare"
756 756 self."simplegeneric"
757 757 self."traitlets"
758 758 self."prompt-toolkit"
759 759 self."pygments"
760 760 self."pexpect"
761 761 self."backports.shutil-get-terminal-size"
762 762 self."pathlib2"
763 763 self."pexpect"
764 764 ];
765 765 src = fetchurl {
766 766 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
767 767 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
768 768 };
769 769 meta = {
770 770 license = [ pkgs.lib.licenses.bsdOriginal ];
771 771 };
772 772 };
773 773 "ipython-genutils" = super.buildPythonPackage {
774 774 name = "ipython-genutils-0.2.0";
775 775 doCheck = false;
776 776 src = fetchurl {
777 777 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
778 778 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
779 779 };
780 780 meta = {
781 781 license = [ pkgs.lib.licenses.bsdOriginal ];
782 782 };
783 783 };
784 784 "iso8601" = super.buildPythonPackage {
785 785 name = "iso8601-0.1.12";
786 786 doCheck = false;
787 787 src = fetchurl {
788 788 url = "https://files.pythonhosted.org/packages/45/13/3db24895497345fb44c4248c08b16da34a9eb02643cea2754b21b5ed08b0/iso8601-0.1.12.tar.gz";
789 789 sha256 = "10nyvvnrhw2w3p09v1ica4lgj6f4g9j3kkfx17qmraiq3w7b5i29";
790 790 };
791 791 meta = {
792 792 license = [ pkgs.lib.licenses.mit ];
793 793 };
794 794 };
795 795 "isodate" = super.buildPythonPackage {
796 796 name = "isodate-0.6.0";
797 797 doCheck = false;
798 798 propagatedBuildInputs = [
799 799 self."six"
800 800 ];
801 801 src = fetchurl {
802 802 url = "https://files.pythonhosted.org/packages/b1/80/fb8c13a4cd38eb5021dc3741a9e588e4d1de88d895c1910c6fc8a08b7a70/isodate-0.6.0.tar.gz";
803 803 sha256 = "1n7jkz68kk5pwni540pr5zdh99bf6ywydk1p5pdrqisrawylldif";
804 804 };
805 805 meta = {
806 806 license = [ pkgs.lib.licenses.bsdOriginal ];
807 807 };
808 808 };
809 809 "itsdangerous" = super.buildPythonPackage {
810 810 name = "itsdangerous-0.24";
811 811 doCheck = false;
812 812 src = fetchurl {
813 813 url = "https://files.pythonhosted.org/packages/dc/b4/a60bcdba945c00f6d608d8975131ab3f25b22f2bcfe1dab221165194b2d4/itsdangerous-0.24.tar.gz";
814 814 sha256 = "06856q6x675ly542ig0plbqcyab6ksfzijlyf1hzhgg3sgwgrcyb";
815 815 };
816 816 meta = {
817 817 license = [ pkgs.lib.licenses.bsdOriginal ];
818 818 };
819 819 };
820 820 "jinja2" = super.buildPythonPackage {
821 821 name = "jinja2-2.9.6";
822 822 doCheck = false;
823 823 propagatedBuildInputs = [
824 824 self."markupsafe"
825 825 ];
826 826 src = fetchurl {
827 827 url = "https://files.pythonhosted.org/packages/90/61/f820ff0076a2599dd39406dcb858ecb239438c02ce706c8e91131ab9c7f1/Jinja2-2.9.6.tar.gz";
828 828 sha256 = "1zzrkywhziqffrzks14kzixz7nd4yh2vc0fb04a68vfd2ai03anx";
829 829 };
830 830 meta = {
831 831 license = [ pkgs.lib.licenses.bsdOriginal ];
832 832 };
833 833 };
834 834 "jsonschema" = super.buildPythonPackage {
835 835 name = "jsonschema-2.6.0";
836 836 doCheck = false;
837 837 propagatedBuildInputs = [
838 838 self."functools32"
839 839 ];
840 840 src = fetchurl {
841 841 url = "https://files.pythonhosted.org/packages/58/b9/171dbb07e18c6346090a37f03c7e74410a1a56123f847efed59af260a298/jsonschema-2.6.0.tar.gz";
842 842 sha256 = "00kf3zmpp9ya4sydffpifn0j0mzm342a2vzh82p6r0vh10cg7xbg";
843 843 };
844 844 meta = {
845 845 license = [ pkgs.lib.licenses.mit ];
846 846 };
847 847 };
848 848 "jupyter-client" = super.buildPythonPackage {
849 849 name = "jupyter-client-5.0.0";
850 850 doCheck = false;
851 851 propagatedBuildInputs = [
852 852 self."traitlets"
853 853 self."jupyter-core"
854 854 self."pyzmq"
855 855 self."python-dateutil"
856 856 ];
857 857 src = fetchurl {
858 858 url = "https://files.pythonhosted.org/packages/e5/6f/65412ed462202b90134b7e761b0b7e7f949e07a549c1755475333727b3d0/jupyter_client-5.0.0.tar.gz";
859 859 sha256 = "0nxw4rqk4wsjhc87gjqd7pv89cb9dnimcfnmcmp85bmrvv1gjri7";
860 860 };
861 861 meta = {
862 862 license = [ pkgs.lib.licenses.bsdOriginal ];
863 863 };
864 864 };
865 865 "jupyter-core" = super.buildPythonPackage {
866 866 name = "jupyter-core-4.5.0";
867 867 doCheck = false;
868 868 propagatedBuildInputs = [
869 869 self."traitlets"
870 870 ];
871 871 src = fetchurl {
872 872 url = "https://files.pythonhosted.org/packages/4a/de/ff4ca734656d17ebe0450807b59d728f45277e2e7f4b82bc9aae6cb82961/jupyter_core-4.5.0.tar.gz";
873 873 sha256 = "1xr4pbghwk5hayn5wwnhb7z95380r45p79gf5if5pi1akwg7qvic";
874 874 };
875 875 meta = {
876 876 license = [ pkgs.lib.licenses.bsdOriginal ];
877 877 };
878 878 };
879 879 "kombu" = super.buildPythonPackage {
880 880 name = "kombu-4.6.4";
881 881 doCheck = false;
882 882 propagatedBuildInputs = [
883 883 self."amqp"
884 884 self."importlib-metadata"
885 885 ];
886 886 src = fetchurl {
887 887 url = "https://files.pythonhosted.org/packages/52/f2/5a64fc850b0533d2daf09a523406e51e85a8b2a4a2bc87a922a8906ba2aa/kombu-4.6.4.tar.gz";
888 888 sha256 = "16w02mvkxchz7041yia4h8xmqavci88szk18ynxvw4chzcnk3w75";
889 889 };
890 890 meta = {
891 891 license = [ pkgs.lib.licenses.bsdOriginal ];
892 892 };
893 893 };
894 894 "lxml" = super.buildPythonPackage {
895 895 name = "lxml-4.2.5";
896 896 doCheck = false;
897 897 src = fetchurl {
898 898 url = "https://files.pythonhosted.org/packages/4b/20/ddf5eb3bd5c57582d2b4652b4bbcf8da301bdfe5d805cb94e805f4d7464d/lxml-4.2.5.tar.gz";
899 899 sha256 = "0zw0y9hs0nflxhl9cs6ipwwh53szi3w2x06wl0k9cylyqac0cwin";
900 900 };
901 901 meta = {
902 902 license = [ pkgs.lib.licenses.bsdOriginal ];
903 903 };
904 904 };
905 905 "mako" = super.buildPythonPackage {
906 906 name = "mako-1.0.7";
907 907 doCheck = false;
908 908 propagatedBuildInputs = [
909 909 self."markupsafe"
910 910 ];
911 911 src = fetchurl {
912 912 url = "https://files.pythonhosted.org/packages/eb/f3/67579bb486517c0d49547f9697e36582cd19dafb5df9e687ed8e22de57fa/Mako-1.0.7.tar.gz";
913 913 sha256 = "1bi5gnr8r8dva06qpyx4kgjc6spm2k1y908183nbbaylggjzs0jf";
914 914 };
915 915 meta = {
916 916 license = [ pkgs.lib.licenses.mit ];
917 917 };
918 918 };
919 919 "markdown" = super.buildPythonPackage {
920 920 name = "markdown-2.6.11";
921 921 doCheck = false;
922 922 src = fetchurl {
923 923 url = "https://files.pythonhosted.org/packages/b3/73/fc5c850f44af5889192dff783b7b0d8f3fe8d30b65c8e3f78f8f0265fecf/Markdown-2.6.11.tar.gz";
924 924 sha256 = "108g80ryzykh8bj0i7jfp71510wrcixdi771lf2asyghgyf8cmm8";
925 925 };
926 926 meta = {
927 927 license = [ pkgs.lib.licenses.bsdOriginal ];
928 928 };
929 929 };
930 930 "markupsafe" = super.buildPythonPackage {
931 931 name = "markupsafe-1.1.0";
932 932 doCheck = false;
933 933 src = fetchurl {
934 934 url = "https://files.pythonhosted.org/packages/ac/7e/1b4c2e05809a4414ebce0892fe1e32c14ace86ca7d50c70f00979ca9b3a3/MarkupSafe-1.1.0.tar.gz";
935 935 sha256 = "1lxirjypbdd3l9jl4vliilhfnhy7c7f2vlldqg1b0i74khn375sf";
936 936 };
937 937 meta = {
938 938 license = [ pkgs.lib.licenses.bsdOriginal ];
939 939 };
940 940 };
941 941 "meld3" = super.buildPythonPackage {
942 942 name = "meld3-2.0.0";
943 943 doCheck = false;
944 944 src = fetchurl {
945 945 url = "https://files.pythonhosted.org/packages/00/3b/023446ddc1bf0b519c369cbe88269c30c6a64bd10af4817c73f560c302f7/meld3-2.0.0.tar.gz";
946 946 sha256 = "1fbyafwi0d54394hkmp65nf6vk0qm4kipf5z60pdp4244rvadz8y";
947 947 };
948 948 meta = {
949 949 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
950 950 };
951 951 };
952 952 "mistune" = super.buildPythonPackage {
953 953 name = "mistune-0.8.4";
954 954 doCheck = false;
955 955 src = fetchurl {
956 956 url = "https://files.pythonhosted.org/packages/2d/a4/509f6e7783ddd35482feda27bc7f72e65b5e7dc910eca4ab2164daf9c577/mistune-0.8.4.tar.gz";
957 957 sha256 = "0vkmsh0x480rni51lhyvigfdf06b9247z868pk3bal1wnnfl58sr";
958 958 };
959 959 meta = {
960 960 license = [ pkgs.lib.licenses.bsdOriginal ];
961 961 };
962 962 };
963 963 "mock" = super.buildPythonPackage {
964 name = "mock-1.0.1";
964 name = "mock-3.0.5";
965 965 doCheck = false;
966 propagatedBuildInputs = [
967 self."six"
968 self."funcsigs"
969 ];
966 970 src = fetchurl {
967 url = "https://files.pythonhosted.org/packages/a2/52/7edcd94f0afb721a2d559a5b9aae8af4f8f2c79bc63fdbe8a8a6c9b23bbe/mock-1.0.1.tar.gz";
968 sha256 = "0kzlsbki6q0awf89rc287f3aj8x431lrajf160a70z0ikhnxsfdq";
971 url = "https://files.pythonhosted.org/packages/2e/ab/4fe657d78b270aa6a32f027849513b829b41b0f28d9d8d7f8c3d29ea559a/mock-3.0.5.tar.gz";
972 sha256 = "1hrp6j0yrx2xzylfv02qa8kph661m6yq4p0mc8fnimch9j4psrc3";
969 973 };
970 974 meta = {
971 license = [ pkgs.lib.licenses.bsdOriginal ];
975 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "OSI Approved :: BSD License"; } ];
972 976 };
973 977 };
974 978 "more-itertools" = super.buildPythonPackage {
975 979 name = "more-itertools-5.0.0";
976 980 doCheck = false;
977 981 propagatedBuildInputs = [
978 982 self."six"
979 983 ];
980 984 src = fetchurl {
981 985 url = "https://files.pythonhosted.org/packages/dd/26/30fc0d541d9fdf55faf5ba4b0fd68f81d5bd2447579224820ad525934178/more-itertools-5.0.0.tar.gz";
982 986 sha256 = "1r12cm6mcdwdzz7d47a6g4l437xsvapdlgyhqay3i2nrlv03da9q";
983 987 };
984 988 meta = {
985 989 license = [ pkgs.lib.licenses.mit ];
986 990 };
987 991 };
988 992 "msgpack-python" = super.buildPythonPackage {
989 993 name = "msgpack-python-0.5.6";
990 994 doCheck = false;
991 995 src = fetchurl {
992 996 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
993 997 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
994 998 };
995 999 meta = {
996 1000 license = [ pkgs.lib.licenses.asl20 ];
997 1001 };
998 1002 };
999 1003 "mysql-python" = super.buildPythonPackage {
1000 1004 name = "mysql-python-1.2.5";
1001 1005 doCheck = false;
1002 1006 src = fetchurl {
1003 1007 url = "https://files.pythonhosted.org/packages/a5/e9/51b544da85a36a68debe7a7091f068d802fc515a3a202652828c73453cad/MySQL-python-1.2.5.zip";
1004 1008 sha256 = "0x0c2jg0bb3pp84njaqiic050qkyd7ymwhfvhipnimg58yv40441";
1005 1009 };
1006 1010 meta = {
1007 1011 license = [ pkgs.lib.licenses.gpl1 ];
1008 1012 };
1009 1013 };
1010 1014 "nbconvert" = super.buildPythonPackage {
1011 1015 name = "nbconvert-5.3.1";
1012 1016 doCheck = false;
1013 1017 propagatedBuildInputs = [
1014 1018 self."mistune"
1015 1019 self."jinja2"
1016 1020 self."pygments"
1017 1021 self."traitlets"
1018 1022 self."jupyter-core"
1019 1023 self."nbformat"
1020 1024 self."entrypoints"
1021 1025 self."bleach"
1022 1026 self."pandocfilters"
1023 1027 self."testpath"
1024 1028 ];
1025 1029 src = fetchurl {
1026 1030 url = "https://files.pythonhosted.org/packages/b9/a4/d0a0938ad6f5eeb4dea4e73d255c617ef94b0b2849d51194c9bbdb838412/nbconvert-5.3.1.tar.gz";
1027 1031 sha256 = "1f9dkvpx186xjm4xab0qbph588mncp4vqk3fmxrsnqs43mks9c8j";
1028 1032 };
1029 1033 meta = {
1030 1034 license = [ pkgs.lib.licenses.bsdOriginal ];
1031 1035 };
1032 1036 };
1033 1037 "nbformat" = super.buildPythonPackage {
1034 1038 name = "nbformat-4.4.0";
1035 1039 doCheck = false;
1036 1040 propagatedBuildInputs = [
1037 1041 self."ipython-genutils"
1038 1042 self."traitlets"
1039 1043 self."jsonschema"
1040 1044 self."jupyter-core"
1041 1045 ];
1042 1046 src = fetchurl {
1043 1047 url = "https://files.pythonhosted.org/packages/6e/0e/160754f7ae3e984863f585a3743b0ed1702043a81245907c8fae2d537155/nbformat-4.4.0.tar.gz";
1044 1048 sha256 = "00nlf08h8yc4q73nphfvfhxrcnilaqanb8z0mdy6nxk0vzq4wjgp";
1045 1049 };
1046 1050 meta = {
1047 1051 license = [ pkgs.lib.licenses.bsdOriginal ];
1048 1052 };
1049 1053 };
1050 1054 "packaging" = super.buildPythonPackage {
1051 1055 name = "packaging-15.2";
1052 1056 doCheck = false;
1053 1057 src = fetchurl {
1054 1058 url = "https://files.pythonhosted.org/packages/24/c4/185da1304f07047dc9e0c46c31db75c0351bd73458ac3efad7da3dbcfbe1/packaging-15.2.tar.gz";
1055 1059 sha256 = "1zn60w84bxvw6wypffka18ca66pa1k2cfrq3cq8fnsfja5m3k4ng";
1056 1060 };
1057 1061 meta = {
1058 1062 license = [ pkgs.lib.licenses.asl20 ];
1059 1063 };
1060 1064 };
1061 1065 "pandocfilters" = super.buildPythonPackage {
1062 1066 name = "pandocfilters-1.4.2";
1063 1067 doCheck = false;
1064 1068 src = fetchurl {
1065 1069 url = "https://files.pythonhosted.org/packages/4c/ea/236e2584af67bb6df960832731a6e5325fd4441de001767da328c33368ce/pandocfilters-1.4.2.tar.gz";
1066 1070 sha256 = "1a8d9b7s48gmq9zj0pmbyv2sivn5i7m6mybgpkk4jm5vd7hp1pdk";
1067 1071 };
1068 1072 meta = {
1069 1073 license = [ pkgs.lib.licenses.bsdOriginal ];
1070 1074 };
1071 1075 };
1072 1076 "paste" = super.buildPythonPackage {
1073 1077 name = "paste-3.0.8";
1074 1078 doCheck = false;
1075 1079 propagatedBuildInputs = [
1076 1080 self."six"
1077 1081 ];
1078 1082 src = fetchurl {
1079 1083 url = "https://files.pythonhosted.org/packages/66/65/e3acf1663438483c1f6ced0b6c6f3b90da9f0faacb0a6e2aa0f3f9f4b235/Paste-3.0.8.tar.gz";
1080 1084 sha256 = "05w1sh6ky4d7pmdb8nv82n13w22jcn3qsagg5ih3hjmbws9kkwf4";
1081 1085 };
1082 1086 meta = {
1083 1087 license = [ pkgs.lib.licenses.mit ];
1084 1088 };
1085 1089 };
1086 1090 "pastedeploy" = super.buildPythonPackage {
1087 1091 name = "pastedeploy-2.0.1";
1088 1092 doCheck = false;
1089 1093 src = fetchurl {
1090 1094 url = "https://files.pythonhosted.org/packages/19/a0/5623701df7e2478a68a1b685d1a84518024eef994cde7e4da8449a31616f/PasteDeploy-2.0.1.tar.gz";
1091 1095 sha256 = "02imfbbx1mi2h546f3sr37m47dk9qizaqhzzlhx8bkzxa6fzn8yl";
1092 1096 };
1093 1097 meta = {
1094 1098 license = [ pkgs.lib.licenses.mit ];
1095 1099 };
1096 1100 };
1097 1101 "pastescript" = super.buildPythonPackage {
1098 1102 name = "pastescript-3.1.0";
1099 1103 doCheck = false;
1100 1104 propagatedBuildInputs = [
1101 1105 self."paste"
1102 1106 self."pastedeploy"
1103 1107 self."six"
1104 1108 ];
1105 1109 src = fetchurl {
1106 1110 url = "https://files.pythonhosted.org/packages/9e/1d/14db1c283eb21a5d36b6ba1114c13b709629711e64acab653d9994fe346f/PasteScript-3.1.0.tar.gz";
1107 1111 sha256 = "02qcxjjr32ks7a6d4f533wl34ysc7yhwlrfcyqwqbzr52250v4fs";
1108 1112 };
1109 1113 meta = {
1110 1114 license = [ pkgs.lib.licenses.mit ];
1111 1115 };
1112 1116 };
1113 1117 "pathlib2" = super.buildPythonPackage {
1114 1118 name = "pathlib2-2.3.4";
1115 1119 doCheck = false;
1116 1120 propagatedBuildInputs = [
1117 1121 self."six"
1118 1122 self."scandir"
1119 1123 ];
1120 1124 src = fetchurl {
1121 1125 url = "https://files.pythonhosted.org/packages/b5/f4/9c7cc726ece2498b6c8b62d3262aa43f59039b953fe23c9964ac5e18d40b/pathlib2-2.3.4.tar.gz";
1122 1126 sha256 = "1y0f9rkm1924zrc5dn4bwxlhgdkbml82lkcc28l5rgmr7d918q24";
1123 1127 };
1124 1128 meta = {
1125 1129 license = [ pkgs.lib.licenses.mit ];
1126 1130 };
1127 1131 };
1128 1132 "peppercorn" = super.buildPythonPackage {
1129 1133 name = "peppercorn-0.6";
1130 1134 doCheck = false;
1131 1135 src = fetchurl {
1132 1136 url = "https://files.pythonhosted.org/packages/e4/77/93085de7108cdf1a0b092ff443872a8f9442c736d7ddebdf2f27627935f4/peppercorn-0.6.tar.gz";
1133 1137 sha256 = "1ip4bfwcpwkq9hz2dai14k2cyabvwrnvcvrcmzxmqm04g8fnimwn";
1134 1138 };
1135 1139 meta = {
1136 1140 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1137 1141 };
1138 1142 };
1139 1143 "pexpect" = super.buildPythonPackage {
1140 1144 name = "pexpect-4.7.0";
1141 1145 doCheck = false;
1142 1146 propagatedBuildInputs = [
1143 1147 self."ptyprocess"
1144 1148 ];
1145 1149 src = fetchurl {
1146 1150 url = "https://files.pythonhosted.org/packages/1c/b1/362a0d4235496cb42c33d1d8732b5e2c607b0129ad5fdd76f5a583b9fcb3/pexpect-4.7.0.tar.gz";
1147 1151 sha256 = "1sv2rri15zwhds85a4kamwh9pj49qcxv7m4miyr4jfpfwv81yb4y";
1148 1152 };
1149 1153 meta = {
1150 1154 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
1151 1155 };
1152 1156 };
1153 1157 "pickleshare" = super.buildPythonPackage {
1154 1158 name = "pickleshare-0.7.5";
1155 1159 doCheck = false;
1156 1160 propagatedBuildInputs = [
1157 1161 self."pathlib2"
1158 1162 ];
1159 1163 src = fetchurl {
1160 1164 url = "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz";
1161 1165 sha256 = "1jmghg3c53yp1i8cm6pcrm280ayi8621rwyav9fac7awjr3kss47";
1162 1166 };
1163 1167 meta = {
1164 1168 license = [ pkgs.lib.licenses.mit ];
1165 1169 };
1166 1170 };
1167 1171 "plaster" = super.buildPythonPackage {
1168 1172 name = "plaster-1.0";
1169 1173 doCheck = false;
1170 1174 propagatedBuildInputs = [
1171 1175 self."setuptools"
1172 1176 ];
1173 1177 src = fetchurl {
1174 1178 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
1175 1179 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
1176 1180 };
1177 1181 meta = {
1178 1182 license = [ pkgs.lib.licenses.mit ];
1179 1183 };
1180 1184 };
1181 1185 "plaster-pastedeploy" = super.buildPythonPackage {
1182 1186 name = "plaster-pastedeploy-0.7";
1183 1187 doCheck = false;
1184 1188 propagatedBuildInputs = [
1185 1189 self."pastedeploy"
1186 1190 self."plaster"
1187 1191 ];
1188 1192 src = fetchurl {
1189 1193 url = "https://files.pythonhosted.org/packages/99/69/2d3bc33091249266a1bd3cf24499e40ab31d54dffb4a7d76fe647950b98c/plaster_pastedeploy-0.7.tar.gz";
1190 1194 sha256 = "1zg7gcsvc1kzay1ry5p699rg2qavfsxqwl17mqxzr0gzw6j9679r";
1191 1195 };
1192 1196 meta = {
1193 1197 license = [ pkgs.lib.licenses.mit ];
1194 1198 };
1195 1199 };
1196 1200 "pluggy" = super.buildPythonPackage {
1197 1201 name = "pluggy-0.11.0";
1198 1202 doCheck = false;
1199 1203 src = fetchurl {
1200 1204 url = "https://files.pythonhosted.org/packages/0d/a1/862ab336e8128fde20981d2c1aa8506693412daf5083b1911d539412676b/pluggy-0.11.0.tar.gz";
1201 1205 sha256 = "10511a54dvafw1jrk75mrhml53c7b7w4yaw7241696lc2hfvr895";
1202 1206 };
1203 1207 meta = {
1204 1208 license = [ pkgs.lib.licenses.mit ];
1205 1209 };
1206 1210 };
1207 1211 "prompt-toolkit" = super.buildPythonPackage {
1208 1212 name = "prompt-toolkit-1.0.16";
1209 1213 doCheck = false;
1210 1214 propagatedBuildInputs = [
1211 1215 self."six"
1212 1216 self."wcwidth"
1213 1217 ];
1214 1218 src = fetchurl {
1215 1219 url = "https://files.pythonhosted.org/packages/f1/03/bb36771dc9fa7553ac4bdc639a9ecdf6fda0ff4176faf940d97e3c16e41d/prompt_toolkit-1.0.16.tar.gz";
1216 1220 sha256 = "1d65hm6nf0cbq0q0121m60zzy4s1fpg9fn761s1yxf08dridvkn1";
1217 1221 };
1218 1222 meta = {
1219 1223 license = [ pkgs.lib.licenses.bsdOriginal ];
1220 1224 };
1221 1225 };
1222 1226 "psutil" = super.buildPythonPackage {
1223 1227 name = "psutil-5.6.3";
1224 1228 doCheck = false;
1225 1229 src = fetchurl {
1226 1230 url = "https://files.pythonhosted.org/packages/1c/ca/5b8c1fe032a458c2c4bcbe509d1401dca9dda35c7fc46b36bb81c2834740/psutil-5.6.3.tar.gz";
1227 1231 sha256 = "1wv31zly44qj0rp2acg58xbnc7bf6ffyadasq093l455q30qafl6";
1228 1232 };
1229 1233 meta = {
1230 1234 license = [ pkgs.lib.licenses.bsdOriginal ];
1231 1235 };
1232 1236 };
1233 1237 "psycopg2" = super.buildPythonPackage {
1234 1238 name = "psycopg2-2.8.3";
1235 1239 doCheck = false;
1236 1240 src = fetchurl {
1237 1241 url = "https://files.pythonhosted.org/packages/5c/1c/6997288da181277a0c29bc39a5f9143ff20b8c99f2a7d059cfb55163e165/psycopg2-2.8.3.tar.gz";
1238 1242 sha256 = "0ms4kx0p5n281l89awccix4d05ybmdngnjjpi9jbzd0rhf1nwyl9";
1239 1243 };
1240 1244 meta = {
1241 1245 license = [ pkgs.lib.licenses.zpl21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ];
1242 1246 };
1243 1247 };
1244 1248 "ptyprocess" = super.buildPythonPackage {
1245 1249 name = "ptyprocess-0.6.0";
1246 1250 doCheck = false;
1247 1251 src = fetchurl {
1248 1252 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
1249 1253 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
1250 1254 };
1251 1255 meta = {
1252 1256 license = [ ];
1253 1257 };
1254 1258 };
1255 1259 "py" = super.buildPythonPackage {
1256 name = "py-1.6.0";
1260 name = "py-1.8.0";
1257 1261 doCheck = false;
1258 1262 src = fetchurl {
1259 url = "https://files.pythonhosted.org/packages/4f/38/5f427d1eedae73063ce4da680d2bae72014995f9fdeaa57809df61c968cd/py-1.6.0.tar.gz";
1260 sha256 = "1wcs3zv9wl5m5x7p16avqj2gsrviyb23yvc3pr330isqs0sh98q6";
1263 url = "https://files.pythonhosted.org/packages/f1/5a/87ca5909f400a2de1561f1648883af74345fe96349f34f737cdfc94eba8c/py-1.8.0.tar.gz";
1264 sha256 = "0lsy1gajva083pzc7csj1cvbmminb7b4l6a0prdzyb3fd829nqyw";
1261 1265 };
1262 1266 meta = {
1263 1267 license = [ pkgs.lib.licenses.mit ];
1264 1268 };
1265 1269 };
1266 1270 "py-bcrypt" = super.buildPythonPackage {
1267 1271 name = "py-bcrypt-0.4";
1268 1272 doCheck = false;
1269 1273 src = fetchurl {
1270 1274 url = "https://files.pythonhosted.org/packages/68/b1/1c3068c5c4d2e35c48b38dcc865301ebfdf45f54507086ac65ced1fd3b3d/py-bcrypt-0.4.tar.gz";
1271 1275 sha256 = "0y6smdggwi5s72v6p1nn53dg6w05hna3d264cq6kas0lap73p8az";
1272 1276 };
1273 1277 meta = {
1274 1278 license = [ pkgs.lib.licenses.bsdOriginal ];
1275 1279 };
1276 1280 };
1277 1281 "py-gfm" = super.buildPythonPackage {
1278 1282 name = "py-gfm-0.1.4";
1279 1283 doCheck = false;
1280 1284 propagatedBuildInputs = [
1281 1285 self."setuptools"
1282 1286 self."markdown"
1283 1287 ];
1284 1288 src = fetchurl {
1285 1289 url = "https://files.pythonhosted.org/packages/06/ee/004a03a1d92bb386dae44f6dd087db541bc5093374f1637d4d4ae5596cc2/py-gfm-0.1.4.tar.gz";
1286 1290 sha256 = "0zip06g2isivx8fzgqd4n9qzsa22c25jas1rsb7m2rnjg72m0rzg";
1287 1291 };
1288 1292 meta = {
1289 1293 license = [ pkgs.lib.licenses.bsdOriginal ];
1290 1294 };
1291 1295 };
1292 1296 "pyasn1" = super.buildPythonPackage {
1293 1297 name = "pyasn1-0.4.7";
1294 1298 doCheck = false;
1295 1299 src = fetchurl {
1296 1300 url = "https://files.pythonhosted.org/packages/ca/f8/2a60a2c88a97558bdd289b6dc9eb75b00bd90ff34155d681ba6dbbcb46b2/pyasn1-0.4.7.tar.gz";
1297 1301 sha256 = "0146ryp4g09ycy8p3l2vigmgfg42n4gb8whgg8cysrhxr9b56jd9";
1298 1302 };
1299 1303 meta = {
1300 1304 license = [ pkgs.lib.licenses.bsdOriginal ];
1301 1305 };
1302 1306 };
1303 1307 "pyasn1-modules" = super.buildPythonPackage {
1304 1308 name = "pyasn1-modules-0.2.6";
1305 1309 doCheck = false;
1306 1310 propagatedBuildInputs = [
1307 1311 self."pyasn1"
1308 1312 ];
1309 1313 src = fetchurl {
1310 1314 url = "https://files.pythonhosted.org/packages/f1/a9/a1ef72a0e43feff643cf0130a08123dea76205e7a0dda37e3efb5f054a31/pyasn1-modules-0.2.6.tar.gz";
1311 1315 sha256 = "08hph9j1r018drnrny29l7dl2q0cin78csswrhwrh8jmq61pmha3";
1312 1316 };
1313 1317 meta = {
1314 1318 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
1315 1319 };
1316 1320 };
1317 1321 "pycparser" = super.buildPythonPackage {
1318 1322 name = "pycparser-2.19";
1319 1323 doCheck = false;
1320 1324 src = fetchurl {
1321 1325 url = "https://files.pythonhosted.org/packages/68/9e/49196946aee219aead1290e00d1e7fdeab8567783e83e1b9ab5585e6206a/pycparser-2.19.tar.gz";
1322 1326 sha256 = "1cr5dcj9628lkz1qlwq3fv97c25363qppkmcayqvd05dpy573259";
1323 1327 };
1324 1328 meta = {
1325 1329 license = [ pkgs.lib.licenses.bsdOriginal ];
1326 1330 };
1327 1331 };
1328 1332 "pycrypto" = super.buildPythonPackage {
1329 1333 name = "pycrypto-2.6.1";
1330 1334 doCheck = false;
1331 1335 src = fetchurl {
1332 1336 url = "https://files.pythonhosted.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz";
1333 1337 sha256 = "0g0ayql5b9mkjam8hym6zyg6bv77lbh66rv1fyvgqb17kfc1xkpj";
1334 1338 };
1335 1339 meta = {
1336 1340 license = [ pkgs.lib.licenses.publicDomain ];
1337 1341 };
1338 1342 };
1339 1343 "pycurl" = super.buildPythonPackage {
1340 1344 name = "pycurl-7.43.0.3";
1341 1345 doCheck = false;
1342 1346 src = fetchurl {
1343 1347 url = "https://files.pythonhosted.org/packages/ac/b3/0f3979633b7890bab6098d84c84467030b807a1e2b31f5d30103af5a71ca/pycurl-7.43.0.3.tar.gz";
1344 1348 sha256 = "13nsvqhvnmnvfk75s8iynqsgszyv06cjp4drd3psi7zpbh63623g";
1345 1349 };
1346 1350 meta = {
1347 1351 license = [ pkgs.lib.licenses.mit { fullName = "LGPL/MIT"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1348 1352 };
1349 1353 };
1350 1354 "pygments" = super.buildPythonPackage {
1351 1355 name = "pygments-2.4.2";
1352 1356 doCheck = false;
1353 1357 src = fetchurl {
1354 1358 url = "https://files.pythonhosted.org/packages/7e/ae/26808275fc76bf2832deb10d3a3ed3107bc4de01b85dcccbe525f2cd6d1e/Pygments-2.4.2.tar.gz";
1355 1359 sha256 = "15v2sqm5g12bqa0c7wikfh9ck2nl97ayizy1hpqhmws5gqalq748";
1356 1360 };
1357 1361 meta = {
1358 1362 license = [ pkgs.lib.licenses.bsdOriginal ];
1359 1363 };
1360 1364 };
1361 1365 "pymysql" = super.buildPythonPackage {
1362 1366 name = "pymysql-0.8.1";
1363 1367 doCheck = false;
1364 1368 src = fetchurl {
1365 1369 url = "https://files.pythonhosted.org/packages/44/39/6bcb83cae0095a31b6be4511707fdf2009d3e29903a55a0494d3a9a2fac0/PyMySQL-0.8.1.tar.gz";
1366 1370 sha256 = "0a96crz55bw4h6myh833skrli7b0ck89m3x673y2z2ryy7zrpq9l";
1367 1371 };
1368 1372 meta = {
1369 1373 license = [ pkgs.lib.licenses.mit ];
1370 1374 };
1371 1375 };
1372 1376 "pyotp" = super.buildPythonPackage {
1373 1377 name = "pyotp-2.2.7";
1374 1378 doCheck = false;
1375 1379 src = fetchurl {
1376 1380 url = "https://files.pythonhosted.org/packages/b1/ab/477cda97b6ca7baced5106471cb1ac1fe698d1b035983b9f8ee3422989eb/pyotp-2.2.7.tar.gz";
1377 1381 sha256 = "00p69nw431f0s2ilg0hnd77p1l22m06p9rq4f8zfapmavnmzw3xy";
1378 1382 };
1379 1383 meta = {
1380 1384 license = [ pkgs.lib.licenses.mit ];
1381 1385 };
1382 1386 };
1383 1387 "pyparsing" = super.buildPythonPackage {
1384 1388 name = "pyparsing-2.3.0";
1385 1389 doCheck = false;
1386 1390 src = fetchurl {
1387 1391 url = "https://files.pythonhosted.org/packages/d0/09/3e6a5eeb6e04467b737d55f8bba15247ac0876f98fae659e58cd744430c6/pyparsing-2.3.0.tar.gz";
1388 1392 sha256 = "14k5v7n3xqw8kzf42x06bzp184spnlkya2dpjyflax6l3yrallzk";
1389 1393 };
1390 1394 meta = {
1391 1395 license = [ pkgs.lib.licenses.mit ];
1392 1396 };
1393 1397 };
1394 1398 "pyramid" = super.buildPythonPackage {
1395 1399 name = "pyramid-1.10.4";
1396 1400 doCheck = false;
1397 1401 propagatedBuildInputs = [
1398 1402 self."hupper"
1399 1403 self."plaster"
1400 1404 self."plaster-pastedeploy"
1401 1405 self."setuptools"
1402 1406 self."translationstring"
1403 1407 self."venusian"
1404 1408 self."webob"
1405 1409 self."zope.deprecation"
1406 1410 self."zope.interface"
1407 1411 self."repoze.lru"
1408 1412 ];
1409 1413 src = fetchurl {
1410 1414 url = "https://files.pythonhosted.org/packages/c2/43/1ae701c9c6bb3a434358e678a5e72c96e8aa55cf4cb1d2fa2041b5dd38b7/pyramid-1.10.4.tar.gz";
1411 1415 sha256 = "0rkxs1ajycg2zh1c94xlmls56mx5m161sn8112skj0amza6cn36q";
1412 1416 };
1413 1417 meta = {
1414 1418 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1415 1419 };
1416 1420 };
1417 1421 "pyramid-debugtoolbar" = super.buildPythonPackage {
1418 1422 name = "pyramid-debugtoolbar-4.5";
1419 1423 doCheck = false;
1420 1424 propagatedBuildInputs = [
1421 1425 self."pyramid"
1422 1426 self."pyramid-mako"
1423 1427 self."repoze.lru"
1424 1428 self."pygments"
1425 1429 self."ipaddress"
1426 1430 ];
1427 1431 src = fetchurl {
1428 1432 url = "https://files.pythonhosted.org/packages/14/28/1f240239af340d19ee271ac62958158c79edb01a44ad8c9885508dd003d2/pyramid_debugtoolbar-4.5.tar.gz";
1429 1433 sha256 = "0x2p3409pnx66n6dx5vc0mk2r1cp1ydr8mp120w44r9pwcngbibl";
1430 1434 };
1431 1435 meta = {
1432 1436 license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ];
1433 1437 };
1434 1438 };
1435 1439 "pyramid-jinja2" = super.buildPythonPackage {
1436 1440 name = "pyramid-jinja2-2.7";
1437 1441 doCheck = false;
1438 1442 propagatedBuildInputs = [
1439 1443 self."pyramid"
1440 1444 self."zope.deprecation"
1441 1445 self."jinja2"
1442 1446 self."markupsafe"
1443 1447 ];
1444 1448 src = fetchurl {
1445 1449 url = "https://files.pythonhosted.org/packages/d8/80/d60a7233823de22ce77bd864a8a83736a1fe8b49884b08303a2e68b2c853/pyramid_jinja2-2.7.tar.gz";
1446 1450 sha256 = "1sz5s0pp5jqhf4w22w9527yz8hgdi4mhr6apd6vw1gm5clghh8aw";
1447 1451 };
1448 1452 meta = {
1449 1453 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1450 1454 };
1451 1455 };
1452 1456 "pyramid-mailer" = super.buildPythonPackage {
1453 1457 name = "pyramid-mailer-0.15.1";
1454 1458 doCheck = false;
1455 1459 propagatedBuildInputs = [
1456 1460 self."pyramid"
1457 1461 self."repoze.sendmail"
1458 1462 self."transaction"
1459 1463 ];
1460 1464 src = fetchurl {
1461 1465 url = "https://files.pythonhosted.org/packages/a0/f2/6febf5459dff4d7e653314d575469ad2e11b9d2af2c3606360e1c67202f2/pyramid_mailer-0.15.1.tar.gz";
1462 1466 sha256 = "16vg8jb203jgb7b0hd6wllfqvp542qh2ry1gjai2m6qpv5agy2pc";
1463 1467 };
1464 1468 meta = {
1465 1469 license = [ pkgs.lib.licenses.bsdOriginal ];
1466 1470 };
1467 1471 };
1468 1472 "pyramid-mako" = super.buildPythonPackage {
1469 1473 name = "pyramid-mako-1.0.2";
1470 1474 doCheck = false;
1471 1475 propagatedBuildInputs = [
1472 1476 self."pyramid"
1473 1477 self."mako"
1474 1478 ];
1475 1479 src = fetchurl {
1476 1480 url = "https://files.pythonhosted.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
1477 1481 sha256 = "18gk2vliq8z4acblsl6yzgbvnr9rlxjlcqir47km7kvlk1xri83d";
1478 1482 };
1479 1483 meta = {
1480 1484 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1481 1485 };
1482 1486 };
1483 1487 "pysqlite" = super.buildPythonPackage {
1484 1488 name = "pysqlite-2.8.3";
1485 1489 doCheck = false;
1486 1490 src = fetchurl {
1487 1491 url = "https://files.pythonhosted.org/packages/42/02/981b6703e3c83c5b25a829c6e77aad059f9481b0bbacb47e6e8ca12bd731/pysqlite-2.8.3.tar.gz";
1488 1492 sha256 = "1424gwq9sil2ffmnizk60q36vydkv8rxs6m7xs987kz8cdc37lqp";
1489 1493 };
1490 1494 meta = {
1491 1495 license = [ { fullName = "zlib/libpng License"; } { fullName = "zlib/libpng license"; } ];
1492 1496 };
1493 1497 };
1494 1498 "pytest" = super.buildPythonPackage {
1495 name = "pytest-3.8.2";
1499 name = "pytest-4.6.5";
1496 1500 doCheck = false;
1497 1501 propagatedBuildInputs = [
1498 1502 self."py"
1499 1503 self."six"
1500 self."setuptools"
1504 self."packaging"
1501 1505 self."attrs"
1502 self."more-itertools"
1503 1506 self."atomicwrites"
1504 1507 self."pluggy"
1508 self."importlib-metadata"
1509 self."wcwidth"
1505 1510 self."funcsigs"
1506 1511 self."pathlib2"
1512 self."more-itertools"
1507 1513 ];
1508 1514 src = fetchurl {
1509 url = "https://files.pythonhosted.org/packages/5f/d2/7f77f406ac505abda02ab4afb50d06ebf304f6ea42fca34f8f37529106b2/pytest-3.8.2.tar.gz";
1510 sha256 = "18nrwzn61kph2y6gxwfz9ms68rfvr9d4vcffsxng9p7jk9z18clk";
1515 url = "https://files.pythonhosted.org/packages/2a/c6/1d1f32f6a5009900521b12e6560fb6b7245b0d4bc3fb771acd63d10e30e1/pytest-4.6.5.tar.gz";
1516 sha256 = "0iykwwfp4h181nd7rsihh2120b0rkawlw7rvbl19sgfspncr3hwg";
1511 1517 };
1512 1518 meta = {
1513 1519 license = [ pkgs.lib.licenses.mit ];
1514 1520 };
1515 1521 };
1516 1522 "pytest-cov" = super.buildPythonPackage {
1517 name = "pytest-cov-2.6.0";
1523 name = "pytest-cov-2.7.1";
1518 1524 doCheck = false;
1519 1525 propagatedBuildInputs = [
1520 1526 self."pytest"
1521 1527 self."coverage"
1522 1528 ];
1523 1529 src = fetchurl {
1524 url = "https://files.pythonhosted.org/packages/d9/e2/58f90a316fbd94dd50bf5c826a23f3f5d079fb3cc448c1e9f0e3c33a3d2a/pytest-cov-2.6.0.tar.gz";
1525 sha256 = "0qnpp9y3ygx4jk4pf5ad71fh2skbvnr6gl54m7rg5qysnx4g0q73";
1530 url = "https://files.pythonhosted.org/packages/bb/0f/3db7ff86801883b21d5353b258c994b1b8e2abbc804e2273b8d0fd19004b/pytest-cov-2.7.1.tar.gz";
1531 sha256 = "0filvmmyqm715azsl09ql8hy2x7h286n6d8z5x42a1wpvvys83p0";
1526 1532 };
1527 1533 meta = {
1528 1534 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
1529 1535 };
1530 1536 };
1531 1537 "pytest-profiling" = super.buildPythonPackage {
1532 name = "pytest-profiling-1.3.0";
1538 name = "pytest-profiling-1.7.0";
1533 1539 doCheck = false;
1534 1540 propagatedBuildInputs = [
1535 1541 self."six"
1536 1542 self."pytest"
1537 1543 self."gprof2dot"
1538 1544 ];
1539 1545 src = fetchurl {
1540 url = "https://files.pythonhosted.org/packages/f5/34/4626126e041a51ef50a80d0619519b18d20aef249aac25b0d0fdd47e57ee/pytest-profiling-1.3.0.tar.gz";
1541 sha256 = "08r5afx5z22yvpmsnl91l4amsy1yxn8qsmm61mhp06mz8zjs51kb";
1546 url = "https://files.pythonhosted.org/packages/39/70/22a4b33739f07f1732a63e33bbfbf68e0fa58cfba9d200e76d01921eddbf/pytest-profiling-1.7.0.tar.gz";
1547 sha256 = "0abz9gi26jpcfdzgsvwad91555lpgdc8kbymicmms8k2fqa8z4wk";
1542 1548 };
1543 1549 meta = {
1544 1550 license = [ pkgs.lib.licenses.mit ];
1545 1551 };
1546 1552 };
1547 1553 "pytest-runner" = super.buildPythonPackage {
1548 name = "pytest-runner-4.2";
1554 name = "pytest-runner-5.1";
1549 1555 doCheck = false;
1550 1556 src = fetchurl {
1551 url = "https://files.pythonhosted.org/packages/9e/b7/fe6e8f87f9a756fd06722216f1b6698ccba4d269eac6329d9f0c441d0f93/pytest-runner-4.2.tar.gz";
1552 sha256 = "1gkpyphawxz38ni1gdq1fmwyqcg02m7ypzqvv46z06crwdxi2gyj";
1557 url = "https://files.pythonhosted.org/packages/d9/6d/4b41a74b31720e25abd4799be72d54811da4b4d0233e38b75864dcc1f7ad/pytest-runner-5.1.tar.gz";
1558 sha256 = "0ykfcnpp8c22winj63qzc07l5axwlc9ikl8vn05sc32gv3417815";
1553 1559 };
1554 1560 meta = {
1555 1561 license = [ pkgs.lib.licenses.mit ];
1556 1562 };
1557 1563 };
1558 1564 "pytest-sugar" = super.buildPythonPackage {
1559 name = "pytest-sugar-0.9.1";
1565 name = "pytest-sugar-0.9.2";
1560 1566 doCheck = false;
1561 1567 propagatedBuildInputs = [
1562 1568 self."pytest"
1563 1569 self."termcolor"
1570 self."packaging"
1564 1571 ];
1565 1572 src = fetchurl {
1566 url = "https://files.pythonhosted.org/packages/3e/6a/a3f909083079d03bde11d06ab23088886bbe25f2c97fbe4bb865e2bf05bc/pytest-sugar-0.9.1.tar.gz";
1567 sha256 = "0b4av40dv30727m54v211r0nzwjp2ajkjgxix6j484qjmwpw935b";
1573 url = "https://files.pythonhosted.org/packages/55/59/f02f78d1c80f7e03e23177f60624c8106d4f23d124c921df103f65692464/pytest-sugar-0.9.2.tar.gz";
1574 sha256 = "1asq7yc4g8bx2sn7yy974mhc9ywvaihasjab4inkirdwn9s7mn7w";
1568 1575 };
1569 1576 meta = {
1570 1577 license = [ pkgs.lib.licenses.bsdOriginal ];
1571 1578 };
1572 1579 };
1573 1580 "pytest-timeout" = super.buildPythonPackage {
1574 name = "pytest-timeout-1.3.2";
1581 name = "pytest-timeout-1.3.3";
1575 1582 doCheck = false;
1576 1583 propagatedBuildInputs = [
1577 1584 self."pytest"
1578 1585 ];
1579 1586 src = fetchurl {
1580 url = "https://files.pythonhosted.org/packages/8c/3e/1b6a319d12ae7baa3acb7c18ff2c8630a09471a0319d43535c683b4d03eb/pytest-timeout-1.3.2.tar.gz";
1581 sha256 = "09wnmzvnls2mnsdz7x3c3sk2zdp6jl4dryvyj5i8hqz16q2zq5qi";
1587 url = "https://files.pythonhosted.org/packages/13/48/7a166eaa29c1dca6cc253e3ba5773ff2e4aa4f567c1ea3905808e95ac5c1/pytest-timeout-1.3.3.tar.gz";
1588 sha256 = "1cczcjhw4xx5sjkhxlhc5c1bkr7x6fcyx12wrnvwfckshdvblc2a";
1582 1589 };
1583 1590 meta = {
1584 1591 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
1585 1592 };
1586 1593 };
1587 1594 "python-dateutil" = super.buildPythonPackage {
1588 1595 name = "python-dateutil-2.8.0";
1589 1596 doCheck = false;
1590 1597 propagatedBuildInputs = [
1591 1598 self."six"
1592 1599 ];
1593 1600 src = fetchurl {
1594 1601 url = "https://files.pythonhosted.org/packages/ad/99/5b2e99737edeb28c71bcbec5b5dda19d0d9ef3ca3e92e3e925e7c0bb364c/python-dateutil-2.8.0.tar.gz";
1595 1602 sha256 = "17nsfhy4xdz1khrfxa61vd7pmvd5z0wa3zb6v4gb4kfnykv0b668";
1596 1603 };
1597 1604 meta = {
1598 1605 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.asl20 { fullName = "Dual License"; } ];
1599 1606 };
1600 1607 };
1601 1608 "python-editor" = super.buildPythonPackage {
1602 1609 name = "python-editor-1.0.4";
1603 1610 doCheck = false;
1604 1611 src = fetchurl {
1605 1612 url = "https://files.pythonhosted.org/packages/0a/85/78f4a216d28343a67b7397c99825cff336330893f00601443f7c7b2f2234/python-editor-1.0.4.tar.gz";
1606 1613 sha256 = "0yrjh8w72ivqxi4i7xsg5b1vz15x8fg51xra7c3bgfyxqnyadzai";
1607 1614 };
1608 1615 meta = {
1609 1616 license = [ pkgs.lib.licenses.asl20 { fullName = "Apache"; } ];
1610 1617 };
1611 1618 };
1612 1619 "python-ldap" = super.buildPythonPackage {
1613 1620 name = "python-ldap-3.1.0";
1614 1621 doCheck = false;
1615 1622 propagatedBuildInputs = [
1616 1623 self."pyasn1"
1617 1624 self."pyasn1-modules"
1618 1625 ];
1619 1626 src = fetchurl {
1620 1627 url = "https://files.pythonhosted.org/packages/7f/1c/28d721dff2fcd2fef9d55b40df63a00be26ec8a11e8c6fc612ae642f9cfd/python-ldap-3.1.0.tar.gz";
1621 1628 sha256 = "1i97nwfnraylyn0myxlf3vciicrf5h6fymrcff9c00k581wmx5s1";
1622 1629 };
1623 1630 meta = {
1624 1631 license = [ pkgs.lib.licenses.psfl ];
1625 1632 };
1626 1633 };
1627 1634 "python-memcached" = super.buildPythonPackage {
1628 1635 name = "python-memcached-1.59";
1629 1636 doCheck = false;
1630 1637 propagatedBuildInputs = [
1631 1638 self."six"
1632 1639 ];
1633 1640 src = fetchurl {
1634 1641 url = "https://files.pythonhosted.org/packages/90/59/5faf6e3cd8a568dd4f737ddae4f2e54204fd8c51f90bf8df99aca6c22318/python-memcached-1.59.tar.gz";
1635 1642 sha256 = "0kvyapavbirk2x3n1jx4yb9nyigrj1s3x15nm3qhpvhkpqvqdqm2";
1636 1643 };
1637 1644 meta = {
1638 1645 license = [ pkgs.lib.licenses.psfl ];
1639 1646 };
1640 1647 };
1641 1648 "python-pam" = super.buildPythonPackage {
1642 1649 name = "python-pam-1.8.4";
1643 1650 doCheck = false;
1644 1651 src = fetchurl {
1645 1652 url = "https://files.pythonhosted.org/packages/01/16/544d01cae9f28e0292dbd092b6b8b0bf222b528f362ee768a5bed2140111/python-pam-1.8.4.tar.gz";
1646 1653 sha256 = "16whhc0vr7gxsbzvsnq65nq8fs3wwmx755cavm8kkczdkz4djmn8";
1647 1654 };
1648 1655 meta = {
1649 1656 license = [ { fullName = "License :: OSI Approved :: MIT License"; } pkgs.lib.licenses.mit ];
1650 1657 };
1651 1658 };
1652 1659 "python-saml" = super.buildPythonPackage {
1653 1660 name = "python-saml-2.4.2";
1654 1661 doCheck = false;
1655 1662 propagatedBuildInputs = [
1656 1663 self."dm.xmlsec.binding"
1657 1664 self."isodate"
1658 1665 self."defusedxml"
1659 1666 ];
1660 1667 src = fetchurl {
1661 1668 url = "https://files.pythonhosted.org/packages/79/a8/a6611017e0883102fd5e2b73c9d90691b8134e38247c04ee1531d3dc647c/python-saml-2.4.2.tar.gz";
1662 1669 sha256 = "0dls4hwvf13yg7x5yfjrghbywg8g38vn5vr0rsf70hli3ydbfm43";
1663 1670 };
1664 1671 meta = {
1665 1672 license = [ pkgs.lib.licenses.mit ];
1666 1673 };
1667 1674 };
1668 1675 "pytz" = super.buildPythonPackage {
1669 1676 name = "pytz-2019.2";
1670 1677 doCheck = false;
1671 1678 src = fetchurl {
1672 1679 url = "https://files.pythonhosted.org/packages/27/c0/fbd352ca76050952a03db776d241959d5a2ee1abddfeb9e2a53fdb489be4/pytz-2019.2.tar.gz";
1673 1680 sha256 = "0ckb27hhjc8i8gcdvk4d9avld62b7k52yjijc60s2m3y8cpb7h16";
1674 1681 };
1675 1682 meta = {
1676 1683 license = [ pkgs.lib.licenses.mit ];
1677 1684 };
1678 1685 };
1679 1686 "pyzmq" = super.buildPythonPackage {
1680 1687 name = "pyzmq-14.6.0";
1681 1688 doCheck = false;
1682 1689 src = fetchurl {
1683 1690 url = "https://files.pythonhosted.org/packages/8a/3b/5463d5a9d712cd8bbdac335daece0d69f6a6792da4e3dd89956c0db4e4e6/pyzmq-14.6.0.tar.gz";
1684 1691 sha256 = "1frmbjykvhmdg64g7sn20c9fpamrsfxwci1nhhg8q7jgz5pq0ikp";
1685 1692 };
1686 1693 meta = {
1687 1694 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL+BSD"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1688 1695 };
1689 1696 };
1690 1697 "redis" = super.buildPythonPackage {
1691 1698 name = "redis-3.3.8";
1692 1699 doCheck = false;
1693 1700 src = fetchurl {
1694 1701 url = "https://files.pythonhosted.org/packages/d7/e9/549305f1c2480f8c24abadfaa71c20967cc3269769073b59960e9a566072/redis-3.3.8.tar.gz";
1695 1702 sha256 = "0fyxzqax7lcwzwhvnz0i0q6v62hxyv1mv52ywx3bpff9a2vjz8lq";
1696 1703 };
1697 1704 meta = {
1698 1705 license = [ pkgs.lib.licenses.mit ];
1699 1706 };
1700 1707 };
1701 1708 "repoze.lru" = super.buildPythonPackage {
1702 1709 name = "repoze.lru-0.7";
1703 1710 doCheck = false;
1704 1711 src = fetchurl {
1705 1712 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
1706 1713 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
1707 1714 };
1708 1715 meta = {
1709 1716 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1710 1717 };
1711 1718 };
1712 1719 "repoze.sendmail" = super.buildPythonPackage {
1713 1720 name = "repoze.sendmail-4.4.1";
1714 1721 doCheck = false;
1715 1722 propagatedBuildInputs = [
1716 1723 self."setuptools"
1717 1724 self."zope.interface"
1718 1725 self."transaction"
1719 1726 ];
1720 1727 src = fetchurl {
1721 1728 url = "https://files.pythonhosted.org/packages/12/4e/8ef1fd5c42765d712427b9c391419a77bd48877886d2cbc5e9f23c8cad9b/repoze.sendmail-4.4.1.tar.gz";
1722 1729 sha256 = "096ln02jr2afk7ab9j2czxqv2ryqq7m86ah572nqplx52iws73ks";
1723 1730 };
1724 1731 meta = {
1725 1732 license = [ pkgs.lib.licenses.zpl21 ];
1726 1733 };
1727 1734 };
1728 1735 "requests" = super.buildPythonPackage {
1729 1736 name = "requests-2.9.1";
1730 1737 doCheck = false;
1731 1738 src = fetchurl {
1732 1739 url = "https://files.pythonhosted.org/packages/f9/6d/07c44fb1ebe04d069459a189e7dab9e4abfe9432adcd4477367c25332748/requests-2.9.1.tar.gz";
1733 1740 sha256 = "0zsqrzlybf25xscgi7ja4s48y2abf9wvjkn47wh984qgs1fq2xy5";
1734 1741 };
1735 1742 meta = {
1736 1743 license = [ pkgs.lib.licenses.asl20 ];
1737 1744 };
1738 1745 };
1739 1746 "rhodecode-enterprise-ce" = super.buildPythonPackage {
1740 1747 name = "rhodecode-enterprise-ce-4.18.0";
1741 1748 buildInputs = [
1742 1749 self."pytest"
1743 1750 self."py"
1744 1751 self."pytest-cov"
1745 1752 self."pytest-sugar"
1746 1753 self."pytest-runner"
1747 1754 self."pytest-profiling"
1748 1755 self."pytest-timeout"
1749 1756 self."gprof2dot"
1750 1757 self."mock"
1751 1758 self."cov-core"
1752 1759 self."coverage"
1753 1760 self."webtest"
1754 1761 self."beautifulsoup4"
1755 1762 self."configobj"
1756 1763 ];
1757 1764 doCheck = true;
1758 1765 propagatedBuildInputs = [
1759 1766 self."amqp"
1760 1767 self."babel"
1761 1768 self."beaker"
1762 1769 self."bleach"
1763 1770 self."celery"
1764 1771 self."channelstream"
1765 1772 self."click"
1766 1773 self."colander"
1767 1774 self."configobj"
1768 1775 self."cssselect"
1769 1776 self."cryptography"
1770 1777 self."decorator"
1771 1778 self."deform"
1772 1779 self."docutils"
1773 1780 self."dogpile.cache"
1774 1781 self."dogpile.core"
1775 1782 self."formencode"
1776 1783 self."future"
1777 1784 self."futures"
1778 1785 self."infrae.cache"
1779 1786 self."iso8601"
1780 1787 self."itsdangerous"
1781 1788 self."kombu"
1782 1789 self."lxml"
1783 1790 self."mako"
1784 1791 self."markdown"
1785 1792 self."markupsafe"
1786 1793 self."msgpack-python"
1787 1794 self."pyotp"
1788 1795 self."packaging"
1789 1796 self."pathlib2"
1790 1797 self."paste"
1791 1798 self."pastedeploy"
1792 1799 self."pastescript"
1793 1800 self."peppercorn"
1794 1801 self."psutil"
1795 1802 self."py-bcrypt"
1796 1803 self."pycurl"
1797 1804 self."pycrypto"
1798 1805 self."pygments"
1799 1806 self."pyparsing"
1800 1807 self."pyramid-debugtoolbar"
1801 1808 self."pyramid-mako"
1802 1809 self."pyramid"
1803 1810 self."pyramid-mailer"
1804 1811 self."python-dateutil"
1805 1812 self."python-ldap"
1806 1813 self."python-memcached"
1807 1814 self."python-pam"
1808 1815 self."python-saml"
1809 1816 self."pytz"
1810 1817 self."tzlocal"
1811 1818 self."pyzmq"
1812 1819 self."py-gfm"
1813 1820 self."redis"
1814 1821 self."repoze.lru"
1815 1822 self."requests"
1816 1823 self."routes"
1817 1824 self."simplejson"
1818 1825 self."six"
1819 1826 self."sqlalchemy"
1820 1827 self."sshpubkeys"
1821 1828 self."subprocess32"
1822 1829 self."supervisor"
1823 1830 self."translationstring"
1824 1831 self."urllib3"
1825 1832 self."urlobject"
1826 1833 self."venusian"
1827 1834 self."weberror"
1828 1835 self."webhelpers2"
1829 1836 self."webhelpers"
1830 1837 self."webob"
1831 1838 self."whoosh"
1832 1839 self."wsgiref"
1833 1840 self."zope.cachedescriptors"
1834 1841 self."zope.deprecation"
1835 1842 self."zope.event"
1836 1843 self."zope.interface"
1837 1844 self."mysql-python"
1838 1845 self."pymysql"
1839 1846 self."pysqlite"
1840 1847 self."psycopg2"
1841 1848 self."nbconvert"
1842 1849 self."nbformat"
1843 1850 self."jupyter-client"
1844 1851 self."alembic"
1845 1852 self."invoke"
1846 1853 self."bumpversion"
1847 1854 self."gevent"
1848 1855 self."greenlet"
1849 1856 self."gunicorn"
1850 1857 self."waitress"
1851 1858 self."ipdb"
1852 1859 self."ipython"
1853 1860 self."rhodecode-tools"
1854 1861 self."appenlight-client"
1855 1862 self."pytest"
1856 1863 self."py"
1857 1864 self."pytest-cov"
1858 1865 self."pytest-sugar"
1859 1866 self."pytest-runner"
1860 1867 self."pytest-profiling"
1861 1868 self."pytest-timeout"
1862 1869 self."gprof2dot"
1863 1870 self."mock"
1864 1871 self."cov-core"
1865 1872 self."coverage"
1866 1873 self."webtest"
1867 1874 self."beautifulsoup4"
1868 1875 ];
1869 1876 src = ./.;
1870 1877 meta = {
1871 1878 license = [ { fullName = "Affero GNU General Public License v3 or later (AGPLv3+)"; } { fullName = "AGPLv3, and Commercial License"; } ];
1872 1879 };
1873 1880 };
1874 1881 "rhodecode-tools" = super.buildPythonPackage {
1875 1882 name = "rhodecode-tools-1.2.1";
1876 1883 doCheck = false;
1877 1884 propagatedBuildInputs = [
1878 1885 self."click"
1879 1886 self."future"
1880 1887 self."six"
1881 1888 self."mako"
1882 1889 self."markupsafe"
1883 1890 self."requests"
1884 1891 self."urllib3"
1885 1892 self."whoosh"
1886 1893 self."elasticsearch"
1887 1894 self."elasticsearch-dsl"
1888 1895 self."elasticsearch2"
1889 1896 self."elasticsearch1-dsl"
1890 1897 ];
1891 1898 src = fetchurl {
1892 1899 url = "https://code.rhodecode.com/rhodecode-tools-ce/artifacts/download/0-10ac93f4-bb7d-4b97-baea-68110743dd5a.tar.gz?md5=962dc77c06aceee62282b98d33149661";
1893 1900 sha256 = "1vfhgf46inbx7jvlfx4fdzh3vz7lh37r291gzb5hx447pfm3qllg";
1894 1901 };
1895 1902 meta = {
1896 1903 license = [ { fullName = "Apache 2.0 and Proprietary"; } ];
1897 1904 };
1898 1905 };
1899 1906 "routes" = super.buildPythonPackage {
1900 1907 name = "routes-2.4.1";
1901 1908 doCheck = false;
1902 1909 propagatedBuildInputs = [
1903 1910 self."six"
1904 1911 self."repoze.lru"
1905 1912 ];
1906 1913 src = fetchurl {
1907 1914 url = "https://files.pythonhosted.org/packages/33/38/ea827837e68d9c7dde4cff7ec122a93c319f0effc08ce92a17095576603f/Routes-2.4.1.tar.gz";
1908 1915 sha256 = "1zamff3m0kc4vyfniyhxpkkcqv1rrgnmh37ykxv34nna1ws47vi6";
1909 1916 };
1910 1917 meta = {
1911 1918 license = [ pkgs.lib.licenses.mit ];
1912 1919 };
1913 1920 };
1914 1921 "scandir" = super.buildPythonPackage {
1915 1922 name = "scandir-1.10.0";
1916 1923 doCheck = false;
1917 1924 src = fetchurl {
1918 1925 url = "https://files.pythonhosted.org/packages/df/f5/9c052db7bd54d0cbf1bc0bb6554362bba1012d03e5888950a4f5c5dadc4e/scandir-1.10.0.tar.gz";
1919 1926 sha256 = "1bkqwmf056pkchf05ywbnf659wqlp6lljcdb0y88wr9f0vv32ijd";
1920 1927 };
1921 1928 meta = {
1922 1929 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
1923 1930 };
1924 1931 };
1925 1932 "setproctitle" = super.buildPythonPackage {
1926 1933 name = "setproctitle-1.1.10";
1927 1934 doCheck = false;
1928 1935 src = fetchurl {
1929 1936 url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
1930 1937 sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
1931 1938 };
1932 1939 meta = {
1933 1940 license = [ pkgs.lib.licenses.bsdOriginal ];
1934 1941 };
1935 1942 };
1936 1943 "setuptools" = super.buildPythonPackage {
1937 1944 name = "setuptools-41.2.0";
1938 1945 doCheck = false;
1939 1946 src = fetchurl {
1940 1947 url = "https://files.pythonhosted.org/packages/d9/ca/7279974e489e8b65003fe618a1a741d6350227fa2bf48d16be76c7422423/setuptools-41.2.0.zip";
1941 1948 sha256 = "04k0dp9msmlv3g3zx7f5p8wdjr6hdf5c0bgmczlc4yncwyx6pf36";
1942 1949 };
1943 1950 meta = {
1944 1951 license = [ pkgs.lib.licenses.mit ];
1945 1952 };
1946 1953 };
1947 1954 "simplegeneric" = super.buildPythonPackage {
1948 1955 name = "simplegeneric-0.8.1";
1949 1956 doCheck = false;
1950 1957 src = fetchurl {
1951 1958 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
1952 1959 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
1953 1960 };
1954 1961 meta = {
1955 1962 license = [ pkgs.lib.licenses.zpl21 ];
1956 1963 };
1957 1964 };
1958 1965 "simplejson" = super.buildPythonPackage {
1959 1966 name = "simplejson-3.16.0";
1960 1967 doCheck = false;
1961 1968 src = fetchurl {
1962 1969 url = "https://files.pythonhosted.org/packages/e3/24/c35fb1c1c315fc0fffe61ea00d3f88e85469004713dab488dee4f35b0aff/simplejson-3.16.0.tar.gz";
1963 1970 sha256 = "19cws1syk8jzq2pw43878dv6fjkb0ifvjpx0i9aajix6kc9jkwxi";
1964 1971 };
1965 1972 meta = {
1966 1973 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
1967 1974 };
1968 1975 };
1969 1976 "six" = super.buildPythonPackage {
1970 1977 name = "six-1.11.0";
1971 1978 doCheck = false;
1972 1979 src = fetchurl {
1973 1980 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
1974 1981 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
1975 1982 };
1976 1983 meta = {
1977 1984 license = [ pkgs.lib.licenses.mit ];
1978 1985 };
1979 1986 };
1980 1987 "sqlalchemy" = super.buildPythonPackage {
1981 1988 name = "sqlalchemy-1.1.18";
1982 1989 doCheck = false;
1983 1990 src = fetchurl {
1984 1991 url = "https://files.pythonhosted.org/packages/cc/4d/96d93ff77cd67aca7618e402191eee3490d8f5f245d6ab7622d35fe504f4/SQLAlchemy-1.1.18.tar.gz";
1985 1992 sha256 = "1ab4ysip6irajfbxl9wy27kv76miaz8h6759hfx92499z4dcf3lb";
1986 1993 };
1987 1994 meta = {
1988 1995 license = [ pkgs.lib.licenses.mit ];
1989 1996 };
1990 1997 };
1991 1998 "sshpubkeys" = super.buildPythonPackage {
1992 1999 name = "sshpubkeys-3.1.0";
1993 2000 doCheck = false;
1994 2001 propagatedBuildInputs = [
1995 2002 self."cryptography"
1996 2003 self."ecdsa"
1997 2004 ];
1998 2005 src = fetchurl {
1999 2006 url = "https://files.pythonhosted.org/packages/00/23/f7508a12007c96861c3da811992f14283d79c819d71a217b3e12d5196649/sshpubkeys-3.1.0.tar.gz";
2000 2007 sha256 = "105g2li04nm1hb15a2y6hm9m9k7fbrkd5l3gy12w3kgcmsf3k25k";
2001 2008 };
2002 2009 meta = {
2003 2010 license = [ pkgs.lib.licenses.bsdOriginal ];
2004 2011 };
2005 2012 };
2006 2013 "subprocess32" = super.buildPythonPackage {
2007 2014 name = "subprocess32-3.5.4";
2008 2015 doCheck = false;
2009 2016 src = fetchurl {
2010 2017 url = "https://files.pythonhosted.org/packages/32/c8/564be4d12629b912ea431f1a50eb8b3b9d00f1a0b1ceff17f266be190007/subprocess32-3.5.4.tar.gz";
2011 2018 sha256 = "17f7mvwx2271s1wrl0qac3wjqqnrqag866zs3qc8v5wp0k43fagb";
2012 2019 };
2013 2020 meta = {
2014 2021 license = [ pkgs.lib.licenses.psfl ];
2015 2022 };
2016 2023 };
2017 2024 "supervisor" = super.buildPythonPackage {
2018 2025 name = "supervisor-4.0.3";
2019 2026 doCheck = false;
2020 2027 propagatedBuildInputs = [
2021 2028 self."meld3"
2022 2029 ];
2023 2030 src = fetchurl {
2024 2031 url = "https://files.pythonhosted.org/packages/97/48/f38bf70bd9282d1a18d591616557cc1a77a1c627d57dff66ead65c891dc8/supervisor-4.0.3.tar.gz";
2025 2032 sha256 = "17hla7mx6w5m5jzkkjxgqa8wpswqmfhbhf49f692hw78fg0ans7p";
2026 2033 };
2027 2034 meta = {
2028 2035 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
2029 2036 };
2030 2037 };
2031 2038 "tempita" = super.buildPythonPackage {
2032 2039 name = "tempita-0.5.2";
2033 2040 doCheck = false;
2034 2041 src = fetchurl {
2035 2042 url = "https://files.pythonhosted.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz";
2036 2043 sha256 = "177wwq45slfyajd8csy477bmdmzipyw0dm7i85k3akb7m85wzkna";
2037 2044 };
2038 2045 meta = {
2039 2046 license = [ pkgs.lib.licenses.mit ];
2040 2047 };
2041 2048 };
2042 2049 "termcolor" = super.buildPythonPackage {
2043 2050 name = "termcolor-1.1.0";
2044 2051 doCheck = false;
2045 2052 src = fetchurl {
2046 2053 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
2047 2054 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
2048 2055 };
2049 2056 meta = {
2050 2057 license = [ pkgs.lib.licenses.mit ];
2051 2058 };
2052 2059 };
2053 2060 "testpath" = super.buildPythonPackage {
2054 2061 name = "testpath-0.4.2";
2055 2062 doCheck = false;
2056 2063 src = fetchurl {
2057 2064 url = "https://files.pythonhosted.org/packages/06/30/9a7e917066d851d8b4117e85794b5f14516419ea714a8a2681ec6aa8a981/testpath-0.4.2.tar.gz";
2058 2065 sha256 = "1y40hywscnnyb734pnzm55nd8r8kp1072bjxbil83gcd53cv755n";
2059 2066 };
2060 2067 meta = {
2061 2068 license = [ ];
2062 2069 };
2063 2070 };
2064 2071 "traitlets" = super.buildPythonPackage {
2065 2072 name = "traitlets-4.3.2";
2066 2073 doCheck = false;
2067 2074 propagatedBuildInputs = [
2068 2075 self."ipython-genutils"
2069 2076 self."six"
2070 2077 self."decorator"
2071 2078 self."enum34"
2072 2079 ];
2073 2080 src = fetchurl {
2074 2081 url = "https://files.pythonhosted.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
2075 2082 sha256 = "0dbq7sx26xqz5ixs711k5nc88p8a0nqyz6162pwks5dpcz9d4jww";
2076 2083 };
2077 2084 meta = {
2078 2085 license = [ pkgs.lib.licenses.bsdOriginal ];
2079 2086 };
2080 2087 };
2081 2088 "transaction" = super.buildPythonPackage {
2082 2089 name = "transaction-2.4.0";
2083 2090 doCheck = false;
2084 2091 propagatedBuildInputs = [
2085 2092 self."zope.interface"
2086 2093 ];
2087 2094 src = fetchurl {
2088 2095 url = "https://files.pythonhosted.org/packages/9d/7d/0e8af0d059e052b9dcf2bb5a08aad20ae3e238746bdd3f8701a60969b363/transaction-2.4.0.tar.gz";
2089 2096 sha256 = "17wz1y524ca07vr03yddy8dv0gbscs06dbdywmllxv5rc725jq3j";
2090 2097 };
2091 2098 meta = {
2092 2099 license = [ pkgs.lib.licenses.zpl21 ];
2093 2100 };
2094 2101 };
2095 2102 "translationstring" = super.buildPythonPackage {
2096 2103 name = "translationstring-1.3";
2097 2104 doCheck = false;
2098 2105 src = fetchurl {
2099 2106 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
2100 2107 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
2101 2108 };
2102 2109 meta = {
2103 2110 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
2104 2111 };
2105 2112 };
2106 2113 "tzlocal" = super.buildPythonPackage {
2107 2114 name = "tzlocal-1.5.1";
2108 2115 doCheck = false;
2109 2116 propagatedBuildInputs = [
2110 2117 self."pytz"
2111 2118 ];
2112 2119 src = fetchurl {
2113 2120 url = "https://files.pythonhosted.org/packages/cb/89/e3687d3ed99bc882793f82634e9824e62499fdfdc4b1ae39e211c5b05017/tzlocal-1.5.1.tar.gz";
2114 2121 sha256 = "0kiciwiqx0bv0fbc913idxibc4ygg4cb7f8rcpd9ij2shi4bigjf";
2115 2122 };
2116 2123 meta = {
2117 2124 license = [ pkgs.lib.licenses.mit ];
2118 2125 };
2119 2126 };
2120 2127 "urllib3" = super.buildPythonPackage {
2121 2128 name = "urllib3-1.24.1";
2122 2129 doCheck = false;
2123 2130 src = fetchurl {
2124 2131 url = "https://files.pythonhosted.org/packages/b1/53/37d82ab391393565f2f831b8eedbffd57db5a718216f82f1a8b4d381a1c1/urllib3-1.24.1.tar.gz";
2125 2132 sha256 = "08lwd9f3hqznyf32vnzwvp87pchx062nkbgyrf67rwlkgj0jk5fy";
2126 2133 };
2127 2134 meta = {
2128 2135 license = [ pkgs.lib.licenses.mit ];
2129 2136 };
2130 2137 };
2131 2138 "urlobject" = super.buildPythonPackage {
2132 2139 name = "urlobject-2.4.3";
2133 2140 doCheck = false;
2134 2141 src = fetchurl {
2135 2142 url = "https://files.pythonhosted.org/packages/e2/b8/1d0a916f4b34c4618846e6da0e4eeaa8fcb4a2f39e006434fe38acb74b34/URLObject-2.4.3.tar.gz";
2136 2143 sha256 = "1ahc8ficzfvr2avln71immfh4ls0zyv6cdaa5xmkdj5rd87f5cj7";
2137 2144 };
2138 2145 meta = {
2139 2146 license = [ pkgs.lib.licenses.publicDomain ];
2140 2147 };
2141 2148 };
2142 2149 "venusian" = super.buildPythonPackage {
2143 2150 name = "venusian-1.2.0";
2144 2151 doCheck = false;
2145 2152 src = fetchurl {
2146 2153 url = "https://files.pythonhosted.org/packages/7e/6f/40a9d43ac77cb51cb62be5b5662d170f43f8037bdc4eab56336c4ca92bb7/venusian-1.2.0.tar.gz";
2147 2154 sha256 = "0ghyx66g8ikx9nx1mnwqvdcqm11i1vlq0hnvwl50s48bp22q5v34";
2148 2155 };
2149 2156 meta = {
2150 2157 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
2151 2158 };
2152 2159 };
2153 2160 "vine" = super.buildPythonPackage {
2154 2161 name = "vine-1.3.0";
2155 2162 doCheck = false;
2156 2163 src = fetchurl {
2157 2164 url = "https://files.pythonhosted.org/packages/1c/e1/79fb8046e607dd6c2ad05c9b8ebac9d0bd31d086a08f02699e96fc5b3046/vine-1.3.0.tar.gz";
2158 2165 sha256 = "11ydsbhl1vabndc2r979dv61s6j2b0giq6dgvryifvq1m7bycghk";
2159 2166 };
2160 2167 meta = {
2161 2168 license = [ pkgs.lib.licenses.bsdOriginal ];
2162 2169 };
2163 2170 };
2164 2171 "waitress" = super.buildPythonPackage {
2165 2172 name = "waitress-1.3.1";
2166 2173 doCheck = false;
2167 2174 src = fetchurl {
2168 2175 url = "https://files.pythonhosted.org/packages/a6/e6/708da7bba65898e5d759ade8391b1077e49d07be0b0223c39f5be04def56/waitress-1.3.1.tar.gz";
2169 2176 sha256 = "1iysl8ka3l4cdrr0r19fh1cv28q41mwpvgsb81ji7k4shkb0k3i7";
2170 2177 };
2171 2178 meta = {
2172 2179 license = [ pkgs.lib.licenses.zpl21 ];
2173 2180 };
2174 2181 };
2175 2182 "wcwidth" = super.buildPythonPackage {
2176 2183 name = "wcwidth-0.1.7";
2177 2184 doCheck = false;
2178 2185 src = fetchurl {
2179 2186 url = "https://files.pythonhosted.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
2180 2187 sha256 = "0pn6dflzm609m4r3i8ik5ni9ijjbb5fa3vg1n7hn6vkd49r77wrx";
2181 2188 };
2182 2189 meta = {
2183 2190 license = [ pkgs.lib.licenses.mit ];
2184 2191 };
2185 2192 };
2186 2193 "webencodings" = super.buildPythonPackage {
2187 2194 name = "webencodings-0.5.1";
2188 2195 doCheck = false;
2189 2196 src = fetchurl {
2190 2197 url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz";
2191 2198 sha256 = "08qrgrc4hrximb2gqnl69g01s93rhf2842jfxdjljc1dbwj1qsmk";
2192 2199 };
2193 2200 meta = {
2194 2201 license = [ pkgs.lib.licenses.bsdOriginal ];
2195 2202 };
2196 2203 };
2197 2204 "weberror" = super.buildPythonPackage {
2198 2205 name = "weberror-0.10.3";
2199 2206 doCheck = false;
2200 2207 propagatedBuildInputs = [
2201 2208 self."webob"
2202 2209 self."tempita"
2203 2210 self."pygments"
2204 2211 self."paste"
2205 2212 ];
2206 2213 src = fetchurl {
2207 2214 url = "https://files.pythonhosted.org/packages/35/76/e7e5c2ce7e9c7f31b54c1ff295a495886d1279a002557d74dd8957346a79/WebError-0.10.3.tar.gz";
2208 2215 sha256 = "0frg4kvycqpj5bi8asfqfs6bxsr2cvjvb6b56c4d1ai1z57kbjx6";
2209 2216 };
2210 2217 meta = {
2211 2218 license = [ pkgs.lib.licenses.mit ];
2212 2219 };
2213 2220 };
2214 2221 "webhelpers" = super.buildPythonPackage {
2215 2222 name = "webhelpers-1.3";
2216 2223 doCheck = false;
2217 2224 propagatedBuildInputs = [
2218 2225 self."markupsafe"
2219 2226 ];
2220 2227 src = fetchurl {
2221 2228 url = "https://files.pythonhosted.org/packages/ee/68/4d07672821d514184357f1552f2dad923324f597e722de3b016ca4f7844f/WebHelpers-1.3.tar.gz";
2222 2229 sha256 = "10x5i82qdkrvyw18gsybwggfhfpl869siaab89vnndi9x62g51pa";
2223 2230 };
2224 2231 meta = {
2225 2232 license = [ pkgs.lib.licenses.bsdOriginal ];
2226 2233 };
2227 2234 };
2228 2235 "webhelpers2" = super.buildPythonPackage {
2229 2236 name = "webhelpers2-2.0";
2230 2237 doCheck = false;
2231 2238 propagatedBuildInputs = [
2232 2239 self."markupsafe"
2233 2240 self."six"
2234 2241 ];
2235 2242 src = fetchurl {
2236 2243 url = "https://files.pythonhosted.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz";
2237 2244 sha256 = "0aphva1qmxh83n01p53f5fd43m4srzbnfbz5ajvbx9aj2aipwmcs";
2238 2245 };
2239 2246 meta = {
2240 2247 license = [ pkgs.lib.licenses.mit ];
2241 2248 };
2242 2249 };
2243 2250 "webob" = super.buildPythonPackage {
2244 2251 name = "webob-1.8.5";
2245 2252 doCheck = false;
2246 2253 src = fetchurl {
2247 2254 url = "https://files.pythonhosted.org/packages/9d/1a/0c89c070ee2829c934cb6c7082287c822e28236a4fcf90063e6be7c35532/WebOb-1.8.5.tar.gz";
2248 2255 sha256 = "11khpzaxc88q31v25ic330gsf56fwmbdc9b30br8mvp0fmwspah5";
2249 2256 };
2250 2257 meta = {
2251 2258 license = [ pkgs.lib.licenses.mit ];
2252 2259 };
2253 2260 };
2254 2261 "webtest" = super.buildPythonPackage {
2255 2262 name = "webtest-2.0.33";
2256 2263 doCheck = false;
2257 2264 propagatedBuildInputs = [
2258 2265 self."six"
2259 2266 self."webob"
2260 2267 self."waitress"
2261 2268 self."beautifulsoup4"
2262 2269 ];
2263 2270 src = fetchurl {
2264 2271 url = "https://files.pythonhosted.org/packages/a8/b0/ffc9413b637dbe26e291429bb0f6ed731e518d0cd03da28524a8fe2e8a8f/WebTest-2.0.33.tar.gz";
2265 2272 sha256 = "1l3z0cwqslsf4rcrhi2gr8kdfh74wn2dw76376i4g9i38gz8wd21";
2266 2273 };
2267 2274 meta = {
2268 2275 license = [ pkgs.lib.licenses.mit ];
2269 2276 };
2270 2277 };
2271 2278 "whoosh" = super.buildPythonPackage {
2272 2279 name = "whoosh-2.7.4";
2273 2280 doCheck = false;
2274 2281 src = fetchurl {
2275 2282 url = "https://files.pythonhosted.org/packages/25/2b/6beed2107b148edc1321da0d489afc4617b9ed317ef7b72d4993cad9b684/Whoosh-2.7.4.tar.gz";
2276 2283 sha256 = "10qsqdjpbc85fykc1vgcs8xwbgn4l2l52c8d83xf1q59pwyn79bw";
2277 2284 };
2278 2285 meta = {
2279 2286 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
2280 2287 };
2281 2288 };
2282 2289 "ws4py" = super.buildPythonPackage {
2283 2290 name = "ws4py-0.5.1";
2284 2291 doCheck = false;
2285 2292 src = fetchurl {
2286 2293 url = "https://files.pythonhosted.org/packages/53/20/4019a739b2eefe9282d3822ef6a225250af964b117356971bd55e274193c/ws4py-0.5.1.tar.gz";
2287 2294 sha256 = "10slbbf2jm4hpr92jx7kh7mhf48sjl01v2w4d8z3f1p0ybbp7l19";
2288 2295 };
2289 2296 meta = {
2290 2297 license = [ pkgs.lib.licenses.bsdOriginal ];
2291 2298 };
2292 2299 };
2293 2300 "wsgiref" = super.buildPythonPackage {
2294 2301 name = "wsgiref-0.1.2";
2295 2302 doCheck = false;
2296 2303 src = fetchurl {
2297 2304 url = "https://files.pythonhosted.org/packages/41/9e/309259ce8dff8c596e8c26df86dbc4e848b9249fd36797fd60be456f03fc/wsgiref-0.1.2.zip";
2298 2305 sha256 = "0y8fyjmpq7vwwm4x732w97qbkw78rjwal5409k04cw4m03411rn7";
2299 2306 };
2300 2307 meta = {
2301 2308 license = [ { fullName = "PSF or ZPL"; } ];
2302 2309 };
2303 2310 };
2304 2311 "zipp" = super.buildPythonPackage {
2305 2312 name = "zipp-0.6.0";
2306 2313 doCheck = false;
2307 2314 propagatedBuildInputs = [
2308 2315 self."more-itertools"
2309 2316 ];
2310 2317 src = fetchurl {
2311 2318 url = "https://files.pythonhosted.org/packages/57/dd/585d728479d97d25aeeb9aa470d36a4ad8d0ba5610f84e14770128ce6ff7/zipp-0.6.0.tar.gz";
2312 2319 sha256 = "13ndkf7vklw978a4gdl1yfvn8hch28429a0iam67sg4nrp5v261p";
2313 2320 };
2314 2321 meta = {
2315 2322 license = [ pkgs.lib.licenses.mit ];
2316 2323 };
2317 2324 };
2318 2325 "zope.cachedescriptors" = super.buildPythonPackage {
2319 2326 name = "zope.cachedescriptors-4.3.1";
2320 2327 doCheck = false;
2321 2328 propagatedBuildInputs = [
2322 2329 self."setuptools"
2323 2330 ];
2324 2331 src = fetchurl {
2325 2332 url = "https://files.pythonhosted.org/packages/2f/89/ebe1890cc6d3291ebc935558fa764d5fffe571018dbbee200e9db78762cb/zope.cachedescriptors-4.3.1.tar.gz";
2326 2333 sha256 = "0jhr3m5p74c6r7k8iv0005b8bfsialih9d7zl5vx38rf5xq1lk8z";
2327 2334 };
2328 2335 meta = {
2329 2336 license = [ pkgs.lib.licenses.zpl21 ];
2330 2337 };
2331 2338 };
2332 2339 "zope.deprecation" = super.buildPythonPackage {
2333 2340 name = "zope.deprecation-4.4.0";
2334 2341 doCheck = false;
2335 2342 propagatedBuildInputs = [
2336 2343 self."setuptools"
2337 2344 ];
2338 2345 src = fetchurl {
2339 2346 url = "https://files.pythonhosted.org/packages/34/da/46e92d32d545dd067b9436279d84c339e8b16de2ca393d7b892bc1e1e9fd/zope.deprecation-4.4.0.tar.gz";
2340 2347 sha256 = "1pz2cv7gv9y1r3m0bdv7ks1alagmrn5msm5spwdzkb2by0w36i8d";
2341 2348 };
2342 2349 meta = {
2343 2350 license = [ pkgs.lib.licenses.zpl21 ];
2344 2351 };
2345 2352 };
2346 2353 "zope.event" = super.buildPythonPackage {
2347 2354 name = "zope.event-4.4";
2348 2355 doCheck = false;
2349 2356 propagatedBuildInputs = [
2350 2357 self."setuptools"
2351 2358 ];
2352 2359 src = fetchurl {
2353 2360 url = "https://files.pythonhosted.org/packages/4c/b2/51c0369adcf5be2334280eed230192ab3b03f81f8efda9ddea6f65cc7b32/zope.event-4.4.tar.gz";
2354 2361 sha256 = "1ksbc726av9xacml6jhcfyn828hlhb9xlddpx6fcvnlvmpmpvhk9";
2355 2362 };
2356 2363 meta = {
2357 2364 license = [ pkgs.lib.licenses.zpl21 ];
2358 2365 };
2359 2366 };
2360 2367 "zope.interface" = super.buildPythonPackage {
2361 2368 name = "zope.interface-4.6.0";
2362 2369 doCheck = false;
2363 2370 propagatedBuildInputs = [
2364 2371 self."setuptools"
2365 2372 ];
2366 2373 src = fetchurl {
2367 2374 url = "https://files.pythonhosted.org/packages/4e/d0/c9d16bd5b38de44a20c6dc5d5ed80a49626fafcb3db9f9efdc2a19026db6/zope.interface-4.6.0.tar.gz";
2368 2375 sha256 = "1rgh2x3rcl9r0v0499kf78xy86rnmanajf4ywmqb943wpk50sg8v";
2369 2376 };
2370 2377 meta = {
2371 2378 license = [ pkgs.lib.licenses.zpl21 ];
2372 2379 };
2373 2380 };
2374 2381
2375 2382 ### Test requirements
2376 2383
2377 2384
2378 2385 }
@@ -1,16 +1,19 b''
1 1 [pytest]
2 2 testpaths = rhodecode
3 3 norecursedirs = rhodecode/public rhodecode/templates tests/scripts
4 4 cache_dir = /tmp/.pytest_cache
5 5
6 6 pyramid_config = rhodecode/tests/rhodecode.ini
7 7 vcsserver_protocol = http
8 8 vcsserver_config_http = rhodecode/tests/vcsserver_http.ini
9 9
10 10 addopts =
11 11 --pdbcls=IPython.terminal.debugger:TerminalPdb
12 --strict-markers
12 13
13 14 markers =
14 15 vcs_operations: Mark tests depending on a running RhodeCode instance.
15 16 xfail_backends: Mark tests as xfail for given backends.
16 17 skip_backends: Mark tests as skipped for given backends.
18 backends: Mark backends
19 dbs: database markers for running tests for given DB
@@ -1,16 +1,16 b''
1 1 # test related requirements
2 pytest==3.8.2
3 py==1.6.0
4 pytest-cov==2.6.0
5 pytest-sugar==0.9.1
6 pytest-runner==4.2.0
7 pytest-profiling==1.3.0
8 pytest-timeout==1.3.2
2 pytest==4.6.5
3 py==1.8.0
4 pytest-cov==2.7.1
5 pytest-sugar==0.9.2
6 pytest-runner==5.1.0
7 pytest-profiling==1.7.0
8 pytest-timeout==1.3.3
9 9 gprof2dot==2017.9.19
10 10
11 mock==1.0.1
11 mock==3.0.5
12 12 cov-core==1.15.0
13 coverage==4.5.3
13 coverage==4.5.4
14 14
15 15 webtest==2.0.33
16 16 beautifulsoup4==4.6.3
@@ -1,107 +1,110 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import pytest
23 23
24 24 from rhodecode.model.db import User, ChangesetComment
25 25 from rhodecode.model.meta import Session
26 26 from rhodecode.model.comment import CommentsModel
27 27 from rhodecode.api.tests.utils import (
28 28 build_data, api_call, assert_error, assert_call_ok)
29 29
30 30
31 31 @pytest.fixture()
32 32 def make_repo_comments_factory(request):
33 33
34 def maker(repo):
34 class Make(object):
35
36 def make_comments(self, repo):
35 37 user = User.get_first_super_admin()
36 38 commit = repo.scm_instance()[0]
37 39
38 40 commit_id = commit.raw_id
39 41 file_0 = commit.affected_files[0]
40 42 comments = []
41 43
42 44 # general
43 45 CommentsModel().create(
44 46 text='General Comment', repo=repo, user=user, commit_id=commit_id,
45 47 comment_type=ChangesetComment.COMMENT_TYPE_NOTE, send_email=False)
46 48
47 49 # inline
48 50 CommentsModel().create(
49 51 text='Inline Comment', repo=repo, user=user, commit_id=commit_id,
50 52 f_path=file_0, line_no='n1',
51 53 comment_type=ChangesetComment.COMMENT_TYPE_NOTE, send_email=False)
52 54
53 55 # todo
54 56 CommentsModel().create(
55 57 text='INLINE TODO Comment', repo=repo, user=user, commit_id=commit_id,
56 58 f_path=file_0, line_no='n1',
57 59 comment_type=ChangesetComment.COMMENT_TYPE_TODO, send_email=False)
58 60
59 61 @request.addfinalizer
60 62 def cleanup():
61 63 for comment in comments:
62 64 Session().delete(comment)
63 return maker
65 return Make()
64 66
65 67
66 68 @pytest.mark.usefixtures("testuser_api", "app")
67 69 class TestGetRepo(object):
68 70
69 71 @pytest.mark.parametrize('filters, expected_count', [
70 72 ({}, 3),
71 73 ({'comment_type': ChangesetComment.COMMENT_TYPE_NOTE}, 2),
72 74 ({'comment_type': ChangesetComment.COMMENT_TYPE_TODO}, 1),
73 75 ({'commit_id': 'FILLED DYNAMIC'}, 3),
74 76 ])
75 77 def test_api_get_repo_comments(self, backend, user_util,
76 78 make_repo_comments_factory, filters, expected_count):
77 79 commits = [{'message': 'A'}, {'message': 'B'}]
78 80 repo = backend.create_repo(commits=commits)
79 make_repo_comments_factory(repo)
81 make_repo_comments_factory.make_comments(repo)
80 82
81 83 api_call_params = {'repoid': repo.repo_name,}
82 84 api_call_params.update(filters)
83 85
84 86 if 'commit_id' in api_call_params:
85 87 commit = repo.scm_instance()[0]
86 88 commit_id = commit.raw_id
87 89 api_call_params['commit_id'] = commit_id
88 90
89 91 id_, params = build_data(self.apikey, 'get_repo_comments', **api_call_params)
90 92 response = api_call(self.app, params)
91 93 result = assert_call_ok(id_, given=response.body)
92 94
93 95 assert len(result) == expected_count
94 96
95 def test_api_get_repo_comments_wrong_comment_typ(self, backend_hg):
97 def test_api_get_repo_comments_wrong_comment_type(
98 self, make_repo_comments_factory, backend_hg):
99 commits = [{'message': 'A'}, {'message': 'B'}]
100 repo = backend_hg.create_repo(commits=commits)
101 make_repo_comments_factory.make_comments(repo)
96 102
97 repo = backend_hg.create_repo()
98 make_repo_comments_factory(repo)
99
100 api_call_params = {'repoid': repo.repo_name,}
103 api_call_params = {'repoid': repo.repo_name}
101 104 api_call_params.update({'comment_type': 'bogus'})
102 105
103 106 expected = 'comment_type must be one of `{}` got {}'.format(
104 107 ChangesetComment.COMMENT_TYPES, 'bogus')
105 108 id_, params = build_data(self.apikey, 'get_repo_comments', **api_call_params)
106 109 response = api_call(self.app, params)
107 110 assert_error(id_, expected, given=response.body)
@@ -1,292 +1,293 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 from subprocess32 import Popen, PIPE
22 22 import os
23 23 import shutil
24 24 import sys
25 25 import tempfile
26 26
27 27 import pytest
28 28 from sqlalchemy.engine import url
29 29
30 30 from rhodecode.tests.fixture import TestINI
31 31
32 32
33 33 def _get_dbs_from_metafunc(metafunc):
34 if hasattr(metafunc.function, 'dbs'):
35 # Supported backends by this test function, created from
36 # pytest.mark.dbs
37 backends = metafunc.definition.get_closest_marker('dbs').args
34 dbs_mark = metafunc.definition.get_closest_marker('dbs')
35
36 if dbs_mark:
37 # Supported backends by this test function, created from pytest.mark.dbs
38 backends = dbs_mark.args
38 39 else:
39 40 backends = metafunc.config.getoption('--dbs')
40 41 return backends
41 42
42 43
43 44 def pytest_generate_tests(metafunc):
44 45 # Support test generation based on --dbs parameter
45 46 if 'db_backend' in metafunc.fixturenames:
46 47 requested_backends = set(metafunc.config.getoption('--dbs'))
47 48 backends = _get_dbs_from_metafunc(metafunc)
48 49 backends = requested_backends.intersection(backends)
49 50 # TODO: johbo: Disabling a backend did not work out with
50 51 # parametrization, find better way to achieve this.
51 52 if not backends:
52 53 metafunc.function._skip = True
53 54 metafunc.parametrize('db_backend_name', backends)
54 55
55 56
56 57 def pytest_collection_modifyitems(session, config, items):
57 58 remaining = [
58 59 i for i in items if not getattr(i.obj, '_skip', False)]
59 60 items[:] = remaining
60 61
61 62
62 63 @pytest.fixture()
63 64 def db_backend(
64 65 request, db_backend_name, ini_config, tmpdir_factory):
65 66 basetemp = tmpdir_factory.getbasetemp().strpath
66 67 klass = _get_backend(db_backend_name)
67 68
68 69 option_name = '--{}-connection-string'.format(db_backend_name)
69 70 connection_string = request.config.getoption(option_name) or None
70 71
71 72 return klass(
72 73 config_file=ini_config, basetemp=basetemp,
73 74 connection_string=connection_string)
74 75
75 76
76 77 def _get_backend(backend_type):
77 78 return {
78 79 'sqlite': SQLiteDBBackend,
79 80 'postgres': PostgresDBBackend,
80 81 'mysql': MySQLDBBackend,
81 82 '': EmptyDBBackend
82 83 }[backend_type]
83 84
84 85
85 86 class DBBackend(object):
86 87 _store = os.path.dirname(os.path.abspath(__file__))
87 88 _type = None
88 89 _base_ini_config = [{'app:main': {'vcs.start_server': 'false',
89 90 'startup.import_repos': 'false',
90 91 'is_test': 'False'}}]
91 92 _db_url = [{'app:main': {'sqlalchemy.db1.url': ''}}]
92 93 _base_db_name = 'rhodecode_test_db_backend'
93 94
94 95 def __init__(
95 96 self, config_file, db_name=None, basetemp=None,
96 97 connection_string=None):
97 98
98 99 from rhodecode.lib.vcs.backends.hg import largefiles_store
99 100 from rhodecode.lib.vcs.backends.git import lfs_store
100 101
101 102 self.fixture_store = os.path.join(self._store, self._type)
102 103 self.db_name = db_name or self._base_db_name
103 104 self._base_ini_file = config_file
104 105 self.stderr = ''
105 106 self.stdout = ''
106 107 self._basetemp = basetemp or tempfile.gettempdir()
107 108 self._repos_location = os.path.join(self._basetemp, 'rc_test_repos')
108 109 self._repos_hg_largefiles_store = largefiles_store(self._basetemp)
109 110 self._repos_git_lfs_store = lfs_store(self._basetemp)
110 111 self.connection_string = connection_string
111 112
112 113 @property
113 114 def connection_string(self):
114 115 return self._connection_string
115 116
116 117 @connection_string.setter
117 118 def connection_string(self, new_connection_string):
118 119 if not new_connection_string:
119 120 new_connection_string = self.get_default_connection_string()
120 121 else:
121 122 new_connection_string = new_connection_string.format(
122 123 db_name=self.db_name)
123 124 url_parts = url.make_url(new_connection_string)
124 125 self._connection_string = new_connection_string
125 126 self.user = url_parts.username
126 127 self.password = url_parts.password
127 128 self.host = url_parts.host
128 129
129 130 def get_default_connection_string(self):
130 131 raise NotImplementedError('default connection_string is required.')
131 132
132 133 def execute(self, cmd, env=None, *args):
133 134 """
134 135 Runs command on the system with given ``args``.
135 136 """
136 137
137 138 command = cmd + ' ' + ' '.join(args)
138 139 sys.stdout.write(command)
139 140
140 141 # Tell Python to use UTF-8 encoding out stdout
141 142 _env = os.environ.copy()
142 143 _env['PYTHONIOENCODING'] = 'UTF-8'
143 144 if env:
144 145 _env.update(env)
145 146 self.p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, env=_env)
146 147 self.stdout, self.stderr = self.p.communicate()
147 148 sys.stdout.write('COMMAND:'+command+'\n')
148 149 sys.stdout.write(self.stdout)
149 150 return self.stdout, self.stderr
150 151
151 152 def assert_returncode_success(self):
152 153 if not self.p.returncode == 0:
153 154 print(self.stderr)
154 155 raise AssertionError('non 0 retcode:{}'.format(self.p.returncode))
155 156
156 157 def assert_correct_output(self, stdout, version):
157 158 assert 'UPGRADE FOR STEP {} COMPLETED'.format(version) in stdout
158 159
159 160 def setup_rhodecode_db(self, ini_params=None, env=None):
160 161 if not ini_params:
161 162 ini_params = self._base_ini_config
162 163
163 164 ini_params.extend(self._db_url)
164 165 with TestINI(self._base_ini_file, ini_params,
165 166 self._type, destroy=True) as _ini_file:
166 167
167 168 if not os.path.isdir(self._repos_location):
168 169 os.makedirs(self._repos_location)
169 170 if not os.path.isdir(self._repos_hg_largefiles_store):
170 171 os.makedirs(self._repos_hg_largefiles_store)
171 172 if not os.path.isdir(self._repos_git_lfs_store):
172 173 os.makedirs(self._repos_git_lfs_store)
173 174
174 175 return self.execute(
175 176 "rc-setup-app {0} --user=marcink "
176 177 "--email=marcin@rhodeocode.com --password={1} "
177 178 "--repos={2} --force-yes".format(
178 179 _ini_file, 'qweqwe', self._repos_location), env=env)
179 180
180 181 def upgrade_database(self, ini_params=None):
181 182 if not ini_params:
182 183 ini_params = self._base_ini_config
183 184 ini_params.extend(self._db_url)
184 185
185 186 test_ini = TestINI(
186 187 self._base_ini_file, ini_params, self._type, destroy=True)
187 188 with test_ini as ini_file:
188 189 if not os.path.isdir(self._repos_location):
189 190 os.makedirs(self._repos_location)
190 191
191 192 return self.execute(
192 193 "rc-upgrade-db {0} --force-yes".format(ini_file))
193 194
194 195 def setup_db(self):
195 196 raise NotImplementedError
196 197
197 198 def teardown_db(self):
198 199 raise NotImplementedError
199 200
200 201 def import_dump(self, dumpname):
201 202 raise NotImplementedError
202 203
203 204
204 205 class EmptyDBBackend(DBBackend):
205 206 _type = ''
206 207
207 208 def setup_db(self):
208 209 pass
209 210
210 211 def teardown_db(self):
211 212 pass
212 213
213 214 def import_dump(self, dumpname):
214 215 pass
215 216
216 217 def assert_returncode_success(self):
217 218 assert True
218 219
219 220
220 221 class SQLiteDBBackend(DBBackend):
221 222 _type = 'sqlite'
222 223
223 224 def get_default_connection_string(self):
224 225 return 'sqlite:///{}/{}.sqlite'.format(self._basetemp, self.db_name)
225 226
226 227 def setup_db(self):
227 228 # dump schema for tests
228 229 # cp -v $TEST_DB_NAME
229 230 self._db_url = [{'app:main': {
230 231 'sqlalchemy.db1.url': self.connection_string}}]
231 232
232 233 def import_dump(self, dumpname):
233 234 dump = os.path.join(self.fixture_store, dumpname)
234 235 target = os.path.join(self._basetemp, '{0.db_name}.sqlite'.format(self))
235 236 return self.execute('cp -v {} {}'.format(dump, target))
236 237
237 238 def teardown_db(self):
238 239 return self.execute("rm -rf {}.sqlite".format(
239 240 os.path.join(self._basetemp, self.db_name)))
240 241
241 242
242 243 class MySQLDBBackend(DBBackend):
243 244 _type = 'mysql'
244 245
245 246 def get_default_connection_string(self):
246 247 return 'mysql://root:qweqwe@127.0.0.1/{}'.format(self.db_name)
247 248
248 249 def setup_db(self):
249 250 # dump schema for tests
250 251 # mysqldump -uroot -pqweqwe $TEST_DB_NAME
251 252 self._db_url = [{'app:main': {
252 253 'sqlalchemy.db1.url': self.connection_string}}]
253 254 return self.execute("mysql -v -u{} -p{} -e 'create database '{}';'".format(
254 255 self.user, self.password, self.db_name))
255 256
256 257 def import_dump(self, dumpname):
257 258 dump = os.path.join(self.fixture_store, dumpname)
258 259 return self.execute("mysql -u{} -p{} {} < {}".format(
259 260 self.user, self.password, self.db_name, dump))
260 261
261 262 def teardown_db(self):
262 263 return self.execute("mysql -v -u{} -p{} -e 'drop database '{}';'".format(
263 264 self.user, self.password, self.db_name))
264 265
265 266
266 267 class PostgresDBBackend(DBBackend):
267 268 _type = 'postgres'
268 269
269 270 def get_default_connection_string(self):
270 271 return 'postgresql://postgres:qweqwe@localhost/{}'.format(self.db_name)
271 272
272 273 def setup_db(self):
273 274 # dump schema for tests
274 275 # pg_dump -U postgres -h localhost $TEST_DB_NAME
275 276 self._db_url = [{'app:main': {
276 277 'sqlalchemy.db1.url':
277 278 self.connection_string}}]
278 279 return self.execute("PGPASSWORD={} psql -U {} -h localhost "
279 280 "-c 'create database '{}';'".format(
280 281 self.password, self.user, self.db_name))
281 282
282 283 def teardown_db(self):
283 284 return self.execute("PGPASSWORD={} psql -U {} -h localhost "
284 285 "-c 'drop database if exists '{}';'".format(
285 286 self.password, self.user, self.db_name))
286 287
287 288 def import_dump(self, dumpname):
288 289 dump = os.path.join(self.fixture_store, dumpname)
289 290 return self.execute(
290 291 "PGPASSWORD={} psql -U {} -h localhost -d {} -1 "
291 292 "-f {}".format(
292 293 self.password, self.user, self.db_name, dump))
@@ -1,195 +1,189 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 import os
22 import stat
23 import sys
24
25 21 import pytest
26 22 from mock import Mock, patch, DEFAULT
27 23
28 24 import rhodecode
29 25 from rhodecode.model import db, scm
30 from rhodecode.tests import no_newline_id_generator
31 26
32 27
33 28 def test_scm_instance_config(backend):
34 29 repo = backend.create_repo()
35 30 with patch.multiple('rhodecode.model.db.Repository',
36 31 _get_instance=DEFAULT,
37 32 _get_instance_cached=DEFAULT) as mocks:
33
38 34 repo.scm_instance()
39 35 mocks['_get_instance'].assert_called_with(
40 36 config=None, cache=False)
41 37
42 config = {'some': 'value'}
43 repo.scm_instance(config=config)
38 repo.scm_instance(vcs_full_cache=False)
44 39 mocks['_get_instance'].assert_called_with(
45 config=config, cache=False)
40 config=None, cache=False)
46 41
47 with patch.dict(rhodecode.CONFIG, {'vcs_full_cache': 'true'}):
48 repo.scm_instance(config=config)
42 repo.scm_instance(vcs_full_cache=True)
49 43 mocks['_get_instance_cached'].assert_called()
50 44
51 45
52 46 def test_get_instance_config(backend):
53 47 repo = backend.create_repo()
54 48 vcs_class = Mock()
55 49 with patch.multiple('rhodecode.lib.vcs.backends',
56 50 get_scm=DEFAULT,
57 51 get_backend=DEFAULT) as mocks:
58 52 mocks['get_scm'].return_value = backend.alias
59 53 mocks['get_backend'].return_value = vcs_class
60 54 with patch('rhodecode.model.db.Repository._config') as config_mock:
61 55 repo._get_instance()
62 56 vcs_class.assert_called_with(
63 57 repo_path=repo.repo_full_path, config=config_mock,
64 58 create=False, with_wire={'cache': True, 'repo_state_uid': None})
65 59
66 60 new_config = {'override': 'old_config'}
67 61 repo._get_instance(config=new_config)
68 62 vcs_class.assert_called_with(
69 63 repo_path=repo.repo_full_path, config=new_config, create=False,
70 64 with_wire={'cache': True, 'repo_state_uid': None})
71 65
72 66
73 67 def test_mark_for_invalidation_config(backend):
74 68 repo = backend.create_repo()
75 69 with patch('rhodecode.model.db.Repository.update_commit_cache') as _mock:
76 70 scm.ScmModel().mark_for_invalidation(repo.repo_name)
77 71 _, kwargs = _mock.call_args
78 72 assert kwargs['config'].__dict__ == repo._config.__dict__
79 73
80 74
81 75 def test_mark_for_invalidation_with_delete_updates_last_commit(backend):
82 76 commits = [{'message': 'A'}, {'message': 'B'}]
83 77 repo = backend.create_repo(commits=commits)
84 78 scm.ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
85 79 assert repo.changeset_cache['revision'] == 1
86 80
87 81
88 82 def test_mark_for_invalidation_with_delete_updates_last_commit_empty(backend):
89 83 repo = backend.create_repo()
90 84 scm.ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
91 85 assert repo.changeset_cache['revision'] == -1
92 86
93 87
94 88 def test_strip_with_multiple_heads(backend_hg):
95 89 commits = [
96 90 {'message': 'A'},
97 91 {'message': 'a'},
98 92 {'message': 'b'},
99 93 {'message': 'B', 'parents': ['A']},
100 94 {'message': 'a1'},
101 95 ]
102 96 repo = backend_hg.create_repo(commits=commits)
103 97 commit_ids = backend_hg.commit_ids
104 98
105 99 model = scm.ScmModel()
106 100 model.strip(repo, commit_ids['b'], branch=None)
107 101
108 102 vcs_repo = repo.scm_instance()
109 103 rest_commit_ids = [c.raw_id for c in vcs_repo.get_commits()]
110 104 assert len(rest_commit_ids) == 4
111 105 assert commit_ids['b'] not in rest_commit_ids
112 106
113 107
114 108 def test_strip_with_single_heads(backend_hg):
115 109 commits = [
116 110 {'message': 'A'},
117 111 {'message': 'a'},
118 112 {'message': 'b'},
119 113 ]
120 114 repo = backend_hg.create_repo(commits=commits)
121 115 commit_ids = backend_hg.commit_ids
122 116
123 117 model = scm.ScmModel()
124 118 model.strip(repo, commit_ids['b'], branch=None)
125 119
126 120 vcs_repo = repo.scm_instance()
127 121 rest_commit_ids = [c.raw_id for c in vcs_repo.get_commits()]
128 122 assert len(rest_commit_ids) == 2
129 123 assert commit_ids['b'] not in rest_commit_ids
130 124
131 125
132 126 def test_get_nodes_returns_unicode_flat(backend):
133 127 repo = backend.repo
134 128 commit_id = repo.get_commit(commit_idx=0).raw_id
135 129 directories, files = scm.ScmModel().get_nodes(repo.repo_name, commit_id, flat=True)
136 130 assert_contains_only_unicode(directories)
137 131 assert_contains_only_unicode(files)
138 132
139 133
140 134 def test_get_nodes_returns_unicode_non_flat(backend):
141 135 repo = backend.repo
142 136 commit_id = repo.get_commit(commit_idx=0).raw_id
143 137
144 138 directories, files = scm.ScmModel().get_nodes(repo.repo_name, commit_id, flat=False)
145 139 # johbo: Checking only the names for now, since that is the critical
146 140 # part.
147 141 assert_contains_only_unicode([d['name'] for d in directories])
148 142 assert_contains_only_unicode([f['name'] for f in files])
149 143
150 144
151 145 def test_get_nodes_max_file_bytes(backend_random):
152 146 repo = backend_random.repo
153 147 max_file_bytes = 10
154 148 directories, files = scm.ScmModel().get_nodes(
155 149 repo.repo_name, repo.get_commit(commit_idx=0).raw_id, content=True,
156 150 extended_info=True, flat=False)
157 151 assert any(file['content'] and len(file['content']) > max_file_bytes
158 152 for file in files)
159 153
160 154 directories, files = scm.ScmModel().get_nodes(
161 155 repo.repo_name, repo.get_commit(commit_idx=0).raw_id, content=True,
162 156 extended_info=True, flat=False, max_file_bytes=max_file_bytes)
163 157 assert all(
164 158 file['content'] is None if file['size'] > max_file_bytes else True
165 159 for file in files)
166 160
167 161
168 162 def assert_contains_only_unicode(structure):
169 163 assert structure
170 164 for value in structure:
171 165 assert isinstance(value, unicode)
172 166
173 167
174 168 @pytest.mark.backends("hg", "git")
175 169 def test_get_non_unicode_reference(backend):
176 170 model = scm.ScmModel()
177 171 non_unicode_list = ["AdΔ±nΔ±".decode("cp1254")]
178 172
179 173 def scm_instance():
180 174 return Mock(
181 175 branches=non_unicode_list, bookmarks=non_unicode_list,
182 176 tags=non_unicode_list, alias=backend.alias)
183 177
184 178 repo = Mock(__class__=db.Repository, scm_instance=scm_instance)
185 179 choices, __ = model.get_repo_landing_revs(translator=lambda s: s, repo=repo)
186 180 if backend.alias == 'hg':
187 181 valid_choices = [
188 182 'rev:tip', u'branch:Ad\xc4\xb1n\xc4\xb1',
189 183 u'book:Ad\xc4\xb1n\xc4\xb1', u'tag:Ad\xc4\xb1n\xc4\xb1']
190 184 else:
191 185 valid_choices = [
192 186 'rev:tip', u'branch:Ad\xc4\xb1n\xc4\xb1',
193 187 u'tag:Ad\xc4\xb1n\xc4\xb1']
194 188
195 189 assert choices == valid_choices
@@ -1,1826 +1,1817 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import collections
22 22 import datetime
23 23 import hashlib
24 24 import os
25 25 import re
26 26 import pprint
27 27 import shutil
28 28 import socket
29 29 import subprocess32
30 30 import time
31 31 import uuid
32 32 import dateutil.tz
33 import functools
34 33
35 34 import mock
36 35 import pyramid.testing
37 36 import pytest
38 37 import colander
39 38 import requests
40 39 import pyramid.paster
41 40
42 41 import rhodecode
43 42 from rhodecode.lib.utils2 import AttributeDict
44 43 from rhodecode.model.changeset_status import ChangesetStatusModel
45 44 from rhodecode.model.comment import CommentsModel
46 45 from rhodecode.model.db import (
47 46 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
48 47 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
49 48 from rhodecode.model.meta import Session
50 49 from rhodecode.model.pull_request import PullRequestModel
51 50 from rhodecode.model.repo import RepoModel
52 51 from rhodecode.model.repo_group import RepoGroupModel
53 52 from rhodecode.model.user import UserModel
54 53 from rhodecode.model.settings import VcsSettingsModel
55 54 from rhodecode.model.user_group import UserGroupModel
56 55 from rhodecode.model.integration import IntegrationModel
57 56 from rhodecode.integrations import integration_type_registry
58 57 from rhodecode.integrations.types.base import IntegrationTypeBase
59 58 from rhodecode.lib.utils import repo2db_mapper
60 59 from rhodecode.lib.vcs.backends import get_backend
61 60 from rhodecode.lib.vcs.nodes import FileNode
62 61 from rhodecode.tests import (
63 62 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
64 63 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
65 64 TEST_USER_REGULAR_PASS)
66 65 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
67 66 from rhodecode.tests.fixture import Fixture
68 67 from rhodecode.config import utils as config_utils
69 68
69
70 70 def _split_comma(value):
71 71 return value.split(',')
72 72
73 73
74 74 def pytest_addoption(parser):
75 75 parser.addoption(
76 76 '--keep-tmp-path', action='store_true',
77 77 help="Keep the test temporary directories")
78 78 parser.addoption(
79 79 '--backends', action='store', type=_split_comma,
80 80 default=['git', 'hg', 'svn'],
81 81 help="Select which backends to test for backend specific tests.")
82 82 parser.addoption(
83 83 '--dbs', action='store', type=_split_comma,
84 84 default=['sqlite'],
85 85 help="Select which database to test for database specific tests. "
86 86 "Possible options are sqlite,postgres,mysql")
87 87 parser.addoption(
88 88 '--appenlight', '--ae', action='store_true',
89 89 help="Track statistics in appenlight.")
90 90 parser.addoption(
91 91 '--appenlight-api-key', '--ae-key',
92 92 help="API key for Appenlight.")
93 93 parser.addoption(
94 94 '--appenlight-url', '--ae-url',
95 95 default="https://ae.rhodecode.com",
96 96 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
97 97 parser.addoption(
98 98 '--sqlite-connection-string', action='store',
99 99 default='', help="Connection string for the dbs tests with SQLite")
100 100 parser.addoption(
101 101 '--postgres-connection-string', action='store',
102 102 default='', help="Connection string for the dbs tests with Postgres")
103 103 parser.addoption(
104 104 '--mysql-connection-string', action='store',
105 105 default='', help="Connection string for the dbs tests with MySQL")
106 106 parser.addoption(
107 107 '--repeat', type=int, default=100,
108 108 help="Number of repetitions in performance tests.")
109 109
110 110
111 111 def pytest_configure(config):
112 112 from rhodecode.config import patches
113 113
114 114
115 115 def pytest_collection_modifyitems(session, config, items):
116 116 # nottest marked, compare nose, used for transition from nose to pytest
117 117 remaining = [
118 118 i for i in items if getattr(i.obj, '__test__', True)]
119 119 items[:] = remaining
120 120
121 121
122 122 def pytest_generate_tests(metafunc):
123
123 124 # Support test generation based on --backend parameter
124 125 if 'backend_alias' in metafunc.fixturenames:
125 126 backends = get_backends_from_metafunc(metafunc)
126 127 scope = None
127 128 if not backends:
128 129 pytest.skip("Not enabled for any of selected backends")
130
129 131 metafunc.parametrize('backend_alias', backends, scope=scope)
130 elif hasattr(metafunc.function, 'backends'):
132
133 backend_mark = metafunc.definition.get_closest_marker('backends')
134 if backend_mark:
131 135 backends = get_backends_from_metafunc(metafunc)
132 136 if not backends:
133 137 pytest.skip("Not enabled for any of selected backends")
134 138
135 139
136 140 def get_backends_from_metafunc(metafunc):
137 141 requested_backends = set(metafunc.config.getoption('--backends'))
138 if hasattr(metafunc.function, 'backends'):
142 backend_mark = metafunc.definition.get_closest_marker('backends')
143 if backend_mark:
139 144 # Supported backends by this test function, created from
140 145 # pytest.mark.backends
141 backends = metafunc.definition.get_closest_marker('backends').args
146 backends = backend_mark.args
142 147 elif hasattr(metafunc.cls, 'backend_alias'):
143 148 # Support class attribute "backend_alias", this is mainly
144 149 # for legacy reasons for tests not yet using pytest.mark.backends
145 150 backends = [metafunc.cls.backend_alias]
146 151 else:
147 152 backends = metafunc.config.getoption('--backends')
148 153 return requested_backends.intersection(backends)
149 154
150 155
151 156 @pytest.fixture(scope='session', autouse=True)
152 157 def activate_example_rcextensions(request):
153 158 """
154 159 Patch in an example rcextensions module which verifies passed in kwargs.
155 160 """
156 161 from rhodecode.config import rcextensions
157 162
158 163 old_extensions = rhodecode.EXTENSIONS
159 164 rhodecode.EXTENSIONS = rcextensions
160 165 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
161 166
162 167 @request.addfinalizer
163 168 def cleanup():
164 169 rhodecode.EXTENSIONS = old_extensions
165 170
166 171
167 172 @pytest.fixture()
168 173 def capture_rcextensions():
169 174 """
170 175 Returns the recorded calls to entry points in rcextensions.
171 176 """
172 177 calls = rhodecode.EXTENSIONS.calls
173 178 calls.clear()
174 179 # Note: At this moment, it is still the empty dict, but that will
175 180 # be filled during the test run and since it is a reference this
176 181 # is enough to make it work.
177 182 return calls
178 183
179 184
180 185 @pytest.fixture(scope='session')
181 186 def http_environ_session():
182 187 """
183 188 Allow to use "http_environ" in session scope.
184 189 """
185 190 return plain_http_environ()
186 191
187 192
def plain_http_host_stub():
    """
    Host and port used as HTTP_HOST in the test run.

    Plain function variant, usable outside of fixture scope.
    """
    return 'example.com:80'
193 198
194 199
@pytest.fixture()
def http_host_stub():
    """
    Value of HTTP_HOST in the test run.
    """
    # Fixture wrapper around the plain function.
    return plain_http_host_stub()
201 206
202 207
def plain_http_host_only_stub():
    """
    Host name (without the port) used as HTTP_HOST in the test run.
    """
    host, _, _port = plain_http_host_stub().partition(':')
    return host
208 213
209 214
@pytest.fixture()
def http_host_only_stub():
    """
    Value of HTTP_HOST in the test run.
    """
    # Fixture wrapper around the plain function.
    return plain_http_host_only_stub()
216 221
217 222
def plain_http_environ():
    """
    HTTP extra environ keys.

    User by the test application and as well for setting up the pylons
    environment. In the case of the fixture "app" it should be possible
    to override this for a specific test case.
    """
    host = plain_http_host_stub()
    return {
        'SERVER_NAME': plain_http_host_only_stub(),
        'SERVER_PORT': host.split(':')[1],
        'HTTP_HOST': host,
        'HTTP_USER_AGENT': 'rc-test-agent',
        'REQUEST_METHOD': 'GET'
    }
233 238
234 239
@pytest.fixture()
def http_environ():
    """
    HTTP extra environ keys.

    User by the test application and as well for setting up the pylons
    environment. In the case of the fixture "app" it should be possible
    to override this for a specific test case.
    """
    # Fixture wrapper around the plain function.
    return plain_http_environ()
245 250
246 251
@pytest.fixture(scope='session')
def baseapp(ini_config, vcsserver, http_environ_session):
    """
    Session wide pyramid WSGI application built from the test ini file.
    """
    from rhodecode.config.middleware import make_pyramid_app
    from rhodecode.lib.pyramid_utils import get_app_config

    print("Using the RhodeCode configuration:{}".format(ini_config))
    pyramid.paster.setup_logging(ini_config)

    app_settings = get_app_config(ini_config)
    return make_pyramid_app({'__file__': ini_config}, **app_settings)
259 264
260 265
@pytest.fixture(scope='function')
def app(request, config_stub, baseapp, http_environ):
    """
    Function scoped WebTest wrapper around the session wide ``baseapp``.
    """
    test_app = CustomTestApp(baseapp, extra_environ=http_environ)
    if request.cls:
        # Also expose the app on the test class for the classic
        # TestController style tests.
        request.cls.app = test_app
    return test_app
269 274
270 275
@pytest.fixture(scope='session')
def app_settings(baseapp, ini_config):
    """
    Settings dictionary used to create the app.

    Parses the ini file and passes the result through the sanitize and apply
    defaults mechanism in `rhodecode.config.middleware`.
    """
    settings = baseapp.config.get_settings()
    return settings
280 285
281 286
@pytest.fixture(scope='session')
def db_connection(ini_settings):
    # Initialize the database connection.
    # Session scoped so it happens exactly once per test run.
    config_utils.initialize_database(ini_settings)
286 291
287 292
# Result of the autologin fixtures: the CSRF token plus the logged-in user.
LoginData = collections.namedtuple('LoginData', ['csrf_token', 'user'])
289 294
290 295
def _autologin_user(app, *args):
    """
    Log in through the web session and return the token/user as LoginData.
    """
    session = login_user_session(app, *args)
    token = rhodecode.lib.auth.get_csrf_token(session)
    return LoginData(token, session['rhodecode_user'])
295 300
296 301
@pytest.fixture()
def autologin_user(app):
    """
    Utility fixture which makes sure that the admin user is logged in
    """
    login_data = _autologin_user(app)
    return login_data
303 308
304 309
@pytest.fixture()
def autologin_regular_user(app):
    """
    Utility fixture which makes sure that the regular user is logged in
    """
    login_data = _autologin_user(
        app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
    return login_data
312 317
313 318
@pytest.fixture(scope='function')
def csrf_token(request, autologin_user):
    # CSRF token of the auto-logged-in admin user.
    return autologin_user.csrf_token
317 322
318 323
@pytest.fixture(scope='function')
def xhr_header(request):
    # Extra headers which make a request look like an XMLHttpRequest.
    return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
322 327
323 328
@pytest.fixture()
def real_crypto_backend(monkeypatch):
    """
    Switch the production crypto backend on for this test.

    During the test run the crypto backend is replaced with a faster
    implementation based on the MD5 algorithm.
    """
    # `is_test=False` makes the app pick the production implementation.
    monkeypatch.setattr(rhodecode, 'is_test', False)
333 338
334 339
@pytest.fixture(scope='class')
def index_location(request, baseapp):
    """
    Path of the full text search index, taken from the app settings.
    """
    location = baseapp.config.get_settings()['search.location']
    if request.cls:
        # Expose on the class for classic TestController style tests.
        request.cls.index_location = location
    return location
341 346
342 347
@pytest.fixture(scope='session', autouse=True)
def tests_tmp_path(request):
    """
    Create temporary directory to be used during the test session.
    """
    if not os.path.exists(TESTS_TMP_PATH):
        os.makedirs(TESTS_TMP_PATH)

    if not request.config.getoption('--keep-tmp-path'):
        def remove_tmp_path():
            shutil.rmtree(TESTS_TMP_PATH)
        request.addfinalizer(remove_tmp_path)

    return TESTS_TMP_PATH
357 362
358 363
@pytest.fixture()
def test_repo_group(request):
    """
    Create a temporary repository group, and destroy it after
    usage automatically
    """
    fixture = Fixture()
    repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
    repo_group = fixture.create_repo_group(repogroupid)

    request.addfinalizer(lambda: fixture.destroy_repo_group(repogroupid))
    return repo_group
374 379
375 380
@pytest.fixture()
def test_user_group(request):
    """
    Create a temporary user group, and destroy it after
    usage automatically
    """
    fixture = Fixture()
    usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
    user_group = fixture.create_user_group(usergroupid)

    request.addfinalizer(lambda: fixture.destroy_user_group(user_group))
    return user_group
391 396
392 397
@pytest.fixture(scope='session')
def test_repo(request):
    # Session scoped container of shared, read-only test repositories.
    container = TestRepoContainer()
    request.addfinalizer(container._cleanup)
    return container
398 403
399 404
class TestRepoContainer(object):
    """
    Container for test repositories which are used read only.

    Repositories will be created on demand and re-used during the lifetime
    of this object.

    Usage to get the svn test repository "minimal"::

        test_repo = TestContainer()
        repo = test_repo('minimal', 'svn')

    """

    # Maps backend alias -> helper that extracts a repository dump to disk.
    dump_extractors = {
        'git': utils.extract_git_repo_from_dump,
        'hg': utils.extract_hg_repo_from_dump,
        'svn': utils.extract_svn_repo_from_dump,
    }

    def __init__(self):
        self._cleanup_repos = []
        self._fixture = Fixture()
        self._repos = {}

    def __call__(self, dump_name, backend_alias, config=None):
        # Lazily create the repo on first access, then serve it from the
        # cache. Only the repo_id is cached so a fresh db object is
        # returned on every call.
        key = (dump_name, backend_alias)
        if key not in self._repos:
            repo = self._create_repo(dump_name, backend_alias, config)
            self._repos[key] = repo.repo_id
        return Repository.get(self._repos[key])

    def _create_repo(self, dump_name, backend_alias, config):
        # Extract the dump to disk, then register the repository in the
        # database via repo2db_mapper.
        repo_name = '%s-%s' % (backend_alias, dump_name)
        backend = get_backend(backend_alias)
        dump_extractor = self.dump_extractors[backend_alias]
        repo_path = dump_extractor(dump_name, repo_name)

        vcs_repo = backend(repo_path, config=config)
        repo2db_mapper({repo_name: vcs_repo})

        repo = RepoModel().get_by_repo_name(repo_name)
        self._cleanup_repos.append(repo_name)
        return repo

    def _cleanup(self):
        # Destroy in reverse creation order.
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)
448 453
449 454
def backend_base(request, backend_alias, baseapp, test_repo):
    """
    Build a ``Backend`` helper for ``backend_alias``, honouring the
    ``--backends`` selection as well as skip/xfail backend markers.
    """
    if backend_alias not in request.config.getoption('--backends'):
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    backend = Backend(
        alias=backend_alias,
        repo_name='vcs_test_%s' % (backend_alias, ),
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(backend.cleanup)
    return backend
465 470
466 471
@pytest.fixture()
def backend(request, backend_alias, baseapp, test_repo):
    """
    Parametrized fixture which represents a single backend implementation.

    It respects the option `--backends` to focus the test run on specific
    backend implementations.

    It also supports `pytest.mark.xfail_backends` to mark tests as failing
    for specific backends. This is intended as a utility for incremental
    development of a new backend implementation.
    """
    return backend_base(request, backend_alias, baseapp, test_repo)
480 485
481 486
@pytest.fixture()
def backend_git(request, baseapp, test_repo):
    # Non-parametrized shortcut: always the git backend.
    return backend_base(request, 'git', baseapp, test_repo)
485 490
486 491
@pytest.fixture()
def backend_hg(request, baseapp, test_repo):
    # Non-parametrized shortcut: always the hg backend.
    return backend_base(request, 'hg', baseapp, test_repo)
490 495
491 496
@pytest.fixture()
def backend_svn(request, baseapp, test_repo):
    # Non-parametrized shortcut: always the svn backend.
    return backend_base(request, 'svn', baseapp, test_repo)
495 500
496 501
@pytest.fixture()
def backend_random(backend_git):
    """
    Use this to express that your tests need "a backend".

    A few of our tests need a backend, so that we can run the code. This
    fixture is intended to be used for such cases. It will pick one of the
    backends and run the tests.

    The fixture `backend` would run the test multiple times for each
    available backend which is a pure waste of time if the test is
    independent of the backend type.
    """
    # TODO: johbo: Change this to pick a random backend
    return backend_git
512 517
513 518
@pytest.fixture()
def backend_stub(backend_git):
    """
    Use this to express that your tests need a backend stub

    TODO: mikhail: Implement a real stub logic instead of returning
    a git backend
    """
    return backend_git
523 528
524 529
@pytest.fixture()
def repo_stub(backend_stub):
    """
    Use this to express that your tests need a repository stub
    """
    return backend_stub.create_repo()
531 536
532 537
class Backend(object):
    """
    Represents the test configuration for one supported backend

    Provides easy access to different test repositories based on
    `__getitem__`. Such repositories will only be created once per test
    session.
    """

    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
    # NOTE(review): class level defaults; instances rebind these attributes
    # (in `create_master_repo` / `_add_commits_to_repo`), but until they do,
    # the values are shared across all instances.
    _master_repo = None
    _commit_ids = {}

    def __init__(self, alias, repo_name, test_name, test_repo_container):
        self.alias = alias
        self.repo_name = repo_name
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container
        # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
        # Fixture will survive in the end.
        self._fixture = Fixture()

    def __getitem__(self, key):
        # Read-only access to the shared, session scoped test repositories.
        return self._test_repo_container(key, self.alias)

    def create_test_repo(self, key, config=None):
        # Same as `__getitem__`, but allows passing a custom vcs config.
        return self._test_repo_container(key, self.alias, config)

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo or the
        last repo which has been created with `create_repo`.
        """
        from rhodecode.model.db import Repository
        return Repository.get_by_repo_name(self.repo_name)

    @property
    def default_branch_name(self):
        # Default branch name of the underlying vcs backend class.
        VcsRepository = get_backend(self.alias)
        return VcsRepository.DEFAULT_BRANCH_NAME

    @property
    def default_head_id(self):
        """
        Returns the default head id of the underlying backend.

        This will be the default branch name in case the backend does have a
        default branch. In the other cases it will point to a valid head
        which can serve as the base to create a new commit on top of it.
        """
        vcsrepo = self.repo.scm_instance()
        head_id = (
            vcsrepo.DEFAULT_BRANCH_NAME or
            vcsrepo.commit_ids[-1])
        return head_id

    @property
    def commit_ids(self):
        """
        Returns the list of commits for the last created repository
        """
        return self._commit_ids

    def create_master_repo(self, commits):
        """
        Create a repository and remember it as a template.

        This allows to easily create derived repositories to construct
        more complex scenarios for diff, compare and pull requests.

        Returns a commit map which maps from commit message to raw_id.
        """
        self._master_repo = self.create_repo(commits=commits)
        return self._commit_ids

    def create_repo(
            self, commits=None, number_of_commits=0, heads=None,
            name_suffix=u'', bare=False, **kwargs):
        """
        Create a repository and record it for later cleanup.

        :param commits: Optional. A sequence of dict instances.
            Will add a commit per entry to the new repository.
        :param number_of_commits: Optional. If set to a number, this number of
            commits will be added to the new repository.
        :param heads: Optional. Can be set to a sequence of of commit
            names which shall be pulled in from the master repository.
        :param name_suffix: adds special suffix to generated repo name
        :param bare: set a repo as bare (no checkout)
        """
        self.repo_name = self._next_repo_name() + name_suffix
        repo = self._fixture.create_repo(
            self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
        self._cleanup_repos.append(repo.repo_name)

        commits = commits or [
            {'message': 'Commit %s of %s' % (x, self.repo_name)}
            for x in range(number_of_commits)]
        vcs_repo = repo.scm_instance()
        vcs_repo.count()
        self._add_commits_to_repo(vcs_repo, commits)
        if heads:
            self.pull_heads(repo, heads)

        return repo

    def pull_heads(self, repo, heads):
        """
        Make sure that repo contains all commits mentioned in `heads`
        """
        vcsmaster = self._master_repo.scm_instance()
        vcsrepo = repo.scm_instance()
        # Disable hooks so the pull does not trigger extension callbacks.
        vcsrepo.config.clear_section('hooks')
        commit_ids = [self._commit_ids[h] for h in heads]
        vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)

    def create_fork(self):
        # Fork the "current" repository; the fork becomes the current one.
        repo_to_fork = self.repo_name
        self.repo_name = self._next_repo_name()
        repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
        self._cleanup_repos.append(self.repo_name)
        return repo

    def new_repo_name(self, suffix=u''):
        # Reserve a fresh repository name and schedule it for cleanup.
        self.repo_name = self._next_repo_name() + suffix
        self._cleanup_repos.append(self.repo_name)
        return self.repo_name

    def _next_repo_name(self):
        # Unique, filesystem safe name based on test name plus a counter.
        return u"%s_%s" % (
            self.invalid_repo_name.sub(u'_', self._test_name), len(self._cleanup_repos))

    def ensure_file(self, filename, content='Test content\n'):
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        commits = [
            {'added': [
                FileNode(filename, content=content),
            ]},
        ]
        self._add_commits_to_repo(self.repo.scm_instance(), commits)

    def enable_downloads(self):
        repo = self.repo
        repo.enable_downloads = True
        Session().add(repo)
        Session().commit()

    def cleanup(self):
        # Destroy in reverse creation order, forks before their origin.
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)

    def _add_commits_to_repo(self, repo, commits):
        commit_ids = _add_commits_to_repo(repo, commits)
        if not commit_ids:
            return
        self._commit_ids = commit_ids

        # Creating refs for Git to allow fetching them from remote repository
        if self.alias == 'git':
            refs = {}
            for message in self._commit_ids:
                # TODO: mikhail: do more special chars replacements
                ref_name = 'refs/test-refs/{}'.format(
                    message.replace(' ', ''))
                refs[ref_name] = self._commit_ids[message]
            self._create_refs(repo, refs)

    def _create_refs(self, repo, refs):
        for ref_name in refs:
            repo.set_refs(ref_name, refs[ref_name])
705 710
706 711
def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    """
    Build a ``VcsBackend`` helper for ``backend_alias``, honouring the
    ``--backends`` selection as well as skip/xfail backend markers.
    """
    if backend_alias not in request.config.getoption('--backends'):
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = 'vcs_test_%s' % (backend_alias, )
    vcs_backend = VcsBackend(
        alias=backend_alias,
        repo_path=os.path.join(tests_tmp_path, repo_name),
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(vcs_backend.cleanup)
    return vcs_backend
723 728
724 729
@pytest.fixture()
def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    """
    Parametrized fixture which represents a single vcs backend implementation.

    See the fixture `backend` for more details. This one implements the same
    concept, but on vcs level. So it does not provide model instances etc.

    Parameters are generated dynamically, see :func:`pytest_generate_tests`
    for how this works.
    """
    return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
737 742
738 743
@pytest.fixture()
def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
    # Non-parametrized shortcut: always the git vcs backend.
    return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
742 747
743 748
@pytest.fixture()
def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
    # Non-parametrized shortcut: always the hg vcs backend.
    return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
747 752
748 753
@pytest.fixture()
def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
    # Non-parametrized shortcut: always the svn vcs backend.
    return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
752 757
753 758
@pytest.fixture()
def vcsbackend_stub(vcsbackend_git):
    """
    Use this to express that your test just needs a stub of a vcsbackend.

    Plan is to eventually implement an in-memory stub to speed tests up.
    """
    return vcsbackend_git
762 767
763 768
class VcsBackend(object):
    """
    Represents the test configuration for one supported vcs backend.
    """

    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')

    def __init__(self, alias, repo_path, test_name, test_repo_container):
        self.alias = alias
        self._repo_path = repo_path
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container

    def __getitem__(self, key):
        # Read-only access to the shared test repositories, as vcs instances.
        return self._test_repo_container(key, self.alias).scm_instance()

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo of the last
        repo which has been created.
        """
        Repository = get_backend(self.alias)
        return Repository(self._repo_path)

    @property
    def backend(self):
        """
        Returns the backend implementation class.
        """
        return get_backend(self.alias)

    def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
                    bare=False):
        """
        Create a new vcs repository and record it for later cleanup.

        :param commits: Optional sequence of commit dicts, see
            :func:`_add_commits_to_repo` for the supported keys.
        :param number_of_commits: number of auto-generated commits to add
            when ``commits`` is not given.
        :param _clone_repo: optional repository instance to clone from.
        :param bare: create the repository as bare (no checkout).
        """
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        repo_class = get_backend(self.alias)
        src_url = None
        if _clone_repo:
            src_url = _clone_repo.path
        repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
        self._cleanup_repos.append(repo)

        # Use `range` (not the py2-only `xrange`) for consistency with
        # `Backend.create_repo`; behavior for iteration is identical.
        commits = commits or [
            {'message': 'Commit %s of %s' % (x, repo_name)}
            for x in range(number_of_commits)]
        _add_commits_to_repo(repo, commits)
        return repo

    def clone_repo(self, repo):
        return self.create_repo(_clone_repo=repo)

    def cleanup(self):
        for repo in self._cleanup_repos:
            shutil.rmtree(repo.path)

    def new_repo_path(self):
        # Reserve a fresh directory and make it the "current" repo path.
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        return self._repo_path

    def _next_repo_name(self):
        # Unique, filesystem safe name based on test name plus a counter.
        return "%s_%s" % (
            self.invalid_repo_name.sub('_', self._test_name),
            len(self._cleanup_repos))

    def add_file(self, repo, filename, content='Test content\n'):
        imc = repo.in_memory_commit
        imc.add(FileNode(filename, content=content))
        imc.commit(
            message=u'Automatic commit from vcsbackend fixture',
            author=u'Automatic <automatic@rhodecode.com>')

    def ensure_file(self, filename, content='Test content\n'):
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        self.add_file(self.repo, filename, content)
841 846
842 847
def _add_commits_to_repo(vcs_repo, commits):
    """
    Create the given commits in ``vcs_repo`` via its in-memory commit API.

    :param vcs_repo: vcs repository instance providing ``in_memory_commit``.
    :param commits: sequence of dicts describing commits; supported keys are
        ``message``, ``added``, ``changed``, ``removed``, ``parents``,
        ``author``, ``date`` and ``branch``.
    :return: dict mapping commit message -> raw commit id.
    """
    commit_ids = {}
    if not commits:
        return commit_ids

    imc = vcs_repo.in_memory_commit

    # NOTE: renamed loop variable to `commit_data` so it is not shadowed by
    # the created commit object below; also dropped a dead `commit = None`
    # pre-assignment (the early return above covers the empty case).
    for idx, commit_data in enumerate(commits):
        message = unicode(commit_data.get('message', 'Commit %s' % idx))

        for node in commit_data.get('added', []):
            imc.add(FileNode(node.path, content=node.content))
        for node in commit_data.get('changed', []):
            imc.change(FileNode(node.path, content=node.content))
        for node in commit_data.get('removed', []):
            imc.remove(FileNode(node.path))

        parents = [
            vcs_repo.get_commit(commit_id=commit_ids[p])
            for p in commit_data.get('parents', [])]

        # Make sure every commit touches at least one file, some backends
        # would otherwise create an empty commit.
        operations = ('added', 'changed', 'removed')
        if not any(commit_data.get(o) for o in operations):
            imc.add(FileNode('file_%s' % idx, content=message))

        commit = imc.commit(
            message=message,
            author=unicode(commit_data.get('author', 'Automatic <automatic@rhodecode.com>')),
            date=commit_data.get('date'),
            branch=commit_data.get('branch'),
            parents=parents)

        commit_ids[commit.message] = commit.raw_id

    return commit_ids
879 884
880 885
@pytest.fixture()
def reposerver(request):
    """
    Allows to serve a backend repository
    """
    server = RepoServer()
    request.addfinalizer(server.cleanup)
    return server
890 895
891 896
class RepoServer(object):
    """
    Utility to serve a local repository for the duration of a test case.

    Supports only Subversion so far.
    """

    # URL under which the served repository is reachable; set by `serve`.
    url = None

    def __init__(self):
        self._cleanup_servers = []

    def serve(self, vcsrepo):
        """
        Start an svnserve process for ``vcsrepo`` and remember it for cleanup.

        :raises TypeError: if the repository backend is not Subversion.
        """
        if vcsrepo.alias != 'svn':
            raise TypeError("Backend %s not supported" % vcsrepo.alias)

        proc = subprocess32.Popen(
            ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
             '--root', vcsrepo.path])
        self._cleanup_servers.append(proc)
        self.url = 'svn://localhost'

    def cleanup(self):
        for proc in self._cleanup_servers:
            proc.terminate()
            # Reap the terminated process so no zombies are left behind for
            # the remainder of the test run.
            proc.wait()
917 922
918 923
@pytest.fixture()
def pr_util(backend, request, config_stub):
    """
    Utility for tests of models and for functional tests around pull requests.

    It gives an instance of :class:`PRTestUtility` which provides various
    utility methods around one pull request.

    This fixture uses `backend` and inherits its parameterization.
    """
    utility = PRTestUtility(backend)
    request.addfinalizer(utility.cleanup)
    return utility
934 939
935 940
class PRTestUtility(object):
    """
    Helper around a single pull request for model and functional tests.

    Instantiated via the `pr_util` fixture; `cleanup` must run after usage.
    """

    pull_request = None
    pull_request_id = None
    mergeable_patcher = None
    mergeable_mock = None
    notification_patcher = None

    def __init__(self, backend):
        self.backend = backend

    def create_pull_request(
            self, commits=None, target_head=None, source_head=None,
            revisions=None, approved=False, author=None, mergeable=False,
            enable_notifications=True, name_suffix=u'', reviewers=None,
            title=u"Test", description=u"Description"):
        """
        Create the pull request under test (only once per instance) and
        return it. Subsequent calls return the already created request.
        """
        self.set_mergeable(mergeable)
        if not enable_notifications:
            # mock notification side effect
            self.notification_patcher = mock.patch(
                'rhodecode.model.notification.NotificationModel.create')
            self.notification_patcher.start()

        if not self.pull_request:
            if not commits:
                commits = [
                    {'message': 'c1'},
                    {'message': 'c2'},
                    {'message': 'c3'},
                ]
                target_head = 'c1'
                source_head = 'c2'
                revisions = ['c2']

            self.commit_ids = self.backend.create_master_repo(commits)
            self.target_repository = self.backend.create_repo(
                heads=[target_head], name_suffix=name_suffix)
            self.source_repository = self.backend.create_repo(
                heads=[source_head], name_suffix=name_suffix)
            self.author = author or UserModel().get_by_username(
                TEST_USER_ADMIN_LOGIN)

            model = PullRequestModel()
            self.create_parameters = {
                'created_by': self.author,
                'source_repo': self.source_repository.repo_name,
                'source_ref': self._default_branch_reference(source_head),
                'target_repo': self.target_repository.repo_name,
                'target_ref': self._default_branch_reference(target_head),
                'revisions': [self.commit_ids[r] for r in revisions],
                'reviewers': reviewers or self._get_reviewers(),
                'title': title,
                'description': description,
            }
            self.pull_request = model.create(**self.create_parameters)
            assert model.get_versions(self.pull_request) == []

            self.pull_request_id = self.pull_request.pull_request_id

            if approved:
                self.approve()

            Session().add(self.pull_request)
            Session().commit()

        return self.pull_request

    def approve(self):
        # Approve on behalf of all configured reviewers.
        self.create_status_votes(
            ChangesetStatus.STATUS_APPROVED,
            *self.pull_request.reviewers)

    def close(self):
        PullRequestModel().close_pull_request(self.pull_request, self.author)

    def _default_branch_reference(self, commit_message):
        # Build a "branch:<name>:<raw_id>" reference string for the commit
        # identified by its message.
        reference = '%s:%s:%s' % (
            'branch',
            self.backend.default_branch_name,
            self.commit_ids[commit_message])
        return reference

    def _get_reviewers(self):
        # (username, reasons, mandatory, rules) tuples.
        return [
            (TEST_USER_REGULAR_LOGIN, ['default1'], False, []),
            (TEST_USER_REGULAR2_LOGIN, ['default2'], False, []),
        ]

    def update_source_repository(self, head=None):
        heads = [head or 'c3']
        self.backend.pull_heads(self.source_repository, heads=heads)

    def add_one_commit(self, head=None):
        # Pull one more commit into the source repo, update the pull request
        # and return the raw id of the newly added commit.
        self.update_source_repository(head=head)
        old_commit_ids = set(self.pull_request.revisions)
        PullRequestModel().update_commits(self.pull_request)
        commit_ids = set(self.pull_request.revisions)
        new_commit_ids = commit_ids - old_commit_ids
        assert len(new_commit_ids) == 1
        return new_commit_ids.pop()

    def remove_one_commit(self):
        # Strip the tip commit from the source repo, update the pull request
        # and return the raw id of the removed commit.
        assert len(self.pull_request.revisions) == 2
        source_vcs = self.source_repository.scm_instance()
        removed_commit_id = source_vcs.commit_ids[-1]

        # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
        # remove the if once that's sorted out.
        if self.backend.alias == "git":
            kwargs = {'branch_name': self.backend.default_branch_name}
        else:
            kwargs = {}
        source_vcs.strip(removed_commit_id, **kwargs)

        PullRequestModel().update_commits(self.pull_request)
        assert len(self.pull_request.revisions) == 1
        return removed_commit_id

    def create_comment(self, linked_to=None):
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_inline_comment(
            self, linked_to=None, line_no=u'n1', file_path='file_1'):
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            line_no=line_no,
            f_path=file_path,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_version_of_pull_request(self):
        pull_request = self.create_pull_request()
        version = PullRequestModel()._create_version_from_snapshot(
            pull_request)
        return version

    def create_status_votes(self, status, *reviewers):
        for reviewer in reviewers:
            ChangesetStatusModel().set_status(
                repo=self.pull_request.target_repo,
                status=status,
                user=reviewer.user_id,
                pull_request=self.pull_request)

    def set_mergeable(self, value):
        # Patch the vcs settings model so the pull request reports the
        # requested mergeability without consulting real settings.
        if not self.mergeable_patcher:
            self.mergeable_patcher = mock.patch.object(
                VcsSettingsModel, 'get_general_settings')
            self.mergeable_mock = self.mergeable_patcher.start()
        self.mergeable_mock.return_value = {
            'rhodecode_pr_merge_enabled': value}

    def cleanup(self):
        # In case the source repository is already cleaned up, the pull
        # request will already be deleted.
        pull_request = PullRequest().get(self.pull_request_id)
        if pull_request:
            PullRequestModel().delete(pull_request, pull_request.author)
            Session().commit()

        if self.notification_patcher:
            self.notification_patcher.stop()

        if self.mergeable_patcher:
            self.mergeable_patcher.stop()
1118 1123
1119 1124
@pytest.fixture()
def user_admin(baseapp):
    """Return the default admin test user as a `db.User` instance."""
    return UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1127 1132
1128 1133
@pytest.fixture()
def user_regular(baseapp):
    """Return the default regular test user as a `db.User` instance."""
    return UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1136 1141
1137 1142
@pytest.fixture()
def user_util(request, db_connection):
    """Wired `UserUtility` instance; everything it creates is cleaned up."""
    util = UserUtility(test_name=request.node.name)
    request.addfinalizer(util.cleanup)
    return util
1146 1151
1147 1152
1148 1153 # TODO: johbo: Split this up into utilities per domain or something similar
class UserUtility(object):
    """Creates users, repos, repo groups, user groups and permissions
    for a single test.

    Every ``create_*``/``grant_*`` call registers what it created so that
    :meth:`cleanup` can remove it again: permissions are revoked first,
    then repos, repo groups, user groups and finally users.
    """

    def __init__(self, test_name="test"):
        self._test_name = self._sanitize_name(test_name)
        self.fixture = Fixture()
        # ids of objects created through this utility, kept for cleanup
        self.repo_group_ids = []
        self.repos_ids = []
        self.user_ids = []
        self.user_group_ids = []
        # (target_id, subject_id) pairs of granted permissions
        self.user_repo_permission_ids = []
        self.user_group_repo_permission_ids = []
        self.user_repo_group_permission_ids = []
        self.user_group_repo_group_permission_ids = []
        self.user_user_group_permission_ids = []
        self.user_group_user_group_permission_ids = []
        self.user_permissions = []

    def _sanitize_name(self, name):
        """Replace characters invalid in repo/group names.

        Parametrized test node names contain brackets, e.g. ``test[hg]``.
        """
        for char in ['[', ']']:
            name = name.replace(char, '_')
        return name

    def create_repo_group(
            self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
        """Create a uniquely named repository group owned by `owner`."""
        group_name = "{prefix}_repogroup_{count}".format(
            prefix=self._test_name,
            count=len(self.repo_group_ids))
        repo_group = self.fixture.create_repo_group(
            group_name, cur_user=owner)
        if auto_cleanup:
            self.repo_group_ids.append(repo_group.group_id)
        return repo_group

    def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
                    auto_cleanup=True, repo_type='hg', bare=False):
        """Create a uniquely named repository, optionally inside `parent`."""
        repo_name = "{prefix}_repository_{count}".format(
            prefix=self._test_name,
            count=len(self.repos_ids))

        repository = self.fixture.create_repo(
            repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
        if auto_cleanup:
            self.repos_ids.append(repository.repo_id)
        return repository

    def create_user(self, auto_cleanup=True, **kwargs):
        """Create a uniquely named user; kwargs go to `Fixture.create_user`."""
        user_name = "{prefix}_user_{count}".format(
            prefix=self._test_name,
            count=len(self.user_ids))
        user = self.fixture.create_user(user_name, **kwargs)
        if auto_cleanup:
            self.user_ids.append(user.user_id)
        return user

    def create_additional_user_email(self, user, email):
        """Attach an additional email address to `user`."""
        uem = self.fixture.create_additional_user_email(user=user, email=email)
        return uem

    def create_user_with_group(self):
        """Create a user plus a user group containing that user."""
        user = self.create_user()
        user_group = self.create_user_group(members=[user])
        return user, user_group

    def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
                          auto_cleanup=True, **kwargs):
        """Create a uniquely named user group, optionally with members."""
        group_name = "{prefix}_usergroup_{count}".format(
            prefix=self._test_name,
            count=len(self.user_group_ids))
        user_group = self.fixture.create_user_group(
            group_name, cur_user=owner, **kwargs)

        if auto_cleanup:
            self.user_group_ids.append(user_group.users_group_id)
        if members:
            for user in members:
                UserGroupModel().add_user_to_group(user_group, user)
        return user_group

    def grant_user_permission(self, user_name, permission_name):
        """Register a global permission for cleanup-time revocation.

        NOTE(review): this disables inherited defaults and records the
        pair for cleanup but does not itself call a grant API — the actual
        grant appears to happen elsewhere; confirm before relying on it.
        """
        self.inherit_default_user_permissions(user_name, False)
        self.user_permissions.append((user_name, permission_name))

    def grant_user_permission_to_repo_group(
            self, repo_group, user, permission_name):
        """Grant `user` a permission on `repo_group`; tracked for cleanup."""
        permission = RepoGroupModel().grant_user_permission(
            repo_group, user, permission_name)
        self.user_repo_group_permission_ids.append(
            (repo_group.group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo_group(
            self, repo_group, user_group, permission_name):
        """Grant `user_group` a permission on `repo_group`; tracked."""
        permission = RepoGroupModel().grant_user_group_permission(
            repo_group, user_group, permission_name)
        self.user_group_repo_group_permission_ids.append(
            (repo_group.group_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_repo(
            self, repo, user, permission_name):
        """Grant `user` a permission on `repo`; tracked for cleanup."""
        permission = RepoModel().grant_user_permission(
            repo, user, permission_name)
        self.user_repo_permission_ids.append(
            (repo.repo_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo(
            self, repo, user_group, permission_name):
        """Grant `user_group` a permission on `repo`; tracked for cleanup."""
        permission = RepoModel().grant_user_group_permission(
            repo, user_group, permission_name)
        self.user_group_repo_permission_ids.append(
            (repo.repo_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_user_group(
            self, target_user_group, user, permission_name):
        """Grant `user` a permission on `target_user_group`; tracked."""
        permission = UserGroupModel().grant_user_permission(
            target_user_group, user, permission_name)
        self.user_user_group_permission_ids.append(
            (target_user_group.users_group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_user_group(
            self, target_user_group, user_group, permission_name):
        """Grant `user_group` a permission on `target_user_group`; tracked."""
        permission = UserGroupModel().grant_user_group_permission(
            target_user_group, user_group, permission_name)
        self.user_group_user_group_permission_ids.append(
            (target_user_group.users_group_id, user_group.users_group_id))
        return permission

    def revoke_user_permission(self, user_name, permission_name):
        """Revoke a global permission and re-enable inherited defaults."""
        self.inherit_default_user_permissions(user_name, True)
        UserModel().revoke_perm(user_name, permission_name)

    def inherit_default_user_permissions(self, user_name, value):
        """Toggle whether `user_name` inherits the default permissions."""
        user = UserModel().get_by_username(user_name)
        user.inherit_default_permissions = value
        Session().add(user)
        Session().commit()

    def cleanup(self):
        """Remove everything created through this utility, in safe order."""
        self._cleanup_permissions()
        self._cleanup_repos()
        self._cleanup_repo_groups()
        self._cleanup_user_groups()
        self._cleanup_users()

    def _cleanup_permissions(self):
        if self.user_permissions:
            for user_name, permission_name in self.user_permissions:
                self.revoke_user_permission(user_name, permission_name)

        for permission in self.user_repo_permission_ids:
            RepoModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_permission_ids:
            RepoModel().revoke_user_group_permission(*permission)

        for permission in self.user_repo_group_permission_ids:
            RepoGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_group_permission_ids:
            RepoGroupModel().revoke_user_group_permission(*permission)

        for permission in self.user_user_group_permission_ids:
            UserGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_user_group_permission_ids:
            UserGroupModel().revoke_user_group_permission(*permission)

    def _cleanup_repo_groups(self):
        def _group_depth(group_id):
            # groups with more complex (deeper) paths must go first
            group = RepoGroup.get(group_id)
            return len(group.group_name.split('/')) if group else 0

        # Sort by descending path depth. This replaces the Python-2-only
        # `sorted(..., cmp=...)` comparator with an equivalent key function;
        # both forms are stable and order equal-depth groups identically.
        sorted_repo_group_ids = sorted(
            self.repo_group_ids, key=_group_depth, reverse=True)
        for repo_group_id in sorted_repo_group_ids:
            self.fixture.destroy_repo_group(repo_group_id)

    def _cleanup_repos(self):
        sorted_repos_ids = sorted(self.repos_ids)
        for repo_id in sorted_repos_ids:
            self.fixture.destroy_repo(repo_id)

    def _cleanup_user_groups(self):
        def _group_depth(group_id):
            # groups with more complex (deeper) paths must go first
            group = UserGroup.get(group_id)
            return (len(group.users_group_name.split('/'))
                    if group else 0)

        # descending path depth; see _cleanup_repo_groups for rationale
        sorted_user_group_ids = sorted(
            self.user_group_ids, key=_group_depth, reverse=True)
        for user_group_id in sorted_user_group_ids:
            self.fixture.destroy_user_group(user_group_id)

    def _cleanup_users(self):
        for user_id in self.user_ids:
            self.fixture.destroy_user(user_id)
1365 1370
1366 1371
1367 1372 # TODO: Think about moving this into a pytest-pyro package and make it a
1368 1373 # pytest plugin
@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
    """Attach the VCSServer remote traceback to the test report.

    VCSServer stores its remote traceback on the raised exception as the
    attribute `_vcs_server_traceback`.
    """
    # let the regular report be produced first, then post-process it
    hook_result = yield
    test_report = hook_result.get_result()
    if call.excinfo:
        _add_vcsserver_remote_traceback(test_report, call.excinfo.value)
1381 1386
1382 1387
1383 1388 def _add_vcsserver_remote_traceback(report, exc):
1384 1389 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1385 1390
1386 1391 if vcsserver_traceback:
1387 1392 section = 'VCSServer remote traceback ' + report.when
1388 1393 report.sections.append((section, vcsserver_traceback))
1389 1394
1390 1395
@pytest.fixture(scope='session')
def testrun():
    """Describe this test run: unique id, ISO start time, unix timestamp."""
    started = datetime.datetime.utcnow()
    return {
        'uuid': uuid.uuid4(),
        'start': started.isoformat(),
        'timestamp': int(time.time()),
    }
1398 1403
1399 1404
class AppenlightClient(object):
    """Collects test metrics/tags and pushes them to an Appenlight server."""

    url_template = '{url}?protocol_version=0.5'

    def __init__(
            self, url, api_key, add_server=True, add_timestamp=True,
            namespace=None, request=None, testrun=None):
        self.url = self.url_template.format(url=url)
        self.api_key = api_key
        self.add_server = add_server
        self.add_timestamp = add_timestamp
        self.namespace = namespace
        self.request = request
        # fully qualified host name, tagged onto every record
        self.server = socket.getfqdn(socket.gethostname())
        self.tags_before = {}
        self.tags_after = {}
        self.stats = []
        self.testrun = testrun or {}

    def tag_before(self, tag, value):
        """Remember a tag value captured before the measured action."""
        self.tags_before[tag] = value

    def tag_after(self, tag, value):
        """Remember a tag value captured after the measured action."""
        self.tags_after[tag] = value

    def collect(self, data):
        """Queue one stats record, filling in configured defaults."""
        if self.add_server:
            data.setdefault('server', self.server)
        if self.add_timestamp:
            data.setdefault('date', datetime.datetime.utcnow().isoformat())
        if self.namespace:
            data.setdefault('namespace', self.namespace)
        if self.request:
            data.setdefault('request', self.request)
        self.stats.append(data)

    def send_stats(self):
        """POST all collected stats (plus before/after/delta tags)."""
        tags = [
            ('testrun', self.request),
            ('testrun.start', self.testrun['start']),
            ('testrun.timestamp', self.testrun['timestamp']),
            ('test', self.namespace),
        ]
        for name, before in self.tags_before.items():
            tags.append((name + '.before', before))
            try:
                # delta only exists for tags captured both before and after
                delta = self.tags_after[name] - before
                tags.append((name + '.delta', delta))
            except Exception:
                pass
        for name, after in self.tags_after.items():
            tags.append((name + '.after', after))
        self.collect({
            'message': "Collected tags",
            'tags': tags,
        })

        response = requests.post(
            self.url,
            headers={
                'X-appenlight-api-key': self.api_key},
            json=self.stats,
        )

        if response.status_code != 200:
            # dump the payload to help diagnose what was rejected
            pprint.pprint(self.stats)
            print(response.headers)
            print(response.text)
            raise Exception('Sending to appenlight failed')
1469 1474
1470 1475
@pytest.fixture()
def gist_util(request, db_connection):
    """Wired `GistUtility` instance; created gists are cleaned up."""
    util = GistUtility()
    request.addfinalizer(util.cleanup)
    return util
1479 1484
1480 1485
class GistUtility(object):
    """Creates gists through `Fixture` and destroys them on cleanup."""

    def __init__(self):
        self.fixture = Fixture()
        # ids of created gists, destroyed again by cleanup()
        self.gist_ids = []

    def create_gist(self, **kwargs):
        """Create a gist and register it for cleanup."""
        new_gist = self.fixture.create_gist(**kwargs)
        self.gist_ids.append(new_gist.gist_id)
        return new_gist

    def cleanup(self):
        """Destroy every gist created through this utility."""
        for gist_id in self.gist_ids:
            self.fixture.destroy_gists(str(gist_id))
1494 1499
1495 1500
@pytest.fixture()
def enabled_backends(request):
    """Copy of the backend names enabled via the --backends option."""
    return list(request.config.option.backends)
1500 1505
1501 1506
@pytest.fixture()
def settings_util(request, db_connection):
    """Wired `SettingsUtility` instance with integrated cleanup."""
    util = SettingsUtility()
    request.addfinalizer(util.cleanup)
    return util
1510 1515
1511 1516
class SettingsUtility(object):
    """Creates RhodeCode ui/settings rows (global and per-repository) and
    deletes them again in :meth:`cleanup`."""

    def __init__(self):
        # ids of created rows, one list per table, tracked for cleanup
        self.rhodecode_ui_ids = []
        self.rhodecode_setting_ids = []
        self.repo_rhodecode_ui_ids = []
        self.repo_rhodecode_setting_ids = []

    @staticmethod
    def _persist(setting):
        # commit so the new row's primary key is populated
        Session().add(setting)
        Session().commit()

    def create_repo_rhodecode_ui(
            self, repo, section, value, key=None, active=True, cleanup=True):
        """Insert a per-repository ui row; key defaults to a content hash."""
        key = key or hashlib.sha1(
            '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()

        setting = RepoRhodeCodeUi()
        setting.repository_id = repo.repo_id
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        self._persist(setting)

        if cleanup:
            self.repo_rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_rhodecode_ui(
            self, section, value, key=None, active=True, cleanup=True):
        """Insert a global ui row; key defaults to a content hash."""
        key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()

        setting = RhodeCodeUi()
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        self._persist(setting)

        if cleanup:
            self.rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_repo_rhodecode_setting(
            self, repo, name, value, type_, cleanup=True):
        """Insert a per-repository application setting."""
        setting = RepoRhodeCodeSetting(
            repo.repo_id, key=name, val=value, type=type_)
        self._persist(setting)

        if cleanup:
            self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
        return setting

    def create_rhodecode_setting(self, name, value, type_, cleanup=True):
        """Insert a global application setting."""
        setting = RhodeCodeSetting(key=name, val=value, type=type_)
        self._persist(setting)

        if cleanup:
            self.rhodecode_setting_ids.append(setting.app_settings_id)

        return setting

    def cleanup(self):
        """Delete every setting row created through this utility."""
        for ui_id in self.rhodecode_ui_ids:
            Session().delete(RhodeCodeUi.get(ui_id))

        for setting_id in self.rhodecode_setting_ids:
            Session().delete(RhodeCodeSetting.get(setting_id))

        for ui_id in self.repo_rhodecode_ui_ids:
            Session().delete(RepoRhodeCodeUi.get(ui_id))

        for setting_id in self.repo_rhodecode_setting_ids:
            Session().delete(RepoRhodeCodeSetting.get(setting_id))

        Session().commit()
1592 1597
1593 1598
@pytest.fixture()
def no_notifications(request):
    """Disable notification creation for the duration of a test."""
    patcher = mock.patch(
        'rhodecode.model.notification.NotificationModel.create')
    patcher.start()
    request.addfinalizer(patcher.stop)
1600 1605
1601 1606
@pytest.fixture(scope='session')
def repeat(request):
    """Number of repetitions for repetition-based tests.

    Slower calls may divide it by 10 or 100; it is chosen so that the
    default test suite does not become too slow.
    """
    return request.config.getoption('--repeat')
1611 1616
1612 1617
@pytest.fixture()
def rhodecode_fixtures():
    """Fresh `Fixture` helper instance."""
    return Fixture()
1616 1621
1617 1622
@pytest.fixture()
def context_stub():
    """Stub pyramid context object."""
    return pyramid.testing.DummyResource()
1625 1630
1626 1631
@pytest.fixture()
def request_stub():
    """Stub pyramid request object using the https scheme."""
    from rhodecode.lib.base import bootstrap_request
    return bootstrap_request(scheme='https')
1635 1640
1636 1641
@pytest.fixture()
def config_stub(request, request_stub):
    """Set up `pyramid.testing` and return its Configurator."""
    from rhodecode.lib.base import bootstrap_config
    configurator = bootstrap_config(request=request_stub)

    @request.addfinalizer
    def teardown():
        pyramid.testing.tearDown()

    return configurator
1650 1655
1651 1656
@pytest.fixture()
def StubIntegrationType():
    """Define, register and return a minimal integration type for tests."""

    class _StubIntegrationType(IntegrationTypeBase):
        """ Test integration type class """

        key = 'test'
        display_name = 'Test integration type'
        description = 'A test integration type for testing'

        @classmethod
        def icon(cls):
            return 'test_icon_html_image'

        def __init__(self, settings):
            super(_StubIntegrationType, self).__init__(settings)
            self.sent_events = []  # for testing

        def send_event(self, event):
            # record instead of delivering, so tests can assert on events
            self.sent_events.append(event)

        def settings_schema(self):
            # schema matching the `stub_integration_settings` fixture
            class SettingsSchema(colander.Schema):
                test_string_field = colander.SchemaNode(
                    colander.String(),
                    missing=colander.required,
                    title='test string field',
                )
                test_int_field = colander.SchemaNode(
                    colander.Int(),
                    title='some integer setting',
                )
            return SettingsSchema()


    # NOTE: registration is global; repeated use re-registers the same key
    integration_type_registry.register_integration_type(_StubIntegrationType)
    return _StubIntegrationType
1688 1693
@pytest.fixture()
def stub_integration_settings():
    """Valid settings dict for the stub integration type's schema."""
    return {
        'test_string_field': 'some data',
        'test_int_field': 100,
    }
1695 1700
1696 1701
@pytest.fixture()
def repo_integration_stub(request, repo_stub, StubIntegrationType,
                          stub_integration_settings):
    """Enabled stub integration scoped to a single repository."""
    stub = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repo integration',
        repo=repo_stub, repo_group=None, child_repos_only=None)

    @request.addfinalizer
    def remove_integration():
        IntegrationModel().delete(stub)

    return stub
1710 1715
1711 1716
@pytest.fixture()
def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
                               stub_integration_settings):
    """Enabled stub integration scoped to direct children of a repo group."""
    stub = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=True)

    @request.addfinalizer
    def remove_integration():
        IntegrationModel().delete(stub)

    return stub
1725 1730
1726 1731
@pytest.fixture()
def repogroup_recursive_integration_stub(request, test_repo_group,
                                         StubIntegrationType,
                                         stub_integration_settings):
    """Enabled stub integration covering a repo group recursively."""
    stub = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test recursive repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=False)

    @request.addfinalizer
    def remove_integration():
        IntegrationModel().delete(stub)

    return stub
1740 1745
1741 1746
@pytest.fixture()
def global_integration_stub(request, StubIntegrationType,
                            stub_integration_settings):
    """Enabled stub integration applying globally."""
    stub = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=None)

    @request.addfinalizer
    def remove_integration():
        IntegrationModel().delete(stub)

    return stub
1755 1760
1756 1761
@pytest.fixture()
def root_repos_integration_stub(request, StubIntegrationType,
                                stub_integration_settings):
    """Enabled stub integration applying to top-level repositories only.

    NOTE(review): shares the name 'test global integration' with
    `global_integration_stub` — kept byte-identical on purpose.
    """
    stub = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=True)

    @request.addfinalizer
    def remove_integration():
        IntegrationModel().delete(stub)

    return stub
1770 1775
1771 1776
@pytest.fixture()
def local_dt_to_utc():
    """Factory converting a naive local datetime into a naive UTC one."""
    def _factory(dt):
        localized = dt.replace(tzinfo=dateutil.tz.tzlocal())
        return localized.astimezone(
            dateutil.tz.tzutc()).replace(tzinfo=None)
    return _factory
1778 1783
1779 1784
@pytest.fixture()
def disable_anonymous_user(request, baseapp):
    """Turn anonymous access off for the test and restore it afterwards."""
    set_anonymous_access(False)

    @request.addfinalizer
    def restore():
        set_anonymous_access(True)
1787 1792
1788 1793
@pytest.fixture(scope='module')
def rc_fixture(request):
    """Module-scoped `Fixture` helper instance."""
    return Fixture()
1792 1797
1793 1798
@pytest.fixture()
def repo_groups(request):
    """Create a standalone 'zombie' group plus a parent/child pair."""
    fixture = Fixture()

    session = Session()
    zombie_group = fixture.create_repo_group('zombie')
    parent_group = fixture.create_repo_group('parent')
    child_group = fixture.create_repo_group('parent/child')

    # sanity-check the setup: exactly these three groups, properly nested
    groups_in_db = session.query(RepoGroup).all()
    assert len(groups_in_db) == 3
    assert child_group.group_parent_id == parent_group.group_id

    @request.addfinalizer
    def cleanup():
        # destroy the child before its parent
        fixture.destroy_repo_group(zombie_group)
        fixture.destroy_repo_group(child_group)
        fixture.destroy_repo_group(parent_group)

    return zombie_group, parent_group, child_group
1813
1814
@pytest.fixture(scope="session")
def tmp_path_factory(request):
    """Return a :class:`_pytest.tmpdir.TempPathFactory` instance for the test session.

    Backport shim for pytest versions that lack the built-in fixture.
    """

    class TempPathFactory:

        def mktemp(self, basename):
            """Create and return a fresh temporary directory path.

            Mirrors `_pytest.tmpdir.TempPathFactory.mktemp`, where the
            returned directory exists. Uses `tempfile.mkdtemp` instead of
            the deprecated, race-prone `tempfile.mktemp` (which also
            received `basename` as its *suffix* positional argument
            rather than a prefix).
            """
            import tempfile
            return tempfile.mkdtemp(prefix=basename)

    return TempPathFactory()
@@ -1,1274 +1,1274 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import datetime
22 22 import mock
23 23 import os
24 24 import sys
25 25 import shutil
26 26
27 27 import pytest
28 28
29 29 from rhodecode.lib.utils import make_db_config
30 30 from rhodecode.lib.vcs.backends.base import Reference
31 31 from rhodecode.lib.vcs.backends.git import (
32 32 GitRepository, GitCommit, discover_git_version)
33 33 from rhodecode.lib.vcs.exceptions import (
34 34 RepositoryError, VCSError, NodeDoesNotExistError)
35 35 from rhodecode.lib.vcs.nodes import (
36 36 NodeKind, FileNode, DirNode, NodeState, SubModuleNode)
37 37 from rhodecode.tests import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
38 38 from rhodecode.tests.vcs.conftest import BackendTestMixin
39 39
40 40
41 41 pytestmark = pytest.mark.backends("git")
42 42
43 43
44 44 class TestGitRepository(object):
45 45
46 46 @pytest.fixture(autouse=True)
47 47 def prepare(self, request, baseapp):
48 48 self.repo = GitRepository(TEST_GIT_REPO, bare=True)
49 49 self.repo.count()
50 50
51 def get_clone_repo(self, tmp_path_factory):
51 def get_clone_repo(self, tmpdir):
52 52 """
53 53 Return a non bare clone of the base repo.
54 54 """
55 clone_path = tmp_path_factory.mktemp('clone-url')
55 clone_path = str(tmpdir.join('clone-repo'))
56 56 repo_clone = GitRepository(
57 57 clone_path, create=True, src_url=self.repo.path, bare=False)
58 58
59 59 return repo_clone
60 60
61 def get_empty_repo(self, tmp_path_factory, bare=False):
61 def get_empty_repo(self, tmpdir, bare=False):
62 62 """
63 63 Return a non bare empty repo.
64 64 """
65 clone_path = tmp_path_factory.mktemp('empty-repo')
65 clone_path = str(tmpdir.join('empty-repo'))
66 66 return GitRepository(clone_path, create=True, bare=bare)
67 67
68 68 def test_wrong_repo_path(self):
69 69 wrong_repo_path = '/tmp/errorrepo_git'
70 70 with pytest.raises(RepositoryError):
71 71 GitRepository(wrong_repo_path)
72 72
73 73 def test_repo_clone(self, tmp_path_factory):
74 74 repo = GitRepository(TEST_GIT_REPO)
75 clone_path = tmp_path_factory.mktemp('_') + '_' + TEST_GIT_REPO_CLONE
75 clone_path = '{}_{}'.format(tmp_path_factory.mktemp('_'), TEST_GIT_REPO_CLONE)
76 76 repo_clone = GitRepository(
77 77 clone_path,
78 78 src_url=TEST_GIT_REPO, create=True, do_workspace_checkout=True)
79 79
80 80 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
81 81 # Checking hashes of commits should be enough
82 82 for commit in repo.get_commits():
83 83 raw_id = commit.raw_id
84 84 assert raw_id == repo_clone.get_commit(raw_id).raw_id
85 85
86 86 def test_repo_clone_without_create(self):
87 87 with pytest.raises(RepositoryError):
88 88 GitRepository(
89 89 TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)
90 90
91 91 def test_repo_clone_with_update(self, tmp_path_factory):
92 92 repo = GitRepository(TEST_GIT_REPO)
93 clone_path = tmp_path_factory.mktemp('_') + '_' + TEST_GIT_REPO_CLONE + '_update'
93 clone_path = '{}_{}_update'.format(tmp_path_factory.mktemp('_'), TEST_GIT_REPO_CLONE)
94 94
95 95 repo_clone = GitRepository(
96 96 clone_path,
97 97 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=True)
98 98 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
99 99
100 100 # check if current workdir was updated
101 101 fpath = os.path.join(clone_path, 'MANIFEST.in')
102 102 assert os.path.isfile(fpath)
103 103
104 104 def test_repo_clone_without_update(self, tmp_path_factory):
105 105 repo = GitRepository(TEST_GIT_REPO)
106 clone_path = tmp_path_factory.mktemp('_') + '_' + TEST_GIT_REPO_CLONE + '_without_update'
106 clone_path = '{}_{}_without_update'.format(tmp_path_factory.mktemp('_'), TEST_GIT_REPO_CLONE)
107 107 repo_clone = GitRepository(
108 108 clone_path,
109 109 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=False)
110 110 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
111 111 # check if current workdir was *NOT* updated
112 112 fpath = os.path.join(clone_path, 'MANIFEST.in')
113 113 # Make sure it's not bare repo
114 114 assert not repo_clone.bare
115 115 assert not os.path.isfile(fpath)
116 116
117 117 def test_repo_clone_into_bare_repo(self, tmp_path_factory):
118 118 repo = GitRepository(TEST_GIT_REPO)
119 clone_path = tmp_path_factory.mktemp('_') + '_' + TEST_GIT_REPO_CLONE + '_bare.git'
119 clone_path = '{}_{}_bare.git'.format(tmp_path_factory.mktemp('_'), TEST_GIT_REPO_CLONE)
120 120 repo_clone = GitRepository(
121 121 clone_path, create=True, src_url=repo.path, bare=True)
122 122 assert repo_clone.bare
123 123
124 124 def test_create_repo_is_not_bare_by_default(self):
125 125 repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
126 126 assert not repo.bare
127 127
128 128 def test_create_bare_repo(self):
129 129 repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True)
130 130 assert repo.bare
131 131
132 132 def test_update_server_info(self):
133 133 self.repo._update_server_info()
134 134
135 135 def test_fetch(self, vcsbackend_git):
136 136 # Note: This is a git specific part of the API, it's only implemented
137 137 # by the git backend.
138 138 source_repo = vcsbackend_git.repo
139 139 target_repo = vcsbackend_git.create_repo(bare=True)
140 140 target_repo.fetch(source_repo.path)
141 141 # Note: Get a fresh instance, avoids caching trouble
142 142 target_repo = vcsbackend_git.backend(target_repo.path)
143 143 assert len(source_repo.commit_ids) == len(target_repo.commit_ids)
144 144
145 145 def test_commit_ids(self):
146 146 # there are 112 commits (by now)
147 147 # so we can assume they would be available from now on
148 148 subset = {'c1214f7e79e02fc37156ff215cd71275450cffc3',
149 149 '38b5fe81f109cb111f549bfe9bb6b267e10bc557',
150 150 'fa6600f6848800641328adbf7811fd2372c02ab2',
151 151 '102607b09cdd60e2793929c4f90478be29f85a17',
152 152 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
153 153 '2d1028c054665b962fa3d307adfc923ddd528038',
154 154 'd7e0d30fbcae12c90680eb095a4f5f02505ce501',
155 155 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
156 156 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
157 157 '8430a588b43b5d6da365400117c89400326e7992',
158 158 'd955cd312c17b02143c04fa1099a352b04368118',
159 159 'f67b87e5c629c2ee0ba58f85197e423ff28d735b',
160 160 'add63e382e4aabc9e1afdc4bdc24506c269b7618',
161 161 'f298fe1189f1b69779a4423f40b48edf92a703fc',
162 162 'bd9b619eb41994cac43d67cf4ccc8399c1125808',
163 163 '6e125e7c890379446e98980d8ed60fba87d0f6d1',
164 164 'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd',
165 165 '0b05e4ed56c802098dfc813cbe779b2f49e92500',
166 166 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
167 167 '45223f8f114c64bf4d6f853e3c35a369a6305520',
168 168 'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
169 169 'f5ea29fc42ef67a2a5a7aecff10e1566699acd68',
170 170 '27d48942240f5b91dfda77accd2caac94708cc7d',
171 171 '622f0eb0bafd619d2560c26f80f09e3b0b0d78af',
172 172 'e686b958768ee96af8029fe19c6050b1a8dd3b2b'}
173 173 assert subset.issubset(set(self.repo.commit_ids))
174 174
175 175 def test_slicing(self):
176 176 # 4 1 5 10 95
177 177 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
178 178 (10, 20, 10), (5, 100, 95)]:
179 179 commit_ids = list(self.repo[sfrom:sto])
180 180 assert len(commit_ids) == size
181 181 assert commit_ids[0] == self.repo.get_commit(commit_idx=sfrom)
182 182 assert commit_ids[-1] == self.repo.get_commit(commit_idx=sto - 1)
183 183
184 184 def test_branches(self):
185 185 # TODO: Need more tests here
186 186 # Removed (those are 'remotes' branches for cloned repo)
187 187 # assert 'master' in self.repo.branches
188 188 # assert 'gittree' in self.repo.branches
189 189 # assert 'web-branch' in self.repo.branches
190 190 for __, commit_id in self.repo.branches.items():
191 191 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
192 192
193 193 def test_tags(self):
194 194 # TODO: Need more tests here
195 195 assert 'v0.1.1' in self.repo.tags
196 196 assert 'v0.1.2' in self.repo.tags
197 197 for __, commit_id in self.repo.tags.items():
198 198 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
199 199
200 200 def _test_single_commit_cache(self, commit_id):
201 201 commit = self.repo.get_commit(commit_id)
202 202 assert commit_id in self.repo.commits
203 203 assert commit is self.repo.commits[commit_id]
204 204
205 205 def test_initial_commit(self):
206 206 commit_id = self.repo.commit_ids[0]
207 207 init_commit = self.repo.get_commit(commit_id)
208 208 init_author = init_commit.author
209 209
210 210 assert init_commit.message == 'initial import\n'
211 211 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
212 212 assert init_author == init_commit.committer
213 213 for path in ('vcs/__init__.py',
214 214 'vcs/backends/BaseRepository.py',
215 215 'vcs/backends/__init__.py'):
216 216 assert isinstance(init_commit.get_node(path), FileNode)
217 217 for path in ('', 'vcs', 'vcs/backends'):
218 218 assert isinstance(init_commit.get_node(path), DirNode)
219 219
220 220 with pytest.raises(NodeDoesNotExistError):
221 221 init_commit.get_node(path='foobar')
222 222
223 223 node = init_commit.get_node('vcs/')
224 224 assert hasattr(node, 'kind')
225 225 assert node.kind == NodeKind.DIR
226 226
227 227 node = init_commit.get_node('vcs')
228 228 assert hasattr(node, 'kind')
229 229 assert node.kind == NodeKind.DIR
230 230
231 231 node = init_commit.get_node('vcs/__init__.py')
232 232 assert hasattr(node, 'kind')
233 233 assert node.kind == NodeKind.FILE
234 234
235 235 def test_not_existing_commit(self):
236 236 with pytest.raises(RepositoryError):
237 237 self.repo.get_commit('f' * 40)
238 238
239 239 def test_commit10(self):
240 240
241 241 commit10 = self.repo.get_commit(self.repo.commit_ids[9])
242 242 README = """===
243 243 VCS
244 244 ===
245 245
246 246 Various Version Control System management abstraction layer for Python.
247 247
248 248 Introduction
249 249 ------------
250 250
251 251 TODO: To be written...
252 252
253 253 """
254 254 node = commit10.get_node('README.rst')
255 255 assert node.kind == NodeKind.FILE
256 256 assert node.content == README
257 257
258 258 def test_head(self):
259 259 assert self.repo.head == self.repo.get_commit().raw_id
260 260
261 def test_checkout_with_create(self, tmp_path_factory):
262 repo_clone = self.get_clone_repo(tmp_path_factory)
261 def test_checkout_with_create(self, tmpdir):
262 repo_clone = self.get_clone_repo(tmpdir)
263 263
264 264 new_branch = 'new_branch'
265 265 assert repo_clone._current_branch() == 'master'
266 266 assert set(repo_clone.branches) == {'master'}
267 267 repo_clone._checkout(new_branch, create=True)
268 268
269 269 # Branches is a lazy property so we need to recrete the Repo object.
270 270 repo_clone = GitRepository(repo_clone.path)
271 271 assert set(repo_clone.branches) == {'master', new_branch}
272 272 assert repo_clone._current_branch() == new_branch
273 273
274 def test_checkout(self, tmp_path_factory):
275 repo_clone = self.get_clone_repo(tmp_path_factory)
274 def test_checkout(self, tmpdir):
275 repo_clone = self.get_clone_repo(tmpdir)
276 276
277 277 repo_clone._checkout('new_branch', create=True)
278 278 repo_clone._checkout('master')
279 279
280 280 assert repo_clone._current_branch() == 'master'
281 281
282 def test_checkout_same_branch(self, tmp_path_factory):
283 repo_clone = self.get_clone_repo(tmp_path_factory)
282 def test_checkout_same_branch(self, tmpdir):
283 repo_clone = self.get_clone_repo(tmpdir)
284 284
285 285 repo_clone._checkout('master')
286 286 assert repo_clone._current_branch() == 'master'
287 287
288 def test_checkout_branch_already_exists(self, tmp_path_factory):
289 repo_clone = self.get_clone_repo(tmp_path_factory)
288 def test_checkout_branch_already_exists(self, tmpdir):
289 repo_clone = self.get_clone_repo(tmpdir)
290 290
291 291 with pytest.raises(RepositoryError):
292 292 repo_clone._checkout('master', create=True)
293 293
294 294 def test_checkout_bare_repo(self):
295 295 with pytest.raises(RepositoryError):
296 296 self.repo._checkout('master')
297 297
298 298 def test_current_branch_bare_repo(self):
299 299 with pytest.raises(RepositoryError):
300 300 self.repo._current_branch()
301 301
302 def test_current_branch_empty_repo(self, tmp_path_factory):
303 repo = self.get_empty_repo(tmp_path_factory)
302 def test_current_branch_empty_repo(self, tmpdir):
303 repo = self.get_empty_repo(tmpdir)
304 304 assert repo._current_branch() is None
305 305
306 306 def test_local_clone(self, tmp_path_factory):
307 clone_path = tmp_path_factory.mktemp('test-local-clone')
307 clone_path = str(tmp_path_factory.mktemp('test-local-clone'))
308 308 self.repo._local_clone(clone_path, 'master')
309 309 repo_clone = GitRepository(clone_path)
310 310
311 311 assert self.repo.commit_ids == repo_clone.commit_ids
312 312
313 def test_local_clone_with_specific_branch(self, tmp_path_factory):
314 source_repo = self.get_clone_repo(tmp_path_factory)
313 def test_local_clone_with_specific_branch(self, tmpdir):
314 source_repo = self.get_clone_repo(tmpdir)
315 315
316 316 # Create a new branch in source repo
317 317 new_branch_commit = source_repo.commit_ids[-3]
318 318 source_repo._checkout(new_branch_commit)
319 319 source_repo._checkout('new_branch', create=True)
320 320
321 clone_path = tmp_path_factory.mktemp('git-clone-path-1')
321 clone_path = str(tmpdir.join('git-clone-path-1'))
322 322 source_repo._local_clone(clone_path, 'new_branch')
323 323 repo_clone = GitRepository(clone_path)
324 324
325 325 assert source_repo.commit_ids[:-3 + 1] == repo_clone.commit_ids
326 326
327 clone_path = tmp_path_factory.mktemp('git-clone-path-2')
327 clone_path = str(tmpdir.join('git-clone-path-2'))
328 328 source_repo._local_clone(clone_path, 'master')
329 329 repo_clone = GitRepository(clone_path)
330 330
331 331 assert source_repo.commit_ids == repo_clone.commit_ids
332 332
333 333 def test_local_clone_fails_if_target_exists(self):
334 334 with pytest.raises(RepositoryError):
335 335 self.repo._local_clone(self.repo.path, 'master')
336 336
337 def test_local_fetch(self, tmp_path_factory):
338 target_repo = self.get_empty_repo(tmp_path_factory)
339 source_repo = self.get_clone_repo(tmp_path_factory)
337 def test_local_fetch(self, tmpdir):
338 target_repo = self.get_empty_repo(tmpdir)
339 source_repo = self.get_clone_repo(tmpdir)
340 340
341 341 # Create a new branch in source repo
342 342 master_commit = source_repo.commit_ids[-1]
343 343 new_branch_commit = source_repo.commit_ids[-3]
344 344 source_repo._checkout(new_branch_commit)
345 345 source_repo._checkout('new_branch', create=True)
346 346
347 347 target_repo._local_fetch(source_repo.path, 'new_branch')
348 348 assert target_repo._last_fetch_heads() == [new_branch_commit]
349 349
350 350 target_repo._local_fetch(source_repo.path, 'master')
351 351 assert target_repo._last_fetch_heads() == [master_commit]
352 352
353 def test_local_fetch_from_bare_repo(self, tmp_path_factory):
354 target_repo = self.get_empty_repo(tmp_path_factory)
353 def test_local_fetch_from_bare_repo(self, tmpdir):
354 target_repo = self.get_empty_repo(tmpdir)
355 355 target_repo._local_fetch(self.repo.path, 'master')
356 356
357 357 master_commit = self.repo.commit_ids[-1]
358 358 assert target_repo._last_fetch_heads() == [master_commit]
359 359
360 360 def test_local_fetch_from_same_repo(self):
361 361 with pytest.raises(ValueError):
362 362 self.repo._local_fetch(self.repo.path, 'master')
363 363
364 def test_local_fetch_branch_does_not_exist(self, tmp_path_factory):
365 target_repo = self.get_empty_repo(tmp_path_factory)
364 def test_local_fetch_branch_does_not_exist(self, tmpdir):
365 target_repo = self.get_empty_repo(tmpdir)
366 366
367 367 with pytest.raises(RepositoryError):
368 368 target_repo._local_fetch(self.repo.path, 'new_branch')
369 369
370 def test_local_pull(self, tmp_path_factory):
371 target_repo = self.get_empty_repo(tmp_path_factory)
372 source_repo = self.get_clone_repo(tmp_path_factory)
370 def test_local_pull(self, tmpdir):
371 target_repo = self.get_empty_repo(tmpdir)
372 source_repo = self.get_clone_repo(tmpdir)
373 373
374 374 # Create a new branch in source repo
375 375 master_commit = source_repo.commit_ids[-1]
376 376 new_branch_commit = source_repo.commit_ids[-3]
377 377 source_repo._checkout(new_branch_commit)
378 378 source_repo._checkout('new_branch', create=True)
379 379
380 380 target_repo._local_pull(source_repo.path, 'new_branch')
381 381 target_repo = GitRepository(target_repo.path)
382 382 assert target_repo.head == new_branch_commit
383 383
384 384 target_repo._local_pull(source_repo.path, 'master')
385 385 target_repo = GitRepository(target_repo.path)
386 386 assert target_repo.head == master_commit
387 387
388 388 def test_local_pull_in_bare_repo(self):
389 389 with pytest.raises(RepositoryError):
390 390 self.repo._local_pull(self.repo.path, 'master')
391 391
392 def test_local_merge(self, tmp_path_factory):
393 target_repo = self.get_empty_repo(tmp_path_factory)
394 source_repo = self.get_clone_repo(tmp_path_factory)
392 def test_local_merge(self, tmpdir):
393 target_repo = self.get_empty_repo(tmpdir)
394 source_repo = self.get_clone_repo(tmpdir)
395 395
396 396 # Create a new branch in source repo
397 397 master_commit = source_repo.commit_ids[-1]
398 398 new_branch_commit = source_repo.commit_ids[-3]
399 399 source_repo._checkout(new_branch_commit)
400 400 source_repo._checkout('new_branch', create=True)
401 401
402 402 # This is required as one cannot do a -ff-only merge in an empty repo.
403 403 target_repo._local_pull(source_repo.path, 'new_branch')
404 404
405 405 target_repo._local_fetch(source_repo.path, 'master')
406 406 merge_message = 'Merge message\n\nDescription:...'
407 407 user_name = 'Albert Einstein'
408 408 user_email = 'albert@einstein.com'
409 409 target_repo._local_merge(merge_message, user_name, user_email,
410 410 target_repo._last_fetch_heads())
411 411
412 412 target_repo = GitRepository(target_repo.path)
413 413 assert target_repo.commit_ids[-2] == master_commit
414 414 last_commit = target_repo.get_commit(target_repo.head)
415 415 assert last_commit.message.strip() == merge_message
416 416 assert last_commit.author == '%s <%s>' % (user_name, user_email)
417 417
418 418 assert not os.path.exists(
419 419 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
420 420
421 421 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_git):
422 422 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
423 423 vcsbackend_git.ensure_file('README', 'I will conflict with you!!!')
424 424
425 425 target_repo._local_fetch(self.repo.path, 'master')
426 426 with pytest.raises(RepositoryError):
427 427 target_repo._local_merge(
428 428 'merge_message', 'user name', 'user@name.com',
429 429 target_repo._last_fetch_heads())
430 430
431 431 # Check we are not left in an intermediate merge state
432 432 assert not os.path.exists(
433 433 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
434 434
435 def test_local_merge_into_empty_repo(self, tmp_path_factory):
436 target_repo = self.get_empty_repo(tmp_path_factory)
435 def test_local_merge_into_empty_repo(self, tmpdir):
436 target_repo = self.get_empty_repo(tmpdir)
437 437
438 438 # This is required as one cannot do a -ff-only merge in an empty repo.
439 439 target_repo._local_fetch(self.repo.path, 'master')
440 440 with pytest.raises(RepositoryError):
441 441 target_repo._local_merge(
442 442 'merge_message', 'user name', 'user@name.com',
443 443 target_repo._last_fetch_heads())
444 444
445 445 def test_local_merge_in_bare_repo(self):
446 446 with pytest.raises(RepositoryError):
447 447 self.repo._local_merge(
448 448 'merge_message', 'user name', 'user@name.com', None)
449 449
450 def test_local_push_non_bare(self, tmp_path_factory):
451 target_repo = self.get_empty_repo(tmp_path_factory)
450 def test_local_push_non_bare(self, tmpdir):
451 target_repo = self.get_empty_repo(tmpdir)
452 452
453 453 pushed_branch = 'pushed_branch'
454 454 self.repo._local_push('master', target_repo.path, pushed_branch)
455 455 # Fix the HEAD of the target repo, or otherwise GitRepository won't
456 456 # report any branches.
457 457 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
458 458 f.write('ref: refs/heads/%s' % pushed_branch)
459 459
460 460 target_repo = GitRepository(target_repo.path)
461 461
462 462 assert (target_repo.branches[pushed_branch] ==
463 463 self.repo.branches['master'])
464 464
465 def test_local_push_bare(self, tmp_path_factory):
466 target_repo = self.get_empty_repo(tmp_path_factory, bare=True)
465 def test_local_push_bare(self, tmpdir):
466 target_repo = self.get_empty_repo(tmpdir, bare=True)
467 467
468 468 pushed_branch = 'pushed_branch'
469 469 self.repo._local_push('master', target_repo.path, pushed_branch)
470 470 # Fix the HEAD of the target repo, or otherwise GitRepository won't
471 471 # report any branches.
472 472 with open(os.path.join(target_repo.path, 'HEAD'), 'w') as f:
473 473 f.write('ref: refs/heads/%s' % pushed_branch)
474 474
475 475 target_repo = GitRepository(target_repo.path)
476 476
477 477 assert (target_repo.branches[pushed_branch] ==
478 478 self.repo.branches['master'])
479 479
480 def test_local_push_non_bare_target_branch_is_checked_out(self, tmp_path_factory):
481 target_repo = self.get_clone_repo(tmp_path_factory)
480 def test_local_push_non_bare_target_branch_is_checked_out(self, tmpdir):
481 target_repo = self.get_clone_repo(tmpdir)
482 482
483 483 pushed_branch = 'pushed_branch'
484 484 # Create a new branch in source repo
485 485 new_branch_commit = target_repo.commit_ids[-3]
486 486 target_repo._checkout(new_branch_commit)
487 487 target_repo._checkout(pushed_branch, create=True)
488 488
489 489 self.repo._local_push('master', target_repo.path, pushed_branch)
490 490
491 491 target_repo = GitRepository(target_repo.path)
492 492
493 493 assert (target_repo.branches[pushed_branch] ==
494 494 self.repo.branches['master'])
495 495
496 496 def test_local_push_raises_exception_on_conflict(self, vcsbackend_git):
497 497 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
498 498 with pytest.raises(RepositoryError):
499 499 self.repo._local_push('master', target_repo.path, 'master')
500 500
501 def test_hooks_can_be_enabled_via_env_variable_for_local_push(self, tmp_path_factory):
502 target_repo = self.get_empty_repo(tmp_path_factory, bare=True)
501 def test_hooks_can_be_enabled_via_env_variable_for_local_push(self, tmpdir):
502 target_repo = self.get_empty_repo(tmpdir, bare=True)
503 503
504 504 with mock.patch.object(self.repo, 'run_git_command') as run_mock:
505 505 self.repo._local_push(
506 506 'master', target_repo.path, 'master', enable_hooks=True)
507 507 env = run_mock.call_args[1]['extra_env']
508 508 assert 'RC_SKIP_HOOKS' not in env
509 509
510 510 def _add_failing_hook(self, repo_path, hook_name, bare=False):
511 511 path_components = (
512 512 ['hooks', hook_name] if bare else ['.git', 'hooks', hook_name])
513 513 hook_path = os.path.join(repo_path, *path_components)
514 514 with open(hook_path, 'w') as f:
515 515 script_lines = [
516 516 '#!%s' % sys.executable,
517 517 'import os',
518 518 'import sys',
519 519 'if os.environ.get("RC_SKIP_HOOKS"):',
520 520 ' sys.exit(0)',
521 521 'sys.exit(1)',
522 522 ]
523 523 f.write('\n'.join(script_lines))
524 524 os.chmod(hook_path, 0o755)
525 525
526 def test_local_push_does_not_execute_hook(self, tmp_path_factory):
527 target_repo = self.get_empty_repo(tmp_path_factory)
526 def test_local_push_does_not_execute_hook(self, tmpdir):
527 target_repo = self.get_empty_repo(tmpdir)
528 528
529 529 pushed_branch = 'pushed_branch'
530 530 self._add_failing_hook(target_repo.path, 'pre-receive')
531 531 self.repo._local_push('master', target_repo.path, pushed_branch)
532 532 # Fix the HEAD of the target repo, or otherwise GitRepository won't
533 533 # report any branches.
534 534 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
535 535 f.write('ref: refs/heads/%s' % pushed_branch)
536 536
537 537 target_repo = GitRepository(target_repo.path)
538 538
539 539 assert (target_repo.branches[pushed_branch] ==
540 540 self.repo.branches['master'])
541 541
542 def test_local_push_executes_hook(self, tmp_path_factory):
543 target_repo = self.get_empty_repo(tmp_path_factory, bare=True)
542 def test_local_push_executes_hook(self, tmpdir):
543 target_repo = self.get_empty_repo(tmpdir, bare=True)
544 544 self._add_failing_hook(target_repo.path, 'pre-receive', bare=True)
545 545 with pytest.raises(RepositoryError):
546 546 self.repo._local_push(
547 547 'master', target_repo.path, 'master', enable_hooks=True)
548 548
549 549 def test_maybe_prepare_merge_workspace(self):
550 550 workspace = self.repo._maybe_prepare_merge_workspace(
551 551 2, 'pr2', Reference('branch', 'master', 'unused'),
552 552 Reference('branch', 'master', 'unused'))
553 553
554 554 assert os.path.isdir(workspace)
555 555 workspace_repo = GitRepository(workspace)
556 556 assert workspace_repo.branches == self.repo.branches
557 557
558 558 # Calling it a second time should also succeed
559 559 workspace = self.repo._maybe_prepare_merge_workspace(
560 560 2, 'pr2', Reference('branch', 'master', 'unused'),
561 561 Reference('branch', 'master', 'unused'))
562 562 assert os.path.isdir(workspace)
563 563
564 564 def test_maybe_prepare_merge_workspace_different_refs(self):
565 565 workspace = self.repo._maybe_prepare_merge_workspace(
566 566 2, 'pr2', Reference('branch', 'master', 'unused'),
567 567 Reference('branch', 'develop', 'unused'))
568 568
569 569 assert os.path.isdir(workspace)
570 570 workspace_repo = GitRepository(workspace)
571 571 assert workspace_repo.branches == self.repo.branches
572 572
573 573 # Calling it a second time should also succeed
574 574 workspace = self.repo._maybe_prepare_merge_workspace(
575 575 2, 'pr2', Reference('branch', 'master', 'unused'),
576 576 Reference('branch', 'develop', 'unused'))
577 577 assert os.path.isdir(workspace)
578 578
579 579 def test_cleanup_merge_workspace(self):
580 580 workspace = self.repo._maybe_prepare_merge_workspace(
581 581 2, 'pr3', Reference('branch', 'master', 'unused'),
582 582 Reference('branch', 'master', 'unused'))
583 583 self.repo.cleanup_merge_workspace(2, 'pr3')
584 584
585 585 assert not os.path.exists(workspace)
586 586
587 587 def test_cleanup_merge_workspace_invalid_workspace_id(self):
588 588 # No assert: because in case of an inexistent workspace this function
589 589 # should still succeed.
590 590 self.repo.cleanup_merge_workspace(1, 'pr4')
591 591
592 592 def test_set_refs(self):
593 593 test_ref = 'refs/test-refs/abcde'
594 594 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
595 595
596 596 self.repo.set_refs(test_ref, test_commit_id)
597 597 stdout, _ = self.repo.run_git_command(['show-ref'])
598 598 assert test_ref in stdout
599 599 assert test_commit_id in stdout
600 600
601 601 def test_remove_ref(self):
602 602 test_ref = 'refs/test-refs/abcde'
603 603 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
604 604 self.repo.set_refs(test_ref, test_commit_id)
605 605 stdout, _ = self.repo.run_git_command(['show-ref'])
606 606 assert test_ref in stdout
607 607 assert test_commit_id in stdout
608 608
609 609 self.repo.remove_ref(test_ref)
610 610 stdout, _ = self.repo.run_git_command(['show-ref'])
611 611 assert test_ref not in stdout
612 612 assert test_commit_id not in stdout
613 613
614 614
615 615 class TestGitCommit(object):
616 616
617 617 @pytest.fixture(autouse=True)
618 618 def prepare(self):
619 619 self.repo = GitRepository(TEST_GIT_REPO)
620 620
621 621 def test_default_commit(self):
622 622 tip = self.repo.get_commit()
623 623 assert tip == self.repo.get_commit(None)
624 624 assert tip == self.repo.get_commit('tip')
625 625
626 626 def test_root_node(self):
627 627 tip = self.repo.get_commit()
628 628 assert tip.root is tip.get_node('')
629 629
630 630 def test_lazy_fetch(self):
631 631 """
632 632 Test if commit's nodes expands and are cached as we walk through
633 633 the commit. This test is somewhat hard to write as order of tests
634 634 is a key here. Written by running command after command in a shell.
635 635 """
636 636 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
637 637 assert commit_id in self.repo.commit_ids
638 638 commit = self.repo.get_commit(commit_id)
639 639 assert len(commit.nodes) == 0
640 640 root = commit.root
641 641 assert len(commit.nodes) == 1
642 642 assert len(root.nodes) == 8
643 643 # accessing root.nodes updates commit.nodes
644 644 assert len(commit.nodes) == 9
645 645
646 646 docs = root.get_node('docs')
647 647 # we haven't yet accessed anything new as docs dir was already cached
648 648 assert len(commit.nodes) == 9
649 649 assert len(docs.nodes) == 8
650 650 # accessing docs.nodes updates commit.nodes
651 651 assert len(commit.nodes) == 17
652 652
653 653 assert docs is commit.get_node('docs')
654 654 assert docs is root.nodes[0]
655 655 assert docs is root.dirs[0]
656 656 assert docs is commit.get_node('docs')
657 657
658 658 def test_nodes_with_commit(self):
659 659 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
660 660 commit = self.repo.get_commit(commit_id)
661 661 root = commit.root
662 662 docs = root.get_node('docs')
663 663 assert docs is commit.get_node('docs')
664 664 api = docs.get_node('api')
665 665 assert api is commit.get_node('docs/api')
666 666 index = api.get_node('index.rst')
667 667 assert index is commit.get_node('docs/api/index.rst')
668 668 assert index is commit.get_node('docs')\
669 669 .get_node('api')\
670 670 .get_node('index.rst')
671 671
672 672 def test_branch_and_tags(self):
673 673 """
674 674 rev0 = self.repo.commit_ids[0]
675 675 commit0 = self.repo.get_commit(rev0)
676 676 assert commit0.branch == 'master'
677 677 assert commit0.tags == []
678 678
679 679 rev10 = self.repo.commit_ids[10]
680 680 commit10 = self.repo.get_commit(rev10)
681 681 assert commit10.branch == 'master'
682 682 assert commit10.tags == []
683 683
684 684 rev44 = self.repo.commit_ids[44]
685 685 commit44 = self.repo.get_commit(rev44)
686 686 assert commit44.branch == 'web-branch'
687 687
688 688 tip = self.repo.get_commit('tip')
689 689 assert 'tip' in tip.tags
690 690 """
691 691 # Those tests would fail - branches are now going
692 692 # to be changed at main API in order to support git backend
693 693 pass
694 694
695 695 def test_file_size(self):
696 696 to_check = (
697 697 ('c1214f7e79e02fc37156ff215cd71275450cffc3',
698 698 'vcs/backends/BaseRepository.py', 502),
699 699 ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
700 700 'vcs/backends/hg.py', 854),
701 701 ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
702 702 'setup.py', 1068),
703 703
704 704 ('d955cd312c17b02143c04fa1099a352b04368118',
705 705 'vcs/backends/base.py', 2921),
706 706 ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
707 707 'vcs/backends/base.py', 3936),
708 708 ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
709 709 'vcs/backends/base.py', 6189),
710 710 )
711 711 for commit_id, path, size in to_check:
712 712 node = self.repo.get_commit(commit_id).get_node(path)
713 713 assert node.is_file()
714 714 assert node.size == size
715 715
716 716 def test_file_history_from_commits(self):
717 717 node = self.repo[10].get_node('setup.py')
718 718 commit_ids = [commit.raw_id for commit in node.history]
719 719 assert ['ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == commit_ids
720 720
721 721 node = self.repo[20].get_node('setup.py')
722 722 node_ids = [commit.raw_id for commit in node.history]
723 723 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
724 724 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
725 725
726 726 # special case we check history from commit that has this particular
727 727 # file changed this means we check if it's included as well
728 728 node = self.repo.get_commit('191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e') \
729 729 .get_node('setup.py')
730 730 node_ids = [commit.raw_id for commit in node.history]
731 731 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
732 732 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
733 733
    def test_file_history(self):
        """
        Check that known commits appear in each file's history.

        Only subset membership is asserted (not full equality), so the
        test keeps passing as these files accumulate new commits.
        """
        # we can only check if those commits are present in the history
        # as we cannot update this test every time file is changed
        files = {
            'setup.py': [
                '54386793436c938cff89326944d4c2702340037d',
                '51d254f0ecf5df2ce50c0b115741f4cf13985dab',
                '998ed409c795fec2012b1c0ca054d99888b22090',
                '5e0eb4c47f56564395f76333f319d26c79e2fb09',
                '0115510b70c7229dbc5dc49036b32e7d91d23acd',
                '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e',
                '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
                '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
                'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
            ],
            'vcs/nodes.py': [
                '33fa3223355104431402a888fa77a4e9956feb3e',
                'fa014c12c26d10ba682fadb78f2a11c24c8118e1',
                'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
                'ab5721ca0a081f26bf43d9051e615af2cc99952f',
                'c877b68d18e792a66b7f4c529ea02c8f80801542',
                '4313566d2e417cb382948f8d9d7c765330356054',
                '6c2303a793671e807d1cfc70134c9ca0767d98c2',
                '54386793436c938cff89326944d4c2702340037d',
                '54000345d2e78b03a99d561399e8e548de3f3203',
                '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b',
                '2d03ca750a44440fb5ea8b751176d1f36f8e8f46',
                '2a08b128c206db48c2f0b8f70df060e6db0ae4f8',
                '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b',
                'ac71e9503c2ca95542839af0ce7b64011b72ea7c',
                '12669288fd13adba2a9b7dd5b870cc23ffab92d2',
                '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382',
                '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5',
                '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
                'f50f42baeed5af6518ef4b0cb2f1423f3851a941',
                'd7e390a45f6aa96f04f5e7f583ad4f867431aa25',
                'f15c21f97864b4f071cddfbf2750ec2e23859414',
                'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade',
                'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b',
                '84dec09632a4458f79f50ddbbd155506c460b4f9',
                '0115510b70c7229dbc5dc49036b32e7d91d23acd',
                '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
                '3bf1c5868e570e39569d094f922d33ced2fa3b2b',
                'b8d04012574729d2c29886e53b1a43ef16dd00a1',
                '6970b057cffe4aab0a792aa634c89f4bebf01441',
                'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
                'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
            ],
            'vcs/backends/git.py': [
                '4cf116ad5a457530381135e2f4c453e68a1b0105',
                '9a751d84d8e9408e736329767387f41b36935153',
                'cb681fb539c3faaedbcdf5ca71ca413425c18f01',
                '428f81bb652bcba8d631bce926e8834ff49bdcc6',
                '180ab15aebf26f98f714d8c68715e0f05fa6e1c7',
                '2b8e07312a2e89e92b90426ab97f349f4bce2a3a',
                '50e08c506174d8645a4bb517dd122ac946a0f3bf',
                '54000345d2e78b03a99d561399e8e548de3f3203',
            ],
        }
        for path, commit_ids in files.items():
            # resolve the node at the first listed commit, then walk its
            # full history and check all expected ids are contained in it
            node = self.repo.get_commit(commit_ids[0]).get_node(path)
            node_ids = [commit.raw_id for commit in node.history]
            assert set(commit_ids).issubset(set(node_ids)), (
                "We assumed that %s is subset of commit_ids for which file %s "
                "has been changed, and history of that node returned: %s"
                % (commit_ids, path, node_ids))
800 800
    def test_file_annotate(self):
        """
        Check per-line annotation of files at specific commits.

        For every file and commit, the annotated commit id of each line
        must match the expected ``commits`` list (one entry per line).
        """
        files = {
            'vcs/backends/__init__.py': {
                'c1214f7e79e02fc37156ff215cd71275450cffc3': {
                    'lines_no': 1,
                    'commits': [
                        'c1214f7e79e02fc37156ff215cd71275450cffc3',
                    ],
                },
                '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': {
                    'lines_no': 21,
                    'commits': [
                        '49d3fd156b6f7db46313fac355dca1a0b94a0017',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                    ],
                },
                'e29b67bd158580fc90fc5e9111240b90e6e86064': {
                    'lines_no': 32,
                    'commits': [
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '54000345d2e78b03a99d561399e8e548de3f3203',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '78c3f0c23b7ee935ec276acb8b8212444c33c396',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '78c3f0c23b7ee935ec276acb8b8212444c33c396',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                    ],
                },
            },
        }

        for fname, commit_dict in files.items():
            for commit_id, __ in commit_dict.items():
                commit = self.repo.get_commit(commit_id)

                # each annotation entry appears to be a tuple where [1] is
                # the commit id and [2] is a callable returning the commit
                # object (lazy loader) -- both views must agree
                l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
                l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
                assert l1_1 == l1_2
                l1 = l1_1
                l2 = files[fname][commit_id]['commits']
                assert l1 == l2, (
                    "The lists of commit_ids for %s@commit_id %s"
                    "from annotation list should match each other, "
                    "got \n%s \nvs \n%s " % (fname, commit_id, l1, l2))
889 889
890 890 def test_files_state(self):
891 891 """
892 892 Tests state of FileNodes.
893 893 """
894 894 node = self.repo\
895 895 .get_commit('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\
896 896 .get_node('vcs/utils/diffs.py')
897 897 assert node.state, NodeState.ADDED
898 898 assert node.added
899 899 assert not node.changed
900 900 assert not node.not_changed
901 901 assert not node.removed
902 902
903 903 node = self.repo\
904 904 .get_commit('33fa3223355104431402a888fa77a4e9956feb3e')\
905 905 .get_node('.hgignore')
906 906 assert node.state, NodeState.CHANGED
907 907 assert not node.added
908 908 assert node.changed
909 909 assert not node.not_changed
910 910 assert not node.removed
911 911
912 912 node = self.repo\
913 913 .get_commit('e29b67bd158580fc90fc5e9111240b90e6e86064')\
914 914 .get_node('setup.py')
915 915 assert node.state, NodeState.NOT_CHANGED
916 916 assert not node.added
917 917 assert not node.changed
918 918 assert node.not_changed
919 919 assert not node.removed
920 920
921 921 # If node has REMOVED state then trying to fetch it would raise
922 922 # CommitError exception
923 923 commit = self.repo.get_commit(
924 924 'fa6600f6848800641328adbf7811fd2372c02ab2')
925 925 path = 'vcs/backends/BaseRepository.py'
926 926 with pytest.raises(NodeDoesNotExistError):
927 927 commit.get_node(path)
928 928 # but it would be one of ``removed`` (commit's attribute)
929 929 assert path in [rf.path for rf in commit.removed]
930 930
931 931 commit = self.repo.get_commit(
932 932 '54386793436c938cff89326944d4c2702340037d')
933 933 changed = [
934 934 'setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
935 935 'vcs/nodes.py']
936 936 assert set(changed) == set([f.path for f in commit.changed])
937 937
938 938 def test_unicode_branch_refs(self):
939 939 unicode_branches = {
940 940 'refs/heads/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
941 941 u'refs/heads/uniΓ§ΓΆβˆ‚e': 'ΓΌrl',
942 942 }
943 943 with mock.patch(
944 944 ("rhodecode.lib.vcs.backends.git.repository"
945 945 ".GitRepository._refs"),
946 946 unicode_branches):
947 947 branches = self.repo.branches
948 948
949 949 assert 'unicode' in branches
950 950 assert u'uniΓ§ΓΆβˆ‚e' in branches
951 951
952 952 def test_unicode_tag_refs(self):
953 953 unicode_tags = {
954 954 'refs/tags/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
955 955 u'refs/tags/uniΓ§ΓΆβˆ‚e': '6c0ce52b229aa978889e91b38777f800e85f330b',
956 956 }
957 957 with mock.patch(
958 958 ("rhodecode.lib.vcs.backends.git.repository"
959 959 ".GitRepository._refs"),
960 960 unicode_tags):
961 961 tags = self.repo.tags
962 962
963 963 assert 'unicode' in tags
964 964 assert u'uniΓ§ΓΆβˆ‚e' in tags
965 965
966 966 def test_commit_message_is_unicode(self):
967 967 for commit in self.repo:
968 968 assert type(commit.message) == unicode
969 969
970 970 def test_commit_author_is_unicode(self):
971 971 for commit in self.repo:
972 972 assert type(commit.author) == unicode
973 973
974 974 def test_repo_files_content_is_unicode(self):
975 975 commit = self.repo.get_commit()
976 976 for node in commit.get_node('/'):
977 977 if node.is_file():
978 978 assert type(node.content) == unicode
979 979
980 980 def test_wrong_path(self):
981 981 # There is 'setup.py' in the root dir but not there:
982 982 path = 'foo/bar/setup.py'
983 983 tip = self.repo.get_commit()
984 984 with pytest.raises(VCSError):
985 985 tip.get_node(path)
986 986
987 987 @pytest.mark.parametrize("author_email, commit_id", [
988 988 ('marcin@python-blog.com', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
989 989 ('lukasz.balcerzak@python-center.pl',
990 990 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
991 991 ('none@none', '8430a588b43b5d6da365400117c89400326e7992'),
992 992 ])
993 993 def test_author_email(self, author_email, commit_id):
994 994 commit = self.repo.get_commit(commit_id)
995 995 assert author_email == commit.author_email
996 996
997 997 @pytest.mark.parametrize("author, commit_id", [
998 998 ('Marcin Kuzminski', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
999 999 ('Lukasz Balcerzak', 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
1000 1000 ('marcink', '8430a588b43b5d6da365400117c89400326e7992'),
1001 1001 ])
1002 1002 def test_author_username(self, author, commit_id):
1003 1003 commit = self.repo.get_commit(commit_id)
1004 1004 assert author == commit.author_name
1005 1005
1006 1006
class TestLargeFileRepo(object):

    def test_large_file(self, backend_git):
        # A git-lfs tracked file must resolve to a largefile node that
        # reports the size/name of the real LFS object.
        config = make_db_config()
        repo = backend_git.create_test_repo('largefiles', config)
        tip = repo.scm_instance().get_commit()

        # copy the stored LF object from the test repo's lfs_store into
        # the configured lfs store location, so it can be resolved
        oid = '7b331c02e313c7599d5a90212e17e6d3cb729bd2e1c9b873c302a63c95a2f9bf'
        src_path = os.path.join(
            repo.repo_path, repo.repo_name, 'lfs_store', oid)
        dst_path = os.path.join(
            config.get('vcs_git_lfs', 'store_location'), oid)
        shutil.copy(src_path, dst_path)

        lf_node = tip.get_node('1MB.zip').get_largefile_node()

        assert lf_node.is_largefile() is True
        assert lf_node.size == 1024000
        assert lf_node.name == '1MB.zip'
1031 1031
1032 1032
@pytest.mark.usefixtures("vcs_repository_support")
class TestGitSpecificWithRepo(BackendTestMixin):

    @classmethod
    def _get_commits(cls):
        # two-commit fixture: a nested file tree plus a symlink
        initial_commit = {
            'message': 'Initial',
            'author': 'Joe Doe <joe.doe@example.com>',
            'date': datetime.datetime(2010, 1, 1, 20),
            'added': [
                FileNode('foobar/static/js/admin/base.js', content='base'),
                # mode 0o120000 marks this node as a symbolic link
                FileNode(
                    'foobar/static/admin', content='admin',
                    mode=0o120000),
                FileNode('foo', content='foo'),
            ],
        }
        second_commit = {
            'message': 'Second',
            'author': 'Joe Doe <joe.doe@example.com>',
            'date': datetime.datetime(2010, 1, 1, 22),
            'added': [
                FileNode('foo2', content='foo2'),
            ],
        }
        return [initial_commit, second_commit]

    def test_paths_slow_traversing(self):
        # walk the tree one directory level at a time
        node = self.repo.get_commit().get_node('foobar')
        for segment in ('static', 'js', 'admin', 'base.js'):
            node = node.get_node(segment)
        assert node.content == 'base'

    def test_paths_fast_traversing(self):
        # fetch the leaf directly via its full path
        commit = self.repo.get_commit()
        assert commit.get_node('foobar/static/js/admin/base.js').content == 'base'

    def test_get_diff_runs_git_command_with_hashes(self):
        first, second = self.repo[0], self.repo[1]

        with mock.patch.object(self.repo, '_remote') as remote_mock:
            self.repo.get_diff(first, second)

        remote_mock.diff.assert_called_once_with(
            first.raw_id, second.raw_id,
            file_filter=None, opt_ignorews=False, context=3)

    def test_get_diff_runs_git_command_with_str_hashes(self):
        second = self.repo[1]
        with mock.patch.object(self.repo, '_remote') as remote_mock:
            self.repo.get_diff(self.repo.EMPTY_COMMIT, second)
        remote_mock.diff.assert_called_once_with(
            self.repo.EMPTY_COMMIT.raw_id, second.raw_id,
            file_filter=None, opt_ignorews=False, context=3)

    def test_get_diff_runs_git_command_with_path_if_its_given(self):
        first, second = self.repo[0], self.repo[1]
        with mock.patch.object(self.repo, '_remote') as remote_mock:
            self.repo.get_diff(first, second, 'foo')
        remote_mock.diff.assert_called_once_with(
            self.repo._lookup_commit(0), second.raw_id,
            file_filter='foo', opt_ignorews=False, context=3)
1097 1097
1098 1098
@pytest.mark.usefixtures("vcs_repository_support")
class TestGitRegression(BackendTestMixin):
    """
    Regression tests for listing nodes in directories whose paths share
    common prefixes (e.g. ``bot/templates`` vs ``bot/build/templates``).
    """

    @classmethod
    def _get_commits(cls):
        # fixture tree deliberately contains sibling directories with
        # similar path prefixes to catch prefix-matching bugs
        return [
            {
                'message': 'Initial',
                'author': 'Joe Doe <joe.doe@example.com>',
                'date': datetime.datetime(2010, 1, 1, 20),
                'added': [
                    FileNode('bot/__init__.py', content='base'),
                    FileNode('bot/templates/404.html', content='base'),
                    FileNode('bot/templates/500.html', content='base'),
                ],
            },
            {
                'message': 'Second',
                'author': 'Joe Doe <joe.doe@example.com>',
                'date': datetime.datetime(2010, 1, 1, 22),
                'added': [
                    FileNode('bot/build/migrations/1.py', content='foo2'),
                    FileNode('bot/build/migrations/2.py', content='foo2'),
                    FileNode(
                        'bot/build/static/templates/f.html', content='foo2'),
                    FileNode(
                        'bot/build/static/templates/f1.html', content='foo2'),
                    FileNode('bot/build/templates/err.html', content='foo2'),
                    FileNode('bot/build/templates/err2.html', content='foo2'),
                ],
            },
        ]

    @pytest.mark.parametrize("path, expected_paths", [
        ('bot', [
            'bot/build',
            'bot/templates',
            'bot/__init__.py']),
        ('bot/build', [
            'bot/build/migrations',
            'bot/build/static',
            'bot/build/templates']),
        ('bot/build/static', [
            'bot/build/static/templates']),
        ('bot/build/static/templates', [
            'bot/build/static/templates/f.html',
            'bot/build/static/templates/f1.html']),
        ('bot/build/templates', [
            'bot/build/templates/err.html',
            'bot/build/templates/err2.html']),
        ('bot/templates/', [
            'bot/templates/404.html',
            'bot/templates/500.html']),
    ])
    def test_similar_paths(self, path, expected_paths):
        """get_nodes() must return only the direct children of *path*."""
        commit = self.repo.get_commit()
        paths = [n.path for n in commit.get_nodes(path)]
        assert paths == expected_paths
1157 1157
1158 1158
class TestDiscoverGitVersion(object):

    def test_returns_git_version(self, baseapp):
        # with a running vcsserver a non-empty version string comes back
        version = discover_git_version()
        assert version

    def test_returns_empty_string_without_vcsserver(self):
        # any failure while talking to the vcsserver degrades to ''
        broken_connection = mock.Mock()
        broken_connection.discover_git_version = mock.Mock(
            side_effect=Exception)
        with mock.patch('rhodecode.lib.vcs.connection.Git', broken_connection):
            version = discover_git_version()
        assert version == ''
1172 1172
1173 1173
class TestGetSubmoduleUrl(object):
    """Tests for resolving submodule urls via the .gitmodules file."""

    # url used by every .gitmodules fixture below
    _URL = 'https://code.rhodecode.com/dulwich'

    @staticmethod
    def _commit(repository=None):
        # build a GitCommit with a mocked repository unless one is given
        if repository is None:
            repository = mock.Mock()
        return GitCommit(repository=repository, raw_id='abcdef12', idx=1)

    def _gitmodules_node(self, submodule_name):
        # fake node whose content declares a single submodule entry
        node = mock.Mock()
        node.content = (
            '[submodule "%s"]\n'
            '\tpath = %s\n'
            '\turl = %s\n'
        ) % (submodule_name, submodule_name, self._URL)
        return node

    def test_submodules_file_found(self):
        commit = self._commit()
        node = self._gitmodules_node('subrepo1')
        with mock.patch.object(
                commit, 'get_node', return_value=node) as get_node_mock:
            result = commit._get_submodule_url('subrepo1')
            get_node_mock.assert_called_once_with('.gitmodules')
            assert result == self._URL

    def test_complex_submodule_path(self):
        commit = self._commit()
        node = self._gitmodules_node('complex/subrepo/path')
        with mock.patch.object(
                commit, 'get_node', return_value=node) as get_node_mock:
            result = commit._get_submodule_url('complex/subrepo/path')
            get_node_mock.assert_called_once_with('.gitmodules')
            assert result == self._URL

    def test_submodules_file_not_found(self):
        # missing .gitmodules means the url cannot be resolved
        commit = self._commit()
        with mock.patch.object(
                commit, 'get_node', side_effect=NodeDoesNotExistError):
            result = commit._get_submodule_url('complex/subrepo/path')
            assert result is None

    def test_path_not_found(self):
        # .gitmodules exists but does not mention the requested path
        commit = self._commit()
        node = self._gitmodules_node('subrepo1')
        with mock.patch.object(
                commit, 'get_node', return_value=node) as get_node_mock:
            result = commit._get_submodule_url('subrepo2')
            get_node_mock.assert_called_once_with('.gitmodules')
            assert result is None

    def test_returns_cached_values(self):
        # repeated lookups must read .gitmodules only once
        commit = self._commit()
        node = self._gitmodules_node('subrepo1')
        with mock.patch.object(
                commit, 'get_node', return_value=node) as get_node_mock:
            for _ in range(3):
                commit._get_submodule_url('subrepo1')
            get_node_mock.assert_called_once_with('.gitmodules')

    def test_get_node_returns_a_link(self):
        repository = mock.Mock()
        repository.alias = 'git'
        commit = self._commit(repository)
        get_id_patch = mock.patch.object(
            commit, '_get_tree_id_for_path', return_value=(1, 'link'))
        get_submodule_patch = mock.patch.object(
            commit, '_get_submodule_url', return_value=self._URL)

        with get_id_patch, get_submodule_patch as submodule_mock:
            node = commit.get_node('/abcde')

        submodule_mock.assert_called_once_with('/abcde')
        assert type(node) == SubModuleNode
        assert node.url == self._URL

    def test_get_nodes_returns_links(self):
        repository = mock.MagicMock()
        repository.alias = 'git'
        repository._remote.tree_items.return_value = [
            ('subrepo', 'stat', 1, 'link')
        ]
        commit = self._commit(repository)
        get_id_patch = mock.patch.object(
            commit, '_get_tree_id_for_path', return_value=(1, 'tree'))
        get_submodule_patch = mock.patch.object(
            commit, '_get_submodule_url', return_value=self._URL)

        with get_id_patch, get_submodule_patch as submodule_mock:
            nodes = commit.get_nodes('/abcde')

        submodule_mock.assert_called_once_with('/abcde/subrepo')
        assert len(nodes) == 1
        assert type(nodes[0]) == SubModuleNode
        assert nodes[0].url == self._URL
General Comments 0
You need to be logged in to leave comments. Login now