##// END OF EJS Templates
merged stable into default
super-admin -
r947:4c8695a6 merge default
parent child Browse files
Show More
@@ -0,0 +1,16 b''
1 [DEFAULT]
2 done = false
3
4 [task:bump_version]
5 done = true
6
7 [task:fixes_on_stable]
8 done = true
9
10 [task:pip2nix_generated]
11 done = true
12
13 [release]
14 state = prepared
15 version = 4.25.2
16
@@ -0,0 +1,390 b''
1 import sys
2 import threading
3 import weakref
4 from base64 import b64encode
5 from logging import getLogger
6 from os import urandom
7
8 from redis import StrictRedis
9
__version__ = '3.7.0'

# One logger per lock life-cycle event, e.g.
# loggers["acquire"] -> "vcsserver.<module>.acquire".
_LOG_EVENTS = (
    "acquire",
    "refresh.thread.start",
    "refresh.thread.stop",
    "refresh.thread.exit",
    "refresh.start",
    "refresh.shutdown",
    "refresh.exit",
    "release",
)

loggers = {
    event: getLogger("vcsserver." + ".".join((__name__, event)))
    for event in _LOG_EVENTS
}

PY3 = sys.version_info[0] == 3

# Py2/Py3 compatibility aliases for text vs. binary string types.
if not PY3:
    text_type = unicode  # noqa
    binary_type = str
else:
    text_type = str
    binary_type = bytes
34
35
# Atomically release the lock (KEYS[1]) and wake one waiter.
# Checks that the stored id matches ARGV[1]; if not, returns error code 1.
# On success it recreates the signal list (KEYS[2]), pushes a token so a
# BLPOP-ing waiter wakes up, gives the signal a TTL of ARGV[2] ms, deletes
# the lock key and returns 0.
UNLOCK_SCRIPT = b"""
if redis.call("get", KEYS[1]) ~= ARGV[1] then
    return 1
else
    redis.call("del", KEYS[2])
    redis.call("lpush", KEYS[2], 1)
    redis.call("pexpire", KEYS[2], ARGV[2])
    redis.call("del", KEYS[1])
    return 0
end
"""

# Extend the TTL of the held lock (KEYS[1]) to ARGV[2] seconds.
# Error code 1 covers both cases: the key doesn't exist, or it doesn't
# equal this lock's id (ARGV[1]). Error code 2 means the key has no TTL
# (the lock was created without an expiry), so it cannot be extended.
EXTEND_SCRIPT = b"""
if redis.call("get", KEYS[1]) ~= ARGV[1] then
    return 1
elseif redis.call("ttl", KEYS[1]) < 0 then
    return 2
else
    redis.call("expire", KEYS[1], ARGV[2])
    return 0
end
"""

# Forcibly delete one lock (KEYS[1]) regardless of owner, signalling any
# waiter via the signal list (KEYS[2], TTL ARGV[2] ms). Returns the number
# of keys deleted (0 or 1).
RESET_SCRIPT = b"""
redis.call('del', KEYS[2])
redis.call('lpush', KEYS[2], 1)
redis.call('pexpire', KEYS[2], ARGV[2])
return redis.call('del', KEYS[1])
"""

# Forcibly delete every 'lock:*' key, signalling waiters on the matching
# 'lock-signal:' list for each ('lock:' is 5 chars, so sub(lock, 6) is the
# bare name). Returns the number of locks that were removed.
RESET_ALL_SCRIPT = b"""
local locks = redis.call('keys', 'lock:*')
local signal
for _, lock in pairs(locks) do
    signal = 'lock-signal:' .. string.sub(lock, 6)
    redis.call('del', signal)
    redis.call('lpush', signal, 1)
    redis.call('expire', signal, 1)
    redis.call('del', lock)
end
return #locks
"""
80
81
class AlreadyAcquired(RuntimeError):
    """Raised by acquire() when this Lock instance already holds the lock."""
    pass


class NotAcquired(RuntimeError):
    """Raised by extend()/release() when the lock is not held or has expired."""
    pass


class AlreadyStarted(RuntimeError):
    """Raised when the lock-renewal thread is started a second time."""
    pass


class TimeoutNotUsable(RuntimeError):
    """Raised by acquire() when a timeout is given together with blocking=False."""
    pass


class InvalidTimeout(RuntimeError):
    """Raised by acquire() when the given timeout is negative."""
    pass


class TimeoutTooLarge(RuntimeError):
    """Raised by acquire() when timeout exceeds expire (without auto-renewal)."""
    pass


class NotExpirable(RuntimeError):
    """Raised by extend() when the lock key has no TTL to extend."""
    pass
109
class Lock(object):
    """
    A Lock context manager implemented via redis SETNX/BLPOP.
    """
    # Lua script handles; (re)registered on the class by register_scripts()
    # each time a Lock is instantiated.
    unlock_script = None
    extend_script = None
    reset_script = None
    reset_all_script = None

    def __init__(self, redis_client, name, expire=None, id=None, auto_renewal=False, strict=True, signal_expire=1000):
        """
        :param redis_client:
            An instance of :class:`~StrictRedis`.
        :param name:
            The name (redis key) the lock should have.
        :param expire:
            The lock expiry time in seconds. If left at the default (None)
            the lock will not expire.
        :param id:
            The ID (redis value) the lock should have. A random value is
            generated when left at the default.

            Note that if you specify this then the lock is marked as "held". Acquires
            won't be possible.
        :param auto_renewal:
            If set to ``True``, Lock will automatically renew the lock so that it
            doesn't expire for as long as the lock is held (acquire() called
            or running in a context manager).

            Implementation note: Renewal will happen using a daemon thread with
            an interval of ``expire*2/3``. If wishing to use a different renewal
            time, subclass Lock, call ``super().__init__()`` then set
            ``self._lock_renewal_interval`` to your desired interval.
        :param strict:
            If set ``True`` then the ``redis_client`` needs to be an instance of ``redis.StrictRedis``.
        :param signal_expire:
            Advanced option to override signal list expiration in milliseconds. Increase it for very slow clients. Default: ``1000``.
        :raises ValueError:
            If ``strict`` and the client is not a StrictRedis, if ``expire``
            is negative, or if ``auto_renewal`` is set without ``expire``.
        :raises TypeError:
            If ``id`` is neither bytes nor text.
        """
        if strict and not isinstance(redis_client, StrictRedis):
            raise ValueError("redis_client must be instance of StrictRedis. "
                             "Use strict=False if you know what you're doing.")
        if auto_renewal and expire is None:
            raise ValueError("Expire may not be None when auto_renewal is set")

        self._client = redis_client

        # Normalize expire: falsy (None/0) means "never expires".
        if expire:
            expire = int(expire)
            if expire < 0:
                raise ValueError("A negative expire is not acceptable.")
        else:
            expire = None
        self._expire = expire

        self._signal_expire = signal_expire
        # The id is always stored as ascii text; binary ids that don't decode
        # cleanly are base64-encoded instead.
        if id is None:
            self._id = b64encode(urandom(18)).decode('ascii')
        elif isinstance(id, binary_type):
            try:
                self._id = id.decode('ascii')
            except UnicodeDecodeError:
                self._id = b64encode(id).decode('ascii')
        elif isinstance(id, text_type):
            self._id = id
        else:
            raise TypeError("Incorrect type for `id`. Must be bytes/str not %s." % type(id))
        self._name = 'lock:' + name
        self._signal = 'lock-signal:' + name
        # Renew at 2/3 of the expiry so a renewal always lands before the TTL
        # runs out, even with some scheduling jitter.
        self._lock_renewal_interval = (float(expire) * 2 / 3
                                       if auto_renewal
                                       else None)
        self._lock_renewal_thread = None

        self.register_scripts(redis_client)

    @classmethod
    def register_scripts(cls, redis_client):
        """Register the Lua scripts on the class (and the module-level
        ``reset_all_script``, once)."""
        global reset_all_script
        if reset_all_script is None:
            reset_all_script = redis_client.register_script(RESET_ALL_SCRIPT)
        cls.unlock_script = redis_client.register_script(UNLOCK_SCRIPT)
        cls.extend_script = redis_client.register_script(EXTEND_SCRIPT)
        cls.reset_script = redis_client.register_script(RESET_SCRIPT)
        cls.reset_all_script = redis_client.register_script(RESET_ALL_SCRIPT)

    @property
    def _held(self):
        """True when the value stored under the lock key is this lock's id."""
        return self.id == self.get_owner_id()

    def reset(self):
        """
        Forcibly deletes the lock. Use this with care.
        """
        self.reset_script(client=self._client, keys=(self._name, self._signal), args=(self.id, self._signal_expire))

    @property
    def id(self):
        """The (text) id value this lock stores under its redis key."""
        return self._id

    def get_owner_id(self):
        """Return the id currently stored under the lock key, decoded to text
        (None when the lock key does not exist)."""
        owner_id = self._client.get(self._name)
        if isinstance(owner_id, binary_type):
            owner_id = owner_id.decode('ascii', 'replace')
        return owner_id

    def acquire(self, blocking=True, timeout=None):
        """
        Try to acquire the lock; return True on success, False otherwise.

        :param blocking:
            Boolean value specifying whether lock should be blocking or not.
        :param timeout:
            An integer value specifying the maximum number of seconds to block.
        :raises AlreadyAcquired:
            If this instance already holds the lock.
        :raises TimeoutNotUsable:
            If ``timeout`` is combined with ``blocking=False``.
        :raises InvalidTimeout:
            If ``timeout`` is negative.
        :raises TimeoutTooLarge:
            If ``timeout`` exceeds ``expire`` and auto-renewal is off.
        """
        logger = loggers["acquire"]

        logger.debug("Getting acquire on %r ...", self._name)

        if self._held:
            owner_id = self.get_owner_id()
            raise AlreadyAcquired("Already acquired from this Lock instance. Lock id: {}".format(owner_id))

        if not blocking and timeout is not None:
            raise TimeoutNotUsable("Timeout cannot be used if blocking=False")

        # Note: a timeout of 0 is falsy and therefore means "no timeout",
        # so only strictly negative values are rejected here.
        if timeout:
            timeout = int(timeout)
            if timeout < 0:
                raise InvalidTimeout("Timeout (%d) cannot be less than 0" % timeout)

            if self._expire and not self._lock_renewal_interval and timeout > self._expire:
                raise TimeoutTooLarge("Timeout (%d) cannot be greater than expire (%d)" % (timeout, self._expire))

        busy = True
        blpop_timeout = timeout or self._expire or 0
        timed_out = False
        while busy:
            # SET NX is the actual lock attempt; the BLPOP below just waits
            # for the releaser's signal before retrying.
            busy = not self._client.set(self._name, self._id, nx=True, ex=self._expire)
            if busy:
                if timed_out:
                    return False
                elif blocking:
                    # Only treat an empty BLPOP as a timeout when the caller
                    # actually requested one; otherwise keep retrying.
                    timed_out = not self._client.blpop(self._signal, blpop_timeout) and timeout
                else:
                    logger.warning("Failed to get %r.", self._name)
                    return False

        logger.info("Got lock for %r.", self._name)
        if self._lock_renewal_interval is not None:
            self._start_lock_renewer()
        return True

    def extend(self, expire=None):
        """Extends expiration time of the lock.

        :param expire:
            New expiration time. If ``None`` - `expire` provided during
            lock initialization will be taken.
        :raises NotAcquired:
            If the lock is not held (or already expired).
        :raises NotExpirable:
            If the lock key has no TTL to extend.
        """
        if expire:
            expire = int(expire)
            if expire < 0:
                raise ValueError("A negative expire is not acceptable.")
        elif self._expire is not None:
            expire = self._expire
        else:
            raise TypeError(
                "To extend a lock 'expire' must be provided as an "
                "argument to extend() method or at initialization time."
            )

        error = self.extend_script(client=self._client, keys=(self._name, self._signal), args=(self._id, expire))
        if error == 1:
            raise NotAcquired("Lock %s is not acquired or it already expired." % self._name)
        elif error == 2:
            raise NotExpirable("Lock %s has no assigned expiration time" % self._name)
        elif error:
            raise RuntimeError("Unsupported error code %s from EXTEND script" % error)

    @staticmethod
    def _lock_renewer(lockref, interval, stop):
        """
        Renew the lock key in redis every `interval` seconds until the
        `stop` event is set or the Lock instance is garbage collected.
        """
        while not stop.wait(timeout=interval):
            loggers["refresh.thread.start"].debug("Refreshing lock")
            # lockref is a weakref so the renewal thread never keeps the
            # Lock instance alive on its own.
            lock = lockref()
            if lock is None:
                loggers["refresh.thread.stop"].debug(
                    "The lock no longer exists, stopping lock refreshing"
                )
                break
            lock.extend(expire=lock._expire)
            del lock
        loggers["refresh.thread.exit"].debug("Exit requested, stopping lock refreshing")

    def _start_lock_renewer(self):
        """
        Starts the lock refresher thread.

        :raises AlreadyStarted:
            If the renewal thread is already running.
        """
        if self._lock_renewal_thread is not None:
            raise AlreadyStarted("Lock refresh thread already started")

        loggers["refresh.start"].debug(
            "Starting thread to refresh lock every %s seconds",
            self._lock_renewal_interval
        )
        self._lock_renewal_stop = threading.Event()
        self._lock_renewal_thread = threading.Thread(
            group=None,
            target=self._lock_renewer,
            kwargs={'lockref': weakref.ref(self),
                    'interval': self._lock_renewal_interval,
                    'stop': self._lock_renewal_stop}
        )
        # Daemon thread so a forgotten lock cannot keep the process alive.
        # (Attribute assignment instead of setDaemon(), which is deprecated.)
        self._lock_renewal_thread.daemon = True
        self._lock_renewal_thread.start()

    def _stop_lock_renewer(self):
        """
        Stop the lock renewer.

        This signals the renewal thread and waits for its exit.
        """
        if self._lock_renewal_thread is None or not self._lock_renewal_thread.is_alive():
            return
        loggers["refresh.shutdown"].debug("Signalling the lock refresher to stop")
        self._lock_renewal_stop.set()
        self._lock_renewal_thread.join()
        self._lock_renewal_thread = None
        loggers["refresh.exit"].debug("Lock refresher has stopped")

    def __enter__(self):
        acquired = self.acquire(blocking=True)
        assert acquired, "Lock wasn't acquired, but blocking=True"
        return self

    def __exit__(self, exc_type=None, exc_value=None, traceback=None):
        self.release()

    def release(self):
        """Releases the lock, that was acquired with the same object.

        .. note::

            If you want to release a lock that you acquired in a different place you have two choices:

            * Use ``Lock("name", id=id_from_other_place).release()``
            * Use ``Lock("name").reset()``

        :raises NotAcquired:
            If the lock is not held (or already expired).
        """
        if self._lock_renewal_thread is not None:
            self._stop_lock_renewer()
        loggers["release"].debug("Releasing %r.", self._name)
        error = self.unlock_script(client=self._client, keys=(self._name, self._signal), args=(self._id, self._signal_expire))
        if error == 1:
            raise NotAcquired("Lock %s is not acquired or it already expired." % self._name)
        elif error:
            # Fixed: this runs the UNLOCK script, not the EXTEND script.
            raise RuntimeError("Unsupported error code %s from UNLOCK script." % error)

    def locked(self):
        """
        Return true if the lock is acquired.

        Checks that lock with same name already exists. This method returns true, even if
        lock have another id.
        """
        return self._client.exists(self._name) == 1
376
377
# Module-level handle for RESET_ALL_SCRIPT; populated lazily by
# Lock.register_scripts() the first time any Lock is created or
# reset_all() is called.
reset_all_script = None


def reset_all(redis_client):
    """
    Forcibly deletes all locks if its remains (like a crash reason). Use this with care.

    :param redis_client:
        An instance of :class:`~StrictRedis`.
    """
    # Ensure the module-level reset_all_script has been registered.
    Lock.register_scripts(redis_client)

    reset_all_script(client=redis_client)  # noqa
@@ -1,5 +1,6 b''
1 1 [bumpversion]
2 current_version = 4.25.0
2 current_version = 4.25.2
3 3 message = release: Bump version {current_version} to {new_version}
4 4
5 5 [bumpversion:file:vcsserver/VERSION]
6
@@ -1,76 +1,79 b''
1 1 c6fad7d1e61f22b1f4a4863eff207a04c27e9462 v4.0.0
2 2 77b6e243b4cc5b702c15abd6d737798edbac60dc v4.0.1
3 3 a359c072337fdd8e1e71df72cc520b8a9b042f80 v4.1.0
4 4 49aa7ed030a36b7ceba149a21e587cb5d20b4946 v4.1.1
5 5 f38ed1e1a31dce3c170b4d31585ba43471cf0705 v4.1.2
6 6 21269ba7bafd8f0c77e79dd86a31eb9bce7643d2 v4.2.0
7 7 b53930c918c25b2c8f69ceddc6641e511be27fd3 v4.2.1
8 8 6627ff4119723d8b2b60918e8b1aa49e9f055aab v4.3.0
9 9 d38f2c2b861dde6c4178923f7cf15ea58b85aa92 v4.3.1
10 10 1232313f9e6adac5ce5399c2a891dc1e72b79022 v4.4.0
11 11 cbb9f1d329ae5768379cdec55a62ebdd546c4e27 v4.4.1
12 12 24ffe44a27fcd1c5b6936144e176b9f6dd2f3a17 v4.4.2
13 13 beaeeaa440cd17471110d4621b8816506c0dff4a v4.5.0
14 14 668e5c656f61dd94595611844e1106d1361aa6a7 v4.5.1
15 15 ae0640240cb7a77e6dc8c77e64dd80d79732cb5b v4.5.2
16 16 7af06899f426813583fe60449d7517cc49c15b28 v4.6.0
17 17 8f7f4299bf341b43f94dadafa1ea73d6cea2c9ba v4.6.1
18 18 de00a831a0709ffaac57f948738ea927b97223a9 v4.7.0
19 19 57f527e0646d731768fb5e0fe742b12a35bdc63b v4.7.1
20 20 f9b09787da9845e4a105f4bffdc252099902cefb v4.7.2
21 21 0b7c790b726f08385e6ebdf4f257c905787b9244 v4.8.0
22 22 f4123e725b74d0e82fe89982ab8791a66062e2b3 v4.9.0
23 23 940bac044a0fe1ec839759df81399b50141be720 v4.9.1
24 24 582d9ebbe46bdddac4b26eacae36ee5ecabca267 v4.10.0
25 25 12fbd08d0ab57acce9c0bdccee75633cfa08d7f4 v4.10.1
26 26 78352f95021a9d128f5803fdbca7036daef5dabe v4.10.2
27 27 a47ccfb020cda78c8680e3844aaf0b82b1390f3b v4.10.3
28 28 347ae9ae544bba8deb417995285287a3b6be1611 v4.10.4
29 29 9b257ac49841f850434be0d518baca0827e6c8cc v4.10.5
30 30 e8bf26eea118694edc4ffe50c6c5aa91022bc434 v4.10.6
31 31 71fa9274ba59fb982104f0b9b3d0d024c78675f7 v4.11.0
32 32 92471577ef25636e5babe8001d47fc8e51521522 v4.11.1
33 33 0277edbcda5a8d075e1e41a95bcee6dcf21f3f77 v4.11.2
34 34 6c5ecbf0778ef870e5b23d9fad5340135b563356 v4.11.3
35 35 be788a89a939ebd63606220064bd624fa9d5c9c9 v4.11.4
36 36 15c90a04098a373ac761fab07695fd80dde3bcdb v4.11.5
37 37 77aff155b3251cc00394a49f5e8f2c99e33149a7 v4.11.6
38 38 c218a1ce5d370c2e671d42a91684b3fc2c91b81d v4.12.0
39 39 80085fb846cc948195a5c76b579ca34cbc49b59b v4.12.1
40 40 346f04fc8a18df3235defbe6e71bd552c0d46481 v4.12.2
41 41 764fdd752322f3e0c13ea00957f2d548bf4363a7 v4.12.3
42 42 b58038974a5cecbb9c100d32ad2e4c68582f1a78 v4.12.4
43 43 e1d42d92a0fec0c80b56c82f37bc7b5472613706 v4.13.0
44 44 c3ded3ff17e9bb2a47002a808984a7a946f58a1c v4.13.1
45 45 7ff81aa47b1b40cdef9dd5bcdd439f59c269db3d v4.13.2
46 46 628a08e6aaeff2c3f9e0e268e854f870e6778e53 v4.13.3
47 47 941d675f10cfa7d774815bfacfb37085751b7a0d v4.14.0
48 48 75e11d32c0be0a457198f07888e7ef650cfa6888 v4.14.1
49 49 6c6f49fda0191c4641dcd43aa0d4376b8b728d40 v4.15.0
50 50 184dea5e01c36e6474c83d3bb34719cdfec22b0d v4.15.1
51 51 a4dc3669345553582296b2ce1485229a6c6f0522 v4.15.2
52 52 d2a4a1a66f204668841da1cdccfa29083e1ef7a3 v4.16.0
53 53 744cf8f2c8f23051978fc293404bf475cc5a31f6 v4.16.1
54 54 e68aff93ce4ad11fea13420e914f7dfb05c39566 v4.16.2
55 55 647aeff9752dc1aa00796fa280d0d2ce2f511bc9 v4.17.0
56 56 5e0c2990e095bba1dc903cf0e6ef6ac035e0ccf9 v4.17.1
57 57 8a824544d95037d76d99b104b5d2363858101d53 v4.17.2
58 58 ccd806a2d9482f61bd7e8956a02a28eb24a1d46a v4.17.3
59 59 e533ca02ccc205189b7bad9f227a312212772022 v4.17.4
60 60 ba6a6dc9ecd7fd8b1dcd6eb0c4ee0210e897c426 v4.18.0
61 61 17bc818b41bcf6883b9ff0da31f01d8c2a5d0781 v4.18.1
62 62 1e9f12aa01f82c335abc9017efe94ce1c30b52ba v4.18.2
63 63 f4cc6b3c5680bdf4541d7d442fbb7086640fb547 v4.18.3
64 64 5dc0277e4f77bd4cc3042d99625bb5d3ba480c8c v4.19.0
65 65 3a815eeb1b1efa340dda9b81a8da3cf24a7d605b v4.19.1
66 66 8841da3680fba841e5a54ebccd8ca56c078f7553 v4.19.2
67 67 4b0dec7fd80b1ca38e5073e5e562a5a450f73669 v4.19.3
68 68 1485aa75ffe1b1ec48352dce7b7492d92f85e95f v4.20.0
69 69 5b740274011766ef2f73803cc196d081e1e7f1d4 v4.20.1
70 70 5a7835234e2c45e8fb8184c60f548a64b5842af8 v4.21.0
71 71 26af88343015f8b89d5a66f92bc7547c51fcf0df v4.22.0
72 72 cf54e5f700fe5dc50af1a1bdf5197c18cf52105f v4.23.0
73 73 179d989bcfe02c6227f9f6aa9236cbbe1c14c400 v4.23.1
74 74 383aee8b1652affaa26aefe336a89ee366b2b26d v4.23.2
75 75 bc1a8141cc51fc23c455ebc50c6609c810b46f8d v4.24.0
76 76 530a1c03caabc806ea1ef34605f8f67f18c70e55 v4.24.1
77 5908ae65cee1043982e1b26d7b618af5fcfebbb3 v4.25.0
78 cce8bcdf75090d5943a1e9706fe5212d7b5d1fa1 v4.25.1
79 8610c4bf846c63bbc95d3ddfb53fadaaa9c7aa42 v4.25.2
@@ -1,1103 +1,1103 b''
1 1 # Generated by pip2nix 0.8.0.dev1
2 2 # See https://github.com/johbo/pip2nix
3 3
4 4 { pkgs, fetchurl, fetchgit, fetchhg }:
5 5
6 6 self: super: {
7 7 "atomicwrites" = super.buildPythonPackage {
8 8 name = "atomicwrites-1.3.0";
9 9 doCheck = false;
10 10 src = fetchurl {
11 11 url = "https://files.pythonhosted.org/packages/ec/0f/cd484ac8820fed363b374af30049adc8fd13065720fd4f4c6be8a2309da7/atomicwrites-1.3.0.tar.gz";
12 12 sha256 = "19ngcscdf3jsqmpcxn6zl5b6anmsajb6izp1smcd1n02midl9abm";
13 13 };
14 14 meta = {
15 15 license = [ pkgs.lib.licenses.mit ];
16 16 };
17 17 };
18 18 "attrs" = super.buildPythonPackage {
19 19 name = "attrs-19.3.0";
20 20 doCheck = false;
21 21 src = fetchurl {
22 22 url = "https://files.pythonhosted.org/packages/98/c3/2c227e66b5e896e15ccdae2e00bbc69aa46e9a8ce8869cc5fa96310bf612/attrs-19.3.0.tar.gz";
23 23 sha256 = "0wky4h28n7xnr6xv69p9z6kv8bzn50d10c3drmd9ds8gawbcxdzp";
24 24 };
25 25 meta = {
26 26 license = [ pkgs.lib.licenses.mit ];
27 27 };
28 28 };
29 29 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
30 30 name = "backports.shutil-get-terminal-size-1.0.0";
31 31 doCheck = false;
32 32 src = fetchurl {
33 33 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
34 34 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
35 35 };
36 36 meta = {
37 37 license = [ pkgs.lib.licenses.mit ];
38 38 };
39 39 };
40 40 "beautifulsoup4" = super.buildPythonPackage {
41 41 name = "beautifulsoup4-4.6.3";
42 42 doCheck = false;
43 43 src = fetchurl {
44 44 url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz";
45 45 sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h";
46 46 };
47 47 meta = {
48 48 license = [ pkgs.lib.licenses.mit ];
49 49 };
50 50 };
51 51 "cffi" = super.buildPythonPackage {
52 52 name = "cffi-1.12.3";
53 53 doCheck = false;
54 54 propagatedBuildInputs = [
55 55 self."pycparser"
56 56 ];
57 57 src = fetchurl {
58 58 url = "https://files.pythonhosted.org/packages/93/1a/ab8c62b5838722f29f3daffcc8d4bd61844aa9b5f437341cc890ceee483b/cffi-1.12.3.tar.gz";
59 59 sha256 = "0x075521fxwv0mfp4cqzk7lvmw4n94bjw601qkcv314z5s182704";
60 60 };
61 61 meta = {
62 62 license = [ pkgs.lib.licenses.mit ];
63 63 };
64 64 };
65 65 "configobj" = super.buildPythonPackage {
66 66 name = "configobj-5.0.6";
67 67 doCheck = false;
68 68 propagatedBuildInputs = [
69 69 self."six"
70 70 ];
71 71 src = fetchurl {
72 72 url = "https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626";
73 73 sha256 = "0kqfrdfr14mw8yd8qwq14dv2xghpkjmd3yjsy8dfcbvpcc17xnxp";
74 74 };
75 75 meta = {
76 76 license = [ pkgs.lib.licenses.bsdOriginal ];
77 77 };
78 78 };
79 79 "configparser" = super.buildPythonPackage {
80 80 name = "configparser-4.0.2";
81 81 doCheck = false;
82 82 src = fetchurl {
83 83 url = "https://files.pythonhosted.org/packages/16/4f/48975536bd488d3a272549eb795ac4a13a5f7fcdc8995def77fbef3532ee/configparser-4.0.2.tar.gz";
84 84 sha256 = "1priacxym85yjcf68hh38w55nqswaxp71ryjyfdk222kg9l85ln7";
85 85 };
86 86 meta = {
87 87 license = [ pkgs.lib.licenses.mit ];
88 88 };
89 89 };
90 90 "contextlib2" = super.buildPythonPackage {
91 91 name = "contextlib2-0.6.0.post1";
92 92 doCheck = false;
93 93 src = fetchurl {
94 94 url = "https://files.pythonhosted.org/packages/02/54/669207eb72e3d8ae8b38aa1f0703ee87a0e9f88f30d3c0a47bebdb6de242/contextlib2-0.6.0.post1.tar.gz";
95 95 sha256 = "0bhnr2ac7wy5l85ji909gyljyk85n92w8pdvslmrvc8qih4r1x01";
96 96 };
97 97 meta = {
98 98 license = [ pkgs.lib.licenses.psfl ];
99 99 };
100 100 };
101 101 "cov-core" = super.buildPythonPackage {
102 102 name = "cov-core-1.15.0";
103 103 doCheck = false;
104 104 propagatedBuildInputs = [
105 105 self."coverage"
106 106 ];
107 107 src = fetchurl {
108 108 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
109 109 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
110 110 };
111 111 meta = {
112 112 license = [ pkgs.lib.licenses.mit ];
113 113 };
114 114 };
115 115 "coverage" = super.buildPythonPackage {
116 116 name = "coverage-4.5.4";
117 117 doCheck = false;
118 118 src = fetchurl {
119 119 url = "https://files.pythonhosted.org/packages/85/d5/818d0e603685c4a613d56f065a721013e942088047ff1027a632948bdae6/coverage-4.5.4.tar.gz";
120 120 sha256 = "0p0j4di6h8k6ica7jwwj09azdcg4ycxq60i9qsskmsg94cd9yzg0";
121 121 };
122 122 meta = {
123 123 license = [ pkgs.lib.licenses.asl20 ];
124 124 };
125 125 };
126 126 "decorator" = super.buildPythonPackage {
127 127 name = "decorator-4.1.2";
128 128 doCheck = false;
129 129 src = fetchurl {
130 130 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
131 131 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
132 132 };
133 133 meta = {
134 134 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
135 135 };
136 136 };
137 137 "dogpile.cache" = super.buildPythonPackage {
138 138 name = "dogpile.cache-0.9.0";
139 139 doCheck = false;
140 140 propagatedBuildInputs = [
141 141 self."decorator"
142 142 ];
143 143 src = fetchurl {
144 144 url = "https://files.pythonhosted.org/packages/ac/6a/9ac405686a94b7f009a20a50070a5786b0e1aedc707b88d40d0c4b51a82e/dogpile.cache-0.9.0.tar.gz";
145 145 sha256 = "0sr1fn6b4k5bh0cscd9yi8csqxvj4ngzildav58x5p694mc86j5k";
146 146 };
147 147 meta = {
148 148 license = [ pkgs.lib.licenses.bsdOriginal ];
149 149 };
150 150 };
151 151 "dogpile.core" = super.buildPythonPackage {
152 152 name = "dogpile.core-0.4.1";
153 153 doCheck = false;
154 154 src = fetchurl {
155 155 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
156 156 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
157 157 };
158 158 meta = {
159 159 license = [ pkgs.lib.licenses.bsdOriginal ];
160 160 };
161 161 };
162 162 "dulwich" = super.buildPythonPackage {
163 163 name = "dulwich-0.13.0";
164 164 doCheck = false;
165 165 src = fetchurl {
166 166 url = "https://files.pythonhosted.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
167 167 sha256 = "0f1jwvrh549c4rgavkn3wizrch904s73s4fmrxykxy9cw8s57lwf";
168 168 };
169 169 meta = {
170 170 license = [ pkgs.lib.licenses.gpl2Plus ];
171 171 };
172 172 };
173 173 "enum34" = super.buildPythonPackage {
174 174 name = "enum34-1.1.10";
175 175 doCheck = false;
176 176 src = fetchurl {
177 177 url = "https://files.pythonhosted.org/packages/11/c4/2da1f4952ba476677a42f25cd32ab8aaf0e1c0d0e00b89822b835c7e654c/enum34-1.1.10.tar.gz";
178 178 sha256 = "0j7ji699fwswm4vg6w1v07fkbf8dkzdm6gfh88jvs5nqgr3sgrnc";
179 179 };
180 180 meta = {
181 181 license = [ pkgs.lib.licenses.bsdOriginal ];
182 182 };
183 183 };
184 184 "funcsigs" = super.buildPythonPackage {
185 185 name = "funcsigs-1.0.2";
186 186 doCheck = false;
187 187 src = fetchurl {
188 188 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
189 189 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
190 190 };
191 191 meta = {
192 192 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
193 193 };
194 194 };
195 195 "gevent" = super.buildPythonPackage {
196 196 name = "gevent-1.5.0";
197 197 doCheck = false;
198 198 propagatedBuildInputs = [
199 199 self."greenlet"
200 200 ];
201 201 src = fetchurl {
202 202 url = "https://files.pythonhosted.org/packages/5a/79/2c63d385d017b5dd7d70983a463dfd25befae70c824fedb857df6e72eff2/gevent-1.5.0.tar.gz";
203 203 sha256 = "0aac3d4vhv5n4rsb6cqzq0d1xx9immqz4fmpddw35yxkwdc450dj";
204 204 };
205 205 meta = {
206 206 license = [ pkgs.lib.licenses.mit ];
207 207 };
208 208 };
209 209 "gprof2dot" = super.buildPythonPackage {
210 210 name = "gprof2dot-2017.9.19";
211 211 doCheck = false;
212 212 src = fetchurl {
213 213 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
214 214 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
215 215 };
216 216 meta = {
217 217 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
218 218 };
219 219 };
220 220 "greenlet" = super.buildPythonPackage {
221 221 name = "greenlet-0.4.15";
222 222 doCheck = false;
223 223 src = fetchurl {
224 224 url = "https://files.pythonhosted.org/packages/f8/e8/b30ae23b45f69aa3f024b46064c0ac8e5fcb4f22ace0dca8d6f9c8bbe5e7/greenlet-0.4.15.tar.gz";
225 225 sha256 = "1g4g1wwc472ds89zmqlpyan3fbnzpa8qm48z3z1y6mlk44z485ll";
226 226 };
227 227 meta = {
228 228 license = [ pkgs.lib.licenses.mit ];
229 229 };
230 230 };
231 231 "gunicorn" = super.buildPythonPackage {
232 232 name = "gunicorn-19.9.0";
233 233 doCheck = false;
234 234 src = fetchurl {
235 235 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
236 236 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
237 237 };
238 238 meta = {
239 239 license = [ pkgs.lib.licenses.mit ];
240 240 };
241 241 };
242 242 "hg-evolve" = super.buildPythonPackage {
243 243 name = "hg-evolve-9.1.0";
244 244 doCheck = false;
245 245 src = fetchurl {
246 246 url = "https://files.pythonhosted.org/packages/20/36/5a6655975aa0c663be91098d31a0b24841acad44fe896aa2bdee77c6b883/hg-evolve-9.1.0.tar.gz";
247 247 sha256 = "1mna81cmzxxn7s2nwz3g1xgdjlcc1axkvfmwg7gjqghwn3pdraps";
248 248 };
249 249 meta = {
250 250 license = [ { fullName = "GPLv2+"; } ];
251 251 };
252 252 };
253 253 "hgsubversion" = super.buildPythonPackage {
254 254 name = "hgsubversion-1.9.3";
255 255 doCheck = false;
256 256 propagatedBuildInputs = [
257 257 self."mercurial"
258 258 self."subvertpy"
259 259 ];
260 260 src = fetchurl {
261 261 url = "https://files.pythonhosted.org/packages/a3/53/6d205e641f3e09abcf1ddaed66e5e4b20da22d0145566d440a02c9e35f0d/hgsubversion-1.9.3.tar.gz";
262 262 sha256 = "0nymcjlch8c4zjbncrs30p2nrbylsf25g3h6mr0zzzxr141h3sig";
263 263 };
264 264 meta = {
265 265 license = [ pkgs.lib.licenses.gpl1 ];
266 266 };
267 267 };
268 268 "hupper" = super.buildPythonPackage {
269 269 name = "hupper-1.10.2";
270 270 doCheck = false;
271 271 src = fetchurl {
272 272 url = "https://files.pythonhosted.org/packages/41/24/ea90fef04706e54bd1635c05c50dc9cf87cda543c59303a03e7aa7dda0ce/hupper-1.10.2.tar.gz";
273 273 sha256 = "0am0p6g5cz6xmcaf04xq8q6dzdd9qz0phj6gcmpsckf2mcyza61q";
274 274 };
275 275 meta = {
276 276 license = [ pkgs.lib.licenses.mit ];
277 277 };
278 278 };
279 279 "importlib-metadata" = super.buildPythonPackage {
280 280 name = "importlib-metadata-1.6.0";
281 281 doCheck = false;
282 282 propagatedBuildInputs = [
283 283 self."zipp"
284 284 self."pathlib2"
285 285 self."contextlib2"
286 286 self."configparser"
287 287 ];
288 288 src = fetchurl {
289 289 url = "https://files.pythonhosted.org/packages/b4/1b/baab42e3cd64c9d5caac25a9d6c054f8324cdc38975a44d600569f1f7158/importlib_metadata-1.6.0.tar.gz";
290 290 sha256 = "07icyggasn38yv2swdrd8z6i0plazmc9adavsdkbqqj91j53ll9l";
291 291 };
292 292 meta = {
293 293 license = [ pkgs.lib.licenses.asl20 ];
294 294 };
295 295 };
296 296 "ipdb" = super.buildPythonPackage {
297 297 name = "ipdb-0.13.2";
298 298 doCheck = false;
299 299 propagatedBuildInputs = [
300 300 self."setuptools"
301 301 self."ipython"
302 302 ];
303 303 src = fetchurl {
304 304 url = "https://files.pythonhosted.org/packages/2c/bb/a3e1a441719ebd75c6dac8170d3ddba884b7ee8a5c0f9aefa7297386627a/ipdb-0.13.2.tar.gz";
305 305 sha256 = "0jcd849rx30y3wcgzsqbn06v0yjlzvb9x3076q0yxpycdwm1ryvp";
306 306 };
307 307 meta = {
308 308 license = [ pkgs.lib.licenses.bsdOriginal ];
309 309 };
310 310 };
311 311 "ipython" = super.buildPythonPackage {
312 312 name = "ipython-5.1.0";
313 313 doCheck = false;
314 314 propagatedBuildInputs = [
315 315 self."setuptools"
316 316 self."decorator"
317 317 self."pickleshare"
318 318 self."simplegeneric"
319 319 self."traitlets"
320 320 self."prompt-toolkit"
321 321 self."pygments"
322 322 self."pexpect"
323 323 self."backports.shutil-get-terminal-size"
324 324 self."pathlib2"
325 325 self."pexpect"
326 326 ];
327 327 src = fetchurl {
328 328 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
329 329 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
330 330 };
331 331 meta = {
332 332 license = [ pkgs.lib.licenses.bsdOriginal ];
333 333 };
334 334 };
335 335 "ipython-genutils" = super.buildPythonPackage {
336 336 name = "ipython-genutils-0.2.0";
337 337 doCheck = false;
338 338 src = fetchurl {
339 339 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
340 340 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
341 341 };
342 342 meta = {
343 343 license = [ pkgs.lib.licenses.bsdOriginal ];
344 344 };
345 345 };
346 346 "mako" = super.buildPythonPackage {
347 347 name = "mako-1.1.0";
348 348 doCheck = false;
349 349 propagatedBuildInputs = [
350 350 self."markupsafe"
351 351 ];
352 352 src = fetchurl {
353 353 url = "https://files.pythonhosted.org/packages/b0/3c/8dcd6883d009f7cae0f3157fb53e9afb05a0d3d33b3db1268ec2e6f4a56b/Mako-1.1.0.tar.gz";
354 354 sha256 = "0jqa3qfpykyn4fmkn0kh6043sfls7br8i2bsdbccazcvk9cijsd3";
355 355 };
356 356 meta = {
357 357 license = [ pkgs.lib.licenses.mit ];
358 358 };
359 359 };
360 360 "markupsafe" = super.buildPythonPackage {
361 361 name = "markupsafe-1.1.1";
362 362 doCheck = false;
363 363 src = fetchurl {
364 364 url = "https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz";
365 365 sha256 = "0sqipg4fk7xbixqd8kq6rlkxj664d157bdwbh93farcphf92x1r9";
366 366 };
367 367 meta = {
368 368 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd3 ];
369 369 };
370 370 };
371 371 "mercurial" = super.buildPythonPackage {
372 372 name = "mercurial-5.1.1";
373 373 doCheck = false;
374 374 src = fetchurl {
375 375 url = "https://files.pythonhosted.org/packages/22/39/e1a95f6048aa0785b82f5faad8281ae7320894a635cb4a57e19479639c92/mercurial-5.1.1.tar.gz";
376 376 sha256 = "17z42rfjdkrks4grzgac66nfh285zf1pwxd2zwx1p71pw2jqpz1m";
377 377 };
378 378 meta = {
379 379 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
380 380 };
381 381 };
382 382 "mock" = super.buildPythonPackage {
383 383 name = "mock-3.0.5";
384 384 doCheck = false;
385 385 propagatedBuildInputs = [
386 386 self."six"
387 387 self."funcsigs"
388 388 ];
389 389 src = fetchurl {
390 390 url = "https://files.pythonhosted.org/packages/2e/ab/4fe657d78b270aa6a32f027849513b829b41b0f28d9d8d7f8c3d29ea559a/mock-3.0.5.tar.gz";
391 391 sha256 = "1hrp6j0yrx2xzylfv02qa8kph661m6yq4p0mc8fnimch9j4psrc3";
392 392 };
393 393 meta = {
394 394 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "OSI Approved :: BSD License"; } ];
395 395 };
396 396 };
397 397 "more-itertools" = super.buildPythonPackage {
398 398 name = "more-itertools-5.0.0";
399 399 doCheck = false;
400 400 propagatedBuildInputs = [
401 401 self."six"
402 402 ];
403 403 src = fetchurl {
404 404 url = "https://files.pythonhosted.org/packages/dd/26/30fc0d541d9fdf55faf5ba4b0fd68f81d5bd2447579224820ad525934178/more-itertools-5.0.0.tar.gz";
405 405 sha256 = "1r12cm6mcdwdzz7d47a6g4l437xsvapdlgyhqay3i2nrlv03da9q";
406 406 };
407 407 meta = {
408 408 license = [ pkgs.lib.licenses.mit ];
409 409 };
410 410 };
411 411 "msgpack-python" = super.buildPythonPackage {
412 412 name = "msgpack-python-0.5.6";
413 413 doCheck = false;
414 414 src = fetchurl {
415 415 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
416 416 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
417 417 };
418 418 meta = {
419 419 license = [ pkgs.lib.licenses.asl20 ];
420 420 };
421 421 };
422 422 "packaging" = super.buildPythonPackage {
423 423 name = "packaging-20.3";
424 424 doCheck = false;
425 425 propagatedBuildInputs = [
426 426 self."pyparsing"
427 427 self."six"
428 428 ];
429 429 src = fetchurl {
430 430 url = "https://files.pythonhosted.org/packages/65/37/83e3f492eb52d771e2820e88105f605335553fe10422cba9d256faeb1702/packaging-20.3.tar.gz";
431 431 sha256 = "18xpablq278janh03bai9xd4kz9b0yfp6vflazn725ns9x3jna9w";
432 432 };
433 433 meta = {
434 434 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
435 435 };
436 436 };
437 437 "pastedeploy" = super.buildPythonPackage {
438 438 name = "pastedeploy-2.1.0";
439 439 doCheck = false;
440 440 src = fetchurl {
441 441 url = "https://files.pythonhosted.org/packages/c4/e9/972a1c20318b3ae9edcab11a6cef64308fbae5d0d45ab52c6f8b2b8f35b8/PasteDeploy-2.1.0.tar.gz";
442 442 sha256 = "16qsq5y6mryslmbp5pn35x4z8z3ndp5rpgl42h226879nrw9hmg7";
443 443 };
444 444 meta = {
445 445 license = [ pkgs.lib.licenses.mit ];
446 446 };
447 447 };
448 448 "pathlib2" = super.buildPythonPackage {
449 449 name = "pathlib2-2.3.5";
450 450 doCheck = false;
451 451 propagatedBuildInputs = [
452 452 self."six"
453 453 self."scandir"
454 454 ];
455 455 src = fetchurl {
456 456 url = "https://files.pythonhosted.org/packages/94/d8/65c86584e7e97ef824a1845c72bbe95d79f5b306364fa778a3c3e401b309/pathlib2-2.3.5.tar.gz";
457 457 sha256 = "0s4qa8c082fdkb17izh4mfgwrjd1n5pya18wvrbwqdvvb5xs9nbc";
458 458 };
459 459 meta = {
460 460 license = [ pkgs.lib.licenses.mit ];
461 461 };
462 462 };
463 463 "pexpect" = super.buildPythonPackage {
464 464 name = "pexpect-4.8.0";
465 465 doCheck = false;
466 466 propagatedBuildInputs = [
467 467 self."ptyprocess"
468 468 ];
469 469 src = fetchurl {
470 470 url = "https://files.pythonhosted.org/packages/e5/9b/ff402e0e930e70467a7178abb7c128709a30dfb22d8777c043e501bc1b10/pexpect-4.8.0.tar.gz";
471 471 sha256 = "032cg337h8awydgypz6f4wx848lw8dyrj4zy988x0lyib4ws8rgw";
472 472 };
473 473 meta = {
474 474 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
475 475 };
476 476 };
477 477 "pickleshare" = super.buildPythonPackage {
478 478 name = "pickleshare-0.7.5";
479 479 doCheck = false;
480 480 propagatedBuildInputs = [
481 481 self."pathlib2"
482 482 ];
483 483 src = fetchurl {
484 484 url = "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz";
485 485 sha256 = "1jmghg3c53yp1i8cm6pcrm280ayi8621rwyav9fac7awjr3kss47";
486 486 };
487 487 meta = {
488 488 license = [ pkgs.lib.licenses.mit ];
489 489 };
490 490 };
491 491 "plaster" = super.buildPythonPackage {
492 492 name = "plaster-1.0";
493 493 doCheck = false;
494 494 propagatedBuildInputs = [
495 495 self."setuptools"
496 496 ];
497 497 src = fetchurl {
498 498 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
499 499 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
500 500 };
501 501 meta = {
502 502 license = [ pkgs.lib.licenses.mit ];
503 503 };
504 504 };
505 505 "plaster-pastedeploy" = super.buildPythonPackage {
506 506 name = "plaster-pastedeploy-0.7";
507 507 doCheck = false;
508 508 propagatedBuildInputs = [
509 509 self."pastedeploy"
510 510 self."plaster"
511 511 ];
512 512 src = fetchurl {
513 513 url = "https://files.pythonhosted.org/packages/99/69/2d3bc33091249266a1bd3cf24499e40ab31d54dffb4a7d76fe647950b98c/plaster_pastedeploy-0.7.tar.gz";
514 514 sha256 = "1zg7gcsvc1kzay1ry5p699rg2qavfsxqwl17mqxzr0gzw6j9679r";
515 515 };
516 516 meta = {
517 517 license = [ pkgs.lib.licenses.mit ];
518 518 };
519 519 };
520 520 "pluggy" = super.buildPythonPackage {
521 521 name = "pluggy-0.13.1";
522 522 doCheck = false;
523 523 propagatedBuildInputs = [
524 524 self."importlib-metadata"
525 525 ];
526 526 src = fetchurl {
527 527 url = "https://files.pythonhosted.org/packages/f8/04/7a8542bed4b16a65c2714bf76cf5a0b026157da7f75e87cc88774aa10b14/pluggy-0.13.1.tar.gz";
528 528 sha256 = "1c35qyhvy27q9ih9n899f3h4sdnpgq027dbiilly2qb5cvgarchm";
529 529 };
530 530 meta = {
531 531 license = [ pkgs.lib.licenses.mit ];
532 532 };
533 533 };
534 534 "prompt-toolkit" = super.buildPythonPackage {
535 535 name = "prompt-toolkit-1.0.18";
536 536 doCheck = false;
537 537 propagatedBuildInputs = [
538 538 self."six"
539 539 self."wcwidth"
540 540 ];
541 541 src = fetchurl {
542 542 url = "https://files.pythonhosted.org/packages/c5/64/c170e5b1913b540bf0c8ab7676b21fdd1d25b65ddeb10025c6ca43cccd4c/prompt_toolkit-1.0.18.tar.gz";
543 543 sha256 = "09h1153wgr5x2ny7ds0w2m81n3bb9j8hjb8sjfnrg506r01clkyx";
544 544 };
545 545 meta = {
546 546 license = [ pkgs.lib.licenses.bsdOriginal ];
547 547 };
548 548 };
549 549 "psutil" = super.buildPythonPackage {
550 550 name = "psutil-5.7.0";
551 551 doCheck = false;
552 552 src = fetchurl {
553 553 url = "https://files.pythonhosted.org/packages/c4/b8/3512f0e93e0db23a71d82485ba256071ebef99b227351f0f5540f744af41/psutil-5.7.0.tar.gz";
554 554 sha256 = "03jykdi3dgf1cdal9bv4fq9zjvzj9l9bs99gi5ar81sdl5nc2pk8";
555 555 };
556 556 meta = {
557 557 license = [ pkgs.lib.licenses.bsdOriginal ];
558 558 };
559 559 };
560 560 "ptyprocess" = super.buildPythonPackage {
561 561 name = "ptyprocess-0.6.0";
562 562 doCheck = false;
563 563 src = fetchurl {
564 564 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
565 565 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
566 566 };
567 567 meta = {
568 568 license = [ ];
569 569 };
570 570 };
571 571 "py" = super.buildPythonPackage {
572 572 name = "py-1.8.0";
573 573 doCheck = false;
574 574 src = fetchurl {
575 575 url = "https://files.pythonhosted.org/packages/f1/5a/87ca5909f400a2de1561f1648883af74345fe96349f34f737cdfc94eba8c/py-1.8.0.tar.gz";
576 576 sha256 = "0lsy1gajva083pzc7csj1cvbmminb7b4l6a0prdzyb3fd829nqyw";
577 577 };
578 578 meta = {
579 579 license = [ pkgs.lib.licenses.mit ];
580 580 };
581 581 };
582 582 "pycparser" = super.buildPythonPackage {
583 583 name = "pycparser-2.20";
584 584 doCheck = false;
585 585 src = fetchurl {
586 586 url = "https://files.pythonhosted.org/packages/0f/86/e19659527668d70be91d0369aeaa055b4eb396b0f387a4f92293a20035bd/pycparser-2.20.tar.gz";
587 587 sha256 = "1w0m3xvlrzq4lkbvd1ngfm8mdw64r1yxy6n7djlw6qj5d0km6ird";
588 588 };
589 589 meta = {
590 590 license = [ pkgs.lib.licenses.bsdOriginal ];
591 591 };
592 592 };
593 593 "pygit2" = super.buildPythonPackage {
594 594 name = "pygit2-0.28.2";
595 595 doCheck = false;
596 596 propagatedBuildInputs = [
597 597 self."cffi"
598 598 self."six"
599 599 ];
600 600 src = fetchurl {
601 601 url = "https://files.pythonhosted.org/packages/4c/64/88c2a4eb2d22ca1982b364f41ff5da42d61de791d7eb68140e7f8f7eb721/pygit2-0.28.2.tar.gz";
602 602 sha256 = "11kzj5mjkspvplnpdb6bj8dcj6rgmkk986k8hjcklyg5yaxkz32d";
603 603 };
604 604 meta = {
605 605 license = [ { fullName = "GPLv2 with linking exception"; } ];
606 606 };
607 607 };
608 608 "pygments" = super.buildPythonPackage {
609 609 name = "pygments-2.4.2";
610 610 doCheck = false;
611 611 src = fetchurl {
612 612 url = "https://files.pythonhosted.org/packages/7e/ae/26808275fc76bf2832deb10d3a3ed3107bc4de01b85dcccbe525f2cd6d1e/Pygments-2.4.2.tar.gz";
613 613 sha256 = "15v2sqm5g12bqa0c7wikfh9ck2nl97ayizy1hpqhmws5gqalq748";
614 614 };
615 615 meta = {
616 616 license = [ pkgs.lib.licenses.bsdOriginal ];
617 617 };
618 618 };
619 619 "pyparsing" = super.buildPythonPackage {
620 620 name = "pyparsing-2.4.7";
621 621 doCheck = false;
622 622 src = fetchurl {
623 623 url = "https://files.pythonhosted.org/packages/c1/47/dfc9c342c9842bbe0036c7f763d2d6686bcf5eb1808ba3e170afdb282210/pyparsing-2.4.7.tar.gz";
624 624 sha256 = "1hgc8qrbq1ymxbwfbjghv01fm3fbpjwpjwi0bcailxxzhf3yq0y2";
625 625 };
626 626 meta = {
627 627 license = [ pkgs.lib.licenses.mit ];
628 628 };
629 629 };
630 630 "pyramid" = super.buildPythonPackage {
631 631 name = "pyramid-1.10.4";
632 632 doCheck = false;
633 633 propagatedBuildInputs = [
634 634 self."hupper"
635 635 self."plaster"
636 636 self."plaster-pastedeploy"
637 637 self."setuptools"
638 638 self."translationstring"
639 639 self."venusian"
640 640 self."webob"
641 641 self."zope.deprecation"
642 642 self."zope.interface"
643 643 self."repoze.lru"
644 644 ];
645 645 src = fetchurl {
646 646 url = "https://files.pythonhosted.org/packages/c2/43/1ae701c9c6bb3a434358e678a5e72c96e8aa55cf4cb1d2fa2041b5dd38b7/pyramid-1.10.4.tar.gz";
647 647 sha256 = "0rkxs1ajycg2zh1c94xlmls56mx5m161sn8112skj0amza6cn36q";
648 648 };
649 649 meta = {
650 650 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
651 651 };
652 652 };
653 653 "pyramid-mako" = super.buildPythonPackage {
654 654 name = "pyramid-mako-1.1.0";
655 655 doCheck = false;
656 656 propagatedBuildInputs = [
657 657 self."pyramid"
658 658 self."mako"
659 659 ];
660 660 src = fetchurl {
661 661 url = "https://files.pythonhosted.org/packages/63/7b/5e2af68f675071a6bad148c1c393928f0ef5fcd94e95cbf53b89d6471a83/pyramid_mako-1.1.0.tar.gz";
662 662 sha256 = "1qj0m091mnii86j2q1d82yir22nha361rvhclvg3s70z8iiwhrh0";
663 663 };
664 664 meta = {
665 665 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
666 666 };
667 667 };
668 668 "pytest" = super.buildPythonPackage {
669 669 name = "pytest-4.6.5";
670 670 doCheck = false;
671 671 propagatedBuildInputs = [
672 672 self."py"
673 673 self."six"
674 674 self."packaging"
675 675 self."attrs"
676 676 self."atomicwrites"
677 677 self."pluggy"
678 678 self."importlib-metadata"
679 679 self."wcwidth"
680 680 self."funcsigs"
681 681 self."pathlib2"
682 682 self."more-itertools"
683 683 ];
684 684 src = fetchurl {
685 685 url = "https://files.pythonhosted.org/packages/2a/c6/1d1f32f6a5009900521b12e6560fb6b7245b0d4bc3fb771acd63d10e30e1/pytest-4.6.5.tar.gz";
686 686 sha256 = "0iykwwfp4h181nd7rsihh2120b0rkawlw7rvbl19sgfspncr3hwg";
687 687 };
688 688 meta = {
689 689 license = [ pkgs.lib.licenses.mit ];
690 690 };
691 691 };
692 692 "pytest-cov" = super.buildPythonPackage {
693 693 name = "pytest-cov-2.7.1";
694 694 doCheck = false;
695 695 propagatedBuildInputs = [
696 696 self."pytest"
697 697 self."coverage"
698 698 ];
699 699 src = fetchurl {
700 700 url = "https://files.pythonhosted.org/packages/bb/0f/3db7ff86801883b21d5353b258c994b1b8e2abbc804e2273b8d0fd19004b/pytest-cov-2.7.1.tar.gz";
701 701 sha256 = "0filvmmyqm715azsl09ql8hy2x7h286n6d8z5x42a1wpvvys83p0";
702 702 };
703 703 meta = {
704 704 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
705 705 };
706 706 };
707 707 "pytest-profiling" = super.buildPythonPackage {
708 708 name = "pytest-profiling-1.7.0";
709 709 doCheck = false;
710 710 propagatedBuildInputs = [
711 711 self."six"
712 712 self."pytest"
713 713 self."gprof2dot"
714 714 ];
715 715 src = fetchurl {
716 716 url = "https://files.pythonhosted.org/packages/39/70/22a4b33739f07f1732a63e33bbfbf68e0fa58cfba9d200e76d01921eddbf/pytest-profiling-1.7.0.tar.gz";
717 717 sha256 = "0abz9gi26jpcfdzgsvwad91555lpgdc8kbymicmms8k2fqa8z4wk";
718 718 };
719 719 meta = {
720 720 license = [ pkgs.lib.licenses.mit ];
721 721 };
722 722 };
723 723 "pytest-runner" = super.buildPythonPackage {
724 724 name = "pytest-runner-5.1";
725 725 doCheck = false;
726 726 src = fetchurl {
727 727 url = "https://files.pythonhosted.org/packages/d9/6d/4b41a74b31720e25abd4799be72d54811da4b4d0233e38b75864dcc1f7ad/pytest-runner-5.1.tar.gz";
728 728 sha256 = "0ykfcnpp8c22winj63qzc07l5axwlc9ikl8vn05sc32gv3417815";
729 729 };
730 730 meta = {
731 731 license = [ pkgs.lib.licenses.mit ];
732 732 };
733 733 };
734 734 "pytest-sugar" = super.buildPythonPackage {
735 735 name = "pytest-sugar-0.9.2";
736 736 doCheck = false;
737 737 propagatedBuildInputs = [
738 738 self."pytest"
739 739 self."termcolor"
740 740 self."packaging"
741 741 ];
742 742 src = fetchurl {
743 743 url = "https://files.pythonhosted.org/packages/55/59/f02f78d1c80f7e03e23177f60624c8106d4f23d124c921df103f65692464/pytest-sugar-0.9.2.tar.gz";
744 744 sha256 = "1asq7yc4g8bx2sn7yy974mhc9ywvaihasjab4inkirdwn9s7mn7w";
745 745 };
746 746 meta = {
747 747 license = [ pkgs.lib.licenses.bsdOriginal ];
748 748 };
749 749 };
750 750 "pytest-timeout" = super.buildPythonPackage {
751 751 name = "pytest-timeout-1.3.3";
752 752 doCheck = false;
753 753 propagatedBuildInputs = [
754 754 self."pytest"
755 755 ];
756 756 src = fetchurl {
757 757 url = "https://files.pythonhosted.org/packages/13/48/7a166eaa29c1dca6cc253e3ba5773ff2e4aa4f567c1ea3905808e95ac5c1/pytest-timeout-1.3.3.tar.gz";
758 758 sha256 = "1cczcjhw4xx5sjkhxlhc5c1bkr7x6fcyx12wrnvwfckshdvblc2a";
759 759 };
760 760 meta = {
761 761 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
762 762 };
763 763 };
764 764 "redis" = super.buildPythonPackage {
765 765 name = "redis-3.5.3";
766 766 doCheck = false;
767 767 src = fetchurl {
768 768 url = "https://files.pythonhosted.org/packages/b3/17/1e567ff78c83854e16b98694411fe6e08c3426af866ad11397cddceb80d3/redis-3.5.3.tar.gz";
769 769 sha256 = "0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2";
770 770 };
771 771 meta = {
772 772 license = [ pkgs.lib.licenses.mit ];
773 773 };
774 774 };
775 775 "repoze.lru" = super.buildPythonPackage {
776 776 name = "repoze.lru-0.7";
777 777 doCheck = false;
778 778 src = fetchurl {
779 779 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
780 780 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
781 781 };
782 782 meta = {
783 783 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
784 784 };
785 785 };
786 786 "rhodecode-vcsserver" = super.buildPythonPackage {
787 name = "rhodecode-vcsserver-4.25.0";
787 name = "rhodecode-vcsserver-4.25.2";
788 788 buildInputs = [
789 789 self."pytest"
790 790 self."py"
791 791 self."pytest-cov"
792 792 self."pytest-sugar"
793 793 self."pytest-runner"
794 794 self."pytest-profiling"
795 795 self."pytest-timeout"
796 796 self."gprof2dot"
797 797 self."mock"
798 798 self."cov-core"
799 799 self."coverage"
800 800 self."webtest"
801 801 self."beautifulsoup4"
802 802 self."configobj"
803 803 ];
804 804 doCheck = true;
805 805 propagatedBuildInputs = [
806 806 self."configobj"
807 807 self."dogpile.cache"
808 808 self."dogpile.core"
809 809 self."decorator"
810 810 self."dulwich"
811 811 self."hgsubversion"
812 812 self."hg-evolve"
813 813 self."mako"
814 814 self."markupsafe"
815 815 self."mercurial"
816 816 self."msgpack-python"
817 817 self."pastedeploy"
818 818 self."pyramid"
819 819 self."pyramid-mako"
820 820 self."pygit2"
821 821 self."repoze.lru"
822 822 self."redis"
823 823 self."simplejson"
824 824 self."subprocess32"
825 825 self."subvertpy"
826 826 self."six"
827 827 self."translationstring"
828 828 self."webob"
829 829 self."zope.deprecation"
830 830 self."zope.interface"
831 831 self."gevent"
832 832 self."greenlet"
833 833 self."gunicorn"
834 834 self."waitress"
835 835 self."ipdb"
836 836 self."ipython"
837 837 self."pytest"
838 838 self."py"
839 839 self."pytest-cov"
840 840 self."pytest-sugar"
841 841 self."pytest-runner"
842 842 self."pytest-profiling"
843 843 self."pytest-timeout"
844 844 self."gprof2dot"
845 845 self."mock"
846 846 self."cov-core"
847 847 self."coverage"
848 848 self."webtest"
849 849 self."beautifulsoup4"
850 850 ];
851 851 src = ./.;
852 852 meta = {
853 853 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
854 854 };
855 855 };
856 856 "scandir" = super.buildPythonPackage {
857 857 name = "scandir-1.10.0";
858 858 doCheck = false;
859 859 src = fetchurl {
860 860 url = "https://files.pythonhosted.org/packages/df/f5/9c052db7bd54d0cbf1bc0bb6554362bba1012d03e5888950a4f5c5dadc4e/scandir-1.10.0.tar.gz";
861 861 sha256 = "1bkqwmf056pkchf05ywbnf659wqlp6lljcdb0y88wr9f0vv32ijd";
862 862 };
863 863 meta = {
864 864 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
865 865 };
866 866 };
867 867 "setproctitle" = super.buildPythonPackage {
868 868 name = "setproctitle-1.1.10";
869 869 doCheck = false;
870 870 src = fetchurl {
871 871 url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
872 872 sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
873 873 };
874 874 meta = {
875 875 license = [ pkgs.lib.licenses.bsdOriginal ];
876 876 };
877 877 };
878 878 "setuptools" = super.buildPythonPackage {
879 879 name = "setuptools-44.1.0";
880 880 doCheck = false;
881 881 src = fetchurl {
882 882 url = "https://files.pythonhosted.org/packages/ed/7b/bbf89ca71e722b7f9464ebffe4b5ee20a9e5c9a555a56e2d3914bb9119a6/setuptools-44.1.0.zip";
883 883 sha256 = "1jja896zvd1ppccnjbhkgagxbwchgq6vfamp6qn1hvywq6q9cjkr";
884 884 };
885 885 meta = {
886 886 license = [ pkgs.lib.licenses.mit ];
887 887 };
888 888 };
889 889
890 890 "setuptools-scm" = super.buildPythonPackage {
891 891 name = "setuptools-scm-3.5.0";
892 892 doCheck = false;
893 893 src = fetchurl {
894 894 url = "https://files.pythonhosted.org/packages/b2/f7/60a645aae001a2e06cf4b8db2fba9d9f36b8fd378f10647e3e218b61b74b/setuptools_scm-3.5.0.tar.gz";
895 895 sha256 = "5bdf21a05792903cafe7ae0c9501182ab52497614fa6b1750d9dbae7b60c1a87";
896 896 };
897 897 meta = {
898 898 license = [ pkgs.lib.licenses.psfl ];
899 899 };
900 900 };
901 901
902 902 "simplegeneric" = super.buildPythonPackage {
903 903 name = "simplegeneric-0.8.1";
904 904 doCheck = false;
905 905 src = fetchurl {
906 906 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
907 907 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
908 908 };
909 909 meta = {
910 910 license = [ pkgs.lib.licenses.zpl21 ];
911 911 };
912 912 };
913 913 "simplejson" = super.buildPythonPackage {
914 914 name = "simplejson-3.16.0";
915 915 doCheck = false;
916 916 src = fetchurl {
917 917 url = "https://files.pythonhosted.org/packages/e3/24/c35fb1c1c315fc0fffe61ea00d3f88e85469004713dab488dee4f35b0aff/simplejson-3.16.0.tar.gz";
918 918 sha256 = "19cws1syk8jzq2pw43878dv6fjkb0ifvjpx0i9aajix6kc9jkwxi";
919 919 };
920 920 meta = {
921 921 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
922 922 };
923 923 };
924 924 "six" = super.buildPythonPackage {
925 925 name = "six-1.11.0";
926 926 doCheck = false;
927 927 src = fetchurl {
928 928 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
929 929 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
930 930 };
931 931 meta = {
932 932 license = [ pkgs.lib.licenses.mit ];
933 933 };
934 934 };
935 935 "subprocess32" = super.buildPythonPackage {
936 936 name = "subprocess32-3.5.4";
937 937 doCheck = false;
938 938 src = fetchurl {
939 939 url = "https://files.pythonhosted.org/packages/32/c8/564be4d12629b912ea431f1a50eb8b3b9d00f1a0b1ceff17f266be190007/subprocess32-3.5.4.tar.gz";
940 940 sha256 = "17f7mvwx2271s1wrl0qac3wjqqnrqag866zs3qc8v5wp0k43fagb";
941 941 };
942 942 meta = {
943 943 license = [ pkgs.lib.licenses.psfl ];
944 944 };
945 945 };
946 946 "subvertpy" = super.buildPythonPackage {
947 947 name = "subvertpy-0.10.1";
948 948 doCheck = false;
949 949 src = fetchurl {
950 950 url = "https://files.pythonhosted.org/packages/9d/76/99fa82affce75f5ac0f7dbe513796c3f37311ace0c68e1b063683b4f9b99/subvertpy-0.10.1.tar.gz";
951 951 sha256 = "061ncy9wjz3zyv527avcrdyk0xygyssyy7p1644nhzhwp8zpybij";
952 952 };
953 953 meta = {
954 954 license = [ pkgs.lib.licenses.lgpl21Plus pkgs.lib.licenses.gpl2Plus ];
955 955 };
956 956 };
957 957 "termcolor" = super.buildPythonPackage {
958 958 name = "termcolor-1.1.0";
959 959 doCheck = false;
960 960 src = fetchurl {
961 961 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
962 962 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
963 963 };
964 964 meta = {
965 965 license = [ pkgs.lib.licenses.mit ];
966 966 };
967 967 };
968 968 "traitlets" = super.buildPythonPackage {
969 969 name = "traitlets-4.3.3";
970 970 doCheck = false;
971 971 propagatedBuildInputs = [
972 972 self."ipython-genutils"
973 973 self."six"
974 974 self."decorator"
975 975 self."enum34"
976 976 ];
977 977 src = fetchurl {
978 978 url = "https://files.pythonhosted.org/packages/75/b0/43deb021bc943f18f07cbe3dac1d681626a48997b7ffa1e7fb14ef922b21/traitlets-4.3.3.tar.gz";
979 979 sha256 = "1xsrwgivpkxlbr4dfndfsi098s29yqgswgjc1qqn69yxklvfw8yh";
980 980 };
981 981 meta = {
982 982 license = [ pkgs.lib.licenses.bsdOriginal ];
983 983 };
984 984 };
985 985 "translationstring" = super.buildPythonPackage {
986 986 name = "translationstring-1.3";
987 987 doCheck = false;
988 988 src = fetchurl {
989 989 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
990 990 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
991 991 };
992 992 meta = {
993 993 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
994 994 };
995 995 };
996 996 "venusian" = super.buildPythonPackage {
997 997 name = "venusian-1.2.0";
998 998 doCheck = false;
999 999 src = fetchurl {
1000 1000 url = "https://files.pythonhosted.org/packages/7e/6f/40a9d43ac77cb51cb62be5b5662d170f43f8037bdc4eab56336c4ca92bb7/venusian-1.2.0.tar.gz";
1001 1001 sha256 = "0ghyx66g8ikx9nx1mnwqvdcqm11i1vlq0hnvwl50s48bp22q5v34";
1002 1002 };
1003 1003 meta = {
1004 1004 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1005 1005 };
1006 1006 };
1007 1007 "waitress" = super.buildPythonPackage {
1008 1008 name = "waitress-1.3.1";
1009 1009 doCheck = false;
1010 1010 src = fetchurl {
1011 1011 url = "https://files.pythonhosted.org/packages/a6/e6/708da7bba65898e5d759ade8391b1077e49d07be0b0223c39f5be04def56/waitress-1.3.1.tar.gz";
1012 1012 sha256 = "1iysl8ka3l4cdrr0r19fh1cv28q41mwpvgsb81ji7k4shkb0k3i7";
1013 1013 };
1014 1014 meta = {
1015 1015 license = [ pkgs.lib.licenses.zpl21 ];
1016 1016 };
1017 1017 };
1018 1018 "wcwidth" = super.buildPythonPackage {
1019 1019 name = "wcwidth-0.1.9";
1020 1020 doCheck = false;
1021 1021 src = fetchurl {
1022 1022 url = "https://files.pythonhosted.org/packages/25/9d/0acbed6e4a4be4fc99148f275488580968f44ddb5e69b8ceb53fc9df55a0/wcwidth-0.1.9.tar.gz";
1023 1023 sha256 = "1wf5ycjx8s066rdvr0fgz4xds9a8zhs91c4jzxvvymm1c8l8cwzf";
1024 1024 };
1025 1025 meta = {
1026 1026 license = [ pkgs.lib.licenses.mit ];
1027 1027 };
1028 1028 };
1029 1029 "webob" = super.buildPythonPackage {
1030 1030 name = "webob-1.8.5";
1031 1031 doCheck = false;
1032 1032 src = fetchurl {
1033 1033 url = "https://files.pythonhosted.org/packages/9d/1a/0c89c070ee2829c934cb6c7082287c822e28236a4fcf90063e6be7c35532/WebOb-1.8.5.tar.gz";
1034 1034 sha256 = "11khpzaxc88q31v25ic330gsf56fwmbdc9b30br8mvp0fmwspah5";
1035 1035 };
1036 1036 meta = {
1037 1037 license = [ pkgs.lib.licenses.mit ];
1038 1038 };
1039 1039 };
1040 1040 "webtest" = super.buildPythonPackage {
1041 1041 name = "webtest-2.0.34";
1042 1042 doCheck = false;
1043 1043 propagatedBuildInputs = [
1044 1044 self."six"
1045 1045 self."webob"
1046 1046 self."waitress"
1047 1047 self."beautifulsoup4"
1048 1048 ];
1049 1049 src = fetchurl {
1050 1050 url = "https://files.pythonhosted.org/packages/2c/74/a0e63feee438735d628631e2b70d82280276a930637ac535479e5fad9427/WebTest-2.0.34.tar.gz";
1051 1051 sha256 = "0x1y2c8z4fmpsny4hbp6ka37si2g10r5r2jwxhvv5mx7g3blq4bi";
1052 1052 };
1053 1053 meta = {
1054 1054 license = [ pkgs.lib.licenses.mit ];
1055 1055 };
1056 1056 };
1057 1057 "zipp" = super.buildPythonPackage {
1058 1058 name = "zipp-1.2.0";
1059 1059 doCheck = false;
1060 1060 propagatedBuildInputs = [
1061 1061 self."contextlib2"
1062 1062 ];
1063 1063 src = fetchurl {
1064 1064 url = "https://files.pythonhosted.org/packages/78/08/d52f0ea643bc1068d6dc98b412f4966a9b63255d20911a23ac3220c033c4/zipp-1.2.0.tar.gz";
1065 1065 sha256 = "1c91lnv1bxjimh8as27hz7bghsjkkbxn1d37xq7in9c82iai0167";
1066 1066 };
1067 1067 meta = {
1068 1068 license = [ pkgs.lib.licenses.mit ];
1069 1069 };
1070 1070 };
1071 1071 "zope.deprecation" = super.buildPythonPackage {
1072 1072 name = "zope.deprecation-4.4.0";
1073 1073 doCheck = false;
1074 1074 propagatedBuildInputs = [
1075 1075 self."setuptools"
1076 1076 ];
1077 1077 src = fetchurl {
1078 1078 url = "https://files.pythonhosted.org/packages/34/da/46e92d32d545dd067b9436279d84c339e8b16de2ca393d7b892bc1e1e9fd/zope.deprecation-4.4.0.tar.gz";
1079 1079 sha256 = "1pz2cv7gv9y1r3m0bdv7ks1alagmrn5msm5spwdzkb2by0w36i8d";
1080 1080 };
1081 1081 meta = {
1082 1082 license = [ pkgs.lib.licenses.zpl21 ];
1083 1083 };
1084 1084 };
1085 1085 "zope.interface" = super.buildPythonPackage {
1086 1086 name = "zope.interface-4.6.0";
1087 1087 doCheck = false;
1088 1088 propagatedBuildInputs = [
1089 1089 self."setuptools"
1090 1090 ];
1091 1091 src = fetchurl {
1092 1092 url = "https://files.pythonhosted.org/packages/4e/d0/c9d16bd5b38de44a20c6dc5d5ed80a49626fafcb3db9f9efdc2a19026db6/zope.interface-4.6.0.tar.gz";
1093 1093 sha256 = "1rgh2x3rcl9r0v0499kf78xy86rnmanajf4ywmqb943wpk50sg8v";
1094 1094 };
1095 1095 meta = {
1096 1096 license = [ pkgs.lib.licenses.zpl21 ];
1097 1097 };
1098 1098 };
1099 1099
1100 1100 ### Test requirements
1101 1101
1102 1102
1103 1103 }
@@ -1,1 +1,1 b''
1 4.25.0 No newline at end of file
1 4.25.2 No newline at end of file
@@ -1,1021 +1,1022 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17 import functools
18 18 import io
19 19 import logging
20 20 import os
21 21 import stat
22 22 import urllib
23 23 import urllib2
24 24 import traceback
25 25
26 26 from hgext import largefiles, rebase, purge
27 27 from hgext.strip import strip as hgext_strip
28 28 from mercurial import commands
29 29 from mercurial import unionrepo
30 30 from mercurial import verify
31 31 from mercurial import repair
32 32
33 33 import vcsserver
34 34 from vcsserver import exceptions
35 35 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, archive_repo, ArchiveNode
36 36 from vcsserver.hgcompat import (
37 37 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
38 38 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
39 39 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
40 40 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
41 41 RepoLookupError, InterventionRequired, RequirementError,
42 42 alwaysmatcher, patternmatcher, hgutil)
43 43 from vcsserver.vcs_base import RemoteBase
44 44
45 45 log = logging.getLogger(__name__)
46 46
47 47
def make_ui_from_config(repo_config):
    """Build a Mercurial ui object from a (section, option, value) list.

    A logging-aware ui subclass is used so that Mercurial status/warning
    output ends up in our own logger instead of on stdout/stderr.
    """

    class LoggingUI(ui.ui):
        def status(self, *msg, **opts):
            log.info(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).status(*msg, **opts)

        def warn(self, *msg, **opts):
            log.warn(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).warn(*msg, **opts)

        def error(self, *msg, **opts):
            log.error(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).error(*msg, **opts)

        def note(self, *msg, **opts):
            log.info(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).note(*msg, **opts)

        def debug(self, *msg, **opts):
            log.debug(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).debug(*msg, **opts)

    base_ui = LoggingUI()

    # start from a clean slate: drop any pre-loaded overlay/user/trusted config
    base_ui._ocfg = hgconfig.config()
    base_ui._ucfg = hgconfig.config()
    base_ui._tcfg = hgconfig.config()

    for section, option, value in repo_config:
        base_ui.setconfig(section, option, value)

    # keep hgweb quiet so it doesn't print output
    base_ui.setconfig('ui', 'quiet', 'true')

    base_ui.setconfig('ui', 'paginate', 'never')
    # route Mercurial messages to stderr for better error reporting
    base_ui.setconfig('ui', 'message-output', 'stderr')

    # force a single worker: Mercurial may otherwise try to install a signal
    # handler from a non-main thread, raising ValueError
    base_ui.setconfig('worker', 'numcpus', 1)

    # If there is no config for the largefiles extension, explicitly disable
    # it here. This overrides settings from a repository hgrc file; recent
    # mercurial versions enable largefiles in hgrc on clone from a largefile
    # repo.
    if not base_ui.hasconfig('extensions', 'largefiles'):
        log.debug('Explicitly disable largefiles extension for repo.')
        base_ui.setconfig('extensions', 'largefiles', '!')

    return base_ui
101 101
102 102
def reraise_safe_exceptions(func):
    """Decorator for converting mercurial exceptions to something neutral.

    Known mercurial error types are translated into the corresponding
    vcsserver ``exceptions`` wrappers; anything unexpected is logged and
    re-raised as an UnhandledException so callers never see raw mercurial
    internals.
    """

    # FIX: preserve the wrapped function's __name__/__doc__ — the module
    # already imports functools, but the wrapper previously dropped metadata.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except (Abort, InterventionRequired) as e:
            raise_from_original(exceptions.AbortException(e))
        except RepoLookupError as e:
            raise_from_original(exceptions.LookupException(e))
        except RequirementError as e:
            raise_from_original(exceptions.RequirementException(e))
        except RepoError as e:
            raise_from_original(exceptions.VcsException(e))
        except LookupError as e:
            raise_from_original(exceptions.LookupException(e))
        except Exception as e:
            if not hasattr(e, '_vcs_kind'):
                log.exception("Unhandled exception in hg remote call")
                raise_from_original(exceptions.UnhandledException(e))

            raise
    return wrapper
126 126
127 127
class MercurialFactory(RepoFactory):
    """Factory producing configured mercurial repository objects."""

    repo_type = 'hg'

    def _create_config(self, config, hooks=True):
        # With hooks disabled, strip the rhodecode push/pull hooks from the
        # config so backend operations do not trigger them.
        if not hooks:
            hooks_to_clean = frozenset((
                'changegroup.repo_size', 'preoutgoing.pre_pull',
                'outgoing.pull_logger', 'prechangegroup.pre_push'))
            config = [entry for entry in config
                      if not (entry[0] == 'hooks' and entry[1] in hooks_to_clean)]

        return make_ui_from_config(config)

    def _create_repo(self, wire, create):
        baseui = self._create_config(wire["config"])
        return instance(baseui, wire["path"], create)

    def repo(self, wire, create=False):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create)
155 155
156 156
def patch_ui_message_output(baseui):
    """Redirect every output channel of *baseui* into a BytesIO buffer.

    Returns the patched ui together with the buffer, so callers can read
    back whatever mercurial printed.
    """
    baseui.setconfig('ui', 'quiet', 'false')
    captured = io.BytesIO()

    def write(data, **unused_kwargs):
        captured.write(data)

    # capture all output channels with the same sink
    for channel in ('status', 'write', 'warn', 'debug'):
        setattr(baseui, channel, write)

    return baseui, captured
170 170
171 171
172 172 class HgRemote(RemoteBase):
173 173
    def __init__(self, factory):
        """Remote adapter exposing mercurial operations over the wire.

        :param factory: MercurialFactory used to materialize repository
            objects from the per-call ``wire`` descriptor.
        """
        self._factory = factory
        # attribute name -> bound method; used by bulk_request() to resolve
        # several commit attributes in a single call
        self._bulk_methods = {
            "affected_files": self.ctx_files,
            "author": self.ctx_user,
            "branch": self.ctx_branch,
            "children": self.ctx_children,
            "date": self.ctx_date,
            "message": self.ctx_description,
            "parents": self.ctx_parents,
            "status": self.ctx_status,
            "obsolete": self.ctx_obsolete,
            "phase": self.ctx_phase,
            "hidden": self.ctx_hidden,
            "_file_paths": self.ctx_list,
        }
190 190
191 191 def _get_ctx(self, repo, ref):
192 192 return get_ctx(repo, ref)
193 193
194 194 @reraise_safe_exceptions
195 195 def discover_hg_version(self):
196 196 from mercurial import util
197 197 return util.version()
198 198
199 199 @reraise_safe_exceptions
200 200 def is_empty(self, wire):
201 201 repo = self._factory.repo(wire)
202 202
203 203 try:
204 204 return len(repo) == 0
205 205 except Exception:
206 206 log.exception("failed to read object_store")
207 207 return False
208 208
209 209 @reraise_safe_exceptions
210 210 def bookmarks(self, wire):
211 211 cache_on, context_uid, repo_id = self._cache_on(wire)
212 212 @self.region.conditional_cache_on_arguments(condition=cache_on)
213 213 def _bookmarks(_context_uid, _repo_id):
214 214 repo = self._factory.repo(wire)
215 215 return dict(repo._bookmarks)
216 216
217 217 return _bookmarks(context_uid, repo_id)
218 218
219 219 @reraise_safe_exceptions
220 220 def branches(self, wire, normal, closed):
221 221 cache_on, context_uid, repo_id = self._cache_on(wire)
222 222 @self.region.conditional_cache_on_arguments(condition=cache_on)
223 223 def _branches(_context_uid, _repo_id, _normal, _closed):
224 224 repo = self._factory.repo(wire)
225 225 iter_branches = repo.branchmap().iterbranches()
226 226 bt = {}
227 227 for branch_name, _heads, tip, is_closed in iter_branches:
228 228 if normal and not is_closed:
229 229 bt[branch_name] = tip
230 230 if closed and is_closed:
231 231 bt[branch_name] = tip
232 232
233 233 return bt
234 234
235 235 return _branches(context_uid, repo_id, normal, closed)
236 236
237 237 @reraise_safe_exceptions
238 238 def bulk_request(self, wire, commit_id, pre_load):
239 239 cache_on, context_uid, repo_id = self._cache_on(wire)
240 240 @self.region.conditional_cache_on_arguments(condition=cache_on)
241 241 def _bulk_request(_repo_id, _commit_id, _pre_load):
242 242 result = {}
243 243 for attr in pre_load:
244 244 try:
245 245 method = self._bulk_methods[attr]
246 246 result[attr] = method(wire, commit_id)
247 247 except KeyError as e:
248 248 raise exceptions.VcsException(e)(
249 249 'Unknown bulk attribute: "%s"' % attr)
250 250 return result
251 251
252 252 return _bulk_request(repo_id, commit_id, sorted(pre_load))
253 253
254 254 @reraise_safe_exceptions
255 255 def ctx_branch(self, wire, commit_id):
256 256 cache_on, context_uid, repo_id = self._cache_on(wire)
257 257 @self.region.conditional_cache_on_arguments(condition=cache_on)
258 258 def _ctx_branch(_repo_id, _commit_id):
259 259 repo = self._factory.repo(wire)
260 260 ctx = self._get_ctx(repo, commit_id)
261 261 return ctx.branch()
262 262 return _ctx_branch(repo_id, commit_id)
263 263
264 264 @reraise_safe_exceptions
265 265 def ctx_date(self, wire, commit_id):
266 266 cache_on, context_uid, repo_id = self._cache_on(wire)
267 267 @self.region.conditional_cache_on_arguments(condition=cache_on)
268 268 def _ctx_date(_repo_id, _commit_id):
269 269 repo = self._factory.repo(wire)
270 270 ctx = self._get_ctx(repo, commit_id)
271 271 return ctx.date()
272 272 return _ctx_date(repo_id, commit_id)
273 273
274 274 @reraise_safe_exceptions
275 275 def ctx_description(self, wire, revision):
276 276 repo = self._factory.repo(wire)
277 277 ctx = self._get_ctx(repo, revision)
278 278 return ctx.description()
279 279
280 280 @reraise_safe_exceptions
281 281 def ctx_files(self, wire, commit_id):
282 282 cache_on, context_uid, repo_id = self._cache_on(wire)
283 283 @self.region.conditional_cache_on_arguments(condition=cache_on)
284 284 def _ctx_files(_repo_id, _commit_id):
285 285 repo = self._factory.repo(wire)
286 286 ctx = self._get_ctx(repo, commit_id)
287 287 return ctx.files()
288 288
289 289 return _ctx_files(repo_id, commit_id)
290 290
291 291 @reraise_safe_exceptions
292 292 def ctx_list(self, path, revision):
293 293 repo = self._factory.repo(path)
294 294 ctx = self._get_ctx(repo, revision)
295 295 return list(ctx)
296 296
297 297 @reraise_safe_exceptions
298 298 def ctx_parents(self, wire, commit_id):
299 299 cache_on, context_uid, repo_id = self._cache_on(wire)
300 300 @self.region.conditional_cache_on_arguments(condition=cache_on)
301 301 def _ctx_parents(_repo_id, _commit_id):
302 302 repo = self._factory.repo(wire)
303 303 ctx = self._get_ctx(repo, commit_id)
304 304 return [parent.hex() for parent in ctx.parents()
305 305 if not (parent.hidden() or parent.obsolete())]
306 306
307 307 return _ctx_parents(repo_id, commit_id)
308 308
309 309 @reraise_safe_exceptions
310 310 def ctx_children(self, wire, commit_id):
311 311 cache_on, context_uid, repo_id = self._cache_on(wire)
312 312 @self.region.conditional_cache_on_arguments(condition=cache_on)
313 313 def _ctx_children(_repo_id, _commit_id):
314 314 repo = self._factory.repo(wire)
315 315 ctx = self._get_ctx(repo, commit_id)
316 316 return [child.hex() for child in ctx.children()
317 317 if not (child.hidden() or child.obsolete())]
318 318
319 319 return _ctx_children(repo_id, commit_id)
320 320
321 321 @reraise_safe_exceptions
322 322 def ctx_phase(self, wire, commit_id):
323 323 cache_on, context_uid, repo_id = self._cache_on(wire)
324 324 @self.region.conditional_cache_on_arguments(condition=cache_on)
325 325 def _ctx_phase(_context_uid, _repo_id, _commit_id):
326 326 repo = self._factory.repo(wire)
327 327 ctx = self._get_ctx(repo, commit_id)
328 328 # public=0, draft=1, secret=3
329 329 return ctx.phase()
330 330 return _ctx_phase(context_uid, repo_id, commit_id)
331 331
332 332 @reraise_safe_exceptions
333 333 def ctx_obsolete(self, wire, commit_id):
334 334 cache_on, context_uid, repo_id = self._cache_on(wire)
335 335 @self.region.conditional_cache_on_arguments(condition=cache_on)
336 336 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
337 337 repo = self._factory.repo(wire)
338 338 ctx = self._get_ctx(repo, commit_id)
339 339 return ctx.obsolete()
340 340 return _ctx_obsolete(context_uid, repo_id, commit_id)
341 341
342 342 @reraise_safe_exceptions
343 343 def ctx_hidden(self, wire, commit_id):
344 344 cache_on, context_uid, repo_id = self._cache_on(wire)
345 345 @self.region.conditional_cache_on_arguments(condition=cache_on)
346 346 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
347 347 repo = self._factory.repo(wire)
348 348 ctx = self._get_ctx(repo, commit_id)
349 349 return ctx.hidden()
350 350 return _ctx_hidden(context_uid, repo_id, commit_id)
351 351
352 352 @reraise_safe_exceptions
353 353 def ctx_substate(self, wire, revision):
354 354 repo = self._factory.repo(wire)
355 355 ctx = self._get_ctx(repo, revision)
356 356 return ctx.substate
357 357
358 358 @reraise_safe_exceptions
359 359 def ctx_status(self, wire, revision):
360 360 repo = self._factory.repo(wire)
361 361 ctx = self._get_ctx(repo, revision)
362 362 status = repo[ctx.p1().node()].status(other=ctx.node())
363 363 # object of status (odd, custom named tuple in mercurial) is not
364 364 # correctly serializable, we make it a list, as the underling
365 365 # API expects this to be a list
366 366 return list(status)
367 367
368 368 @reraise_safe_exceptions
369 369 def ctx_user(self, wire, revision):
370 370 repo = self._factory.repo(wire)
371 371 ctx = self._get_ctx(repo, revision)
372 372 return ctx.user()
373 373
    @reraise_safe_exceptions
    def check_url(self, url, config):
        """Verify that *url* points at a reachable Mercurial repository.

        URLs of the form "<proto>+http://..." carry an explicit protocol
        prefix which is split off first; svn-prefixed URLs skip the
        hg-repository verification step.  Returns True on success, raises a
        wrapped ``exceptions.URLError`` otherwise.
        """
        _proto = None
        # split an optional "<proto>+" prefix, e.g. "hg+http://host/repo"
        if '+' in url[:url.find('://')]:
            _proto = url[0:url.find('+')]
            url = url[url.find('+') + 1:]
        handlers = []
        url_obj = url_parser(url)
        test_uri, authinfo = url_obj.authinfo()
        # hide credentials and query values in anything we log
        url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
        url_obj.query = obfuscate_qs(url_obj.query)

        cleaned_uri = str(url_obj)
        log.info("Checking URL for remote cloning/import: %s", cleaned_uri)

        if authinfo:
            # create a password manager
            passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
            passmgr.add_password(*authinfo)

            handlers.extend((httpbasicauthhandler(passmgr),
                             httpdigestauthhandler(passmgr)))

        o = urllib2.build_opener(*handlers)
        o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
                        ('Accept', 'application/mercurial-0.1')]

        # issue a cheap hgweb "between" command with null node pairs; any
        # HTTP-reachable hg server answers this with status 200
        q = {"cmd": 'between'}
        q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
        qs = '?%s' % urllib.urlencode(q)
        cu = "%s%s" % (test_uri, qs)
        req = urllib2.Request(cu, None, {})

        try:
            log.debug("Trying to open URL %s", cleaned_uri)
            resp = o.open(req)
            if resp.code != 200:
                raise exceptions.URLError()('Return Code is not 200')
        except Exception as e:
            log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
            # means it cannot be cloned
            raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))

        # now check if it's a proper hg repo, but don't do it for svn
        try:
            if _proto == 'svn':
                pass
            else:
                # check for pure hg repos
                log.debug(
                    "Verifying if URL is a Mercurial repository: %s",
                    cleaned_uri)
                ui = make_ui_from_config(config)
                peer_checker = makepeer(ui, url)
                # lookup of 'tip' only succeeds against a real hg repository
                peer_checker.lookup('tip')
        except Exception as e:
            log.warning("URL is not a valid Mercurial repository: %s",
                        cleaned_uri)
            raise exceptions.URLError(e)(
                "url [%s] does not look like an hg repo org_exc: %s"
                % (cleaned_uri, e))

        log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
        return True
438 438
439 439 @reraise_safe_exceptions
440 440 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
441 441 repo = self._factory.repo(wire)
442 442
443 443 if file_filter:
444 444 match_filter = match(file_filter[0], '', [file_filter[1]])
445 445 else:
446 446 match_filter = file_filter
447 447 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
448 448
449 449 try:
450 450 return "".join(patch.diff(
451 451 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
452 452 except RepoLookupError as e:
453 453 raise exceptions.LookupException(e)()
454 454
    @reraise_safe_exceptions
    def node_history(self, wire, revision, path, limit):
        """Return up to *limit* commit hashes (newest first) that touched
        *path*, starting from *revision* and walking the file's history.

        Hidden and obsolete changesets are skipped; the result is cached.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
            repo = self._factory.repo(wire)

            ctx = self._get_ctx(repo, revision)
            fctx = ctx.filectx(path)

            def history_iter():
                # walk the filelog newest-first
                limit_rev = fctx.rev()
                for obj in reversed(list(fctx.filelog())):
                    obj = fctx.filectx(obj)
                    ctx = obj.changectx()
                    if ctx.hidden() or ctx.obsolete():
                        continue

                    # only yield revisions at or below the starting revision
                    if limit_rev >= obj.rev():
                        yield obj

            history = []
            for cnt, obj in enumerate(history_iter()):
                if limit and cnt >= limit:
                    break
                history.append(hex(obj.node()))

            return [x for x in history]
        return _node_history(context_uid, repo_id, revision, path, limit)
484 484
485 485 @reraise_safe_exceptions
486 486 def node_history_untill(self, wire, revision, path, limit):
487 487 cache_on, context_uid, repo_id = self._cache_on(wire)
488 488 @self.region.conditional_cache_on_arguments(condition=cache_on)
489 489 def _node_history_until(_context_uid, _repo_id):
490 490 repo = self._factory.repo(wire)
491 491 ctx = self._get_ctx(repo, revision)
492 492 fctx = ctx.filectx(path)
493 493
494 494 file_log = list(fctx.filelog())
495 495 if limit:
496 496 # Limit to the last n items
497 497 file_log = file_log[-limit:]
498 498
499 499 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
500 500 return _node_history_until(context_uid, repo_id, revision, path, limit)
501 501
502 502 @reraise_safe_exceptions
503 503 def fctx_annotate(self, wire, revision, path):
504 504 repo = self._factory.repo(wire)
505 505 ctx = self._get_ctx(repo, revision)
506 506 fctx = ctx.filectx(path)
507 507
508 508 result = []
509 509 for i, annotate_obj in enumerate(fctx.annotate(), 1):
510 510 ln_no = i
511 511 sha = hex(annotate_obj.fctx.node())
512 512 content = annotate_obj.text
513 513 result.append((ln_no, sha, content))
514 514 return result
515 515
516 516 @reraise_safe_exceptions
517 517 def fctx_node_data(self, wire, revision, path):
518 518 repo = self._factory.repo(wire)
519 519 ctx = self._get_ctx(repo, revision)
520 520 fctx = ctx.filectx(path)
521 521 return fctx.data()
522 522
523 523 @reraise_safe_exceptions
524 524 def fctx_flags(self, wire, commit_id, path):
525 525 cache_on, context_uid, repo_id = self._cache_on(wire)
526 526 @self.region.conditional_cache_on_arguments(condition=cache_on)
527 527 def _fctx_flags(_repo_id, _commit_id, _path):
528 528 repo = self._factory.repo(wire)
529 529 ctx = self._get_ctx(repo, commit_id)
530 530 fctx = ctx.filectx(path)
531 531 return fctx.flags()
532 532
533 533 return _fctx_flags(repo_id, commit_id, path)
534 534
535 535 @reraise_safe_exceptions
536 536 def fctx_size(self, wire, commit_id, path):
537 537 cache_on, context_uid, repo_id = self._cache_on(wire)
538 538 @self.region.conditional_cache_on_arguments(condition=cache_on)
539 539 def _fctx_size(_repo_id, _revision, _path):
540 540 repo = self._factory.repo(wire)
541 541 ctx = self._get_ctx(repo, commit_id)
542 542 fctx = ctx.filectx(path)
543 543 return fctx.size()
544 544 return _fctx_size(repo_id, commit_id, path)
545 545
546 546 @reraise_safe_exceptions
547 547 def get_all_commit_ids(self, wire, name):
548 548 cache_on, context_uid, repo_id = self._cache_on(wire)
549 549 @self.region.conditional_cache_on_arguments(condition=cache_on)
550 550 def _get_all_commit_ids(_context_uid, _repo_id, _name):
551 551 repo = self._factory.repo(wire)
552 552 repo = repo.filtered(name)
553 553 revs = map(lambda x: hex(x[7]), repo.changelog.index)
554 554 return revs
555 555 return _get_all_commit_ids(context_uid, repo_id, name)
556 556
557 557 @reraise_safe_exceptions
558 558 def get_config_value(self, wire, section, name, untrusted=False):
559 559 repo = self._factory.repo(wire)
560 560 return repo.ui.config(section, name, untrusted=untrusted)
561 561
562 562 @reraise_safe_exceptions
563 563 def is_large_file(self, wire, commit_id, path):
564 564 cache_on, context_uid, repo_id = self._cache_on(wire)
565 565 @self.region.conditional_cache_on_arguments(condition=cache_on)
566 566 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
567 567 return largefiles.lfutil.isstandin(path)
568 568
569 569 return _is_large_file(context_uid, repo_id, commit_id, path)
570 570
571 571 @reraise_safe_exceptions
572 572 def is_binary(self, wire, revision, path):
573 573 cache_on, context_uid, repo_id = self._cache_on(wire)
574 574
575 575 @self.region.conditional_cache_on_arguments(condition=cache_on)
576 576 def _is_binary(_repo_id, _sha, _path):
577 577 repo = self._factory.repo(wire)
578 578 ctx = self._get_ctx(repo, revision)
579 579 fctx = ctx.filectx(path)
580 580 return fctx.isbinary()
581 581
582 582 return _is_binary(repo_id, revision, path)
583 583
584 584 @reraise_safe_exceptions
585 585 def in_largefiles_store(self, wire, sha):
586 586 repo = self._factory.repo(wire)
587 587 return largefiles.lfutil.instore(repo, sha)
588 588
589 589 @reraise_safe_exceptions
590 590 def in_user_cache(self, wire, sha):
591 591 repo = self._factory.repo(wire)
592 592 return largefiles.lfutil.inusercache(repo.ui, sha)
593 593
594 594 @reraise_safe_exceptions
595 595 def store_path(self, wire, sha):
596 596 repo = self._factory.repo(wire)
597 597 return largefiles.lfutil.storepath(repo, sha)
598 598
599 599 @reraise_safe_exceptions
600 600 def link(self, wire, sha, path):
601 601 repo = self._factory.repo(wire)
602 602 largefiles.lfutil.link(
603 603 largefiles.lfutil.usercachepath(repo.ui, sha), path)
604 604
605 605 @reraise_safe_exceptions
606 606 def localrepository(self, wire, create=False):
607 607 self._factory.repo(wire, create=create)
608 608
    @reraise_safe_exceptions
    def lookup(self, wire, revision, both):
        """Resolve *revision* to a commit hash (and rev number when *both*).

        :param revision: a commit id, or an integer local revision number;
            non-positive ints are shifted by one (see NOTE below).
        :param both: when True return a ``(hex, rev)`` tuple, otherwise just
            the hex hash.
        :raises exceptions.LookupException: when the revision is unknown.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)

        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _lookup(_context_uid, _repo_id, _revision, _both):

            repo = self._factory.repo(wire)
            rev = _revision
            if isinstance(rev, int):
                # NOTE(marcink):
                # since Mercurial doesn't support negative indexes properly
                # we need to shift accordingly by one to get proper index, e.g
                # repo[-1] => repo[-2]
                # repo[0] => repo[-1]
                if rev <= 0:
                    rev = rev + -1
            try:
                ctx = self._get_ctx(repo, rev)
            except (TypeError, RepoLookupError) as e:
                # keep the original traceback on the exception for callers
                e._org_exc_tb = traceback.format_exc()
                raise exceptions.LookupException(e)(rev)
            except LookupError as e:
                e._org_exc_tb = traceback.format_exc()
                raise exceptions.LookupException(e)(e.name)

            if not both:
                return ctx.hex()

            # re-resolve through the hash to get a canonical context
            ctx = repo[ctx.hex()]
            return ctx.hex(), ctx.rev()

        return _lookup(context_uid, repo_id, revision, both)
641 642
642 643 @reraise_safe_exceptions
643 644 def sync_push(self, wire, url):
644 645 if not self.check_url(url, wire['config']):
645 646 return
646 647
647 648 repo = self._factory.repo(wire)
648 649
649 650 # Disable any prompts for this repo
650 651 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
651 652
652 653 bookmarks = dict(repo._bookmarks).keys()
653 654 remote = peer(repo, {}, url)
654 655 # Disable any prompts for this remote
655 656 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
656 657
657 658 return exchange.push(
658 659 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
659 660
660 661 @reraise_safe_exceptions
661 662 def revision(self, wire, rev):
662 663 repo = self._factory.repo(wire)
663 664 ctx = self._get_ctx(repo, rev)
664 665 return ctx.rev()
665 666
666 667 @reraise_safe_exceptions
667 668 def rev_range(self, wire, commit_filter):
668 669 cache_on, context_uid, repo_id = self._cache_on(wire)
669 670
670 671 @self.region.conditional_cache_on_arguments(condition=cache_on)
671 672 def _rev_range(_context_uid, _repo_id, _filter):
672 673 repo = self._factory.repo(wire)
673 674 revisions = [rev for rev in revrange(repo, commit_filter)]
674 675 return revisions
675 676
676 677 return _rev_range(context_uid, repo_id, sorted(commit_filter))
677 678
678 679 @reraise_safe_exceptions
679 680 def rev_range_hash(self, wire, node):
680 681 repo = self._factory.repo(wire)
681 682
682 683 def get_revs(repo, rev_opt):
683 684 if rev_opt:
684 685 revs = revrange(repo, rev_opt)
685 686 if len(revs) == 0:
686 687 return (nullrev, nullrev)
687 688 return max(revs), min(revs)
688 689 else:
689 690 return len(repo) - 1, 0
690 691
691 692 stop, start = get_revs(repo, [node + ':'])
692 693 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
693 694 return revs
694 695
695 696 @reraise_safe_exceptions
696 697 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
697 698 other_path = kwargs.pop('other_path', None)
698 699
699 700 # case when we want to compare two independent repositories
700 701 if other_path and other_path != wire["path"]:
701 702 baseui = self._factory._create_config(wire["config"])
702 703 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
703 704 else:
704 705 repo = self._factory.repo(wire)
705 706 return list(repo.revs(rev_spec, *args))
706 707
707 708 @reraise_safe_exceptions
708 709 def verify(self, wire,):
709 710 repo = self._factory.repo(wire)
710 711 baseui = self._factory._create_config(wire['config'])
711 712
712 713 baseui, output = patch_ui_message_output(baseui)
713 714
714 715 repo.ui = baseui
715 716 verify.verify(repo)
716 717 return output.getvalue()
717 718
718 719 @reraise_safe_exceptions
719 720 def hg_update_cache(self, wire,):
720 721 repo = self._factory.repo(wire)
721 722 baseui = self._factory._create_config(wire['config'])
722 723 baseui, output = patch_ui_message_output(baseui)
723 724
724 725 repo.ui = baseui
725 726 with repo.wlock(), repo.lock():
726 727 repo.updatecaches(full=True)
727 728
728 729 return output.getvalue()
729 730
730 731 @reraise_safe_exceptions
731 732 def hg_rebuild_fn_cache(self, wire,):
732 733 repo = self._factory.repo(wire)
733 734 baseui = self._factory._create_config(wire['config'])
734 735 baseui, output = patch_ui_message_output(baseui)
735 736
736 737 repo.ui = baseui
737 738
738 739 repair.rebuildfncache(baseui, repo)
739 740
740 741 return output.getvalue()
741 742
742 743 @reraise_safe_exceptions
743 744 def tags(self, wire):
744 745 cache_on, context_uid, repo_id = self._cache_on(wire)
745 746 @self.region.conditional_cache_on_arguments(condition=cache_on)
746 747 def _tags(_context_uid, _repo_id):
747 748 repo = self._factory.repo(wire)
748 749 return repo.tags()
749 750
750 751 return _tags(context_uid, repo_id)
751 752
752 753 @reraise_safe_exceptions
753 754 def update(self, wire, node=None, clean=False):
754 755 repo = self._factory.repo(wire)
755 756 baseui = self._factory._create_config(wire['config'])
756 757 commands.update(baseui, repo, node=node, clean=clean)
757 758
758 759 @reraise_safe_exceptions
759 760 def identify(self, wire):
760 761 repo = self._factory.repo(wire)
761 762 baseui = self._factory._create_config(wire['config'])
762 763 output = io.BytesIO()
763 764 baseui.write = output.write
764 765 # This is required to get a full node id
765 766 baseui.debugflag = True
766 767 commands.identify(baseui, repo, id=True)
767 768
768 769 return output.getvalue()
769 770
770 771 @reraise_safe_exceptions
771 772 def heads(self, wire, branch=None):
772 773 repo = self._factory.repo(wire)
773 774 baseui = self._factory._create_config(wire['config'])
774 775 output = io.BytesIO()
775 776
776 777 def write(data, **unused_kwargs):
777 778 output.write(data)
778 779
779 780 baseui.write = write
780 781 if branch:
781 782 args = [branch]
782 783 else:
783 784 args = []
784 785 commands.heads(baseui, repo, template='{node} ', *args)
785 786
786 787 return output.getvalue()
787 788
788 789 @reraise_safe_exceptions
789 790 def ancestor(self, wire, revision1, revision2):
790 791 repo = self._factory.repo(wire)
791 792 changelog = repo.changelog
792 793 lookup = repo.lookup
793 794 a = changelog.ancestor(lookup(revision1), lookup(revision2))
794 795 return hex(a)
795 796
796 797 @reraise_safe_exceptions
797 798 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
798 799 baseui = self._factory._create_config(wire["config"], hooks=hooks)
799 800 clone(baseui, source, dest, noupdate=not update_after_clone)
800 801
    @reraise_safe_exceptions
    def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
        """Create a changeset from in-memory content and return its hex id.

        :param parents: parent changeset ids for the new commit.
        :param files: all paths touched by the commit.
        :param removed: paths to delete.
        :param updated: list of dicts with 'path', 'content' and 'mode' keys
            describing added/changed files.
        """

        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        # on a publishing server new commits become public immediately
        publishing = baseui.configbool('phases', 'publish')
        if publishing:
            new_commit = 'public'
        else:
            new_commit = 'draft'

        def _filectxfn(_repo, ctx, path):
            """
            Marks given path as added/changed/removed in a given _repo. This is
            for internal mercurial commit function.
            """

            # check if this path is removed
            if path in removed:
                # returning None is a way to mark node for removal
                return None

            # check if this path is added
            for node in updated:
                if node['path'] == path:
                    return memfilectx(
                        _repo,
                        changectx=ctx,
                        path=node['path'],
                        data=node['content'],
                        islink=False,
                        # executable bit derived from the posix mode
                        isexec=bool(node['mode'] & stat.S_IXUSR),
                        copysource=False)

            raise exceptions.AbortException()(
                "Given path haven't been marked as added, "
                "changed or removed (%s)" % path)

        # force the phase of the new commit for the duration of the call
        with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):

            commit_ctx = memctx(
                repo=repo,
                parents=parents,
                text=message,
                files=files,
                filectxfn=_filectxfn,
                user=user,
                date=(commit_time, commit_timezone),
                extra=extra)

            n = repo.commitctx(commit_ctx)
            new_id = hex(n)

        return new_id
855 856
856 857 @reraise_safe_exceptions
857 858 def pull(self, wire, url, commit_ids=None):
858 859 repo = self._factory.repo(wire)
859 860 # Disable any prompts for this repo
860 861 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
861 862
862 863 remote = peer(repo, {}, url)
863 864 # Disable any prompts for this remote
864 865 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
865 866
866 867 if commit_ids:
867 868 commit_ids = [bin(commit_id) for commit_id in commit_ids]
868 869
869 870 return exchange.pull(
870 871 repo, remote, heads=commit_ids, force=None).cgresult
871 872
872 873 @reraise_safe_exceptions
873 874 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
874 875 repo = self._factory.repo(wire)
875 876 baseui = self._factory._create_config(wire['config'], hooks=hooks)
876 877
877 878 # Mercurial internally has a lot of logic that checks ONLY if
878 879 # option is defined, we just pass those if they are defined then
879 880 opts = {}
880 881 if bookmark:
881 882 opts['bookmark'] = bookmark
882 883 if branch:
883 884 opts['branch'] = branch
884 885 if revision:
885 886 opts['rev'] = revision
886 887
887 888 commands.pull(baseui, repo, source, **opts)
888 889
889 890 @reraise_safe_exceptions
890 891 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
891 892 repo = self._factory.repo(wire)
892 893 baseui = self._factory._create_config(wire['config'], hooks=hooks)
893 894 commands.push(baseui, repo, dest=dest_path, rev=revisions,
894 895 new_branch=push_branches)
895 896
    @reraise_safe_exceptions
    def strip(self, wire, revision, update, backup):
        """
        Strip `revision` (and descendants) from the repository via the
        mercurial strip extension.

        :param update: whether to update the working directory afterwards.
        :param backup: whether to keep a backup bundle of stripped changesets.
        """
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        hgext_strip(
            repo.baseui, repo, ctx.node(), update=update, backup=backup)
902 903
903 904 @reraise_safe_exceptions
904 905 def get_unresolved_files(self, wire):
905 906 repo = self._factory.repo(wire)
906 907
907 908 log.debug('Calculating unresolved files for repo: %s', repo)
908 909 output = io.BytesIO()
909 910
910 911 def write(data, **unused_kwargs):
911 912 output.write(data)
912 913
913 914 baseui = self._factory._create_config(wire['config'])
914 915 baseui.write = write
915 916
916 917 commands.resolve(baseui, repo, list=True)
917 918 unresolved = output.getvalue().splitlines(0)
918 919 return unresolved
919 920
920 921 @reraise_safe_exceptions
921 922 def merge(self, wire, revision):
922 923 repo = self._factory.repo(wire)
923 924 baseui = self._factory._create_config(wire['config'])
924 925 repo.ui.setconfig('ui', 'merge', 'internal:dump')
925 926
926 927 # In case of sub repositories are used mercurial prompts the user in
927 928 # case of merge conflicts or different sub repository sources. By
928 929 # setting the interactive flag to `False` mercurial doesn't prompt the
929 930 # used but instead uses a default value.
930 931 repo.ui.setconfig('ui', 'interactive', False)
931 932 commands.merge(baseui, repo, rev=revision)
932 933
933 934 @reraise_safe_exceptions
934 935 def merge_state(self, wire):
935 936 repo = self._factory.repo(wire)
936 937 repo.ui.setconfig('ui', 'merge', 'internal:dump')
937 938
938 939 # In case of sub repositories are used mercurial prompts the user in
939 940 # case of merge conflicts or different sub repository sources. By
940 941 # setting the interactive flag to `False` mercurial doesn't prompt the
941 942 # used but instead uses a default value.
942 943 repo.ui.setconfig('ui', 'interactive', False)
943 944 ms = hg_merge.mergestate(repo)
944 945 return [x for x in ms.unresolved()]
945 946
946 947 @reraise_safe_exceptions
947 948 def commit(self, wire, message, username, close_branch=False):
948 949 repo = self._factory.repo(wire)
949 950 baseui = self._factory._create_config(wire['config'])
950 951 repo.ui.setconfig('ui', 'username', username)
951 952 commands.commit(baseui, repo, message=message, close_branch=close_branch)
952 953
953 954 @reraise_safe_exceptions
954 955 def rebase(self, wire, source=None, dest=None, abort=False):
955 956 repo = self._factory.repo(wire)
956 957 baseui = self._factory._create_config(wire['config'])
957 958 repo.ui.setconfig('ui', 'merge', 'internal:dump')
958 959 # In case of sub repositories are used mercurial prompts the user in
959 960 # case of merge conflicts or different sub repository sources. By
960 961 # setting the interactive flag to `False` mercurial doesn't prompt the
961 962 # used but instead uses a default value.
962 963 repo.ui.setconfig('ui', 'interactive', False)
963 964 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
964 965
    @reraise_safe_exceptions
    def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
        """
        Create a tag `name` pointing at `revision`.

        :param local: when true, creates a local (non-versioned) tag.
        :param user: ident of the tagging user.
        :param tag_time: tag timestamp (seconds).
        :param tag_timezone: timezone offset for the timestamp.
        :raises: a vcsserver AbortException wrapping mercurial's Abort.
        """
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        node = ctx.node()

        date = (tag_time, tag_timezone)
        try:
            hg_tag.tag(repo, name, node, message, local, user, date)
        except Abort as e:
            log.exception("Tag operation aborted")
            # Exception can contain unicode which we convert
            raise exceptions.AbortException(e)(repr(e))
978 979
979 980 @reraise_safe_exceptions
980 981 def bookmark(self, wire, bookmark, revision=None):
981 982 repo = self._factory.repo(wire)
982 983 baseui = self._factory._create_config(wire['config'])
983 984 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
984 985
    @reraise_safe_exceptions
    def install_hooks(self, wire, force=False):
        """No-op: mercurial repositories need no special hooks installed."""
        # we don't need any special hooks for Mercurial
        pass
989 990
990 991 @reraise_safe_exceptions
991 992 def get_hooks_info(self, wire):
992 993 return {
993 994 'pre_version': vcsserver.__version__,
994 995 'post_version': vcsserver.__version__,
995 996 }
996 997
    @reraise_safe_exceptions
    def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
                     archive_dir_name, commit_id):
        """
        Produce an archive (of type `kind`) of the repository content at
        `commit_id`, delegating the actual packing to the module-level
        `archive_repo` helper and feeding it files via `file_walker`.

        :param archive_at_path: sub-path to restrict the archive to; '' or '/'
            means the whole tree.
        """

        def file_walker(_commit_id, path):
            # lazily open the repo inside the walker so the generator owns it
            repo = self._factory.repo(wire)
            ctx = repo[_commit_id]
            is_root = path in ['', '/']
            if is_root:
                matcher = alwaysmatcher(badfn=None)
            else:
                # match everything below `path` using a glob pattern
                matcher = patternmatcher('', [(b'glob', path+'/**', b'')], badfn=None)
            file_iter = ctx.manifest().walk(matcher)

            for fn in file_iter:
                file_path = fn
                flags = ctx.flags(fn)
                # 0o755 for executables, 0o644 otherwise
                mode = b'x' in flags and 0o755 or 0o644
                is_link = b'l' in flags

                # data is passed as the bound method (callable), not bytes
                yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)

        return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
                            archive_dir_name, commit_id)
1021 1022
@@ -1,79 +1,79 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 """
19 19 Mercurial libs compatibility
20 20 """
21 21
22 22 import mercurial
23 23 from mercurial import demandimport
24 24 # patch demandimport, due to bug in mercurial when it always triggers
25 25 # demandimport.enable()
26 26 demandimport.enable = lambda *args, **kwargs: 1
27 27
28 28 from mercurial import ui
29 29 from mercurial import patch
30 30 from mercurial import config
31 31 from mercurial import extensions
32 32 from mercurial import scmutil
33 33 from mercurial import archival
34 34 from mercurial import discovery
35 35 from mercurial import unionrepo
36 36 from mercurial import localrepo
37 37 from mercurial import merge as hg_merge
38 38 from mercurial import subrepo
39 39 from mercurial import subrepoutil
40 40 from mercurial import tags as hg_tag
41 41 from mercurial import util as hgutil
42 42 from mercurial.commands import clone, nullid, pull
43 43 from mercurial.context import memctx, memfilectx
44 44 from mercurial.error import (
45 45 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
46 46 RequirementError, ProgrammingError)
47 47 from mercurial.hgweb import hgweb_mod
48 48 from mercurial.localrepo import instance
49 49 from mercurial.match import match, alwaysmatcher, patternmatcher
50 50 from mercurial.mdiff import diffopts
51 51 from mercurial.node import bin, hex
52 52 from mercurial.encoding import tolocal
53 53 from mercurial.discovery import findcommonoutgoing
54 54 from mercurial.hg import peer
55 55 from mercurial.httppeer import makepeer
56 56 from mercurial.util import url as hg_url
57 57 from mercurial.scmutil import revrange, revsymbol
58 58 from mercurial.node import nullrev
59 59 from mercurial import exchange
60 60 from hgext import largefiles
61 61
# these auth handlers are patched for a python 2.6.5 bug causing
# infinite looping when given invalid resources
64 64 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
65 65
66 66
def get_ctx(repo, ref):
    """
    Return the mercurial changectx for `ref`, falling back to the slower but
    backward-compatible `revsymbol` lookup when a plain `repo[ref]` fails.

    :param ref: revision identifier (numeric rev, hex node, or symbolic name).
    :raises: re-raises lookup errors for numeric refs that cannot be resolved.
    """
    try:
        ctx = repo[ref]
    except (ProgrammingError, TypeError):
        # we're unable to find the rev using a regular lookup, we fallback
        # to slower, but backward compat revsymbol usage
        ctx = revsymbol(repo, ref)
    except (LookupError, RepoLookupError):
        # Similar case as above but only for refs that are not numeric
        if isinstance(ref, (int, long)):  # noqa: `long` -- this file is py2
            raise
        ctx = revsymbol(repo, ref)
    return ctx
@@ -1,704 +1,705 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import sys
20 20 import base64
21 21 import locale
22 22 import logging
23 23 import uuid
24 24 import wsgiref.util
25 25 import traceback
26 26 import tempfile
27 27 import psutil
28 28 from itertools import chain
29 29 from cStringIO import StringIO
30 30
31 31 import simplejson as json
32 32 import msgpack
33 33 from pyramid.config import Configurator
34 34 from pyramid.settings import asbool, aslist
35 35 from pyramid.wsgi import wsgiapp
36 36 from pyramid.compat import configparser
37 37 from pyramid.response import Response
38 38
39 39 from vcsserver.utils import safe_int
40 40
41 41 log = logging.getLogger(__name__)
42 42
43 43 # due to Mercurial/glibc2.27 problems we need to detect if locale settings are
44 44 # causing problems and "fix" it in case they do and fallback to LC_ALL = C
45 45
46 46 try:
47 47 locale.setlocale(locale.LC_ALL, '')
48 48 except locale.Error as e:
49 49 log.error(
50 50 'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
51 51 os.environ['LC_ALL'] = 'C'
52 52
53 53 import vcsserver
54 54 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
55 55 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
56 56 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
57 57 from vcsserver.echo_stub.echo_app import EchoApp
58 58 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
59 59 from vcsserver.lib.exc_tracking import store_exception
60 60 from vcsserver.server import VcsServer
61 61
62 62 try:
63 63 from vcsserver.git import GitFactory, GitRemote
64 64 except ImportError:
65 65 GitFactory = None
66 66 GitRemote = None
67 67
68 68 try:
69 69 from vcsserver.hg import MercurialFactory, HgRemote
70 70 except ImportError:
71 71 MercurialFactory = None
72 72 HgRemote = None
73 73
74 74 try:
75 75 from vcsserver.svn import SubversionFactory, SvnRemote
76 76 except ImportError:
77 77 SubversionFactory = None
78 78 SvnRemote = None
79 79
80 80
81 81 def _is_request_chunked(environ):
82 82 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
83 83 return stream
84 84
85 85
86 86 def _int_setting(settings, name, default):
87 87 settings[name] = int(settings.get(name, default))
88 88 return settings[name]
89 89
90 90
def _bool_setting(settings, name, default):
    """Coerce settings[name] to bool via pyramid's asbool, store and return it."""
    input_val = settings.get(name, default)
    # py2-only: normalize unicode ini values to bytes before asbool
    if isinstance(input_val, unicode):
        input_val = input_val.encode('utf8')
    settings[name] = asbool(input_val)
    return settings[name]
97 97
98 98
def _list_setting(settings, name, default):
    """Coerce settings[name] to a list via pyramid's aslist, store and return it."""
    raw_value = settings.get(name, default)

    # Otherwise we assume it uses pyramids space/newline separation.
    settings[name] = aslist(raw_value)
    return settings[name]
105 105
106 106
107 107 def _string_setting(settings, name, default, lower=True, default_when_empty=False):
108 108 value = settings.get(name, default)
109 109
110 110 if default_when_empty and not value:
111 111 # use default value when value is empty
112 112 value = default
113 113
114 114 if lower:
115 115 value = value.lower()
116 116 settings[name] = value
117 117 return settings[name]
118 118
119 119
def log_max_fd():
    """Log the process' hard RLIMIT_NOFILE limit; best-effort, never raises."""
    try:
        # index [1] is the hard limit of the (soft, hard) rlimit pair
        maxfd = psutil.Process().rlimit(psutil.RLIMIT_NOFILE)[1]
        log.info('Max file descriptors value: %s', maxfd)
    except Exception:
        # purely informational -- ignore platforms where rlimit is unavailable
        pass
126 126
127 127
class VCS(object):
    """
    Container wiring up the per-backend remote objects (hg/git/svn) and the
    server remote, plus locale configuration for the process.
    """

    def __init__(self, locale_conf=None, cache_config=None):
        # locale string to force via LC_ALL ('' -> use environment variables)
        self.locale = locale_conf
        # full settings dict, kept for cache configuration
        self.cache_config = cache_config
        self._configure_locale()

        log_max_fd()

        # NOTE(review): when a backend import failed, the matching attribute
        # (_git_remote/_hg_remote/_svn_remote) is never set -- later access
        # would raise AttributeError; only an info line is logged here.
        if GitFactory and GitRemote:
            git_factory = GitFactory()
            self._git_remote = GitRemote(git_factory)
        else:
            log.info("Git client import failed")

        if MercurialFactory and HgRemote:
            hg_factory = MercurialFactory()
            self._hg_remote = HgRemote(hg_factory)
        else:
            log.info("Mercurial client import failed")

        if SubversionFactory and SvnRemote:
            svn_factory = SubversionFactory()

            # hg factory is used for svn url validation
            hg_factory = MercurialFactory()
            self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
        else:
            log.info("Subversion client import failed")

        self._vcsserver = VcsServer()

    def _configure_locale(self):
        """Apply self.locale (or the environment) through locale.setlocale."""
        if self.locale:
            log.info('Settings locale: `LC_ALL` to %s', self.locale)
        else:
            log.info(
                'Configuring locale subsystem based on environment variables')
        try:
            # If self.locale is the empty string, then the locale
            # module will use the environment variables. See the
            # documentation of the package `locale`.
            locale.setlocale(locale.LC_ALL, self.locale)

            language_code, encoding = locale.getlocale()
            log.info(
                'Locale set to language code "%s" with encoding "%s".',
                language_code, encoding)
        except locale.Error:
            log.exception(
                'Cannot set locale, not configuring the locale system')
178 178
179 179
class WsgiProxy(object):
    """
    WSGI wrapper that forwards a msgpack-encoded RPC payload to an inner
    handler and streams back [error, status, headers, *data] msgpack frames.
    """

    def __init__(self, wsgi):
        # inner object exposing .handle(environ, input_data, *args, **kwargs)
        self.wsgi = wsgi

    def __call__(self, environ, start_response):
        input_data = environ['wsgi.input'].read()
        input_data = msgpack.unpackb(input_data)

        error = None
        try:
            data, status, headers = self.wsgi.handle(
                input_data['environment'], input_data['input_data'],
                *input_data['args'], **input_data['kwargs'])
        except Exception as e:
            # errors are serialized into the response stream, not raised
            data, status, headers = [], None, None
            error = {
                'message': str(e),
                '_vcs_kind': getattr(e, '_vcs_kind', None)
            }

        # PEP 3333: status must be a string like '200 OK' and headers a list
        # of (name, value) tuples; the previous `start_response(200, {})`
        # violated the WSGI spec and breaks spec-enforcing servers.
        start_response('200 OK', [])
        return self._iterator(error, status, headers, data)

    def _iterator(self, error, status, headers, data):
        """Yield msgpack frames: error, status, headers, then each data chunk."""
        initial_data = [
            error,
            status,
            headers,
        ]

        for d in chain(initial_data, data):
            yield msgpack.packb(d)
212 212
213 213
def not_found(request):
    """JSON body returned for unmatched routes."""
    return dict(status='404 NOT FOUND')
216 216
217 217
class VCSViewPredicate(object):
    """
    Pyramid view predicate: the view only matches when the `{backend}` route
    segment names one of the configured remotes.
    """

    def __init__(self, val, config):
        # mapping of backend name -> remote object
        self.remotes = val

    def text(self):
        return 'vcs view method = %s' % (self.remotes.keys(),)

    phash = text

    def __call__(self, context, request):
        """
        View predicate that returns true if given backend is supported by
        defined remotes.
        """
        requested_backend = request.matchdict.get('backend')
        return requested_backend in self.remotes
234 234
235 235
236 236 class HTTPApplication(object):
237 237 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
238 238
239 239 remote_wsgi = remote_wsgi
240 240 _use_echo_app = False
241 241
    def __init__(self, settings=None, global_config=None):
        """
        Build the pyramid Configurator, instantiate the VCS remotes and apply
        settings defaults.

        :param settings: app section settings dict (mutated in place).
        :param global_config: the full ini global config dict.
        """
        self._sanitize_settings_and_apply_defaults(settings)

        self.config = Configurator(settings=settings)
        self.global_config = global_config
        self.config.include('vcsserver.lib.rc_cache')

        settings_locale = settings.get('locale', '') or 'en_US.UTF-8'
        vcs = VCS(locale_conf=settings_locale, cache_config=settings)
        # backend name -> remote dispatch table used by the vcs views
        self._remotes = {
            'hg': vcs._hg_remote,
            'git': vcs._git_remote,
            'svn': vcs._svn_remote,
            'server': vcs._vcsserver,
        }
        if settings.get('dev.use_echo_app', 'false').lower() == 'true':
            self._use_echo_app = True
            log.warning("Using EchoApp for VCS operations.")
            self.remote_wsgi = remote_wsgi_stub

        self._configure_settings(global_config, settings)

        self._configure()
265 265
    def _configure_settings(self, global_config, app_settings):
        """
        Configure the settings module.
        """
        settings_merged = global_config.copy()
        settings_merged.update(app_settings)

        # optional override for the git binary used by the git backend
        git_path = app_settings.get('git_path', None)
        if git_path:
            settings.GIT_EXECUTABLE = git_path
        binary_dir = app_settings.get('core.binary_dir', None)
        if binary_dir:
            settings.BINARY_DIR = binary_dir

        # Store the settings to make them available to other modules.
        vcsserver.PYRAMID_SETTINGS = settings_merged
        vcsserver.CONFIG = settings_merged
283 283
    def _sanitize_settings_and_apply_defaults(self, settings):
        """
        Coerce and default the cache-related settings in place: cache dir,
        exception store path and the repo_object dogpile cache backend.
        """
        temp_store = tempfile.gettempdir()
        default_cache_dir = os.path.join(temp_store, 'rc_cache')

        # save default, cache dir, and use it for all backends later.
        default_cache_dir = _string_setting(
            settings,
            'cache_dir',
            default_cache_dir, lower=False, default_when_empty=True)

        # ensure we have our dir created
        if not os.path.isdir(default_cache_dir):
            os.makedirs(default_cache_dir, mode=0o755)

        # exception store cache
        _string_setting(
            settings,
            'exception_tracker.store_path',
            temp_store, lower=False, default_when_empty=True)

        # repo_object cache
        _string_setting(
            settings,
            'rc_cache.repo_object.backend',
            'dogpile.cache.rc.file_namespace', lower=False)
        # 30 days default expiration
        _int_setting(
            settings,
            'rc_cache.repo_object.expiration_time',
            30 * 24 * 60 * 60)
        _string_setting(
            settings,
            'rc_cache.repo_object.arguments.filename',
            os.path.join(default_cache_dir, 'vcsserver_cache_1'), lower=False)
317 317
    def _configure(self):
        """Register renderers, routes, views, predicates, tweens and request methods."""
        self.config.add_renderer(name='msgpack', factory=self._msgpack_renderer_factory)

        self.config.add_route('service', '/_service')
        self.config.add_route('status', '/status')
        self.config.add_route('hg_proxy', '/proxy/hg')
        self.config.add_route('git_proxy', '/proxy/git')

        # rpc methods
        self.config.add_route('vcs', '/{backend}')

        # streaming rpc remote methods
        self.config.add_route('vcs_stream', '/{backend}/stream')

        # vcs operations clone/push as streaming
        self.config.add_route('stream_git', '/stream/git/*repo_name')
        self.config.add_route('stream_hg', '/stream/hg/*repo_name')

        self.config.add_view(self.status_view, route_name='status', renderer='json')
        self.config.add_view(self.service_view, route_name='service', renderer='msgpack')

        self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
        self.config.add_view(self.git_proxy(), route_name='git_proxy')
        # vcs_view predicate restricts these to configured backends
        self.config.add_view(self.vcs_view, route_name='vcs', renderer='msgpack',
                             vcs_view=self._remotes)
        self.config.add_view(self.vcs_stream_view, route_name='vcs_stream',
                             vcs_view=self._remotes)

        self.config.add_view(self.hg_stream(), route_name='stream_hg')
        self.config.add_view(self.git_stream(), route_name='stream_git')

        self.config.add_view_predicate('vcs_view', VCSViewPredicate)

        self.config.add_notfound_view(not_found, renderer='json')

        # catch-all exception view, turns exceptions into HTTP responses
        self.config.add_view(self.handle_vcs_exception, context=Exception)

        self.config.add_tween(
            'vcsserver.tweens.request_wrapper.RequestWrapperTween',
        )
        self.config.add_request_method(
            'vcsserver.lib.request_counter.get_request_counter',
            'request_count')

        self.config.add_request_method(
            'vcsserver.lib._vendor.statsd.get_statsd_client',
            'statsd', reify=True)
365 365
    def wsgi_app(self):
        """Return the configured pyramid WSGI application."""
        return self.config.make_wsgi_app()
368 368
    def _vcs_view_params(self, request):
        """
        Decode the msgpack RPC request body and resolve the target remote.

        :returns: (payload, remote, method, args, kwargs); when a wire dict is
            present it is inserted as the first positional argument.
        """
        remote = self._remotes[request.matchdict['backend']]
        payload = msgpack.unpackb(request.body, use_list=True)
        method = payload.get('method')
        params = payload['params']
        wire = params.get('wire')
        args = params.get('args')
        kwargs = params.get('kwargs')
        context_uid = None

        if wire:
            try:
                # normalize the context id to a UUID instance
                wire['context'] = context_uid = uuid.UUID(wire['context'])
            except KeyError:
                pass
            args.insert(0, wire)
        repo_state_uid = wire.get('repo_state_uid') if wire else None

        # NOTE(marcink): trading complexity for slight performance
        if log.isEnabledFor(logging.DEBUG):
            no_args_methods = [

            ]
            if method in no_args_methods:
                call_args = ''
            else:
                call_args = args[1:]

            log.debug('method requested:%s with args:%s kwargs:%s context_uid: %s, repo_state_uid:%s',
                      method, call_args, kwargs, context_uid, repo_state_uid)

        return payload, remote, method, args, kwargs
401 401
    def vcs_view(self, request):
        """
        Dispatch an RPC call to the resolved remote method and wrap the result
        (or the exception details) into a JSON-RPC-like response dict.
        """

        payload, remote, method, args, kwargs = self._vcs_view_params(request)
        payload_id = payload.get('id')

        try:
            resp = getattr(remote, method)(*args, **kwargs)
        except Exception as e:
            exc_info = list(sys.exc_info())
            exc_type, exc_value, exc_traceback = exc_info

            # the remote may attach the original exception it translated
            org_exc = getattr(e, '_org_exc', None)
            org_exc_name = None
            org_exc_tb = ''
            if org_exc:
                org_exc_name = org_exc.__class__.__name__
                org_exc_tb = getattr(e, '_org_exc_tb', '')
                # replace our "faked" exception with our org
                exc_info[0] = org_exc.__class__
                exc_info[1] = org_exc

            should_store_exc = True
            if org_exc:
                def get_exc_fqn(_exc_obj):
                    # fully-qualified name of the original exception class
                    module_name = getattr(org_exc.__class__, '__module__', 'UNKNOWN')
                    return module_name + '.' + org_exc_name

                exc_fqn = get_exc_fqn(org_exc)

                # expected lookup failures are noise -- don't persist those
                if exc_fqn in ['mercurial.error.RepoLookupError',
                               'vcsserver.exceptions.RefNotFoundException']:
                    should_store_exc = False

            if should_store_exc:
                store_exception(id(exc_info), exc_info, request_path=request.path)

            tb_info = ''.join(
                traceback.format_exception(exc_type, exc_value, exc_traceback))

            # only whitelisted exception type names are reported to the client
            type_ = e.__class__.__name__
            if type_ not in self.ALLOWED_EXCEPTIONS:
                type_ = None

            resp = {
                'id': payload_id,
                'error': {
                    'message': e.message,  # py2-only attribute
                    'traceback': tb_info,
                    'org_exc': org_exc_name,
                    'org_exc_tb': org_exc_tb,
                    'type': type_
                }
            }

            try:
                resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
            except AttributeError:
                pass
        else:
            resp = {
                'id': payload_id,
                'result': resp
            }

        return resp
466 467
    def vcs_stream_view(self, request):
        """
        Like vcs_view, but streams the (string) result back in chunks as an
        application/octet-stream response.
        """
        payload, remote, method, args, kwargs = self._vcs_view_params(request)
        # this method has a stream: marker we remove it here
        method = method.split('stream:')[-1]
        chunk_size = safe_int(payload.get('chunk_size')) or 4096

        try:
            resp = getattr(remote, method)(*args, **kwargs)
        except Exception as e:
            # no error envelope for streams -- propagate to the exception view
            raise

        def get_chunked_data(method_resp):
            stream = StringIO(method_resp)
            while 1:
                chunk = stream.read(chunk_size)
                if not chunk:
                    break
                yield chunk

        response = Response(app_iter=get_chunked_data(resp))
        response.content_type = 'application/octet-stream'

        return response
490 491
    def status_view(self, request):
        """Health endpoint: report status, vcsserver version and process pid."""
        import vcsserver
        return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
                'pid': os.getpid()}
495 496
    def service_view(self, request):
        """
        Diagnostics endpoint: echo the payload back together with the server
        version, parsed ini sections and the process environment.
        """
        import vcsserver

        payload = msgpack.unpackb(request.body, use_list=True)
        server_config, app_config = {}, {}

        try:
            # re-read the ini file referenced by the global config for display
            path = self.global_config['__file__']
            config = configparser.RawConfigParser()

            config.read(path)

            if config.has_section('server:main'):
                server_config = dict(config.items('server:main'))
            if config.has_section('app:main'):
                app_config = dict(config.items('app:main'))

        except Exception:
            # best-effort: keep empty configs when the ini cannot be parsed
            log.exception('Failed to read .ini file for display')

        environ = os.environ.items()

        resp = {
            'id': payload.get('id'),
            'result': dict(
                version=vcsserver.__version__,
                config=server_config,
                app_config=app_config,
                environ=environ,
                payload=payload,
            )
        }
        return resp
529 530
    def _msgpack_renderer_factory(self, info):
        """Pyramid renderer factory serializing view results with msgpack."""
        def _render(value, system):
            request = system.get('request')
            if request is not None:
                response = request.response
                ct = response.content_type
                # only override when the view didn't set a content type itself
                if ct == response.default_content_type:
                    response.content_type = 'application/x-msgpack'
            return msgpack.packb(value)
        return _render
540 541
    def set_env_from_config(self, environ, config):
        """
        Copy rhodecode-specific data from the repo `config` triples into the
        WSGI environ (REMOTE_USER/HGUSER/REMOTE_HOST, chunked-input flag).
        """
        dict_conf = {}
        try:
            # config is a sequence of (section, key, value) triples; the
            # 'rhodecode' entry carries a json payload
            for elem in config:
                if elem[0] == 'rhodecode':
                    dict_conf = json.loads(elem[2])
                    break
        except Exception:
            log.exception('Failed to fetch SCM CONFIG')
            return

        username = dict_conf.get('username')
        if username:
            environ['REMOTE_USER'] = username
            # mercurial specific, some extension api rely on this
            environ['HGUSER'] = username

        ip = dict_conf.get('ip')
        if ip:
            environ['REMOTE_HOST'] = ip

        if _is_request_chunked(environ):
            # set the compatibility flag for webob
            environ['wsgi.input_terminated'] = True
565 566
    def hg_proxy(self):
        """Return a WSGI app proxying msgpack RPC calls to the hg remote."""
        @wsgiapp
        def _hg_proxy(environ, start_response):
            app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
            return app(environ, start_response)
        return _hg_proxy
572 573
    def git_proxy(self):
        """Return a WSGI app proxying msgpack RPC calls to the git remote."""
        @wsgiapp
        def _git_proxy(environ, start_response):
            app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
            return app(environ, start_response)
        return _git_proxy
579 580
    def hg_stream(self):
        """
        Return the WSGI app serving mercurial clone/push streams (or the echo
        stub when dev.use_echo_app is enabled).
        """
        if self._use_echo_app:
            @wsgiapp
            def _hg_stream(environ, start_response):
                app = EchoApp('fake_path', 'fake_name', None)
                return app(environ, start_response)
            return _hg_stream
        else:
            @wsgiapp
            def _hg_stream(environ, start_response):
                log.debug('http-app: handling hg stream')
                # repo identity and config are passed by the frontend in headers
                repo_path = environ['HTTP_X_RC_REPO_PATH']
                repo_name = environ['HTTP_X_RC_REPO_NAME']
                packed_config = base64.b64decode(
                    environ['HTTP_X_RC_REPO_CONFIG'])
                config = msgpack.unpackb(packed_config)
                app = scm_app.create_hg_wsgi_app(
                    repo_path, repo_name, config)

                # Consistent path information for hgweb
                environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
                environ['REPO_NAME'] = repo_name
                self.set_env_from_config(environ, config)

                log.debug('http-app: starting app handler '
                          'with %s and process request', app)
                # ResponseFilter strips hop-by-hop headers from the response
                return app(environ, ResponseFilter(start_response))
            return _hg_stream
608 609
    def git_stream(self):
        """
        Return the WSGI app serving git clone/push streams, routing LFS
        requests to the LFS app (or the echo stub when dev.use_echo_app is on).
        """
        if self._use_echo_app:
            @wsgiapp
            def _git_stream(environ, start_response):
                app = EchoApp('fake_path', 'fake_name', None)
                return app(environ, start_response)
            return _git_stream
        else:
            @wsgiapp
            def _git_stream(environ, start_response):
                log.debug('http-app: handling git stream')
                # repo identity and config are passed by the frontend in headers
                repo_path = environ['HTTP_X_RC_REPO_PATH']
                repo_name = environ['HTTP_X_RC_REPO_NAME']
                packed_config = base64.b64decode(
                    environ['HTTP_X_RC_REPO_CONFIG'])
                config = msgpack.unpackb(packed_config)

                environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
                self.set_env_from_config(environ, config)

                content_type = environ.get('CONTENT_TYPE', '')

                path = environ['PATH_INFO']
                # LFS detection: first by content type, then by URL pattern
                is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
                log.debug(
                    'LFS: Detecting if request `%s` is LFS server path based '
                    'on content type:`%s`, is_lfs:%s',
                    path, content_type, is_lfs_request)

                if not is_lfs_request:
                    # fallback detection by path
                    if GIT_LFS_PROTO_PAT.match(path):
                        is_lfs_request = True
                        log.debug(
                            'LFS: fallback detection by path of: `%s`, is_lfs:%s',
                            path, is_lfs_request)

                if is_lfs_request:
                    app = scm_app.create_git_lfs_wsgi_app(
                        repo_path, repo_name, config)
                else:
                    app = scm_app.create_git_wsgi_app(
                        repo_path, repo_name, config)

                log.debug('http-app: starting app handler '
                          'with %s and process request', app)

                return app(environ, start_response)

            return _git_stream
659 660
    def handle_vcs_exception(self, exception, request):
        """
        Catch-all pyramid exception view: translate known `_vcs_kind` markers
        into specific HTTP responses, otherwise store/log and re-raise.
        """
        _vcs_kind = getattr(exception, '_vcs_kind', '')
        if _vcs_kind == 'repo_locked':
            # Get custom repo-locked status code if present.
            status_code = request.headers.get('X-RC-Locked-Status-Code')
            return HTTPRepoLocked(
                title=exception.message, status_code=status_code)

        elif _vcs_kind == 'repo_branch_protected':
            # Get custom repo-branch-protected status code if present.
            return HTTPRepoBranchProtected(title=exception.message)

        exc_info = request.exc_info
        # persist the exception for later inspection before re-raising
        store_exception(id(exc_info), exc_info)

        traceback_info = 'unavailable'
        if request.exc_info:
            exc_type, exc_value, exc_tb = request.exc_info
            traceback_info = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))

        log.error(
            'error occurred handling this request for path: %s, \n tb: %s',
            request.path, traceback_info)
        raise exception
684 685
685 686
class ResponseFilter(object):
    """
    start_response wrapper that drops hop-by-hop headers (per RFC 2616) from
    the response before forwarding it to the real start_response.
    """

    def __init__(self, start_response):
        self._start_response = start_response

    def __call__(self, status, response_headers, exc_info=None):
        filtered = tuple(
            (name, value)
            for name, value in response_headers
            if not wsgiref.util.is_hop_by_hop(name)
        )
        return self._start_response(status, filtered, exc_info)
696 697
697 698
def main(global_config, **settings):
    """
    Paste/pyramid entry point: apply mercurial monkey-patches when available
    and return the assembled WSGI application.
    """
    if MercurialFactory:
        hgpatches.patch_largefiles_capabilities()
        hgpatches.patch_subrepo_type_mapping()

    app = HTTPApplication(settings=settings, global_config=global_config)
    return app.wsgi_app()
@@ -1,169 +1,175 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 4 # Copyright (C) 2014-2020 RhodeCode GmbH
5 5 #
6 6 # This program is free software; you can redistribute it and/or modify
7 7 # it under the terms of the GNU General Public License as published by
8 8 # the Free Software Foundation; either version 3 of the License, or
9 9 # (at your option) any later version.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software Foundation,
18 18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 19
20 20
21 21 import os
22 22 import time
23 23 import datetime
24 24 import msgpack
25 25 import logging
26 26 import traceback
27 27 import tempfile
28 28
29 29 from pyramid import compat
30 30
31 31 log = logging.getLogger(__name__)
32 32
33 33 # NOTE: Any changes should be synced with exc_tracking at rhodecode.lib.exc_tracking
34 34 global_prefix = 'vcsserver'
35 35 exc_store_dir_name = 'rc_exception_store_v1'
36 36
37 37
def exc_serialize(exc_id, tb, exc_type):
    """Pack exception metadata into a msgpack blob.

    Returns a ``(packed_bytes, data_dict)`` pair; the dict is also
    returned so callers can reuse fields (e.g. ``exc_timestamp``).
    NOTE: key insertion order is kept as-is — it determines the
    msgpack byte layout.
    """
    utc_date = datetime.datetime.utcnow().isoformat()
    timestamp = repr(time.time())

    data = {
        'version': 'v1',
        'exc_id': exc_id,
        'exc_utc_date': utc_date,
        'exc_timestamp': timestamp,
        'exc_message': tb,
        'exc_type': exc_type,
    }
    return msgpack.packb(data), data
49 49
50 50
def exc_unserialize(tb):
    """Decode a msgpack blob previously produced by ``exc_serialize``."""
    return msgpack.unpackb(tb)
53 53
54 54
def get_exc_store():
    """
    Get and create exception store if it's not existing
    """
    # imported lazily to avoid a circular import at module load time
    import vcsserver as app

    base_dir = app.CONFIG.get('exception_tracker.store_path', '') or tempfile.gettempdir()
    store_path = os.path.abspath(os.path.join(base_dir, exc_store_dir_name))

    if not os.path.isdir(store_path):
        os.makedirs(store_path)
        log.debug('Initializing exceptions store at %s', store_path)
    return store_path
69 69
70 70
def _store_exception(exc_id, exc_info, prefix, request_path=''):
    """Format, serialize and persist an exception to the exception store.

    :param exc_id: unique id used to name/find the stored file
    :param exc_info: ``(exc_type, exc_value, exc_traceback)`` tuple as
        returned by ``sys.exc_info()``
    :param prefix: store prefix grouping exceptions by origin
    :param request_path: optional request path included in the error log
    """
    exc_type, exc_value, exc_traceback = exc_info

    tb = ''.join(traceback.format_exception(
        exc_type, exc_value, exc_traceback, None))

    detailed_tb = getattr(exc_value, '_org_exc_tb', None)

    if detailed_tb:
        # BUGFIX: `remote_tb` was previously only bound when `detailed_tb`
        # was a string; any other truthy value (e.g. a list of lines)
        # raised NameError at the join() below. Default to the raw value
        # and only wrap plain strings.
        remote_tb = detailed_tb
        if isinstance(detailed_tb, compat.string_types):
            remote_tb = [detailed_tb]

        tb += (
            '\n+++ BEG SOURCE EXCEPTION +++\n\n'
            '{}\n'
            '+++ END SOURCE EXCEPTION +++\n'
            ''.format('\n'.join(remote_tb))
        )

        # Avoid that remote_tb also appears in the frame
        del remote_tb

    exc_type_name = exc_type.__name__
    exc_store_path = get_exc_store()
    exc_data, org_data = exc_serialize(exc_id, tb, exc_type_name)
    exc_pref_id = '{}_{}_{}'.format(exc_id, prefix, org_data['exc_timestamp'])
    if not os.path.isdir(exc_store_path):
        os.makedirs(exc_store_path)
    stored_exc_path = os.path.join(exc_store_path, exc_pref_id)
    with open(stored_exc_path, 'wb') as f:
        f.write(exc_data)
    log.debug('Stored generated exception %s as: %s', exc_id, stored_exc_path)

    log.error(
        'error occurred handling this request.\n'
        'Path: `%s`, tb: %s',
        request_path, tb)
104 108
def store_exception(exc_id, exc_info, prefix=global_prefix, request_path=''):
    """
    Example usage::

        exc_info = sys.exc_info()
        store_exception(id(exc_info), exc_info)
    """
    try:
        _store_exception(
            exc_id=exc_id, exc_info=exc_info, prefix=prefix,
            request_path=request_path)
    except Exception:
        # there's no way this can fail, it will crash server badly if it does.
        log.exception('Failed to store exception `%s` information', exc_id)
119 125
120 126
def _find_exc_file(exc_id, prefix=global_prefix):
    """Locate a stored-exception file whose name starts with ``exc_id``.

    :param exc_id: base exception id; when ``prefix`` is set the search
        pattern becomes ``"<exc_id>_<prefix>"``
    :return: full path of the first matching file, or None when no file
        in the store matches
    """
    exc_store_path = get_exc_store()
    if prefix:
        exc_id = '{}_{}'.format(exc_id, prefix)
    else:
        # search without a prefix
        exc_id = '{}'.format(exc_id)

    # we need to search the store for such start pattern as above.
    # (rewritten as an early return; the original loop carried an
    # unreachable `continue` after `break` and a for/else fallthrough)
    for fname in os.listdir(exc_store_path):
        if fname.startswith(exc_id):
            return os.path.join(exc_store_path, fname)
    return None
139 145
140 146
def _read_exception(exc_id, prefix):
    """Load and deserialize a stored exception; None when not found."""
    exc_id_file_path = _find_exc_file(exc_id=exc_id, prefix=prefix)
    if not exc_id_file_path:
        log.debug('Exception File `%s` not found', exc_id_file_path)
        return None
    with open(exc_id_file_path, 'rb') as f:
        return exc_unserialize(f.read())
149 155
150 156
def read_exception(exc_id, prefix=global_prefix):
    """Best-effort read of a stored exception; never raises."""
    try:
        return _read_exception(exc_id=exc_id, prefix=prefix)
    except Exception:
        # there's no way this can fail, it will crash server badly if it does.
        log.exception('Failed to read exception `%s` information', exc_id)
    return None
158 164
159 165
def delete_exception(exc_id, prefix=global_prefix):
    """Best-effort removal of a stored exception file; never raises."""
    try:
        found_path = _find_exc_file(exc_id, prefix=prefix)
        if found_path:
            os.remove(found_path)
    except Exception:
        # there's no way this can fail, it will crash server badly if it does.
        log.exception('Failed to remove exception `%s` information', exc_id)
@@ -1,253 +1,307 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import time
19 19 import errno
20 20 import logging
21 21
22 22 import msgpack
23 23 import redis
24 24
25 25 from dogpile.cache.api import CachedValue
26 26 from dogpile.cache.backends import memory as memory_backend
27 27 from dogpile.cache.backends import file as file_backend
28 28 from dogpile.cache.backends import redis as redis_backend
29 29 from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
30 30 from dogpile.cache.util import memoized_property
31 31
32 32 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
33 33
34 34
35 35 _default_max_size = 1024
36 36
37 37 log = logging.getLogger(__name__)
38 38
39 39
class LRUMemoryBackend(memory_backend.MemoryBackend):
    """In-memory dogpile backend bounded by an LRU dict.

    ``max_size`` caps the number of cached entries; ``log_key_count``
    switches to the debug LRU dict variant.
    """
    key_prefix = 'lru_mem_backend'
    pickle_values = False

    def __init__(self, arguments):
        max_size = arguments.pop('max_size', _default_max_size)

        # the pop() must always run so the argument is consumed either way
        use_debug = arguments.pop('log_key_count', None)
        dict_cls = LRUDictDebug if use_debug else LRUDict

        arguments['cache_dict'] = dict_cls(max_size)
        super(LRUMemoryBackend, self).__init__(arguments)

    def delete(self, key):
        try:
            del self._cache[key]
        except KeyError:
            # we don't care if key isn't there at deletion
            pass

    def delete_multi(self, keys):
        for key in keys:
            self.delete(key)
64 64
65 65
class PickleSerializer(object):
    """Mixin (de)serializing cached values with pickle.

    ``safe`` controls whether failures return ``NO_VALUE`` (treated as
    a cache miss) or propagate.
    """

    def _dumps(self, value, safe=False):
        try:
            return compat.pickle.dumps(value)
        except Exception:
            if not safe:
                raise
            return NO_VALUE

    def _loads(self, value, safe=True):
        try:
            return compat.pickle.loads(value)
        except Exception:
            if not safe:
                raise
            return NO_VALUE
84 84 raise
85 85
86 86
class MsgPackSerializer(object):
    """Mixin (de)serializing cached values with msgpack.

    ``safe`` controls whether failures return ``NO_VALUE`` (treated as
    a cache miss) or propagate.
    """

    def _dumps(self, value, safe=False):
        try:
            return msgpack.packb(value)
        except Exception:
            if not safe:
                raise
            return NO_VALUE

    def _loads(self, value, safe=True):
        """
        pickle maintained the `CachedValue` wrapper of the tuple
        msgpack does not, so it must be added back in.
        """
        try:
            unpacked = msgpack.unpackb(value, use_list=False)
            return CachedValue(*unpacked)
        except Exception:
            if not safe:
                raise
            return NO_VALUE
111 111
112 112
# NOTE(review): `flock_org` keeps a reference to the original fcntl.flock —
# presumably so it can be restored/used if flock gets monkey-patched
# elsewhere; TODO confirm against callers outside this view.
import fcntl
flock_org = fcntl.flock
115 115
116 116
class CustomLockFactory(FileLock):
    # Pass-through subclass of dogpile's FileLock: currently identical in
    # behavior, kept as an extension point for customized file locking.

    pass
120 120
121 121
class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
    """DBM-file dogpile backend with pickle value serialization.

    Wraps the DBM operations with error logging so failures can be
    traced back to the concrete database file.
    """
    key_prefix = 'file_backend'

    def __init__(self, arguments):
        arguments['lock_factory'] = CustomLockFactory
        db_file = arguments.get('filename')

        log.debug('initialing %s DB in %s', self.__class__.__name__, db_file)
        try:
            super(FileNamespaceBackend, self).__init__(arguments)
        except Exception:
            log.error('Failed to initialize db at: %s', db_file)
            raise

    def __repr__(self):
        return '{} `{}`'.format(self.__class__, self.filename)

    def list_keys(self, prefix=''):
        prefix = '{}:{}'.format(self.key_prefix, prefix)

        def _matches(key_name):
            # an empty prefix matches every key
            if not prefix:
                return True
            return key_name.startswith(prefix)

        with self._dbm_file(True) as dbm:
            try:
                return filter(_matches, dbm.keys())
            except Exception:
                log.error('Failed to fetch DBM keys from DB: %s', self.get_store())
                raise

    def get_store(self):
        return self.filename

    def _dbm_get(self, key):
        # raw fetch + deserialize; NO_VALUE signals a miss to dogpile
        with self._dbm_file(False) as dbm:
            if hasattr(dbm, 'get'):
                value = dbm.get(key, NO_VALUE)
            else:
                # gdbm objects lack a .get method
                try:
                    value = dbm[key]
                except KeyError:
                    value = NO_VALUE
            if value is not NO_VALUE:
                value = self._loads(value)
            return value

    def get(self, key):
        try:
            return self._dbm_get(key)
        except Exception:
            log.error('Failed to fetch DBM key %s from DB: %s', key, self.get_store())
            raise

    def set(self, key, value):
        with self._dbm_file(True) as dbm:
            dbm[key] = self._dumps(value)

    def set_multi(self, mapping):
        with self._dbm_file(True) as dbm:
            for key, value in mapping.items():
                dbm[key] = self._dumps(value)
172 189
173 190
class BaseRedisBackend(redis_backend.RedisBackend):
    """Shared redis-backed dogpile backend behavior.

    Serialization (`_dumps`/`_loads`) comes from a mixin; concrete
    subclasses only pick the serializer and a key prefix.
    """

    def _create_client(self):
        # a full URL wins over discrete host/port/db/password settings
        if self.url is not None:
            args = {'url': self.url}
        else:
            args = {
                'host': self.host,
                'password': self.password,
                'port': self.port,
                'db': self.db,
            }

        connection_pool = redis.ConnectionPool(**args)
        return redis.StrictRedis(connection_pool=connection_pool)

    def list_keys(self, prefix=''):
        pattern = '{}:{}*'.format(self.key_prefix, prefix)
        return self.client.keys(pattern)

    def get_store(self):
        return self.client.connection_pool

    def get(self, key):
        raw = self.client.get(key)
        if raw is None:
            return NO_VALUE
        return self._loads(raw)

    def get_multi(self, keys):
        if not keys:
            return []
        raw_values = self.client.mget(keys)
        loads = self._loads
        return [
            NO_VALUE if raw is None else loads(raw)
            for raw in raw_values]

    def set(self, key, value):
        payload = self._dumps(value)
        if self.redis_expiration_time:
            self.client.setex(key, self.redis_expiration_time, payload)
        else:
            self.client.set(key, payload)

    def set_multi(self, mapping):
        dumps = self._dumps
        serialized = {k: dumps(v) for k, v in mapping.items()}

        if not self.redis_expiration_time:
            self.client.mset(serialized)
        else:
            # mset cannot attach TTLs; pipeline individual setex calls
            pipe = self.client.pipeline()
            for key, value in serialized.items():
                pipe.setex(key, self.redis_expiration_time, value)
            pipe.execute()

    def get_mutex(self, key):
        if not self.distributed_lock:
            return None

        lock_key = redis_backend.u('_lock_{0}').format(key)
        log.debug('Trying to acquire Redis lock for key %s', lock_key)

        auto_renewal = True
        lock_timeout = self.lock_timeout
        if auto_renewal and not self.lock_timeout:
            # set default timeout for auto_renewal
            lock_timeout = 10
        return get_mutex_lock(self.client, lock_key, lock_timeout,
                              auto_renewal=auto_renewal)
244 267
245 268
class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
    """Redis backend storing pickle-serialized values."""
    key_prefix = 'redis_pickle_backend'
249 272
250 273
class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
    """Redis backend storing msgpack-serialized values."""
    key_prefix = 'redis_msgpack_backend'
277
278
def get_mutex_lock(client, lock_key, lock_timeout, auto_renewal=False):
    """Build a dogpile-compatible mutex backed by ``redis_lock``.

    :param client: a StrictRedis client
    :param lock_key: redis key name for the lock
    :param lock_timeout: lock expiry in seconds
    :param auto_renewal: keep extending the lock from a background thread
    :return: object exposing ``acquire(wait=True)`` / ``release()``
    """
    import redis_lock

    class _RedisLockWrapper(object):
        """LockWrapper for redis_lock"""

        @classmethod
        def get_lock(cls):
            return redis_lock.Lock(
                redis_client=client,
                name=lock_key,
                expire=lock_timeout,
                auto_renewal=auto_renewal,
                strict=True,
            )

        def __init__(self):
            # BUGFIX: the lock must be created ONCE and reused. The previous
            # `lock` property built a fresh redis_lock.Lock (with a fresh
            # owner id) on every access, so release() targeted a different
            # lock object than acquire(): release raised NotAcquired (which
            # was swallowed) and, with auto_renewal, the acquired lock's
            # renewal thread kept it alive indefinitely.
            self.lock = self.get_lock()

        def acquire(self, wait=True):
            return self.lock.acquire(wait)

        def release(self):
            try:
                self.lock.release()
            except redis_lock.NotAcquired:
                pass

    return _RedisLockWrapper()
General Comments 0
You need to be logged in to leave comments. Login now