##// END OF EJS Templates
merged stable into default
super-admin -
r947:4c8695a6 merge default
parent child Browse files
Show More
@@ -0,0 +1,16 b''
1 [DEFAULT]
2 done = false
3
4 [task:bump_version]
5 done = true
6
7 [task:fixes_on_stable]
8 done = true
9
10 [task:pip2nix_generated]
11 done = true
12
13 [release]
14 state = prepared
15 version = 4.25.2
16
@@ -0,0 +1,390 b''
1 import sys
2 import threading
3 import weakref
4 from base64 import b64encode
5 from logging import getLogger
6 from os import urandom
7
8 from redis import StrictRedis
9
10 __version__ = '3.7.0'
11
12 loggers = {
13 k: getLogger("vcsserver." + ".".join((__name__, k)))
14 for k in [
15 "acquire",
16 "refresh.thread.start",
17 "refresh.thread.stop",
18 "refresh.thread.exit",
19 "refresh.start",
20 "refresh.shutdown",
21 "refresh.exit",
22 "release",
23 ]
24 }
25
26 PY3 = sys.version_info[0] == 3
27
28 if PY3:
29 text_type = str
30 binary_type = bytes
31 else:
32 text_type = unicode # noqa
33 binary_type = str
34
35
36 # Check if the id match. If not, return an error code.
37 UNLOCK_SCRIPT = b"""
38 if redis.call("get", KEYS[1]) ~= ARGV[1] then
39 return 1
40 else
41 redis.call("del", KEYS[2])
42 redis.call("lpush", KEYS[2], 1)
43 redis.call("pexpire", KEYS[2], ARGV[2])
44 redis.call("del", KEYS[1])
45 return 0
46 end
47 """
48
49 # Covers both cases when key doesn't exist and doesn't equal to lock's id
50 EXTEND_SCRIPT = b"""
51 if redis.call("get", KEYS[1]) ~= ARGV[1] then
52 return 1
53 elseif redis.call("ttl", KEYS[1]) < 0 then
54 return 2
55 else
56 redis.call("expire", KEYS[1], ARGV[2])
57 return 0
58 end
59 """
60
61 RESET_SCRIPT = b"""
62 redis.call('del', KEYS[2])
63 redis.call('lpush', KEYS[2], 1)
64 redis.call('pexpire', KEYS[2], ARGV[2])
65 return redis.call('del', KEYS[1])
66 """
67
68 RESET_ALL_SCRIPT = b"""
69 local locks = redis.call('keys', 'lock:*')
70 local signal
71 for _, lock in pairs(locks) do
72 signal = 'lock-signal:' .. string.sub(lock, 6)
73 redis.call('del', signal)
74 redis.call('lpush', signal, 1)
75 redis.call('expire', signal, 1)
76 redis.call('del', lock)
77 end
78 return #locks
79 """
80
81
class AlreadyAcquired(RuntimeError):
    """Raised when acquire() is called on a lock this instance already holds."""
84
85
class NotAcquired(RuntimeError):
    """Raised when releasing/extending a lock that is not held or already expired."""
88
89
class AlreadyStarted(RuntimeError):
    """Raised when the lock-renewal thread is started a second time."""
92
93
class TimeoutNotUsable(RuntimeError):
    """Raised when a timeout is supplied together with blocking=False."""
96
97
class InvalidTimeout(RuntimeError):
    """Raised when an out-of-range timeout value is supplied."""
100
101
class TimeoutTooLarge(RuntimeError):
    """Raised when the timeout exceeds the lock's expire time."""
104
105
class NotExpirable(RuntimeError):
    """Raised when extending a lock that has no expiration time assigned."""
108
109
class Lock(object):
    """
    A Lock context manager implemented via redis SETNX/BLPOP.

    The lock value is a unique id; the UNLOCK/EXTEND Lua scripts compare it
    before mutating the key, so only the holder can release or extend.
    Waiters block on BLPOP of a companion "lock-signal:<name>" list that the
    unlock script pushes to.
    """
    # Registered redis Script objects, shared per-class (populated by
    # register_scripts() on first instantiation).
    unlock_script = None
    extend_script = None
    reset_script = None
    reset_all_script = None

    def __init__(self, redis_client, name, expire=None, id=None, auto_renewal=False, strict=True, signal_expire=1000):
        """
        :param redis_client:
            An instance of :class:`~StrictRedis`.
        :param name:
            The name (redis key) the lock should have.
        :param expire:
            The lock expiry time in seconds. If left at the default (None)
            the lock will not expire.
        :param id:
            The ID (redis value) the lock should have. A random value is
            generated when left at the default.

            Note that if you specify this then the lock is marked as "held". Acquires
            won't be possible.
        :param auto_renewal:
            If set to ``True``, Lock will automatically renew the lock so that it
            doesn't expire for as long as the lock is held (acquire() called
            or running in a context manager).

            Implementation note: Renewal will happen using a daemon thread with
            an interval of ``expire*2/3``. If wishing to use a different renewal
            time, subclass Lock, call ``super().__init__()`` then set
            ``self._lock_renewal_interval`` to your desired interval.
        :param strict:
            If set ``True`` then the ``redis_client`` needs to be an instance of ``redis.StrictRedis``.
        :param signal_expire:
            Advanced option to override signal list expiration in milliseconds. Increase it for very slow clients. Default: ``1000``.
        :raises ValueError:
            If ``redis_client`` fails the strict check, ``expire`` is negative,
            or ``auto_renewal`` is requested without an ``expire``.
        :raises TypeError:
            If ``id`` is neither bytes nor text.
        """
        if strict and not isinstance(redis_client, StrictRedis):
            raise ValueError("redis_client must be instance of StrictRedis. "
                             "Use strict=False if you know what you're doing.")
        if auto_renewal and expire is None:
            raise ValueError("Expire may not be None when auto_renewal is set")

        self._client = redis_client

        if expire:
            expire = int(expire)
            if expire < 0:
                raise ValueError("A negative expire is not acceptable.")
        else:
            # Treat 0/None/falsy uniformly as "no expiration".
            expire = None
        self._expire = expire

        self._signal_expire = signal_expire
        if id is None:
            # 18 random bytes -> 24-char base64 token; unique per Lock instance.
            self._id = b64encode(urandom(18)).decode('ascii')
        elif isinstance(id, binary_type):
            try:
                self._id = id.decode('ascii')
            except UnicodeDecodeError:
                # Non-ASCII bytes: store a base64 representation instead.
                self._id = b64encode(id).decode('ascii')
        elif isinstance(id, text_type):
            self._id = id
        else:
            raise TypeError("Incorrect type for `id`. Must be bytes/str not %s." % type(id))
        self._name = 'lock:' + name
        self._signal = 'lock-signal:' + name
        # Renew at 2/3 of the expiry period so a renewal always lands before
        # the key would expire.
        self._lock_renewal_interval = (float(expire) * 2 / 3
                                       if auto_renewal
                                       else None)
        self._lock_renewal_thread = None

        self.register_scripts(redis_client)

    @classmethod
    def register_scripts(cls, redis_client):
        """Register the Lua scripts with redis and cache the Script objects.

        ``reset_all_script`` is a module-level global shared by
        :func:`reset_all`; it is only registered once.
        """
        global reset_all_script
        if reset_all_script is None:
            reset_all_script = redis_client.register_script(RESET_ALL_SCRIPT)
        cls.unlock_script = redis_client.register_script(UNLOCK_SCRIPT)
        cls.extend_script = redis_client.register_script(EXTEND_SCRIPT)
        cls.reset_script = redis_client.register_script(RESET_SCRIPT)
        cls.reset_all_script = redis_client.register_script(RESET_ALL_SCRIPT)

    @property
    def _held(self):
        # True when the value stored under the lock key is this instance's id.
        return self.id == self.get_owner_id()

    def reset(self):
        """
        Forcibly deletes the lock. Use this with care.
        """
        self.reset_script(client=self._client, keys=(self._name, self._signal), args=(self.id, self._signal_expire))

    @property
    def id(self):
        # The unique value this instance stores under the lock key.
        return self._id

    def get_owner_id(self):
        """Return the id currently stored under the lock key (or None)."""
        owner_id = self._client.get(self._name)
        if isinstance(owner_id, binary_type):
            owner_id = owner_id.decode('ascii', 'replace')
        return owner_id

    def acquire(self, blocking=True, timeout=None):
        """
        :param blocking:
            Boolean value specifying whether lock should be blocking or not.
        :param timeout:
            An integer value specifying the maximum number of seconds to block.
        :returns:
            ``True`` when the lock was acquired, ``False`` otherwise.
        :raises AlreadyAcquired:
            If this instance already holds the lock.
        :raises TimeoutNotUsable:
            If ``timeout`` is given with ``blocking=False``.
        :raises InvalidTimeout, TimeoutTooLarge:
            For out-of-range ``timeout`` values.
        """
        logger = loggers["acquire"]

        logger.debug("Getting acquire on %r ...", self._name)

        if self._held:
            owner_id = self.get_owner_id()
            raise AlreadyAcquired("Already acquired from this Lock instance. Lock id: {}".format(owner_id))

        if not blocking and timeout is not None:
            raise TimeoutNotUsable("Timeout cannot be used if blocking=False")

        if timeout:
            timeout = int(timeout)
            if timeout < 0:
                raise InvalidTimeout("Timeout (%d) cannot be less than or equal to 0" % timeout)

            if self._expire and not self._lock_renewal_interval and timeout > self._expire:
                raise TimeoutTooLarge("Timeout (%d) cannot be greater than expire (%d)" % (timeout, self._expire))

        busy = True
        blpop_timeout = timeout or self._expire or 0
        timed_out = False
        while busy:
            # SET NX: only succeeds when nobody holds the lock.
            busy = not self._client.set(self._name, self._id, nx=True, ex=self._expire)
            if busy:
                if timed_out:
                    return False
                elif blocking:
                    # Wait for the unlock script to push on the signal list;
                    # only counts as a timeout when an explicit timeout was given.
                    timed_out = not self._client.blpop(self._signal, blpop_timeout) and timeout
                else:
                    logger.warning("Failed to get %r.", self._name)
                    return False

        logger.info("Got lock for %r.", self._name)
        if self._lock_renewal_interval is not None:
            self._start_lock_renewer()
        return True

    def extend(self, expire=None):
        """Extends expiration time of the lock.

        :param expire:
            New expiration time. If ``None`` - `expire` provided during
            lock initialization will be taken.
        :raises NotAcquired:
            If the lock is not held (anymore) by this id.
        :raises NotExpirable:
            If the lock key has no TTL assigned.
        """
        if expire:
            expire = int(expire)
            if expire < 0:
                raise ValueError("A negative expire is not acceptable.")
        elif self._expire is not None:
            expire = self._expire
        else:
            raise TypeError(
                "To extend a lock 'expire' must be provided as an "
                "argument to extend() method or at initialization time."
            )

        error = self.extend_script(client=self._client, keys=(self._name, self._signal), args=(self._id, expire))
        if error == 1:
            raise NotAcquired("Lock %s is not acquired or it already expired." % self._name)
        elif error == 2:
            raise NotExpirable("Lock %s has no assigned expiration time" % self._name)
        elif error:
            raise RuntimeError("Unsupported error code %s from EXTEND script" % error)

    @staticmethod
    def _lock_renewer(lockref, interval, stop):
        """
        Renew the lock key in redis every `interval` seconds for as long
        as `self._lock_renewal_thread.should_exit` is False.

        ``lockref`` is a weakref so the renewal thread never keeps the Lock
        alive on its own.
        """
        while not stop.wait(timeout=interval):
            loggers["refresh.thread.start"].debug("Refreshing lock")
            lock = lockref()
            if lock is None:
                loggers["refresh.thread.stop"].debug(
                    "The lock no longer exists, stopping lock refreshing"
                )
                break
            lock.extend(expire=lock._expire)
            del lock
        loggers["refresh.thread.exit"].debug("Exit requested, stopping lock refreshing")

    def _start_lock_renewer(self):
        """
        Starts the lock refresher thread.

        :raises AlreadyStarted: If a renewal thread is already running.
        """
        if self._lock_renewal_thread is not None:
            raise AlreadyStarted("Lock refresh thread already started")

        loggers["refresh.start"].debug(
            "Starting thread to refresh lock every %s seconds",
            self._lock_renewal_interval
        )
        self._lock_renewal_stop = threading.Event()
        self._lock_renewal_thread = threading.Thread(
            group=None,
            target=self._lock_renewer,
            kwargs={'lockref': weakref.ref(self),
                    'interval': self._lock_renewal_interval,
                    'stop': self._lock_renewal_stop}
        )
        # ``daemon`` attribute instead of the deprecated setDaemon() call.
        self._lock_renewal_thread.daemon = True
        self._lock_renewal_thread.start()

    def _stop_lock_renewer(self):
        """
        Stop the lock renewer.

        This signals the renewal thread and waits for its exit.
        """
        if self._lock_renewal_thread is None or not self._lock_renewal_thread.is_alive():
            return
        loggers["refresh.shutdown"].debug("Signalling the lock refresher to stop")
        self._lock_renewal_stop.set()
        self._lock_renewal_thread.join()
        self._lock_renewal_thread = None
        loggers["refresh.exit"].debug("Lock refresher has stopped")

    def __enter__(self):
        acquired = self.acquire(blocking=True)
        assert acquired, "Lock wasn't acquired, but blocking=True"
        return self

    def __exit__(self, exc_type=None, exc_value=None, traceback=None):
        self.release()

    def release(self):
        """Releases the lock, that was acquired with the same object.

        .. note::

            If you want to release a lock that you acquired in a different place you have two choices:

            * Use ``Lock("name", id=id_from_other_place).release()``
            * Use ``Lock("name").reset()``

        :raises NotAcquired:
            If the lock is not held (anymore) by this id.
        """
        if self._lock_renewal_thread is not None:
            self._stop_lock_renewer()
        loggers["release"].debug("Releasing %r.", self._name)
        error = self.unlock_script(client=self._client, keys=(self._name, self._signal), args=(self._id, self._signal_expire))
        if error == 1:
            raise NotAcquired("Lock %s is not acquired or it already expired." % self._name)
        elif error:
            # Bug fix: this path runs the UNLOCK script, not EXTEND.
            raise RuntimeError("Unsupported error code %s from UNLOCK script." % error)

    def locked(self):
        """
        Return true if the lock is acquired.

        Checks that lock with same name already exists. This method returns true, even if
        lock have another id.
        """
        return self._client.exists(self._name) == 1
376
377
# Module-level Script object for RESET_ALL; lazily registered by
# Lock.register_scripts() on first use.
reset_all_script = None


def reset_all(redis_client):
    """
    Forcibly deletes all locks if its remains (like a crash reason). Use this with care.

    :param redis_client:
        An instance of :class:`~StrictRedis`.
    """
    # Make sure the module-level ``reset_all_script`` has been registered
    # before invoking it.
    Lock.register_scripts(redis_client)
    reset_all_script(client=redis_client)  # noqa
@@ -1,5 +1,6 b''
1 [bumpversion]
1 [bumpversion]
2 current_version = 4.25.0
2 current_version = 4.25.2
3 message = release: Bump version {current_version} to {new_version}
3 message = release: Bump version {current_version} to {new_version}
4
4
5 [bumpversion:file:vcsserver/VERSION]
5 [bumpversion:file:vcsserver/VERSION]
6
@@ -1,76 +1,79 b''
1 c6fad7d1e61f22b1f4a4863eff207a04c27e9462 v4.0.0
1 c6fad7d1e61f22b1f4a4863eff207a04c27e9462 v4.0.0
2 77b6e243b4cc5b702c15abd6d737798edbac60dc v4.0.1
2 77b6e243b4cc5b702c15abd6d737798edbac60dc v4.0.1
3 a359c072337fdd8e1e71df72cc520b8a9b042f80 v4.1.0
3 a359c072337fdd8e1e71df72cc520b8a9b042f80 v4.1.0
4 49aa7ed030a36b7ceba149a21e587cb5d20b4946 v4.1.1
4 49aa7ed030a36b7ceba149a21e587cb5d20b4946 v4.1.1
5 f38ed1e1a31dce3c170b4d31585ba43471cf0705 v4.1.2
5 f38ed1e1a31dce3c170b4d31585ba43471cf0705 v4.1.2
6 21269ba7bafd8f0c77e79dd86a31eb9bce7643d2 v4.2.0
6 21269ba7bafd8f0c77e79dd86a31eb9bce7643d2 v4.2.0
7 b53930c918c25b2c8f69ceddc6641e511be27fd3 v4.2.1
7 b53930c918c25b2c8f69ceddc6641e511be27fd3 v4.2.1
8 6627ff4119723d8b2b60918e8b1aa49e9f055aab v4.3.0
8 6627ff4119723d8b2b60918e8b1aa49e9f055aab v4.3.0
9 d38f2c2b861dde6c4178923f7cf15ea58b85aa92 v4.3.1
9 d38f2c2b861dde6c4178923f7cf15ea58b85aa92 v4.3.1
10 1232313f9e6adac5ce5399c2a891dc1e72b79022 v4.4.0
10 1232313f9e6adac5ce5399c2a891dc1e72b79022 v4.4.0
11 cbb9f1d329ae5768379cdec55a62ebdd546c4e27 v4.4.1
11 cbb9f1d329ae5768379cdec55a62ebdd546c4e27 v4.4.1
12 24ffe44a27fcd1c5b6936144e176b9f6dd2f3a17 v4.4.2
12 24ffe44a27fcd1c5b6936144e176b9f6dd2f3a17 v4.4.2
13 beaeeaa440cd17471110d4621b8816506c0dff4a v4.5.0
13 beaeeaa440cd17471110d4621b8816506c0dff4a v4.5.0
14 668e5c656f61dd94595611844e1106d1361aa6a7 v4.5.1
14 668e5c656f61dd94595611844e1106d1361aa6a7 v4.5.1
15 ae0640240cb7a77e6dc8c77e64dd80d79732cb5b v4.5.2
15 ae0640240cb7a77e6dc8c77e64dd80d79732cb5b v4.5.2
16 7af06899f426813583fe60449d7517cc49c15b28 v4.6.0
16 7af06899f426813583fe60449d7517cc49c15b28 v4.6.0
17 8f7f4299bf341b43f94dadafa1ea73d6cea2c9ba v4.6.1
17 8f7f4299bf341b43f94dadafa1ea73d6cea2c9ba v4.6.1
18 de00a831a0709ffaac57f948738ea927b97223a9 v4.7.0
18 de00a831a0709ffaac57f948738ea927b97223a9 v4.7.0
19 57f527e0646d731768fb5e0fe742b12a35bdc63b v4.7.1
19 57f527e0646d731768fb5e0fe742b12a35bdc63b v4.7.1
20 f9b09787da9845e4a105f4bffdc252099902cefb v4.7.2
20 f9b09787da9845e4a105f4bffdc252099902cefb v4.7.2
21 0b7c790b726f08385e6ebdf4f257c905787b9244 v4.8.0
21 0b7c790b726f08385e6ebdf4f257c905787b9244 v4.8.0
22 f4123e725b74d0e82fe89982ab8791a66062e2b3 v4.9.0
22 f4123e725b74d0e82fe89982ab8791a66062e2b3 v4.9.0
23 940bac044a0fe1ec839759df81399b50141be720 v4.9.1
23 940bac044a0fe1ec839759df81399b50141be720 v4.9.1
24 582d9ebbe46bdddac4b26eacae36ee5ecabca267 v4.10.0
24 582d9ebbe46bdddac4b26eacae36ee5ecabca267 v4.10.0
25 12fbd08d0ab57acce9c0bdccee75633cfa08d7f4 v4.10.1
25 12fbd08d0ab57acce9c0bdccee75633cfa08d7f4 v4.10.1
26 78352f95021a9d128f5803fdbca7036daef5dabe v4.10.2
26 78352f95021a9d128f5803fdbca7036daef5dabe v4.10.2
27 a47ccfb020cda78c8680e3844aaf0b82b1390f3b v4.10.3
27 a47ccfb020cda78c8680e3844aaf0b82b1390f3b v4.10.3
28 347ae9ae544bba8deb417995285287a3b6be1611 v4.10.4
28 347ae9ae544bba8deb417995285287a3b6be1611 v4.10.4
29 9b257ac49841f850434be0d518baca0827e6c8cc v4.10.5
29 9b257ac49841f850434be0d518baca0827e6c8cc v4.10.5
30 e8bf26eea118694edc4ffe50c6c5aa91022bc434 v4.10.6
30 e8bf26eea118694edc4ffe50c6c5aa91022bc434 v4.10.6
31 71fa9274ba59fb982104f0b9b3d0d024c78675f7 v4.11.0
31 71fa9274ba59fb982104f0b9b3d0d024c78675f7 v4.11.0
32 92471577ef25636e5babe8001d47fc8e51521522 v4.11.1
32 92471577ef25636e5babe8001d47fc8e51521522 v4.11.1
33 0277edbcda5a8d075e1e41a95bcee6dcf21f3f77 v4.11.2
33 0277edbcda5a8d075e1e41a95bcee6dcf21f3f77 v4.11.2
34 6c5ecbf0778ef870e5b23d9fad5340135b563356 v4.11.3
34 6c5ecbf0778ef870e5b23d9fad5340135b563356 v4.11.3
35 be788a89a939ebd63606220064bd624fa9d5c9c9 v4.11.4
35 be788a89a939ebd63606220064bd624fa9d5c9c9 v4.11.4
36 15c90a04098a373ac761fab07695fd80dde3bcdb v4.11.5
36 15c90a04098a373ac761fab07695fd80dde3bcdb v4.11.5
37 77aff155b3251cc00394a49f5e8f2c99e33149a7 v4.11.6
37 77aff155b3251cc00394a49f5e8f2c99e33149a7 v4.11.6
38 c218a1ce5d370c2e671d42a91684b3fc2c91b81d v4.12.0
38 c218a1ce5d370c2e671d42a91684b3fc2c91b81d v4.12.0
39 80085fb846cc948195a5c76b579ca34cbc49b59b v4.12.1
39 80085fb846cc948195a5c76b579ca34cbc49b59b v4.12.1
40 346f04fc8a18df3235defbe6e71bd552c0d46481 v4.12.2
40 346f04fc8a18df3235defbe6e71bd552c0d46481 v4.12.2
41 764fdd752322f3e0c13ea00957f2d548bf4363a7 v4.12.3
41 764fdd752322f3e0c13ea00957f2d548bf4363a7 v4.12.3
42 b58038974a5cecbb9c100d32ad2e4c68582f1a78 v4.12.4
42 b58038974a5cecbb9c100d32ad2e4c68582f1a78 v4.12.4
43 e1d42d92a0fec0c80b56c82f37bc7b5472613706 v4.13.0
43 e1d42d92a0fec0c80b56c82f37bc7b5472613706 v4.13.0
44 c3ded3ff17e9bb2a47002a808984a7a946f58a1c v4.13.1
44 c3ded3ff17e9bb2a47002a808984a7a946f58a1c v4.13.1
45 7ff81aa47b1b40cdef9dd5bcdd439f59c269db3d v4.13.2
45 7ff81aa47b1b40cdef9dd5bcdd439f59c269db3d v4.13.2
46 628a08e6aaeff2c3f9e0e268e854f870e6778e53 v4.13.3
46 628a08e6aaeff2c3f9e0e268e854f870e6778e53 v4.13.3
47 941d675f10cfa7d774815bfacfb37085751b7a0d v4.14.0
47 941d675f10cfa7d774815bfacfb37085751b7a0d v4.14.0
48 75e11d32c0be0a457198f07888e7ef650cfa6888 v4.14.1
48 75e11d32c0be0a457198f07888e7ef650cfa6888 v4.14.1
49 6c6f49fda0191c4641dcd43aa0d4376b8b728d40 v4.15.0
49 6c6f49fda0191c4641dcd43aa0d4376b8b728d40 v4.15.0
50 184dea5e01c36e6474c83d3bb34719cdfec22b0d v4.15.1
50 184dea5e01c36e6474c83d3bb34719cdfec22b0d v4.15.1
51 a4dc3669345553582296b2ce1485229a6c6f0522 v4.15.2
51 a4dc3669345553582296b2ce1485229a6c6f0522 v4.15.2
52 d2a4a1a66f204668841da1cdccfa29083e1ef7a3 v4.16.0
52 d2a4a1a66f204668841da1cdccfa29083e1ef7a3 v4.16.0
53 744cf8f2c8f23051978fc293404bf475cc5a31f6 v4.16.1
53 744cf8f2c8f23051978fc293404bf475cc5a31f6 v4.16.1
54 e68aff93ce4ad11fea13420e914f7dfb05c39566 v4.16.2
54 e68aff93ce4ad11fea13420e914f7dfb05c39566 v4.16.2
55 647aeff9752dc1aa00796fa280d0d2ce2f511bc9 v4.17.0
55 647aeff9752dc1aa00796fa280d0d2ce2f511bc9 v4.17.0
56 5e0c2990e095bba1dc903cf0e6ef6ac035e0ccf9 v4.17.1
56 5e0c2990e095bba1dc903cf0e6ef6ac035e0ccf9 v4.17.1
57 8a824544d95037d76d99b104b5d2363858101d53 v4.17.2
57 8a824544d95037d76d99b104b5d2363858101d53 v4.17.2
58 ccd806a2d9482f61bd7e8956a02a28eb24a1d46a v4.17.3
58 ccd806a2d9482f61bd7e8956a02a28eb24a1d46a v4.17.3
59 e533ca02ccc205189b7bad9f227a312212772022 v4.17.4
59 e533ca02ccc205189b7bad9f227a312212772022 v4.17.4
60 ba6a6dc9ecd7fd8b1dcd6eb0c4ee0210e897c426 v4.18.0
60 ba6a6dc9ecd7fd8b1dcd6eb0c4ee0210e897c426 v4.18.0
61 17bc818b41bcf6883b9ff0da31f01d8c2a5d0781 v4.18.1
61 17bc818b41bcf6883b9ff0da31f01d8c2a5d0781 v4.18.1
62 1e9f12aa01f82c335abc9017efe94ce1c30b52ba v4.18.2
62 1e9f12aa01f82c335abc9017efe94ce1c30b52ba v4.18.2
63 f4cc6b3c5680bdf4541d7d442fbb7086640fb547 v4.18.3
63 f4cc6b3c5680bdf4541d7d442fbb7086640fb547 v4.18.3
64 5dc0277e4f77bd4cc3042d99625bb5d3ba480c8c v4.19.0
64 5dc0277e4f77bd4cc3042d99625bb5d3ba480c8c v4.19.0
65 3a815eeb1b1efa340dda9b81a8da3cf24a7d605b v4.19.1
65 3a815eeb1b1efa340dda9b81a8da3cf24a7d605b v4.19.1
66 8841da3680fba841e5a54ebccd8ca56c078f7553 v4.19.2
66 8841da3680fba841e5a54ebccd8ca56c078f7553 v4.19.2
67 4b0dec7fd80b1ca38e5073e5e562a5a450f73669 v4.19.3
67 4b0dec7fd80b1ca38e5073e5e562a5a450f73669 v4.19.3
68 1485aa75ffe1b1ec48352dce7b7492d92f85e95f v4.20.0
68 1485aa75ffe1b1ec48352dce7b7492d92f85e95f v4.20.0
69 5b740274011766ef2f73803cc196d081e1e7f1d4 v4.20.1
69 5b740274011766ef2f73803cc196d081e1e7f1d4 v4.20.1
70 5a7835234e2c45e8fb8184c60f548a64b5842af8 v4.21.0
70 5a7835234e2c45e8fb8184c60f548a64b5842af8 v4.21.0
71 26af88343015f8b89d5a66f92bc7547c51fcf0df v4.22.0
71 26af88343015f8b89d5a66f92bc7547c51fcf0df v4.22.0
72 cf54e5f700fe5dc50af1a1bdf5197c18cf52105f v4.23.0
72 cf54e5f700fe5dc50af1a1bdf5197c18cf52105f v4.23.0
73 179d989bcfe02c6227f9f6aa9236cbbe1c14c400 v4.23.1
73 179d989bcfe02c6227f9f6aa9236cbbe1c14c400 v4.23.1
74 383aee8b1652affaa26aefe336a89ee366b2b26d v4.23.2
74 383aee8b1652affaa26aefe336a89ee366b2b26d v4.23.2
75 bc1a8141cc51fc23c455ebc50c6609c810b46f8d v4.24.0
75 bc1a8141cc51fc23c455ebc50c6609c810b46f8d v4.24.0
76 530a1c03caabc806ea1ef34605f8f67f18c70e55 v4.24.1
76 530a1c03caabc806ea1ef34605f8f67f18c70e55 v4.24.1
77 5908ae65cee1043982e1b26d7b618af5fcfebbb3 v4.25.0
78 cce8bcdf75090d5943a1e9706fe5212d7b5d1fa1 v4.25.1
79 8610c4bf846c63bbc95d3ddfb53fadaaa9c7aa42 v4.25.2
@@ -1,1103 +1,1103 b''
1 # Generated by pip2nix 0.8.0.dev1
1 # Generated by pip2nix 0.8.0.dev1
2 # See https://github.com/johbo/pip2nix
2 # See https://github.com/johbo/pip2nix
3
3
4 { pkgs, fetchurl, fetchgit, fetchhg }:
4 { pkgs, fetchurl, fetchgit, fetchhg }:
5
5
6 self: super: {
6 self: super: {
7 "atomicwrites" = super.buildPythonPackage {
7 "atomicwrites" = super.buildPythonPackage {
8 name = "atomicwrites-1.3.0";
8 name = "atomicwrites-1.3.0";
9 doCheck = false;
9 doCheck = false;
10 src = fetchurl {
10 src = fetchurl {
11 url = "https://files.pythonhosted.org/packages/ec/0f/cd484ac8820fed363b374af30049adc8fd13065720fd4f4c6be8a2309da7/atomicwrites-1.3.0.tar.gz";
11 url = "https://files.pythonhosted.org/packages/ec/0f/cd484ac8820fed363b374af30049adc8fd13065720fd4f4c6be8a2309da7/atomicwrites-1.3.0.tar.gz";
12 sha256 = "19ngcscdf3jsqmpcxn6zl5b6anmsajb6izp1smcd1n02midl9abm";
12 sha256 = "19ngcscdf3jsqmpcxn6zl5b6anmsajb6izp1smcd1n02midl9abm";
13 };
13 };
14 meta = {
14 meta = {
15 license = [ pkgs.lib.licenses.mit ];
15 license = [ pkgs.lib.licenses.mit ];
16 };
16 };
17 };
17 };
18 "attrs" = super.buildPythonPackage {
18 "attrs" = super.buildPythonPackage {
19 name = "attrs-19.3.0";
19 name = "attrs-19.3.0";
20 doCheck = false;
20 doCheck = false;
21 src = fetchurl {
21 src = fetchurl {
22 url = "https://files.pythonhosted.org/packages/98/c3/2c227e66b5e896e15ccdae2e00bbc69aa46e9a8ce8869cc5fa96310bf612/attrs-19.3.0.tar.gz";
22 url = "https://files.pythonhosted.org/packages/98/c3/2c227e66b5e896e15ccdae2e00bbc69aa46e9a8ce8869cc5fa96310bf612/attrs-19.3.0.tar.gz";
23 sha256 = "0wky4h28n7xnr6xv69p9z6kv8bzn50d10c3drmd9ds8gawbcxdzp";
23 sha256 = "0wky4h28n7xnr6xv69p9z6kv8bzn50d10c3drmd9ds8gawbcxdzp";
24 };
24 };
25 meta = {
25 meta = {
26 license = [ pkgs.lib.licenses.mit ];
26 license = [ pkgs.lib.licenses.mit ];
27 };
27 };
28 };
28 };
29 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
29 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
30 name = "backports.shutil-get-terminal-size-1.0.0";
30 name = "backports.shutil-get-terminal-size-1.0.0";
31 doCheck = false;
31 doCheck = false;
32 src = fetchurl {
32 src = fetchurl {
33 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
33 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
34 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
34 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
35 };
35 };
36 meta = {
36 meta = {
37 license = [ pkgs.lib.licenses.mit ];
37 license = [ pkgs.lib.licenses.mit ];
38 };
38 };
39 };
39 };
40 "beautifulsoup4" = super.buildPythonPackage {
40 "beautifulsoup4" = super.buildPythonPackage {
41 name = "beautifulsoup4-4.6.3";
41 name = "beautifulsoup4-4.6.3";
42 doCheck = false;
42 doCheck = false;
43 src = fetchurl {
43 src = fetchurl {
44 url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz";
44 url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz";
45 sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h";
45 sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h";
46 };
46 };
47 meta = {
47 meta = {
48 license = [ pkgs.lib.licenses.mit ];
48 license = [ pkgs.lib.licenses.mit ];
49 };
49 };
50 };
50 };
51 "cffi" = super.buildPythonPackage {
51 "cffi" = super.buildPythonPackage {
52 name = "cffi-1.12.3";
52 name = "cffi-1.12.3";
53 doCheck = false;
53 doCheck = false;
54 propagatedBuildInputs = [
54 propagatedBuildInputs = [
55 self."pycparser"
55 self."pycparser"
56 ];
56 ];
57 src = fetchurl {
57 src = fetchurl {
58 url = "https://files.pythonhosted.org/packages/93/1a/ab8c62b5838722f29f3daffcc8d4bd61844aa9b5f437341cc890ceee483b/cffi-1.12.3.tar.gz";
58 url = "https://files.pythonhosted.org/packages/93/1a/ab8c62b5838722f29f3daffcc8d4bd61844aa9b5f437341cc890ceee483b/cffi-1.12.3.tar.gz";
59 sha256 = "0x075521fxwv0mfp4cqzk7lvmw4n94bjw601qkcv314z5s182704";
59 sha256 = "0x075521fxwv0mfp4cqzk7lvmw4n94bjw601qkcv314z5s182704";
60 };
60 };
61 meta = {
61 meta = {
62 license = [ pkgs.lib.licenses.mit ];
62 license = [ pkgs.lib.licenses.mit ];
63 };
63 };
64 };
64 };
65 "configobj" = super.buildPythonPackage {
65 "configobj" = super.buildPythonPackage {
66 name = "configobj-5.0.6";
66 name = "configobj-5.0.6";
67 doCheck = false;
67 doCheck = false;
68 propagatedBuildInputs = [
68 propagatedBuildInputs = [
69 self."six"
69 self."six"
70 ];
70 ];
71 src = fetchurl {
71 src = fetchurl {
72 url = "https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626";
72 url = "https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626";
73 sha256 = "0kqfrdfr14mw8yd8qwq14dv2xghpkjmd3yjsy8dfcbvpcc17xnxp";
73 sha256 = "0kqfrdfr14mw8yd8qwq14dv2xghpkjmd3yjsy8dfcbvpcc17xnxp";
74 };
74 };
75 meta = {
75 meta = {
76 license = [ pkgs.lib.licenses.bsdOriginal ];
76 license = [ pkgs.lib.licenses.bsdOriginal ];
77 };
77 };
78 };
78 };
79 "configparser" = super.buildPythonPackage {
79 "configparser" = super.buildPythonPackage {
80 name = "configparser-4.0.2";
80 name = "configparser-4.0.2";
81 doCheck = false;
81 doCheck = false;
82 src = fetchurl {
82 src = fetchurl {
83 url = "https://files.pythonhosted.org/packages/16/4f/48975536bd488d3a272549eb795ac4a13a5f7fcdc8995def77fbef3532ee/configparser-4.0.2.tar.gz";
83 url = "https://files.pythonhosted.org/packages/16/4f/48975536bd488d3a272549eb795ac4a13a5f7fcdc8995def77fbef3532ee/configparser-4.0.2.tar.gz";
84 sha256 = "1priacxym85yjcf68hh38w55nqswaxp71ryjyfdk222kg9l85ln7";
84 sha256 = "1priacxym85yjcf68hh38w55nqswaxp71ryjyfdk222kg9l85ln7";
85 };
85 };
86 meta = {
86 meta = {
87 license = [ pkgs.lib.licenses.mit ];
87 license = [ pkgs.lib.licenses.mit ];
88 };
88 };
89 };
89 };
90 "contextlib2" = super.buildPythonPackage {
90 "contextlib2" = super.buildPythonPackage {
91 name = "contextlib2-0.6.0.post1";
91 name = "contextlib2-0.6.0.post1";
92 doCheck = false;
92 doCheck = false;
93 src = fetchurl {
93 src = fetchurl {
94 url = "https://files.pythonhosted.org/packages/02/54/669207eb72e3d8ae8b38aa1f0703ee87a0e9f88f30d3c0a47bebdb6de242/contextlib2-0.6.0.post1.tar.gz";
94 url = "https://files.pythonhosted.org/packages/02/54/669207eb72e3d8ae8b38aa1f0703ee87a0e9f88f30d3c0a47bebdb6de242/contextlib2-0.6.0.post1.tar.gz";
95 sha256 = "0bhnr2ac7wy5l85ji909gyljyk85n92w8pdvslmrvc8qih4r1x01";
95 sha256 = "0bhnr2ac7wy5l85ji909gyljyk85n92w8pdvslmrvc8qih4r1x01";
96 };
96 };
97 meta = {
97 meta = {
98 license = [ pkgs.lib.licenses.psfl ];
98 license = [ pkgs.lib.licenses.psfl ];
99 };
99 };
100 };
100 };
101 "cov-core" = super.buildPythonPackage {
101 "cov-core" = super.buildPythonPackage {
102 name = "cov-core-1.15.0";
102 name = "cov-core-1.15.0";
103 doCheck = false;
103 doCheck = false;
104 propagatedBuildInputs = [
104 propagatedBuildInputs = [
105 self."coverage"
105 self."coverage"
106 ];
106 ];
107 src = fetchurl {
107 src = fetchurl {
108 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
108 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
109 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
109 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
110 };
110 };
111 meta = {
111 meta = {
112 license = [ pkgs.lib.licenses.mit ];
112 license = [ pkgs.lib.licenses.mit ];
113 };
113 };
114 };
114 };
115 "coverage" = super.buildPythonPackage {
115 "coverage" = super.buildPythonPackage {
116 name = "coverage-4.5.4";
116 name = "coverage-4.5.4";
117 doCheck = false;
117 doCheck = false;
118 src = fetchurl {
118 src = fetchurl {
119 url = "https://files.pythonhosted.org/packages/85/d5/818d0e603685c4a613d56f065a721013e942088047ff1027a632948bdae6/coverage-4.5.4.tar.gz";
119 url = "https://files.pythonhosted.org/packages/85/d5/818d0e603685c4a613d56f065a721013e942088047ff1027a632948bdae6/coverage-4.5.4.tar.gz";
120 sha256 = "0p0j4di6h8k6ica7jwwj09azdcg4ycxq60i9qsskmsg94cd9yzg0";
120 sha256 = "0p0j4di6h8k6ica7jwwj09azdcg4ycxq60i9qsskmsg94cd9yzg0";
121 };
121 };
122 meta = {
122 meta = {
123 license = [ pkgs.lib.licenses.asl20 ];
123 license = [ pkgs.lib.licenses.asl20 ];
124 };
124 };
125 };
125 };
126 "decorator" = super.buildPythonPackage {
126 "decorator" = super.buildPythonPackage {
127 name = "decorator-4.1.2";
127 name = "decorator-4.1.2";
128 doCheck = false;
128 doCheck = false;
129 src = fetchurl {
129 src = fetchurl {
130 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
130 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
131 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
131 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
132 };
132 };
133 meta = {
133 meta = {
134 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
134 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
135 };
135 };
136 };
136 };
137 "dogpile.cache" = super.buildPythonPackage {
137 "dogpile.cache" = super.buildPythonPackage {
138 name = "dogpile.cache-0.9.0";
138 name = "dogpile.cache-0.9.0";
139 doCheck = false;
139 doCheck = false;
140 propagatedBuildInputs = [
140 propagatedBuildInputs = [
141 self."decorator"
141 self."decorator"
142 ];
142 ];
143 src = fetchurl {
143 src = fetchurl {
144 url = "https://files.pythonhosted.org/packages/ac/6a/9ac405686a94b7f009a20a50070a5786b0e1aedc707b88d40d0c4b51a82e/dogpile.cache-0.9.0.tar.gz";
144 url = "https://files.pythonhosted.org/packages/ac/6a/9ac405686a94b7f009a20a50070a5786b0e1aedc707b88d40d0c4b51a82e/dogpile.cache-0.9.0.tar.gz";
145 sha256 = "0sr1fn6b4k5bh0cscd9yi8csqxvj4ngzildav58x5p694mc86j5k";
145 sha256 = "0sr1fn6b4k5bh0cscd9yi8csqxvj4ngzildav58x5p694mc86j5k";
146 };
146 };
147 meta = {
147 meta = {
148 license = [ pkgs.lib.licenses.bsdOriginal ];
148 license = [ pkgs.lib.licenses.bsdOriginal ];
149 };
149 };
150 };
150 };
151 "dogpile.core" = super.buildPythonPackage {
151 "dogpile.core" = super.buildPythonPackage {
152 name = "dogpile.core-0.4.1";
152 name = "dogpile.core-0.4.1";
153 doCheck = false;
153 doCheck = false;
154 src = fetchurl {
154 src = fetchurl {
155 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
155 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
156 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
156 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
157 };
157 };
158 meta = {
158 meta = {
159 license = [ pkgs.lib.licenses.bsdOriginal ];
159 license = [ pkgs.lib.licenses.bsdOriginal ];
160 };
160 };
161 };
161 };
162 "dulwich" = super.buildPythonPackage {
162 "dulwich" = super.buildPythonPackage {
163 name = "dulwich-0.13.0";
163 name = "dulwich-0.13.0";
164 doCheck = false;
164 doCheck = false;
165 src = fetchurl {
165 src = fetchurl {
166 url = "https://files.pythonhosted.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
166 url = "https://files.pythonhosted.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
167 sha256 = "0f1jwvrh549c4rgavkn3wizrch904s73s4fmrxykxy9cw8s57lwf";
167 sha256 = "0f1jwvrh549c4rgavkn3wizrch904s73s4fmrxykxy9cw8s57lwf";
168 };
168 };
169 meta = {
169 meta = {
170 license = [ pkgs.lib.licenses.gpl2Plus ];
170 license = [ pkgs.lib.licenses.gpl2Plus ];
171 };
171 };
172 };
172 };
173 "enum34" = super.buildPythonPackage {
173 "enum34" = super.buildPythonPackage {
174 name = "enum34-1.1.10";
174 name = "enum34-1.1.10";
175 doCheck = false;
175 doCheck = false;
176 src = fetchurl {
176 src = fetchurl {
177 url = "https://files.pythonhosted.org/packages/11/c4/2da1f4952ba476677a42f25cd32ab8aaf0e1c0d0e00b89822b835c7e654c/enum34-1.1.10.tar.gz";
177 url = "https://files.pythonhosted.org/packages/11/c4/2da1f4952ba476677a42f25cd32ab8aaf0e1c0d0e00b89822b835c7e654c/enum34-1.1.10.tar.gz";
178 sha256 = "0j7ji699fwswm4vg6w1v07fkbf8dkzdm6gfh88jvs5nqgr3sgrnc";
178 sha256 = "0j7ji699fwswm4vg6w1v07fkbf8dkzdm6gfh88jvs5nqgr3sgrnc";
179 };
179 };
180 meta = {
180 meta = {
181 license = [ pkgs.lib.licenses.bsdOriginal ];
181 license = [ pkgs.lib.licenses.bsdOriginal ];
182 };
182 };
183 };
183 };
184 "funcsigs" = super.buildPythonPackage {
184 "funcsigs" = super.buildPythonPackage {
185 name = "funcsigs-1.0.2";
185 name = "funcsigs-1.0.2";
186 doCheck = false;
186 doCheck = false;
187 src = fetchurl {
187 src = fetchurl {
188 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
188 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
189 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
189 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
190 };
190 };
191 meta = {
191 meta = {
192 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
192 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
193 };
193 };
194 };
194 };
195 "gevent" = super.buildPythonPackage {
195 "gevent" = super.buildPythonPackage {
196 name = "gevent-1.5.0";
196 name = "gevent-1.5.0";
197 doCheck = false;
197 doCheck = false;
198 propagatedBuildInputs = [
198 propagatedBuildInputs = [
199 self."greenlet"
199 self."greenlet"
200 ];
200 ];
201 src = fetchurl {
201 src = fetchurl {
202 url = "https://files.pythonhosted.org/packages/5a/79/2c63d385d017b5dd7d70983a463dfd25befae70c824fedb857df6e72eff2/gevent-1.5.0.tar.gz";
202 url = "https://files.pythonhosted.org/packages/5a/79/2c63d385d017b5dd7d70983a463dfd25befae70c824fedb857df6e72eff2/gevent-1.5.0.tar.gz";
203 sha256 = "0aac3d4vhv5n4rsb6cqzq0d1xx9immqz4fmpddw35yxkwdc450dj";
203 sha256 = "0aac3d4vhv5n4rsb6cqzq0d1xx9immqz4fmpddw35yxkwdc450dj";
204 };
204 };
205 meta = {
205 meta = {
206 license = [ pkgs.lib.licenses.mit ];
206 license = [ pkgs.lib.licenses.mit ];
207 };
207 };
208 };
208 };
209 "gprof2dot" = super.buildPythonPackage {
209 "gprof2dot" = super.buildPythonPackage {
210 name = "gprof2dot-2017.9.19";
210 name = "gprof2dot-2017.9.19";
211 doCheck = false;
211 doCheck = false;
212 src = fetchurl {
212 src = fetchurl {
213 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
213 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
214 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
214 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
215 };
215 };
216 meta = {
216 meta = {
217 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
217 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
218 };
218 };
219 };
219 };
220 "greenlet" = super.buildPythonPackage {
220 "greenlet" = super.buildPythonPackage {
221 name = "greenlet-0.4.15";
221 name = "greenlet-0.4.15";
222 doCheck = false;
222 doCheck = false;
223 src = fetchurl {
223 src = fetchurl {
224 url = "https://files.pythonhosted.org/packages/f8/e8/b30ae23b45f69aa3f024b46064c0ac8e5fcb4f22ace0dca8d6f9c8bbe5e7/greenlet-0.4.15.tar.gz";
224 url = "https://files.pythonhosted.org/packages/f8/e8/b30ae23b45f69aa3f024b46064c0ac8e5fcb4f22ace0dca8d6f9c8bbe5e7/greenlet-0.4.15.tar.gz";
225 sha256 = "1g4g1wwc472ds89zmqlpyan3fbnzpa8qm48z3z1y6mlk44z485ll";
225 sha256 = "1g4g1wwc472ds89zmqlpyan3fbnzpa8qm48z3z1y6mlk44z485ll";
226 };
226 };
227 meta = {
227 meta = {
228 license = [ pkgs.lib.licenses.mit ];
228 license = [ pkgs.lib.licenses.mit ];
229 };
229 };
230 };
230 };
231 "gunicorn" = super.buildPythonPackage {
231 "gunicorn" = super.buildPythonPackage {
232 name = "gunicorn-19.9.0";
232 name = "gunicorn-19.9.0";
233 doCheck = false;
233 doCheck = false;
234 src = fetchurl {
234 src = fetchurl {
235 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
235 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
236 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
236 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
237 };
237 };
238 meta = {
238 meta = {
239 license = [ pkgs.lib.licenses.mit ];
239 license = [ pkgs.lib.licenses.mit ];
240 };
240 };
241 };
241 };
242 "hg-evolve" = super.buildPythonPackage {
242 "hg-evolve" = super.buildPythonPackage {
243 name = "hg-evolve-9.1.0";
243 name = "hg-evolve-9.1.0";
244 doCheck = false;
244 doCheck = false;
245 src = fetchurl {
245 src = fetchurl {
246 url = "https://files.pythonhosted.org/packages/20/36/5a6655975aa0c663be91098d31a0b24841acad44fe896aa2bdee77c6b883/hg-evolve-9.1.0.tar.gz";
246 url = "https://files.pythonhosted.org/packages/20/36/5a6655975aa0c663be91098d31a0b24841acad44fe896aa2bdee77c6b883/hg-evolve-9.1.0.tar.gz";
247 sha256 = "1mna81cmzxxn7s2nwz3g1xgdjlcc1axkvfmwg7gjqghwn3pdraps";
247 sha256 = "1mna81cmzxxn7s2nwz3g1xgdjlcc1axkvfmwg7gjqghwn3pdraps";
248 };
248 };
249 meta = {
249 meta = {
250 license = [ { fullName = "GPLv2+"; } ];
250 license = [ { fullName = "GPLv2+"; } ];
251 };
251 };
252 };
252 };
253 "hgsubversion" = super.buildPythonPackage {
253 "hgsubversion" = super.buildPythonPackage {
254 name = "hgsubversion-1.9.3";
254 name = "hgsubversion-1.9.3";
255 doCheck = false;
255 doCheck = false;
256 propagatedBuildInputs = [
256 propagatedBuildInputs = [
257 self."mercurial"
257 self."mercurial"
258 self."subvertpy"
258 self."subvertpy"
259 ];
259 ];
260 src = fetchurl {
260 src = fetchurl {
261 url = "https://files.pythonhosted.org/packages/a3/53/6d205e641f3e09abcf1ddaed66e5e4b20da22d0145566d440a02c9e35f0d/hgsubversion-1.9.3.tar.gz";
261 url = "https://files.pythonhosted.org/packages/a3/53/6d205e641f3e09abcf1ddaed66e5e4b20da22d0145566d440a02c9e35f0d/hgsubversion-1.9.3.tar.gz";
262 sha256 = "0nymcjlch8c4zjbncrs30p2nrbylsf25g3h6mr0zzzxr141h3sig";
262 sha256 = "0nymcjlch8c4zjbncrs30p2nrbylsf25g3h6mr0zzzxr141h3sig";
263 };
263 };
264 meta = {
264 meta = {
265 license = [ pkgs.lib.licenses.gpl1 ];
265 license = [ pkgs.lib.licenses.gpl1 ];
266 };
266 };
267 };
267 };
268 "hupper" = super.buildPythonPackage {
268 "hupper" = super.buildPythonPackage {
269 name = "hupper-1.10.2";
269 name = "hupper-1.10.2";
270 doCheck = false;
270 doCheck = false;
271 src = fetchurl {
271 src = fetchurl {
272 url = "https://files.pythonhosted.org/packages/41/24/ea90fef04706e54bd1635c05c50dc9cf87cda543c59303a03e7aa7dda0ce/hupper-1.10.2.tar.gz";
272 url = "https://files.pythonhosted.org/packages/41/24/ea90fef04706e54bd1635c05c50dc9cf87cda543c59303a03e7aa7dda0ce/hupper-1.10.2.tar.gz";
273 sha256 = "0am0p6g5cz6xmcaf04xq8q6dzdd9qz0phj6gcmpsckf2mcyza61q";
273 sha256 = "0am0p6g5cz6xmcaf04xq8q6dzdd9qz0phj6gcmpsckf2mcyza61q";
274 };
274 };
275 meta = {
275 meta = {
276 license = [ pkgs.lib.licenses.mit ];
276 license = [ pkgs.lib.licenses.mit ];
277 };
277 };
278 };
278 };
279 "importlib-metadata" = super.buildPythonPackage {
279 "importlib-metadata" = super.buildPythonPackage {
280 name = "importlib-metadata-1.6.0";
280 name = "importlib-metadata-1.6.0";
281 doCheck = false;
281 doCheck = false;
282 propagatedBuildInputs = [
282 propagatedBuildInputs = [
283 self."zipp"
283 self."zipp"
284 self."pathlib2"
284 self."pathlib2"
285 self."contextlib2"
285 self."contextlib2"
286 self."configparser"
286 self."configparser"
287 ];
287 ];
288 src = fetchurl {
288 src = fetchurl {
289 url = "https://files.pythonhosted.org/packages/b4/1b/baab42e3cd64c9d5caac25a9d6c054f8324cdc38975a44d600569f1f7158/importlib_metadata-1.6.0.tar.gz";
289 url = "https://files.pythonhosted.org/packages/b4/1b/baab42e3cd64c9d5caac25a9d6c054f8324cdc38975a44d600569f1f7158/importlib_metadata-1.6.0.tar.gz";
290 sha256 = "07icyggasn38yv2swdrd8z6i0plazmc9adavsdkbqqj91j53ll9l";
290 sha256 = "07icyggasn38yv2swdrd8z6i0plazmc9adavsdkbqqj91j53ll9l";
291 };
291 };
292 meta = {
292 meta = {
293 license = [ pkgs.lib.licenses.asl20 ];
293 license = [ pkgs.lib.licenses.asl20 ];
294 };
294 };
295 };
295 };
296 "ipdb" = super.buildPythonPackage {
296 "ipdb" = super.buildPythonPackage {
297 name = "ipdb-0.13.2";
297 name = "ipdb-0.13.2";
298 doCheck = false;
298 doCheck = false;
299 propagatedBuildInputs = [
299 propagatedBuildInputs = [
300 self."setuptools"
300 self."setuptools"
301 self."ipython"
301 self."ipython"
302 ];
302 ];
303 src = fetchurl {
303 src = fetchurl {
304 url = "https://files.pythonhosted.org/packages/2c/bb/a3e1a441719ebd75c6dac8170d3ddba884b7ee8a5c0f9aefa7297386627a/ipdb-0.13.2.tar.gz";
304 url = "https://files.pythonhosted.org/packages/2c/bb/a3e1a441719ebd75c6dac8170d3ddba884b7ee8a5c0f9aefa7297386627a/ipdb-0.13.2.tar.gz";
305 sha256 = "0jcd849rx30y3wcgzsqbn06v0yjlzvb9x3076q0yxpycdwm1ryvp";
305 sha256 = "0jcd849rx30y3wcgzsqbn06v0yjlzvb9x3076q0yxpycdwm1ryvp";
306 };
306 };
307 meta = {
307 meta = {
308 license = [ pkgs.lib.licenses.bsdOriginal ];
308 license = [ pkgs.lib.licenses.bsdOriginal ];
309 };
309 };
310 };
310 };
311 "ipython" = super.buildPythonPackage {
311 "ipython" = super.buildPythonPackage {
312 name = "ipython-5.1.0";
312 name = "ipython-5.1.0";
313 doCheck = false;
313 doCheck = false;
314 propagatedBuildInputs = [
314 propagatedBuildInputs = [
315 self."setuptools"
315 self."setuptools"
316 self."decorator"
316 self."decorator"
317 self."pickleshare"
317 self."pickleshare"
318 self."simplegeneric"
318 self."simplegeneric"
319 self."traitlets"
319 self."traitlets"
320 self."prompt-toolkit"
320 self."prompt-toolkit"
321 self."pygments"
321 self."pygments"
322 self."pexpect"
322 self."pexpect"
323 self."backports.shutil-get-terminal-size"
323 self."backports.shutil-get-terminal-size"
324 self."pathlib2"
324 self."pathlib2"
325 self."pexpect"
325 self."pexpect"
326 ];
326 ];
327 src = fetchurl {
327 src = fetchurl {
328 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
328 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
329 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
329 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
330 };
330 };
331 meta = {
331 meta = {
332 license = [ pkgs.lib.licenses.bsdOriginal ];
332 license = [ pkgs.lib.licenses.bsdOriginal ];
333 };
333 };
334 };
334 };
335 "ipython-genutils" = super.buildPythonPackage {
335 "ipython-genutils" = super.buildPythonPackage {
336 name = "ipython-genutils-0.2.0";
336 name = "ipython-genutils-0.2.0";
337 doCheck = false;
337 doCheck = false;
338 src = fetchurl {
338 src = fetchurl {
339 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
339 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
340 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
340 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
341 };
341 };
342 meta = {
342 meta = {
343 license = [ pkgs.lib.licenses.bsdOriginal ];
343 license = [ pkgs.lib.licenses.bsdOriginal ];
344 };
344 };
345 };
345 };
346 "mako" = super.buildPythonPackage {
346 "mako" = super.buildPythonPackage {
347 name = "mako-1.1.0";
347 name = "mako-1.1.0";
348 doCheck = false;
348 doCheck = false;
349 propagatedBuildInputs = [
349 propagatedBuildInputs = [
350 self."markupsafe"
350 self."markupsafe"
351 ];
351 ];
352 src = fetchurl {
352 src = fetchurl {
353 url = "https://files.pythonhosted.org/packages/b0/3c/8dcd6883d009f7cae0f3157fb53e9afb05a0d3d33b3db1268ec2e6f4a56b/Mako-1.1.0.tar.gz";
353 url = "https://files.pythonhosted.org/packages/b0/3c/8dcd6883d009f7cae0f3157fb53e9afb05a0d3d33b3db1268ec2e6f4a56b/Mako-1.1.0.tar.gz";
354 sha256 = "0jqa3qfpykyn4fmkn0kh6043sfls7br8i2bsdbccazcvk9cijsd3";
354 sha256 = "0jqa3qfpykyn4fmkn0kh6043sfls7br8i2bsdbccazcvk9cijsd3";
355 };
355 };
356 meta = {
356 meta = {
357 license = [ pkgs.lib.licenses.mit ];
357 license = [ pkgs.lib.licenses.mit ];
358 };
358 };
359 };
359 };
360 "markupsafe" = super.buildPythonPackage {
360 "markupsafe" = super.buildPythonPackage {
361 name = "markupsafe-1.1.1";
361 name = "markupsafe-1.1.1";
362 doCheck = false;
362 doCheck = false;
363 src = fetchurl {
363 src = fetchurl {
364 url = "https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz";
364 url = "https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz";
365 sha256 = "0sqipg4fk7xbixqd8kq6rlkxj664d157bdwbh93farcphf92x1r9";
365 sha256 = "0sqipg4fk7xbixqd8kq6rlkxj664d157bdwbh93farcphf92x1r9";
366 };
366 };
367 meta = {
367 meta = {
368 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd3 ];
368 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd3 ];
369 };
369 };
370 };
370 };
371 "mercurial" = super.buildPythonPackage {
371 "mercurial" = super.buildPythonPackage {
372 name = "mercurial-5.1.1";
372 name = "mercurial-5.1.1";
373 doCheck = false;
373 doCheck = false;
374 src = fetchurl {
374 src = fetchurl {
375 url = "https://files.pythonhosted.org/packages/22/39/e1a95f6048aa0785b82f5faad8281ae7320894a635cb4a57e19479639c92/mercurial-5.1.1.tar.gz";
375 url = "https://files.pythonhosted.org/packages/22/39/e1a95f6048aa0785b82f5faad8281ae7320894a635cb4a57e19479639c92/mercurial-5.1.1.tar.gz";
376 sha256 = "17z42rfjdkrks4grzgac66nfh285zf1pwxd2zwx1p71pw2jqpz1m";
376 sha256 = "17z42rfjdkrks4grzgac66nfh285zf1pwxd2zwx1p71pw2jqpz1m";
377 };
377 };
378 meta = {
378 meta = {
379 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
379 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
380 };
380 };
381 };
381 };
382 "mock" = super.buildPythonPackage {
382 "mock" = super.buildPythonPackage {
383 name = "mock-3.0.5";
383 name = "mock-3.0.5";
384 doCheck = false;
384 doCheck = false;
385 propagatedBuildInputs = [
385 propagatedBuildInputs = [
386 self."six"
386 self."six"
387 self."funcsigs"
387 self."funcsigs"
388 ];
388 ];
389 src = fetchurl {
389 src = fetchurl {
390 url = "https://files.pythonhosted.org/packages/2e/ab/4fe657d78b270aa6a32f027849513b829b41b0f28d9d8d7f8c3d29ea559a/mock-3.0.5.tar.gz";
390 url = "https://files.pythonhosted.org/packages/2e/ab/4fe657d78b270aa6a32f027849513b829b41b0f28d9d8d7f8c3d29ea559a/mock-3.0.5.tar.gz";
391 sha256 = "1hrp6j0yrx2xzylfv02qa8kph661m6yq4p0mc8fnimch9j4psrc3";
391 sha256 = "1hrp6j0yrx2xzylfv02qa8kph661m6yq4p0mc8fnimch9j4psrc3";
392 };
392 };
393 meta = {
393 meta = {
394 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "OSI Approved :: BSD License"; } ];
394 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "OSI Approved :: BSD License"; } ];
395 };
395 };
396 };
396 };
397 "more-itertools" = super.buildPythonPackage {
397 "more-itertools" = super.buildPythonPackage {
398 name = "more-itertools-5.0.0";
398 name = "more-itertools-5.0.0";
399 doCheck = false;
399 doCheck = false;
400 propagatedBuildInputs = [
400 propagatedBuildInputs = [
401 self."six"
401 self."six"
402 ];
402 ];
403 src = fetchurl {
403 src = fetchurl {
404 url = "https://files.pythonhosted.org/packages/dd/26/30fc0d541d9fdf55faf5ba4b0fd68f81d5bd2447579224820ad525934178/more-itertools-5.0.0.tar.gz";
404 url = "https://files.pythonhosted.org/packages/dd/26/30fc0d541d9fdf55faf5ba4b0fd68f81d5bd2447579224820ad525934178/more-itertools-5.0.0.tar.gz";
405 sha256 = "1r12cm6mcdwdzz7d47a6g4l437xsvapdlgyhqay3i2nrlv03da9q";
405 sha256 = "1r12cm6mcdwdzz7d47a6g4l437xsvapdlgyhqay3i2nrlv03da9q";
406 };
406 };
407 meta = {
407 meta = {
408 license = [ pkgs.lib.licenses.mit ];
408 license = [ pkgs.lib.licenses.mit ];
409 };
409 };
410 };
410 };
411 "msgpack-python" = super.buildPythonPackage {
411 "msgpack-python" = super.buildPythonPackage {
412 name = "msgpack-python-0.5.6";
412 name = "msgpack-python-0.5.6";
413 doCheck = false;
413 doCheck = false;
414 src = fetchurl {
414 src = fetchurl {
415 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
415 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
416 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
416 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
417 };
417 };
418 meta = {
418 meta = {
419 license = [ pkgs.lib.licenses.asl20 ];
419 license = [ pkgs.lib.licenses.asl20 ];
420 };
420 };
421 };
421 };
422 "packaging" = super.buildPythonPackage {
422 "packaging" = super.buildPythonPackage {
423 name = "packaging-20.3";
423 name = "packaging-20.3";
424 doCheck = false;
424 doCheck = false;
425 propagatedBuildInputs = [
425 propagatedBuildInputs = [
426 self."pyparsing"
426 self."pyparsing"
427 self."six"
427 self."six"
428 ];
428 ];
429 src = fetchurl {
429 src = fetchurl {
430 url = "https://files.pythonhosted.org/packages/65/37/83e3f492eb52d771e2820e88105f605335553fe10422cba9d256faeb1702/packaging-20.3.tar.gz";
430 url = "https://files.pythonhosted.org/packages/65/37/83e3f492eb52d771e2820e88105f605335553fe10422cba9d256faeb1702/packaging-20.3.tar.gz";
431 sha256 = "18xpablq278janh03bai9xd4kz9b0yfp6vflazn725ns9x3jna9w";
431 sha256 = "18xpablq278janh03bai9xd4kz9b0yfp6vflazn725ns9x3jna9w";
432 };
432 };
433 meta = {
433 meta = {
434 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
434 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
435 };
435 };
436 };
436 };
437 "pastedeploy" = super.buildPythonPackage {
437 "pastedeploy" = super.buildPythonPackage {
438 name = "pastedeploy-2.1.0";
438 name = "pastedeploy-2.1.0";
439 doCheck = false;
439 doCheck = false;
440 src = fetchurl {
440 src = fetchurl {
441 url = "https://files.pythonhosted.org/packages/c4/e9/972a1c20318b3ae9edcab11a6cef64308fbae5d0d45ab52c6f8b2b8f35b8/PasteDeploy-2.1.0.tar.gz";
441 url = "https://files.pythonhosted.org/packages/c4/e9/972a1c20318b3ae9edcab11a6cef64308fbae5d0d45ab52c6f8b2b8f35b8/PasteDeploy-2.1.0.tar.gz";
442 sha256 = "16qsq5y6mryslmbp5pn35x4z8z3ndp5rpgl42h226879nrw9hmg7";
442 sha256 = "16qsq5y6mryslmbp5pn35x4z8z3ndp5rpgl42h226879nrw9hmg7";
443 };
443 };
444 meta = {
444 meta = {
445 license = [ pkgs.lib.licenses.mit ];
445 license = [ pkgs.lib.licenses.mit ];
446 };
446 };
447 };
447 };
448 "pathlib2" = super.buildPythonPackage {
448 "pathlib2" = super.buildPythonPackage {
449 name = "pathlib2-2.3.5";
449 name = "pathlib2-2.3.5";
450 doCheck = false;
450 doCheck = false;
451 propagatedBuildInputs = [
451 propagatedBuildInputs = [
452 self."six"
452 self."six"
453 self."scandir"
453 self."scandir"
454 ];
454 ];
455 src = fetchurl {
455 src = fetchurl {
456 url = "https://files.pythonhosted.org/packages/94/d8/65c86584e7e97ef824a1845c72bbe95d79f5b306364fa778a3c3e401b309/pathlib2-2.3.5.tar.gz";
456 url = "https://files.pythonhosted.org/packages/94/d8/65c86584e7e97ef824a1845c72bbe95d79f5b306364fa778a3c3e401b309/pathlib2-2.3.5.tar.gz";
457 sha256 = "0s4qa8c082fdkb17izh4mfgwrjd1n5pya18wvrbwqdvvb5xs9nbc";
457 sha256 = "0s4qa8c082fdkb17izh4mfgwrjd1n5pya18wvrbwqdvvb5xs9nbc";
458 };
458 };
459 meta = {
459 meta = {
460 license = [ pkgs.lib.licenses.mit ];
460 license = [ pkgs.lib.licenses.mit ];
461 };
461 };
462 };
462 };
463 "pexpect" = super.buildPythonPackage {
463 "pexpect" = super.buildPythonPackage {
464 name = "pexpect-4.8.0";
464 name = "pexpect-4.8.0";
465 doCheck = false;
465 doCheck = false;
466 propagatedBuildInputs = [
466 propagatedBuildInputs = [
467 self."ptyprocess"
467 self."ptyprocess"
468 ];
468 ];
469 src = fetchurl {
469 src = fetchurl {
470 url = "https://files.pythonhosted.org/packages/e5/9b/ff402e0e930e70467a7178abb7c128709a30dfb22d8777c043e501bc1b10/pexpect-4.8.0.tar.gz";
470 url = "https://files.pythonhosted.org/packages/e5/9b/ff402e0e930e70467a7178abb7c128709a30dfb22d8777c043e501bc1b10/pexpect-4.8.0.tar.gz";
471 sha256 = "032cg337h8awydgypz6f4wx848lw8dyrj4zy988x0lyib4ws8rgw";
471 sha256 = "032cg337h8awydgypz6f4wx848lw8dyrj4zy988x0lyib4ws8rgw";
472 };
472 };
473 meta = {
473 meta = {
474 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
474 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
475 };
475 };
476 };
476 };
477 "pickleshare" = super.buildPythonPackage {
477 "pickleshare" = super.buildPythonPackage {
478 name = "pickleshare-0.7.5";
478 name = "pickleshare-0.7.5";
479 doCheck = false;
479 doCheck = false;
480 propagatedBuildInputs = [
480 propagatedBuildInputs = [
481 self."pathlib2"
481 self."pathlib2"
482 ];
482 ];
483 src = fetchurl {
483 src = fetchurl {
484 url = "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz";
484 url = "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz";
485 sha256 = "1jmghg3c53yp1i8cm6pcrm280ayi8621rwyav9fac7awjr3kss47";
485 sha256 = "1jmghg3c53yp1i8cm6pcrm280ayi8621rwyav9fac7awjr3kss47";
486 };
486 };
487 meta = {
487 meta = {
488 license = [ pkgs.lib.licenses.mit ];
488 license = [ pkgs.lib.licenses.mit ];
489 };
489 };
490 };
490 };
491 "plaster" = super.buildPythonPackage {
491 "plaster" = super.buildPythonPackage {
492 name = "plaster-1.0";
492 name = "plaster-1.0";
493 doCheck = false;
493 doCheck = false;
494 propagatedBuildInputs = [
494 propagatedBuildInputs = [
495 self."setuptools"
495 self."setuptools"
496 ];
496 ];
497 src = fetchurl {
497 src = fetchurl {
498 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
498 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
499 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
499 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
500 };
500 };
501 meta = {
501 meta = {
502 license = [ pkgs.lib.licenses.mit ];
502 license = [ pkgs.lib.licenses.mit ];
503 };
503 };
504 };
504 };
505 "plaster-pastedeploy" = super.buildPythonPackage {
505 "plaster-pastedeploy" = super.buildPythonPackage {
506 name = "plaster-pastedeploy-0.7";
506 name = "plaster-pastedeploy-0.7";
507 doCheck = false;
507 doCheck = false;
508 propagatedBuildInputs = [
508 propagatedBuildInputs = [
509 self."pastedeploy"
509 self."pastedeploy"
510 self."plaster"
510 self."plaster"
511 ];
511 ];
512 src = fetchurl {
512 src = fetchurl {
513 url = "https://files.pythonhosted.org/packages/99/69/2d3bc33091249266a1bd3cf24499e40ab31d54dffb4a7d76fe647950b98c/plaster_pastedeploy-0.7.tar.gz";
513 url = "https://files.pythonhosted.org/packages/99/69/2d3bc33091249266a1bd3cf24499e40ab31d54dffb4a7d76fe647950b98c/plaster_pastedeploy-0.7.tar.gz";
514 sha256 = "1zg7gcsvc1kzay1ry5p699rg2qavfsxqwl17mqxzr0gzw6j9679r";
514 sha256 = "1zg7gcsvc1kzay1ry5p699rg2qavfsxqwl17mqxzr0gzw6j9679r";
515 };
515 };
516 meta = {
516 meta = {
517 license = [ pkgs.lib.licenses.mit ];
517 license = [ pkgs.lib.licenses.mit ];
518 };
518 };
519 };
519 };
520 "pluggy" = super.buildPythonPackage {
520 "pluggy" = super.buildPythonPackage {
521 name = "pluggy-0.13.1";
521 name = "pluggy-0.13.1";
522 doCheck = false;
522 doCheck = false;
523 propagatedBuildInputs = [
523 propagatedBuildInputs = [
524 self."importlib-metadata"
524 self."importlib-metadata"
525 ];
525 ];
526 src = fetchurl {
526 src = fetchurl {
527 url = "https://files.pythonhosted.org/packages/f8/04/7a8542bed4b16a65c2714bf76cf5a0b026157da7f75e87cc88774aa10b14/pluggy-0.13.1.tar.gz";
527 url = "https://files.pythonhosted.org/packages/f8/04/7a8542bed4b16a65c2714bf76cf5a0b026157da7f75e87cc88774aa10b14/pluggy-0.13.1.tar.gz";
528 sha256 = "1c35qyhvy27q9ih9n899f3h4sdnpgq027dbiilly2qb5cvgarchm";
528 sha256 = "1c35qyhvy27q9ih9n899f3h4sdnpgq027dbiilly2qb5cvgarchm";
529 };
529 };
530 meta = {
530 meta = {
531 license = [ pkgs.lib.licenses.mit ];
531 license = [ pkgs.lib.licenses.mit ];
532 };
532 };
533 };
533 };
534 "prompt-toolkit" = super.buildPythonPackage {
534 "prompt-toolkit" = super.buildPythonPackage {
535 name = "prompt-toolkit-1.0.18";
535 name = "prompt-toolkit-1.0.18";
536 doCheck = false;
536 doCheck = false;
537 propagatedBuildInputs = [
537 propagatedBuildInputs = [
538 self."six"
538 self."six"
539 self."wcwidth"
539 self."wcwidth"
540 ];
540 ];
541 src = fetchurl {
541 src = fetchurl {
542 url = "https://files.pythonhosted.org/packages/c5/64/c170e5b1913b540bf0c8ab7676b21fdd1d25b65ddeb10025c6ca43cccd4c/prompt_toolkit-1.0.18.tar.gz";
542 url = "https://files.pythonhosted.org/packages/c5/64/c170e5b1913b540bf0c8ab7676b21fdd1d25b65ddeb10025c6ca43cccd4c/prompt_toolkit-1.0.18.tar.gz";
543 sha256 = "09h1153wgr5x2ny7ds0w2m81n3bb9j8hjb8sjfnrg506r01clkyx";
543 sha256 = "09h1153wgr5x2ny7ds0w2m81n3bb9j8hjb8sjfnrg506r01clkyx";
544 };
544 };
545 meta = {
545 meta = {
546 license = [ pkgs.lib.licenses.bsdOriginal ];
546 license = [ pkgs.lib.licenses.bsdOriginal ];
547 };
547 };
548 };
548 };
549 "psutil" = super.buildPythonPackage {
549 "psutil" = super.buildPythonPackage {
550 name = "psutil-5.7.0";
550 name = "psutil-5.7.0";
551 doCheck = false;
551 doCheck = false;
552 src = fetchurl {
552 src = fetchurl {
553 url = "https://files.pythonhosted.org/packages/c4/b8/3512f0e93e0db23a71d82485ba256071ebef99b227351f0f5540f744af41/psutil-5.7.0.tar.gz";
553 url = "https://files.pythonhosted.org/packages/c4/b8/3512f0e93e0db23a71d82485ba256071ebef99b227351f0f5540f744af41/psutil-5.7.0.tar.gz";
554 sha256 = "03jykdi3dgf1cdal9bv4fq9zjvzj9l9bs99gi5ar81sdl5nc2pk8";
554 sha256 = "03jykdi3dgf1cdal9bv4fq9zjvzj9l9bs99gi5ar81sdl5nc2pk8";
555 };
555 };
556 meta = {
556 meta = {
557 license = [ pkgs.lib.licenses.bsdOriginal ];
557 license = [ pkgs.lib.licenses.bsdOriginal ];
558 };
558 };
559 };
559 };
560 "ptyprocess" = super.buildPythonPackage {
560 "ptyprocess" = super.buildPythonPackage {
561 name = "ptyprocess-0.6.0";
561 name = "ptyprocess-0.6.0";
562 doCheck = false;
562 doCheck = false;
563 src = fetchurl {
563 src = fetchurl {
564 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
564 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
565 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
565 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
566 };
566 };
567 meta = {
567 meta = {
568 license = [ ];
568 license = [ ];
569 };
569 };
570 };
570 };
571 "py" = super.buildPythonPackage {
571 "py" = super.buildPythonPackage {
572 name = "py-1.8.0";
572 name = "py-1.8.0";
573 doCheck = false;
573 doCheck = false;
574 src = fetchurl {
574 src = fetchurl {
575 url = "https://files.pythonhosted.org/packages/f1/5a/87ca5909f400a2de1561f1648883af74345fe96349f34f737cdfc94eba8c/py-1.8.0.tar.gz";
575 url = "https://files.pythonhosted.org/packages/f1/5a/87ca5909f400a2de1561f1648883af74345fe96349f34f737cdfc94eba8c/py-1.8.0.tar.gz";
576 sha256 = "0lsy1gajva083pzc7csj1cvbmminb7b4l6a0prdzyb3fd829nqyw";
576 sha256 = "0lsy1gajva083pzc7csj1cvbmminb7b4l6a0prdzyb3fd829nqyw";
577 };
577 };
578 meta = {
578 meta = {
579 license = [ pkgs.lib.licenses.mit ];
579 license = [ pkgs.lib.licenses.mit ];
580 };
580 };
581 };
581 };
582 "pycparser" = super.buildPythonPackage {
582 "pycparser" = super.buildPythonPackage {
583 name = "pycparser-2.20";
583 name = "pycparser-2.20";
584 doCheck = false;
584 doCheck = false;
585 src = fetchurl {
585 src = fetchurl {
586 url = "https://files.pythonhosted.org/packages/0f/86/e19659527668d70be91d0369aeaa055b4eb396b0f387a4f92293a20035bd/pycparser-2.20.tar.gz";
586 url = "https://files.pythonhosted.org/packages/0f/86/e19659527668d70be91d0369aeaa055b4eb396b0f387a4f92293a20035bd/pycparser-2.20.tar.gz";
587 sha256 = "1w0m3xvlrzq4lkbvd1ngfm8mdw64r1yxy6n7djlw6qj5d0km6ird";
587 sha256 = "1w0m3xvlrzq4lkbvd1ngfm8mdw64r1yxy6n7djlw6qj5d0km6ird";
588 };
588 };
589 meta = {
589 meta = {
590 license = [ pkgs.lib.licenses.bsdOriginal ];
590 license = [ pkgs.lib.licenses.bsdOriginal ];
591 };
591 };
592 };
592 };
593 "pygit2" = super.buildPythonPackage {
593 "pygit2" = super.buildPythonPackage {
594 name = "pygit2-0.28.2";
594 name = "pygit2-0.28.2";
595 doCheck = false;
595 doCheck = false;
596 propagatedBuildInputs = [
596 propagatedBuildInputs = [
597 self."cffi"
597 self."cffi"
598 self."six"
598 self."six"
599 ];
599 ];
600 src = fetchurl {
600 src = fetchurl {
601 url = "https://files.pythonhosted.org/packages/4c/64/88c2a4eb2d22ca1982b364f41ff5da42d61de791d7eb68140e7f8f7eb721/pygit2-0.28.2.tar.gz";
601 url = "https://files.pythonhosted.org/packages/4c/64/88c2a4eb2d22ca1982b364f41ff5da42d61de791d7eb68140e7f8f7eb721/pygit2-0.28.2.tar.gz";
602 sha256 = "11kzj5mjkspvplnpdb6bj8dcj6rgmkk986k8hjcklyg5yaxkz32d";
602 sha256 = "11kzj5mjkspvplnpdb6bj8dcj6rgmkk986k8hjcklyg5yaxkz32d";
603 };
603 };
604 meta = {
604 meta = {
605 license = [ { fullName = "GPLv2 with linking exception"; } ];
605 license = [ { fullName = "GPLv2 with linking exception"; } ];
606 };
606 };
607 };
607 };
608 "pygments" = super.buildPythonPackage {
608 "pygments" = super.buildPythonPackage {
609 name = "pygments-2.4.2";
609 name = "pygments-2.4.2";
610 doCheck = false;
610 doCheck = false;
611 src = fetchurl {
611 src = fetchurl {
612 url = "https://files.pythonhosted.org/packages/7e/ae/26808275fc76bf2832deb10d3a3ed3107bc4de01b85dcccbe525f2cd6d1e/Pygments-2.4.2.tar.gz";
612 url = "https://files.pythonhosted.org/packages/7e/ae/26808275fc76bf2832deb10d3a3ed3107bc4de01b85dcccbe525f2cd6d1e/Pygments-2.4.2.tar.gz";
613 sha256 = "15v2sqm5g12bqa0c7wikfh9ck2nl97ayizy1hpqhmws5gqalq748";
613 sha256 = "15v2sqm5g12bqa0c7wikfh9ck2nl97ayizy1hpqhmws5gqalq748";
614 };
614 };
615 meta = {
615 meta = {
616 license = [ pkgs.lib.licenses.bsdOriginal ];
616 license = [ pkgs.lib.licenses.bsdOriginal ];
617 };
617 };
618 };
618 };
619 "pyparsing" = super.buildPythonPackage {
619 "pyparsing" = super.buildPythonPackage {
620 name = "pyparsing-2.4.7";
620 name = "pyparsing-2.4.7";
621 doCheck = false;
621 doCheck = false;
622 src = fetchurl {
622 src = fetchurl {
623 url = "https://files.pythonhosted.org/packages/c1/47/dfc9c342c9842bbe0036c7f763d2d6686bcf5eb1808ba3e170afdb282210/pyparsing-2.4.7.tar.gz";
623 url = "https://files.pythonhosted.org/packages/c1/47/dfc9c342c9842bbe0036c7f763d2d6686bcf5eb1808ba3e170afdb282210/pyparsing-2.4.7.tar.gz";
624 sha256 = "1hgc8qrbq1ymxbwfbjghv01fm3fbpjwpjwi0bcailxxzhf3yq0y2";
624 sha256 = "1hgc8qrbq1ymxbwfbjghv01fm3fbpjwpjwi0bcailxxzhf3yq0y2";
625 };
625 };
626 meta = {
626 meta = {
627 license = [ pkgs.lib.licenses.mit ];
627 license = [ pkgs.lib.licenses.mit ];
628 };
628 };
629 };
629 };
630 "pyramid" = super.buildPythonPackage {
630 "pyramid" = super.buildPythonPackage {
631 name = "pyramid-1.10.4";
631 name = "pyramid-1.10.4";
632 doCheck = false;
632 doCheck = false;
633 propagatedBuildInputs = [
633 propagatedBuildInputs = [
634 self."hupper"
634 self."hupper"
635 self."plaster"
635 self."plaster"
636 self."plaster-pastedeploy"
636 self."plaster-pastedeploy"
637 self."setuptools"
637 self."setuptools"
638 self."translationstring"
638 self."translationstring"
639 self."venusian"
639 self."venusian"
640 self."webob"
640 self."webob"
641 self."zope.deprecation"
641 self."zope.deprecation"
642 self."zope.interface"
642 self."zope.interface"
643 self."repoze.lru"
643 self."repoze.lru"
644 ];
644 ];
645 src = fetchurl {
645 src = fetchurl {
646 url = "https://files.pythonhosted.org/packages/c2/43/1ae701c9c6bb3a434358e678a5e72c96e8aa55cf4cb1d2fa2041b5dd38b7/pyramid-1.10.4.tar.gz";
646 url = "https://files.pythonhosted.org/packages/c2/43/1ae701c9c6bb3a434358e678a5e72c96e8aa55cf4cb1d2fa2041b5dd38b7/pyramid-1.10.4.tar.gz";
647 sha256 = "0rkxs1ajycg2zh1c94xlmls56mx5m161sn8112skj0amza6cn36q";
647 sha256 = "0rkxs1ajycg2zh1c94xlmls56mx5m161sn8112skj0amza6cn36q";
648 };
648 };
649 meta = {
649 meta = {
650 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
650 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
651 };
651 };
652 };
652 };
653 "pyramid-mako" = super.buildPythonPackage {
653 "pyramid-mako" = super.buildPythonPackage {
654 name = "pyramid-mako-1.1.0";
654 name = "pyramid-mako-1.1.0";
655 doCheck = false;
655 doCheck = false;
656 propagatedBuildInputs = [
656 propagatedBuildInputs = [
657 self."pyramid"
657 self."pyramid"
658 self."mako"
658 self."mako"
659 ];
659 ];
660 src = fetchurl {
660 src = fetchurl {
661 url = "https://files.pythonhosted.org/packages/63/7b/5e2af68f675071a6bad148c1c393928f0ef5fcd94e95cbf53b89d6471a83/pyramid_mako-1.1.0.tar.gz";
661 url = "https://files.pythonhosted.org/packages/63/7b/5e2af68f675071a6bad148c1c393928f0ef5fcd94e95cbf53b89d6471a83/pyramid_mako-1.1.0.tar.gz";
662 sha256 = "1qj0m091mnii86j2q1d82yir22nha361rvhclvg3s70z8iiwhrh0";
662 sha256 = "1qj0m091mnii86j2q1d82yir22nha361rvhclvg3s70z8iiwhrh0";
663 };
663 };
664 meta = {
664 meta = {
665 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
665 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
666 };
666 };
667 };
667 };
668 "pytest" = super.buildPythonPackage {
668 "pytest" = super.buildPythonPackage {
669 name = "pytest-4.6.5";
669 name = "pytest-4.6.5";
670 doCheck = false;
670 doCheck = false;
671 propagatedBuildInputs = [
671 propagatedBuildInputs = [
672 self."py"
672 self."py"
673 self."six"
673 self."six"
674 self."packaging"
674 self."packaging"
675 self."attrs"
675 self."attrs"
676 self."atomicwrites"
676 self."atomicwrites"
677 self."pluggy"
677 self."pluggy"
678 self."importlib-metadata"
678 self."importlib-metadata"
679 self."wcwidth"
679 self."wcwidth"
680 self."funcsigs"
680 self."funcsigs"
681 self."pathlib2"
681 self."pathlib2"
682 self."more-itertools"
682 self."more-itertools"
683 ];
683 ];
684 src = fetchurl {
684 src = fetchurl {
685 url = "https://files.pythonhosted.org/packages/2a/c6/1d1f32f6a5009900521b12e6560fb6b7245b0d4bc3fb771acd63d10e30e1/pytest-4.6.5.tar.gz";
685 url = "https://files.pythonhosted.org/packages/2a/c6/1d1f32f6a5009900521b12e6560fb6b7245b0d4bc3fb771acd63d10e30e1/pytest-4.6.5.tar.gz";
686 sha256 = "0iykwwfp4h181nd7rsihh2120b0rkawlw7rvbl19sgfspncr3hwg";
686 sha256 = "0iykwwfp4h181nd7rsihh2120b0rkawlw7rvbl19sgfspncr3hwg";
687 };
687 };
688 meta = {
688 meta = {
689 license = [ pkgs.lib.licenses.mit ];
689 license = [ pkgs.lib.licenses.mit ];
690 };
690 };
691 };
691 };
692 "pytest-cov" = super.buildPythonPackage {
692 "pytest-cov" = super.buildPythonPackage {
693 name = "pytest-cov-2.7.1";
693 name = "pytest-cov-2.7.1";
694 doCheck = false;
694 doCheck = false;
695 propagatedBuildInputs = [
695 propagatedBuildInputs = [
696 self."pytest"
696 self."pytest"
697 self."coverage"
697 self."coverage"
698 ];
698 ];
699 src = fetchurl {
699 src = fetchurl {
700 url = "https://files.pythonhosted.org/packages/bb/0f/3db7ff86801883b21d5353b258c994b1b8e2abbc804e2273b8d0fd19004b/pytest-cov-2.7.1.tar.gz";
700 url = "https://files.pythonhosted.org/packages/bb/0f/3db7ff86801883b21d5353b258c994b1b8e2abbc804e2273b8d0fd19004b/pytest-cov-2.7.1.tar.gz";
701 sha256 = "0filvmmyqm715azsl09ql8hy2x7h286n6d8z5x42a1wpvvys83p0";
701 sha256 = "0filvmmyqm715azsl09ql8hy2x7h286n6d8z5x42a1wpvvys83p0";
702 };
702 };
703 meta = {
703 meta = {
704 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
704 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
705 };
705 };
706 };
706 };
707 "pytest-profiling" = super.buildPythonPackage {
707 "pytest-profiling" = super.buildPythonPackage {
708 name = "pytest-profiling-1.7.0";
708 name = "pytest-profiling-1.7.0";
709 doCheck = false;
709 doCheck = false;
710 propagatedBuildInputs = [
710 propagatedBuildInputs = [
711 self."six"
711 self."six"
712 self."pytest"
712 self."pytest"
713 self."gprof2dot"
713 self."gprof2dot"
714 ];
714 ];
715 src = fetchurl {
715 src = fetchurl {
716 url = "https://files.pythonhosted.org/packages/39/70/22a4b33739f07f1732a63e33bbfbf68e0fa58cfba9d200e76d01921eddbf/pytest-profiling-1.7.0.tar.gz";
716 url = "https://files.pythonhosted.org/packages/39/70/22a4b33739f07f1732a63e33bbfbf68e0fa58cfba9d200e76d01921eddbf/pytest-profiling-1.7.0.tar.gz";
717 sha256 = "0abz9gi26jpcfdzgsvwad91555lpgdc8kbymicmms8k2fqa8z4wk";
717 sha256 = "0abz9gi26jpcfdzgsvwad91555lpgdc8kbymicmms8k2fqa8z4wk";
718 };
718 };
719 meta = {
719 meta = {
720 license = [ pkgs.lib.licenses.mit ];
720 license = [ pkgs.lib.licenses.mit ];
721 };
721 };
722 };
722 };
723 "pytest-runner" = super.buildPythonPackage {
723 "pytest-runner" = super.buildPythonPackage {
724 name = "pytest-runner-5.1";
724 name = "pytest-runner-5.1";
725 doCheck = false;
725 doCheck = false;
726 src = fetchurl {
726 src = fetchurl {
727 url = "https://files.pythonhosted.org/packages/d9/6d/4b41a74b31720e25abd4799be72d54811da4b4d0233e38b75864dcc1f7ad/pytest-runner-5.1.tar.gz";
727 url = "https://files.pythonhosted.org/packages/d9/6d/4b41a74b31720e25abd4799be72d54811da4b4d0233e38b75864dcc1f7ad/pytest-runner-5.1.tar.gz";
728 sha256 = "0ykfcnpp8c22winj63qzc07l5axwlc9ikl8vn05sc32gv3417815";
728 sha256 = "0ykfcnpp8c22winj63qzc07l5axwlc9ikl8vn05sc32gv3417815";
729 };
729 };
730 meta = {
730 meta = {
731 license = [ pkgs.lib.licenses.mit ];
731 license = [ pkgs.lib.licenses.mit ];
732 };
732 };
733 };
733 };
734 "pytest-sugar" = super.buildPythonPackage {
734 "pytest-sugar" = super.buildPythonPackage {
735 name = "pytest-sugar-0.9.2";
735 name = "pytest-sugar-0.9.2";
736 doCheck = false;
736 doCheck = false;
737 propagatedBuildInputs = [
737 propagatedBuildInputs = [
738 self."pytest"
738 self."pytest"
739 self."termcolor"
739 self."termcolor"
740 self."packaging"
740 self."packaging"
741 ];
741 ];
742 src = fetchurl {
742 src = fetchurl {
743 url = "https://files.pythonhosted.org/packages/55/59/f02f78d1c80f7e03e23177f60624c8106d4f23d124c921df103f65692464/pytest-sugar-0.9.2.tar.gz";
743 url = "https://files.pythonhosted.org/packages/55/59/f02f78d1c80f7e03e23177f60624c8106d4f23d124c921df103f65692464/pytest-sugar-0.9.2.tar.gz";
744 sha256 = "1asq7yc4g8bx2sn7yy974mhc9ywvaihasjab4inkirdwn9s7mn7w";
744 sha256 = "1asq7yc4g8bx2sn7yy974mhc9ywvaihasjab4inkirdwn9s7mn7w";
745 };
745 };
746 meta = {
746 meta = {
747 license = [ pkgs.lib.licenses.bsdOriginal ];
747 license = [ pkgs.lib.licenses.bsdOriginal ];
748 };
748 };
749 };
749 };
750 "pytest-timeout" = super.buildPythonPackage {
750 "pytest-timeout" = super.buildPythonPackage {
751 name = "pytest-timeout-1.3.3";
751 name = "pytest-timeout-1.3.3";
752 doCheck = false;
752 doCheck = false;
753 propagatedBuildInputs = [
753 propagatedBuildInputs = [
754 self."pytest"
754 self."pytest"
755 ];
755 ];
756 src = fetchurl {
756 src = fetchurl {
757 url = "https://files.pythonhosted.org/packages/13/48/7a166eaa29c1dca6cc253e3ba5773ff2e4aa4f567c1ea3905808e95ac5c1/pytest-timeout-1.3.3.tar.gz";
757 url = "https://files.pythonhosted.org/packages/13/48/7a166eaa29c1dca6cc253e3ba5773ff2e4aa4f567c1ea3905808e95ac5c1/pytest-timeout-1.3.3.tar.gz";
758 sha256 = "1cczcjhw4xx5sjkhxlhc5c1bkr7x6fcyx12wrnvwfckshdvblc2a";
758 sha256 = "1cczcjhw4xx5sjkhxlhc5c1bkr7x6fcyx12wrnvwfckshdvblc2a";
759 };
759 };
760 meta = {
760 meta = {
761 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
761 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
762 };
762 };
763 };
763 };
764 "redis" = super.buildPythonPackage {
764 "redis" = super.buildPythonPackage {
765 name = "redis-3.5.3";
765 name = "redis-3.5.3";
766 doCheck = false;
766 doCheck = false;
767 src = fetchurl {
767 src = fetchurl {
768 url = "https://files.pythonhosted.org/packages/b3/17/1e567ff78c83854e16b98694411fe6e08c3426af866ad11397cddceb80d3/redis-3.5.3.tar.gz";
768 url = "https://files.pythonhosted.org/packages/b3/17/1e567ff78c83854e16b98694411fe6e08c3426af866ad11397cddceb80d3/redis-3.5.3.tar.gz";
769 sha256 = "0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2";
769 sha256 = "0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2";
770 };
770 };
771 meta = {
771 meta = {
772 license = [ pkgs.lib.licenses.mit ];
772 license = [ pkgs.lib.licenses.mit ];
773 };
773 };
774 };
774 };
775 "repoze.lru" = super.buildPythonPackage {
775 "repoze.lru" = super.buildPythonPackage {
776 name = "repoze.lru-0.7";
776 name = "repoze.lru-0.7";
777 doCheck = false;
777 doCheck = false;
778 src = fetchurl {
778 src = fetchurl {
779 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
779 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
780 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
780 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
781 };
781 };
782 meta = {
782 meta = {
783 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
783 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
784 };
784 };
785 };
785 };
786 "rhodecode-vcsserver" = super.buildPythonPackage {
786 "rhodecode-vcsserver" = super.buildPythonPackage {
787 name = "rhodecode-vcsserver-4.25.0";
787 name = "rhodecode-vcsserver-4.25.2";
788 buildInputs = [
788 buildInputs = [
789 self."pytest"
789 self."pytest"
790 self."py"
790 self."py"
791 self."pytest-cov"
791 self."pytest-cov"
792 self."pytest-sugar"
792 self."pytest-sugar"
793 self."pytest-runner"
793 self."pytest-runner"
794 self."pytest-profiling"
794 self."pytest-profiling"
795 self."pytest-timeout"
795 self."pytest-timeout"
796 self."gprof2dot"
796 self."gprof2dot"
797 self."mock"
797 self."mock"
798 self."cov-core"
798 self."cov-core"
799 self."coverage"
799 self."coverage"
800 self."webtest"
800 self."webtest"
801 self."beautifulsoup4"
801 self."beautifulsoup4"
802 self."configobj"
802 self."configobj"
803 ];
803 ];
804 doCheck = true;
804 doCheck = true;
805 propagatedBuildInputs = [
805 propagatedBuildInputs = [
806 self."configobj"
806 self."configobj"
807 self."dogpile.cache"
807 self."dogpile.cache"
808 self."dogpile.core"
808 self."dogpile.core"
809 self."decorator"
809 self."decorator"
810 self."dulwich"
810 self."dulwich"
811 self."hgsubversion"
811 self."hgsubversion"
812 self."hg-evolve"
812 self."hg-evolve"
813 self."mako"
813 self."mako"
814 self."markupsafe"
814 self."markupsafe"
815 self."mercurial"
815 self."mercurial"
816 self."msgpack-python"
816 self."msgpack-python"
817 self."pastedeploy"
817 self."pastedeploy"
818 self."pyramid"
818 self."pyramid"
819 self."pyramid-mako"
819 self."pyramid-mako"
820 self."pygit2"
820 self."pygit2"
821 self."repoze.lru"
821 self."repoze.lru"
822 self."redis"
822 self."redis"
823 self."simplejson"
823 self."simplejson"
824 self."subprocess32"
824 self."subprocess32"
825 self."subvertpy"
825 self."subvertpy"
826 self."six"
826 self."six"
827 self."translationstring"
827 self."translationstring"
828 self."webob"
828 self."webob"
829 self."zope.deprecation"
829 self."zope.deprecation"
830 self."zope.interface"
830 self."zope.interface"
831 self."gevent"
831 self."gevent"
832 self."greenlet"
832 self."greenlet"
833 self."gunicorn"
833 self."gunicorn"
834 self."waitress"
834 self."waitress"
835 self."ipdb"
835 self."ipdb"
836 self."ipython"
836 self."ipython"
837 self."pytest"
837 self."pytest"
838 self."py"
838 self."py"
839 self."pytest-cov"
839 self."pytest-cov"
840 self."pytest-sugar"
840 self."pytest-sugar"
841 self."pytest-runner"
841 self."pytest-runner"
842 self."pytest-profiling"
842 self."pytest-profiling"
843 self."pytest-timeout"
843 self."pytest-timeout"
844 self."gprof2dot"
844 self."gprof2dot"
845 self."mock"
845 self."mock"
846 self."cov-core"
846 self."cov-core"
847 self."coverage"
847 self."coverage"
848 self."webtest"
848 self."webtest"
849 self."beautifulsoup4"
849 self."beautifulsoup4"
850 ];
850 ];
851 src = ./.;
851 src = ./.;
852 meta = {
852 meta = {
853 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
853 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
854 };
854 };
855 };
855 };
856 "scandir" = super.buildPythonPackage {
856 "scandir" = super.buildPythonPackage {
857 name = "scandir-1.10.0";
857 name = "scandir-1.10.0";
858 doCheck = false;
858 doCheck = false;
859 src = fetchurl {
859 src = fetchurl {
860 url = "https://files.pythonhosted.org/packages/df/f5/9c052db7bd54d0cbf1bc0bb6554362bba1012d03e5888950a4f5c5dadc4e/scandir-1.10.0.tar.gz";
860 url = "https://files.pythonhosted.org/packages/df/f5/9c052db7bd54d0cbf1bc0bb6554362bba1012d03e5888950a4f5c5dadc4e/scandir-1.10.0.tar.gz";
861 sha256 = "1bkqwmf056pkchf05ywbnf659wqlp6lljcdb0y88wr9f0vv32ijd";
861 sha256 = "1bkqwmf056pkchf05ywbnf659wqlp6lljcdb0y88wr9f0vv32ijd";
862 };
862 };
863 meta = {
863 meta = {
864 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
864 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
865 };
865 };
866 };
866 };
867 "setproctitle" = super.buildPythonPackage {
867 "setproctitle" = super.buildPythonPackage {
868 name = "setproctitle-1.1.10";
868 name = "setproctitle-1.1.10";
869 doCheck = false;
869 doCheck = false;
870 src = fetchurl {
870 src = fetchurl {
871 url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
871 url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
872 sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
872 sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
873 };
873 };
874 meta = {
874 meta = {
875 license = [ pkgs.lib.licenses.bsdOriginal ];
875 license = [ pkgs.lib.licenses.bsdOriginal ];
876 };
876 };
877 };
877 };
878 "setuptools" = super.buildPythonPackage {
878 "setuptools" = super.buildPythonPackage {
879 name = "setuptools-44.1.0";
879 name = "setuptools-44.1.0";
880 doCheck = false;
880 doCheck = false;
881 src = fetchurl {
881 src = fetchurl {
882 url = "https://files.pythonhosted.org/packages/ed/7b/bbf89ca71e722b7f9464ebffe4b5ee20a9e5c9a555a56e2d3914bb9119a6/setuptools-44.1.0.zip";
882 url = "https://files.pythonhosted.org/packages/ed/7b/bbf89ca71e722b7f9464ebffe4b5ee20a9e5c9a555a56e2d3914bb9119a6/setuptools-44.1.0.zip";
883 sha256 = "1jja896zvd1ppccnjbhkgagxbwchgq6vfamp6qn1hvywq6q9cjkr";
883 sha256 = "1jja896zvd1ppccnjbhkgagxbwchgq6vfamp6qn1hvywq6q9cjkr";
884 };
884 };
885 meta = {
885 meta = {
886 license = [ pkgs.lib.licenses.mit ];
886 license = [ pkgs.lib.licenses.mit ];
887 };
887 };
888 };
888 };
889
889
890 "setuptools-scm" = super.buildPythonPackage {
890 "setuptools-scm" = super.buildPythonPackage {
891 name = "setuptools-scm-3.5.0";
891 name = "setuptools-scm-3.5.0";
892 doCheck = false;
892 doCheck = false;
893 src = fetchurl {
893 src = fetchurl {
894 url = "https://files.pythonhosted.org/packages/b2/f7/60a645aae001a2e06cf4b8db2fba9d9f36b8fd378f10647e3e218b61b74b/setuptools_scm-3.5.0.tar.gz";
894 url = "https://files.pythonhosted.org/packages/b2/f7/60a645aae001a2e06cf4b8db2fba9d9f36b8fd378f10647e3e218b61b74b/setuptools_scm-3.5.0.tar.gz";
895 sha256 = "5bdf21a05792903cafe7ae0c9501182ab52497614fa6b1750d9dbae7b60c1a87";
895 sha256 = "5bdf21a05792903cafe7ae0c9501182ab52497614fa6b1750d9dbae7b60c1a87";
896 };
896 };
897 meta = {
897 meta = {
898 license = [ pkgs.lib.licenses.psfl ];
898 license = [ pkgs.lib.licenses.psfl ];
899 };
899 };
900 };
900 };
901
901
902 "simplegeneric" = super.buildPythonPackage {
902 "simplegeneric" = super.buildPythonPackage {
903 name = "simplegeneric-0.8.1";
903 name = "simplegeneric-0.8.1";
904 doCheck = false;
904 doCheck = false;
905 src = fetchurl {
905 src = fetchurl {
906 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
906 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
907 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
907 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
908 };
908 };
909 meta = {
909 meta = {
910 license = [ pkgs.lib.licenses.zpl21 ];
910 license = [ pkgs.lib.licenses.zpl21 ];
911 };
911 };
912 };
912 };
913 "simplejson" = super.buildPythonPackage {
913 "simplejson" = super.buildPythonPackage {
914 name = "simplejson-3.16.0";
914 name = "simplejson-3.16.0";
915 doCheck = false;
915 doCheck = false;
916 src = fetchurl {
916 src = fetchurl {
917 url = "https://files.pythonhosted.org/packages/e3/24/c35fb1c1c315fc0fffe61ea00d3f88e85469004713dab488dee4f35b0aff/simplejson-3.16.0.tar.gz";
917 url = "https://files.pythonhosted.org/packages/e3/24/c35fb1c1c315fc0fffe61ea00d3f88e85469004713dab488dee4f35b0aff/simplejson-3.16.0.tar.gz";
918 sha256 = "19cws1syk8jzq2pw43878dv6fjkb0ifvjpx0i9aajix6kc9jkwxi";
918 sha256 = "19cws1syk8jzq2pw43878dv6fjkb0ifvjpx0i9aajix6kc9jkwxi";
919 };
919 };
920 meta = {
920 meta = {
921 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
921 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
922 };
922 };
923 };
923 };
924 "six" = super.buildPythonPackage {
924 "six" = super.buildPythonPackage {
925 name = "six-1.11.0";
925 name = "six-1.11.0";
926 doCheck = false;
926 doCheck = false;
927 src = fetchurl {
927 src = fetchurl {
928 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
928 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
929 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
929 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
930 };
930 };
931 meta = {
931 meta = {
932 license = [ pkgs.lib.licenses.mit ];
932 license = [ pkgs.lib.licenses.mit ];
933 };
933 };
934 };
934 };
935 "subprocess32" = super.buildPythonPackage {
935 "subprocess32" = super.buildPythonPackage {
936 name = "subprocess32-3.5.4";
936 name = "subprocess32-3.5.4";
937 doCheck = false;
937 doCheck = false;
938 src = fetchurl {
938 src = fetchurl {
939 url = "https://files.pythonhosted.org/packages/32/c8/564be4d12629b912ea431f1a50eb8b3b9d00f1a0b1ceff17f266be190007/subprocess32-3.5.4.tar.gz";
939 url = "https://files.pythonhosted.org/packages/32/c8/564be4d12629b912ea431f1a50eb8b3b9d00f1a0b1ceff17f266be190007/subprocess32-3.5.4.tar.gz";
940 sha256 = "17f7mvwx2271s1wrl0qac3wjqqnrqag866zs3qc8v5wp0k43fagb";
940 sha256 = "17f7mvwx2271s1wrl0qac3wjqqnrqag866zs3qc8v5wp0k43fagb";
941 };
941 };
942 meta = {
942 meta = {
943 license = [ pkgs.lib.licenses.psfl ];
943 license = [ pkgs.lib.licenses.psfl ];
944 };
944 };
945 };
945 };
946 "subvertpy" = super.buildPythonPackage {
946 "subvertpy" = super.buildPythonPackage {
947 name = "subvertpy-0.10.1";
947 name = "subvertpy-0.10.1";
948 doCheck = false;
948 doCheck = false;
949 src = fetchurl {
949 src = fetchurl {
950 url = "https://files.pythonhosted.org/packages/9d/76/99fa82affce75f5ac0f7dbe513796c3f37311ace0c68e1b063683b4f9b99/subvertpy-0.10.1.tar.gz";
950 url = "https://files.pythonhosted.org/packages/9d/76/99fa82affce75f5ac0f7dbe513796c3f37311ace0c68e1b063683b4f9b99/subvertpy-0.10.1.tar.gz";
951 sha256 = "061ncy9wjz3zyv527avcrdyk0xygyssyy7p1644nhzhwp8zpybij";
951 sha256 = "061ncy9wjz3zyv527avcrdyk0xygyssyy7p1644nhzhwp8zpybij";
952 };
952 };
953 meta = {
953 meta = {
954 license = [ pkgs.lib.licenses.lgpl21Plus pkgs.lib.licenses.gpl2Plus ];
954 license = [ pkgs.lib.licenses.lgpl21Plus pkgs.lib.licenses.gpl2Plus ];
955 };
955 };
956 };
956 };
957 "termcolor" = super.buildPythonPackage {
957 "termcolor" = super.buildPythonPackage {
958 name = "termcolor-1.1.0";
958 name = "termcolor-1.1.0";
959 doCheck = false;
959 doCheck = false;
960 src = fetchurl {
960 src = fetchurl {
961 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
961 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
962 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
962 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
963 };
963 };
964 meta = {
964 meta = {
965 license = [ pkgs.lib.licenses.mit ];
965 license = [ pkgs.lib.licenses.mit ];
966 };
966 };
967 };
967 };
968 "traitlets" = super.buildPythonPackage {
968 "traitlets" = super.buildPythonPackage {
969 name = "traitlets-4.3.3";
969 name = "traitlets-4.3.3";
970 doCheck = false;
970 doCheck = false;
971 propagatedBuildInputs = [
971 propagatedBuildInputs = [
972 self."ipython-genutils"
972 self."ipython-genutils"
973 self."six"
973 self."six"
974 self."decorator"
974 self."decorator"
975 self."enum34"
975 self."enum34"
976 ];
976 ];
977 src = fetchurl {
977 src = fetchurl {
978 url = "https://files.pythonhosted.org/packages/75/b0/43deb021bc943f18f07cbe3dac1d681626a48997b7ffa1e7fb14ef922b21/traitlets-4.3.3.tar.gz";
978 url = "https://files.pythonhosted.org/packages/75/b0/43deb021bc943f18f07cbe3dac1d681626a48997b7ffa1e7fb14ef922b21/traitlets-4.3.3.tar.gz";
979 sha256 = "1xsrwgivpkxlbr4dfndfsi098s29yqgswgjc1qqn69yxklvfw8yh";
979 sha256 = "1xsrwgivpkxlbr4dfndfsi098s29yqgswgjc1qqn69yxklvfw8yh";
980 };
980 };
981 meta = {
981 meta = {
982 license = [ pkgs.lib.licenses.bsdOriginal ];
982 license = [ pkgs.lib.licenses.bsdOriginal ];
983 };
983 };
984 };
984 };
985 "translationstring" = super.buildPythonPackage {
985 "translationstring" = super.buildPythonPackage {
986 name = "translationstring-1.3";
986 name = "translationstring-1.3";
987 doCheck = false;
987 doCheck = false;
988 src = fetchurl {
988 src = fetchurl {
989 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
989 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
990 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
990 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
991 };
991 };
992 meta = {
992 meta = {
993 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
993 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
994 };
994 };
995 };
995 };
996 "venusian" = super.buildPythonPackage {
996 "venusian" = super.buildPythonPackage {
997 name = "venusian-1.2.0";
997 name = "venusian-1.2.0";
998 doCheck = false;
998 doCheck = false;
999 src = fetchurl {
999 src = fetchurl {
1000 url = "https://files.pythonhosted.org/packages/7e/6f/40a9d43ac77cb51cb62be5b5662d170f43f8037bdc4eab56336c4ca92bb7/venusian-1.2.0.tar.gz";
1000 url = "https://files.pythonhosted.org/packages/7e/6f/40a9d43ac77cb51cb62be5b5662d170f43f8037bdc4eab56336c4ca92bb7/venusian-1.2.0.tar.gz";
1001 sha256 = "0ghyx66g8ikx9nx1mnwqvdcqm11i1vlq0hnvwl50s48bp22q5v34";
1001 sha256 = "0ghyx66g8ikx9nx1mnwqvdcqm11i1vlq0hnvwl50s48bp22q5v34";
1002 };
1002 };
1003 meta = {
1003 meta = {
1004 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1004 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1005 };
1005 };
1006 };
1006 };
1007 "waitress" = super.buildPythonPackage {
1007 "waitress" = super.buildPythonPackage {
1008 name = "waitress-1.3.1";
1008 name = "waitress-1.3.1";
1009 doCheck = false;
1009 doCheck = false;
1010 src = fetchurl {
1010 src = fetchurl {
1011 url = "https://files.pythonhosted.org/packages/a6/e6/708da7bba65898e5d759ade8391b1077e49d07be0b0223c39f5be04def56/waitress-1.3.1.tar.gz";
1011 url = "https://files.pythonhosted.org/packages/a6/e6/708da7bba65898e5d759ade8391b1077e49d07be0b0223c39f5be04def56/waitress-1.3.1.tar.gz";
1012 sha256 = "1iysl8ka3l4cdrr0r19fh1cv28q41mwpvgsb81ji7k4shkb0k3i7";
1012 sha256 = "1iysl8ka3l4cdrr0r19fh1cv28q41mwpvgsb81ji7k4shkb0k3i7";
1013 };
1013 };
1014 meta = {
1014 meta = {
1015 license = [ pkgs.lib.licenses.zpl21 ];
1015 license = [ pkgs.lib.licenses.zpl21 ];
1016 };
1016 };
1017 };
1017 };
1018 "wcwidth" = super.buildPythonPackage {
1018 "wcwidth" = super.buildPythonPackage {
1019 name = "wcwidth-0.1.9";
1019 name = "wcwidth-0.1.9";
1020 doCheck = false;
1020 doCheck = false;
1021 src = fetchurl {
1021 src = fetchurl {
1022 url = "https://files.pythonhosted.org/packages/25/9d/0acbed6e4a4be4fc99148f275488580968f44ddb5e69b8ceb53fc9df55a0/wcwidth-0.1.9.tar.gz";
1022 url = "https://files.pythonhosted.org/packages/25/9d/0acbed6e4a4be4fc99148f275488580968f44ddb5e69b8ceb53fc9df55a0/wcwidth-0.1.9.tar.gz";
1023 sha256 = "1wf5ycjx8s066rdvr0fgz4xds9a8zhs91c4jzxvvymm1c8l8cwzf";
1023 sha256 = "1wf5ycjx8s066rdvr0fgz4xds9a8zhs91c4jzxvvymm1c8l8cwzf";
1024 };
1024 };
1025 meta = {
1025 meta = {
1026 license = [ pkgs.lib.licenses.mit ];
1026 license = [ pkgs.lib.licenses.mit ];
1027 };
1027 };
1028 };
1028 };
1029 "webob" = super.buildPythonPackage {
1029 "webob" = super.buildPythonPackage {
1030 name = "webob-1.8.5";
1030 name = "webob-1.8.5";
1031 doCheck = false;
1031 doCheck = false;
1032 src = fetchurl {
1032 src = fetchurl {
1033 url = "https://files.pythonhosted.org/packages/9d/1a/0c89c070ee2829c934cb6c7082287c822e28236a4fcf90063e6be7c35532/WebOb-1.8.5.tar.gz";
1033 url = "https://files.pythonhosted.org/packages/9d/1a/0c89c070ee2829c934cb6c7082287c822e28236a4fcf90063e6be7c35532/WebOb-1.8.5.tar.gz";
1034 sha256 = "11khpzaxc88q31v25ic330gsf56fwmbdc9b30br8mvp0fmwspah5";
1034 sha256 = "11khpzaxc88q31v25ic330gsf56fwmbdc9b30br8mvp0fmwspah5";
1035 };
1035 };
1036 meta = {
1036 meta = {
1037 license = [ pkgs.lib.licenses.mit ];
1037 license = [ pkgs.lib.licenses.mit ];
1038 };
1038 };
1039 };
1039 };
1040 "webtest" = super.buildPythonPackage {
1040 "webtest" = super.buildPythonPackage {
1041 name = "webtest-2.0.34";
1041 name = "webtest-2.0.34";
1042 doCheck = false;
1042 doCheck = false;
1043 propagatedBuildInputs = [
1043 propagatedBuildInputs = [
1044 self."six"
1044 self."six"
1045 self."webob"
1045 self."webob"
1046 self."waitress"
1046 self."waitress"
1047 self."beautifulsoup4"
1047 self."beautifulsoup4"
1048 ];
1048 ];
1049 src = fetchurl {
1049 src = fetchurl {
1050 url = "https://files.pythonhosted.org/packages/2c/74/a0e63feee438735d628631e2b70d82280276a930637ac535479e5fad9427/WebTest-2.0.34.tar.gz";
1050 url = "https://files.pythonhosted.org/packages/2c/74/a0e63feee438735d628631e2b70d82280276a930637ac535479e5fad9427/WebTest-2.0.34.tar.gz";
1051 sha256 = "0x1y2c8z4fmpsny4hbp6ka37si2g10r5r2jwxhvv5mx7g3blq4bi";
1051 sha256 = "0x1y2c8z4fmpsny4hbp6ka37si2g10r5r2jwxhvv5mx7g3blq4bi";
1052 };
1052 };
1053 meta = {
1053 meta = {
1054 license = [ pkgs.lib.licenses.mit ];
1054 license = [ pkgs.lib.licenses.mit ];
1055 };
1055 };
1056 };
1056 };
1057 "zipp" = super.buildPythonPackage {
1057 "zipp" = super.buildPythonPackage {
1058 name = "zipp-1.2.0";
1058 name = "zipp-1.2.0";
1059 doCheck = false;
1059 doCheck = false;
1060 propagatedBuildInputs = [
1060 propagatedBuildInputs = [
1061 self."contextlib2"
1061 self."contextlib2"
1062 ];
1062 ];
1063 src = fetchurl {
1063 src = fetchurl {
1064 url = "https://files.pythonhosted.org/packages/78/08/d52f0ea643bc1068d6dc98b412f4966a9b63255d20911a23ac3220c033c4/zipp-1.2.0.tar.gz";
1064 url = "https://files.pythonhosted.org/packages/78/08/d52f0ea643bc1068d6dc98b412f4966a9b63255d20911a23ac3220c033c4/zipp-1.2.0.tar.gz";
1065 sha256 = "1c91lnv1bxjimh8as27hz7bghsjkkbxn1d37xq7in9c82iai0167";
1065 sha256 = "1c91lnv1bxjimh8as27hz7bghsjkkbxn1d37xq7in9c82iai0167";
1066 };
1066 };
1067 meta = {
1067 meta = {
1068 license = [ pkgs.lib.licenses.mit ];
1068 license = [ pkgs.lib.licenses.mit ];
1069 };
1069 };
1070 };
1070 };
1071 "zope.deprecation" = super.buildPythonPackage {
1071 "zope.deprecation" = super.buildPythonPackage {
1072 name = "zope.deprecation-4.4.0";
1072 name = "zope.deprecation-4.4.0";
1073 doCheck = false;
1073 doCheck = false;
1074 propagatedBuildInputs = [
1074 propagatedBuildInputs = [
1075 self."setuptools"
1075 self."setuptools"
1076 ];
1076 ];
1077 src = fetchurl {
1077 src = fetchurl {
1078 url = "https://files.pythonhosted.org/packages/34/da/46e92d32d545dd067b9436279d84c339e8b16de2ca393d7b892bc1e1e9fd/zope.deprecation-4.4.0.tar.gz";
1078 url = "https://files.pythonhosted.org/packages/34/da/46e92d32d545dd067b9436279d84c339e8b16de2ca393d7b892bc1e1e9fd/zope.deprecation-4.4.0.tar.gz";
1079 sha256 = "1pz2cv7gv9y1r3m0bdv7ks1alagmrn5msm5spwdzkb2by0w36i8d";
1079 sha256 = "1pz2cv7gv9y1r3m0bdv7ks1alagmrn5msm5spwdzkb2by0w36i8d";
1080 };
1080 };
1081 meta = {
1081 meta = {
1082 license = [ pkgs.lib.licenses.zpl21 ];
1082 license = [ pkgs.lib.licenses.zpl21 ];
1083 };
1083 };
1084 };
1084 };
1085 "zope.interface" = super.buildPythonPackage {
1085 "zope.interface" = super.buildPythonPackage {
1086 name = "zope.interface-4.6.0";
1086 name = "zope.interface-4.6.0";
1087 doCheck = false;
1087 doCheck = false;
1088 propagatedBuildInputs = [
1088 propagatedBuildInputs = [
1089 self."setuptools"
1089 self."setuptools"
1090 ];
1090 ];
1091 src = fetchurl {
1091 src = fetchurl {
1092 url = "https://files.pythonhosted.org/packages/4e/d0/c9d16bd5b38de44a20c6dc5d5ed80a49626fafcb3db9f9efdc2a19026db6/zope.interface-4.6.0.tar.gz";
1092 url = "https://files.pythonhosted.org/packages/4e/d0/c9d16bd5b38de44a20c6dc5d5ed80a49626fafcb3db9f9efdc2a19026db6/zope.interface-4.6.0.tar.gz";
1093 sha256 = "1rgh2x3rcl9r0v0499kf78xy86rnmanajf4ywmqb943wpk50sg8v";
1093 sha256 = "1rgh2x3rcl9r0v0499kf78xy86rnmanajf4ywmqb943wpk50sg8v";
1094 };
1094 };
1095 meta = {
1095 meta = {
1096 license = [ pkgs.lib.licenses.zpl21 ];
1096 license = [ pkgs.lib.licenses.zpl21 ];
1097 };
1097 };
1098 };
1098 };
1099
1099
1100 ### Test requirements
1100 ### Test requirements
1101
1101
1102
1102
1103 }
1103 }
@@ -1,1 +1,1 b''
1 4.25.0 No newline at end of file
1 4.25.2 No newline at end of file
@@ -1,1021 +1,1022 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import functools
17 import functools
18 import io
18 import io
19 import logging
19 import logging
20 import os
20 import os
21 import stat
21 import stat
22 import urllib
22 import urllib
23 import urllib2
23 import urllib2
24 import traceback
24 import traceback
25
25
26 from hgext import largefiles, rebase, purge
26 from hgext import largefiles, rebase, purge
27 from hgext.strip import strip as hgext_strip
27 from hgext.strip import strip as hgext_strip
28 from mercurial import commands
28 from mercurial import commands
29 from mercurial import unionrepo
29 from mercurial import unionrepo
30 from mercurial import verify
30 from mercurial import verify
31 from mercurial import repair
31 from mercurial import repair
32
32
33 import vcsserver
33 import vcsserver
34 from vcsserver import exceptions
34 from vcsserver import exceptions
35 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, archive_repo, ArchiveNode
35 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, archive_repo, ArchiveNode
36 from vcsserver.hgcompat import (
36 from vcsserver.hgcompat import (
37 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
37 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
38 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
38 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
39 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
39 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
40 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
40 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
41 RepoLookupError, InterventionRequired, RequirementError,
41 RepoLookupError, InterventionRequired, RequirementError,
42 alwaysmatcher, patternmatcher, hgutil)
42 alwaysmatcher, patternmatcher, hgutil)
43 from vcsserver.vcs_base import RemoteBase
43 from vcsserver.vcs_base import RemoteBase
44
44
45 log = logging.getLogger(__name__)
45 log = logging.getLogger(__name__)
46
46
47
47
48 def make_ui_from_config(repo_config):
48 def make_ui_from_config(repo_config):
49
49
50 class LoggingUI(ui.ui):
50 class LoggingUI(ui.ui):
51 def status(self, *msg, **opts):
51 def status(self, *msg, **opts):
52 log.info(' '.join(msg).rstrip('\n'))
52 log.info(' '.join(msg).rstrip('\n'))
53 super(LoggingUI, self).status(*msg, **opts)
53 super(LoggingUI, self).status(*msg, **opts)
54
54
55 def warn(self, *msg, **opts):
55 def warn(self, *msg, **opts):
56 log.warn(' '.join(msg).rstrip('\n'))
56 log.warn(' '.join(msg).rstrip('\n'))
57 super(LoggingUI, self).warn(*msg, **opts)
57 super(LoggingUI, self).warn(*msg, **opts)
58
58
59 def error(self, *msg, **opts):
59 def error(self, *msg, **opts):
60 log.error(' '.join(msg).rstrip('\n'))
60 log.error(' '.join(msg).rstrip('\n'))
61 super(LoggingUI, self).error(*msg, **opts)
61 super(LoggingUI, self).error(*msg, **opts)
62
62
63 def note(self, *msg, **opts):
63 def note(self, *msg, **opts):
64 log.info(' '.join(msg).rstrip('\n'))
64 log.info(' '.join(msg).rstrip('\n'))
65 super(LoggingUI, self).note(*msg, **opts)
65 super(LoggingUI, self).note(*msg, **opts)
66
66
67 def debug(self, *msg, **opts):
67 def debug(self, *msg, **opts):
68 log.debug(' '.join(msg).rstrip('\n'))
68 log.debug(' '.join(msg).rstrip('\n'))
69 super(LoggingUI, self).debug(*msg, **opts)
69 super(LoggingUI, self).debug(*msg, **opts)
70
70
71 baseui = LoggingUI()
71 baseui = LoggingUI()
72
72
73 # clean the baseui object
73 # clean the baseui object
74 baseui._ocfg = hgconfig.config()
74 baseui._ocfg = hgconfig.config()
75 baseui._ucfg = hgconfig.config()
75 baseui._ucfg = hgconfig.config()
76 baseui._tcfg = hgconfig.config()
76 baseui._tcfg = hgconfig.config()
77
77
78 for section, option, value in repo_config:
78 for section, option, value in repo_config:
79 baseui.setconfig(section, option, value)
79 baseui.setconfig(section, option, value)
80
80
81 # make our hgweb quiet so it doesn't print output
81 # make our hgweb quiet so it doesn't print output
82 baseui.setconfig('ui', 'quiet', 'true')
82 baseui.setconfig('ui', 'quiet', 'true')
83
83
84 baseui.setconfig('ui', 'paginate', 'never')
84 baseui.setconfig('ui', 'paginate', 'never')
85 # for better Error reporting of Mercurial
85 # for better Error reporting of Mercurial
86 baseui.setconfig('ui', 'message-output', 'stderr')
86 baseui.setconfig('ui', 'message-output', 'stderr')
87
87
88 # force mercurial to only use 1 thread, otherwise it may try to set a
88 # force mercurial to only use 1 thread, otherwise it may try to set a
89 # signal in a non-main thread, thus generating a ValueError.
89 # signal in a non-main thread, thus generating a ValueError.
90 baseui.setconfig('worker', 'numcpus', 1)
90 baseui.setconfig('worker', 'numcpus', 1)
91
91
92 # If there is no config for the largefiles extension, we explicitly disable
92 # If there is no config for the largefiles extension, we explicitly disable
93 # it here. This overrides settings from repositories hgrc file. Recent
93 # it here. This overrides settings from repositories hgrc file. Recent
94 # mercurial versions enable largefiles in hgrc on clone from largefile
94 # mercurial versions enable largefiles in hgrc on clone from largefile
95 # repo.
95 # repo.
96 if not baseui.hasconfig('extensions', 'largefiles'):
96 if not baseui.hasconfig('extensions', 'largefiles'):
97 log.debug('Explicitly disable largefiles extension for repo.')
97 log.debug('Explicitly disable largefiles extension for repo.')
98 baseui.setconfig('extensions', 'largefiles', '!')
98 baseui.setconfig('extensions', 'largefiles', '!')
99
99
100 return baseui
100 return baseui
101
101
102
102
103 def reraise_safe_exceptions(func):
103 def reraise_safe_exceptions(func):
104 """Decorator for converting mercurial exceptions to something neutral."""
104 """Decorator for converting mercurial exceptions to something neutral."""
105
105
106 def wrapper(*args, **kwargs):
106 def wrapper(*args, **kwargs):
107 try:
107 try:
108 return func(*args, **kwargs)
108 return func(*args, **kwargs)
109 except (Abort, InterventionRequired) as e:
109 except (Abort, InterventionRequired) as e:
110 raise_from_original(exceptions.AbortException(e))
110 raise_from_original(exceptions.AbortException(e))
111 except RepoLookupError as e:
111 except RepoLookupError as e:
112 raise_from_original(exceptions.LookupException(e))
112 raise_from_original(exceptions.LookupException(e))
113 except RequirementError as e:
113 except RequirementError as e:
114 raise_from_original(exceptions.RequirementException(e))
114 raise_from_original(exceptions.RequirementException(e))
115 except RepoError as e:
115 except RepoError as e:
116 raise_from_original(exceptions.VcsException(e))
116 raise_from_original(exceptions.VcsException(e))
117 except LookupError as e:
117 except LookupError as e:
118 raise_from_original(exceptions.LookupException(e))
118 raise_from_original(exceptions.LookupException(e))
119 except Exception as e:
119 except Exception as e:
120 if not hasattr(e, '_vcs_kind'):
120 if not hasattr(e, '_vcs_kind'):
121 log.exception("Unhandled exception in hg remote call")
121 log.exception("Unhandled exception in hg remote call")
122 raise_from_original(exceptions.UnhandledException(e))
122 raise_from_original(exceptions.UnhandledException(e))
123
123
124 raise
124 raise
125 return wrapper
125 return wrapper
126
126
127
127
128 class MercurialFactory(RepoFactory):
128 class MercurialFactory(RepoFactory):
129 repo_type = 'hg'
129 repo_type = 'hg'
130
130
131 def _create_config(self, config, hooks=True):
131 def _create_config(self, config, hooks=True):
132 if not hooks:
132 if not hooks:
133 hooks_to_clean = frozenset((
133 hooks_to_clean = frozenset((
134 'changegroup.repo_size', 'preoutgoing.pre_pull',
134 'changegroup.repo_size', 'preoutgoing.pre_pull',
135 'outgoing.pull_logger', 'prechangegroup.pre_push'))
135 'outgoing.pull_logger', 'prechangegroup.pre_push'))
136 new_config = []
136 new_config = []
137 for section, option, value in config:
137 for section, option, value in config:
138 if section == 'hooks' and option in hooks_to_clean:
138 if section == 'hooks' and option in hooks_to_clean:
139 continue
139 continue
140 new_config.append((section, option, value))
140 new_config.append((section, option, value))
141 config = new_config
141 config = new_config
142
142
143 baseui = make_ui_from_config(config)
143 baseui = make_ui_from_config(config)
144 return baseui
144 return baseui
145
145
146 def _create_repo(self, wire, create):
146 def _create_repo(self, wire, create):
147 baseui = self._create_config(wire["config"])
147 baseui = self._create_config(wire["config"])
148 return instance(baseui, wire["path"], create)
148 return instance(baseui, wire["path"], create)
149
149
150 def repo(self, wire, create=False):
150 def repo(self, wire, create=False):
151 """
151 """
152 Get a repository instance for the given path.
152 Get a repository instance for the given path.
153 """
153 """
154 return self._create_repo(wire, create)
154 return self._create_repo(wire, create)
155
155
156
156
157 def patch_ui_message_output(baseui):
157 def patch_ui_message_output(baseui):
158 baseui.setconfig('ui', 'quiet', 'false')
158 baseui.setconfig('ui', 'quiet', 'false')
159 output = io.BytesIO()
159 output = io.BytesIO()
160
160
161 def write(data, **unused_kwargs):
161 def write(data, **unused_kwargs):
162 output.write(data)
162 output.write(data)
163
163
164 baseui.status = write
164 baseui.status = write
165 baseui.write = write
165 baseui.write = write
166 baseui.warn = write
166 baseui.warn = write
167 baseui.debug = write
167 baseui.debug = write
168
168
169 return baseui, output
169 return baseui, output
170
170
171
171
172 class HgRemote(RemoteBase):
172 class HgRemote(RemoteBase):
173
173
174 def __init__(self, factory):
174 def __init__(self, factory):
175 self._factory = factory
175 self._factory = factory
176 self._bulk_methods = {
176 self._bulk_methods = {
177 "affected_files": self.ctx_files,
177 "affected_files": self.ctx_files,
178 "author": self.ctx_user,
178 "author": self.ctx_user,
179 "branch": self.ctx_branch,
179 "branch": self.ctx_branch,
180 "children": self.ctx_children,
180 "children": self.ctx_children,
181 "date": self.ctx_date,
181 "date": self.ctx_date,
182 "message": self.ctx_description,
182 "message": self.ctx_description,
183 "parents": self.ctx_parents,
183 "parents": self.ctx_parents,
184 "status": self.ctx_status,
184 "status": self.ctx_status,
185 "obsolete": self.ctx_obsolete,
185 "obsolete": self.ctx_obsolete,
186 "phase": self.ctx_phase,
186 "phase": self.ctx_phase,
187 "hidden": self.ctx_hidden,
187 "hidden": self.ctx_hidden,
188 "_file_paths": self.ctx_list,
188 "_file_paths": self.ctx_list,
189 }
189 }
190
190
191 def _get_ctx(self, repo, ref):
191 def _get_ctx(self, repo, ref):
192 return get_ctx(repo, ref)
192 return get_ctx(repo, ref)
193
193
194 @reraise_safe_exceptions
194 @reraise_safe_exceptions
195 def discover_hg_version(self):
195 def discover_hg_version(self):
196 from mercurial import util
196 from mercurial import util
197 return util.version()
197 return util.version()
198
198
199 @reraise_safe_exceptions
199 @reraise_safe_exceptions
200 def is_empty(self, wire):
200 def is_empty(self, wire):
201 repo = self._factory.repo(wire)
201 repo = self._factory.repo(wire)
202
202
203 try:
203 try:
204 return len(repo) == 0
204 return len(repo) == 0
205 except Exception:
205 except Exception:
206 log.exception("failed to read object_store")
206 log.exception("failed to read object_store")
207 return False
207 return False
208
208
209 @reraise_safe_exceptions
209 @reraise_safe_exceptions
210 def bookmarks(self, wire):
210 def bookmarks(self, wire):
211 cache_on, context_uid, repo_id = self._cache_on(wire)
211 cache_on, context_uid, repo_id = self._cache_on(wire)
212 @self.region.conditional_cache_on_arguments(condition=cache_on)
212 @self.region.conditional_cache_on_arguments(condition=cache_on)
213 def _bookmarks(_context_uid, _repo_id):
213 def _bookmarks(_context_uid, _repo_id):
214 repo = self._factory.repo(wire)
214 repo = self._factory.repo(wire)
215 return dict(repo._bookmarks)
215 return dict(repo._bookmarks)
216
216
217 return _bookmarks(context_uid, repo_id)
217 return _bookmarks(context_uid, repo_id)
218
218
219 @reraise_safe_exceptions
219 @reraise_safe_exceptions
220 def branches(self, wire, normal, closed):
220 def branches(self, wire, normal, closed):
221 cache_on, context_uid, repo_id = self._cache_on(wire)
221 cache_on, context_uid, repo_id = self._cache_on(wire)
222 @self.region.conditional_cache_on_arguments(condition=cache_on)
222 @self.region.conditional_cache_on_arguments(condition=cache_on)
223 def _branches(_context_uid, _repo_id, _normal, _closed):
223 def _branches(_context_uid, _repo_id, _normal, _closed):
224 repo = self._factory.repo(wire)
224 repo = self._factory.repo(wire)
225 iter_branches = repo.branchmap().iterbranches()
225 iter_branches = repo.branchmap().iterbranches()
226 bt = {}
226 bt = {}
227 for branch_name, _heads, tip, is_closed in iter_branches:
227 for branch_name, _heads, tip, is_closed in iter_branches:
228 if normal and not is_closed:
228 if normal and not is_closed:
229 bt[branch_name] = tip
229 bt[branch_name] = tip
230 if closed and is_closed:
230 if closed and is_closed:
231 bt[branch_name] = tip
231 bt[branch_name] = tip
232
232
233 return bt
233 return bt
234
234
235 return _branches(context_uid, repo_id, normal, closed)
235 return _branches(context_uid, repo_id, normal, closed)
236
236
237 @reraise_safe_exceptions
237 @reraise_safe_exceptions
238 def bulk_request(self, wire, commit_id, pre_load):
238 def bulk_request(self, wire, commit_id, pre_load):
239 cache_on, context_uid, repo_id = self._cache_on(wire)
239 cache_on, context_uid, repo_id = self._cache_on(wire)
240 @self.region.conditional_cache_on_arguments(condition=cache_on)
240 @self.region.conditional_cache_on_arguments(condition=cache_on)
241 def _bulk_request(_repo_id, _commit_id, _pre_load):
241 def _bulk_request(_repo_id, _commit_id, _pre_load):
242 result = {}
242 result = {}
243 for attr in pre_load:
243 for attr in pre_load:
244 try:
244 try:
245 method = self._bulk_methods[attr]
245 method = self._bulk_methods[attr]
246 result[attr] = method(wire, commit_id)
246 result[attr] = method(wire, commit_id)
247 except KeyError as e:
247 except KeyError as e:
248 raise exceptions.VcsException(e)(
248 raise exceptions.VcsException(e)(
249 'Unknown bulk attribute: "%s"' % attr)
249 'Unknown bulk attribute: "%s"' % attr)
250 return result
250 return result
251
251
252 return _bulk_request(repo_id, commit_id, sorted(pre_load))
252 return _bulk_request(repo_id, commit_id, sorted(pre_load))
253
253
254 @reraise_safe_exceptions
254 @reraise_safe_exceptions
255 def ctx_branch(self, wire, commit_id):
255 def ctx_branch(self, wire, commit_id):
256 cache_on, context_uid, repo_id = self._cache_on(wire)
256 cache_on, context_uid, repo_id = self._cache_on(wire)
257 @self.region.conditional_cache_on_arguments(condition=cache_on)
257 @self.region.conditional_cache_on_arguments(condition=cache_on)
258 def _ctx_branch(_repo_id, _commit_id):
258 def _ctx_branch(_repo_id, _commit_id):
259 repo = self._factory.repo(wire)
259 repo = self._factory.repo(wire)
260 ctx = self._get_ctx(repo, commit_id)
260 ctx = self._get_ctx(repo, commit_id)
261 return ctx.branch()
261 return ctx.branch()
262 return _ctx_branch(repo_id, commit_id)
262 return _ctx_branch(repo_id, commit_id)
263
263
264 @reraise_safe_exceptions
264 @reraise_safe_exceptions
265 def ctx_date(self, wire, commit_id):
265 def ctx_date(self, wire, commit_id):
266 cache_on, context_uid, repo_id = self._cache_on(wire)
266 cache_on, context_uid, repo_id = self._cache_on(wire)
267 @self.region.conditional_cache_on_arguments(condition=cache_on)
267 @self.region.conditional_cache_on_arguments(condition=cache_on)
268 def _ctx_date(_repo_id, _commit_id):
268 def _ctx_date(_repo_id, _commit_id):
269 repo = self._factory.repo(wire)
269 repo = self._factory.repo(wire)
270 ctx = self._get_ctx(repo, commit_id)
270 ctx = self._get_ctx(repo, commit_id)
271 return ctx.date()
271 return ctx.date()
272 return _ctx_date(repo_id, commit_id)
272 return _ctx_date(repo_id, commit_id)
273
273
274 @reraise_safe_exceptions
274 @reraise_safe_exceptions
275 def ctx_description(self, wire, revision):
275 def ctx_description(self, wire, revision):
276 repo = self._factory.repo(wire)
276 repo = self._factory.repo(wire)
277 ctx = self._get_ctx(repo, revision)
277 ctx = self._get_ctx(repo, revision)
278 return ctx.description()
278 return ctx.description()
279
279
280 @reraise_safe_exceptions
280 @reraise_safe_exceptions
281 def ctx_files(self, wire, commit_id):
281 def ctx_files(self, wire, commit_id):
282 cache_on, context_uid, repo_id = self._cache_on(wire)
282 cache_on, context_uid, repo_id = self._cache_on(wire)
283 @self.region.conditional_cache_on_arguments(condition=cache_on)
283 @self.region.conditional_cache_on_arguments(condition=cache_on)
284 def _ctx_files(_repo_id, _commit_id):
284 def _ctx_files(_repo_id, _commit_id):
285 repo = self._factory.repo(wire)
285 repo = self._factory.repo(wire)
286 ctx = self._get_ctx(repo, commit_id)
286 ctx = self._get_ctx(repo, commit_id)
287 return ctx.files()
287 return ctx.files()
288
288
289 return _ctx_files(repo_id, commit_id)
289 return _ctx_files(repo_id, commit_id)
290
290
291 @reraise_safe_exceptions
291 @reraise_safe_exceptions
292 def ctx_list(self, path, revision):
292 def ctx_list(self, path, revision):
293 repo = self._factory.repo(path)
293 repo = self._factory.repo(path)
294 ctx = self._get_ctx(repo, revision)
294 ctx = self._get_ctx(repo, revision)
295 return list(ctx)
295 return list(ctx)
296
296
297 @reraise_safe_exceptions
297 @reraise_safe_exceptions
298 def ctx_parents(self, wire, commit_id):
298 def ctx_parents(self, wire, commit_id):
299 cache_on, context_uid, repo_id = self._cache_on(wire)
299 cache_on, context_uid, repo_id = self._cache_on(wire)
300 @self.region.conditional_cache_on_arguments(condition=cache_on)
300 @self.region.conditional_cache_on_arguments(condition=cache_on)
301 def _ctx_parents(_repo_id, _commit_id):
301 def _ctx_parents(_repo_id, _commit_id):
302 repo = self._factory.repo(wire)
302 repo = self._factory.repo(wire)
303 ctx = self._get_ctx(repo, commit_id)
303 ctx = self._get_ctx(repo, commit_id)
304 return [parent.hex() for parent in ctx.parents()
304 return [parent.hex() for parent in ctx.parents()
305 if not (parent.hidden() or parent.obsolete())]
305 if not (parent.hidden() or parent.obsolete())]
306
306
307 return _ctx_parents(repo_id, commit_id)
307 return _ctx_parents(repo_id, commit_id)
308
308
309 @reraise_safe_exceptions
309 @reraise_safe_exceptions
310 def ctx_children(self, wire, commit_id):
310 def ctx_children(self, wire, commit_id):
311 cache_on, context_uid, repo_id = self._cache_on(wire)
311 cache_on, context_uid, repo_id = self._cache_on(wire)
312 @self.region.conditional_cache_on_arguments(condition=cache_on)
312 @self.region.conditional_cache_on_arguments(condition=cache_on)
313 def _ctx_children(_repo_id, _commit_id):
313 def _ctx_children(_repo_id, _commit_id):
314 repo = self._factory.repo(wire)
314 repo = self._factory.repo(wire)
315 ctx = self._get_ctx(repo, commit_id)
315 ctx = self._get_ctx(repo, commit_id)
316 return [child.hex() for child in ctx.children()
316 return [child.hex() for child in ctx.children()
317 if not (child.hidden() or child.obsolete())]
317 if not (child.hidden() or child.obsolete())]
318
318
319 return _ctx_children(repo_id, commit_id)
319 return _ctx_children(repo_id, commit_id)
320
320
321 @reraise_safe_exceptions
321 @reraise_safe_exceptions
322 def ctx_phase(self, wire, commit_id):
322 def ctx_phase(self, wire, commit_id):
323 cache_on, context_uid, repo_id = self._cache_on(wire)
323 cache_on, context_uid, repo_id = self._cache_on(wire)
324 @self.region.conditional_cache_on_arguments(condition=cache_on)
324 @self.region.conditional_cache_on_arguments(condition=cache_on)
325 def _ctx_phase(_context_uid, _repo_id, _commit_id):
325 def _ctx_phase(_context_uid, _repo_id, _commit_id):
326 repo = self._factory.repo(wire)
326 repo = self._factory.repo(wire)
327 ctx = self._get_ctx(repo, commit_id)
327 ctx = self._get_ctx(repo, commit_id)
328 # public=0, draft=1, secret=3
328 # public=0, draft=1, secret=3
329 return ctx.phase()
329 return ctx.phase()
330 return _ctx_phase(context_uid, repo_id, commit_id)
330 return _ctx_phase(context_uid, repo_id, commit_id)
331
331
332 @reraise_safe_exceptions
332 @reraise_safe_exceptions
333 def ctx_obsolete(self, wire, commit_id):
333 def ctx_obsolete(self, wire, commit_id):
334 cache_on, context_uid, repo_id = self._cache_on(wire)
334 cache_on, context_uid, repo_id = self._cache_on(wire)
335 @self.region.conditional_cache_on_arguments(condition=cache_on)
335 @self.region.conditional_cache_on_arguments(condition=cache_on)
336 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
336 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
337 repo = self._factory.repo(wire)
337 repo = self._factory.repo(wire)
338 ctx = self._get_ctx(repo, commit_id)
338 ctx = self._get_ctx(repo, commit_id)
339 return ctx.obsolete()
339 return ctx.obsolete()
340 return _ctx_obsolete(context_uid, repo_id, commit_id)
340 return _ctx_obsolete(context_uid, repo_id, commit_id)
341
341
342 @reraise_safe_exceptions
342 @reraise_safe_exceptions
343 def ctx_hidden(self, wire, commit_id):
343 def ctx_hidden(self, wire, commit_id):
344 cache_on, context_uid, repo_id = self._cache_on(wire)
344 cache_on, context_uid, repo_id = self._cache_on(wire)
345 @self.region.conditional_cache_on_arguments(condition=cache_on)
345 @self.region.conditional_cache_on_arguments(condition=cache_on)
346 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
346 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
347 repo = self._factory.repo(wire)
347 repo = self._factory.repo(wire)
348 ctx = self._get_ctx(repo, commit_id)
348 ctx = self._get_ctx(repo, commit_id)
349 return ctx.hidden()
349 return ctx.hidden()
350 return _ctx_hidden(context_uid, repo_id, commit_id)
350 return _ctx_hidden(context_uid, repo_id, commit_id)
351
351
352 @reraise_safe_exceptions
352 @reraise_safe_exceptions
353 def ctx_substate(self, wire, revision):
353 def ctx_substate(self, wire, revision):
354 repo = self._factory.repo(wire)
354 repo = self._factory.repo(wire)
355 ctx = self._get_ctx(repo, revision)
355 ctx = self._get_ctx(repo, revision)
356 return ctx.substate
356 return ctx.substate
357
357
358 @reraise_safe_exceptions
358 @reraise_safe_exceptions
359 def ctx_status(self, wire, revision):
359 def ctx_status(self, wire, revision):
360 repo = self._factory.repo(wire)
360 repo = self._factory.repo(wire)
361 ctx = self._get_ctx(repo, revision)
361 ctx = self._get_ctx(repo, revision)
362 status = repo[ctx.p1().node()].status(other=ctx.node())
362 status = repo[ctx.p1().node()].status(other=ctx.node())
363 # object of status (odd, custom named tuple in mercurial) is not
363 # object of status (odd, custom named tuple in mercurial) is not
364 # correctly serializable, we make it a list, as the underling
364 # correctly serializable, we make it a list, as the underling
365 # API expects this to be a list
365 # API expects this to be a list
366 return list(status)
366 return list(status)
367
367
368 @reraise_safe_exceptions
368 @reraise_safe_exceptions
369 def ctx_user(self, wire, revision):
369 def ctx_user(self, wire, revision):
370 repo = self._factory.repo(wire)
370 repo = self._factory.repo(wire)
371 ctx = self._get_ctx(repo, revision)
371 ctx = self._get_ctx(repo, revision)
372 return ctx.user()
372 return ctx.user()
373
373
374 @reraise_safe_exceptions
374 @reraise_safe_exceptions
375 def check_url(self, url, config):
375 def check_url(self, url, config):
376 _proto = None
376 _proto = None
377 if '+' in url[:url.find('://')]:
377 if '+' in url[:url.find('://')]:
378 _proto = url[0:url.find('+')]
378 _proto = url[0:url.find('+')]
379 url = url[url.find('+') + 1:]
379 url = url[url.find('+') + 1:]
380 handlers = []
380 handlers = []
381 url_obj = url_parser(url)
381 url_obj = url_parser(url)
382 test_uri, authinfo = url_obj.authinfo()
382 test_uri, authinfo = url_obj.authinfo()
383 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
383 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
384 url_obj.query = obfuscate_qs(url_obj.query)
384 url_obj.query = obfuscate_qs(url_obj.query)
385
385
386 cleaned_uri = str(url_obj)
386 cleaned_uri = str(url_obj)
387 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
387 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
388
388
389 if authinfo:
389 if authinfo:
390 # create a password manager
390 # create a password manager
391 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
391 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
392 passmgr.add_password(*authinfo)
392 passmgr.add_password(*authinfo)
393
393
394 handlers.extend((httpbasicauthhandler(passmgr),
394 handlers.extend((httpbasicauthhandler(passmgr),
395 httpdigestauthhandler(passmgr)))
395 httpdigestauthhandler(passmgr)))
396
396
397 o = urllib2.build_opener(*handlers)
397 o = urllib2.build_opener(*handlers)
398 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
398 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
399 ('Accept', 'application/mercurial-0.1')]
399 ('Accept', 'application/mercurial-0.1')]
400
400
401 q = {"cmd": 'between'}
401 q = {"cmd": 'between'}
402 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
402 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
403 qs = '?%s' % urllib.urlencode(q)
403 qs = '?%s' % urllib.urlencode(q)
404 cu = "%s%s" % (test_uri, qs)
404 cu = "%s%s" % (test_uri, qs)
405 req = urllib2.Request(cu, None, {})
405 req = urllib2.Request(cu, None, {})
406
406
407 try:
407 try:
408 log.debug("Trying to open URL %s", cleaned_uri)
408 log.debug("Trying to open URL %s", cleaned_uri)
409 resp = o.open(req)
409 resp = o.open(req)
410 if resp.code != 200:
410 if resp.code != 200:
411 raise exceptions.URLError()('Return Code is not 200')
411 raise exceptions.URLError()('Return Code is not 200')
412 except Exception as e:
412 except Exception as e:
413 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
413 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
414 # means it cannot be cloned
414 # means it cannot be cloned
415 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
415 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
416
416
417 # now check if it's a proper hg repo, but don't do it for svn
417 # now check if it's a proper hg repo, but don't do it for svn
418 try:
418 try:
419 if _proto == 'svn':
419 if _proto == 'svn':
420 pass
420 pass
421 else:
421 else:
422 # check for pure hg repos
422 # check for pure hg repos
423 log.debug(
423 log.debug(
424 "Verifying if URL is a Mercurial repository: %s",
424 "Verifying if URL is a Mercurial repository: %s",
425 cleaned_uri)
425 cleaned_uri)
426 ui = make_ui_from_config(config)
426 ui = make_ui_from_config(config)
427 peer_checker = makepeer(ui, url)
427 peer_checker = makepeer(ui, url)
428 peer_checker.lookup('tip')
428 peer_checker.lookup('tip')
429 except Exception as e:
429 except Exception as e:
430 log.warning("URL is not a valid Mercurial repository: %s",
430 log.warning("URL is not a valid Mercurial repository: %s",
431 cleaned_uri)
431 cleaned_uri)
432 raise exceptions.URLError(e)(
432 raise exceptions.URLError(e)(
433 "url [%s] does not look like an hg repo org_exc: %s"
433 "url [%s] does not look like an hg repo org_exc: %s"
434 % (cleaned_uri, e))
434 % (cleaned_uri, e))
435
435
436 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
436 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
437 return True
437 return True
438
438
439 @reraise_safe_exceptions
439 @reraise_safe_exceptions
440 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
440 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
441 repo = self._factory.repo(wire)
441 repo = self._factory.repo(wire)
442
442
443 if file_filter:
443 if file_filter:
444 match_filter = match(file_filter[0], '', [file_filter[1]])
444 match_filter = match(file_filter[0], '', [file_filter[1]])
445 else:
445 else:
446 match_filter = file_filter
446 match_filter = file_filter
447 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
447 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
448
448
449 try:
449 try:
450 return "".join(patch.diff(
450 return "".join(patch.diff(
451 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
451 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
452 except RepoLookupError as e:
452 except RepoLookupError as e:
453 raise exceptions.LookupException(e)()
453 raise exceptions.LookupException(e)()
454
454
455 @reraise_safe_exceptions
455 @reraise_safe_exceptions
456 def node_history(self, wire, revision, path, limit):
456 def node_history(self, wire, revision, path, limit):
457 cache_on, context_uid, repo_id = self._cache_on(wire)
457 cache_on, context_uid, repo_id = self._cache_on(wire)
458 @self.region.conditional_cache_on_arguments(condition=cache_on)
458 @self.region.conditional_cache_on_arguments(condition=cache_on)
459 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
459 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
460 repo = self._factory.repo(wire)
460 repo = self._factory.repo(wire)
461
461
462 ctx = self._get_ctx(repo, revision)
462 ctx = self._get_ctx(repo, revision)
463 fctx = ctx.filectx(path)
463 fctx = ctx.filectx(path)
464
464
465 def history_iter():
465 def history_iter():
466 limit_rev = fctx.rev()
466 limit_rev = fctx.rev()
467 for obj in reversed(list(fctx.filelog())):
467 for obj in reversed(list(fctx.filelog())):
468 obj = fctx.filectx(obj)
468 obj = fctx.filectx(obj)
469 ctx = obj.changectx()
469 ctx = obj.changectx()
470 if ctx.hidden() or ctx.obsolete():
470 if ctx.hidden() or ctx.obsolete():
471 continue
471 continue
472
472
473 if limit_rev >= obj.rev():
473 if limit_rev >= obj.rev():
474 yield obj
474 yield obj
475
475
476 history = []
476 history = []
477 for cnt, obj in enumerate(history_iter()):
477 for cnt, obj in enumerate(history_iter()):
478 if limit and cnt >= limit:
478 if limit and cnt >= limit:
479 break
479 break
480 history.append(hex(obj.node()))
480 history.append(hex(obj.node()))
481
481
482 return [x for x in history]
482 return [x for x in history]
483 return _node_history(context_uid, repo_id, revision, path, limit)
483 return _node_history(context_uid, repo_id, revision, path, limit)
484
484
485 @reraise_safe_exceptions
485 @reraise_safe_exceptions
486 def node_history_untill(self, wire, revision, path, limit):
486 def node_history_untill(self, wire, revision, path, limit):
487 cache_on, context_uid, repo_id = self._cache_on(wire)
487 cache_on, context_uid, repo_id = self._cache_on(wire)
488 @self.region.conditional_cache_on_arguments(condition=cache_on)
488 @self.region.conditional_cache_on_arguments(condition=cache_on)
489 def _node_history_until(_context_uid, _repo_id):
489 def _node_history_until(_context_uid, _repo_id):
490 repo = self._factory.repo(wire)
490 repo = self._factory.repo(wire)
491 ctx = self._get_ctx(repo, revision)
491 ctx = self._get_ctx(repo, revision)
492 fctx = ctx.filectx(path)
492 fctx = ctx.filectx(path)
493
493
494 file_log = list(fctx.filelog())
494 file_log = list(fctx.filelog())
495 if limit:
495 if limit:
496 # Limit to the last n items
496 # Limit to the last n items
497 file_log = file_log[-limit:]
497 file_log = file_log[-limit:]
498
498
499 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
499 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
500 return _node_history_until(context_uid, repo_id, revision, path, limit)
500 return _node_history_until(context_uid, repo_id, revision, path, limit)
501
501
502 @reraise_safe_exceptions
502 @reraise_safe_exceptions
503 def fctx_annotate(self, wire, revision, path):
503 def fctx_annotate(self, wire, revision, path):
504 repo = self._factory.repo(wire)
504 repo = self._factory.repo(wire)
505 ctx = self._get_ctx(repo, revision)
505 ctx = self._get_ctx(repo, revision)
506 fctx = ctx.filectx(path)
506 fctx = ctx.filectx(path)
507
507
508 result = []
508 result = []
509 for i, annotate_obj in enumerate(fctx.annotate(), 1):
509 for i, annotate_obj in enumerate(fctx.annotate(), 1):
510 ln_no = i
510 ln_no = i
511 sha = hex(annotate_obj.fctx.node())
511 sha = hex(annotate_obj.fctx.node())
512 content = annotate_obj.text
512 content = annotate_obj.text
513 result.append((ln_no, sha, content))
513 result.append((ln_no, sha, content))
514 return result
514 return result
515
515
516 @reraise_safe_exceptions
516 @reraise_safe_exceptions
517 def fctx_node_data(self, wire, revision, path):
517 def fctx_node_data(self, wire, revision, path):
518 repo = self._factory.repo(wire)
518 repo = self._factory.repo(wire)
519 ctx = self._get_ctx(repo, revision)
519 ctx = self._get_ctx(repo, revision)
520 fctx = ctx.filectx(path)
520 fctx = ctx.filectx(path)
521 return fctx.data()
521 return fctx.data()
522
522
523 @reraise_safe_exceptions
523 @reraise_safe_exceptions
524 def fctx_flags(self, wire, commit_id, path):
524 def fctx_flags(self, wire, commit_id, path):
525 cache_on, context_uid, repo_id = self._cache_on(wire)
525 cache_on, context_uid, repo_id = self._cache_on(wire)
526 @self.region.conditional_cache_on_arguments(condition=cache_on)
526 @self.region.conditional_cache_on_arguments(condition=cache_on)
527 def _fctx_flags(_repo_id, _commit_id, _path):
527 def _fctx_flags(_repo_id, _commit_id, _path):
528 repo = self._factory.repo(wire)
528 repo = self._factory.repo(wire)
529 ctx = self._get_ctx(repo, commit_id)
529 ctx = self._get_ctx(repo, commit_id)
530 fctx = ctx.filectx(path)
530 fctx = ctx.filectx(path)
531 return fctx.flags()
531 return fctx.flags()
532
532
533 return _fctx_flags(repo_id, commit_id, path)
533 return _fctx_flags(repo_id, commit_id, path)
534
534
535 @reraise_safe_exceptions
535 @reraise_safe_exceptions
536 def fctx_size(self, wire, commit_id, path):
536 def fctx_size(self, wire, commit_id, path):
537 cache_on, context_uid, repo_id = self._cache_on(wire)
537 cache_on, context_uid, repo_id = self._cache_on(wire)
538 @self.region.conditional_cache_on_arguments(condition=cache_on)
538 @self.region.conditional_cache_on_arguments(condition=cache_on)
539 def _fctx_size(_repo_id, _revision, _path):
539 def _fctx_size(_repo_id, _revision, _path):
540 repo = self._factory.repo(wire)
540 repo = self._factory.repo(wire)
541 ctx = self._get_ctx(repo, commit_id)
541 ctx = self._get_ctx(repo, commit_id)
542 fctx = ctx.filectx(path)
542 fctx = ctx.filectx(path)
543 return fctx.size()
543 return fctx.size()
544 return _fctx_size(repo_id, commit_id, path)
544 return _fctx_size(repo_id, commit_id, path)
545
545
546 @reraise_safe_exceptions
546 @reraise_safe_exceptions
547 def get_all_commit_ids(self, wire, name):
547 def get_all_commit_ids(self, wire, name):
548 cache_on, context_uid, repo_id = self._cache_on(wire)
548 cache_on, context_uid, repo_id = self._cache_on(wire)
549 @self.region.conditional_cache_on_arguments(condition=cache_on)
549 @self.region.conditional_cache_on_arguments(condition=cache_on)
550 def _get_all_commit_ids(_context_uid, _repo_id, _name):
550 def _get_all_commit_ids(_context_uid, _repo_id, _name):
551 repo = self._factory.repo(wire)
551 repo = self._factory.repo(wire)
552 repo = repo.filtered(name)
552 repo = repo.filtered(name)
553 revs = map(lambda x: hex(x[7]), repo.changelog.index)
553 revs = map(lambda x: hex(x[7]), repo.changelog.index)
554 return revs
554 return revs
555 return _get_all_commit_ids(context_uid, repo_id, name)
555 return _get_all_commit_ids(context_uid, repo_id, name)
556
556
557 @reraise_safe_exceptions
557 @reraise_safe_exceptions
558 def get_config_value(self, wire, section, name, untrusted=False):
558 def get_config_value(self, wire, section, name, untrusted=False):
559 repo = self._factory.repo(wire)
559 repo = self._factory.repo(wire)
560 return repo.ui.config(section, name, untrusted=untrusted)
560 return repo.ui.config(section, name, untrusted=untrusted)
561
561
562 @reraise_safe_exceptions
562 @reraise_safe_exceptions
563 def is_large_file(self, wire, commit_id, path):
563 def is_large_file(self, wire, commit_id, path):
564 cache_on, context_uid, repo_id = self._cache_on(wire)
564 cache_on, context_uid, repo_id = self._cache_on(wire)
565 @self.region.conditional_cache_on_arguments(condition=cache_on)
565 @self.region.conditional_cache_on_arguments(condition=cache_on)
566 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
566 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
567 return largefiles.lfutil.isstandin(path)
567 return largefiles.lfutil.isstandin(path)
568
568
569 return _is_large_file(context_uid, repo_id, commit_id, path)
569 return _is_large_file(context_uid, repo_id, commit_id, path)
570
570
571 @reraise_safe_exceptions
571 @reraise_safe_exceptions
572 def is_binary(self, wire, revision, path):
572 def is_binary(self, wire, revision, path):
573 cache_on, context_uid, repo_id = self._cache_on(wire)
573 cache_on, context_uid, repo_id = self._cache_on(wire)
574
574
575 @self.region.conditional_cache_on_arguments(condition=cache_on)
575 @self.region.conditional_cache_on_arguments(condition=cache_on)
576 def _is_binary(_repo_id, _sha, _path):
576 def _is_binary(_repo_id, _sha, _path):
577 repo = self._factory.repo(wire)
577 repo = self._factory.repo(wire)
578 ctx = self._get_ctx(repo, revision)
578 ctx = self._get_ctx(repo, revision)
579 fctx = ctx.filectx(path)
579 fctx = ctx.filectx(path)
580 return fctx.isbinary()
580 return fctx.isbinary()
581
581
582 return _is_binary(repo_id, revision, path)
582 return _is_binary(repo_id, revision, path)
583
583
584 @reraise_safe_exceptions
584 @reraise_safe_exceptions
585 def in_largefiles_store(self, wire, sha):
585 def in_largefiles_store(self, wire, sha):
586 repo = self._factory.repo(wire)
586 repo = self._factory.repo(wire)
587 return largefiles.lfutil.instore(repo, sha)
587 return largefiles.lfutil.instore(repo, sha)
588
588
589 @reraise_safe_exceptions
589 @reraise_safe_exceptions
590 def in_user_cache(self, wire, sha):
590 def in_user_cache(self, wire, sha):
591 repo = self._factory.repo(wire)
591 repo = self._factory.repo(wire)
592 return largefiles.lfutil.inusercache(repo.ui, sha)
592 return largefiles.lfutil.inusercache(repo.ui, sha)
593
593
594 @reraise_safe_exceptions
594 @reraise_safe_exceptions
595 def store_path(self, wire, sha):
595 def store_path(self, wire, sha):
596 repo = self._factory.repo(wire)
596 repo = self._factory.repo(wire)
597 return largefiles.lfutil.storepath(repo, sha)
597 return largefiles.lfutil.storepath(repo, sha)
598
598
599 @reraise_safe_exceptions
599 @reraise_safe_exceptions
600 def link(self, wire, sha, path):
600 def link(self, wire, sha, path):
601 repo = self._factory.repo(wire)
601 repo = self._factory.repo(wire)
602 largefiles.lfutil.link(
602 largefiles.lfutil.link(
603 largefiles.lfutil.usercachepath(repo.ui, sha), path)
603 largefiles.lfutil.usercachepath(repo.ui, sha), path)
604
604
605 @reraise_safe_exceptions
605 @reraise_safe_exceptions
606 def localrepository(self, wire, create=False):
606 def localrepository(self, wire, create=False):
607 self._factory.repo(wire, create=create)
607 self._factory.repo(wire, create=create)
608
608
609 @reraise_safe_exceptions
609 @reraise_safe_exceptions
610 def lookup(self, wire, revision, both):
610 def lookup(self, wire, revision, both):
611 cache_on, context_uid, repo_id = self._cache_on(wire)
611 cache_on, context_uid, repo_id = self._cache_on(wire)
612
612 @self.region.conditional_cache_on_arguments(condition=cache_on)
613 @self.region.conditional_cache_on_arguments(condition=cache_on)
613 def _lookup(_context_uid, _repo_id, _revision, _both):
614 def _lookup(_context_uid, _repo_id, _revision, _both):
614
615
615 repo = self._factory.repo(wire)
616 repo = self._factory.repo(wire)
616 rev = _revision
617 rev = _revision
617 if isinstance(rev, int):
618 if isinstance(rev, int):
618 # NOTE(marcink):
619 # NOTE(marcink):
619 # since Mercurial doesn't support negative indexes properly
620 # since Mercurial doesn't support negative indexes properly
620 # we need to shift accordingly by one to get proper index, e.g
621 # we need to shift accordingly by one to get proper index, e.g
621 # repo[-1] => repo[-2]
622 # repo[-1] => repo[-2]
622 # repo[0] => repo[-1]
623 # repo[0] => repo[-1]
623 if rev <= 0:
624 if rev <= 0:
624 rev = rev + -1
625 rev = rev + -1
625 try:
626 try:
626 ctx = self._get_ctx(repo, rev)
627 ctx = self._get_ctx(repo, rev)
627 except (TypeError, RepoLookupError) as e:
628 except (TypeError, RepoLookupError) as e:
628 e._org_exc_tb = traceback.format_exc()
629 e._org_exc_tb = traceback.format_exc()
629 raise exceptions.LookupException(e)(rev)
630 raise exceptions.LookupException(e)(rev)
630 except LookupError as e:
631 except LookupError as e:
631 e._org_exc_tb = traceback.format_exc()
632 e._org_exc_tb = traceback.format_exc()
632 raise exceptions.LookupException(e)(e.name)
633 raise exceptions.LookupException(e)(e.name)
633
634
634 if not both:
635 if not both:
635 return ctx.hex()
636 return ctx.hex()
636
637
637 ctx = repo[ctx.hex()]
638 ctx = repo[ctx.hex()]
638 return ctx.hex(), ctx.rev()
639 return ctx.hex(), ctx.rev()
639
640
640 return _lookup(context_uid, repo_id, revision, both)
641 return _lookup(context_uid, repo_id, revision, both)
641
642
642 @reraise_safe_exceptions
643 @reraise_safe_exceptions
643 def sync_push(self, wire, url):
644 def sync_push(self, wire, url):
644 if not self.check_url(url, wire['config']):
645 if not self.check_url(url, wire['config']):
645 return
646 return
646
647
647 repo = self._factory.repo(wire)
648 repo = self._factory.repo(wire)
648
649
649 # Disable any prompts for this repo
650 # Disable any prompts for this repo
650 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
651 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
651
652
652 bookmarks = dict(repo._bookmarks).keys()
653 bookmarks = dict(repo._bookmarks).keys()
653 remote = peer(repo, {}, url)
654 remote = peer(repo, {}, url)
654 # Disable any prompts for this remote
655 # Disable any prompts for this remote
655 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
656 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
656
657
657 return exchange.push(
658 return exchange.push(
658 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
659 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
659
660
660 @reraise_safe_exceptions
661 @reraise_safe_exceptions
661 def revision(self, wire, rev):
662 def revision(self, wire, rev):
662 repo = self._factory.repo(wire)
663 repo = self._factory.repo(wire)
663 ctx = self._get_ctx(repo, rev)
664 ctx = self._get_ctx(repo, rev)
664 return ctx.rev()
665 return ctx.rev()
665
666
666 @reraise_safe_exceptions
667 @reraise_safe_exceptions
667 def rev_range(self, wire, commit_filter):
668 def rev_range(self, wire, commit_filter):
668 cache_on, context_uid, repo_id = self._cache_on(wire)
669 cache_on, context_uid, repo_id = self._cache_on(wire)
669
670
670 @self.region.conditional_cache_on_arguments(condition=cache_on)
671 @self.region.conditional_cache_on_arguments(condition=cache_on)
671 def _rev_range(_context_uid, _repo_id, _filter):
672 def _rev_range(_context_uid, _repo_id, _filter):
672 repo = self._factory.repo(wire)
673 repo = self._factory.repo(wire)
673 revisions = [rev for rev in revrange(repo, commit_filter)]
674 revisions = [rev for rev in revrange(repo, commit_filter)]
674 return revisions
675 return revisions
675
676
676 return _rev_range(context_uid, repo_id, sorted(commit_filter))
677 return _rev_range(context_uid, repo_id, sorted(commit_filter))
677
678
678 @reraise_safe_exceptions
679 @reraise_safe_exceptions
679 def rev_range_hash(self, wire, node):
680 def rev_range_hash(self, wire, node):
680 repo = self._factory.repo(wire)
681 repo = self._factory.repo(wire)
681
682
682 def get_revs(repo, rev_opt):
683 def get_revs(repo, rev_opt):
683 if rev_opt:
684 if rev_opt:
684 revs = revrange(repo, rev_opt)
685 revs = revrange(repo, rev_opt)
685 if len(revs) == 0:
686 if len(revs) == 0:
686 return (nullrev, nullrev)
687 return (nullrev, nullrev)
687 return max(revs), min(revs)
688 return max(revs), min(revs)
688 else:
689 else:
689 return len(repo) - 1, 0
690 return len(repo) - 1, 0
690
691
691 stop, start = get_revs(repo, [node + ':'])
692 stop, start = get_revs(repo, [node + ':'])
692 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
693 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
693 return revs
694 return revs
694
695
695 @reraise_safe_exceptions
696 @reraise_safe_exceptions
696 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
697 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
697 other_path = kwargs.pop('other_path', None)
698 other_path = kwargs.pop('other_path', None)
698
699
699 # case when we want to compare two independent repositories
700 # case when we want to compare two independent repositories
700 if other_path and other_path != wire["path"]:
701 if other_path and other_path != wire["path"]:
701 baseui = self._factory._create_config(wire["config"])
702 baseui = self._factory._create_config(wire["config"])
702 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
703 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
703 else:
704 else:
704 repo = self._factory.repo(wire)
705 repo = self._factory.repo(wire)
705 return list(repo.revs(rev_spec, *args))
706 return list(repo.revs(rev_spec, *args))
706
707
707 @reraise_safe_exceptions
708 @reraise_safe_exceptions
708 def verify(self, wire,):
709 def verify(self, wire,):
709 repo = self._factory.repo(wire)
710 repo = self._factory.repo(wire)
710 baseui = self._factory._create_config(wire['config'])
711 baseui = self._factory._create_config(wire['config'])
711
712
712 baseui, output = patch_ui_message_output(baseui)
713 baseui, output = patch_ui_message_output(baseui)
713
714
714 repo.ui = baseui
715 repo.ui = baseui
715 verify.verify(repo)
716 verify.verify(repo)
716 return output.getvalue()
717 return output.getvalue()
717
718
718 @reraise_safe_exceptions
719 @reraise_safe_exceptions
719 def hg_update_cache(self, wire,):
720 def hg_update_cache(self, wire,):
720 repo = self._factory.repo(wire)
721 repo = self._factory.repo(wire)
721 baseui = self._factory._create_config(wire['config'])
722 baseui = self._factory._create_config(wire['config'])
722 baseui, output = patch_ui_message_output(baseui)
723 baseui, output = patch_ui_message_output(baseui)
723
724
724 repo.ui = baseui
725 repo.ui = baseui
725 with repo.wlock(), repo.lock():
726 with repo.wlock(), repo.lock():
726 repo.updatecaches(full=True)
727 repo.updatecaches(full=True)
727
728
728 return output.getvalue()
729 return output.getvalue()
729
730
730 @reraise_safe_exceptions
731 @reraise_safe_exceptions
731 def hg_rebuild_fn_cache(self, wire,):
732 def hg_rebuild_fn_cache(self, wire,):
732 repo = self._factory.repo(wire)
733 repo = self._factory.repo(wire)
733 baseui = self._factory._create_config(wire['config'])
734 baseui = self._factory._create_config(wire['config'])
734 baseui, output = patch_ui_message_output(baseui)
735 baseui, output = patch_ui_message_output(baseui)
735
736
736 repo.ui = baseui
737 repo.ui = baseui
737
738
738 repair.rebuildfncache(baseui, repo)
739 repair.rebuildfncache(baseui, repo)
739
740
740 return output.getvalue()
741 return output.getvalue()
741
742
742 @reraise_safe_exceptions
743 @reraise_safe_exceptions
743 def tags(self, wire):
744 def tags(self, wire):
744 cache_on, context_uid, repo_id = self._cache_on(wire)
745 cache_on, context_uid, repo_id = self._cache_on(wire)
745 @self.region.conditional_cache_on_arguments(condition=cache_on)
746 @self.region.conditional_cache_on_arguments(condition=cache_on)
746 def _tags(_context_uid, _repo_id):
747 def _tags(_context_uid, _repo_id):
747 repo = self._factory.repo(wire)
748 repo = self._factory.repo(wire)
748 return repo.tags()
749 return repo.tags()
749
750
750 return _tags(context_uid, repo_id)
751 return _tags(context_uid, repo_id)
751
752
752 @reraise_safe_exceptions
753 @reraise_safe_exceptions
753 def update(self, wire, node=None, clean=False):
754 def update(self, wire, node=None, clean=False):
754 repo = self._factory.repo(wire)
755 repo = self._factory.repo(wire)
755 baseui = self._factory._create_config(wire['config'])
756 baseui = self._factory._create_config(wire['config'])
756 commands.update(baseui, repo, node=node, clean=clean)
757 commands.update(baseui, repo, node=node, clean=clean)
757
758
758 @reraise_safe_exceptions
759 @reraise_safe_exceptions
759 def identify(self, wire):
760 def identify(self, wire):
760 repo = self._factory.repo(wire)
761 repo = self._factory.repo(wire)
761 baseui = self._factory._create_config(wire['config'])
762 baseui = self._factory._create_config(wire['config'])
762 output = io.BytesIO()
763 output = io.BytesIO()
763 baseui.write = output.write
764 baseui.write = output.write
764 # This is required to get a full node id
765 # This is required to get a full node id
765 baseui.debugflag = True
766 baseui.debugflag = True
766 commands.identify(baseui, repo, id=True)
767 commands.identify(baseui, repo, id=True)
767
768
768 return output.getvalue()
769 return output.getvalue()
769
770
770 @reraise_safe_exceptions
771 @reraise_safe_exceptions
771 def heads(self, wire, branch=None):
772 def heads(self, wire, branch=None):
772 repo = self._factory.repo(wire)
773 repo = self._factory.repo(wire)
773 baseui = self._factory._create_config(wire['config'])
774 baseui = self._factory._create_config(wire['config'])
774 output = io.BytesIO()
775 output = io.BytesIO()
775
776
776 def write(data, **unused_kwargs):
777 def write(data, **unused_kwargs):
777 output.write(data)
778 output.write(data)
778
779
779 baseui.write = write
780 baseui.write = write
780 if branch:
781 if branch:
781 args = [branch]
782 args = [branch]
782 else:
783 else:
783 args = []
784 args = []
784 commands.heads(baseui, repo, template='{node} ', *args)
785 commands.heads(baseui, repo, template='{node} ', *args)
785
786
786 return output.getvalue()
787 return output.getvalue()
787
788
788 @reraise_safe_exceptions
789 @reraise_safe_exceptions
789 def ancestor(self, wire, revision1, revision2):
790 def ancestor(self, wire, revision1, revision2):
790 repo = self._factory.repo(wire)
791 repo = self._factory.repo(wire)
791 changelog = repo.changelog
792 changelog = repo.changelog
792 lookup = repo.lookup
793 lookup = repo.lookup
793 a = changelog.ancestor(lookup(revision1), lookup(revision2))
794 a = changelog.ancestor(lookup(revision1), lookup(revision2))
794 return hex(a)
795 return hex(a)
795
796
796 @reraise_safe_exceptions
797 @reraise_safe_exceptions
797 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
798 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
798 baseui = self._factory._create_config(wire["config"], hooks=hooks)
799 baseui = self._factory._create_config(wire["config"], hooks=hooks)
799 clone(baseui, source, dest, noupdate=not update_after_clone)
800 clone(baseui, source, dest, noupdate=not update_after_clone)
800
801
801 @reraise_safe_exceptions
802 @reraise_safe_exceptions
802 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
803 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
803
804
804 repo = self._factory.repo(wire)
805 repo = self._factory.repo(wire)
805 baseui = self._factory._create_config(wire['config'])
806 baseui = self._factory._create_config(wire['config'])
806 publishing = baseui.configbool('phases', 'publish')
807 publishing = baseui.configbool('phases', 'publish')
807 if publishing:
808 if publishing:
808 new_commit = 'public'
809 new_commit = 'public'
809 else:
810 else:
810 new_commit = 'draft'
811 new_commit = 'draft'
811
812
812 def _filectxfn(_repo, ctx, path):
813 def _filectxfn(_repo, ctx, path):
813 """
814 """
814 Marks given path as added/changed/removed in a given _repo. This is
815 Marks given path as added/changed/removed in a given _repo. This is
815 for internal mercurial commit function.
816 for internal mercurial commit function.
816 """
817 """
817
818
818 # check if this path is removed
819 # check if this path is removed
819 if path in removed:
820 if path in removed:
820 # returning None is a way to mark node for removal
821 # returning None is a way to mark node for removal
821 return None
822 return None
822
823
823 # check if this path is added
824 # check if this path is added
824 for node in updated:
825 for node in updated:
825 if node['path'] == path:
826 if node['path'] == path:
826 return memfilectx(
827 return memfilectx(
827 _repo,
828 _repo,
828 changectx=ctx,
829 changectx=ctx,
829 path=node['path'],
830 path=node['path'],
830 data=node['content'],
831 data=node['content'],
831 islink=False,
832 islink=False,
832 isexec=bool(node['mode'] & stat.S_IXUSR),
833 isexec=bool(node['mode'] & stat.S_IXUSR),
833 copysource=False)
834 copysource=False)
834
835
835 raise exceptions.AbortException()(
836 raise exceptions.AbortException()(
836 "Given path haven't been marked as added, "
837 "Given path haven't been marked as added, "
837 "changed or removed (%s)" % path)
838 "changed or removed (%s)" % path)
838
839
839 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
840 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
840
841
841 commit_ctx = memctx(
842 commit_ctx = memctx(
842 repo=repo,
843 repo=repo,
843 parents=parents,
844 parents=parents,
844 text=message,
845 text=message,
845 files=files,
846 files=files,
846 filectxfn=_filectxfn,
847 filectxfn=_filectxfn,
847 user=user,
848 user=user,
848 date=(commit_time, commit_timezone),
849 date=(commit_time, commit_timezone),
849 extra=extra)
850 extra=extra)
850
851
851 n = repo.commitctx(commit_ctx)
852 n = repo.commitctx(commit_ctx)
852 new_id = hex(n)
853 new_id = hex(n)
853
854
854 return new_id
855 return new_id
855
856
856 @reraise_safe_exceptions
857 @reraise_safe_exceptions
857 def pull(self, wire, url, commit_ids=None):
858 def pull(self, wire, url, commit_ids=None):
858 repo = self._factory.repo(wire)
859 repo = self._factory.repo(wire)
859 # Disable any prompts for this repo
860 # Disable any prompts for this repo
860 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
861 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
861
862
862 remote = peer(repo, {}, url)
863 remote = peer(repo, {}, url)
863 # Disable any prompts for this remote
864 # Disable any prompts for this remote
864 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
865 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
865
866
866 if commit_ids:
867 if commit_ids:
867 commit_ids = [bin(commit_id) for commit_id in commit_ids]
868 commit_ids = [bin(commit_id) for commit_id in commit_ids]
868
869
869 return exchange.pull(
870 return exchange.pull(
870 repo, remote, heads=commit_ids, force=None).cgresult
871 repo, remote, heads=commit_ids, force=None).cgresult
871
872
872 @reraise_safe_exceptions
873 @reraise_safe_exceptions
873 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
874 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
874 repo = self._factory.repo(wire)
875 repo = self._factory.repo(wire)
875 baseui = self._factory._create_config(wire['config'], hooks=hooks)
876 baseui = self._factory._create_config(wire['config'], hooks=hooks)
876
877
877 # Mercurial internally has a lot of logic that checks ONLY if
878 # Mercurial internally has a lot of logic that checks ONLY if
878 # option is defined, we just pass those if they are defined then
879 # option is defined, we just pass those if they are defined then
879 opts = {}
880 opts = {}
880 if bookmark:
881 if bookmark:
881 opts['bookmark'] = bookmark
882 opts['bookmark'] = bookmark
882 if branch:
883 if branch:
883 opts['branch'] = branch
884 opts['branch'] = branch
884 if revision:
885 if revision:
885 opts['rev'] = revision
886 opts['rev'] = revision
886
887
887 commands.pull(baseui, repo, source, **opts)
888 commands.pull(baseui, repo, source, **opts)
888
889
889 @reraise_safe_exceptions
890 @reraise_safe_exceptions
890 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
891 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
891 repo = self._factory.repo(wire)
892 repo = self._factory.repo(wire)
892 baseui = self._factory._create_config(wire['config'], hooks=hooks)
893 baseui = self._factory._create_config(wire['config'], hooks=hooks)
893 commands.push(baseui, repo, dest=dest_path, rev=revisions,
894 commands.push(baseui, repo, dest=dest_path, rev=revisions,
894 new_branch=push_branches)
895 new_branch=push_branches)
895
896
896 @reraise_safe_exceptions
897 @reraise_safe_exceptions
897 def strip(self, wire, revision, update, backup):
898 def strip(self, wire, revision, update, backup):
898 repo = self._factory.repo(wire)
899 repo = self._factory.repo(wire)
899 ctx = self._get_ctx(repo, revision)
900 ctx = self._get_ctx(repo, revision)
900 hgext_strip(
901 hgext_strip(
901 repo.baseui, repo, ctx.node(), update=update, backup=backup)
902 repo.baseui, repo, ctx.node(), update=update, backup=backup)
902
903
903 @reraise_safe_exceptions
904 @reraise_safe_exceptions
904 def get_unresolved_files(self, wire):
905 def get_unresolved_files(self, wire):
905 repo = self._factory.repo(wire)
906 repo = self._factory.repo(wire)
906
907
907 log.debug('Calculating unresolved files for repo: %s', repo)
908 log.debug('Calculating unresolved files for repo: %s', repo)
908 output = io.BytesIO()
909 output = io.BytesIO()
909
910
910 def write(data, **unused_kwargs):
911 def write(data, **unused_kwargs):
911 output.write(data)
912 output.write(data)
912
913
913 baseui = self._factory._create_config(wire['config'])
914 baseui = self._factory._create_config(wire['config'])
914 baseui.write = write
915 baseui.write = write
915
916
916 commands.resolve(baseui, repo, list=True)
917 commands.resolve(baseui, repo, list=True)
917 unresolved = output.getvalue().splitlines(0)
918 unresolved = output.getvalue().splitlines(0)
918 return unresolved
919 return unresolved
919
920
920 @reraise_safe_exceptions
921 @reraise_safe_exceptions
921 def merge(self, wire, revision):
922 def merge(self, wire, revision):
922 repo = self._factory.repo(wire)
923 repo = self._factory.repo(wire)
923 baseui = self._factory._create_config(wire['config'])
924 baseui = self._factory._create_config(wire['config'])
924 repo.ui.setconfig('ui', 'merge', 'internal:dump')
925 repo.ui.setconfig('ui', 'merge', 'internal:dump')
925
926
926 # In case of sub repositories are used mercurial prompts the user in
927 # In case of sub repositories are used mercurial prompts the user in
927 # case of merge conflicts or different sub repository sources. By
928 # case of merge conflicts or different sub repository sources. By
928 # setting the interactive flag to `False` mercurial doesn't prompt the
929 # setting the interactive flag to `False` mercurial doesn't prompt the
929 # used but instead uses a default value.
930 # used but instead uses a default value.
930 repo.ui.setconfig('ui', 'interactive', False)
931 repo.ui.setconfig('ui', 'interactive', False)
931 commands.merge(baseui, repo, rev=revision)
932 commands.merge(baseui, repo, rev=revision)
932
933
933 @reraise_safe_exceptions
934 @reraise_safe_exceptions
934 def merge_state(self, wire):
935 def merge_state(self, wire):
935 repo = self._factory.repo(wire)
936 repo = self._factory.repo(wire)
936 repo.ui.setconfig('ui', 'merge', 'internal:dump')
937 repo.ui.setconfig('ui', 'merge', 'internal:dump')
937
938
938 # In case of sub repositories are used mercurial prompts the user in
939 # In case of sub repositories are used mercurial prompts the user in
939 # case of merge conflicts or different sub repository sources. By
940 # case of merge conflicts or different sub repository sources. By
940 # setting the interactive flag to `False` mercurial doesn't prompt the
941 # setting the interactive flag to `False` mercurial doesn't prompt the
941 # used but instead uses a default value.
942 # used but instead uses a default value.
942 repo.ui.setconfig('ui', 'interactive', False)
943 repo.ui.setconfig('ui', 'interactive', False)
943 ms = hg_merge.mergestate(repo)
944 ms = hg_merge.mergestate(repo)
944 return [x for x in ms.unresolved()]
945 return [x for x in ms.unresolved()]
945
946
946 @reraise_safe_exceptions
947 @reraise_safe_exceptions
947 def commit(self, wire, message, username, close_branch=False):
948 def commit(self, wire, message, username, close_branch=False):
948 repo = self._factory.repo(wire)
949 repo = self._factory.repo(wire)
949 baseui = self._factory._create_config(wire['config'])
950 baseui = self._factory._create_config(wire['config'])
950 repo.ui.setconfig('ui', 'username', username)
951 repo.ui.setconfig('ui', 'username', username)
951 commands.commit(baseui, repo, message=message, close_branch=close_branch)
952 commands.commit(baseui, repo, message=message, close_branch=close_branch)
952
953
953 @reraise_safe_exceptions
954 @reraise_safe_exceptions
954 def rebase(self, wire, source=None, dest=None, abort=False):
955 def rebase(self, wire, source=None, dest=None, abort=False):
955 repo = self._factory.repo(wire)
956 repo = self._factory.repo(wire)
956 baseui = self._factory._create_config(wire['config'])
957 baseui = self._factory._create_config(wire['config'])
957 repo.ui.setconfig('ui', 'merge', 'internal:dump')
958 repo.ui.setconfig('ui', 'merge', 'internal:dump')
958 # In case of sub repositories are used mercurial prompts the user in
959 # In case of sub repositories are used mercurial prompts the user in
959 # case of merge conflicts or different sub repository sources. By
960 # case of merge conflicts or different sub repository sources. By
960 # setting the interactive flag to `False` mercurial doesn't prompt the
961 # setting the interactive flag to `False` mercurial doesn't prompt the
961 # used but instead uses a default value.
962 # used but instead uses a default value.
962 repo.ui.setconfig('ui', 'interactive', False)
963 repo.ui.setconfig('ui', 'interactive', False)
963 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
964 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
964
965
965 @reraise_safe_exceptions
966 @reraise_safe_exceptions
966 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
967 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
967 repo = self._factory.repo(wire)
968 repo = self._factory.repo(wire)
968 ctx = self._get_ctx(repo, revision)
969 ctx = self._get_ctx(repo, revision)
969 node = ctx.node()
970 node = ctx.node()
970
971
971 date = (tag_time, tag_timezone)
972 date = (tag_time, tag_timezone)
972 try:
973 try:
973 hg_tag.tag(repo, name, node, message, local, user, date)
974 hg_tag.tag(repo, name, node, message, local, user, date)
974 except Abort as e:
975 except Abort as e:
975 log.exception("Tag operation aborted")
976 log.exception("Tag operation aborted")
976 # Exception can contain unicode which we convert
977 # Exception can contain unicode which we convert
977 raise exceptions.AbortException(e)(repr(e))
978 raise exceptions.AbortException(e)(repr(e))
978
979
979 @reraise_safe_exceptions
980 @reraise_safe_exceptions
980 def bookmark(self, wire, bookmark, revision=None):
981 def bookmark(self, wire, bookmark, revision=None):
981 repo = self._factory.repo(wire)
982 repo = self._factory.repo(wire)
982 baseui = self._factory._create_config(wire['config'])
983 baseui = self._factory._create_config(wire['config'])
983 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
984 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
984
985
985 @reraise_safe_exceptions
986 @reraise_safe_exceptions
986 def install_hooks(self, wire, force=False):
987 def install_hooks(self, wire, force=False):
987 # we don't need any special hooks for Mercurial
988 # we don't need any special hooks for Mercurial
988 pass
989 pass
989
990
990 @reraise_safe_exceptions
991 @reraise_safe_exceptions
991 def get_hooks_info(self, wire):
992 def get_hooks_info(self, wire):
992 return {
993 return {
993 'pre_version': vcsserver.__version__,
994 'pre_version': vcsserver.__version__,
994 'post_version': vcsserver.__version__,
995 'post_version': vcsserver.__version__,
995 }
996 }
996
997
997 @reraise_safe_exceptions
998 @reraise_safe_exceptions
998 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
999 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
999 archive_dir_name, commit_id):
1000 archive_dir_name, commit_id):
1000
1001
1001 def file_walker(_commit_id, path):
1002 def file_walker(_commit_id, path):
1002 repo = self._factory.repo(wire)
1003 repo = self._factory.repo(wire)
1003 ctx = repo[_commit_id]
1004 ctx = repo[_commit_id]
1004 is_root = path in ['', '/']
1005 is_root = path in ['', '/']
1005 if is_root:
1006 if is_root:
1006 matcher = alwaysmatcher(badfn=None)
1007 matcher = alwaysmatcher(badfn=None)
1007 else:
1008 else:
1008 matcher = patternmatcher('', [(b'glob', path+'/**', b'')], badfn=None)
1009 matcher = patternmatcher('', [(b'glob', path+'/**', b'')], badfn=None)
1009 file_iter = ctx.manifest().walk(matcher)
1010 file_iter = ctx.manifest().walk(matcher)
1010
1011
1011 for fn in file_iter:
1012 for fn in file_iter:
1012 file_path = fn
1013 file_path = fn
1013 flags = ctx.flags(fn)
1014 flags = ctx.flags(fn)
1014 mode = b'x' in flags and 0o755 or 0o644
1015 mode = b'x' in flags and 0o755 or 0o644
1015 is_link = b'l' in flags
1016 is_link = b'l' in flags
1016
1017
1017 yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)
1018 yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)
1018
1019
1019 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1020 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1020 archive_dir_name, commit_id)
1021 archive_dir_name, commit_id)
1021
1022
@@ -1,79 +1,79 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """
18 """
19 Mercurial libs compatibility
19 Mercurial libs compatibility
20 """
20 """
21
21
22 import mercurial
22 import mercurial
23 from mercurial import demandimport
23 from mercurial import demandimport
24 # patch demandimport, due to bug in mercurial when it always triggers
24 # patch demandimport, due to bug in mercurial when it always triggers
25 # demandimport.enable()
25 # demandimport.enable()
26 demandimport.enable = lambda *args, **kwargs: 1
26 demandimport.enable = lambda *args, **kwargs: 1
27
27
28 from mercurial import ui
28 from mercurial import ui
29 from mercurial import patch
29 from mercurial import patch
30 from mercurial import config
30 from mercurial import config
31 from mercurial import extensions
31 from mercurial import extensions
32 from mercurial import scmutil
32 from mercurial import scmutil
33 from mercurial import archival
33 from mercurial import archival
34 from mercurial import discovery
34 from mercurial import discovery
35 from mercurial import unionrepo
35 from mercurial import unionrepo
36 from mercurial import localrepo
36 from mercurial import localrepo
37 from mercurial import merge as hg_merge
37 from mercurial import merge as hg_merge
38 from mercurial import subrepo
38 from mercurial import subrepo
39 from mercurial import subrepoutil
39 from mercurial import subrepoutil
40 from mercurial import tags as hg_tag
40 from mercurial import tags as hg_tag
41 from mercurial import util as hgutil
41 from mercurial import util as hgutil
42 from mercurial.commands import clone, nullid, pull
42 from mercurial.commands import clone, nullid, pull
43 from mercurial.context import memctx, memfilectx
43 from mercurial.context import memctx, memfilectx
44 from mercurial.error import (
44 from mercurial.error import (
45 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
45 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
46 RequirementError, ProgrammingError)
46 RequirementError, ProgrammingError)
47 from mercurial.hgweb import hgweb_mod
47 from mercurial.hgweb import hgweb_mod
48 from mercurial.localrepo import instance
48 from mercurial.localrepo import instance
49 from mercurial.match import match, alwaysmatcher, patternmatcher
49 from mercurial.match import match, alwaysmatcher, patternmatcher
50 from mercurial.mdiff import diffopts
50 from mercurial.mdiff import diffopts
51 from mercurial.node import bin, hex
51 from mercurial.node import bin, hex
52 from mercurial.encoding import tolocal
52 from mercurial.encoding import tolocal
53 from mercurial.discovery import findcommonoutgoing
53 from mercurial.discovery import findcommonoutgoing
54 from mercurial.hg import peer
54 from mercurial.hg import peer
55 from mercurial.httppeer import makepeer
55 from mercurial.httppeer import makepeer
56 from mercurial.util import url as hg_url
56 from mercurial.util import url as hg_url
57 from mercurial.scmutil import revrange, revsymbol
57 from mercurial.scmutil import revrange, revsymbol
58 from mercurial.node import nullrev
58 from mercurial.node import nullrev
59 from mercurial import exchange
59 from mercurial import exchange
60 from hgext import largefiles
60 from hgext import largefiles
61
61
62 # those authnadlers are patched for python 2.6.5 bug an
62 # those authnadlers are patched for python 2.6.5 bug an
63 # infinit looping when given invalid resources
63 # infinit looping when given invalid resources
64 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
64 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
65
65
66
66
67 def get_ctx(repo, ref):
67 def get_ctx(repo, ref):
68 try:
68 try:
69 ctx = repo[ref]
69 ctx = repo[ref]
70 except ProgrammingError:
70 except (ProgrammingError, TypeError):
71 # we're unable to find the rev using a regular lookup, we fallback
71 # we're unable to find the rev using a regular lookup, we fallback
72 # to slower, but backward compat revsymbol usage
72 # to slower, but backward compat revsymbol usage
73 ctx = revsymbol(repo, ref)
73 ctx = revsymbol(repo, ref)
74 except (LookupError, RepoLookupError):
74 except (LookupError, RepoLookupError):
75 # Similar case as above but only for refs that are not numeric
75 # Similar case as above but only for refs that are not numeric
76 if isinstance(ref, (int, long)):
76 if isinstance(ref, (int, long)):
77 raise
77 raise
78 ctx = revsymbol(repo, ref)
78 ctx = revsymbol(repo, ref)
79 return ctx
79 return ctx
@@ -1,704 +1,705 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import sys
19 import sys
20 import base64
20 import base64
21 import locale
21 import locale
22 import logging
22 import logging
23 import uuid
23 import uuid
24 import wsgiref.util
24 import wsgiref.util
25 import traceback
25 import traceback
26 import tempfile
26 import tempfile
27 import psutil
27 import psutil
28 from itertools import chain
28 from itertools import chain
29 from cStringIO import StringIO
29 from cStringIO import StringIO
30
30
31 import simplejson as json
31 import simplejson as json
32 import msgpack
32 import msgpack
33 from pyramid.config import Configurator
33 from pyramid.config import Configurator
34 from pyramid.settings import asbool, aslist
34 from pyramid.settings import asbool, aslist
35 from pyramid.wsgi import wsgiapp
35 from pyramid.wsgi import wsgiapp
36 from pyramid.compat import configparser
36 from pyramid.compat import configparser
37 from pyramid.response import Response
37 from pyramid.response import Response
38
38
39 from vcsserver.utils import safe_int
39 from vcsserver.utils import safe_int
40
40
41 log = logging.getLogger(__name__)
41 log = logging.getLogger(__name__)
42
42
43 # due to Mercurial/glibc2.27 problems we need to detect if locale settings are
43 # due to Mercurial/glibc2.27 problems we need to detect if locale settings are
44 # causing problems and "fix" it in case they do and fallback to LC_ALL = C
44 # causing problems and "fix" it in case they do and fallback to LC_ALL = C
45
45
46 try:
46 try:
47 locale.setlocale(locale.LC_ALL, '')
47 locale.setlocale(locale.LC_ALL, '')
48 except locale.Error as e:
48 except locale.Error as e:
49 log.error(
49 log.error(
50 'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
50 'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
51 os.environ['LC_ALL'] = 'C'
51 os.environ['LC_ALL'] = 'C'
52
52
53 import vcsserver
53 import vcsserver
54 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
54 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
55 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
55 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
56 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
56 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
57 from vcsserver.echo_stub.echo_app import EchoApp
57 from vcsserver.echo_stub.echo_app import EchoApp
58 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
58 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
59 from vcsserver.lib.exc_tracking import store_exception
59 from vcsserver.lib.exc_tracking import store_exception
60 from vcsserver.server import VcsServer
60 from vcsserver.server import VcsServer
61
61
62 try:
62 try:
63 from vcsserver.git import GitFactory, GitRemote
63 from vcsserver.git import GitFactory, GitRemote
64 except ImportError:
64 except ImportError:
65 GitFactory = None
65 GitFactory = None
66 GitRemote = None
66 GitRemote = None
67
67
68 try:
68 try:
69 from vcsserver.hg import MercurialFactory, HgRemote
69 from vcsserver.hg import MercurialFactory, HgRemote
70 except ImportError:
70 except ImportError:
71 MercurialFactory = None
71 MercurialFactory = None
72 HgRemote = None
72 HgRemote = None
73
73
74 try:
74 try:
75 from vcsserver.svn import SubversionFactory, SvnRemote
75 from vcsserver.svn import SubversionFactory, SvnRemote
76 except ImportError:
76 except ImportError:
77 SubversionFactory = None
77 SubversionFactory = None
78 SvnRemote = None
78 SvnRemote = None
79
79
80
80
81 def _is_request_chunked(environ):
81 def _is_request_chunked(environ):
82 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
82 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
83 return stream
83 return stream
84
84
85
85
86 def _int_setting(settings, name, default):
86 def _int_setting(settings, name, default):
87 settings[name] = int(settings.get(name, default))
87 settings[name] = int(settings.get(name, default))
88 return settings[name]
88 return settings[name]
89
89
90
90
91 def _bool_setting(settings, name, default):
91 def _bool_setting(settings, name, default):
92 input_val = settings.get(name, default)
92 input_val = settings.get(name, default)
93 if isinstance(input_val, unicode):
93 if isinstance(input_val, unicode):
94 input_val = input_val.encode('utf8')
94 input_val = input_val.encode('utf8')
95 settings[name] = asbool(input_val)
95 settings[name] = asbool(input_val)
96 return settings[name]
96 return settings[name]
97
97
98
98
99 def _list_setting(settings, name, default):
99 def _list_setting(settings, name, default):
100 raw_value = settings.get(name, default)
100 raw_value = settings.get(name, default)
101
101
102 # Otherwise we assume it uses pyramids space/newline separation.
102 # Otherwise we assume it uses pyramids space/newline separation.
103 settings[name] = aslist(raw_value)
103 settings[name] = aslist(raw_value)
104 return settings[name]
104 return settings[name]
105
105
106
106
107 def _string_setting(settings, name, default, lower=True, default_when_empty=False):
107 def _string_setting(settings, name, default, lower=True, default_when_empty=False):
108 value = settings.get(name, default)
108 value = settings.get(name, default)
109
109
110 if default_when_empty and not value:
110 if default_when_empty and not value:
111 # use default value when value is empty
111 # use default value when value is empty
112 value = default
112 value = default
113
113
114 if lower:
114 if lower:
115 value = value.lower()
115 value = value.lower()
116 settings[name] = value
116 settings[name] = value
117 return settings[name]
117 return settings[name]
118
118
119
119
120 def log_max_fd():
120 def log_max_fd():
121 try:
121 try:
122 maxfd = psutil.Process().rlimit(psutil.RLIMIT_NOFILE)[1]
122 maxfd = psutil.Process().rlimit(psutil.RLIMIT_NOFILE)[1]
123 log.info('Max file descriptors value: %s', maxfd)
123 log.info('Max file descriptors value: %s', maxfd)
124 except Exception:
124 except Exception:
125 pass
125 pass
126
126
127
127
128 class VCS(object):
128 class VCS(object):
129 def __init__(self, locale_conf=None, cache_config=None):
129 def __init__(self, locale_conf=None, cache_config=None):
130 self.locale = locale_conf
130 self.locale = locale_conf
131 self.cache_config = cache_config
131 self.cache_config = cache_config
132 self._configure_locale()
132 self._configure_locale()
133
133
134 log_max_fd()
134 log_max_fd()
135
135
136 if GitFactory and GitRemote:
136 if GitFactory and GitRemote:
137 git_factory = GitFactory()
137 git_factory = GitFactory()
138 self._git_remote = GitRemote(git_factory)
138 self._git_remote = GitRemote(git_factory)
139 else:
139 else:
140 log.info("Git client import failed")
140 log.info("Git client import failed")
141
141
142 if MercurialFactory and HgRemote:
142 if MercurialFactory and HgRemote:
143 hg_factory = MercurialFactory()
143 hg_factory = MercurialFactory()
144 self._hg_remote = HgRemote(hg_factory)
144 self._hg_remote = HgRemote(hg_factory)
145 else:
145 else:
146 log.info("Mercurial client import failed")
146 log.info("Mercurial client import failed")
147
147
148 if SubversionFactory and SvnRemote:
148 if SubversionFactory and SvnRemote:
149 svn_factory = SubversionFactory()
149 svn_factory = SubversionFactory()
150
150
151 # hg factory is used for svn url validation
151 # hg factory is used for svn url validation
152 hg_factory = MercurialFactory()
152 hg_factory = MercurialFactory()
153 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
153 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
154 else:
154 else:
155 log.info("Subversion client import failed")
155 log.info("Subversion client import failed")
156
156
157 self._vcsserver = VcsServer()
157 self._vcsserver = VcsServer()
158
158
159 def _configure_locale(self):
159 def _configure_locale(self):
160 if self.locale:
160 if self.locale:
161 log.info('Settings locale: `LC_ALL` to %s', self.locale)
161 log.info('Settings locale: `LC_ALL` to %s', self.locale)
162 else:
162 else:
163 log.info(
163 log.info(
164 'Configuring locale subsystem based on environment variables')
164 'Configuring locale subsystem based on environment variables')
165 try:
165 try:
166 # If self.locale is the empty string, then the locale
166 # If self.locale is the empty string, then the locale
167 # module will use the environment variables. See the
167 # module will use the environment variables. See the
168 # documentation of the package `locale`.
168 # documentation of the package `locale`.
169 locale.setlocale(locale.LC_ALL, self.locale)
169 locale.setlocale(locale.LC_ALL, self.locale)
170
170
171 language_code, encoding = locale.getlocale()
171 language_code, encoding = locale.getlocale()
172 log.info(
172 log.info(
173 'Locale set to language code "%s" with encoding "%s".',
173 'Locale set to language code "%s" with encoding "%s".',
174 language_code, encoding)
174 language_code, encoding)
175 except locale.Error:
175 except locale.Error:
176 log.exception(
176 log.exception(
177 'Cannot set locale, not configuring the locale system')
177 'Cannot set locale, not configuring the locale system')
178
178
179
179
180 class WsgiProxy(object):
180 class WsgiProxy(object):
181 def __init__(self, wsgi):
181 def __init__(self, wsgi):
182 self.wsgi = wsgi
182 self.wsgi = wsgi
183
183
184 def __call__(self, environ, start_response):
184 def __call__(self, environ, start_response):
185 input_data = environ['wsgi.input'].read()
185 input_data = environ['wsgi.input'].read()
186 input_data = msgpack.unpackb(input_data)
186 input_data = msgpack.unpackb(input_data)
187
187
188 error = None
188 error = None
189 try:
189 try:
190 data, status, headers = self.wsgi.handle(
190 data, status, headers = self.wsgi.handle(
191 input_data['environment'], input_data['input_data'],
191 input_data['environment'], input_data['input_data'],
192 *input_data['args'], **input_data['kwargs'])
192 *input_data['args'], **input_data['kwargs'])
193 except Exception as e:
193 except Exception as e:
194 data, status, headers = [], None, None
194 data, status, headers = [], None, None
195 error = {
195 error = {
196 'message': str(e),
196 'message': str(e),
197 '_vcs_kind': getattr(e, '_vcs_kind', None)
197 '_vcs_kind': getattr(e, '_vcs_kind', None)
198 }
198 }
199
199
200 start_response(200, {})
200 start_response(200, {})
201 return self._iterator(error, status, headers, data)
201 return self._iterator(error, status, headers, data)
202
202
203 def _iterator(self, error, status, headers, data):
203 def _iterator(self, error, status, headers, data):
204 initial_data = [
204 initial_data = [
205 error,
205 error,
206 status,
206 status,
207 headers,
207 headers,
208 ]
208 ]
209
209
210 for d in chain(initial_data, data):
210 for d in chain(initial_data, data):
211 yield msgpack.packb(d)
211 yield msgpack.packb(d)
212
212
213
213
214 def not_found(request):
214 def not_found(request):
215 return {'status': '404 NOT FOUND'}
215 return {'status': '404 NOT FOUND'}
216
216
217
217
218 class VCSViewPredicate(object):
218 class VCSViewPredicate(object):
219 def __init__(self, val, config):
219 def __init__(self, val, config):
220 self.remotes = val
220 self.remotes = val
221
221
222 def text(self):
222 def text(self):
223 return 'vcs view method = %s' % (self.remotes.keys(),)
223 return 'vcs view method = %s' % (self.remotes.keys(),)
224
224
225 phash = text
225 phash = text
226
226
227 def __call__(self, context, request):
227 def __call__(self, context, request):
228 """
228 """
229 View predicate that returns true if given backend is supported by
229 View predicate that returns true if given backend is supported by
230 defined remotes.
230 defined remotes.
231 """
231 """
232 backend = request.matchdict.get('backend')
232 backend = request.matchdict.get('backend')
233 return backend in self.remotes
233 return backend in self.remotes
234
234
235
235
236 class HTTPApplication(object):
236 class HTTPApplication(object):
237 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
237 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
238
238
239 remote_wsgi = remote_wsgi
239 remote_wsgi = remote_wsgi
240 _use_echo_app = False
240 _use_echo_app = False
241
241
242 def __init__(self, settings=None, global_config=None):
242 def __init__(self, settings=None, global_config=None):
243 self._sanitize_settings_and_apply_defaults(settings)
243 self._sanitize_settings_and_apply_defaults(settings)
244
244
245 self.config = Configurator(settings=settings)
245 self.config = Configurator(settings=settings)
246 self.global_config = global_config
246 self.global_config = global_config
247 self.config.include('vcsserver.lib.rc_cache')
247 self.config.include('vcsserver.lib.rc_cache')
248
248
249 settings_locale = settings.get('locale', '') or 'en_US.UTF-8'
249 settings_locale = settings.get('locale', '') or 'en_US.UTF-8'
250 vcs = VCS(locale_conf=settings_locale, cache_config=settings)
250 vcs = VCS(locale_conf=settings_locale, cache_config=settings)
251 self._remotes = {
251 self._remotes = {
252 'hg': vcs._hg_remote,
252 'hg': vcs._hg_remote,
253 'git': vcs._git_remote,
253 'git': vcs._git_remote,
254 'svn': vcs._svn_remote,
254 'svn': vcs._svn_remote,
255 'server': vcs._vcsserver,
255 'server': vcs._vcsserver,
256 }
256 }
257 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
257 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
258 self._use_echo_app = True
258 self._use_echo_app = True
259 log.warning("Using EchoApp for VCS operations.")
259 log.warning("Using EchoApp for VCS operations.")
260 self.remote_wsgi = remote_wsgi_stub
260 self.remote_wsgi = remote_wsgi_stub
261
261
262 self._configure_settings(global_config, settings)
262 self._configure_settings(global_config, settings)
263
263
264 self._configure()
264 self._configure()
265
265
266 def _configure_settings(self, global_config, app_settings):
266 def _configure_settings(self, global_config, app_settings):
267 """
267 """
268 Configure the settings module.
268 Configure the settings module.
269 """
269 """
270 settings_merged = global_config.copy()
270 settings_merged = global_config.copy()
271 settings_merged.update(app_settings)
271 settings_merged.update(app_settings)
272
272
273 git_path = app_settings.get('git_path', None)
273 git_path = app_settings.get('git_path', None)
274 if git_path:
274 if git_path:
275 settings.GIT_EXECUTABLE = git_path
275 settings.GIT_EXECUTABLE = git_path
276 binary_dir = app_settings.get('core.binary_dir', None)
276 binary_dir = app_settings.get('core.binary_dir', None)
277 if binary_dir:
277 if binary_dir:
278 settings.BINARY_DIR = binary_dir
278 settings.BINARY_DIR = binary_dir
279
279
280 # Store the settings to make them available to other modules.
280 # Store the settings to make them available to other modules.
281 vcsserver.PYRAMID_SETTINGS = settings_merged
281 vcsserver.PYRAMID_SETTINGS = settings_merged
282 vcsserver.CONFIG = settings_merged
282 vcsserver.CONFIG = settings_merged
283
283
284 def _sanitize_settings_and_apply_defaults(self, settings):
284 def _sanitize_settings_and_apply_defaults(self, settings):
285 temp_store = tempfile.gettempdir()
285 temp_store = tempfile.gettempdir()
286 default_cache_dir = os.path.join(temp_store, 'rc_cache')
286 default_cache_dir = os.path.join(temp_store, 'rc_cache')
287
287
288 # save default, cache dir, and use it for all backends later.
288 # save default, cache dir, and use it for all backends later.
289 default_cache_dir = _string_setting(
289 default_cache_dir = _string_setting(
290 settings,
290 settings,
291 'cache_dir',
291 'cache_dir',
292 default_cache_dir, lower=False, default_when_empty=True)
292 default_cache_dir, lower=False, default_when_empty=True)
293
293
294 # ensure we have our dir created
294 # ensure we have our dir created
295 if not os.path.isdir(default_cache_dir):
295 if not os.path.isdir(default_cache_dir):
296 os.makedirs(default_cache_dir, mode=0o755)
296 os.makedirs(default_cache_dir, mode=0o755)
297
297
298 # exception store cache
298 # exception store cache
299 _string_setting(
299 _string_setting(
300 settings,
300 settings,
301 'exception_tracker.store_path',
301 'exception_tracker.store_path',
302 temp_store, lower=False, default_when_empty=True)
302 temp_store, lower=False, default_when_empty=True)
303
303
304 # repo_object cache
304 # repo_object cache
305 _string_setting(
305 _string_setting(
306 settings,
306 settings,
307 'rc_cache.repo_object.backend',
307 'rc_cache.repo_object.backend',
308 'dogpile.cache.rc.file_namespace', lower=False)
308 'dogpile.cache.rc.file_namespace', lower=False)
309 _int_setting(
309 _int_setting(
310 settings,
310 settings,
311 'rc_cache.repo_object.expiration_time',
311 'rc_cache.repo_object.expiration_time',
312 30 * 24 * 60 * 60)
312 30 * 24 * 60 * 60)
313 _string_setting(
313 _string_setting(
314 settings,
314 settings,
315 'rc_cache.repo_object.arguments.filename',
315 'rc_cache.repo_object.arguments.filename',
316 os.path.join(default_cache_dir, 'vcsserver_cache_1'), lower=False)
316 os.path.join(default_cache_dir, 'vcsserver_cache_1'), lower=False)
317
317
    def _configure(self):
        """Wire up renderers, routes, views, predicates and tweens on the
        Pyramid configurator for the VCSServer HTTP application."""
        # custom renderer packing view results with msgpack
        self.config.add_renderer(name='msgpack', factory=self._msgpack_renderer_factory)

        self.config.add_route('service', '/_service')
        self.config.add_route('status', '/status')
        self.config.add_route('hg_proxy', '/proxy/hg')
        self.config.add_route('git_proxy', '/proxy/git')

        # rpc methods
        self.config.add_route('vcs', '/{backend}')

        # streaming rpc remote methods
        self.config.add_route('vcs_stream', '/{backend}/stream')

        # vcs operations clone/push as streaming
        self.config.add_route('stream_git', '/stream/git/*repo_name')
        self.config.add_route('stream_hg', '/stream/hg/*repo_name')

        self.config.add_view(self.status_view, route_name='status', renderer='json')
        self.config.add_view(self.service_view, route_name='service', renderer='msgpack')

        self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
        self.config.add_view(self.git_proxy(), route_name='git_proxy')
        self.config.add_view(self.vcs_view, route_name='vcs', renderer='msgpack',
                             vcs_view=self._remotes)
        self.config.add_view(self.vcs_stream_view, route_name='vcs_stream',
                             vcs_view=self._remotes)

        self.config.add_view(self.hg_stream(), route_name='stream_hg')
        self.config.add_view(self.git_stream(), route_name='stream_git')

        # custom view predicate consumed by the 'vcs' views registered above
        self.config.add_view_predicate('vcs_view', VCSViewPredicate)

        self.config.add_notfound_view(not_found, renderer='json')

        # catch-all handler turning unexpected exceptions into responses
        self.config.add_view(self.handle_vcs_exception, context=Exception)

        self.config.add_tween(
            'vcsserver.tweens.request_wrapper.RequestWrapperTween',
        )
        self.config.add_request_method(
            'vcsserver.lib.request_counter.get_request_counter',
            'request_count')

        # statsd client attached lazily (reify) to each request
        self.config.add_request_method(
            'vcsserver.lib._vendor.statsd.get_statsd_client',
            'statsd', reify=True)
365
365
366 def wsgi_app(self):
366 def wsgi_app(self):
367 return self.config.make_wsgi_app()
367 return self.config.make_wsgi_app()
368
368
    def _vcs_view_params(self, request):
        """Unpack a msgpack RPC request body into call parameters.

        :return: tuple ``(payload, remote, method, args, kwargs)`` where
            ``remote`` is the backend selected by the route's ``{backend}``
            matchdict entry.
        """
        remote = self._remotes[request.matchdict['backend']]
        payload = msgpack.unpackb(request.body, use_list=True)
        method = payload.get('method')
        params = payload['params']
        wire = params.get('wire')
        args = params.get('args')
        kwargs = params.get('kwargs')
        context_uid = None

        if wire:
            try:
                # normalize the wire context into a uuid.UUID instance
                wire['context'] = context_uid = uuid.UUID(wire['context'])
            except KeyError:
                pass
            # wire metadata is always prepended as the first positional arg
            args.insert(0, wire)
        repo_state_uid = wire.get('repo_state_uid') if wire else None

        # NOTE(marcink): trading complexity for slight performance
        if log.isEnabledFor(logging.DEBUG):
            # methods listed here have their args suppressed in debug output
            no_args_methods = [

            ]
            if method in no_args_methods:
                call_args = ''
            else:
                # skip args[0] (the wire dict) in the log line
                call_args = args[1:]

            log.debug('method requested:%s with args:%s kwargs:%s context_uid: %s, repo_state_uid:%s',
                      method, call_args, kwargs, context_uid, repo_state_uid)

        return payload, remote, method, args, kwargs
401
401
    def vcs_view(self, request):
        """Dispatch a msgpack RPC call to the selected backend remote.

        On success returns ``{'id': ..., 'result': ...}``; on failure returns
        ``{'id': ..., 'error': {...}}`` carrying traceback details, and the
        exception is persisted to the exception store unless it is an
        expected lookup failure.
        """
        payload, remote, method, args, kwargs = self._vcs_view_params(request)
        payload_id = payload.get('id')

        try:
            resp = getattr(remote, method)(*args, **kwargs)
        except Exception as e:
            exc_info = list(sys.exc_info())
            exc_type, exc_value, exc_traceback = exc_info

            # remotes may attach the original (source) exception on a wrapper
            org_exc = getattr(e, '_org_exc', None)
            org_exc_name = None
            org_exc_tb = ''
            if org_exc:
                org_exc_name = org_exc.__class__.__name__
                org_exc_tb = getattr(e, '_org_exc_tb', '')
                # replace our "faked" exception with our org
                exc_info[0] = org_exc.__class__
                exc_info[1] = org_exc

            should_store_exc = True
            if org_exc:
                def get_exc_fqn(_exc_obj):
                    # fully qualified dotted name of the original exception
                    module_name = getattr(org_exc.__class__, '__module__', 'UNKNOWN')
                    return module_name + '.' + org_exc_name

                exc_fqn = get_exc_fqn(org_exc)

                # expected lookup failures are noisy but harmless; skip storing
                if exc_fqn in ['mercurial.error.RepoLookupError',
                               'vcsserver.exceptions.RefNotFoundException']:
                    should_store_exc = False

            if should_store_exc:
                store_exception(id(exc_info), exc_info, request_path=request.path)

            tb_info = ''.join(
                traceback.format_exception(exc_type, exc_value, exc_traceback))

            # only expose whitelisted exception type names to the caller
            type_ = e.__class__.__name__
            if type_ not in self.ALLOWED_EXCEPTIONS:
                type_ = None

            resp = {
                'id': payload_id,
                'error': {
                    'message': e.message,
                    'traceback': tb_info,
                    'org_exc': org_exc_name,
                    'org_exc_tb': org_exc_tb,
                    'type': type_
                }
            }

            try:
                resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
            except AttributeError:
                pass
        else:
            resp = {
                'id': payload_id,
                'result': resp
            }

        return resp
466
467
467 def vcs_stream_view(self, request):
468 def vcs_stream_view(self, request):
468 payload, remote, method, args, kwargs = self._vcs_view_params(request)
469 payload, remote, method, args, kwargs = self._vcs_view_params(request)
469 # this method has a stream: marker we remove it here
470 # this method has a stream: marker we remove it here
470 method = method.split('stream:')[-1]
471 method = method.split('stream:')[-1]
471 chunk_size = safe_int(payload.get('chunk_size')) or 4096
472 chunk_size = safe_int(payload.get('chunk_size')) or 4096
472
473
473 try:
474 try:
474 resp = getattr(remote, method)(*args, **kwargs)
475 resp = getattr(remote, method)(*args, **kwargs)
475 except Exception as e:
476 except Exception as e:
476 raise
477 raise
477
478
478 def get_chunked_data(method_resp):
479 def get_chunked_data(method_resp):
479 stream = StringIO(method_resp)
480 stream = StringIO(method_resp)
480 while 1:
481 while 1:
481 chunk = stream.read(chunk_size)
482 chunk = stream.read(chunk_size)
482 if not chunk:
483 if not chunk:
483 break
484 break
484 yield chunk
485 yield chunk
485
486
486 response = Response(app_iter=get_chunked_data(resp))
487 response = Response(app_iter=get_chunked_data(resp))
487 response.content_type = 'application/octet-stream'
488 response.content_type = 'application/octet-stream'
488
489
489 return response
490 return response
490
491
491 def status_view(self, request):
492 def status_view(self, request):
492 import vcsserver
493 import vcsserver
493 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
494 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
494 'pid': os.getpid()}
495 'pid': os.getpid()}
495
496
    def service_view(self, request):
        """Introspection endpoint: echo the payload together with server
        version, parsed ini configuration and the process environment."""
        import vcsserver

        payload = msgpack.unpackb(request.body, use_list=True)
        server_config, app_config = {}, {}

        try:
            # parse the ini file this server was started with
            path = self.global_config['__file__']
            config = configparser.RawConfigParser()

            config.read(path)

            if config.has_section('server:main'):
                server_config = dict(config.items('server:main'))
            if config.has_section('app:main'):
                app_config = dict(config.items('app:main'))

        except Exception:
            # best-effort only: a missing/broken ini must not break the view
            log.exception('Failed to read .ini file for display')

        environ = os.environ.items()

        resp = {
            'id': payload.get('id'),
            'result': dict(
                version=vcsserver.__version__,
                config=server_config,
                app_config=app_config,
                environ=environ,
                payload=payload,
            )
        }
        return resp
529
530
530 def _msgpack_renderer_factory(self, info):
531 def _msgpack_renderer_factory(self, info):
531 def _render(value, system):
532 def _render(value, system):
532 request = system.get('request')
533 request = system.get('request')
533 if request is not None:
534 if request is not None:
534 response = request.response
535 response = request.response
535 ct = response.content_type
536 ct = response.content_type
536 if ct == response.default_content_type:
537 if ct == response.default_content_type:
537 response.content_type = 'application/x-msgpack'
538 response.content_type = 'application/x-msgpack'
538 return msgpack.packb(value)
539 return msgpack.packb(value)
539 return _render
540 return _render
540
541
    def set_env_from_config(self, environ, config):
        """Copy RhodeCode-provided metadata from the repo config into the
        WSGI environ (remote user/host, chunked-input compat flag).

        ``config`` is iterated as (section, option, value) entries; the
        json-encoded value of the 'rhodecode' entry is used.
        """
        dict_conf = {}
        try:
            for elem in config:
                if elem[0] == 'rhodecode':
                    dict_conf = json.loads(elem[2])
                    break
        except Exception:
            # best-effort: a broken config only disables the env enrichment
            log.exception('Failed to fetch SCM CONFIG')
            return

        username = dict_conf.get('username')
        if username:
            environ['REMOTE_USER'] = username
            # mercurial specific, some extension api rely on this
            environ['HGUSER'] = username

        ip = dict_conf.get('ip')
        if ip:
            environ['REMOTE_HOST'] = ip

        if _is_request_chunked(environ):
            # set the compatibility flag for webob
            environ['wsgi.input_terminated'] = True
565
566
    def hg_proxy(self):
        """Return a WSGI app proxying requests to the Mercurial remote."""
        @wsgiapp
        def _hg_proxy(environ, start_response):
            app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
            return app(environ, start_response)
        return _hg_proxy
572
573
    def git_proxy(self):
        """Return a WSGI app proxying requests to the Git remote."""
        @wsgiapp
        def _git_proxy(environ, start_response):
            app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
            return app(environ, start_response)
        return _git_proxy
579
580
    def hg_stream(self):
        """Return a WSGI app serving hg clone/push over streaming HTTP.

        When ``_use_echo_app`` is set a lightweight echo app is returned
        instead -- presumably for tests; confirm against callers.
        """
        if self._use_echo_app:
            @wsgiapp
            def _hg_stream(environ, start_response):
                app = EchoApp('fake_path', 'fake_name', None)
                return app(environ, start_response)
            return _hg_stream
        else:
            @wsgiapp
            def _hg_stream(environ, start_response):
                log.debug('http-app: handling hg stream')
                # repo location/name and packed config arrive via X-RC headers
                repo_path = environ['HTTP_X_RC_REPO_PATH']
                repo_name = environ['HTTP_X_RC_REPO_NAME']
                packed_config = base64.b64decode(
                    environ['HTTP_X_RC_REPO_CONFIG'])
                config = msgpack.unpackb(packed_config)
                app = scm_app.create_hg_wsgi_app(
                    repo_path, repo_name, config)

                # Consistent path information for hgweb
                environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
                environ['REPO_NAME'] = repo_name
                self.set_env_from_config(environ, config)

                log.debug('http-app: starting app handler '
                          'with %s and process request', app)
                # ResponseFilter strips hop-by-hop headers from the reply
                return app(environ, ResponseFilter(start_response))
            return _hg_stream
608
609
    def git_stream(self):
        """Return a WSGI app serving git (and git-lfs) over streaming HTTP.

        When ``_use_echo_app`` is set a lightweight echo app is returned
        instead -- presumably for tests; confirm against callers.
        """
        if self._use_echo_app:
            @wsgiapp
            def _git_stream(environ, start_response):
                app = EchoApp('fake_path', 'fake_name', None)
                return app(environ, start_response)
            return _git_stream
        else:
            @wsgiapp
            def _git_stream(environ, start_response):
                log.debug('http-app: handling git stream')
                # repo location/name and packed config arrive via X-RC headers
                repo_path = environ['HTTP_X_RC_REPO_PATH']
                repo_name = environ['HTTP_X_RC_REPO_NAME']
                packed_config = base64.b64decode(
                    environ['HTTP_X_RC_REPO_CONFIG'])
                config = msgpack.unpackb(packed_config)

                environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
                self.set_env_from_config(environ, config)

                content_type = environ.get('CONTENT_TYPE', '')

                path = environ['PATH_INFO']
                # primary LFS detection is based on the request content type
                is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
                log.debug(
                    'LFS: Detecting if request `%s` is LFS server path based '
                    'on content type:`%s`, is_lfs:%s',
                    path, content_type, is_lfs_request)

                if not is_lfs_request:
                    # fallback detection by path
                    if GIT_LFS_PROTO_PAT.match(path):
                        is_lfs_request = True
                    log.debug(
                        'LFS: fallback detection by path of: `%s`, is_lfs:%s',
                        path, is_lfs_request)

                if is_lfs_request:
                    app = scm_app.create_git_lfs_wsgi_app(
                        repo_path, repo_name, config)
                else:
                    app = scm_app.create_git_wsgi_app(
                        repo_path, repo_name, config)

                log.debug('http-app: starting app handler '
                          'with %s and process request', app)

                return app(environ, start_response)

            return _git_stream
659
660
660 def handle_vcs_exception(self, exception, request):
661 def handle_vcs_exception(self, exception, request):
661 _vcs_kind = getattr(exception, '_vcs_kind', '')
662 _vcs_kind = getattr(exception, '_vcs_kind', '')
662 if _vcs_kind == 'repo_locked':
663 if _vcs_kind == 'repo_locked':
663 # Get custom repo-locked status code if present.
664 # Get custom repo-locked status code if present.
664 status_code = request.headers.get('X-RC-Locked-Status-Code')
665 status_code = request.headers.get('X-RC-Locked-Status-Code')
665 return HTTPRepoLocked(
666 return HTTPRepoLocked(
666 title=exception.message, status_code=status_code)
667 title=exception.message, status_code=status_code)
667
668
668 elif _vcs_kind == 'repo_branch_protected':
669 elif _vcs_kind == 'repo_branch_protected':
669 # Get custom repo-branch-protected status code if present.
670 # Get custom repo-branch-protected status code if present.
670 return HTTPRepoBranchProtected(title=exception.message)
671 return HTTPRepoBranchProtected(title=exception.message)
671
672
672 exc_info = request.exc_info
673 exc_info = request.exc_info
673 store_exception(id(exc_info), exc_info)
674 store_exception(id(exc_info), exc_info)
674
675
675 traceback_info = 'unavailable'
676 traceback_info = 'unavailable'
676 if request.exc_info:
677 if request.exc_info:
677 exc_type, exc_value, exc_tb = request.exc_info
678 exc_type, exc_value, exc_tb = request.exc_info
678 traceback_info = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))
679 traceback_info = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))
679
680
680 log.error(
681 log.error(
681 'error occurred handling this request for path: %s, \n tb: %s',
682 'error occurred handling this request for path: %s, \n tb: %s',
682 request.path, traceback_info)
683 request.path, traceback_info)
683 raise exception
684 raise exception
684
685
685
686
class ResponseFilter(object):
    """Wrap a WSGI ``start_response`` callable and drop hop-by-hop headers
    (Connection, Keep-Alive, ...) which gateways must not forward."""

    def __init__(self, start_response):
        self._start_response = start_response

    def __call__(self, status, response_headers, exc_info=None):
        filtered = []
        for name, value in response_headers:
            # wsgiref knows the canonical hop-by-hop header set
            if wsgiref.util.is_hop_by_hop(name):
                continue
            filtered.append((name, value))
        return self._start_response(status, tuple(filtered), exc_info)
696
697
697
698
def main(global_config, **settings):
    """PasteDeploy entry point: build and return the VCSServer WSGI app."""
    if MercurialFactory:
        # apply mercurial monkey patches only when hg support is available
        hgpatches.patch_largefiles_capabilities()
        hgpatches.patch_subrepo_type_mapping()

    app = HTTPApplication(settings=settings, global_config=global_config)
    return app.wsgi_app()
@@ -1,169 +1,175 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2020 RhodeCode GmbH
4 # Copyright (C) 2014-2020 RhodeCode GmbH
5 #
5 #
6 # This program is free software; you can redistribute it and/or modify
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
9 # (at your option) any later version.
10 #
10 #
11 # This program is distributed in the hope that it will be useful,
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
14 # GNU General Public License for more details.
15 #
15 #
16 # You should have received a copy of the GNU General Public License
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
19
20
20
21 import os
21 import os
22 import time
22 import time
23 import datetime
23 import datetime
24 import msgpack
24 import msgpack
25 import logging
25 import logging
26 import traceback
26 import traceback
27 import tempfile
27 import tempfile
28
28
29 from pyramid import compat
29 from pyramid import compat
30
30
log = logging.getLogger(__name__)

# NOTE: Any changes should be synced with exc_tracking at rhodecode.lib.exc_tracking
# prefix embedded in stored exception file names, identifies the originating app
global_prefix = 'vcsserver'
# directory (under the configured store path or the tempdir) holding exceptions
exc_store_dir_name = 'rc_exception_store_v1'
36
36
37
37
def exc_serialize(exc_id, tb, exc_type):
    """Serialize exception metadata with msgpack.

    :return: tuple of (packed_bytes, original_dict).
    """
    data = dict(
        version='v1',
        exc_id=exc_id,
        exc_utc_date=datetime.datetime.utcnow().isoformat(),
        exc_timestamp=repr(time.time()),
        exc_message=tb,
        exc_type=exc_type,
    )
    return msgpack.packb(data), data
49
49
50
50
def exc_unserialize(tb):
    """Inverse of :func:`exc_serialize`: unpack stored exception bytes."""
    unpacked = msgpack.unpackb(tb)
    return unpacked
53
53
54
54
def get_exc_store():
    """
    Get and create exception store if it's not existing
    """
    import vcsserver as app

    # store path comes from the ini setting, falling back to the system tempdir
    exc_store_dir = app.CONFIG.get('exception_tracker.store_path', '') or tempfile.gettempdir()
    _exc_store_path = os.path.join(exc_store_dir, exc_store_dir_name)

    _exc_store_path = os.path.abspath(_exc_store_path)
    if not os.path.isdir(_exc_store_path):
        os.makedirs(_exc_store_path)
        log.debug('Initializing exceptions store at %s', _exc_store_path)
    return _exc_store_path
69
69
70
70
def _store_exception(exc_id, exc_info, prefix, request_path=''):
    """Persist one exception (formatted traceback + metadata) into the store.

    :param exc_id: numeric id used to build the stored file name
    :param exc_info: (exc_type, exc_value, exc_traceback), as from
        ``sys.exc_info()``
    :param prefix: app prefix embedded into the stored file name
    :param request_path: optional request path logged with the traceback
    """
    exc_type, exc_value, exc_traceback = exc_info

    tb = ''.join(traceback.format_exception(
        exc_type, exc_value, exc_traceback, None))

    detailed_tb = getattr(exc_value, '_org_exc_tb', None)

    if detailed_tb:
        # BUGFIX: previously remote_tb was only assigned when detailed_tb
        # was a string, so a truthy non-string value raised NameError below.
        remote_tb = detailed_tb
        if isinstance(detailed_tb, compat.string_types):
            remote_tb = [detailed_tb]

        tb += (
            '\n+++ BEG SOURCE EXCEPTION +++\n\n'
            '{}\n'
            '+++ END SOURCE EXCEPTION +++\n'
            ''.format('\n'.join(remote_tb))
        )

        # Avoid that remote_tb also appears in the frame
        del remote_tb

    exc_type_name = exc_type.__name__
    exc_store_path = get_exc_store()
    exc_data, org_data = exc_serialize(exc_id, tb, exc_type_name)
    exc_pref_id = '{}_{}_{}'.format(exc_id, prefix, org_data['exc_timestamp'])
    if not os.path.isdir(exc_store_path):
        os.makedirs(exc_store_path)
    stored_exc_path = os.path.join(exc_store_path, exc_pref_id)
    with open(stored_exc_path, 'wb') as f:
        f.write(exc_data)
    log.debug('Stored generated exception %s as: %s', exc_id, stored_exc_path)

    log.error(
        'error occurred handling this request.\n'
        'Path: `%s`, tb: %s',
        request_path, tb)
104
108
def store_exception(exc_id, exc_info, prefix=global_prefix, request_path=''):
    """
    Best-effort public wrapper around :func:`_store_exception`.

    Never raises: any failure while persisting is logged and swallowed so
    exception tracking can never take the server down.

    Example usage::

        exc_info = sys.exc_info()
        store_exception(id(exc_info), exc_info)
    """
    try:
        _store_exception(
            exc_id=exc_id, exc_info=exc_info, prefix=prefix,
            request_path=request_path)
    except Exception:
        # swallow on purpose -- see docstring
        log.exception('Failed to store exception `%s` information', exc_id)
119
125
120
126
def _find_exc_file(exc_id, prefix=global_prefix):
    """Locate a stored exception file whose name starts with ``exc_id``
    (combined with ``prefix`` when one is given).

    :return: full path of the first matching file, or None when absent.
    """
    exc_store_path = get_exc_store()
    if prefix:
        exc_id = '{}_{}'.format(exc_id, prefix)
    else:
        # search without a prefix
        exc_id = '{}'.format(exc_id)

    # we need to search the store for such start pattern as above
    # (the original loop carried a no-op `continue` after `break`; removed)
    for fname in os.listdir(exc_store_path):
        if fname.startswith(exc_id):
            exc_id = os.path.join(exc_store_path, fname)
            break
    else:
        exc_id = None

    return exc_id
139
145
140
146
def _read_exception(exc_id, prefix):
    """Load and unserialize a stored exception; None when no file matches."""
    exc_id_file_path = _find_exc_file(exc_id=exc_id, prefix=prefix)
    if not exc_id_file_path:
        log.debug('Exception File `%s` not found', exc_id_file_path)
        return None
    with open(exc_id_file_path, 'rb') as f:
        return exc_unserialize(f.read())
149
155
150
156
def read_exception(exc_id, prefix=global_prefix):
    """Never-raising wrapper around :func:`_read_exception`.

    Returns None (and logs) on any failure.
    """
    try:
        return _read_exception(exc_id=exc_id, prefix=prefix)
    except Exception:
        log.exception('Failed to read exception `%s` information', exc_id)
    # there's no way this can fail, it will crash server badly if it does.
    return None
158
164
159
165
def delete_exception(exc_id, prefix=global_prefix):
    """Remove a stored exception file if one matches; never raises."""
    try:
        found_path = _find_exc_file(exc_id, prefix=prefix)
        if found_path:
            os.remove(found_path)
    except Exception:
        # best-effort: failures are only logged
        log.exception('Failed to remove exception `%s` information', exc_id)
@@ -1,253 +1,307 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import time
18 import time
19 import errno
19 import errno
20 import logging
20 import logging
21
21
22 import msgpack
22 import msgpack
23 import redis
23 import redis
24
24
25 from dogpile.cache.api import CachedValue
25 from dogpile.cache.api import CachedValue
26 from dogpile.cache.backends import memory as memory_backend
26 from dogpile.cache.backends import memory as memory_backend
27 from dogpile.cache.backends import file as file_backend
27 from dogpile.cache.backends import file as file_backend
28 from dogpile.cache.backends import redis as redis_backend
28 from dogpile.cache.backends import redis as redis_backend
29 from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
29 from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
30 from dogpile.cache.util import memoized_property
30 from dogpile.cache.util import memoized_property
31
31
32 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
32 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
33
33
34
34
35 _default_max_size = 1024
35 _default_max_size = 1024
36
36
37 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
38
38
39
39
40 class LRUMemoryBackend(memory_backend.MemoryBackend):
40 class LRUMemoryBackend(memory_backend.MemoryBackend):
41 key_prefix = 'lru_mem_backend'
41 key_prefix = 'lru_mem_backend'
42 pickle_values = False
42 pickle_values = False
43
43
44 def __init__(self, arguments):
44 def __init__(self, arguments):
45 max_size = arguments.pop('max_size', _default_max_size)
45 max_size = arguments.pop('max_size', _default_max_size)
46
46
47 LRUDictClass = LRUDict
47 LRUDictClass = LRUDict
48 if arguments.pop('log_key_count', None):
48 if arguments.pop('log_key_count', None):
49 LRUDictClass = LRUDictDebug
49 LRUDictClass = LRUDictDebug
50
50
51 arguments['cache_dict'] = LRUDictClass(max_size)
51 arguments['cache_dict'] = LRUDictClass(max_size)
52 super(LRUMemoryBackend, self).__init__(arguments)
52 super(LRUMemoryBackend, self).__init__(arguments)
53
53
54 def delete(self, key):
54 def delete(self, key):
55 try:
55 try:
56 del self._cache[key]
56 del self._cache[key]
57 except KeyError:
57 except KeyError:
58 # we don't care if key isn't there at deletion
58 # we don't care if key isn't there at deletion
59 pass
59 pass
60
60
61 def delete_multi(self, keys):
61 def delete_multi(self, keys):
62 for key in keys:
62 for key in keys:
63 self.delete(key)
63 self.delete(key)
64
64
65
65
66 class PickleSerializer(object):
66 class PickleSerializer(object):
67
67
68 def _dumps(self, value, safe=False):
68 def _dumps(self, value, safe=False):
69 try:
69 try:
70 return compat.pickle.dumps(value)
70 return compat.pickle.dumps(value)
71 except Exception:
71 except Exception:
72 if safe:
72 if safe:
73 return NO_VALUE
73 return NO_VALUE
74 else:
74 else:
75 raise
75 raise
76
76
77 def _loads(self, value, safe=True):
77 def _loads(self, value, safe=True):
78 try:
78 try:
79 return compat.pickle.loads(value)
79 return compat.pickle.loads(value)
80 except Exception:
80 except Exception:
81 if safe:
81 if safe:
82 return NO_VALUE
82 return NO_VALUE
83 else:
83 else:
84 raise
84 raise
85
85
86
86
87 class MsgPackSerializer(object):
87 class MsgPackSerializer(object):
88
88
89 def _dumps(self, value, safe=False):
89 def _dumps(self, value, safe=False):
90 try:
90 try:
91 return msgpack.packb(value)
91 return msgpack.packb(value)
92 except Exception:
92 except Exception:
93 if safe:
93 if safe:
94 return NO_VALUE
94 return NO_VALUE
95 else:
95 else:
96 raise
96 raise
97
97
98 def _loads(self, value, safe=True):
98 def _loads(self, value, safe=True):
99 """
99 """
100 pickle maintained the `CachedValue` wrapper of the tuple
100 pickle maintained the `CachedValue` wrapper of the tuple
101 msgpack does not, so it must be added back in.
101 msgpack does not, so it must be added back in.
102 """
102 """
103 try:
103 try:
104 value = msgpack.unpackb(value, use_list=False)
104 value = msgpack.unpackb(value, use_list=False)
105 return CachedValue(*value)
105 return CachedValue(*value)
106 except Exception:
106 except Exception:
107 if safe:
107 if safe:
108 return NO_VALUE
108 return NO_VALUE
109 else:
109 else:
110 raise
110 raise
111
111
112
112
113 import fcntl
113 import fcntl
114 flock_org = fcntl.flock
114 flock_org = fcntl.flock
115
115
116
116
117 class CustomLockFactory(FileLock):
117 class CustomLockFactory(FileLock):
118
118
119 pass
119 pass
120
120
121
121
122 class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
122 class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
123 key_prefix = 'file_backend'
123 key_prefix = 'file_backend'
124
124
125 def __init__(self, arguments):
125 def __init__(self, arguments):
126 arguments['lock_factory'] = CustomLockFactory
126 arguments['lock_factory'] = CustomLockFactory
127 super(FileNamespaceBackend, self).__init__(arguments)
127 db_file = arguments.get('filename')
128
129 log.debug('initialing %s DB in %s', self.__class__.__name__, db_file)
130 try:
131 super(FileNamespaceBackend, self).__init__(arguments)
132 except Exception:
133 log.error('Failed to initialize db at: %s', db_file)
134 raise
128
135
129 def __repr__(self):
136 def __repr__(self):
130 return '{} `{}`'.format(self.__class__, self.filename)
137 return '{} `{}`'.format(self.__class__, self.filename)
131
138
132 def list_keys(self, prefix=''):
139 def list_keys(self, prefix=''):
133 prefix = '{}:{}'.format(self.key_prefix, prefix)
140 prefix = '{}:{}'.format(self.key_prefix, prefix)
134
141
135 def cond(v):
142 def cond(v):
136 if not prefix:
143 if not prefix:
137 return True
144 return True
138
145
139 if v.startswith(prefix):
146 if v.startswith(prefix):
140 return True
147 return True
141 return False
148 return False
142
149
143 with self._dbm_file(True) as dbm:
150 with self._dbm_file(True) as dbm:
144
151 try:
145 return filter(cond, dbm.keys())
152 return filter(cond, dbm.keys())
153 except Exception:
154 log.error('Failed to fetch DBM keys from DB: %s', self.get_store())
155 raise
146
156
147 def get_store(self):
157 def get_store(self):
148 return self.filename
158 return self.filename
149
159
150 def get(self, key):
160 def _dbm_get(self, key):
151 with self._dbm_file(False) as dbm:
161 with self._dbm_file(False) as dbm:
152 if hasattr(dbm, 'get'):
162 if hasattr(dbm, 'get'):
153 value = dbm.get(key, NO_VALUE)
163 value = dbm.get(key, NO_VALUE)
154 else:
164 else:
155 # gdbm objects lack a .get method
165 # gdbm objects lack a .get method
156 try:
166 try:
157 value = dbm[key]
167 value = dbm[key]
158 except KeyError:
168 except KeyError:
159 value = NO_VALUE
169 value = NO_VALUE
160 if value is not NO_VALUE:
170 if value is not NO_VALUE:
161 value = self._loads(value)
171 value = self._loads(value)
162 return value
172 return value
163
173
174 def get(self, key):
175 try:
176 return self._dbm_get(key)
177 except Exception:
178 log.error('Failed to fetch DBM key %s from DB: %s', key, self.get_store())
179 raise
180
164 def set(self, key, value):
181 def set(self, key, value):
165 with self._dbm_file(True) as dbm:
182 with self._dbm_file(True) as dbm:
166 dbm[key] = self._dumps(value)
183 dbm[key] = self._dumps(value)
167
184
168 def set_multi(self, mapping):
185 def set_multi(self, mapping):
169 with self._dbm_file(True) as dbm:
186 with self._dbm_file(True) as dbm:
170 for key, value in mapping.items():
187 for key, value in mapping.items():
171 dbm[key] = self._dumps(value)
188 dbm[key] = self._dumps(value)
172
189
173
190
174 class BaseRedisBackend(redis_backend.RedisBackend):
191 class BaseRedisBackend(redis_backend.RedisBackend):
175
192
176 def _create_client(self):
193 def _create_client(self):
177 args = {}
194 args = {}
178
195
179 if self.url is not None:
196 if self.url is not None:
180 args.update(url=self.url)
197 args.update(url=self.url)
181
198
182 else:
199 else:
183 args.update(
200 args.update(
184 host=self.host, password=self.password,
201 host=self.host, password=self.password,
185 port=self.port, db=self.db
202 port=self.port, db=self.db
186 )
203 )
187
204
188 connection_pool = redis.ConnectionPool(**args)
205 connection_pool = redis.ConnectionPool(**args)
189
206
190 return redis.StrictRedis(connection_pool=connection_pool)
207 return redis.StrictRedis(connection_pool=connection_pool)
191
208
192 def list_keys(self, prefix=''):
209 def list_keys(self, prefix=''):
193 prefix = '{}:{}*'.format(self.key_prefix, prefix)
210 prefix = '{}:{}*'.format(self.key_prefix, prefix)
194 return self.client.keys(prefix)
211 return self.client.keys(prefix)
195
212
196 def get_store(self):
213 def get_store(self):
197 return self.client.connection_pool
214 return self.client.connection_pool
198
215
199 def get(self, key):
216 def get(self, key):
200 value = self.client.get(key)
217 value = self.client.get(key)
201 if value is None:
218 if value is None:
202 return NO_VALUE
219 return NO_VALUE
203 return self._loads(value)
220 return self._loads(value)
204
221
205 def get_multi(self, keys):
222 def get_multi(self, keys):
206 if not keys:
223 if not keys:
207 return []
224 return []
208 values = self.client.mget(keys)
225 values = self.client.mget(keys)
209 loads = self._loads
226 loads = self._loads
210 return [
227 return [
211 loads(v) if v is not None else NO_VALUE
228 loads(v) if v is not None else NO_VALUE
212 for v in values]
229 for v in values]
213
230
214 def set(self, key, value):
231 def set(self, key, value):
215 if self.redis_expiration_time:
232 if self.redis_expiration_time:
216 self.client.setex(key, self.redis_expiration_time,
233 self.client.setex(key, self.redis_expiration_time,
217 self._dumps(value))
234 self._dumps(value))
218 else:
235 else:
219 self.client.set(key, self._dumps(value))
236 self.client.set(key, self._dumps(value))
220
237
221 def set_multi(self, mapping):
238 def set_multi(self, mapping):
222 dumps = self._dumps
239 dumps = self._dumps
223 mapping = dict(
240 mapping = dict(
224 (k, dumps(v))
241 (k, dumps(v))
225 for k, v in mapping.items()
242 for k, v in mapping.items()
226 )
243 )
227
244
228 if not self.redis_expiration_time:
245 if not self.redis_expiration_time:
229 self.client.mset(mapping)
246 self.client.mset(mapping)
230 else:
247 else:
231 pipe = self.client.pipeline()
248 pipe = self.client.pipeline()
232 for key, value in mapping.items():
249 for key, value in mapping.items():
233 pipe.setex(key, self.redis_expiration_time, value)
250 pipe.setex(key, self.redis_expiration_time, value)
234 pipe.execute()
251 pipe.execute()
235
252
236 def get_mutex(self, key):
253 def get_mutex(self, key):
237 u = redis_backend.u
238 if self.distributed_lock:
254 if self.distributed_lock:
239 lock_key = u('_lock_{0}').format(key)
255 lock_key = redis_backend.u('_lock_{0}').format(key)
240 log.debug('Trying to acquire Redis lock for key %s', lock_key)
256 log.debug('Trying to acquire Redis lock for key %s', lock_key)
241 return self.client.lock(lock_key, self.lock_timeout, self.lock_sleep)
257
258 auto_renewal = True
259 lock_timeout = self.lock_timeout
260 if auto_renewal and not self.lock_timeout:
261 # set default timeout for auto_renewal
262 lock_timeout = 10
263 return get_mutex_lock(self.client, lock_key, lock_timeout,
264 auto_renewal=auto_renewal)
242 else:
265 else:
243 return None
266 return None
244
267
245
268
246 class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
269 class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
247 key_prefix = 'redis_pickle_backend'
270 key_prefix = 'redis_pickle_backend'
248 pass
271 pass
249
272
250
273
251 class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
274 class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
252 key_prefix = 'redis_msgpack_backend'
275 key_prefix = 'redis_msgpack_backend'
253 pass
276 pass
277
278
279 def get_mutex_lock(client, lock_key, lock_timeout, auto_renewal=False):
280 import redis_lock
281
282 class _RedisLockWrapper(object):
283 """LockWrapper for redis_lock"""
284
285 def __init__(self):
286 pass
287
288 @property
289 def lock(self):
290 return redis_lock.Lock(
291 redis_client=client,
292 name=lock_key,
293 expire=lock_timeout,
294 auto_renewal=auto_renewal,
295 strict=True,
296 )
297
298 def acquire(self, wait=True):
299 return self.lock.acquire(wait)
300
301 def release(self):
302 try:
303 self.lock.release()
304 except redis_lock.NotAcquired:
305 pass
306
307 return _RedisLockWrapper()
General Comments 0
You need to be logged in to leave comments. Login now