more py2.5 compatibility patches
marcink
r2790:3c0ae445 beta
@@ -1,585 +1,594 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.lib.compat
4 4 ~~~~~~~~~~~~~~~~~~~~
5 5
6 6 Python backward compatibility functions and common libs
7 7
8 8
9 9 :created_on: Oct 7, 2011
10 10 :author: marcink
11 11 :copyright: (C) 2010-2010 Marcin Kuzminski <marcin@python-works.com>
12 12 :license: GPLv3, see COPYING for more details.
13 13 """
14 14 # This program is free software: you can redistribute it and/or modify
15 15 # it under the terms of the GNU General Public License as published by
16 16 # the Free Software Foundation, either version 3 of the License, or
17 17 # (at your option) any later version.
18 18 #
19 19 # This program is distributed in the hope that it will be useful,
20 20 # but WITHOUT ANY WARRANTY; without even the implied warranty of
21 21 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
22 22 # GNU General Public License for more details.
23 23 #
24 24 # You should have received a copy of the GNU General Public License
25 25 # along with this program. If not, see <http://www.gnu.org/licenses/>.
26 26
27 27 import os
28 28 from rhodecode import __platform__, PLATFORM_WIN, __py_version__
29 29
30 30 #==============================================================================
31 31 # json
32 32 #==============================================================================
33 33 from rhodecode.lib.ext_json import json
34 34
35 35
36 36 #==============================================================================
37 37 # izip_longest
38 38 #==============================================================================
39 39 try:
40 40 from itertools import izip_longest
41 41 except ImportError:
42 42 import itertools
43 43
44 44 def izip_longest(*args, **kwds):
45 45 fillvalue = kwds.get("fillvalue")
46 46
47 47 def sentinel(counter=([fillvalue] * (len(args) - 1)).pop):
48 48 yield counter() # yields the fillvalue, or raises IndexError
49 49
50 50 fillers = itertools.repeat(fillvalue)
51 51 iters = [itertools.chain(it, sentinel(), fillers)
52 52 for it in args]
53 53 try:
54 54 for tup in itertools.izip(*iters):
55 55 yield tup
56 56 except IndexError:
57 57 pass
58 58
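For reference, a hedged sketch of how this fallback behaves once imported from rhodecode.lib.compat (the sample values are illustrative only):

    from rhodecode.lib.compat import izip_longest

    # shorter iterables are padded with the fillvalue
    list(izip_longest([1, 2, 3], 'ab', fillvalue=None))
    # -> [(1, 'a'), (2, 'b'), (3, None)]
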
59 59
60 60 #==============================================================================
61 61 # OrderedDict
62 62 #==============================================================================
63 63
64 64 # Python Software Foundation License
65 65
66 66 # XXX: it feels like using the class with "is" and "is not" instead of "==" and
67 67 # "!=" should be faster.
68 68 class _Nil(object):
69 69
70 70 def __repr__(self):
71 71 return "nil"
72 72
73 73 def __eq__(self, other):
74 74 if (isinstance(other, _Nil)):
75 75 return True
76 76 else:
77 77 return NotImplemented
78 78
79 79 def __ne__(self, other):
80 80 if (isinstance(other, _Nil)):
81 81 return False
82 82 else:
83 83 return NotImplemented
84 84
85 85 _nil = _Nil()
86 86
87 87
88 88 class _odict(object):
89 89 """Ordered dict data structure, with O(1) complexity for dict operations
90 90 that modify one element.
91 91
92 92 Overwriting values doesn't change their original sequential order.
93 93 """
94 94
95 95 def _dict_impl(self):
96 96 return None
97 97
98 98 def __init__(self, data=(), **kwds):
99 99 """This doesn't accept keyword initialization as normal dicts to avoid
100 100 a trap - inside a function or method the keyword args are accessible
101 101 only as a dict, without a defined order, so their original order is
102 102 lost.
103 103 """
104 104 if kwds:
105 105 raise TypeError("__init__() of ordered dict takes no keyword "
106 106 "arguments to avoid an ordering trap.")
107 107 self._dict_impl().__init__(self)
108 108 # If you give a normal dict, then the order of elements is undefined
109 109 if hasattr(data, "iteritems"):
110 110 for key, val in data.iteritems():
111 111 self[key] = val
112 112 else:
113 113 for key, val in data:
114 114 self[key] = val
115 115
116 116 # Double-linked list header
117 117 def _get_lh(self):
118 118 dict_impl = self._dict_impl()
119 119 if not hasattr(self, '_lh'):
120 120 dict_impl.__setattr__(self, '_lh', _nil)
121 121 return dict_impl.__getattribute__(self, '_lh')
122 122
123 123 def _set_lh(self, val):
124 124 self._dict_impl().__setattr__(self, '_lh', val)
125 125
126 126 lh = property(_get_lh, _set_lh)
127 127
128 128 # Double-linked list tail
129 129 def _get_lt(self):
130 130 dict_impl = self._dict_impl()
131 131 if not hasattr(self, '_lt'):
132 132 dict_impl.__setattr__(self, '_lt', _nil)
133 133 return dict_impl.__getattribute__(self, '_lt')
134 134
135 135 def _set_lt(self, val):
136 136 self._dict_impl().__setattr__(self, '_lt', val)
137 137
138 138 lt = property(_get_lt, _set_lt)
139 139
140 140 def __getitem__(self, key):
141 141 return self._dict_impl().__getitem__(self, key)[1]
142 142
143 143 def __setitem__(self, key, val):
144 144 dict_impl = self._dict_impl()
145 145 try:
146 146 dict_impl.__getitem__(self, key)[1] = val
147 147 except KeyError:
148 148 new = [dict_impl.__getattribute__(self, 'lt'), val, _nil]
149 149 dict_impl.__setitem__(self, key, new)
150 150 if dict_impl.__getattribute__(self, 'lt') == _nil:
151 151 dict_impl.__setattr__(self, 'lh', key)
152 152 else:
153 153 dict_impl.__getitem__(
154 154 self, dict_impl.__getattribute__(self, 'lt'))[2] = key
155 155 dict_impl.__setattr__(self, 'lt', key)
156 156
157 157 def __delitem__(self, key):
158 158 dict_impl = self._dict_impl()
159 159 pred, _, succ = self._dict_impl().__getitem__(self, key)
160 160 if pred == _nil:
161 161 dict_impl.__setattr__(self, 'lh', succ)
162 162 else:
163 163 dict_impl.__getitem__(self, pred)[2] = succ
164 164 if succ == _nil:
165 165 dict_impl.__setattr__(self, 'lt', pred)
166 166 else:
167 167 dict_impl.__getitem__(self, succ)[0] = pred
168 168 dict_impl.__delitem__(self, key)
169 169
170 170 def __contains__(self, key):
171 171 return key in self.keys()
172 172
173 173 def __len__(self):
174 174 return len(self.keys())
175 175
176 176 def __str__(self):
177 177 pairs = ("%r: %r" % (k, v) for k, v in self.iteritems())
178 178 return "{%s}" % ", ".join(pairs)
179 179
180 180 def __repr__(self):
181 181 if self:
182 182 pairs = ("(%r, %r)" % (k, v) for k, v in self.iteritems())
183 183 return "odict([%s])" % ", ".join(pairs)
184 184 else:
185 185 return "odict()"
186 186
187 187 def get(self, k, x=None):
188 188 if k in self:
189 189 return self._dict_impl().__getitem__(self, k)[1]
190 190 else:
191 191 return x
192 192
193 193 def __iter__(self):
194 194 dict_impl = self._dict_impl()
195 195 curr_key = dict_impl.__getattribute__(self, 'lh')
196 196 while curr_key != _nil:
197 197 yield curr_key
198 198 curr_key = dict_impl.__getitem__(self, curr_key)[2]
199 199
200 200 iterkeys = __iter__
201 201
202 202 def keys(self):
203 203 return list(self.iterkeys())
204 204
205 205 def itervalues(self):
206 206 dict_impl = self._dict_impl()
207 207 curr_key = dict_impl.__getattribute__(self, 'lh')
208 208 while curr_key != _nil:
209 209 _, val, curr_key = dict_impl.__getitem__(self, curr_key)
210 210 yield val
211 211
212 212 def values(self):
213 213 return list(self.itervalues())
214 214
215 215 def iteritems(self):
216 216 dict_impl = self._dict_impl()
217 217 curr_key = dict_impl.__getattribute__(self, 'lh')
218 218 while curr_key != _nil:
219 219 _, val, next_key = dict_impl.__getitem__(self, curr_key)
220 220 yield curr_key, val
221 221 curr_key = next_key
222 222
223 223 def items(self):
224 224 return list(self.iteritems())
225 225
226 226 def sort(self, cmp=None, key=None, reverse=False):
227 227 items = [(k, v) for k, v in self.items()]
228 228 if cmp is not None:
229 229 items = sorted(items, cmp=cmp)
230 230 elif key is not None:
231 231 items = sorted(items, key=key)
232 232 else:
233 233 items = sorted(items, key=lambda x: x[1])
234 234 if reverse:
235 235 items.reverse()
236 236 self.clear()
237 237 self.__init__(items)
238 238
239 239 def clear(self):
240 240 dict_impl = self._dict_impl()
241 241 dict_impl.clear(self)
242 242 dict_impl.__setattr__(self, 'lh', _nil)
243 243 dict_impl.__setattr__(self, 'lt', _nil)
244 244
245 245 def copy(self):
246 246 return self.__class__(self)
247 247
248 248 def update(self, data=(), **kwds):
249 249 if kwds:
250 250 raise TypeError("update() of ordered dict takes no keyword "
251 251 "arguments to avoid an ordering trap.")
252 252 if hasattr(data, "iteritems"):
253 253 data = data.iteritems()
254 254 for key, val in data:
255 255 self[key] = val
256 256
257 257 def setdefault(self, k, x=None):
258 258 try:
259 259 return self[k]
260 260 except KeyError:
261 261 self[k] = x
262 262 return x
263 263
264 264 def pop(self, k, x=_nil):
265 265 try:
266 266 val = self[k]
267 267 del self[k]
268 268 return val
269 269 except KeyError:
270 270 if x == _nil:
271 271 raise
272 272 return x
273 273
274 274 def popitem(self):
275 275 try:
276 276 dict_impl = self._dict_impl()
277 277 key = dict_impl.__getattribute__(self, 'lt')
278 278 return key, self.pop(key)
279 279 except KeyError:
280 280 raise KeyError("'popitem(): ordered dictionary is empty'")
281 281
282 282 def riterkeys(self):
283 283 """To iterate on keys in reversed order.
284 284 """
285 285 dict_impl = self._dict_impl()
286 286 curr_key = dict_impl.__getattribute__(self, 'lt')
287 287 while curr_key != _nil:
288 288 yield curr_key
289 289 curr_key = dict_impl.__getitem__(self, curr_key)[0]
290 290
291 291 __reversed__ = riterkeys
292 292
293 293 def rkeys(self):
294 294 """List of the keys in reversed order.
295 295 """
296 296 return list(self.riterkeys())
297 297
298 298 def ritervalues(self):
299 299 """To iterate on values in reversed order.
300 300 """
301 301 dict_impl = self._dict_impl()
302 302 curr_key = dict_impl.__getattribute__(self, 'lt')
303 303 while curr_key != _nil:
304 304 curr_key, val, _ = dict_impl.__getitem__(self, curr_key)
305 305 yield val
306 306
307 307 def rvalues(self):
308 308 """List of the values in reversed order.
309 309 """
310 310 return list(self.ritervalues())
311 311
312 312 def riteritems(self):
313 313 """To iterate on (key, value) in reversed order.
314 314 """
315 315 dict_impl = self._dict_impl()
316 316 curr_key = dict_impl.__getattribute__(self, 'lt')
317 317 while curr_key != _nil:
318 318 pred_key, val, _ = dict_impl.__getitem__(self, curr_key)
319 319 yield curr_key, val
320 320 curr_key = pred_key
321 321
322 322 def ritems(self):
323 323 """List of the (key, value) in reversed order.
324 324 """
325 325 return list(self.riteritems())
326 326
327 327 def firstkey(self):
328 328 if self:
329 329 return self._dict_impl().__getattribute__(self, 'lh')
330 330 else:
331 331 raise KeyError("'firstkey(): ordered dictionary is empty'")
332 332
333 333 def lastkey(self):
334 334 if self:
335 335 return self._dict_impl().__getattribute__(self, 'lt')
336 336 else:
337 337 raise KeyError("'lastkey(): ordered dictionary is empty'")
338 338
339 339 def as_dict(self):
340 340 return self._dict_impl()(self.items())
341 341
342 342 def _repr(self):
343 343 """_repr(): low level repr of the whole data contained in the odict.
344 344 Useful for debugging.
345 345 """
346 346 dict_impl = self._dict_impl()
347 347 form = "odict low level repr lh,lt,data: %r, %r, %s"
348 348 return form % (dict_impl.__getattribute__(self, 'lh'),
349 349 dict_impl.__getattribute__(self, 'lt'),
350 350 dict_impl.__repr__(self))
351 351
352 352
353 353 class OrderedDict(_odict, dict):
354 354
355 355 def _dict_impl(self):
356 356 return dict
357 357
358 358
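A minimal sketch of the ordering guarantee described in the _odict docstring: overwriting a key keeps its original position (keys and values here are made up):

    from rhodecode.lib.compat import OrderedDict

    od = OrderedDict([('b', 1), ('a', 2)])
    od['b'] = 3       # overwrite keeps 'b' in its original slot
    od.keys()         # -> ['b', 'a']
    od.items()        # -> [('b', 3), ('a', 2)]
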
359 359 #==============================================================================
360 360 # OrderedSet
361 361 #==============================================================================
362 362 from sqlalchemy.util import OrderedSet
363 363
364 364
365 365 #==============================================================================
366 366 # kill FUNCTIONS
367 367 #==============================================================================
368 368 if __platform__ in PLATFORM_WIN:
369 369 import ctypes
370 370
371 371 def kill(pid, sig):
372 372 """kill function for Win32"""
373 373 kernel32 = ctypes.windll.kernel32
374 374 handle = kernel32.OpenProcess(1, 0, pid)
375 375 return (0 != kernel32.TerminateProcess(handle, 0))
376 376
377 377 else:
378 378 kill = os.kill
379 379
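Usage is the same on both platforms; a hedged sketch (the pid is illustrative, and note the Win32 variant ignores the signal and simply terminates the process):

    import signal
    from rhodecode.lib.compat import kill

    pid = 12345                # hypothetical process id
    kill(pid, signal.SIGTERM)  # os.kill on POSIX, TerminateProcess on Windows
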
380 380
381 381 #==============================================================================
382 382 # itertools.product
383 383 #==============================================================================
384 384
385 385 try:
386 386 from itertools import product
387 387 except ImportError:
388 388 def product(*args, **kwds):
389 389 # product('ABCD', 'xy') --> Ax Ay Bx By Cx Cy Dx Dy
390 390 # product(range(2), repeat=3) --> 000 001 010 011 100 101 110 111
391 391 pools = map(tuple, args) * kwds.get('repeat', 1)
392 392 result = [[]]
393 393 for pool in pools:
394 394 result = [x + [y] for x in result for y in pool]
395 395 for prod in result:
396 396 yield tuple(prod)
397 397
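The fallback is meant to match itertools.product from 2.6+; for example:

    from rhodecode.lib.compat import product

    list(product('AB', 'xy'))
    # -> [('A', 'x'), ('A', 'y'), ('B', 'x'), ('B', 'y')]
    list(product(range(2), repeat=2))
    # -> [(0, 0), (0, 1), (1, 0), (1, 1)]
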
398 398
399 399 #==============================================================================
400 400 # BytesIO
401 401 #==============================================================================
402 402
403 403 try:
404 404 from io import BytesIO
405 405 except ImportError:
406 406 from cStringIO import StringIO as BytesIO
407 407
408 408
409 409 #==============================================================================
410 # bytes
411 #==============================================================================
412 if __py_version__ >= (2, 6):
413 _bytes = bytes
414 else:
415 # in py2.6 bytes is a synonym for str
416 _bytes = str
417
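Since 2.5 has no bytes builtin, _bytes gives callers one name to use on both versions; a small sketch (the literal is illustrative):

    from rhodecode.lib.compat import _bytes

    data = _bytes('raw data')   # bytes(...) on 2.6+, str(...) on 2.5
    isinstance(data, str)       # True either way, since 2.x bytes is str
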
418 #==============================================================================
410 419 # deque
411 420 #==============================================================================
412 421
413 422 if __py_version__ >= (2, 6):
414 423 from collections import deque
415 424 else:
416 425 # need to implement our own deque with maxlen
417 426 class deque(object):
418 427
419 def __init__(self, iterable=(), maxlen=-1):
428 def __init__(self, iterable=(), maxlen= -1):
420 429 if not hasattr(self, 'data'):
421 430 self.left = self.right = 0
422 431 self.data = {}
423 self.maxlen = maxlen
432 self.maxlen = maxlen or -1
424 433 self.extend(iterable)
425 434
426 435 def append(self, x):
427 436 self.data[self.right] = x
428 437 self.right += 1
429 438 if self.maxlen != -1 and len(self) > self.maxlen:
430 439 self.popleft()
431 440
432 441 def appendleft(self, x):
433 442 self.left -= 1
434 443 self.data[self.left] = x
435 444 if self.maxlen != -1 and len(self) > self.maxlen:
436 445 self.pop()
437 446
438 447 def pop(self):
439 448 if self.left == self.right:
440 449 raise IndexError('cannot pop from empty deque')
441 450 self.right -= 1
442 451 elem = self.data[self.right]
443 452 del self.data[self.right]
444 453 return elem
445 454
446 455 def popleft(self):
447 456 if self.left == self.right:
448 457 raise IndexError('cannot pop from empty deque')
449 458 elem = self.data[self.left]
450 459 del self.data[self.left]
451 460 self.left += 1
452 461 return elem
453 462
454 463 def clear(self):
455 464 self.data.clear()
456 465 self.left = self.right = 0
457 466
458 467 def extend(self, iterable):
459 468 for elem in iterable:
460 469 self.append(elem)
461 470
462 471 def extendleft(self, iterable):
463 472 for elem in iterable:
464 473 self.appendleft(elem)
465 474
466 475 def rotate(self, n=1):
467 476 if self:
468 477 n %= len(self)
469 478 for i in xrange(n):
470 479 self.appendleft(self.pop())
471 480
472 481 def __getitem__(self, i):
473 482 if i < 0:
474 483 i += len(self)
475 484 try:
476 485 return self.data[i + self.left]
477 486 except KeyError:
478 487 raise IndexError
479 488
480 489 def __setitem__(self, i, value):
481 490 if i < 0:
482 491 i += len(self)
483 492 try:
484 493 self.data[i + self.left] = value
485 494 except KeyError:
486 495 raise IndexError
487 496
488 497 def __delitem__(self, i):
489 498 size = len(self)
490 499 if not (-size <= i < size):
491 500 raise IndexError
492 501 data = self.data
493 502 if i < 0:
494 503 i += size
495 504 for j in xrange(self.left + i, self.right - 1):
496 505 data[j] = data[j + 1]
497 506 self.pop()
498 507
499 508 def __len__(self):
500 509 return self.right - self.left
501 510
502 511 def __cmp__(self, other):
503 512 if type(self) != type(other):
504 513 return cmp(type(self), type(other))
505 514 return cmp(list(self), list(other))
506 515
507 516 def __repr__(self, _track=[]):
508 517 if id(self) in _track:
509 518 return '...'
510 519 _track.append(id(self))
511 520 r = 'deque(%r, maxlen=%s)' % (list(self), self.maxlen)
512 521 _track.remove(id(self))
513 522 return r
514 523
515 524 def __getstate__(self):
516 525 return (tuple(self),)
517 526
518 527 def __setstate__(self, s):
519 528 self.__init__(s[0])
520 529
521 530 def __hash__(self):
522 531 raise TypeError
523 532
524 533 def __copy__(self):
525 534 return self.__class__(self)
526 535
527 536 def __deepcopy__(self, memo={}):
528 537 from copy import deepcopy
529 538 result = self.__class__()
530 539 memo[id(self)] = result
531 540 result.__init__(deepcopy(tuple(self), memo))
532 541 return result
533 542
534 543
535 544 #==============================================================================
536 545 # threading.Event
537 546 #==============================================================================
538 547
539 548 if __py_version__ >= (2, 6):
540 from threading import Event
549 from threading import Event, Thread
541 550 else:
542 from threading import _Verbose, Condition, Lock
551 from threading import _Verbose, Condition, Lock, Thread
543 552
544 553 def Event(*args, **kwargs):
545 554 return _Event(*args, **kwargs)
546 555
547 556 class _Event(_Verbose):
548 557
549 558 # After Tim Peters' event class (without is_posted())
550 559
551 560 def __init__(self, verbose=None):
552 561 _Verbose.__init__(self, verbose)
553 562 self.__cond = Condition(Lock())
554 563 self.__flag = False
555 564
556 565 def isSet(self):
557 566 return self.__flag
558 567
559 568 is_set = isSet
560 569
561 570 def set(self):
562 571 self.__cond.acquire()
563 572 try:
564 573 self.__flag = True
565 574 self.__cond.notify_all()
566 575 finally:
567 576 self.__cond.release()
568 577
569 578 def clear(self):
570 579 self.__cond.acquire()
571 580 try:
572 581 self.__flag = False
573 582 finally:
574 583 self.__cond.release()
575 584
576 585 def wait(self, timeout=None):
577 586 self.__cond.acquire()
578 587 try:
579 588 if not self.__flag:
580 589 self.__cond.wait(timeout)
581 590 finally:
582 591 self.__cond.release()
583 592
584 593
585 594
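The backported Event exposes the same small API that subprocessio below relies on; a hedged usage sketch:

    from rhodecode.lib.compat import Event

    ev = Event()
    ev.is_set()     # -> False
    ev.set()
    ev.wait(0.1)    # returns immediately once the flag is set
    ev.clear()
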
@@ -1,409 +1,409 b''
1 1 '''
2 2 Module provides a class allowing to wrap communication over subprocess.Popen
3 3 input, output, error streams into a meaningful, non-blocking, concurrent
4 4 stream processor exposing the output data as an iterator fit to be a
5 5 return value passed by a WSGI application to a WSGI server per PEP 3333.
6 6
7 7 Copyright (c) 2011 Daniel Dotsenko <dotsa@hotmail.com>
8 8
9 9 This file is part of git_http_backend.py Project.
10 10
11 11 git_http_backend.py Project is free software: you can redistribute it and/or
12 12 modify it under the terms of the GNU Lesser General Public License as
13 13 published by the Free Software Foundation, either version 2.1 of the License,
14 14 or (at your option) any later version.
15 15
16 16 git_http_backend.py Project is distributed in the hope that it will be useful,
17 17 but WITHOUT ANY WARRANTY; without even the implied warranty of
18 18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 19 GNU Lesser General Public License for more details.
20 20
21 21 You should have received a copy of the GNU Lesser General Public License
22 22 along with git_http_backend.py Project.
23 23 If not, see <http://www.gnu.org/licenses/>.
24 24 '''
25 25 import os
26 26 import subprocess
27 import threading
28 from rhodecode.lib.compat import deque, Event
27 from rhodecode.lib.compat import deque, Event, Thread, _bytes
29 28
30 29
31 class StreamFeeder(threading.Thread):
30 class StreamFeeder(Thread):
32 31 """
33 32 Normal writing into a pipe-like object blocks once the buffer is filled.
34 33 This thread feeds data from a file-like object into a pipe
35 34 without blocking the main thread.
36 35 We close inpipe once the end of the source stream is reached.
37 36 """
38 37 def __init__(self, source):
39 38 super(StreamFeeder, self).__init__()
40 39 self.daemon = True
41 40 filelike = False
42 self.bytes = bytes()
43 if type(source) in (type(''), bytes, bytearray): # string-like
44 self.bytes = bytes(source)
41 self.bytes = _bytes()
42 if type(source) in (type(''), _bytes, bytearray): # string-like
43 self.bytes = _bytes(source)
45 44 else: # can be either file pointer or file-like
46 45 if type(source) in (int, long): # file pointer it is
47 46 ## converting file descriptor (int) stdin into file-like
48 47 try:
49 48 source = os.fdopen(source, 'rb', 16384)
50 49 except Exception:
51 50 pass
52 51 # let's see if source is file-like by now
53 52 try:
54 53 filelike = source.read
55 54 except Exception:
56 55 pass
57 56 if not filelike and not self.bytes:
58 57 raise TypeError("StreamFeeder's source object must be a readable "
59 58 "file-like, a file descriptor, or a string-like.")
60 59 self.source = source
61 60 self.readiface, self.writeiface = os.pipe()
62 61
63 62 def run(self):
64 63 t = self.writeiface
65 64 if self.bytes:
66 65 os.write(t, self.bytes)
67 66 else:
68 67 s = self.source
69 68 b = s.read(4096)
70 69 while b:
71 70 os.write(t, b)
72 71 b = s.read(4096)
73 72 os.close(t)
74 73
75 74 @property
76 75 def output(self):
77 76 return self.readiface
78 77
79 78
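A hedged sketch of how StreamFeeder is typically wired up (the payload is illustrative): the feeder writes its source into a pipe on its own thread, and .output is the read end to hand to Popen as stdin.

    feeder = StreamFeeder('some input payload')  # string-like source
    feeder.start()
    stdin_fd = feeder.output   # read end of the pipe, usable as Popen's stdin
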
80 class InputStreamChunker(threading.Thread):
79 class InputStreamChunker(Thread):
81 80 def __init__(self, source, target, buffer_size, chunk_size):
82 81
83 82 super(InputStreamChunker, self).__init__()
84 83
85 84 self.daemon = True # die die die.
86 85
87 86 self.source = source
88 87 self.target = target
89 88 self.chunk_count_max = int(buffer_size / chunk_size) + 1
90 89 self.chunk_size = chunk_size
91 90
92 91 self.data_added = Event()
93 92 self.data_added.clear()
94 93
95 94 self.keep_reading = Event()
96 95 self.keep_reading.set()
97 96
98 97 self.EOF = Event()
99 98 self.EOF.clear()
100 99
101 100 self.go = Event()
102 101 self.go.set()
103 102
104 103 def stop(self):
105 104 self.go.clear()
106 105 self.EOF.set()
107 106 try:
108 107 # this is not proper, but is done to force the reader thread let
109 108 # go of the input because, if successful, .close() will send EOF
110 109 # down the pipe.
111 110 self.source.close()
112 111 except:
113 112 pass
114 113
115 114 def run(self):
116 115 s = self.source
117 116 t = self.target
118 117 cs = self.chunk_size
119 118 ccm = self.chunk_count_max
120 119 kr = self.keep_reading
121 120 da = self.data_added
122 121 go = self.go
123 122 b = s.read(cs)
123
124 124 while b and go.is_set():
125 125 if len(t) > ccm:
126 126 kr.clear()
127 127 kr.wait(2)
128 128 # # this only works on 2.7.x and up
129 129 # if not kr.wait(10):
130 130 # raise Exception("Timed out while waiting for input to be read.")
131 131 # instead we'll use this
132 132 if len(t) > ccm + 3:
133 133 raise IOError("Timed out while waiting for input from subprocess.")
134 134 t.append(b)
135 135 da.set()
136 136 b = s.read(cs)
137 137 self.EOF.set()
138 138 da.set() # for cases when done but there was no input.
139 139
140 140
141 141 class BufferedGenerator():
142 142 '''
143 143 Class behaves as a non-blocking, buffered pipe reader.
144 144 Reads chunks of data (through a thread)
145 145 from a blocking pipe, and attaches these to an array (Deque) of chunks.
146 146 Reading is halted in the thread when max chunks is internally buffered.
147 147 The .next() call may operate in blocking or non-blocking fashion: it yields
148 148 '' if no data is ready
149 149 to be sent, or does not return until there is some data to send.
150 150 When we get EOF from the underlying source pipe we set a marker to raise
151 151 StopIteration after the last chunk of data is yielded.
152 152 '''
153 153
154 154 def __init__(self, source, buffer_size=65536, chunk_size=4096,
155 155 starting_values=[], bottomless=False):
156 156
157 157 if bottomless:
158 158 maxlen = int(buffer_size / chunk_size)
159 159 else:
160 160 maxlen = None
161 161
162 162 self.data = deque(starting_values, maxlen)
163 163
164 164 self.worker = InputStreamChunker(source, self.data, buffer_size,
165 165 chunk_size)
166 166 if starting_values:
167 167 self.worker.data_added.set()
168 168 self.worker.start()
169 169
170 170 ####################
171 171 # Generator's methods
172 172 ####################
173 173
174 174 def __iter__(self):
175 175 return self
176 176
177 177 def next(self):
178 178 while not len(self.data) and not self.worker.EOF.is_set():
179 179 self.worker.data_added.clear()
180 180 self.worker.data_added.wait(0.2)
181 181 if len(self.data):
182 182 self.worker.keep_reading.set()
183 return bytes(self.data.popleft())
183 return _bytes(self.data.popleft())
184 184 elif self.worker.EOF.is_set():
185 185 raise StopIteration
186 186
187 187 def throw(self, type, value=None, traceback=None):
188 188 if not self.worker.EOF.is_set():
189 189 raise type(value)
190 190
191 191 def start(self):
192 192 self.worker.start()
193 193
194 194 def stop(self):
195 195 self.worker.stop()
196 196
197 197 def close(self):
198 198 try:
199 199 self.worker.stop()
200 200 self.throw(GeneratorExit)
201 201 except (GeneratorExit, StopIteration):
202 202 pass
203 203
204 204 def __del__(self):
205 205 self.close()
206 206
207 207 ####################
208 208 # Threaded reader's infrastructure.
209 209 ####################
210 210 @property
211 211 def input(self):
212 212 return self.worker.w
213 213
214 214 @property
215 215 def data_added_event(self):
216 216 return self.worker.data_added
217 217
218 218 @property
219 219 def data_added(self):
220 220 return self.worker.data_added.is_set()
221 221
222 222 @property
223 223 def reading_paused(self):
224 224 return not self.worker.keep_reading.is_set()
225 225
226 226 @property
227 227 def done_reading_event(self):
228 228 '''
229 229 Done reading does not mean that the iterator's buffer is empty.
230 230 Iterator might have done reading from underlying source, but the read
231 231 chunks might still be available for serving through .next() method.
232 232
233 233 @return An Event class instance.
234 234 '''
235 235 return self.worker.EOF
236 236
237 237 @property
238 238 def done_reading(self):
239 239 '''
240 240 Done reading does not mean that the iterator's buffer is empty.
241 241 Iterator might have done reading from underlying source, but the read
242 242 chunks might still be available for serving through .next() method.
243 243
244 244 @return A bool value.
245 245 '''
246 246 return self.worker.EOF.is_set()
247 247
248 248 @property
249 249 def length(self):
250 250 '''
251 251 returns int.
252 252
253 253 This is the length of the queue of chunks, not the length of
254 254 the combined contents in those chunks.
255 255
256 256 __len__() cannot be meaningfully implemented because this
257 257 reader is just flying through a bottomless pit of content and
258 258 can only know the length of what it has already seen.
259 259
260 260 If __len__() returns a value to a WSGI server per PEP 3333,
261 261 the response's length will be set to that. In order not to
262 262 confuse WSGI PEP 3333 servers, we will not implement __len__
263 263 at all.
264 264 '''
265 265 return len(self.data)
266 266
267 267 def prepend(self, x):
268 268 self.data.appendleft(x)
269 269
270 270 def append(self, x):
271 271 self.data.append(x)
272 272
273 273 def extend(self, o):
274 274 self.data.extend(o)
275 275
276 276 def __getitem__(self, i):
277 277 return self.data[i]
278 278
279 279
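A minimal sketch of draining a pipe through BufferedGenerator (stream is any readable pipe-like object and handle is a hypothetical consumer; both are illustrative):

    bg = BufferedGenerator(stream, buffer_size=65536, chunk_size=4096)
    for chunk in bg:
        handle(chunk)   # hypothetical consumer of each byte chunk
    bg.close()
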
280 280 class SubprocessIOChunker(object):
281 281 '''
282 282 Processor class wrapping handling of subprocess IO.
283 283
284 284 In a way, this is a "communicate()" replacement with a twist.
285 285
286 286 - We are multithreaded. Writing in, reading out and reading err each run in a separate thread.
287 287 - We support concurrent (in and out) stream processing.
288 288 - The output is not a stream. It's a queue of read string (bytes, not unicode)
289 289 chunks. The object behaves as an iterable: you can iterate over it with "for chunk in obj:".
290 290 - We are non-blocking in more respects than communicate()
291 291 (reading from subprocess out pauses when internal buffer is full, but
292 292 does not block the parent calling code. On the flip side, reading from
293 293 slow-yielding subprocess may block the iteration until data shows up. This
294 294 does not block the parallel in-pipe reading occurring in a parallel thread.)
295 295
296 296 The purpose of the object is to allow us to wrap subprocess interactions into
297 297 an iterable that can be passed to a WSGI server as the application's return
298 298 value. Because of stream-processing-ability, WSGI does not have to read ALL
299 299 of the subprocess's output and buffer it, before handing it to WSGI server for
300 300 HTTP response. Instead, the class initializer reads just a bit of the stream
301 301 to figure out if an error occurred or is likely to occur and, if not, just hands the
302 302 further iteration over subprocess output to the server for completion of HTTP
303 303 response.
304 304
305 305 The real or perceived subprocess error is trapped and raised as one of
306 306 the EnvironmentError family of exceptions.
307 307
308 308 Example usage:
309 309 # try:
310 310 # answer = SubprocessIOChunker(
311 311 # cmd,
312 312 # input,
313 313 # buffer_size = 65536,
314 314 # chunk_size = 4096
315 315 # )
316 316 # except (EnvironmentError) as e:
317 317 # print str(e)
318 318 # raise e
319 319 #
320 320 # return answer
321 321
322 322
323 323 '''
324 324 def __init__(self, cmd, inputstream=None, buffer_size=65536,
325 325 chunk_size=4096, starting_values=[], **kwargs):
326 326 '''
327 327 Initializes SubprocessIOChunker
328 328
329 329 :param cmd: A Subprocess.Popen style "cmd". Can be string or array of strings
330 330 :param inputstream: (Default: None) A file-like, string, or file pointer.
331 331 :param buffer_size: (Default: 65536) A size of total buffer per stream in bytes.
332 332 :param chunk_size: (Default: 4096) A max size of a chunk. Actual chunk may be smaller.
333 333 :param starting_values: (Default: []) An array of strings to put in front of output que.
334 334 '''
335 335
336 336 if inputstream:
337 337 input_streamer = StreamFeeder(inputstream)
338 338 input_streamer.start()
339 339 inputstream = input_streamer.output
340 340
341 341 if isinstance(cmd, (list, tuple)):
342 342 cmd = ' '.join(cmd)
343 343
344 344 _shell = kwargs.get('shell') or True
345 345 kwargs['shell'] = _shell
346 346 _p = subprocess.Popen(cmd,
347 347 bufsize=-1,
348 348 stdin=inputstream,
349 349 stdout=subprocess.PIPE,
350 350 stderr=subprocess.PIPE,
351 351 **kwargs
352 352 )
353 353
354 354 bg_out = BufferedGenerator(_p.stdout, buffer_size, chunk_size, starting_values)
355 355 bg_err = BufferedGenerator(_p.stderr, 16000, 1, bottomless=True)
356 356
357 357 while not bg_out.done_reading and not bg_out.reading_paused and not bg_err.length:
358 358 # doing this until we reach either end of file, or end of buffer.
359 359 bg_out.data_added_event.wait(1)
360 360 bg_out.data_added_event.clear()
361 361
362 362 # at this point it's still ambiguous if we are done reading or just full buffer.
363 363 # Either way, if error (returned by ended process, or implied based on
364 364 # presence of stuff in stderr output) we error out.
365 365 # Else, we are happy.
366 366 _returncode = _p.poll()
367 367 if _returncode or (_returncode == None and bg_err.length):
368 368 try:
369 369 _p.terminate()
370 370 except:
371 371 pass
372 372 bg_out.stop()
373 373 bg_err.stop()
374 374 err = '%s' % ''.join(bg_err)
375 375 raise EnvironmentError("Subprocess exited due to an error:\n" + err)
376 376
377 377 self.process = _p
378 378 self.output = bg_out
379 379 self.error = bg_err
380 380
381 381 def __iter__(self):
382 382 return self
383 383
384 384 def next(self):
385 385 if self.process.poll():
386 386 err = '%s' % ''.join(self.error)
387 387 raise EnvironmentError("Subprocess exited due to an error:\n" + err)
388 388 return self.output.next()
389 389
390 390 def throw(self, type, value=None, traceback=None):
391 391 if self.output.length or not self.output.done_reading:
392 392 raise type(value)
393 393
394 394 def close(self):
395 395 try:
396 396 self.process.terminate()
397 397 except:
398 398 pass
399 399 try:
400 400 self.output.close()
401 401 except:
402 402 pass
403 403 try:
404 404 self.error.close()
405 405 except:
406 406 pass
407 407
408 408 def __del__(self):
409 409 self.close()
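Putting the pieces together, a hedged sketch along the lines of the docstring example above (the command is illustrative; any real or perceived subprocess error surfaces as EnvironmentError):

    answer = SubprocessIOChunker('cat',
                                 inputstream='hello\n',
                                 buffer_size=65536,
                                 chunk_size=4096)
    # 'answer' is an iterable of byte chunks, fit to be a WSGI app's return value
    print ''.join(answer)
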
@@ -1,177 +1,177 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.tests.test_libs
4 4 ~~~~~~~~~~~~~~~~~~~~~~~~~
5 5
6 6
7 7 Package for testing various lib/helper functions in rhodecode
8 8
9 9 :created_on: Jun 9, 2011
10 10 :copyright: (C) 2011-2012 Marcin Kuzminski <marcin@python-works.com>
11 11 :license: GPLv3, see COPYING for more details.
12 12 """
13 13 # This program is free software: you can redistribute it and/or modify
14 14 # it under the terms of the GNU General Public License as published by
15 15 # the Free Software Foundation, either version 3 of the License, or
16 16 # (at your option) any later version.
17 17 #
18 18 # This program is distributed in the hope that it will be useful,
19 19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 21 # GNU General Public License for more details.
22 22 #
23 23 # You should have received a copy of the GNU General Public License
24 24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25
25 from __future__ import with_statement
26 26 import unittest
27 27 import datetime
28 28 import hashlib
29 29 import mock
30 30 from rhodecode.tests import *
31 31
32 32 proto = 'http'
33 33 TEST_URLS = [
34 34 ('%s://127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'],
35 35 '%s://127.0.0.1' % proto),
36 36 ('%s://marcink@127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'],
37 37 '%s://127.0.0.1' % proto),
38 38 ('%s://marcink:pass@127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'],
39 39 '%s://127.0.0.1' % proto),
40 40 ('%s://127.0.0.1:8080' % proto, ['%s://' % proto, '127.0.0.1', '8080'],
41 41 '%s://127.0.0.1:8080' % proto),
42 42 ('%s://domain.org' % proto, ['%s://' % proto, 'domain.org'],
43 43 '%s://domain.org' % proto),
44 44 ('%s://user:pass@domain.org:8080' % proto, ['%s://' % proto, 'domain.org',
45 45 '8080'],
46 46 '%s://domain.org:8080' % proto),
47 47 ]
48 48
49 49 proto = 'https'
50 50 TEST_URLS += [
51 51 ('%s://127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'],
52 52 '%s://127.0.0.1' % proto),
53 53 ('%s://marcink@127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'],
54 54 '%s://127.0.0.1' % proto),
55 55 ('%s://marcink:pass@127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'],
56 56 '%s://127.0.0.1' % proto),
57 57 ('%s://127.0.0.1:8080' % proto, ['%s://' % proto, '127.0.0.1', '8080'],
58 58 '%s://127.0.0.1:8080' % proto),
59 59 ('%s://domain.org' % proto, ['%s://' % proto, 'domain.org'],
60 60 '%s://domain.org' % proto),
61 61 ('%s://user:pass@domain.org:8080' % proto, ['%s://' % proto, 'domain.org',
62 62 '8080'],
63 63 '%s://domain.org:8080' % proto),
64 64 ]
65 65
66 66
67 67 class TestLibs(unittest.TestCase):
68 68
69 69 def test_uri_filter(self):
70 70 from rhodecode.lib.utils2 import uri_filter
71 71
72 72 for url in TEST_URLS:
73 73 self.assertEqual(uri_filter(url[0]), url[1])
74 74
75 75 def test_credentials_filter(self):
76 76 from rhodecode.lib.utils2 import credentials_filter
77 77
78 78 for url in TEST_URLS:
79 79 self.assertEqual(credentials_filter(url[0]), url[2])
80 80
81 81 def test_str2bool(self):
82 82 from rhodecode.lib.utils2 import str2bool
83 83 test_cases = [
84 84 ('t', True),
85 85 ('true', True),
86 86 ('y', True),
87 87 ('yes', True),
88 88 ('on', True),
89 89 ('1', True),
90 90 ('Y', True),
91 91 ('yeS', True),
92 92 ('Y', True),
93 93 ('TRUE', True),
94 94 ('T', True),
95 95 ('False', False),
96 96 ('F', False),
97 97 ('FALSE', False),
98 98 ('0', False),
99 99 ('-1', False),
100 100 ('', False), ]
101 101
102 102 for case in test_cases:
103 103 self.assertEqual(str2bool(case[0]), case[1])
104 104
105 105 def test_mention_extractor(self):
106 106 from rhodecode.lib.utils2 import extract_mentioned_users
107 107 sample = (
108 108 "@first hi there @marcink here's my email marcin@email.com "
109 109 "@lukaszb check @one_more22 it pls @ ttwelve @D[] @one@two@three "
110 110 "@MARCIN @maRCiN @2one_more22 @john please see this http://org.pl "
111 111 "@marian.user just do it @marco-polo and next extract @marco_polo "
112 112 "user.dot hej ! not-needed maril@domain.org"
113 113 )
114 114
115 115 s = sorted([
116 116 'first', 'marcink', 'lukaszb', 'one_more22', 'MARCIN', 'maRCiN', 'john',
117 117 'marian.user', 'marco-polo', 'marco_polo'
118 118 ], key=lambda k: k.lower())
119 119 self.assertEqual(s, extract_mentioned_users(sample))
120 120
121 121 def test_age(self):
122 122 import calendar
123 123 from rhodecode.lib.utils2 import age
124 124 n = datetime.datetime.now()
125 125 delt = lambda *args, **kwargs: datetime.timedelta(*args, **kwargs)
126 126 self.assertEqual(age(n), u'just now')
127 127 self.assertEqual(age(n - delt(seconds=1)), u'1 second ago')
128 128 self.assertEqual(age(n - delt(seconds=60 * 2)), u'2 minutes ago')
129 129 self.assertEqual(age(n - delt(hours=1)), u'1 hour ago')
130 130 self.assertEqual(age(n - delt(hours=24)), u'1 day ago')
131 131 self.assertEqual(age(n - delt(hours=24 * 5)), u'5 days ago')
132 132 self.assertEqual(age(n - delt(hours=24 * (calendar.mdays[n.month-1] + 2))),
133 133 u'1 month and 2 days ago')
134 134 self.assertEqual(age(n - delt(hours=24 * 400)), u'1 year and 1 month ago')
135 135
136 136 def test_tag_exctrator(self):
137 137 sample = (
138 138 "hello pta[tag] gog [[]] [[] sda ero[or]d [me =>>< sa]"
139 139 "[requires] [stale] [see<>=>] [see => http://url.com]"
140 140 "[requires => url] [lang => python] [just a tag]"
141 141 "[,d] [ => ULR ] [obsolete] [desc]]"
142 142 )
143 143 from rhodecode.lib.helpers import desc_stylize
144 144 res = desc_stylize(sample)
145 145 self.assertTrue('<div class="metatag" tag="tag">tag</div>' in res)
146 146 self.assertTrue('<div class="metatag" tag="obsolete">obsolete</div>' in res)
147 147 self.assertTrue('<div class="metatag" tag="stale">stale</div>' in res)
148 148 self.assertTrue('<div class="metatag" tag="lang">python</div>' in res)
149 149 self.assertTrue('<div class="metatag" tag="requires">requires =&gt; <a href="/url">url</a></div>' in res)
150 150 self.assertTrue('<div class="metatag" tag="tag">tag</div>' in res)
151 151
152 152 def test_alternative_gravatar(self):
153 153 from rhodecode.lib.helpers import gravatar_url
154 154 _md5 = lambda s: hashlib.md5(s).hexdigest()
155 155
156 156 def fake_conf(**kwargs):
157 157 from pylons import config
158 158 config['app_conf'] = {}
159 159 config['app_conf']['use_gravatar'] = True
160 160 config['app_conf'].update(kwargs)
161 161 return config
162 162 fake = fake_conf(alternative_gravatar_url='http://test.com/{email}')
163 163 with mock.patch('pylons.config', fake):
164 164 grav = gravatar_url(email_address='test@foo.com', size=24)
165 165 assert grav == 'http://test.com/test@foo.com'
166 166
167 167 fake = fake_conf(alternative_gravatar_url='http://test.com/{md5email}')
168 168 with mock.patch('pylons.config', fake):
169 169 em = 'test@foo.com'
170 170 grav = gravatar_url(email_address=em, size=24)
171 171 assert grav == 'http://test.com/%s' % (_md5(em))
172 172
173 173 fake = fake_conf(alternative_gravatar_url='http://test.com/{md5email}/{size}')
174 174 with mock.patch('pylons.config', fake):
175 175 em = 'test@foo.com'
176 176 grav = gravatar_url(email_address=em, size=24)
177 177 assert grav == 'http://test.com/%s/%s' % (_md5(em), 24)