##// END OF EJS Templates
small docstring fixes
marcink -
r3895:e39fb661 beta
parent child Browse files
Show More
@@ -1,415 +1,415 b''
1 """
1 """
2 Module provides a class allowing to wrap communication over subprocess.Popen
2 Module provides a class allowing to wrap communication over subprocess.Popen
3 input, output, error streams into a meaningfull, non-blocking, concurrent
3 input, output, error streams into a meaningfull, non-blocking, concurrent
4 stream processor exposing the output data as an iterator fitting to be a
4 stream processor exposing the output data as an iterator fitting to be a
5 return value passed by a WSGI applicaiton to a WSGI server per PEP 3333.
5 return value passed by a WSGI applicaiton to a WSGI server per PEP 3333.
6
6
7 Copyright (c) 2011 Daniel Dotsenko <dotsa@hotmail.com>
7 Copyright (c) 2011 Daniel Dotsenko <dotsa@hotmail.com>
8
8
9 This file is part of git_http_backend.py Project.
9 This file is part of git_http_backend.py Project.
10
10
11 git_http_backend.py Project is free software: you can redistribute it and/or
11 git_http_backend.py Project is free software: you can redistribute it and/or
12 modify it under the terms of the GNU Lesser General Public License as
12 modify it under the terms of the GNU Lesser General Public License as
13 published by the Free Software Foundation, either version 2.1 of the License,
13 published by the Free Software Foundation, either version 2.1 of the License,
14 or (at your option) any later version.
14 or (at your option) any later version.
15
15
16 git_http_backend.py Project is distributed in the hope that it will be useful,
16 git_http_backend.py Project is distributed in the hope that it will be useful,
17 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 but WITHOUT ANY WARRANTY; without even the implied warranty of
18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 GNU Lesser General Public License for more details.
19 GNU Lesser General Public License for more details.
20
20
21 You should have received a copy of the GNU Lesser General Public License
21 You should have received a copy of the GNU Lesser General Public License
22 along with git_http_backend.py Project.
22 along with git_http_backend.py Project.
23 If not, see <http://www.gnu.org/licenses/>.
23 If not, see <http://www.gnu.org/licenses/>.
24 """
24 """
25 import os
25 import os
26 import subprocess
26 import subprocess
27 from rhodecode.lib.vcs.utils.compat import deque, Event, Thread, _bytes, _bytearray
27 from rhodecode.lib.vcs.utils.compat import deque, Event, Thread, _bytes, _bytearray
28
28
29
29
class StreamFeeder(Thread):
    """
    Normal writing into pipe-like is blocking once the buffer is filled.
    This thread allows a thread to seep data from a file-like into a pipe
    without blocking the main thread.
    We close inpipe once the end of the source stream is reached.
    """
    def __init__(self, source):
        """
        :param source: a string-like, a readable file-like, or a file
            descriptor (int) whose contents will be fed into the write end
            of an internal ``os.pipe()``.
        :raises TypeError: if ``source`` is none of the above.
        """
        super(StreamFeeder, self).__init__()
        self.daemon = True
        filelike = False
        self.bytes = _bytes()
        # isinstance is preferred over ``type(x) in (...)`` — it also
        # accepts subclasses of the string-like types
        if isinstance(source, (type(''), _bytes, _bytearray)):  # string-like
            self.bytes = _bytes(source)
        else:  # can be either file pointer or file-like
            # NOTE: ``long`` is Python 2 only; this module targets Python 2
            if isinstance(source, (int, long)):  # file pointer it is
                # converting file descriptor (int) stdin into file-like
                try:
                    source = os.fdopen(source, 'rb', 16384)
                except Exception:
                    # fdopen may fail on an invalid descriptor; the
                    # file-like check below will then reject the source
                    pass
            # let's see if source is file-like by now
            filelike = hasattr(source, 'read')
            if not filelike and not self.bytes:
                raise TypeError("StreamFeeder's source object must be a readable "
                                "file-like, a file descriptor, or a string-like.")
        self.source = source
        self.readiface, self.writeiface = os.pipe()

    def run(self):
        # Feed everything into the write end of the pipe, then close it so
        # the reading side observes EOF.
        t = self.writeiface
        if self.bytes:
            os.write(t, self.bytes)
        else:
            s = self.source
            b = s.read(4096)
            while b:
                os.write(t, b)
                b = s.read(4096)
        os.close(t)

    @property
    def output(self):
        """Read end (file descriptor) of the internal pipe."""
        return self.readiface
77
77
78
78
class InputStreamChunker(Thread):
    """
    Daemon thread that reads ``chunk_size``-sized pieces from a blocking
    ``source`` stream and appends them to ``target`` (a deque shared with
    a consumer), pausing when too many chunks are buffered and signalling
    the consumer through threading Events.
    """
    def __init__(self, source, target, buffer_size, chunk_size):
        """
        :param source: blocking readable stream (e.g. a subprocess pipe)
        :param target: container with ``.append()`` (a deque) receiving chunks
        :param buffer_size: total buffered bytes allowed before reading pauses
        :param chunk_size: maximum number of bytes per read
        """
        super(InputStreamChunker, self).__init__()

        self.daemon = True  # die die die.

        self.source = source
        self.target = target
        self.chunk_count_max = int(buffer_size / chunk_size) + 1
        self.chunk_size = chunk_size

        # set each time a chunk lands in target (and once more on EOF)
        self.data_added = Event()
        self.data_added.clear()

        # cleared by run() when the buffer is full; set again by the
        # consumer once it has drained chunks and reading may resume
        self.keep_reading = Event()
        self.keep_reading.set()

        # raised once the source stream is exhausted
        self.EOF = Event()
        self.EOF.clear()

        # cleared by stop() to make run() bail out of its read loop
        self.go = Event()
        self.go.set()

    def stop(self):
        self.go.clear()
        self.EOF.set()
        try:
            # this is not proper, but is done to force the reader thread let
            # go of the input because, if successful, .close() will send EOF
            # down the pipe.
            self.source.close()
        except Exception:
            # was a bare ``except:`` — narrowed so SystemExit and
            # KeyboardInterrupt are not swallowed
            pass

    def run(self):
        # bind to locals once — these are touched on every iteration
        s = self.source
        t = self.target
        cs = self.chunk_size
        ccm = self.chunk_count_max
        kr = self.keep_reading
        da = self.data_added
        go = self.go

        try:
            b = s.read(cs)
        except ValueError:
            # reading from an already-closed stream yields "EOF"
            b = ''

        while b and go.is_set():
            if len(t) > ccm:
                # buffer full: pause until the consumer drains some chunks
                kr.clear()
                kr.wait(2)
                # # this only works on 2.7.x and up
                # if not kr.wait(10):
                #     raise Exception("Timed out while waiting for input to be read.")
                # instead we'll use this
                if len(t) > ccm + 3:
                    raise IOError("Timed out while waiting for input from subprocess.")
            t.append(b)
            da.set()
            b = s.read(cs)
        self.EOF.set()
        da.set()  # for cases when done but there was no input.
143
143
144
144
class BufferedGenerator():
    """
    Class behaves as a non-blocking, buffered pipe reader.
    Reads chunks of data (through a thread)
    from a blocking pipe, and attaches these to an array (Deque) of chunks.
    Reading is halted in the thread when max chunks is internally buffered.
    The .next() may operate in blocking or non-blocking fashion by yielding
    '' if no data is ready
    to be sent or by not returning until there is some data to send
    When we get EOF from underlying source pipe we raise the marker to raise
    StopIteration after the last chunk of data is yielded.
    """

    def __init__(self, source, buffer_size=65536, chunk_size=4096,
                 starting_values=[], bottomless=False):
        # NOTE(review): the mutable default ``starting_values=[]`` is only
        # read (fed to deque / truth-tested), never mutated, so the usual
        # shared-default pitfall does not bite here.

        if bottomless:
            # bounded deque: once full, oldest chunks are silently dropped
            # (used for the stderr stream in SubprocessIOChunker)
            maxlen = int(buffer_size / chunk_size)
        else:
            maxlen = None

        self.data = deque(starting_values, maxlen)

        self.worker = InputStreamChunker(source, self.data, buffer_size,
                                         chunk_size)
        if starting_values:
            self.worker.data_added.set()
        self.worker.start()

    ####################
    # Generator's methods
    ####################

    def __iter__(self):
        return self

    def next(self):
        # Poll in 0.2s slices until either data shows up or the reader
        # thread signals EOF.
        while not len(self.data) and not self.worker.EOF.is_set():
            self.worker.data_added.clear()
            self.worker.data_added.wait(0.2)
        if len(self.data):
            # we consumed a chunk, so the (possibly paused) reader may go on
            self.worker.keep_reading.set()
            return _bytes(self.data.popleft())
        elif self.worker.EOF.is_set():
            raise StopIteration

    def throw(self, type, value=None, traceback=None):
        # Mirrors generator.throw(); ``type`` intentionally shadows the
        # builtin to match the generator protocol's parameter name.
        if not self.worker.EOF.is_set():
            raise type(value)

    def start(self):
        self.worker.start()

    def stop(self):
        self.worker.stop()

    def close(self):
        # Best-effort shutdown: stop the reader thread, then propagate
        # GeneratorExit unless the stream is already fully read.
        try:
            self.worker.stop()
            self.throw(GeneratorExit)
        except (GeneratorExit, StopIteration):
            pass

    def __del__(self):
        self.close()

    ####################
    # Threaded reader's infrastructure.
    ####################
    @property
    def input(self):
        # NOTE(review): InputStreamChunker defines no attribute ``w`` in
        # this file, so accessing this property would raise AttributeError.
        # Presumably a leftover from an older version of the worker class —
        # verify against the original git_http_backend project before use.
        return self.worker.w

    @property
    def data_added_event(self):
        # Event set by the worker whenever a chunk is appended (or on EOF).
        return self.worker.data_added

    @property
    def data_added(self):
        return self.worker.data_added.is_set()

    @property
    def reading_paused(self):
        # True while the worker is blocked waiting for the buffer to drain.
        return not self.worker.keep_reading.is_set()

    @property
    def done_reading_event(self):
        """
        done_reading does not mean that the iterator's buffer is empty.
        Iterator might have done reading from underlying source, but the read
        chunks might still be available for serving through .next() method.

        :returns: An Event class instance.
        """
        return self.worker.EOF

    @property
    def done_reading(self):
        """
        done_reading does not mean that the iterator's buffer is empty.
        Iterator might have done reading from underlying source, but the read
        chunks might still be available for serving through .next() method.

        :returns: A Bool value.
        """
        return self.worker.EOF.is_set()

    @property
    def length(self):
        """
        returns int.

        This is the length of the queue of chunks, not the length of
        the combined contents in those chunks.

        __len__() cannot be meaningfully implemented because this
        reader is just flying through a bottomless pit content and
        can only know the length of what it already saw.

        If __len__() on WSGI server per PEP 3333 returns a value,
        the response's length will be set to that. In order not to
        confuse WSGI PEP3333 servers, we will not implement __len__
        at all.
        """
        return len(self.data)

    def prepend(self, x):
        self.data.appendleft(x)

    def append(self, x):
        self.data.append(x)

    def extend(self, o):
        self.data.extend(o)

    def __getitem__(self, i):
        return self.data[i]
282
282
283
283
class SubprocessIOChunker(object):
    """
    Processor class wrapping handling of subprocess IO.

    In a way, this is a "communicate()" replacement with a twist.

    - We are multithreaded. Writing in and reading out, err are all sep threads.
    - We support concurrent (in and out) stream processing.
    - The output is not a stream. It's a queue of read string (bytes, not unicode)
      chunks. The object behaves as an iterable. You can "for chunk in obj:" us.
    - We are non-blocking in more respects than communicate()
      (reading from subprocess out pauses when internal buffer is full, but
      does not block the parent calling code. On the flip side, reading from
      slow-yielding subprocess may block the iteration until data shows up. This
      does not block the parallel inpipe reading occurring parallel thread.)

    The purpose of the object is to allow us to wrap subprocess interactions into
    an iterable that can be passed to a WSGI server as the application's return
    value. Because of stream-processing-ability, WSGI does not have to read ALL
    of the subprocess's output and buffer it, before handing it to WSGI server for
    HTTP response. Instead, the class initializer reads just a bit of the stream
    to figure out if error occurred or likely to occur and if not, just hands the
    further iteration over subprocess output to the server for completion of HTTP
    response.

    The real or perceived subprocess error is trapped and raised as one of
    EnvironmentError family of exceptions

    Example usage:
    # try:
    #     answer = SubprocessIOChunker(
    #         cmd,
    #         input,
    #         buffer_size = 65536,
    #         chunk_size = 4096
    #     )
    # except (EnvironmentError) as e:
    #     print str(e)
    #     raise e
    #
    # return answer

    """
    def __init__(self, cmd, inputstream=None, buffer_size=65536,
                 chunk_size=4096, starting_values=None, **kwargs):
        """
        Initializes SubprocessIOChunker

        :param cmd: A Subprocess.Popen style "cmd". Can be string or array of strings
        :param inputstream: (Default: None) A file-like, string, or file pointer.
        :param buffer_size: (Default: 65536) A size of total buffer per stream in bytes.
        :param chunk_size: (Default: 4096) A max size of a chunk. Actual chunk may be smaller.
        :param starting_values: (Default: None) An array of strings to put in front
            of the output queue. ``None`` (meaning "no starting values") is used
            instead of ``[]`` to avoid a mutable default argument.
        :raises EnvironmentError: if the subprocess exits with a non-zero return
            code, or emits stderr output before producing any stdout.
        """
        starting_values = starting_values or []

        if inputstream:
            input_streamer = StreamFeeder(inputstream)
            input_streamer.start()
            inputstream = input_streamer.output

        _shell = kwargs.get('shell', True)
        if isinstance(cmd, (list, tuple)):
            cmd = ' '.join(cmd)

        kwargs['shell'] = _shell
        # NOTE(review): shell=True with a joined command string is prone to
        # shell injection if any component of ``cmd`` comes from untrusted
        # input; callers must guarantee cmd is trusted (or pass shell=False).
        _p = subprocess.Popen(cmd,
                              bufsize=-1,
                              stdin=inputstream,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE,
                              **kwargs
                              )

        bg_out = BufferedGenerator(_p.stdout, buffer_size, chunk_size, starting_values)
        bg_err = BufferedGenerator(_p.stderr, 16000, 1, bottomless=True)

        while not bg_out.done_reading and not bg_out.reading_paused and not bg_err.length:
            # doing this until we reach either end of file, or end of buffer.
            bg_out.data_added_event.wait(1)
            bg_out.data_added_event.clear()

        # at this point it's still ambiguous if we are done reading or just full buffer.
        # Either way, if error (returned by ended process, or implied based on
        # presence of stuff in stderr output) we error out.
        # Else, we are happy.
        _returncode = _p.poll()
        if _returncode or (_returncode is None and bg_err.length):
            try:
                _p.terminate()
            except Exception:
                pass
            bg_out.stop()
            bg_err.stop()
            err = ''.join(bg_err)
            if err:
                raise EnvironmentError("Subprocess exited due to an error:\n" + err)
            raise EnvironmentError("Subprocess exited with non 0 ret code:%s" % _returncode)

        self.process = _p
        self.output = bg_out
        self.error = bg_err

    def __iter__(self):
        return self

    def next(self):
        # Propagate a subprocess failure detected mid-iteration; a None
        # poll() result (still running) or 0 (clean exit) falls through.
        if self.process.poll():
            err = ''.join(self.error)
            raise EnvironmentError("Subprocess exited due to an error:\n" + err)
        return self.output.next()

    def throw(self, type, value=None, traceback=None):
        # Mirrors generator.throw(); ``type`` intentionally shadows the
        # builtin to match the generator protocol's parameter name.
        if self.output.length or not self.output.done_reading:
            raise type(value)

    def close(self):
        # Best-effort teardown: each step may legitimately fail if the
        # process or streams are already gone; narrowed from bare except.
        try:
            self.process.terminate()
        except Exception:
            pass
        try:
            self.output.close()
        except Exception:
            pass
        try:
            self.error.close()
        except Exception:
            pass

    def __del__(self):
        self.close()
General Comments 0
You need to be logged in to leave comments. Login now