##// END OF EJS Templates
--version command should be safe, and bear no modifications...
marcink -
r3397:64c19449 beta
parent child Browse files
Show More
@@ -1,409 +1,415 b''
1 '''
1 '''
2 Module provides a class allowing to wrap communication over subprocess.Popen
2 Module provides a class allowing to wrap communication over subprocess.Popen
3 input, output, error streams into a meaningful, non-blocking, concurrent
3 input, output, error streams into a meaningful, non-blocking, concurrent
4 stream processor exposing the output data as an iterator fitting to be a
4 stream processor exposing the output data as an iterator fitting to be a
5 return value passed by a WSGI application to a WSGI server per PEP 3333.
5 return value passed by a WSGI application to a WSGI server per PEP 3333.
6
6
7 Copyright (c) 2011 Daniel Dotsenko <dotsa@hotmail.com>
7 Copyright (c) 2011 Daniel Dotsenko <dotsa@hotmail.com>
8
8
9 This file is part of git_http_backend.py Project.
9 This file is part of git_http_backend.py Project.
10
10
11 git_http_backend.py Project is free software: you can redistribute it and/or
11 git_http_backend.py Project is free software: you can redistribute it and/or
12 modify it under the terms of the GNU Lesser General Public License as
12 modify it under the terms of the GNU Lesser General Public License as
13 published by the Free Software Foundation, either version 2.1 of the License,
13 published by the Free Software Foundation, either version 2.1 of the License,
14 or (at your option) any later version.
14 or (at your option) any later version.
15
15
16 git_http_backend.py Project is distributed in the hope that it will be useful,
16 git_http_backend.py Project is distributed in the hope that it will be useful,
17 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 but WITHOUT ANY WARRANTY; without even the implied warranty of
18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 GNU Lesser General Public License for more details.
19 GNU Lesser General Public License for more details.
20
20
21 You should have received a copy of the GNU Lesser General Public License
21 You should have received a copy of the GNU Lesser General Public License
22 along with git_http_backend.py Project.
22 along with git_http_backend.py Project.
23 If not, see <http://www.gnu.org/licenses/>.
23 If not, see <http://www.gnu.org/licenses/>.
24 '''
24 '''
25 import os
25 import os
26 import subprocess
26 import subprocess
27 from rhodecode.lib.compat import deque, Event, Thread, _bytes, _bytearray
27 from rhodecode.lib.compat import deque, Event, Thread, _bytes, _bytearray
28
28
29
29
class StreamFeeder(Thread):
    """
    Pump data from a string-like or file-like source into one end of an
    os.pipe() without blocking the caller.

    Writing into a pipe blocks once the kernel buffer fills up; running the
    copy loop on a daemon thread keeps the main thread responsive. The write
    end of the pipe is closed when the source is exhausted, which delivers
    EOF to whoever reads the other end.
    """

    def __init__(self, source):
        super(StreamFeeder, self).__init__()
        self.daemon = True
        has_read = False
        self.bytes = _bytes()
        # Exact type check (not isinstance) mirrors the accepted inputs:
        # native str, _bytes or _bytearray are treated as in-memory data.
        if type(source) in (type(''), _bytes, _bytearray):  # string-like
            self.bytes = _bytes(source)
        else:
            # Otherwise source should be a file descriptor or file-like.
            if type(source) in (int, long):  # file pointer it is
                # Wrap the raw file descriptor into a buffered file object.
                try:
                    source = os.fdopen(source, 'rb', 16384)
                except Exception:
                    pass
            # Duck-type check: anything exposing .read() will do.
            try:
                has_read = source.read
            except Exception:
                pass
            if not has_read and not self.bytes:
                raise TypeError("StreamFeeder's source object must be a readable "
                                "file-like, a file descriptor, or a string-like.")
        self.source = source
        self.readiface, self.writeiface = os.pipe()

    def run(self):
        writefd = self.writeiface
        if self.bytes:
            # In-memory data: dump it in one go.
            os.write(writefd, self.bytes)
        else:
            # File-like data: copy in 4 KiB chunks until exhausted.
            src = self.source
            chunk = src.read(4096)
            while chunk:
                os.write(writefd, chunk)
                chunk = src.read(4096)
        # Closing the write end signals EOF to the reader side.
        os.close(writefd)

    @property
    def output(self):
        # Read end of the pipe; hand this to the consumer (e.g. Popen stdin).
        return self.readiface
77
77
78
78
class InputStreamChunker(Thread):
    """
    Daemon thread that drains a blocking source stream into a shared deque
    of chunks, pausing itself when the consumer falls behind.

    Coordination with the consumer happens through four Events:
      - data_added: pulsed whenever a chunk lands in the target queue
      - keep_reading: cleared here when the buffer is full; consumer re-sets it
      - EOF: set once the source is exhausted (or stop() is called)
      - go: cleared by stop() to abort the read loop
    """
    def __init__(self, source, target, buffer_size, chunk_size):
        """
        :param source: blocking file-like object read in chunk_size pieces
        :param target: deque-like container chunks are appended to
        :param buffer_size: approximate total bytes buffered before pausing
        :param chunk_size: max bytes per read
        """
        super(InputStreamChunker, self).__init__()

        self.daemon = True  # die die die.

        self.source = source
        self.target = target
        # +1 so at least one chunk fits even when buffer_size < chunk_size
        self.chunk_count_max = int(buffer_size / chunk_size) + 1
        self.chunk_size = chunk_size

        self.data_added = Event()
        self.data_added.clear()

        self.keep_reading = Event()
        self.keep_reading.set()

        self.EOF = Event()
        self.EOF.clear()

        self.go = Event()
        self.go.set()

    def stop(self):
        """Abort reading and signal EOF; best-effort close of the source."""
        self.go.clear()
        self.EOF.set()
        try:
            # this is not proper, but is done to force the reader thread let
            # go of the input because, if successful, .close() will send EOF
            # down the pipe.
            self.source.close()
        except Exception:
            # was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt
            # are no longer swallowed. stop() itself must never blow up.
            pass

    def run(self):
        # Bind everything to locals once -- this loop is the hot path.
        s = self.source
        t = self.target
        cs = self.chunk_size
        ccm = self.chunk_count_max
        kr = self.keep_reading
        da = self.data_added
        go = self.go

        try:
            b = s.read(cs)
        except ValueError:
            # reading from an already-closed source: treat as immediate EOF
            b = ''

        while b and go.is_set():
            if len(t) > ccm:
                # Buffer full: pause and wait for the consumer to drain it.
                kr.clear()
                kr.wait(2)
                # # this only works on 2.7.x and up
                # if not kr.wait(10):
                #     raise Exception("Timed out while waiting for input to be read.")
                # instead we'll use this
                if len(t) > ccm + 3:
                    raise IOError("Timed out while waiting for input from subprocess.")
            t.append(b)
            da.set()
            b = s.read(cs)
        self.EOF.set()
        da.set()  # for cases when done but there was no input.
139
143
140
144
class BufferedGenerator():
    '''
    Class behaves as a non-blocking, buffered pipe reader.
    Reads chunks of data (through a thread)
    from a blocking pipe, and attaches these to an array (Deque) of chunks.
    Reading is halted in the thread when max chunks is internally buffered.
    The .next() may operate in blocking or non-blocking fashion by yielding
    '' if no data is ready
    to be sent or by not returning until there is some data to send
    When we get EOF from underlying source pipe we raise the marker to raise
    StopIteration after the last chunk of data is yielded.
    '''

    def __init__(self, source, buffer_size=65536, chunk_size=4096,
                 starting_values=None, bottomless=False):
        '''
        :param source: file-like / pipe read end the worker thread drains
        :param buffer_size: total bytes to buffer before pausing the reader
        :param chunk_size: max bytes per chunk
        :param starting_values: optional chunks pre-seeded into the queue
        :param bottomless: if True the deque is bounded, silently dropping
            oldest chunks once full (used for stderr accumulation)
        '''
        # Fixed mutable default argument ([] shared across calls); behavior
        # for all existing callers is unchanged.
        if starting_values is None:
            starting_values = []
        if bottomless:
            maxlen = int(buffer_size / chunk_size)
        else:
            maxlen = None

        self.data = deque(starting_values, maxlen)

        self.worker = InputStreamChunker(source, self.data, buffer_size,
                                         chunk_size)
        if starting_values:
            self.worker.data_added.set()
        self.worker.start()

    ####################
    # Generator's methods
    ####################

    def __iter__(self):
        return self

    def next(self):
        # Wait (in 0.2s slices) until data shows up or the reader hits EOF.
        while not len(self.data) and not self.worker.EOF.is_set():
            self.worker.data_added.clear()
            self.worker.data_added.wait(0.2)
        if len(self.data):
            # Let the (possibly paused) reader thread resume.
            self.worker.keep_reading.set()
            return _bytes(self.data.popleft())
        elif self.worker.EOF.is_set():
            raise StopIteration

    def throw(self, type, value=None, traceback=None):
        if not self.worker.EOF.is_set():
            raise type(value)

    def start(self):
        self.worker.start()

    def stop(self):
        self.worker.stop()

    def close(self):
        try:
            self.worker.stop()
            self.throw(GeneratorExit)
        except (GeneratorExit, StopIteration):
            pass

    def __del__(self):
        self.close()

    ####################
    # Threaded reader's infrastructure.
    ####################
    @property
    def input(self):
        # NOTE(review): InputStreamChunker defines no attribute `w`, so
        # accessing this property raises AttributeError -- confirm the
        # intended target before relying on it.
        return self.worker.w

    @property
    def data_added_event(self):
        return self.worker.data_added

    @property
    def data_added(self):
        return self.worker.data_added.is_set()

    @property
    def reading_paused(self):
        return not self.worker.keep_reading.is_set()

    @property
    def done_reading_event(self):
        '''
        Done_reading does not mean that the iterator's buffer is empty.
        Iterator might have done reading from underlying source, but the read
        chunks might still be available for serving through .next() method.

        @return An Event class instance.
        '''
        return self.worker.EOF

    @property
    def done_reading(self):
        '''
        Done_reading does not mean that the iterator's buffer is empty.
        Iterator might have done reading from underlying source, but the read
        chunks might still be available for serving through .next() method.

        @return An Bool value.
        '''
        return self.worker.EOF.is_set()

    @property
    def length(self):
        '''
        returns int.

        This is the length of the queue of chunks, not the length of
        the combined contents in those chunks.

        __len__() cannot be meaningfully implemented because this
        reader is just flying through a bottomless pit content and
        can only know the length of what it already saw.

        If __len__() on WSGI server per PEP 3333 returns a value,
        the response's length will be set to that. In order not to
        confuse WSGI PEP3333 servers, we will not implement __len__
        at all.
        '''
        return len(self.data)

    def prepend(self, x):
        self.data.appendleft(x)

    def append(self, x):
        self.data.append(x)

    def extend(self, o):
        self.data.extend(o)

    def __getitem__(self, i):
        return self.data[i]
278
282
279
283
class SubprocessIOChunker(object):
    '''
    Processor class wrapping handling of subprocess IO.

    In a way, this is a "communicate()" replacement with a twist.

    - We are multithreaded. Writing in and reading out, err are all sep threads.
    - We support concurrent (in and out) stream processing.
    - The output is not a stream. It's a queue of read string (bytes, not unicode)
      chunks. The object behaves as an iterable. You can "for chunk in obj:" us.
    - We are non-blocking in more respects than communicate()
      (reading from subprocess out pauses when internal buffer is full, but
      does not block the parent calling code. On the flip side, reading from
      slow-yielding subprocess may block the iteration until data shows up. This
      does not block the parallel inpipe reading occurring parallel thread.)

    The purpose of the object is to allow us to wrap subprocess interactions into
    an iterable that can be passed to a WSGI server as the application's return
    value. Because of stream-processing-ability, WSGI does not have to read ALL
    of the subprocess's output and buffer it, before handing it to WSGI server for
    HTTP response. Instead, the class initializer reads just a bit of the stream
    to figure out if error occurred or likely to occur and if not, just hands the
    further iteration over subprocess output to the server for completion of HTTP
    response.

    The real or perceived subprocess error is trapped and raised as one of
    EnvironmentError family of exceptions

    Example usage:
    #    try:
    #        answer = SubprocessIOChunker(
    #            cmd,
    #            input,
    #            buffer_size = 65536,
    #            chunk_size = 4096
    #            )
    #    except (EnvironmentError) as e:
    #        print str(e)
    #        raise e
    #
    #    return answer
    '''
    def __init__(self, cmd, inputstream=None, buffer_size=65536,
                 chunk_size=4096, starting_values=None, **kwargs):
        '''
        Initializes SubprocessIOChunker

        :param cmd: A Subprocess.Popen style "cmd". Can be string or array of strings
        :param inputstream: (Default: None) A file-like, string, or file pointer.
        :param buffer_size: (Default: 65536) A size of total buffer per stream in bytes.
        :param chunk_size: (Default: 4096) A max size of a chunk. Actual chunk may be smaller.
        :param starting_values: (Default: None -> []) An array of strings to put in front of output que.
        '''
        # Fixed mutable default argument; [] is still the effective default.
        starting_values = starting_values or []

        if inputstream:
            input_streamer = StreamFeeder(inputstream)
            input_streamer.start()
            inputstream = input_streamer.output

        if isinstance(cmd, (list, tuple)):
            cmd = ' '.join(cmd)

        # NOTE(review): `or True` forces shell=True even when the caller
        # explicitly passes shell=False -- presumably intentional since cmd
        # was flattened to a single string above, but worth confirming.
        # SECURITY: shell=True with a string command means cmd must never
        # contain unsanitized user input.
        _shell = kwargs.get('shell') or True
        kwargs['shell'] = _shell
        _p = subprocess.Popen(cmd,
                              bufsize=-1,
                              stdin=inputstream,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE,
                              **kwargs
                              )

        bg_out = BufferedGenerator(_p.stdout, buffer_size, chunk_size, starting_values)
        bg_err = BufferedGenerator(_p.stderr, 16000, 1, bottomless=True)

        while not bg_out.done_reading and not bg_out.reading_paused and not bg_err.length:
            # doing this until we reach either end of file, or end of buffer.
            bg_out.data_added_event.wait(1)
            bg_out.data_added_event.clear()

        # at this point it's still ambiguous if we are done reading or just full buffer.
        # Either way, if error (returned by ended process, or implied based on
        # presence of stuff in stderr output) we error out.
        # Else, we are happy.
        _returncode = _p.poll()
        # was `_returncode == None`; identity comparison is correct for None
        if _returncode or (_returncode is None and bg_err.length):
            try:
                _p.terminate()
            except Exception:
                # process may already be gone; narrowed from bare except
                pass
            bg_out.stop()
            bg_err.stop()
            err = '%s' % ''.join(bg_err)
            if err:
                raise EnvironmentError("Subprocess exited due to an error:\n" + err)
            raise EnvironmentError("Subprocess exited with non 0 ret code:%s" % _returncode)

        self.process = _p
        self.output = bg_out
        self.error = bg_err

    def __iter__(self):
        return self

    def next(self):
        # Re-check the process on every chunk; surface late failures.
        if self.process.poll():
            err = '%s' % ''.join(self.error)
            raise EnvironmentError("Subprocess exited due to an error:\n" + err)
        return self.output.next()

    def throw(self, type, value=None, traceback=None):
        if self.output.length or not self.output.done_reading:
            raise type(value)

    def close(self):
        # Best-effort teardown of process and both stream readers.
        try:
            self.process.terminate()
        except Exception:
            pass
        try:
            self.output.close()
        except Exception:
            pass
        try:
            self.error.close()
        except Exception:
            pass

    def __del__(self):
        self.close()
@@ -1,800 +1,801 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """
2 """
3 rhodecode.lib.utils
3 rhodecode.lib.utils
4 ~~~~~~~~~~~~~~~~~~~
4 ~~~~~~~~~~~~~~~~~~~
5
5
6 Utilities library for RhodeCode
6 Utilities library for RhodeCode
7
7
8 :created_on: Apr 18, 2010
8 :created_on: Apr 18, 2010
9 :author: marcink
9 :author: marcink
10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 :license: GPLv3, see COPYING for more details.
11 :license: GPLv3, see COPYING for more details.
12 """
12 """
13 # This program is free software: you can redistribute it and/or modify
13 # This program is free software: you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation, either version 3 of the License, or
15 # the Free Software Foundation, either version 3 of the License, or
16 # (at your option) any later version.
16 # (at your option) any later version.
17 #
17 #
18 # This program is distributed in the hope that it will be useful,
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
21 # GNU General Public License for more details.
22 #
22 #
23 # You should have received a copy of the GNU General Public License
23 # You should have received a copy of the GNU General Public License
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25
25
26 import os
26 import os
27 import re
27 import re
28 import logging
28 import logging
29 import datetime
29 import datetime
30 import traceback
30 import traceback
31 import paste
31 import paste
32 import beaker
32 import beaker
33 import tarfile
33 import tarfile
34 import shutil
34 import shutil
35 import decorator
35 import decorator
36 import warnings
36 import warnings
37 from os.path import abspath
37 from os.path import abspath
38 from os.path import dirname as dn, join as jn
38 from os.path import dirname as dn, join as jn
39
39
40 from paste.script.command import Command, BadCommand
40 from paste.script.command import Command, BadCommand
41
41
42 from mercurial import ui, config
42 from mercurial import ui, config
43
43
44 from webhelpers.text import collapse, remove_formatting, strip_tags
44 from webhelpers.text import collapse, remove_formatting, strip_tags
45
45
46 from rhodecode.lib.vcs import get_backend
46 from rhodecode.lib.vcs import get_backend
47 from rhodecode.lib.vcs.backends.base import BaseChangeset
47 from rhodecode.lib.vcs.backends.base import BaseChangeset
48 from rhodecode.lib.vcs.utils.lazy import LazyProperty
48 from rhodecode.lib.vcs.utils.lazy import LazyProperty
49 from rhodecode.lib.vcs.utils.helpers import get_scm
49 from rhodecode.lib.vcs.utils.helpers import get_scm
50 from rhodecode.lib.vcs.exceptions import VCSError
50 from rhodecode.lib.vcs.exceptions import VCSError
51
51
52 from rhodecode.lib.caching_query import FromCache
52 from rhodecode.lib.caching_query import FromCache
53
53
54 from rhodecode.model import meta
54 from rhodecode.model import meta
55 from rhodecode.model.db import Repository, User, RhodeCodeUi, \
55 from rhodecode.model.db import Repository, User, RhodeCodeUi, \
56 UserLog, RepoGroup, RhodeCodeSetting, CacheInvalidation
56 UserLog, RepoGroup, RhodeCodeSetting, CacheInvalidation
57 from rhodecode.model.meta import Session
57 from rhodecode.model.meta import Session
58 from rhodecode.model.repos_group import ReposGroupModel
58 from rhodecode.model.repos_group import ReposGroupModel
59 from rhodecode.lib.utils2 import safe_str, safe_unicode
59 from rhodecode.lib.utils2 import safe_str, safe_unicode
60 from rhodecode.lib.vcs.utils.fakemod import create_module
60 from rhodecode.lib.vcs.utils.fakemod import create_module
61
61
62 log = logging.getLogger(__name__)
62 log = logging.getLogger(__name__)
63
63
64 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
64 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
65
65
66
66
def recursive_replace(str_, replace=' '):
    """
    Collapse every run of the given character in *str_* to one instance.

    :param str_: given string
    :param replace: char to find and replace multiple instances

    Examples::
        >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
        'Mighty-Mighty-Bo-sstones'
    """
    # Iterative equivalent of the recursive squeeze: keep halving runs
    # until no doubled occurrence remains.
    doubled = replace * 2
    while str_.find(doubled) != -1:
        str_ = str_.replace(doubled, replace)
    return str_
83 return recursive_replace(str_, replace)
84
84
85
85
def repo_name_slug(value):
    """
    Return slug of name of repository
    This function is called on each creation/modification
    of repository to prevent bad names in repo
    """
    # punctuation and whitespace that must not appear in a repo slug
    bad_chars = """`?=[]\;'"<>,/~!@#$%^&*()+{}|: """

    slug = strip_tags(remove_formatting(value))
    for bad in bad_chars:
        slug = slug.replace(bad, '-')
    # squash runs of dashes, then collapse/trim via webhelpers
    slug = recursive_replace(slug, '-')
    return collapse(slug, '-')
100 return slug
101
101
102
102
def get_repo_slug(request):
    """Return the routed repository name with any trailing slash removed."""
    repo_name = request.environ['pylons.routes_dict'].get('repo_name')
    # Falsy values (None / '') are passed through unchanged.
    return repo_name.rstrip('/') if repo_name else repo_name
107 return _repo
108
108
109
109
def get_repos_group_slug(request):
    """Return the routed repository-group name with any trailing slash removed."""
    group_name = request.environ['pylons.routes_dict'].get('group_name')
    # Falsy values (None / '') are passed through unchanged.
    return group_name.rstrip('/') if group_name else group_name
114 return _group
115
115
116
116
def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
    """
    Action logger for various actions made by users

    :param user: user that made this action, can be a unique username string or
        object containing user_id attribute
    :param action: action to log, should be on of predefined unique actions for
        easy translations
    :param repo: string name of repository or object containing repo_id,
        that action was made on
    :param ipaddr: optional ip address from what the action was made
    :param sa: optional sqlalchemy session
    :param commit: if True, commit the session after adding the log entry
    """

    if not sa:
        sa = meta.Session()

    try:
        # resolve the acting user from an object or a username string
        if hasattr(user, 'user_id'):
            user_obj = User.get(user.user_id)
        elif isinstance(user, basestring):
            user_obj = User.get_by_username(user)
        else:
            raise Exception('You have to provide a user object or a username')

        if user_obj is None:
            # fail loudly here instead of dying later with an opaque
            # AttributeError on user_obj.user_id
            raise Exception('Given user %s could not be found' % user)

        # resolve the repository from an object or a path-like name
        if hasattr(repo, 'repo_id'):
            repo_obj = Repository.get(repo.repo_id)
            repo_name = repo_obj.repo_name
        elif isinstance(repo, basestring):
            repo_name = repo.lstrip('/')
            repo_obj = Repository.get_by_repo_name(repo_name)
        else:
            repo_obj = None
            repo_name = ''

        user_log = UserLog()
        user_log.user_id = user_obj.user_id
        user_log.username = user_obj.username
        user_log.action = safe_unicode(action)

        user_log.repository = repo_obj
        user_log.repository_name = repo_name

        user_log.action_date = datetime.datetime.now()
        user_log.user_ip = ipaddr
        sa.add(user_log)

        log.info('Logging action %s on %s by %s' %
                 (action, safe_unicode(repo), user_obj))
        if commit:
            sa.commit()
    except Exception:
        # narrow from a bare except: KeyboardInterrupt/SystemExit now
        # propagate untouched; real errors are logged and re-raised
        log.error(traceback.format_exc())
        raise
172
172
173
173
def get_repos(path, recursive=False, skip_removed_repos=True):
    """
    Scans given path for repos and return (name,(type,path)) tuple

    :param path: path to scan for repositories
    :param recursive: recursive search and return names with subdirs in front
    :param skip_removed_repos: skip directories matching the removed-repo
        naming pattern (REMOVED_REPO_PAT)
    """

    # remove ending slash for better results
    path = path.rstrip(os.sep)
    log.debug('now scanning in %s location recursive:%s...' % (path, recursive))

    def _get_repos(p):
        # generator yielding (relative_name, scm_info) for each repo under p
        if not os.access(p, os.W_OK):
            # locations we cannot write to are skipped entirely
            return
        for dirpath in os.listdir(p):
            if os.path.isfile(os.path.join(p, dirpath)):
                continue
            cur_path = os.path.join(p, dirpath)

            # skip removed repos
            if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
                continue

            #skip .<somethin> dirs
            if dirpath.startswith('.'):
                continue

            try:
                scm_info = get_scm(cur_path)
                # repo name is the path relative to the scanned root
                yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
            except VCSError:
                # not a repository; optionally descend for nested repos
                if not recursive:
                    continue
                #check if this dir containts other repos for recursive scan
                rec_path = os.path.join(p, dirpath)
                if os.path.isdir(rec_path):
                    for inner_scm in _get_repos(rec_path):
                        yield inner_scm

    return _get_repos(path)

#alias for backward compat
get_filesystem_repos = get_repos
218
218
219
219
def is_valid_repo(repo_name, base_path, scm=None):
    """
    Returns True if given path is a valid repository False otherwise.
    If scm param is given also compare if given scm is the same as expected
    from scm parameter

    :param repo_name: repository name, relative to base_path
    :param base_path: root location of repositories
    :param scm: optional scm type alias to compare against

    :return True: if given path is a valid repository
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_name))

    try:
        detected = get_scm(full_path)
    except VCSError:
        return False
    # when a specific scm type was requested it must match the detected one
    return detected[0] == scm if scm else True
241
241
242
242
def is_valid_repos_group(repos_group_name, base_path):
    """
    Returns True if given path is a repos group False otherwise

    :param repos_group_name: group name, relative to base_path
    :param base_path: root location of repositories
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repos_group_name))

    # a valid repository can never double as a repos group
    if is_valid_repo(repos_group_name, base_path):
        return False

    try:
        # bare git repos must be checked one level up, since directories
        # like branches/hooks/info/objects inside a bare repo would
        # otherwise be mistaken for groups
        get_scm(os.path.dirname(full_path))
        return False
    except VCSError:
        pass

    # finally, any existing directory qualifies as a group
    return os.path.isdir(full_path)
270
270
271
271
def ask_ok(prompt, retries=4, complaint='Yes or no please!'):
    """
    Ask the user a yes/no question on stdin.

    :param prompt: text shown to the user
    :param retries: number of invalid answers allowed before giving up
    :param complaint: message printed after each unrecognized answer
    :raises IOError: when the user fails to answer within ``retries`` tries
    """
    while True:
        ok = raw_input(prompt)
        if ok in ('y', 'ye', 'yes'):
            return True
        if ok in ('n', 'no', 'nop', 'nope'):
            return False
        retries = retries - 1
        if retries < 0:
            raise IOError
        print complaint
283
283
#propagated from mercurial documentation
# whitelist of hgrc config sections that make_ui() copies into the
# mercurial ui object; any other section found in an hgrc file is ignored
ui_sections = ['alias', 'auth',
               'decode/encode', 'defaults',
               'diff', 'email',
               'extensions', 'format',
               'merge-patterns', 'merge-tools',
               'hooks', 'http_proxy',
               'smtp', 'patch',
               'paths', 'profiling',
               'server', 'trusted',
               'ui', 'web', ]
295
295
296
296
def make_ui(read_from='file', path=None, checkpaths=True, clear_session=True):
    """
    A function that will read python rc files or database
    and make an mercurial ui object from read options

    :param path: path to mercurial config file
    :param checkpaths: check the path
    :param read_from: read from 'file' or 'db'
    :param clear_session: remove the sqlalchemy session after reading from db

    :returns: configured mercurial ui object, or False when read_from is
        'file' and the file does not exist
    """

    baseui = ui.ui()

    # clean the baseui object
    baseui._ocfg = config.config()
    baseui._ucfg = config.config()
    baseui._tcfg = config.config()

    if read_from == 'file':
        if not os.path.isfile(path):
            log.debug('hgrc file is not present at %s, skipping...' % path)
            return False
        log.debug('reading hgrc from %s' % path)
        cfg = config.config()
        cfg.read(path)
        # copy only whitelisted sections (see ui_sections) into baseui
        for section in ui_sections:
            for k, v in cfg.items(section):
                log.debug('settings ui from file: [%s] %s=%s' % (section, k, v))
                baseui.setconfig(safe_str(section), safe_str(k), safe_str(v))

    elif read_from == 'db':
        sa = meta.Session()
        ret = sa.query(RhodeCodeUi)\
            .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
            .all()

        hg_ui = ret
        for ui_ in hg_ui:
            if ui_.ui_active:
                log.debug('settings ui from db: [%s] %s=%s', ui_.ui_section,
                          ui_.ui_key, ui_.ui_value)
                baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
                                 safe_str(ui_.ui_value))
            if ui_.ui_key == 'push_ssl':
                # force set push_ssl requirement to False, rhodecode
                # handles that
                baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
                                 False)
        if clear_session:
            meta.Session.remove()
    return baseui
347
347
348
348
def set_rhodecode_config(config):
    """
    Updates pylons config with new settings from database

    :param config: pylons config object, updated in place
    """
    # push every application setting stored in the database into the config
    for key, value in RhodeCodeSetting.get_app_settings().items():
        config[key] = value
359
359
360
360
def invalidate_cache(cache_key, *args):
    """
    Puts cache invalidation task into db for
    further global cache invalidation

    :param cache_key: only keys prefixed with ``get_repo_cached_`` are
        acted upon; everything else is ignored
    """

    from rhodecode.model.scm import ScmModel

    prefix = 'get_repo_cached_'
    if cache_key.startswith(prefix):
        repo_name = cache_key.split(prefix)[-1]
        ScmModel().mark_for_invalidation(repo_name)
372
372
373
373
def map_groups(path):
    """
    Given a full path to a repository, create all nested groups that this
    repo is inside. This function creates parent-child relationships between
    groups and creates default perms for all new groups.

    :param path: full path to repository
    :returns: the innermost (deepest) RepoGroup created/found, or None when
        the repo sits at the top level
    """
    sa = meta.Session()
    groups = path.split(Repository.url_sep())
    parent = None
    group = None

    # last element is repo in nested groups structure
    groups = groups[:-1]
    rgm = ReposGroupModel(sa)
    for lvl, group_name in enumerate(groups):
        # full group name includes all parent groups up to this level
        group_name = '/'.join(groups[:lvl] + [group_name])
        group = RepoGroup.get_by_group_name(group_name)
        desc = '%s group' % group_name

        # skip folders that are now removed repos
        if REMOVED_REPO_PAT.match(group_name):
            break

        if group is None:
            log.debug('creating group level: %s group_name: %s' % (lvl,
                                                                   group_name))
            group = RepoGroup(group_name, parent)
            group.group_description = desc
            sa.add(group)
            rgm._create_default_perms(group)
            # flush so the new group gets an id usable by its children
            sa.flush()
        parent = group
    return group
409
409
410
410
def repo2db_mapper(initial_repo_list, remove_obsolete=False,
                   install_git_hook=False):
    """
    maps all repos given in initial_repo_list, non existing repositories
    are created, if remove_obsolete is True it also check for db entries
    that are not in initial_repo_list and removes them.

    :param initial_repo_list: list of repositories found by scanning methods
    :param remove_obsolete: check for obsolete entries in database
    :param install_git_hook: if this is True, also check and install githook
        for a repo if missing
    :returns: tuple of (added, removed) lists of repository names
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.scm import ScmModel
    sa = meta.Session()
    rm = RepoModel()
    # newly discovered repositories become owned by the first admin account
    user = sa.query(User).filter(User.admin == True).first()
    if user is None:
        raise Exception('Missing administrative account!')
    added = []

    # # clear cache keys
    # log.debug("Clearing cache keys now...")
    # CacheInvalidation.clear_cache()
    # sa.commit()

    ##creation defaults
    defs = RhodeCodeSetting.get_default_repo_settings(strip_prefix=True)
    enable_statistics = defs.get('repo_enable_statistics')
    enable_locking = defs.get('repo_enable_locking')
    enable_downloads = defs.get('repo_enable_downloads')
    private = defs.get('repo_private')

    for name, repo in initial_repo_list.items():
        # ensure the whole parent group chain exists for nested repos
        group = map_groups(name)
        db_repo = rm.get_by_repo_name(name)
        # found repo that is on filesystem not in RhodeCode database
        if not db_repo:
            log.info('repository %s not found, creating now' % name)
            added.append(name)
            desc = (repo.description
                    if repo.description != 'unknown'
                    else '%s repository' % name)

            new_repo = rm.create_repo(
                repo_name=name,
                repo_type=repo.alias,
                description=desc,
                repos_group=getattr(group, 'group_id', None),
                owner=user,
                just_db=True,
                enable_locking=enable_locking,
                enable_downloads=enable_downloads,
                enable_statistics=enable_statistics,
                private=private
            )
            # we added that repo just now, and make sure it has githook
            # installed
            if new_repo.repo_type == 'git':
                ScmModel().install_git_hook(new_repo.scm_instance)
            new_repo.update_changeset_cache()
        elif install_git_hook:
            if db_repo.repo_type == 'git':
                ScmModel().install_git_hook(db_repo.scm_instance)
        # during starting install all cache keys for all repositories in the
        # system, this will register all repos and multiple instances
        key, _prefix, _org_key = CacheInvalidation._get_key(name)
        CacheInvalidation.invalidate(name)
        log.debug("Creating a cache key for %s, instance_id %s"
                  % (name, _prefix or 'unknown'))

    sa.commit()
    removed = []
    if remove_obsolete:
        # remove from database those repositories that are not in the filesystem
        for repo in sa.query(Repository).all():
            if repo.repo_name not in initial_repo_list.keys():
                log.debug("Removing non-existing repository found in db `%s`" %
                          repo.repo_name)
                try:
                    sa.delete(repo)
                    sa.commit()
                    removed.append(repo.repo_name)
                except:
                    #don't hold further removals on error
                    log.error(traceback.format_exc())
                    sa.rollback()

    return added, removed
500
500
501
501
# set cache regions for beaker so celery can utilise it
def add_cache(settings):
    """
    Parse beaker cache options out of a config mapping and register the
    configured cache regions with beaker.

    :param settings: config mapping; keys prefixed with ``beaker.cache.``
        or ``cache.`` are treated as cache options
    """
    cache_settings = {'regions': None}
    # strip the known prefixes and collect the raw cache options
    for key in settings.keys():
        for prefix in ('beaker.cache.', 'cache.'):
            if key.startswith(prefix):
                name = key.split(prefix)[1].strip()
                cache_settings[name] = settings[key].strip()

    if not cache_settings['regions']:
        return

    for region in cache_settings['regions'].split(','):
        region = region.strip()
        # gather all options namespaced under this region
        region_settings = {}
        for key, value in cache_settings.items():
            if key.startswith(region):
                region_settings[key.split('.')[1]] = value
        region_settings['expire'] = int(region_settings.get('expire', 60))
        region_settings.setdefault('lock_dir',
                                   cache_settings.get('lock_dir'))
        region_settings.setdefault('data_dir',
                                   cache_settings.get('data_dir'))
        region_settings.setdefault('type',
                                   cache_settings.get('type', 'memory'))
        beaker.cache.cache_regions[region] = region_settings
528
528
529
529
def load_rcextensions(root_path):
    """
    Load the optional ``rcextensions`` package found under *root_path*,
    expose it as ``rhodecode.EXTENSIONS`` and merge its settings into the
    application configuration.

    :param root_path: directory that may contain ``rcextensions/__init__.py``
    """
    import rhodecode
    from rhodecode.config import conf

    path = os.path.join(root_path, 'rcextensions', '__init__.py')
    if os.path.isfile(path):
        rcext = create_module('rc', path)
        EXT = rhodecode.EXTENSIONS = rcext
        log.debug('Found rcextensions now loading %s...' % rcext)

        # Additional mappings that are not present in the pygments lexers
        conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))

        #OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)

        if getattr(EXT, 'INDEX_EXTENSIONS', []) != []:
            log.debug('settings custom INDEX_EXTENSIONS')
            conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])

        #ADDITIONAL MAPPINGS
        log.debug('adding extra into INDEX_EXTENSIONS')
        conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))

        # auto check if the module is not missing any data, set to default if is
        # this will help autoupdate new feature of rcext module
        from rhodecode.config import rcextensions
        for k in dir(rcextensions):
            if not k.startswith('_') and not hasattr(EXT, k):
                setattr(EXT, k, getattr(rcextensions, k))
559
559
560
560
def get_custom_lexer(extension):
    """
    returns a custom lexer if it's defined in rcextensions module, or None
    if there's no custom lexer defined
    """
    import rhodecode
    from pygments import lexers

    # rcextensions may not be loaded at all; then no custom lexers exist
    ext_module = rhodecode.EXTENSIONS
    if not ext_module:
        return None
    mapping = ext_module.EXTRA_LEXERS
    if extension not in mapping:
        return None
    return lexers.get_lexer_by_name(mapping[extension])
572
572
573
573
574 #==============================================================================
574 #==============================================================================
575 # TEST FUNCTIONS AND CREATORS
575 # TEST FUNCTIONS AND CREATORS
576 #==============================================================================
576 #==============================================================================
def create_test_index(repo_location, config, full_index):
    """
    Makes default test index

    :param repo_location: location of repositories to index
    :param config: test config
    :param full_index: build a full index instead of an incremental one
    """

    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
    from rhodecode.lib.pidlock import DaemonLock, LockHeld

    repo_location = repo_location

    index_location = os.path.join(config['app_conf']['index_dir'])
    if not os.path.exists(index_location):
        os.makedirs(index_location)

    try:
        # guard against concurrent index builds with a pid lock file
        l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
        WhooshIndexingDaemon(index_location=index_location,
                             repo_location=repo_location)\
            .run(full_index=full_index)
        l.release()
    except LockHeld:
        # another process is already building the index; nothing to do
        pass
602
602
603
603
def create_test_env(repos_test_path, config):
    """
    Makes a fresh database and installs the test repositories into a tmp dir.

    :param repos_test_path: directory into which test repositories go
    :param config: pylons-style config dict; must provide
        ``sqlalchemy.db1.url``, ``here`` and ``app_conf`` keys
    """
    from rhodecode.lib.db_manage import DbManage
    from rhodecode.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH

    # PART ONE create db
    dbconf = config['sqlalchemy.db1.url']
    log.debug('making test db %s' % dbconf)

    # create test dir if it doesn't exist
    if not os.path.isdir(repos_test_path):
        log.debug('Creating testdir %s' % repos_test_path)
        os.makedirs(repos_test_path)

    dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
                        tests=True)
    dbmanage.create_tables(override=True)
    dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
    dbmanage.create_default_user()
    dbmanage.admin_prompt()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()
    Session().commit()

    # PART TWO make test repo
    log.debug('making test vcs repositories')

    idx_path = config['app_conf']['index_dir']
    data_path = config['app_conf']['cache_dir']

    # clean index and data from any previous run
    if idx_path and os.path.exists(idx_path):
        log.debug('remove %s' % idx_path)
        shutil.rmtree(idx_path)

    if data_path and os.path.exists(data_path):
        log.debug('remove %s' % data_path)
        shutil.rmtree(data_path)

    # CREATE DEFAULT TEST REPOS
    cur_dir = dn(dn(abspath(__file__)))
    for archive, dest in (("vcs_test_hg.tar.gz", HG_REPO),
                          ("vcs_test_git.tar.gz", GIT_REPO)):
        tar = tarfile.open(jn(cur_dir, 'tests', archive))
        try:
            tar.extractall(jn(TESTS_TMP_PATH, dest))
        finally:
            # BUGFIX: always release the archive handle, even if
            # extractall() raises (the original leaked it on failure)
            tar.close()

    # LOAD VCS test stuff
    from rhodecode.tests.vcs import setup_package
    setup_package()
659
659
660
660
661 #==============================================================================
661 #==============================================================================
662 # PASTER COMMANDS
662 # PASTER COMMANDS
663 #==============================================================================
663 #==============================================================================
class BasePasterCommand(Command):
    """
    Abstract Base Class for paster commands.

    The celery commands are somewhat aggressive about loading
    celery.conf, and since our module sets the `CELERY_LOADER`
    environment variable to our loader, we have to bootstrap a bit and
    make sure we've had a chance to load the pylons config off of the
    command line, otherwise everything fails.
    """
    min_args = 1
    min_args_error = "Please provide a paster config file as an argument."
    takes_config_file = 1
    requires_config_file = True

    def notify_msg(self, msg, log=False):
        """Make a notification to user, additionally if logger is passed
        it logs this action using given logger

        :param msg: message that will be printed to user
        :param log: logging instance, to use to additionally log this message
        """
        # BUGFIX: the original did ``isinstance(log, logging)`` - comparing
        # against the *module* always raises TypeError - and then called
        # ``log(msg)`` although Logger instances are not callable. Check for
        # a real Logger and use its ``info`` method instead.
        if log and isinstance(log, logging.Logger):
            log.info(msg)

    def run(self, args):
        """
        Overrides Command.run

        Checks for a config file argument and loads it.
        """
        if len(args) < self.min_args:
            raise BadCommand(
                self.min_args_error % {'min_args': self.min_args,
                                       'actual_args': len(args)})

        # Decrement because we're going to lob off the first argument.
        # @@ This is hacky
        self.min_args -= 1
        self.bootstrap_config(args[0])
        self.update_parser()
        return super(BasePasterCommand, self).run(args[1:])

    def update_parser(self):
        """
        Abstract method. Allows for the class's parser to be updated
        before the superclass's `run` method is called. Necessary to
        allow options/arguments to be passed through to the underlying
        celery command.
        """
        raise NotImplementedError("Abstract Method.")

    def bootstrap_config(self, conf):
        """
        Loads the pylons configuration.
        """
        from pylons import config as pylonsconfig

        self.path_to_ini_file = os.path.realpath(conf)
        conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
        pylonsconfig.init_app(conf.global_conf, conf.local_conf)

    def _init_session(self):
        """
        Inits SqlAlchemy Session
        """
        logging.config.fileConfig(self.path_to_ini_file)
        from pylons import config
        from rhodecode.model import init_model
        from rhodecode.lib.utils2 import engine_from_config

        # get to remove repos !!
        add_cache(config)
        engine = engine_from_config(config, 'sqlalchemy.db1.')
        init_model(engine)
740
740
741
741
def check_git_version():
    """
    Checks what version of git is installed in system, and issues a warning
    if it's too old for RhodeCode to properly work.

    :return: detected git version as ``StrictVersion``
        ('0.0.0' when detection/parsing fails)
    """
    from rhodecode import BACKENDS
    from rhodecode.lib.vcs.backends.git.repository import GitRepository
    from distutils.version import StrictVersion

    # _bare: skip the default '-c' config flags (older gits reject them),
    # _safe: never raise - a broken git must not abort startup
    stdout, stderr = GitRepository._run_git_command('--version', _bare=True,
                                                    _safe=True)

    # output looks like 'git version 1.7.4.1'; take the last token
    ver = (stdout.split(' ')[-1] or '').strip() or '0.0.0'
    if len(ver.split('.')) > 3:
        # StrictVersion needs to be only 3 element type
        ver = '.'.join(ver.split('.')[:3])
    try:
        _ver = StrictVersion(ver)
    except Exception:
        # BUGFIX: narrowed from a bare ``except:`` which also swallowed
        # SystemExit/KeyboardInterrupt
        _ver = StrictVersion('0.0.0')
        stderr = traceback.format_exc()

    req_ver = '1.7.4'
    to_old_git = _ver < StrictVersion(req_ver)

    if 'git' in BACKENDS:
        log.debug('GIT version detected: %s' % stdout)
        if stderr:
            log.warning('Unable to detect git version org error was:%r'
                        % stderr)
        elif to_old_git:
            log.warning('RhodeCode detected git version %s, which is too old '
                        'for the system to function properly. Make sure '
                        'its version is at least %s' % (ver, req_ver))
    return _ver
777
778
778
779
@decorator.decorator
def jsonify(func, *args, **kwargs):
    """Action decorator that formats output for JSON

    Given a function that will return content, this decorator will turn
    the result into JSON, with a content-type of 'application/json' and
    output it.

    """
    from pylons.decorators.util import get_pylons
    from rhodecode.lib.ext_json import json

    pylons = get_pylons(args)
    pylons.response.headers['Content-Type'] = \
        'application/json; charset=utf-8'

    data = func(*args, **kwargs)

    # top-level JSON arrays are a known data-leak vector - warn loudly but
    # still serialize, matching Pylons' own jsonify behaviour
    if isinstance(data, (list, tuple)):
        msg = ("JSON responses with Array envelopes are susceptible to "
               "cross-site data leak attacks, see "
               "http://wiki.pylonshq.com/display/pylonsfaq/Warnings")
        warnings.warn(msg, Warning, 2)
        log.warning(msg)

    log.debug("Returning JSON wrapped action output")
    return json.dumps(data, encoding='utf-8')
@@ -1,673 +1,687 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """
2 """
3 vcs.backends.git
3 vcs.backends.git
4 ~~~~~~~~~~~~~~~~
4 ~~~~~~~~~~~~~~~~
5
5
6 Git backend implementation.
6 Git backend implementation.
7
7
8 :created_on: Apr 8, 2010
8 :created_on: Apr 8, 2010
9 :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
9 :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
10 """
10 """
11
11
12 import os
12 import os
13 import re
13 import re
14 import time
14 import time
15 import posixpath
15 import posixpath
16 import logging
16 import logging
17 import traceback
17 import traceback
18 import urllib
18 import urllib
19 import urllib2
19 import urllib2
20 from dulwich.repo import Repo, NotGitRepository
20 from dulwich.repo import Repo, NotGitRepository
21 from dulwich.objects import Tag
21 from dulwich.objects import Tag
22 from string import Template
22 from string import Template
23
23
24 import rhodecode
24 import rhodecode
25 from rhodecode.lib.vcs.backends.base import BaseRepository
25 from rhodecode.lib.vcs.backends.base import BaseRepository
26 from rhodecode.lib.vcs.exceptions import BranchDoesNotExistError
26 from rhodecode.lib.vcs.exceptions import BranchDoesNotExistError
27 from rhodecode.lib.vcs.exceptions import ChangesetDoesNotExistError
27 from rhodecode.lib.vcs.exceptions import ChangesetDoesNotExistError
28 from rhodecode.lib.vcs.exceptions import EmptyRepositoryError
28 from rhodecode.lib.vcs.exceptions import EmptyRepositoryError
29 from rhodecode.lib.vcs.exceptions import RepositoryError
29 from rhodecode.lib.vcs.exceptions import RepositoryError
30 from rhodecode.lib.vcs.exceptions import TagAlreadyExistError
30 from rhodecode.lib.vcs.exceptions import TagAlreadyExistError
31 from rhodecode.lib.vcs.exceptions import TagDoesNotExistError
31 from rhodecode.lib.vcs.exceptions import TagDoesNotExistError
32 from rhodecode.lib.vcs.utils import safe_unicode, makedate, date_fromtimestamp
32 from rhodecode.lib.vcs.utils import safe_unicode, makedate, date_fromtimestamp
33 from rhodecode.lib.vcs.utils.lazy import LazyProperty, ThreadLocalLazyProperty
33 from rhodecode.lib.vcs.utils.lazy import LazyProperty, ThreadLocalLazyProperty
34 from rhodecode.lib.vcs.utils.ordered_dict import OrderedDict
34 from rhodecode.lib.vcs.utils.ordered_dict import OrderedDict
35 from rhodecode.lib.vcs.utils.paths import abspath
35 from rhodecode.lib.vcs.utils.paths import abspath
36 from rhodecode.lib.vcs.utils.paths import get_user_home
36 from rhodecode.lib.vcs.utils.paths import get_user_home
37 from .workdir import GitWorkdir
37 from .workdir import GitWorkdir
38 from .changeset import GitChangeset
38 from .changeset import GitChangeset
39 from .inmemory import GitInMemoryChangeset
39 from .inmemory import GitInMemoryChangeset
40 from .config import ConfigFile
40 from .config import ConfigFile
41 from rhodecode.lib import subprocessio
41 from rhodecode.lib import subprocessio
42
42
43
43
44 log = logging.getLogger(__name__)
44 log = logging.getLogger(__name__)
45
45
46
46
47 class GitRepository(BaseRepository):
47 class GitRepository(BaseRepository):
48 """
48 """
49 Git repository backend.
49 Git repository backend.
50 """
50 """
51 DEFAULT_BRANCH_NAME = 'master'
51 DEFAULT_BRANCH_NAME = 'master'
52 scm = 'git'
52 scm = 'git'
53
53
54 def __init__(self, repo_path, create=False, src_url=None,
54 def __init__(self, repo_path, create=False, src_url=None,
55 update_after_clone=False, bare=False):
55 update_after_clone=False, bare=False):
56
56
57 self.path = abspath(repo_path)
57 self.path = abspath(repo_path)
58 repo = self._get_repo(create, src_url, update_after_clone, bare)
58 repo = self._get_repo(create, src_url, update_after_clone, bare)
59 self.bare = repo.bare
59 self.bare = repo.bare
60
60
61 self._config_files = [
61 self._config_files = [
62 bare and abspath(self.path, 'config')
62 bare and abspath(self.path, 'config')
63 or abspath(self.path, '.git', 'config'),
63 or abspath(self.path, '.git', 'config'),
64 abspath(get_user_home(), '.gitconfig'),
64 abspath(get_user_home(), '.gitconfig'),
65 ]
65 ]
66
66
67 @ThreadLocalLazyProperty
67 @ThreadLocalLazyProperty
68 def _repo(self):
68 def _repo(self):
69 repo = Repo(self.path)
69 repo = Repo(self.path)
70 #temporary set that to now at later we will move it to constructor
70 #temporary set that to now at later we will move it to constructor
71 baseui = None
71 baseui = None
72 if baseui is None:
72 if baseui is None:
73 from mercurial.ui import ui
73 from mercurial.ui import ui
74 baseui = ui()
74 baseui = ui()
75 # patch the instance of GitRepo with an "FAKE" ui object to add
75 # patch the instance of GitRepo with an "FAKE" ui object to add
76 # compatibility layer with Mercurial
76 # compatibility layer with Mercurial
77 setattr(repo, 'ui', baseui)
77 setattr(repo, 'ui', baseui)
78 return repo
78 return repo
79
79
80 @property
80 @property
81 def head(self):
81 def head(self):
82 try:
82 try:
83 return self._repo.head()
83 return self._repo.head()
84 except KeyError:
84 except KeyError:
85 return None
85 return None
86
86
87 @LazyProperty
87 @LazyProperty
88 def revisions(self):
88 def revisions(self):
89 """
89 """
90 Returns list of revisions' ids, in ascending order. Being lazy
90 Returns list of revisions' ids, in ascending order. Being lazy
91 attribute allows external tools to inject shas from cache.
91 attribute allows external tools to inject shas from cache.
92 """
92 """
93 return self._get_all_revisions()
93 return self._get_all_revisions()
94
94
95 @classmethod
95 @classmethod
96 def _run_git_command(cls, cmd, **opts):
96 def _run_git_command(cls, cmd, **opts):
97 """
97 """
98 Runs given ``cmd`` as git command and returns tuple
98 Runs given ``cmd`` as git command and returns tuple
99 (stdout, stderr).
99 (stdout, stderr).
100
100
101 :param cmd: git command to be executed
101 :param cmd: git command to be executed
102 :param opts: env options to pass into Subprocess command
102 :param opts: env options to pass into Subprocess command
103 """
103 """
104
104
105 _copts = ['-c', 'core.quotepath=false', ]
105 if '_bare' in opts:
106 _copts = []
107 del opts['_bare']
108 else:
109 _copts = ['-c', 'core.quotepath=false', ]
110 safe_call = False
111 if '_safe' in opts:
112 #no exc on failure
113 del opts['_safe']
114 safe_call = True
115
106 _str_cmd = False
116 _str_cmd = False
107 if isinstance(cmd, basestring):
117 if isinstance(cmd, basestring):
108 cmd = [cmd]
118 cmd = [cmd]
109 _str_cmd = True
119 _str_cmd = True
110
120
111 gitenv = os.environ
121 gitenv = os.environ
112 # need to clean fix GIT_DIR !
122 # need to clean fix GIT_DIR !
113 if 'GIT_DIR' in gitenv:
123 if 'GIT_DIR' in gitenv:
114 del gitenv['GIT_DIR']
124 del gitenv['GIT_DIR']
115 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
125 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
116
126
117 _git_path = rhodecode.CONFIG.get('git_path', 'git')
127 _git_path = rhodecode.CONFIG.get('git_path', 'git')
118 cmd = [_git_path] + _copts + cmd
128 cmd = [_git_path] + _copts + cmd
119 if _str_cmd:
129 if _str_cmd:
120 cmd = ' '.join(cmd)
130 cmd = ' '.join(cmd)
121 try:
131 try:
122 _opts = dict(
132 _opts = dict(
123 env=gitenv,
133 env=gitenv,
124 shell=False,
134 shell=False,
125 )
135 )
126 _opts.update(opts)
136 _opts.update(opts)
127 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
137 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
128 except (EnvironmentError, OSError), err:
138 except (EnvironmentError, OSError), err:
129 log.error(traceback.format_exc())
139 tb_err = ("Couldn't run git command (%s).\n"
130 raise RepositoryError("Couldn't run git command (%s).\n"
140 "Original error was:%s\n" % (cmd, err))
131 "Original error was:%s" % (cmd, err))
141 log.error(tb_err)
142 if safe_call:
143 return '', err
144 else:
145 raise RepositoryError(tb_err)
132
146
133 return ''.join(p.output), ''.join(p.error)
147 return ''.join(p.output), ''.join(p.error)
134
148
135 def run_git_command(self, cmd):
149 def run_git_command(self, cmd):
136 opts = {}
150 opts = {}
137 if os.path.isdir(self.path):
151 if os.path.isdir(self.path):
138 opts['cwd'] = self.path
152 opts['cwd'] = self.path
139 return self._run_git_command(cmd, **opts)
153 return self._run_git_command(cmd, **opts)
140
154
141 @classmethod
155 @classmethod
142 def _check_url(cls, url):
156 def _check_url(cls, url):
143 """
157 """
144 Functon will check given url and try to verify if it's a valid
158 Functon will check given url and try to verify if it's a valid
145 link. Sometimes it may happened that mercurial will issue basic
159 link. Sometimes it may happened that mercurial will issue basic
146 auth request that can cause whole API to hang when used from python
160 auth request that can cause whole API to hang when used from python
147 or other external calls.
161 or other external calls.
148
162
149 On failures it'll raise urllib2.HTTPError
163 On failures it'll raise urllib2.HTTPError
150 """
164 """
151 from mercurial.util import url as Url
165 from mercurial.util import url as Url
152
166
153 # those authnadlers are patched for python 2.6.5 bug an
167 # those authnadlers are patched for python 2.6.5 bug an
154 # infinit looping when given invalid resources
168 # infinit looping when given invalid resources
155 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
169 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
156
170
157 # check first if it's not an local url
171 # check first if it's not an local url
158 if os.path.isdir(url) or url.startswith('file:'):
172 if os.path.isdir(url) or url.startswith('file:'):
159 return True
173 return True
160
174
161 if('+' in url[:url.find('://')]):
175 if('+' in url[:url.find('://')]):
162 url = url[url.find('+') + 1:]
176 url = url[url.find('+') + 1:]
163
177
164 handlers = []
178 handlers = []
165 test_uri, authinfo = Url(url).authinfo()
179 test_uri, authinfo = Url(url).authinfo()
166 if not test_uri.endswith('info/refs'):
180 if not test_uri.endswith('info/refs'):
167 test_uri = test_uri.rstrip('/') + '/info/refs'
181 test_uri = test_uri.rstrip('/') + '/info/refs'
168 if authinfo:
182 if authinfo:
169 #create a password manager
183 #create a password manager
170 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
184 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
171 passmgr.add_password(*authinfo)
185 passmgr.add_password(*authinfo)
172
186
173 handlers.extend((httpbasicauthhandler(passmgr),
187 handlers.extend((httpbasicauthhandler(passmgr),
174 httpdigestauthhandler(passmgr)))
188 httpdigestauthhandler(passmgr)))
175
189
176 o = urllib2.build_opener(*handlers)
190 o = urllib2.build_opener(*handlers)
177 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
191 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
178
192
179 q = {"service": 'git-upload-pack'}
193 q = {"service": 'git-upload-pack'}
180 qs = '?%s' % urllib.urlencode(q)
194 qs = '?%s' % urllib.urlencode(q)
181 cu = "%s%s" % (test_uri, qs)
195 cu = "%s%s" % (test_uri, qs)
182 req = urllib2.Request(cu, None, {})
196 req = urllib2.Request(cu, None, {})
183
197
184 try:
198 try:
185 resp = o.open(req)
199 resp = o.open(req)
186 return resp.code == 200
200 return resp.code == 200
187 except Exception, e:
201 except Exception, e:
188 # means it cannot be cloned
202 # means it cannot be cloned
189 raise urllib2.URLError("[%s] %s" % (url, e))
203 raise urllib2.URLError("[%s] %s" % (url, e))
190
204
191 def _get_repo(self, create, src_url=None, update_after_clone=False,
205 def _get_repo(self, create, src_url=None, update_after_clone=False,
192 bare=False):
206 bare=False):
193 if create and os.path.exists(self.path):
207 if create and os.path.exists(self.path):
194 raise RepositoryError("Location already exist")
208 raise RepositoryError("Location already exist")
195 if src_url and not create:
209 if src_url and not create:
196 raise RepositoryError("Create should be set to True if src_url is "
210 raise RepositoryError("Create should be set to True if src_url is "
197 "given (clone operation creates repository)")
211 "given (clone operation creates repository)")
198 try:
212 try:
199 if create and src_url:
213 if create and src_url:
200 GitRepository._check_url(src_url)
214 GitRepository._check_url(src_url)
201 self.clone(src_url, update_after_clone, bare)
215 self.clone(src_url, update_after_clone, bare)
202 return Repo(self.path)
216 return Repo(self.path)
203 elif create:
217 elif create:
204 os.mkdir(self.path)
218 os.mkdir(self.path)
205 if bare:
219 if bare:
206 return Repo.init_bare(self.path)
220 return Repo.init_bare(self.path)
207 else:
221 else:
208 return Repo.init(self.path)
222 return Repo.init(self.path)
209 else:
223 else:
210 return self._repo
224 return self._repo
211 except (NotGitRepository, OSError), err:
225 except (NotGitRepository, OSError), err:
212 raise RepositoryError(err)
226 raise RepositoryError(err)
213
227
214 def _get_all_revisions(self):
228 def _get_all_revisions(self):
215 # we must check if this repo is not empty, since later command
229 # we must check if this repo is not empty, since later command
216 # fails if it is. And it's cheaper to ask than throw the subprocess
230 # fails if it is. And it's cheaper to ask than throw the subprocess
217 # errors
231 # errors
218 try:
232 try:
219 self._repo.head()
233 self._repo.head()
220 except KeyError:
234 except KeyError:
221 return []
235 return []
222 cmd = 'rev-list --all --reverse --date-order'
236 cmd = 'rev-list --all --reverse --date-order'
223 try:
237 try:
224 so, se = self.run_git_command(cmd)
238 so, se = self.run_git_command(cmd)
225 except RepositoryError:
239 except RepositoryError:
226 # Can be raised for empty repositories
240 # Can be raised for empty repositories
227 return []
241 return []
228 return so.splitlines()
242 return so.splitlines()
229
243
230 def _get_all_revisions2(self):
244 def _get_all_revisions2(self):
231 #alternate implementation using dulwich
245 #alternate implementation using dulwich
232 includes = [x[1][0] for x in self._parsed_refs.iteritems()
246 includes = [x[1][0] for x in self._parsed_refs.iteritems()
233 if x[1][1] != 'T']
247 if x[1][1] != 'T']
234 return [c.commit.id for c in self._repo.get_walker(include=includes)]
248 return [c.commit.id for c in self._repo.get_walker(include=includes)]
235
249
236 def _get_revision(self, revision):
250 def _get_revision(self, revision):
237 """
251 """
238 For git backend we always return integer here. This way we ensure
252 For git backend we always return integer here. This way we ensure
239 that changset's revision attribute would become integer.
253 that changset's revision attribute would become integer.
240 """
254 """
241 pattern = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')
255 pattern = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')
242 is_bstr = lambda o: isinstance(o, (str, unicode))
256 is_bstr = lambda o: isinstance(o, (str, unicode))
243 is_null = lambda o: len(o) == revision.count('0')
257 is_null = lambda o: len(o) == revision.count('0')
244
258
245 if len(self.revisions) == 0:
259 if len(self.revisions) == 0:
246 raise EmptyRepositoryError("There are no changesets yet")
260 raise EmptyRepositoryError("There are no changesets yet")
247
261
248 if revision in (None, '', 'tip', 'HEAD', 'head', -1):
262 if revision in (None, '', 'tip', 'HEAD', 'head', -1):
249 revision = self.revisions[-1]
263 revision = self.revisions[-1]
250
264
251 if ((is_bstr(revision) and revision.isdigit() and len(revision) < 12)
265 if ((is_bstr(revision) and revision.isdigit() and len(revision) < 12)
252 or isinstance(revision, int) or is_null(revision)):
266 or isinstance(revision, int) or is_null(revision)):
253 try:
267 try:
254 revision = self.revisions[int(revision)]
268 revision = self.revisions[int(revision)]
255 except:
269 except:
256 raise ChangesetDoesNotExistError("Revision %r does not exist "
270 raise ChangesetDoesNotExistError("Revision %r does not exist "
257 "for this repository %s" % (revision, self))
271 "for this repository %s" % (revision, self))
258
272
259 elif is_bstr(revision):
273 elif is_bstr(revision):
260 # get by branch/tag name
274 # get by branch/tag name
261 _ref_revision = self._parsed_refs.get(revision)
275 _ref_revision = self._parsed_refs.get(revision)
262 _tags_shas = self.tags.values()
276 _tags_shas = self.tags.values()
263 if _ref_revision: # and _ref_revision[1] in ['H', 'RH', 'T']:
277 if _ref_revision: # and _ref_revision[1] in ['H', 'RH', 'T']:
264 return _ref_revision[0]
278 return _ref_revision[0]
265
279
266 # maybe it's a tag ? we don't have them in self.revisions
280 # maybe it's a tag ? we don't have them in self.revisions
267 elif revision in _tags_shas:
281 elif revision in _tags_shas:
268 return _tags_shas[_tags_shas.index(revision)]
282 return _tags_shas[_tags_shas.index(revision)]
269
283
270 elif not pattern.match(revision) or revision not in self.revisions:
284 elif not pattern.match(revision) or revision not in self.revisions:
271 raise ChangesetDoesNotExistError("Revision %r does not exist "
285 raise ChangesetDoesNotExistError("Revision %r does not exist "
272 "for this repository %s" % (revision, self))
286 "for this repository %s" % (revision, self))
273
287
274 # Ensure we return full id
288 # Ensure we return full id
275 if not pattern.match(str(revision)):
289 if not pattern.match(str(revision)):
276 raise ChangesetDoesNotExistError("Given revision %r not recognized"
290 raise ChangesetDoesNotExistError("Given revision %r not recognized"
277 % revision)
291 % revision)
278 return revision
292 return revision
279
293
280 def _get_archives(self, archive_name='tip'):
294 def _get_archives(self, archive_name='tip'):
281
295
282 for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
296 for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
283 yield {"type": i[0], "extension": i[1], "node": archive_name}
297 yield {"type": i[0], "extension": i[1], "node": archive_name}
284
298
285 def _get_url(self, url):
299 def _get_url(self, url):
286 """
300 """
287 Returns normalized url. If schema is not given, would fall to
301 Returns normalized url. If schema is not given, would fall to
288 filesystem (``file:///``) schema.
302 filesystem (``file:///``) schema.
289 """
303 """
290 url = str(url)
304 url = str(url)
291 if url != 'default' and not '://' in url:
305 if url != 'default' and not '://' in url:
292 url = ':///'.join(('file', url))
306 url = ':///'.join(('file', url))
293 return url
307 return url
294
308
295 @LazyProperty
309 @LazyProperty
296 def name(self):
310 def name(self):
297 return os.path.basename(self.path)
311 return os.path.basename(self.path)
298
312
299 @LazyProperty
313 @LazyProperty
300 def last_change(self):
314 def last_change(self):
301 """
315 """
302 Returns last change made on this repository as datetime object
316 Returns last change made on this repository as datetime object
303 """
317 """
304 return date_fromtimestamp(self._get_mtime(), makedate()[1])
318 return date_fromtimestamp(self._get_mtime(), makedate()[1])
305
319
306 def _get_mtime(self):
320 def _get_mtime(self):
307 try:
321 try:
308 return time.mktime(self.get_changeset().date.timetuple())
322 return time.mktime(self.get_changeset().date.timetuple())
309 except RepositoryError:
323 except RepositoryError:
310 idx_loc = '' if self.bare else '.git'
324 idx_loc = '' if self.bare else '.git'
311 # fallback to filesystem
325 # fallback to filesystem
312 in_path = os.path.join(self.path, idx_loc, "index")
326 in_path = os.path.join(self.path, idx_loc, "index")
313 he_path = os.path.join(self.path, idx_loc, "HEAD")
327 he_path = os.path.join(self.path, idx_loc, "HEAD")
314 if os.path.exists(in_path):
328 if os.path.exists(in_path):
315 return os.stat(in_path).st_mtime
329 return os.stat(in_path).st_mtime
316 else:
330 else:
317 return os.stat(he_path).st_mtime
331 return os.stat(he_path).st_mtime
318
332
319 @LazyProperty
333 @LazyProperty
320 def description(self):
334 def description(self):
321 idx_loc = '' if self.bare else '.git'
335 idx_loc = '' if self.bare else '.git'
322 undefined_description = u'unknown'
336 undefined_description = u'unknown'
323 description_path = os.path.join(self.path, idx_loc, 'description')
337 description_path = os.path.join(self.path, idx_loc, 'description')
324 if os.path.isfile(description_path):
338 if os.path.isfile(description_path):
325 return safe_unicode(open(description_path).read())
339 return safe_unicode(open(description_path).read())
326 else:
340 else:
327 return undefined_description
341 return undefined_description
328
342
329 @LazyProperty
343 @LazyProperty
330 def contact(self):
344 def contact(self):
331 undefined_contact = u'Unknown'
345 undefined_contact = u'Unknown'
332 return undefined_contact
346 return undefined_contact
333
347
334 @property
348 @property
335 def branches(self):
349 def branches(self):
336 if not self.revisions:
350 if not self.revisions:
337 return {}
351 return {}
338 sortkey = lambda ctx: ctx[0]
352 sortkey = lambda ctx: ctx[0]
339 _branches = [(x[0], x[1][0])
353 _branches = [(x[0], x[1][0])
340 for x in self._parsed_refs.iteritems() if x[1][1] == 'H']
354 for x in self._parsed_refs.iteritems() if x[1][1] == 'H']
341 return OrderedDict(sorted(_branches, key=sortkey, reverse=False))
355 return OrderedDict(sorted(_branches, key=sortkey, reverse=False))
342
356
    @LazyProperty
    def tags(self):
        # Cached tag name -> sha mapping; tag() and remove_tag() assign
        # over this attribute to refresh the cache after mutating refs.
        return self._get_tags()
346
360
347 def _get_tags(self):
361 def _get_tags(self):
348 if not self.revisions:
362 if not self.revisions:
349 return {}
363 return {}
350
364
351 sortkey = lambda ctx: ctx[0]
365 sortkey = lambda ctx: ctx[0]
352 _tags = [(x[0], x[1][0])
366 _tags = [(x[0], x[1][0])
353 for x in self._parsed_refs.iteritems() if x[1][1] == 'T']
367 for x in self._parsed_refs.iteritems() if x[1][1] == 'T']
354 return OrderedDict(sorted(_tags, key=sortkey, reverse=True))
368 return OrderedDict(sorted(_tags, key=sortkey, reverse=True))
355
369
356 def tag(self, name, user, revision=None, message=None, date=None,
370 def tag(self, name, user, revision=None, message=None, date=None,
357 **kwargs):
371 **kwargs):
358 """
372 """
359 Creates and returns a tag for the given ``revision``.
373 Creates and returns a tag for the given ``revision``.
360
374
361 :param name: name for new tag
375 :param name: name for new tag
362 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
376 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
363 :param revision: changeset id for which new tag would be created
377 :param revision: changeset id for which new tag would be created
364 :param message: message of the tag's commit
378 :param message: message of the tag's commit
365 :param date: date of tag's commit
379 :param date: date of tag's commit
366
380
367 :raises TagAlreadyExistError: if tag with same name already exists
381 :raises TagAlreadyExistError: if tag with same name already exists
368 """
382 """
369 if name in self.tags:
383 if name in self.tags:
370 raise TagAlreadyExistError("Tag %s already exists" % name)
384 raise TagAlreadyExistError("Tag %s already exists" % name)
371 changeset = self.get_changeset(revision)
385 changeset = self.get_changeset(revision)
372 message = message or "Added tag %s for commit %s" % (name,
386 message = message or "Added tag %s for commit %s" % (name,
373 changeset.raw_id)
387 changeset.raw_id)
374 self._repo.refs["refs/tags/%s" % name] = changeset._commit.id
388 self._repo.refs["refs/tags/%s" % name] = changeset._commit.id
375
389
376 self._parsed_refs = self._get_parsed_refs()
390 self._parsed_refs = self._get_parsed_refs()
377 self.tags = self._get_tags()
391 self.tags = self._get_tags()
378 return changeset
392 return changeset
379
393
380 def remove_tag(self, name, user, message=None, date=None):
394 def remove_tag(self, name, user, message=None, date=None):
381 """
395 """
382 Removes tag with the given ``name``.
396 Removes tag with the given ``name``.
383
397
384 :param name: name of the tag to be removed
398 :param name: name of the tag to be removed
385 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
399 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
386 :param message: message of the tag's removal commit
400 :param message: message of the tag's removal commit
387 :param date: date of tag's removal commit
401 :param date: date of tag's removal commit
388
402
389 :raises TagDoesNotExistError: if tag with given name does not exists
403 :raises TagDoesNotExistError: if tag with given name does not exists
390 """
404 """
391 if name not in self.tags:
405 if name not in self.tags:
392 raise TagDoesNotExistError("Tag %s does not exist" % name)
406 raise TagDoesNotExistError("Tag %s does not exist" % name)
393 tagpath = posixpath.join(self._repo.refs.path, 'refs', 'tags', name)
407 tagpath = posixpath.join(self._repo.refs.path, 'refs', 'tags', name)
394 try:
408 try:
395 os.remove(tagpath)
409 os.remove(tagpath)
396 self._parsed_refs = self._get_parsed_refs()
410 self._parsed_refs = self._get_parsed_refs()
397 self.tags = self._get_tags()
411 self.tags = self._get_tags()
398 except OSError, e:
412 except OSError, e:
399 raise RepositoryError(e.strerror)
413 raise RepositoryError(e.strerror)
400
414
    @LazyProperty
    def _parsed_refs(self):
        # Cached result of _get_parsed_refs(); tag() and remove_tag()
        # assign over this attribute to refresh it after refs change.
        return self._get_parsed_refs()
404
418
405 def _get_parsed_refs(self):
419 def _get_parsed_refs(self):
406 refs = self._repo.get_refs()
420 refs = self._repo.get_refs()
407 keys = [('refs/heads/', 'H'),
421 keys = [('refs/heads/', 'H'),
408 ('refs/remotes/origin/', 'RH'),
422 ('refs/remotes/origin/', 'RH'),
409 ('refs/tags/', 'T')]
423 ('refs/tags/', 'T')]
410 _refs = {}
424 _refs = {}
411 for ref, sha in refs.iteritems():
425 for ref, sha in refs.iteritems():
412 for k, type_ in keys:
426 for k, type_ in keys:
413 if ref.startswith(k):
427 if ref.startswith(k):
414 _key = ref[len(k):]
428 _key = ref[len(k):]
415 if type_ == 'T':
429 if type_ == 'T':
416 obj = self._repo.get_object(sha)
430 obj = self._repo.get_object(sha)
417 if isinstance(obj, Tag):
431 if isinstance(obj, Tag):
418 sha = self._repo.get_object(sha).object[1]
432 sha = self._repo.get_object(sha).object[1]
419 _refs[_key] = [sha, type_]
433 _refs[_key] = [sha, type_]
420 break
434 break
421 return _refs
435 return _refs
422
436
423 def _heads(self, reverse=False):
437 def _heads(self, reverse=False):
424 refs = self._repo.get_refs()
438 refs = self._repo.get_refs()
425 heads = {}
439 heads = {}
426
440
427 for key, val in refs.items():
441 for key, val in refs.items():
428 for ref_key in ['refs/heads/', 'refs/remotes/origin/']:
442 for ref_key in ['refs/heads/', 'refs/remotes/origin/']:
429 if key.startswith(ref_key):
443 if key.startswith(ref_key):
430 n = key[len(ref_key):]
444 n = key[len(ref_key):]
431 if n not in ['HEAD']:
445 if n not in ['HEAD']:
432 heads[n] = val
446 heads[n] = val
433
447
434 return heads if reverse else dict((y, x) for x, y in heads.iteritems())
448 return heads if reverse else dict((y, x) for x, y in heads.iteritems())
435
449
436 def get_changeset(self, revision=None):
450 def get_changeset(self, revision=None):
437 """
451 """
438 Returns ``GitChangeset`` object representing commit from git repository
452 Returns ``GitChangeset`` object representing commit from git repository
439 at the given revision or head (most recent commit) if None given.
453 at the given revision or head (most recent commit) if None given.
440 """
454 """
441 if isinstance(revision, GitChangeset):
455 if isinstance(revision, GitChangeset):
442 return revision
456 return revision
443 revision = self._get_revision(revision)
457 revision = self._get_revision(revision)
444 changeset = GitChangeset(repository=self, revision=revision)
458 changeset = GitChangeset(repository=self, revision=revision)
445 return changeset
459 return changeset
446
460
    def get_changesets(self, start=None, end=None, start_date=None,
                       end_date=None, branch_name=None, reverse=False):
        """
        Returns iterator of ``GitChangeset`` objects from start to end (both
        are inclusive), in ascending date order (unless ``reverse`` is set).

        :param start: changeset ID, as str; first returned changeset
        :param end: changeset ID, as str; last returned changeset
        :param start_date: if specified, changesets with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, changesets with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, changesets not reachable from given
          branch would be filtered out from returned set
        :param reverse: if ``True``, returned generator would be reversed
          (meaning that returned changesets would have descending date order)

        :raise BranchDoesNotExistError: If given ``branch_name`` does not
            exist.
        :raise ChangesetDoesNotExistError: If changeset for given ``start`` or
          ``end`` could not be found.

        """
        if branch_name and branch_name not in self.branches:
            raise BranchDoesNotExistError("Branch '%s' not found" \
                                          % branch_name)
        # %H at format means (full) commit hash, initial hashes are retrieved
        # in ascending date order
        cmd_template = 'log --date-order --reverse --pretty=format:"%H"'
        cmd_params = {}
        if start_date:
            cmd_template += ' --since "$since"'
            cmd_params['since'] = start_date.strftime('%m/%d/%y %H:%M:%S')
        if end_date:
            cmd_template += ' --until "$until"'
            cmd_params['until'] = end_date.strftime('%m/%d/%y %H:%M:%S')
        if branch_name:
            cmd_template += ' $branch_name'
            cmd_params['branch_name'] = branch_name
        else:
            cmd_template += ' --all'

        # string.Template keeps unknown $placeholders intact
        # (safe_substitute), so a missing param cannot raise here
        cmd = Template(cmd_template).safe_substitute(**cmd_params)
        revs = self.run_git_command(cmd)[0].splitlines()
        start_pos = 0
        end_pos = len(revs)
        if start:
            _start = self._get_revision(start)
            try:
                start_pos = revs.index(_start)
            except ValueError:
                # requested start not in the filtered rev list: silently
                # fall back to the beginning of the range
                pass

        if end is not None:
            _end = self._get_revision(end)
            try:
                end_pos = revs.index(_end)
            except ValueError:
                # requested end not found: keep default end (whole list)
                pass

        if None not in [start, end] and start_pos > end_pos:
            raise RepositoryError('start cannot be after end')

        # NOTE(review): end_pos is always an int here (initialized to
        # len(revs)), so this guard always fires; the +1 makes the slice
        # below inclusive of ``end``
        if end_pos is not None:
            end_pos += 1

        revs = revs[start_pos:end_pos]
        if reverse:
            revs = reversed(revs)
        for rev in revs:
            yield self.get_changeset(rev)
518
532
519 def get_diff(self, rev1, rev2, path=None, ignore_whitespace=False,
533 def get_diff(self, rev1, rev2, path=None, ignore_whitespace=False,
520 context=3):
534 context=3):
521 """
535 """
522 Returns (git like) *diff*, as plain text. Shows changes introduced by
536 Returns (git like) *diff*, as plain text. Shows changes introduced by
523 ``rev2`` since ``rev1``.
537 ``rev2`` since ``rev1``.
524
538
525 :param rev1: Entry point from which diff is shown. Can be
539 :param rev1: Entry point from which diff is shown. Can be
526 ``self.EMPTY_CHANGESET`` - in this case, patch showing all
540 ``self.EMPTY_CHANGESET`` - in this case, patch showing all
527 the changes since empty state of the repository until ``rev2``
541 the changes since empty state of the repository until ``rev2``
528 :param rev2: Until which revision changes should be shown.
542 :param rev2: Until which revision changes should be shown.
529 :param ignore_whitespace: If set to ``True``, would not show whitespace
543 :param ignore_whitespace: If set to ``True``, would not show whitespace
530 changes. Defaults to ``False``.
544 changes. Defaults to ``False``.
531 :param context: How many lines before/after changed lines should be
545 :param context: How many lines before/after changed lines should be
532 shown. Defaults to ``3``.
546 shown. Defaults to ``3``.
533 """
547 """
534 flags = ['-U%s' % context, '--full-index', '--binary', '-p', '-M', '--abbrev=40']
548 flags = ['-U%s' % context, '--full-index', '--binary', '-p', '-M', '--abbrev=40']
535 if ignore_whitespace:
549 if ignore_whitespace:
536 flags.append('-w')
550 flags.append('-w')
537
551
538 if hasattr(rev1, 'raw_id'):
552 if hasattr(rev1, 'raw_id'):
539 rev1 = getattr(rev1, 'raw_id')
553 rev1 = getattr(rev1, 'raw_id')
540
554
541 if hasattr(rev2, 'raw_id'):
555 if hasattr(rev2, 'raw_id'):
542 rev2 = getattr(rev2, 'raw_id')
556 rev2 = getattr(rev2, 'raw_id')
543
557
544 if rev1 == self.EMPTY_CHANGESET:
558 if rev1 == self.EMPTY_CHANGESET:
545 rev2 = self.get_changeset(rev2).raw_id
559 rev2 = self.get_changeset(rev2).raw_id
546 cmd = ' '.join(['show'] + flags + [rev2])
560 cmd = ' '.join(['show'] + flags + [rev2])
547 else:
561 else:
548 rev1 = self.get_changeset(rev1).raw_id
562 rev1 = self.get_changeset(rev1).raw_id
549 rev2 = self.get_changeset(rev2).raw_id
563 rev2 = self.get_changeset(rev2).raw_id
550 cmd = ' '.join(['diff'] + flags + [rev1, rev2])
564 cmd = ' '.join(['diff'] + flags + [rev1, rev2])
551
565
552 if path:
566 if path:
553 cmd += ' -- "%s"' % path
567 cmd += ' -- "%s"' % path
554
568
555 stdout, stderr = self.run_git_command(cmd)
569 stdout, stderr = self.run_git_command(cmd)
556 # If we used 'show' command, strip first few lines (until actual diff
570 # If we used 'show' command, strip first few lines (until actual diff
557 # starts)
571 # starts)
558 if rev1 == self.EMPTY_CHANGESET:
572 if rev1 == self.EMPTY_CHANGESET:
559 lines = stdout.splitlines()
573 lines = stdout.splitlines()
560 x = 0
574 x = 0
561 for line in lines:
575 for line in lines:
562 if line.startswith('diff'):
576 if line.startswith('diff'):
563 break
577 break
564 x += 1
578 x += 1
565 # Append new line just like 'diff' command do
579 # Append new line just like 'diff' command do
566 stdout = '\n'.join(lines[x:]) + '\n'
580 stdout = '\n'.join(lines[x:]) + '\n'
567 return stdout
581 return stdout
568
582
    @LazyProperty
    def in_memory_changeset(self):
        """
        Returns ``GitInMemoryChangeset`` object for this repository.
        """
        # NOTE(review): LazyProperty presumably caches this, so repeated
        # access returns the same instance -- confirm against LazyProperty
        return GitInMemoryChangeset(self)
575
589
576 def clone(self, url, update_after_clone=True, bare=False):
590 def clone(self, url, update_after_clone=True, bare=False):
577 """
591 """
578 Tries to clone changes from external location.
592 Tries to clone changes from external location.
579
593
580 :param update_after_clone: If set to ``False``, git won't checkout
594 :param update_after_clone: If set to ``False``, git won't checkout
581 working directory
595 working directory
582 :param bare: If set to ``True``, repository would be cloned into
596 :param bare: If set to ``True``, repository would be cloned into
583 *bare* git repository (no working directory at all).
597 *bare* git repository (no working directory at all).
584 """
598 """
585 url = self._get_url(url)
599 url = self._get_url(url)
586 cmd = ['clone']
600 cmd = ['clone']
587 if bare:
601 if bare:
588 cmd.append('--bare')
602 cmd.append('--bare')
589 elif not update_after_clone:
603 elif not update_after_clone:
590 cmd.append('--no-checkout')
604 cmd.append('--no-checkout')
591 cmd += ['--', '"%s"' % url, '"%s"' % self.path]
605 cmd += ['--', '"%s"' % url, '"%s"' % self.path]
592 cmd = ' '.join(cmd)
606 cmd = ' '.join(cmd)
593 # If error occurs run_git_command raises RepositoryError already
607 # If error occurs run_git_command raises RepositoryError already
594 self.run_git_command(cmd)
608 self.run_git_command(cmd)
595
609
596 def pull(self, url):
610 def pull(self, url):
597 """
611 """
598 Tries to pull changes from external location.
612 Tries to pull changes from external location.
599 """
613 """
600 url = self._get_url(url)
614 url = self._get_url(url)
601 cmd = ['pull']
615 cmd = ['pull']
602 cmd.append("--ff-only")
616 cmd.append("--ff-only")
603 cmd.append(url)
617 cmd.append(url)
604 cmd = ' '.join(cmd)
618 cmd = ' '.join(cmd)
605 # If error occurs run_git_command raises RepositoryError already
619 # If error occurs run_git_command raises RepositoryError already
606 self.run_git_command(cmd)
620 self.run_git_command(cmd)
607
621
608 def fetch(self, url):
622 def fetch(self, url):
609 """
623 """
610 Tries to pull changes from external location.
624 Tries to pull changes from external location.
611 """
625 """
612 url = self._get_url(url)
626 url = self._get_url(url)
613 so, se = self.run_git_command('ls-remote -h %s' % url)
627 so, se = self.run_git_command('ls-remote -h %s' % url)
614 refs = []
628 refs = []
615 for line in (x for x in so.splitlines()):
629 for line in (x for x in so.splitlines()):
616 sha, ref = line.split('\t')
630 sha, ref = line.split('\t')
617 refs.append(ref)
631 refs.append(ref)
618 refs = ' '.join(('+%s:%s' % (r, r) for r in refs))
632 refs = ' '.join(('+%s:%s' % (r, r) for r in refs))
619 cmd = '''fetch %s -- %s''' % (url, refs)
633 cmd = '''fetch %s -- %s''' % (url, refs)
620 self.run_git_command(cmd)
634 self.run_git_command(cmd)
621
635
    @LazyProperty
    def workdir(self):
        """
        Returns ``Workdir`` instance for this repository.
        """
        # NOTE(review): cached by LazyProperty, so the same GitWorkdir
        # instance is presumably returned on each access -- confirm
        return GitWorkdir(self)
628
642
629 def get_config_value(self, section, name, config_file=None):
643 def get_config_value(self, section, name, config_file=None):
630 """
644 """
631 Returns configuration value for a given [``section``] and ``name``.
645 Returns configuration value for a given [``section``] and ``name``.
632
646
633 :param section: Section we want to retrieve value from
647 :param section: Section we want to retrieve value from
634 :param name: Name of configuration we want to retrieve
648 :param name: Name of configuration we want to retrieve
635 :param config_file: A path to file which should be used to retrieve
649 :param config_file: A path to file which should be used to retrieve
636 configuration from (might also be a list of file paths)
650 configuration from (might also be a list of file paths)
637 """
651 """
638 if config_file is None:
652 if config_file is None:
639 config_file = []
653 config_file = []
640 elif isinstance(config_file, basestring):
654 elif isinstance(config_file, basestring):
641 config_file = [config_file]
655 config_file = [config_file]
642
656
643 def gen_configs():
657 def gen_configs():
644 for path in config_file + self._config_files:
658 for path in config_file + self._config_files:
645 try:
659 try:
646 yield ConfigFile.from_path(path)
660 yield ConfigFile.from_path(path)
647 except (IOError, OSError, ValueError):
661 except (IOError, OSError, ValueError):
648 continue
662 continue
649
663
650 for config in gen_configs():
664 for config in gen_configs():
651 try:
665 try:
652 return config.get(section, name)
666 return config.get(section, name)
653 except KeyError:
667 except KeyError:
654 continue
668 continue
655 return None
669 return None
656
670
657 def get_user_name(self, config_file=None):
671 def get_user_name(self, config_file=None):
658 """
672 """
659 Returns user's name from global configuration file.
673 Returns user's name from global configuration file.
660
674
661 :param config_file: A path to file which should be used to retrieve
675 :param config_file: A path to file which should be used to retrieve
662 configuration from (might also be a list of file paths)
676 configuration from (might also be a list of file paths)
663 """
677 """
664 return self.get_config_value('user', 'name', config_file)
678 return self.get_config_value('user', 'name', config_file)
665
679
666 def get_user_email(self, config_file=None):
680 def get_user_email(self, config_file=None):
667 """
681 """
668 Returns user's email from global configuration file.
682 Returns user's email from global configuration file.
669
683
670 :param config_file: A path to file which should be used to retrieve
684 :param config_file: A path to file which should be used to retrieve
671 configuration from (might also be a list of file paths)
685 configuration from (might also be a list of file paths)
672 """
686 """
673 return self.get_config_value('user', 'email', config_file)
687 return self.get_config_value('user', 'email', config_file)
General Comments 0
You need to be logged in to leave comments. Login now