##// END OF EJS Templates
backends: use reference explicitly to properly translate GIT references to commits such as numeric branches
milka -
r4653:5035738c default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -1,1070 +1,1071 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2020 RhodeCode GmbH
3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 Some simple helper functions
23 Some simple helper functions
24 """
24 """
25
25
26 import collections
26 import collections
27 import datetime
27 import datetime
28 import dateutil.relativedelta
28 import dateutil.relativedelta
29 import hashlib
29 import hashlib
30 import logging
30 import logging
31 import re
31 import re
32 import sys
32 import sys
33 import time
33 import time
34 import urllib
34 import urllib
35 import urlobject
35 import urlobject
36 import uuid
36 import uuid
37 import getpass
37 import getpass
38 from functools import update_wrapper, partial
38 from functools import update_wrapper, partial
39
39
40 import pygments.lexers
40 import pygments.lexers
41 import sqlalchemy
41 import sqlalchemy
42 import sqlalchemy.engine.url
42 import sqlalchemy.engine.url
43 import sqlalchemy.exc
43 import sqlalchemy.exc
44 import sqlalchemy.sql
44 import sqlalchemy.sql
45 import webob
45 import webob
46 import pyramid.threadlocal
46 import pyramid.threadlocal
47 from pyramid import compat
47 from pyramid import compat
48 from pyramid.settings import asbool
48 from pyramid.settings import asbool
49
49
50 import rhodecode
50 import rhodecode
51 from rhodecode.translation import _, _pluralize
51 from rhodecode.translation import _, _pluralize
52
52
53
53
def md5(s):
    """Return the hexadecimal MD5 digest of *s*."""
    digest = hashlib.md5(s)
    return digest.hexdigest()
56
56
57
57
def md5_safe(s):
    """Return the hexadecimal MD5 digest of *s*, coercing it to str first."""
    return md5(safe_str(s))
60
60
61
61
def sha1(s):
    """Return the hexadecimal SHA-1 digest of *s*."""
    digest = hashlib.sha1(s)
    return digest.hexdigest()
64
64
65
65
def sha1_safe(s):
    """Return the hexadecimal SHA-1 digest of *s*, coercing it to str first."""
    return sha1(safe_str(s))
68
68
69
69
def __get_lem(extra_mapping=None):
    """
    Build a mapping of file extension -> list of lexer names, derived from
    the registered pygments lexers, optionally extended with extra entries.
    """
    mapping = collections.defaultdict(lambda: [])

    def _expand(pattern):
        # normalize a pygments filename pattern such as '*.c' or '*.php[345]'
        pattern = pattern.lstrip('*')
        pattern = pattern.lstrip('.')

        bracket = pattern.find('[')
        if bracket == -1:
            return [pattern.lower()]

        # expand a character class: '*.php[345]' -> php3, php4, php5
        close = pattern.find(']')
        base = pattern[:bracket]
        return [(base + suffix).lower()
                for suffix in pattern[bracket + 1:close]]

    for lexer_name, entry in sorted(pygments.lexers.LEXERS.items()):
        extensions = []
        for pattern in entry[-2]:
            extensions.extend(_expand(pattern))
        if extensions:
            description = lexer_name.replace('Lexer', '')
            for ext in extensions:
                mapping[ext].append(description)

    data = dict(mapping)

    extra_mapping = extra_mapping or {}
    for ext, lexer in extra_mapping.items():
        if ext not in data:
            # register new mapping2lexer
            data[ext] = [lexer]

    return data
108
108
109
109
def str2bool(_str):
    """
    Translate the given value into a boolean.

    :param _str: value to interpret as a boolean
    :rtype: boolean
    :returns: True for truthy strings ('t', 'true', 'y', 'yes', 'on', '1'),
        the value itself when already boolean, False otherwise (incl. None)
    """
    if _str is None:
        return False
    # keep the original membership test: actual booleans pass through as-is
    if _str in (True, False):
        return _str
    normalized = str(_str).strip().lower()
    return normalized in ('t', 'true', 'y', 'yes', 'on', '1')
125
125
126
126
def aslist(obj, sep=None, strip=True):
    """
    Coerce *obj* into a list.

    Strings are split on *sep* (optionally stripping each item); lists and
    tuples pass through unchanged; None becomes []; anything else is wrapped
    in a single-element list.

    :param obj: value to convert
    :param sep: separator used when *obj* is a string
    :param strip: when True, strip whitespace from each split item
    """
    if isinstance(obj, (basestring,)):
        parts = obj.split(sep)
        if strip:
            return [part.strip() for part in parts]
        return parts
    if isinstance(obj, (list, tuple)):
        return obj
    if obj is None:
        return []
    return [obj]
146
146
147
147
def convert_line_endings(line, mode):
    """
    Convert the line endings of *line* to the requested style.

    Available modes are::
        0 - Unix (LF)
        1 - Mac  (CR)
        2 - DOS  (CRLF)

    :param line: given line to convert
    :param mode: mode to convert to
    :rtype: str
    :return: converted line according to mode
    """
    if mode == 0:
        # collapse CRLF first so lone CR handling doesn't double-convert
        return line.replace('\r\n', '\n').replace('\r', '\n')
    if mode == 1:
        return line.replace('\r\n', '\r').replace('\n', '\r')
    if mode == 2:
        # CR not followed by LF, or LF not preceded by CR
        return re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line)
    return line
171
171
172
172
def detect_mode(line, default):
    """
    Detect the line-ending style of *line*; if no line break can be found,
    the given default value is returned.

    :param line: str line
    :param default: default
    :rtype: int
    :return: 0 - Unix, 1 - Mac, 2 - DOS, or *default* when undetermined
    """
    # check CRLF before LF: '\r\n' also endswith '\n'
    if line.endswith('\r\n'):
        return 2
    if line.endswith('\n'):
        return 0
    if line.endswith('\r'):
        return 1
    return default
191
191
192
192
def safe_int(val, default=None):
    """
    Return ``int(val)``; if *val* cannot be converted, return *default*.

    :param val: value to convert
    :param default: fallback returned on conversion failure
    """
    try:
        return int(val)
    except (ValueError, TypeError):
        return default
208
208
209
209
def safe_unicode(str_, from_encoding=None, use_chardet=False):
    """
    safe unicode function. Does few trick to turn str_ into unicode

    In case of UnicodeDecode error, we try to return it with encoding detected
    by chardet library if it fails fallback to unicode with errors replaced

    :param str_: string to decode
    :param from_encoding: encoding (or list of encodings) to try; defaults
        to the configured ``default_encoding`` setting (utf8)
    :param use_chardet: when True, try chardet-based detection before the
        final errors='replace' fallback
    :rtype: unicode
    :returns: unicode object
    """
    # already unicode - nothing to do
    if isinstance(str_, unicode):
        return str_

    if not from_encoding:
        # fall back to the application-configured encodings (comma separated)
        DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
                                                        'utf8'), sep=',')
        from_encoding = DEFAULT_ENCODINGS

    if not isinstance(from_encoding, (list, tuple)):
        from_encoding = [from_encoding]

    # first try the interpreter default codec
    try:
        return unicode(str_)
    except UnicodeDecodeError:
        pass

    # then each configured encoding, in order
    for enc in from_encoding:
        try:
            return unicode(str_, enc)
        except UnicodeDecodeError:
            pass

    if use_chardet:
        try:
            import chardet
            encoding = chardet.detect(str_)['encoding']
            if encoding is None:
                # no confident guess - force the replace-fallback below
                raise Exception()
            return str_.decode(encoding)
        except (ImportError, UnicodeDecodeError, Exception):
            # chardet missing or detection failed: replace undecodable bytes
            return unicode(str_, from_encoding[0], 'replace')
    else:
        return unicode(str_, from_encoding[0], 'replace')
254
254
def safe_str(unicode_, to_encoding=None, use_chardet=False):
    """
    safe str function. Does few trick to turn unicode_ into string

    In case of UnicodeEncodeError, we try to return it with encoding detected
    by chardet library if it fails fallback to string with errors replaced

    :param unicode_: unicode to encode
    :param to_encoding: encoding (or list of encodings) to try; defaults
        to the configured ``default_encoding`` setting (utf8)
    :param use_chardet: when True, try chardet-based detection before the
        final errors='replace' fallback
    :rtype: str
    :returns: str object
    """

    # if it's not basestr cast to str
    if not isinstance(unicode_, compat.string_types):
        return str(unicode_)

    if isinstance(unicode_, str):
        return unicode_

    if not to_encoding:
        # fall back to the application-configured encodings (comma separated)
        DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
                                                        'utf8'), sep=',')
        to_encoding = DEFAULT_ENCODINGS

    if not isinstance(to_encoding, (list, tuple)):
        to_encoding = [to_encoding]

    # try each configured encoding, in order
    for enc in to_encoding:
        try:
            return unicode_.encode(enc)
        except UnicodeEncodeError:
            pass

    if use_chardet:
        try:
            import chardet
            encoding = chardet.detect(unicode_)['encoding']
            if encoding is None:
                # BUGFIX: the original raised UnicodeEncodeError() with no
                # arguments, but its constructor requires 5 - that raised an
                # uncaught TypeError instead of reaching the fallback below.
                # Mirror safe_unicode() and raise a plain Exception, which
                # the except clause handles.
                raise Exception()
            return unicode_.encode(encoding)
        except (ImportError, UnicodeEncodeError, Exception):
            # chardet missing or detection failed: replace unencodable chars
            return unicode_.encode(to_encoding[0], 'replace')
    else:
        return unicode_.encode(to_encoding[0], 'replace')
300
300
301
301
def remove_suffix(s, suffix):
    """
    Return *s* with *suffix* removed from its end, if present.

    :param s: input string
    :param suffix: suffix to strip; an empty suffix leaves *s* unchanged
    """
    # guard against an empty suffix: the original s[:-len('')] evaluated to
    # s[:0] == '' and wiped the whole string
    if suffix and s.endswith(suffix):
        s = s[:-len(suffix)]
    return s
306
306
307
307
def remove_prefix(s, prefix):
    """
    Return *s* with *prefix* removed from its start, if present.

    :param s: input string
    :param prefix: prefix to strip
    """
    if s.startswith(prefix):
        return s[len(prefix):]
    return s
312
312
313
313
def find_calling_context(ignore_modules=None):
    """
    Look through the calling stack and return the frame which called
    this function and is part of core module ( ie. rhodecode.* )

    :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib']
    """
    ignore_modules = ignore_modules or []
    # top-level package name of this module, e.g. 'rhodecode'
    top_package = __name__.split('.')[0]

    # start two frames up: skip this function and its immediate caller
    frame = sys._getframe(2)
    while frame.f_back is not None:
        module_name = frame.f_globals.get('__name__')
        if module_name and module_name.startswith(top_package):
            if module_name not in ignore_modules:
                return frame
        frame = frame.f_back
    return None
332
332
333
333
def ping_connection(connection, branch):
    """
    SQLAlchemy ``engine_connect`` event listener implementing pessimistic
    disconnect handling: emit a cheap ``SELECT 1`` when a connection is
    checked out and transparently re-validate it if it turned out stale.

    :param connection: the SQLAlchemy Connection being checked out
    :param branch: True when this is a sub-connection of an already
        checked-out connection
    """
    if branch:
        # "branch" refers to a sub-connection of a connection,
        # we don't want to bother pinging on these.
        return

    # turn off "close with result". This flag is only used with
    # "connectionless" execution, otherwise will be False in any case
    save_should_close_with_result = connection.should_close_with_result
    connection.should_close_with_result = False

    try:
        # run a SELECT 1. use a core select() so that
        # the SELECT of a scalar value without a table is
        # appropriately formatted for the backend
        connection.scalar(sqlalchemy.sql.select([1]))
    except sqlalchemy.exc.DBAPIError as err:
        # catch SQLAlchemy's DBAPIError, which is a wrapper
        # for the DBAPI's exception. It includes a .connection_invalidated
        # attribute which specifies if this connection is a "disconnect"
        # condition, which is based on inspection of the original exception
        # by the dialect in use.
        if err.connection_invalidated:
            # run the same SELECT again - the connection will re-validate
            # itself and establish a new connection. The disconnect detection
            # here also causes the whole connection pool to be invalidated
            # so that all stale connections are discarded.
            connection.scalar(sqlalchemy.sql.select([1]))
        else:
            raise
    finally:
        # restore "close with result"
        connection.should_close_with_result = save_should_close_with_result
367
367
368
368
def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
    """
    Custom engine_from_config functions.

    Wraps ``sqlalchemy.engine_from_config`` and, driven by configuration,
    optionally attaches a connection-ping listener
    (``sqlalchemy.db1.ping_connection``) and SQL debug logging
    (``sqlalchemy.db1.debug_query``).

    :param configuration: settings dict; the two custom keys are popped
        before being handed to SQLAlchemy
    :param prefix: prefix of the sqlalchemy keys inside *configuration*
    :returns: configured SQLAlchemy engine
    """
    log = logging.getLogger('sqlalchemy.engine')
    use_ping_connection = asbool(configuration.pop('sqlalchemy.db1.ping_connection', None))
    debug = asbool(configuration.pop('sqlalchemy.db1.debug_query', None))

    engine = sqlalchemy.engine_from_config(configuration, prefix, **kwargs)

    def color_sql(sql):
        # wrap sql in ANSI escapes so it stands out in terminal logs
        color_seq = '\033[1;33m'  # This is yellow: code 33
        normal = '\x1b[0m'
        return ''.join([color_seq, sql, normal])

    if use_ping_connection:
        log.debug('Adding ping_connection on the engine config.')
        sqlalchemy.event.listen(engine, "engine_connect", ping_connection)

    if debug:
        # attach events only for debug configuration
        def before_cursor_execute(conn, cursor, statement,
                                  parameters, context, executemany):
            # remember when the query started so tooling can measure it
            setattr(conn, 'query_start_time', time.time())
            log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
            calling_context = find_calling_context(ignore_modules=[
                'rhodecode.lib.caching_query',
                'rhodecode.model.settings',
            ])
            if calling_context:
                log.info(color_sql('call context %s:%s' % (
                    calling_context.f_code.co_filename,
                    calling_context.f_lineno,
                )))

        def after_cursor_execute(conn, cursor, statement,
                                 parameters, context, executemany):
            # clear the start-time marker set in before_cursor_execute
            delattr(conn, 'query_start_time')

        sqlalchemy.event.listen(engine, "before_cursor_execute", before_cursor_execute)
        sqlalchemy.event.listen(engine, "after_cursor_execute", after_cursor_execute)

    return engine
410
410
411
411
def get_encryption_key(config):
    """
    Return the secret used for encrypted values, preferring the dedicated
    ``rhodecode.encrypted_values.secret`` setting and falling back to the
    beaker session secret.

    :param config: application configuration dict
    """
    explicit_secret = config.get('rhodecode.encrypted_values.secret')
    fallback = config['beaker.session.secret']
    return explicit_secret or fallback
416
416
417
417
def age(prevdate, now=None, show_short_version=False, show_suffix=True,
        short_format=False):
    """
    Turns a datetime into an age string.
    If show_short_version is True, this generates a shorter string with
    an approximate age; ex. '1 day ago', rather than '1 day and 23 hours ago'.

    * IMPORTANT*
    Code of this function is written in special way so it's easier to
    backport it to javascript. If you mean to update it, please also update
    `jquery.timeago-extension.js` file

    :param prevdate: datetime object
    :param now: get current time, if not define we use
        `datetime.datetime.now()`
    :param show_short_version: if it should approximate the date and
        return a shorter string
    :param show_suffix: append 'ago' / prepend 'in' to the result
    :param short_format: show short format, eg 2D instead of 2 days
    :rtype: unicode
    :returns: unicode words describing age
    """

    def _get_relative_delta(now, prevdate):
        # split the difference into calendar parts (years..seconds)
        base = dateutil.relativedelta.relativedelta(now, prevdate)
        return {
            'year': base.years,
            'month': base.months,
            'day': base.days,
            'hour': base.hours,
            'minute': base.minutes,
            'second': base.seconds,
        }

    def _is_leap_year(year):
        return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)

    def get_month(prevdate):
        return prevdate.month

    def get_year(prevdate):
        return prevdate.year

    now = now or datetime.datetime.now()
    order = ['year', 'month', 'day', 'hour', 'minute', 'second']
    deltas = {}
    future = False

    # a date in the future: swap the operands so deltas stay positive and
    # remember to phrase the result as 'in ...'
    if prevdate > now:
        now_old = now
        now = prevdate
        prevdate = now_old
        future = True
    if future:
        prevdate = prevdate.replace(microsecond=0)
    # Get date parts deltas
    for part in order:
        rel_delta = _get_relative_delta(now, prevdate)
        deltas[part] = rel_delta[part]

    # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
    # not 1 hour, -59 minutes and -59 seconds)
    offsets = [[5, 60], [4, 60], [3, 24]]
    for element in offsets:  # seconds, minutes, hours
        num = element[0]
        length = element[1]

        part = order[num]
        carry_part = order[num - 1]

        if deltas[part] < 0:
            deltas[part] += length
            deltas[carry_part] -= 1

    # Same thing for days except that the increment depends on the (variable)
    # number of days in the month
    month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
    if deltas['day'] < 0:
        if get_month(prevdate) == 2 and _is_leap_year(get_year(prevdate)):
            deltas['day'] += 29
        else:
            deltas['day'] += month_lengths[get_month(prevdate) - 1]

        deltas['month'] -= 1

    if deltas['month'] < 0:
        deltas['month'] += 12
        deltas['year'] -= 1

    # Format the result
    if short_format:
        fmt_funcs = {
            'year': lambda d: u'%dy' % d,
            'month': lambda d: u'%dm' % d,
            'day': lambda d: u'%dd' % d,
            'hour': lambda d: u'%dh' % d,
            'minute': lambda d: u'%dmin' % d,
            'second': lambda d: u'%dsec' % d,
        }
    else:
        fmt_funcs = {
            'year': lambda d: _pluralize(u'${num} year', u'${num} years', d, mapping={'num': d}).interpolate(),
            'month': lambda d: _pluralize(u'${num} month', u'${num} months', d, mapping={'num': d}).interpolate(),
            'day': lambda d: _pluralize(u'${num} day', u'${num} days', d, mapping={'num': d}).interpolate(),
            'hour': lambda d: _pluralize(u'${num} hour', u'${num} hours', d, mapping={'num': d}).interpolate(),
            'minute': lambda d: _pluralize(u'${num} minute', u'${num} minutes', d, mapping={'num': d}).interpolate(),
            'second': lambda d: _pluralize(u'${num} second', u'${num} seconds', d, mapping={'num': d}).interpolate(),
        }

    # walk parts from largest to smallest; the first non-zero part (plus
    # optionally the next smaller one) determines the phrasing
    i = 0
    for part in order:
        value = deltas[part]
        if value != 0:

            if i < 5:
                sub_part = order[i + 1]
                sub_value = deltas[sub_part]
            else:
                sub_value = 0

            # single-part phrasing: next smaller part is zero, or caller
            # asked for the approximate version
            if sub_value == 0 or show_short_version:
                _val = fmt_funcs[part](value)
                if future:
                    if show_suffix:
                        return _(u'in ${ago}', mapping={'ago': _val})
                    else:
                        return _(_val)

                else:
                    if show_suffix:
                        return _(u'${ago} ago', mapping={'ago': _val})
                    else:
                        return _(_val)

            # two-part phrasing, e.g. '1 day and 23 hours ago'
            val = fmt_funcs[part](value)
            val_detail = fmt_funcs[sub_part](sub_value)
            mapping = {'val': val, 'detail': val_detail}

            if short_format:
                datetime_tmpl = _(u'${val}, ${detail}', mapping=mapping)
                if show_suffix:
                    datetime_tmpl = _(u'${val}, ${detail} ago', mapping=mapping)
                    if future:
                        datetime_tmpl = _(u'in ${val}, ${detail}', mapping=mapping)
            else:
                datetime_tmpl = _(u'${val} and ${detail}', mapping=mapping)
                if show_suffix:
                    datetime_tmpl = _(u'${val} and ${detail} ago', mapping=mapping)
                    if future:
                        datetime_tmpl = _(u'in ${val} and ${detail}', mapping=mapping)

            return datetime_tmpl
        i += 1
    return _(u'just now')
572
572
573
573
def age_from_seconds(seconds):
    """
    Return a short human-readable age string for a duration given in seconds.

    :param seconds: duration in seconds (any value accepted by safe_int)
    """
    offset = safe_int(seconds) or 0
    reference = time_to_datetime(time.time() + offset)
    return age(reference, show_suffix=False, show_short_version=True)
578
578
579
579
def cleaned_uri(uri):
    """
    Quotes '[' and ']' from uri if there is only one of them.
    according to RFC3986 we cannot use such chars in uri

    :param uri:
    :return: uri without this chars
    """
    safe_chars = '@$:/'
    return urllib.quote(uri, safe=safe_chars)
588
588
589
589
def credentials_filter(uri):
    """
    Return *uri* with any username/password component stripped out.

    :param uri: url that possibly contains credentials
    """
    import urlobject
    sanitized = urlobject.URLObject(cleaned_uri(uri))
    sanitized = sanitized.without_password().without_username()

    return sanitized
601
601
602
602
def get_host_info(request):
    """
    Generate host info, to obtain full url e.g https://server.com
    use this
    `{scheme}://{netloc}`
    """
    if not request:
        return {}

    home_url = request.route_url('home')
    url_obj = urlobject.URLObject(home_url)
    # keep any proxy-prefix path as part of the netloc
    prefix_path = safe_unicode(urllib.unquote(url_obj.path.rstrip('/')))

    return {
        'scheme': url_obj.scheme,
        'netloc': url_obj.netloc + prefix_path,
        'hostname': url_obj.hostname,
    }
621
621
622
622
def get_clone_url(request, uri_tmpl, repo_name, repo_id, repo_type, **override):
    """
    Render a clone url from *uri_tmpl*, filling the `{placeholder}` tokens
    with values derived from the request and the repository.

    :param request: pyramid request used to resolve the home route
    :param uri_tmpl: template string with `{scheme}`, `{repo}`, ... tokens
    :param repo_name: repository name substituted for `{repo}`
    :param repo_id: repository id substituted for `{repoid}`
    :param repo_type: vcs type; svn gets the svn+ssh scheme special-case
    :param override: extra/replacement substitution values
    """
    home_url = request.route_url('home')
    url_obj = urlobject.URLObject(home_url)
    prefix_path = safe_unicode(urllib.unquote(url_obj.path.rstrip('/')))

    substitutions = {
        'scheme': url_obj.scheme,
        'user': '',
        'sys_user': getpass.getuser(),
        # path if we use proxy-prefix
        'netloc': url_obj.netloc + prefix_path,
        'hostname': url_obj.hostname,
        'prefix': prefix_path,
        'repo': repo_name,
        'repoid': str(repo_id),
        'repo_type': repo_type
    }
    substitutions.update(override)
    substitutions['user'] = urllib.quote(safe_str(substitutions['user']))

    for token, replacement in substitutions.items():
        uri_tmpl = uri_tmpl.replace('{%s}' % token, replacement)

    # special case for SVN clone url
    if repo_type == 'svn':
        uri_tmpl = uri_tmpl.replace('ssh://', 'svn+ssh://')

    # remove leading @ sign if it's present. Case of empty user
    rendered = urlobject.URLObject(uri_tmpl)
    rendered = rendered.with_netloc(rendered.netloc.lstrip('@'))

    return safe_unicode(rendered)
655
655
656
656
def get_commit_safe(repo, commit_id=None, commit_idx=None, pre_load=None,
                    maybe_unreachable=False, reference_obj=None):
    """
    Safe version of get_commit if this commit doesn't exists for a
    repository it returns a Dummy one instead

    :param repo: repository instance
    :param commit_id: commit id as str
    :param commit_idx: numeric commit index
    :param pre_load: optional list of commit attributes to load
    :param maybe_unreachable: translate unreachable commits on git repos
    :param reference_obj: explicitly search via a reference obj in git. E.g "branch:123" would mean branch "123"
    """
    # TODO(skreft): remove these circular imports
    from rhodecode.lib.vcs.backends.base import BaseRepository, EmptyCommit
    from rhodecode.lib.vcs.exceptions import RepositoryError

    if not isinstance(repo, BaseRepository):
        raise Exception('You must pass an Repository '
                        'object as first argument got %s', type(repo))

    try:
        return repo.get_commit(
            commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load,
            maybe_unreachable=maybe_unreachable, reference_obj=reference_obj)
    except (RepositoryError, LookupError):
        return EmptyCommit()
683
684
684
685
def datetime_to_time(dt):
    """
    Convert a datetime to a unix timestamp (local time).

    Returns None when *dt* is falsy (e.g. None).
    """
    if not dt:
        return None
    return time.mktime(dt.timetuple())
688
689
689
690
def time_to_datetime(tm):
    """
    Convert a unix timestamp (number or numeric string) to a local datetime.

    Returns None for falsy input or a string that cannot be parsed as float.
    """
    if not tm:
        return None
    if isinstance(tm, compat.string_types):
        try:
            tm = float(tm)
        except ValueError:
            return None
    return datetime.datetime.fromtimestamp(tm)
698
699
699
700
def time_to_utcdatetime(tm):
    """
    Convert a unix timestamp (number or numeric string) to a UTC datetime.

    Returns None for falsy input or a string that cannot be parsed as float.
    """
    if not tm:
        return None
    if isinstance(tm, compat.string_types):
        try:
            tm = float(tm)
        except ValueError:
            return None
    return datetime.datetime.utcfromtimestamp(tm)
708
709
709
710
# Matches "@username" mentions: the "@" must appear at the start of the text
# or be preceded by a character that cannot be part of a username; the
# captured name starts alphanumeric and may continue with ".", "-" or "_".
MENTIONS_REGEX = re.compile(
    # ^@ or @ without any special chars in front
    r'(?:^@|[^a-zA-Z0-9\-\_\.]@)'
    # main body starts with letter, then can be . - _
    r'([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)',
    re.VERBOSE | re.MULTILINE)
716
717
717
718
def extract_mentioned_users(s):
    """
    Returns unique usernames from given string s that have @mention

    :param s: string to get mentions
    """
    mentioned = set(MENTIONS_REGEX.findall(s))
    return sorted(mentioned, key=lambda name: name.lower())
729
730
730
731
class AttributeDictBase(dict):
    """
    Dict subclass whose keys can be written/deleted via attribute access,
    and which stays picklable.
    """
    def __getstate__(self):
        # the instance attribute dict is all the state pickle needs
        return self.__dict__

    def __setstate__(self, state):
        self.__dict__ = state

    # attribute writes/deletes operate on the dict items
    __setattr__ = dict.__setitem__
    __delattr__ = dict.__delitem__
741
742
742
743
class StrictAttributeDict(AttributeDictBase):
    """
    Strict Version of Attribute dict which raises an Attribute error when
    requested attribute is not set
    """
    def __getattr__(self, attr):
        try:
            return self[attr]
        except KeyError:
            raise AttributeError('%s object has no attribute %s' % (
                self.__class__, attr))
754
755
755
756
class AttributeDict(AttributeDictBase):
    """Attribute dict that yields None for attributes that are not set."""
    def __getattr__(self, attr):
        return self.get(attr, None)
759
760
760
761
761
762
762 class OrderedDefaultDict(collections.OrderedDict, collections.defaultdict):
763 class OrderedDefaultDict(collections.OrderedDict, collections.defaultdict):
763 def __init__(self, default_factory=None, *args, **kwargs):
764 def __init__(self, default_factory=None, *args, **kwargs):
764 # in python3 you can omit the args to super
765 # in python3 you can omit the args to super
765 super(OrderedDefaultDict, self).__init__(*args, **kwargs)
766 super(OrderedDefaultDict, self).__init__(*args, **kwargs)
766 self.default_factory = default_factory
767 self.default_factory = default_factory
767
768
768
769
def fix_PATH(os_=None):
    """
    Get current active python path, and append it to PATH variable to fix
    issues of subprocess calls and different python versions

    :param os_: optional os-like module (used for testing); defaults to os
    """
    if os_ is None:
        import os
    else:
        os = os_

    exe_dir = os.path.split(sys.executable)[0]
    # only prepend when the interpreter dir is not already the PATH head
    if not os.environ['PATH'].startswith(exe_dir):
        os.environ['PATH'] = '%s:%s' % (exe_dir, os.environ['PATH'])
782
783
783
784
def obfuscate_url_pw(engine):
    """
    Return the engine/database url as unicode with any password masked.
    """
    url_repr = engine or ''
    try:
        url_repr = sqlalchemy.engine.url.make_url(engine)
        if url_repr.password:
            url_repr.password = 'XXXXX'
    except Exception:
        # best-effort: fall back to the raw value when it cannot be parsed
        pass
    return unicode(url_repr)
793
794
794
795
def get_server_url(environ):
    """Return the full server url (host url + script name) from a WSGI environ."""
    request = webob.Request(environ)
    return request.host_url + request.script_name
798
799
799
800
def unique_id(hexlen=32):
    """
    Generate a random id of *hexlen* characters, drawn from an alphabet
    that avoids visually ambiguous characters (0/O, 1/I/l).
    """
    alphabet = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz"
    return suuid(truncate_to=hexlen, alphabet=alphabet)
803
804
804
805
def suuid(url=None, truncate_to=22, alphabet=None):
    """
    Generate and return a short URL safe UUID.

    If the url parameter is provided, set the namespace to the provided
    URL and generate a UUID.

    :param url: url to get the uuid for
    :param truncate_to: truncate the basic 22 UUID to shorter version
    :param alphabet: optional custom alphabet used for base-encoding

    The IDs won't be universally unique any longer, but the probability of
    a collision will still be very low.
    """
    # Define our alphabet.
    _ALPHABET = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ"

    # If no URL is given, generate a random UUID.
    if url is None:
        unique_id = uuid.uuid4().int
    else:
        unique_id = uuid.uuid3(uuid.NAMESPACE_URL, url).int

    alphabet_length = len(_ALPHABET)
    output = []
    while unique_id > 0:
        # Use integer floor-division: the previous int(unique_id / length)
        # goes through float division on python3 and silently loses
        # precision on 128-bit UUID integers, corrupting the encoding.
        unique_id, digit = divmod(unique_id, alphabet_length)
        output.append(_ALPHABET[digit])
    return "".join(output)[:truncate_to]
834
835
835
836
def get_current_rhodecode_user(request=None):
    """
    Get the rhodecode user bound to the given (or current threadlocal)
    request; checks the web request first, then the API rpc request.
    """
    pyramid_request = request or pyramid.threadlocal.get_current_request()

    if pyramid_request:
        # web case
        if hasattr(pyramid_request, 'user'):
            return pyramid_request.user

        # api case
        if hasattr(pyramid_request, 'rpc_user'):
            return pyramid_request.rpc_user

    return None
851
852
852
853
def action_logger_generic(action, namespace=''):
    """
    A generic logger for actions useful to the system overview, tries to find
    an acting user for the context of the call otherwise reports unknown user

    :param action: logging message eg 'comment 5 deleted'
    :param type: string

    :param namespace: namespace of the logging message eg. 'repo.comments'
    :param type: string

    """
    logger_name = 'rhodecode.actions'
    if namespace:
        logger_name = '{}.{}'.format(logger_name, namespace)

    log = logging.getLogger(logger_name)

    # get a user if we can
    user = get_current_rhodecode_user()

    if user:
        logfunc = log.info
    else:
        user = '<unknown user>'
        logfunc = log.warning

    logfunc('Logging action by {}: {}'.format(user, action))
883
884
884
885
def escape_split(text, sep=',', maxsplit=-1):
    r"""
    Split *text* on *sep* while honouring backslash-escaped separators:
    e.g. arg='foo\, bar' keeps 'foo, bar' as a single element.

    It should be noted that the way bash et. al. do command line parsing, those
    single quotes are required.
    """
    escaped_sep = '\\' + sep

    if escaped_sep not in text:
        return text.split(sep, maxsplit)

    head, _escape, tail = text.partition(escaped_sep)
    head_parts = head.split(sep, maxsplit)  # a regular split is fine here
    # the last piece continues past the escaped separator
    pending = head_parts.pop()

    # recurse because the tail may contain more escaped separators
    tail_parts = escape_split(tail, sep, maxsplit)

    # tail_parts[0] is the remainder of the escaped value
    pending += sep + tail_parts[0]

    return head_parts + [pending] + tail_parts[1:]
910
911
911
912
class OptionalAttr(object):
    """
    Special Optional Option that defines other attribute. Example::

        def test(apiuser, userid=Optional(OAttr('apiuser')):
            user = Optional.extract(userid)
            # calls

    """

    def __init__(self, attr_name):
        self.attr_name = attr_name

    def __repr__(self):
        return '<OptionalAttr:{}>'.format(self.attr_name)

    def __call__(self):
        # calling an OptionalAttr yields the instance itself
        return self


# alias
OAttr = OptionalAttr
934
935
935
936
class Optional(object):
    """
    Defines an optional parameter::

        param = param.getval() if isinstance(param, Optional) else param
        param = param() if isinstance(param, Optional) else param

    is equivalent of::

        param = Optional.extract(param)

    """

    def __init__(self, type_):
        self.type_ = type_

    def __repr__(self):
        return '<Optional:%s>' % self.type_.__repr__()

    def __call__(self):
        return self.getval()

    def getval(self):
        """
        Return the value wrapped by this Optional instance; OAttr wrappers
        yield their attribute name instead.
        """
        wrapped = self.type_
        if isinstance(wrapped, OAttr):
            # use params name
            return wrapped.attr_name
        return wrapped

    @classmethod
    def extract(cls, val):
        """
        Unwrap *val* when it is an Optional instance, otherwise return it
        unchanged.

        :param val:
        """
        return val.getval() if isinstance(val, cls) else val
979
980
980
981
def glob2re(pat):
    """
    Translate a shell PATTERN to a regular expression.

    There is no way to quote meta-characters.
    """
    pos, length = 0, len(pat)
    out = []
    while pos < length:
        ch = pat[pos]
        pos += 1
        if ch == '*':
            # '*' matches anything except the path separator
            out.append('[^/]*')
        elif ch == '?':
            # '?' matches a single non-separator character
            out.append('[^/]')
        elif ch == '[':
            # scan ahead for the end of the character class
            scan = pos
            if scan < length and pat[scan] == '!':
                scan += 1
            if scan < length and pat[scan] == ']':
                scan += 1
            while scan < length and pat[scan] != ']':
                scan += 1
            if scan >= length:
                # unterminated class: treat '[' as a literal
                out.append('\\[')
            else:
                body = pat[pos:scan].replace('\\', '\\\\')
                pos = scan + 1
                if body[0] == '!':
                    body = '^' + body[1:]
                elif body[0] == '^':
                    body = '\\' + body
                out.append('[%s]' % body)
        else:
            out.append(re.escape(ch))
    return ''.join(out) + '\\Z(?ms)'
1020
1021
1021
1022
def parse_byte_string(size_str):
    """
    Parse a human-readable size like '10MB' or '512kb' into a number of bytes.

    :param size_str: string in the format <num>(MB|KB), case-insensitive
    :raises ValueError: when the string does not match the expected format
    """
    match = re.match(r'(\d+)(MB|KB)', size_str, re.IGNORECASE)
    if not match:
        raise ValueError('Given size:%s is invalid, please make sure '
                         'to use format of <num>(MB|KB)' % size_str)

    _parts = match.groups()
    num, type_ = _parts
    # int() instead of py2-only long(): on python2 int() transparently
    # promotes to long, on python3 long() does not exist
    return int(num) * {'mb': 1024 * 1024, 'kb': 1024}[type_.lower()]
1031
1032
1032
1033
class CachedProperty(object):
    """
    Lazy Attributes. With option to invalidate the cache by running a method

    class Foo():

        @CachedProperty
        def heavy_func():
            return 'super-calculation'

    foo = Foo()
    foo.heavy_func() # first computions
    foo.heavy_func() # fetch from cache
    foo._invalidate_prop_cache('heavy_func')
    # at this point calling foo.heavy_func() will be re-computed
    """

    def __init__(self, func, func_name=None):
        if func_name is None:
            func_name = func.__name__
        self.data = (func, func_name)
        update_wrapper(self, func)

    def __get__(self, inst, class_):
        if inst is None:
            # accessed on the class itself: return the descriptor
            return self

        func, attr_name = self.data
        result = func(inst)
        # store on the instance so later lookups bypass this descriptor
        inst.__dict__[attr_name] = result
        if '_invalidate_prop_cache' not in inst.__dict__:
            inst.__dict__['_invalidate_prop_cache'] = partial(
                self._invalidate_prop_cache, inst)
        return result

    def _invalidate_prop_cache(self, inst, name):
        # dropping the cached value forces re-computation on next access
        inst.__dict__.pop(name, None)
@@ -1,1933 +1,1937 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2020 RhodeCode GmbH
3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Base module for all VCS systems
22 Base module for all VCS systems
23 """
23 """
24 import os
24 import os
25 import re
25 import re
26 import time
26 import time
27 import shutil
27 import shutil
28 import datetime
28 import datetime
29 import fnmatch
29 import fnmatch
30 import itertools
30 import itertools
31 import logging
31 import logging
32 import collections
32 import collections
33 import warnings
33 import warnings
34
34
35 from zope.cachedescriptors.property import Lazy as LazyProperty
35 from zope.cachedescriptors.property import Lazy as LazyProperty
36
36
37 from pyramid import compat
37 from pyramid import compat
38
38
39 import rhodecode
39 import rhodecode
40 from rhodecode.translation import lazy_ugettext
40 from rhodecode.translation import lazy_ugettext
41 from rhodecode.lib.utils2 import safe_str, safe_unicode, CachedProperty
41 from rhodecode.lib.utils2 import safe_str, safe_unicode, CachedProperty
42 from rhodecode.lib.vcs import connection
42 from rhodecode.lib.vcs import connection
43 from rhodecode.lib.vcs.utils import author_name, author_email
43 from rhodecode.lib.vcs.utils import author_name, author_email
44 from rhodecode.lib.vcs.conf import settings
44 from rhodecode.lib.vcs.conf import settings
45 from rhodecode.lib.vcs.exceptions import (
45 from rhodecode.lib.vcs.exceptions import (
46 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
46 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
47 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
47 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
48 NodeDoesNotExistError, NodeNotChangedError, VCSError,
48 NodeDoesNotExistError, NodeNotChangedError, VCSError,
49 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
49 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
50 RepositoryError)
50 RepositoryError)
51
51
52
52
53 log = logging.getLogger(__name__)
53 log = logging.getLogger(__name__)
54
54
55
55
56 FILEMODE_DEFAULT = 0o100644
56 FILEMODE_DEFAULT = 0o100644
57 FILEMODE_EXECUTABLE = 0o100755
57 FILEMODE_EXECUTABLE = 0o100755
58 EMPTY_COMMIT_ID = '0' * 40
58 EMPTY_COMMIT_ID = '0' * 40
59
59
_Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))


class Reference(_Reference):
    """A ``(type, name, commit_id)`` triple describing a VCS reference."""

    @property
    def branch(self):
        # only meaningful for branch references; None otherwise
        return self.name if self.type == 'branch' else None

    @property
    def bookmark(self):
        # only meaningful for bookmark references; None otherwise
        return self.name if self.type == 'book' else None

    @property
    def to_unicode(self):
        return reference_to_unicode(self)

75
79
def unicode_to_reference(raw):
    """
    Convert a unicode (or string) to a reference object.
    If unicode evaluates to False it returns None.
    """
    if not raw:
        return None
    # serialized form is 'type:name:commit_id'
    return Reference(*raw.split(':'))
86
90
87
91
def reference_to_unicode(ref):
    """
    Convert a reference object to unicode.
    If reference is None it returns None.
    """
    return u':'.join(ref) if ref else None
97
101
98
102
class MergeFailureReason(object):
    """
    Enumeration with all the reasons why the server side merge could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The merge was not successful, there are conflicts.
    MERGE_FAILED = 2

    # The merge succeeded but we could not push it to the target repository.
    PUSH_FAILED = 3

    # The specified target is not a head in the target repository.
    TARGET_IS_NOT_HEAD = 4

    # The source repository contains more branches than the target. Pushing
    # the merge will create additional branches in the target.
    HG_SOURCE_HAS_MORE_BRANCHES = 5

    # The target reference has multiple heads. That does not allow to correctly
    # identify the target location. This could only happen for mercurial
    # branches.
    HG_TARGET_HAS_MULTIPLE_HEADS = 6

    # The target repository is locked
    TARGET_IS_LOCKED = 7

    # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
    # A involved commit could not be found.
    _DEPRECATED_MISSING_COMMIT = 8

    # The target repo reference is missing.
    MISSING_TARGET_REF = 9

    # The source repo reference is missing.
    MISSING_SOURCE_REF = 10

    # The merge was not successful, there are conflicts related to sub
    # repositories.
    SUBREPO_MERGE_FAILED = 11
150
154
151
155
class UpdateFailureReason(object):
    """
    Enumeration with all the reasons why the pull request update could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The pull request is up to date.
    NO_CHANGE = 2

    # The pull request has a reference type that is not supported for update.
    WRONG_REF_TYPE = 3

    # Update failed because the target reference is missing.
    MISSING_TARGET_REF = 4

    # Update failed because the source reference is missing.
    MISSING_SOURCE_REF = 5
180
184
181
185
class MergeResponse(object):
    """
    Outcome of a server-side merge attempt: whether the merge is possible,
    whether it was executed, the resulting merge reference, and a failure
    reason code (one of `MergeFailureReason`) plus formatting metadata.
    """

    # uses .format(**metadata) for variables
    MERGE_STATUS_MESSAGES = {
        MergeFailureReason.NONE: lazy_ugettext(
            u'This pull request can be automatically merged.'),
        MergeFailureReason.UNKNOWN: lazy_ugettext(
            u'This pull request cannot be merged because of an unhandled exception. '
            u'{exception}'),
        MergeFailureReason.MERGE_FAILED: lazy_ugettext(
            u'This pull request cannot be merged because of merge conflicts. {unresolved_files}'),
        MergeFailureReason.PUSH_FAILED: lazy_ugettext(
            u'This pull request could not be merged because push to '
            u'target:`{target}@{merge_commit}` failed.'),
        MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
            u'This pull request cannot be merged because the target '
            u'`{target_ref.name}` is not a head.'),
        MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
            u'This pull request cannot be merged because the source contains '
            u'more branches than the target.'),
        MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
            u'This pull request cannot be merged because the target `{target_ref.name}` '
            u'has multiple heads: `{heads}`.'),
        MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
            u'This pull request cannot be merged because the target repository is '
            u'locked by {locked_by}.'),

        MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            u'This pull request cannot be merged because the target '
            u'reference `{target_ref.name}` is missing.'),
        MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            u'This pull request cannot be merged because the source '
            u'reference `{source_ref.name}` is missing.'),
        MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
            u'This pull request cannot be merged because of conflicts related '
            u'to sub repositories.'),

        # Deprecations
        MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
            u'This pull request cannot be merged because the target or the '
            u'source reference is missing.'),

    }

    def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
        """
        :param possible: bool, whether the merge can be performed.
        :param executed: bool, whether the merge was actually carried out.
        :param merge_ref: resulting merge reference (backend dependent).
        :param failure_reason: one of the `MergeFailureReason` codes.
        :param metadata: optional dict used to format the status message.
        """
        self.possible = possible
        self.executed = executed
        self.merge_ref = merge_ref
        self.failure_reason = failure_reason
        self.metadata = metadata or {}

    def __repr__(self):
        return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason)

    def __eq__(self, other):
        # merge_ref and metadata are deliberately excluded from equality
        same_instance = isinstance(other, self.__class__)
        return same_instance \
            and self.possible == other.possible \
            and self.executed == other.executed \
            and self.failure_reason == other.failure_reason

    @property
    def label(self):
        # reverse mapping: failure-reason code -> public constant name
        label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
                          not k.startswith('_'))
        return label_dict.get(self.failure_reason)

    @property
    def merge_status_message(self):
        """
        Return a human friendly error message for the given merge status code.
        """
        msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason])

        try:
            return msg.format(**self.metadata)
        except Exception:
            # missing/invalid metadata keys: fall back to the raw template
            log.exception('Failed to format %s message', self)
            return msg

    def asdict(self):
        # plain-dict form of the response, e.g. for serialization
        data = {}
        for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
                  'merge_status_message']:
            data[k] = getattr(self, k)
        return data
268
272
269
273
class TargetRefMissing(ValueError):
    """Raised when the target reference of an operation cannot be resolved."""
    pass
272
276
273
277
class SourceRefMissing(ValueError):
    """Raised when the source reference of an operation cannot be resolved."""
    pass
276
280
277
281
278 class BaseRepository(object):
282 class BaseRepository(object):
279 """
283 """
280 Base Repository for final backends
284 Base Repository for final backends
281
285
282 .. attribute:: DEFAULT_BRANCH_NAME
286 .. attribute:: DEFAULT_BRANCH_NAME
283
287
284 name of default branch (i.e. "trunk" for svn, "master" for git etc.
288 name of default branch (i.e. "trunk" for svn, "master" for git etc.
285
289
286 .. attribute:: commit_ids
290 .. attribute:: commit_ids
287
291
288 list of all available commit ids, in ascending order
292 list of all available commit ids, in ascending order
289
293
290 .. attribute:: path
294 .. attribute:: path
291
295
292 absolute path to the repository
296 absolute path to the repository
293
297
294 .. attribute:: bookmarks
298 .. attribute:: bookmarks
295
299
296 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
300 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
297 there are no bookmarks or the backend implementation does not support
301 there are no bookmarks or the backend implementation does not support
298 bookmarks.
302 bookmarks.
299
303
300 .. attribute:: tags
304 .. attribute:: tags
301
305
302 Mapping from name to :term:`Commit ID` of the tag.
306 Mapping from name to :term:`Commit ID` of the tag.
303
307
304 """
308 """
305
309
306 DEFAULT_BRANCH_NAME = None
310 DEFAULT_BRANCH_NAME = None
307 DEFAULT_CONTACT = u"Unknown"
311 DEFAULT_CONTACT = u"Unknown"
308 DEFAULT_DESCRIPTION = u"unknown"
312 DEFAULT_DESCRIPTION = u"unknown"
309 EMPTY_COMMIT_ID = '0' * 40
313 EMPTY_COMMIT_ID = '0' * 40
310
314
311 path = None
315 path = None
312
316
313 _is_empty = None
317 _is_empty = None
314 _commit_ids = {}
318 _commit_ids = {}
315
319
    def __init__(self, repo_path, config=None, create=False, **kwargs):
        """
        Initializes repository. Raises RepositoryError if repository could
        not be found at the given ``repo_path`` or directory at ``repo_path``
        exists and ``create`` is set to True.

        :param repo_path: local path of the repository
        :param config: repository configuration
        :param create=False: if set to True, would try to create repository.
        :param src_url=None: if set, should be proper url from which repository
            would be cloned; requires ``create`` parameter to be set to True -
            raises RepositoryError if src_url is set and create evaluates to
            False
        """
        raise NotImplementedError
331
335
332 def __repr__(self):
336 def __repr__(self):
333 return '<%s at %s>' % (self.__class__.__name__, self.path)
337 return '<%s at %s>' % (self.__class__.__name__, self.path)
334
338
    def __len__(self):
        """Number of commits in this repository."""
        return self.count()
337
341
338 def __eq__(self, other):
342 def __eq__(self, other):
339 same_instance = isinstance(other, self.__class__)
343 same_instance = isinstance(other, self.__class__)
340 return same_instance and other.path == self.path
344 return same_instance and other.path == self.path
341
345
    def __ne__(self, other):
        # explicit inverse of __eq__ (required on Python 2)
        return not self.__eq__(other)
344
348
    def get_create_shadow_cache_pr_path(self, db_repo):
        """
        Return the cached-diffs directory for *db_repo*, creating it
        (mode 0755) on first use.
        """
        path = db_repo.cached_diffs_dir
        if not os.path.exists(path):
            # NOTE(review): exists/makedirs is not atomic - concurrent callers
            # could race here; presumably acceptable for this cache dir.
            os.makedirs(path, 0o755)
        return path
350
354
    @classmethod
    def get_default_config(cls, default=None):
        """
        Build a fresh backend `Config`, optionally seeded from *default* -
        a list of ``(section, key, value)`` tuples.
        """
        config = Config()
        if default and isinstance(default, list):
            for section, key, val in default:
                config.set(section, key, val)
        return config
358
362
    @LazyProperty
    def _remote(self):
        """Remote backend proxy for this repository - implemented by subclasses."""
        raise NotImplementedError
362
366
    def _heads(self, branch=None):
        """Return head commit ids; the base implementation knows none."""
        return []
365
369
    @LazyProperty
    def EMPTY_COMMIT(self):
        """Sentinel `EmptyCommit` bound to this repository's empty commit id."""
        return EmptyCommit(self.EMPTY_COMMIT_ID)
369
373
370 @LazyProperty
374 @LazyProperty
371 def alias(self):
375 def alias(self):
372 for k, v in settings.BACKENDS.items():
376 for k, v in settings.BACKENDS.items():
373 if v.split('.')[-1] == str(self.__class__.__name__):
377 if v.split('.')[-1] == str(self.__class__.__name__):
374 return k
378 return k
375
379
    @LazyProperty
    def name(self):
        """Repository name: the last path component, as unicode."""
        return safe_unicode(os.path.basename(self.path))
379
383
    @LazyProperty
    def description(self):
        """Human readable repository description - implemented by subclasses."""
        raise NotImplementedError
383
387
384 def refs(self):
388 def refs(self):
385 """
389 """
386 returns a `dict` with branches, bookmarks, tags, and closed_branches
390 returns a `dict` with branches, bookmarks, tags, and closed_branches
387 for this repository
391 for this repository
388 """
392 """
389 return dict(
393 return dict(
390 branches=self.branches,
394 branches=self.branches,
391 branches_closed=self.branches_closed,
395 branches_closed=self.branches_closed,
392 tags=self.tags,
396 tags=self.tags,
393 bookmarks=self.bookmarks
397 bookmarks=self.bookmarks
394 )
398 )
395
399
    @LazyProperty
    def branches(self):
        """
        A `dict` which maps branch names to commit ids.
        Implemented by subclasses.
        """
        raise NotImplementedError
402
406
    @LazyProperty
    def branches_closed(self):
        """
        A `dict` which maps closed branch names to commit ids.
        Implemented by subclasses.
        """
        raise NotImplementedError
409
413
    @LazyProperty
    def bookmarks(self):
        """
        A `dict` which maps bookmark names to commit ids.
        Implemented by subclasses.
        """
        raise NotImplementedError
416
420
    @LazyProperty
    def tags(self):
        """
        A `dict` which maps tags names to commit ids.
        Implemented by subclasses.
        """
        raise NotImplementedError
423
427
    @LazyProperty
    def size(self):
        """
        Returns combined size in bytes for all repository files
        (measured at the most recent commit).
        """
        tip = self.get_commit()
        return tip.size
431
435
    def size_at_commit(self, commit_id):
        """Combined size in bytes of all repository files at ``commit_id``."""
        commit = self.get_commit(commit_id)
        return commit.size
435
439
436 def _check_for_empty(self):
440 def _check_for_empty(self):
437 no_commits = len(self._commit_ids) == 0
441 no_commits = len(self._commit_ids) == 0
438 if no_commits:
442 if no_commits:
439 # check on remote to be sure
443 # check on remote to be sure
440 return self._remote.is_empty()
444 return self._remote.is_empty()
441 else:
445 else:
442 return False
446 return False
443
447
    def is_empty(self):
        """
        Return True if the repository has no commits.

        The answer is cached for production; tests always re-check so that
        repositories created mid-test are reflected immediately.
        """
        if rhodecode.is_test:
            return self._check_for_empty()

        if self._is_empty is None:
            # cache empty for production, but not tests
            self._is_empty = self._check_for_empty()

        return self._is_empty
453
457
    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link. Implemented by subclasses.
        """
        raise NotImplementedError
461
465
    @staticmethod
    def is_valid_repository(path):
        """
        Check if given `path` contains a valid repository of this backend.
        Implemented by subclasses.
        """
        raise NotImplementedError
468
472
469 # ==========================================================================
473 # ==========================================================================
470 # COMMITS
474 # COMMITS
471 # ==========================================================================
475 # ==========================================================================
472
476
    @CachedProperty
    def commit_ids(self):
        """
        List of all available commit ids, in ascending order.

        Cached via `CachedProperty`; reset with
        ``self._invalidate_prop_cache('commit_ids')``.
        """
        raise NotImplementedError
476
480
    def append_commit_id(self, commit_id):
        """
        Append ``commit_id`` to the cached commit ids (if not present) and
        reset the cached empty-state so new commits become visible.
        """
        if commit_id not in self.commit_ids:
            self._rebuild_cache(self.commit_ids + [commit_id])

        # clear cache
        self._invalidate_prop_cache('commit_ids')
        self._is_empty = False
484
488
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
                   translate_tag=None, maybe_unreachable=False, reference_obj=None):
        """
        Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
        are both None, most recent commit is returned.

        :param pre_load: Optional. List of commit attributes to load.
        :param translate_tag: Optional. Backend specific tag translation flag.
        :param maybe_unreachable: Optional. Also consider commits that are not
            reachable from any reference (backend dependent).
        :param reference_obj: Optional. An explicit `Reference` used by
            backends to resolve ambiguous references to commits - e.g. GIT
            branch names that look like commit ids (numeric branches).

        :raises ``EmptyRepositoryError``: if there are no commits
        """
        raise NotImplementedError
496
500
    def __iter__(self):
        """Iterate over all commits, in `commit_ids` (ascending) order."""
        for commit_id in self.commit_ids:
            yield self.get_commit(commit_id=commit_id)
500
504
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns iterator of `BaseCommit` objects from start to end
        not inclusive. This should behave just like a list, ie. end is not
        inclusive.

        :param start_id: None or str, must be a valid commit id
        :param end_id: None or str, must be a valid commit id
        :param start_date: None or datetime - lower date bound (backend dependent)
        :param end_date: None or datetime - upper date bound (backend dependent)
        :param branch_name: None or str - limit to a branch (backend dependent)
        :param show_hidden: bool - include hidden commits where supported
        :param pre_load: Optional. List of commit attributes to load.
        :param translate_tags: Optional. Backend specific tag translation flag.
        """
        raise NotImplementedError
519
523
    def __getitem__(self, key):
        """
        Allows index based access to the commit objects of this repository.

        ``key`` may be an integer index into `commit_ids` or a slice; slices
        yield commits lazily via `_get_range`.
        """
        pre_load = ["author", "branch", "date", "message", "parents"]
        if isinstance(key, slice):
            return self._get_range(key, pre_load)
        return self.get_commit(commit_idx=key, pre_load=pre_load)
528
532
529 def _get_range(self, slice_obj, pre_load):
533 def _get_range(self, slice_obj, pre_load):
530 for commit_id in self.commit_ids.__getitem__(slice_obj):
534 for commit_id in self.commit_ids.__getitem__(slice_obj):
531 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
535 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
532
536
    def count(self):
        """Total number of commits in this repository."""
        return len(self.commit_ids)
535
539
    def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit
        :param opts: backend specific extra options

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        raise NotImplementedError
549
553
def remove_tag(self, name, user, message=None, date=None):
    """
    Remove the tag called ``name``.

    :param name: name of the tag to remove
    :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
    :param message: message for the removal commit
    :param date: date for the removal commit

    :raises TagDoesNotExistError: if no tag with the given name exists

    Abstract; concrete backends must implement this.
    """
    raise NotImplementedError
562
566
def get_diff(
        self, commit1, commit2, path=None, ignore_whitespace=False,
        context=3, path1=None):
    """
    Return a (git-style) *diff* as plain text with the changes introduced
    by `commit2` relative to `commit1`.

    :param commit1: starting point of the diff. May be
        ``self.EMPTY_COMMIT`` to show everything up to `commit2`.
    :param commit2: end point of the diff.
    :param path: restrict the diff to this file path. If `path1` is also
        given, this value applies to `commit2` only.
    :param ignore_whitespace: if ``True``, whitespace-only changes are
        omitted. Defaults to ``False``.
    :param context: number of context lines around changes. Defaults to 3.
    :param path1: path associated with `commit1`; only honoured by
        backends that support per-commit paths, others raise `ValueError`
        when it differs from `path`.

    Abstract; concrete backends must implement this.
    """
    raise NotImplementedError
588
592
def strip(self, commit_id, branch=None):
    """
    Remove the given ``commit_id`` from this repository.

    Abstract; concrete backends must implement this.
    """
    raise NotImplementedError
594
598
def get_common_ancestor(self, commit_id1, commit_id2, repo2):
    """
    Return the latest common ancestor of `commit_id1` (from this repo)
    and `commit_id2` (from `repo2`), if one exists.

    :param commit_id1: commit id from this repository (comparison target)
    :param commit_id2: source commit id
    :param repo2: source repository

    Abstract; concrete backends must implement this.
    """
    raise NotImplementedError
606
610
def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
    """
    Compare this repository's `commit_id1` with `commit_id2` of `repo2`.

    Returns a tuple ``(commits, ancestor)`` describing what would be
    merged from `commit_id2`. With ``merge=False`` the ancestor is
    ``None``.

    :param commit_id1: commit id from this repository (comparison target)
    :param commit_id2: source commit id
    :param repo2: source repository
    :param merge: when ``True``, also compute the common ancestor
    :param pre_load: optional list of commit attributes to pre-load

    Abstract; concrete backends must implement this.
    """
    raise NotImplementedError
624
628
def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
          user_name='', user_email='', message='', dry_run=False,
          use_rebase=False, close_branch=False):
    """
    Merge the commits reachable from `source_ref` in `source_repo` onto
    `target_ref` of this repository.

    `source_ref` and `target_ref` are named tuples with the fields
    `type`, `name` and `commit_id`.

    Returns a MergeResponse named tuple with the fields 'possible',
    'executed', 'source_commit', 'target_commit', 'merge_commit'.

    :param repo_id: `repo_id` target repo id.
    :param workspace_id: `workspace_id` unique identifier.
    :param target_ref: commit the `source_ref` should be merged on top of.
    :param source_repo: repository containing the commits to be merged.
    :param source_ref: topmost commit of `source_repo` to merge.
    :param user_name: merge commit `user_name`.
    :param user_email: merge commit `user_email`.
    :param message: merge commit `message`.
    :param dry_run: if `True` the merge does not actually take place.
    :param use_rebase: if `True`, rebase source commits onto the target
        instead of merging.
    :param close_branch: if `True` the branch is closed before merging.
    """
    if dry_run:
        # Dry runs may omit identity/message; substitute placeholders.
        message = message or settings.MERGE_DRY_RUN_MESSAGE
        user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
        user_name = user_name or settings.MERGE_DRY_RUN_USER
    else:
        # A real merge must carry a complete committer identity + message.
        for value, label in ((user_name, 'user_name'),
                             (user_email, 'user_email'),
                             (message, 'message')):
            if not value:
                raise ValueError('%s cannot be empty' % label)

    try:
        return self._merge_repo(
            repo_id, workspace_id, target_ref, source_repo,
            source_ref, message, user_name, user_email, dry_run=dry_run,
            use_rebase=use_rebase, close_branch=close_branch)
    except RepositoryError as exc:
        log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
        # Surface backend failures as a non-possible merge result rather
        # than letting the exception propagate to callers.
        return MergeResponse(
            False, False, None, MergeFailureReason.UNKNOWN,
            metadata={'exception': str(exc)})
677
681
678 def _merge_repo(self, repo_id, workspace_id, target_ref,
682 def _merge_repo(self, repo_id, workspace_id, target_ref,
679 source_repo, source_ref, merge_message,
683 source_repo, source_ref, merge_message,
680 merger_name, merger_email, dry_run=False,
684 merger_name, merger_email, dry_run=False,
681 use_rebase=False, close_branch=False):
685 use_rebase=False, close_branch=False):
682 """Internal implementation of merge."""
686 """Internal implementation of merge."""
683 raise NotImplementedError
687 raise NotImplementedError
684
688
685 def _maybe_prepare_merge_workspace(
689 def _maybe_prepare_merge_workspace(
686 self, repo_id, workspace_id, target_ref, source_ref):
690 self, repo_id, workspace_id, target_ref, source_ref):
687 """
691 """
688 Create the merge workspace.
692 Create the merge workspace.
689
693
690 :param workspace_id: `workspace_id` unique identifier.
694 :param workspace_id: `workspace_id` unique identifier.
691 """
695 """
692 raise NotImplementedError
696 raise NotImplementedError
693
697
694 @classmethod
698 @classmethod
695 def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id):
699 def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id):
696 """
700 """
697 Legacy version that was used before. We still need it for
701 Legacy version that was used before. We still need it for
698 backward compat
702 backward compat
699 """
703 """
700 return os.path.join(
704 return os.path.join(
701 os.path.dirname(repo_path),
705 os.path.dirname(repo_path),
702 '.__shadow_%s_%s' % (os.path.basename(repo_path), workspace_id))
706 '.__shadow_%s_%s' % (os.path.basename(repo_path), workspace_id))
703
707
704 @classmethod
708 @classmethod
705 def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id):
709 def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id):
706 # The name of the shadow repository must start with '.', so it is
710 # The name of the shadow repository must start with '.', so it is
707 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
711 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
708 legacy_repository_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id)
712 legacy_repository_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id)
709 if os.path.exists(legacy_repository_path):
713 if os.path.exists(legacy_repository_path):
710 return legacy_repository_path
714 return legacy_repository_path
711 else:
715 else:
712 return os.path.join(
716 return os.path.join(
713 os.path.dirname(repo_path),
717 os.path.dirname(repo_path),
714 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
718 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
715
719
def cleanup_merge_workspace(self, repo_id, workspace_id):
    """
    Remove the merge workspace (shadow repository).

    MUST not fail when no workspace exists for the given `workspace_id`.

    :param workspace_id: `workspace_id` unique identifier.
    """
    shadow_path = self._get_shadow_repository_path(
        self.path, repo_id, workspace_id)
    shadow_path_del = '{}.{}.delete'.format(shadow_path, time.time())

    if not os.path.isdir(shadow_path):
        return

    # Move aside first so the live path is freed immediately; rmtree had
    # edge-case problems with symlinked repositories, hence the two steps.
    shutil.move(shadow_path, shadow_path_del)
    try:
        shutil.rmtree(shadow_path_del, ignore_errors=False)
    except Exception:
        log.exception('Failed to gracefully remove shadow repo under %s',
                      shadow_path_del)
        # Best-effort fallback: ignore remaining errors.
        shutil.rmtree(shadow_path_del, ignore_errors=True)
743
747
744 # ========== #
748 # ========== #
745 # COMMIT API #
749 # COMMIT API #
746 # ========== #
750 # ========== #
747
751
@LazyProperty
def in_memory_commit(self):
    """
    Return the :class:`InMemoryCommit` object for this repository.

    Abstract; concrete backends must implement this.
    """
    raise NotImplementedError
754
758
755 # ======================== #
759 # ======================== #
756 # UTILITIES FOR SUBCLASSES #
760 # UTILITIES FOR SUBCLASSES #
757 # ======================== #
761 # ======================== #
758
762
def _validate_diff_commits(self, commit1, commit2):
    """
    Validate that both commits belong to this repository.

    Utility for subclasses to consistently validate inputs of methods
    like :meth:`get_diff`.
    """
    for commit in (commit1, commit2):
        self._validate_commit(commit)
    if isinstance(commit1, EmptyCommit) and isinstance(commit2, EmptyCommit):
        raise ValueError("Cannot compare two empty commits")
771
775
def _validate_commit(self, commit):
    # Must be a commit object, not a raw id or index.
    if not isinstance(commit, BaseCommit):
        raise TypeError(
            "%s is not of type BaseCommit" % repr(commit))
    # EmptyCommit is repository-less by design, so it is exempt.
    if commit.repository != self and not isinstance(commit, EmptyCommit):
        raise ValueError(
            "Commit %s must be a valid commit from this repository %s, "
            "related to this repository instead %s." %
            (commit, self, commit.repository))
781
785
def _validate_commit_id(self, commit_id):
    # Commit ids are always addressed as strings.
    if isinstance(commit_id, compat.string_types):
        return
    raise TypeError("commit_id must be a string value got {} instead".format(type(commit_id)))
785
789
def _validate_commit_idx(self, commit_idx):
    # NOTE: `long` is the Python 2 integer type; this module targets py2.
    if isinstance(commit_idx, (int, long)):
        return
    raise TypeError("commit_idx must be a numeric value")
789
793
def _validate_branch_name(self, branch_name):
    """Raise BranchDoesNotExistError when `branch_name` is set but unknown."""
    if not branch_name:
        return
    if branch_name not in self.branches_all:
        raise BranchDoesNotExistError(
            "Branch %s not found in %s" % (branch_name, self))
794
798
795 #
799 #
796 # Supporting deprecated API parts
800 # Supporting deprecated API parts
797 # TODO: johbo: consider to move this into a mixin
801 # TODO: johbo: consider to move this into a mixin
798 #
802 #
799
803
@property
def EMPTY_CHANGESET(self):
    """Deprecated alias for ``EMPTY_COMMIT_ID``."""
    warnings.warn(
        "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
    return self.EMPTY_COMMIT_ID
805
809
@property
def revisions(self):
    """Deprecated alias for the `commit_ids` attribute."""
    warnings.warn("Use commits attribute instead", DeprecationWarning)
    return self.commit_ids

@revisions.setter
def revisions(self, value):
    """Deprecated setter delegating to `commit_ids`."""
    warnings.warn("Use commits attribute instead", DeprecationWarning)
    self.commit_ids = value
815
819
def get_changeset(self, revision=None, pre_load=None):
    """Deprecated wrapper around :meth:`get_commit`."""
    warnings.warn("Use get_commit instead", DeprecationWarning)
    # Old API allowed commit ids (str) and indices interchangeably.
    commit_id, commit_idx = None, None
    if isinstance(revision, compat.string_types):
        commit_id = revision
    else:
        commit_idx = revision
    return self.get_commit(
        commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
826
830
def get_changesets(
        self, start=None, end=None, start_date=None, end_date=None,
        branch_name=None, pre_load=None):
    """Deprecated wrapper around :meth:`get_commits`."""
    warnings.warn("Use get_commits instead", DeprecationWarning)
    return self.get_commits(
        start_id=self._revision_to_commit(start),
        end_id=self._revision_to_commit(end),
        start_date=start_date, end_date=end_date,
        branch_name=branch_name, pre_load=pre_load)
836
840
def _revision_to_commit(self, revision):
    """
    Translate an old-API revision into a commit_id.

    Supports the legacy changeset API where commit ids (strings) and
    commit indices (ints) were interchangeable. ``None`` passes through.
    """
    if revision is None:
        return None
    if isinstance(revision, compat.string_types):
        return revision
    return self.commit_ids[revision]
852
856
@property
def in_memory_changeset(self):
    """Deprecated alias for `in_memory_commit`."""
    warnings.warn("Use in_memory_commit instead", DeprecationWarning)
    return self.in_memory_commit
857
861
def get_path_permissions(self, username):
    """
    Return a path permission checker or None if not supported.

    :param username: session user name
    :return: an instance of BasePathPermissionChecker or None
    """
    # Base implementation: per-path permissions are not supported.
    return None
866
870
def install_hooks(self, force=False):
    """Delegate hook installation to the remote vcsserver proxy."""
    return self._remote.install_hooks(force)
869
873
def get_hooks_info(self):
    """Fetch hook information from the remote vcsserver proxy."""
    return self._remote.get_hooks_info()
872
876
873
877
874 class BaseCommit(object):
878 class BaseCommit(object):
875 """
879 """
876 Each backend should implement it's commit representation.
880 Each backend should implement it's commit representation.
877
881
878 **Attributes**
882 **Attributes**
879
883
880 ``repository``
884 ``repository``
881 repository object within which commit exists
885 repository object within which commit exists
882
886
883 ``id``
887 ``id``
884 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
888 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
885 just ``tip``.
889 just ``tip``.
886
890
887 ``raw_id``
891 ``raw_id``
888 raw commit representation (i.e. full 40 length sha for git
892 raw commit representation (i.e. full 40 length sha for git
889 backend)
893 backend)
890
894
891 ``short_id``
895 ``short_id``
892 shortened (if apply) version of ``raw_id``; it would be simple
896 shortened (if apply) version of ``raw_id``; it would be simple
893 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
897 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
894 as ``raw_id`` for subversion
898 as ``raw_id`` for subversion
895
899
896 ``idx``
900 ``idx``
897 commit index
901 commit index
898
902
899 ``files``
903 ``files``
900 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
904 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
901
905
902 ``dirs``
906 ``dirs``
903 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
907 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
904
908
905 ``nodes``
909 ``nodes``
906 combined list of ``Node`` objects
910 combined list of ``Node`` objects
907
911
908 ``author``
912 ``author``
909 author of the commit, as unicode
913 author of the commit, as unicode
910
914
911 ``message``
915 ``message``
912 message of the commit, as unicode
916 message of the commit, as unicode
913
917
914 ``parents``
918 ``parents``
915 list of parent commits
919 list of parent commits
916
920
917 """
921 """
918 repository = None
922 repository = None
919 branch = None
923 branch = None
920
924
921 """
925 """
922 Depending on the backend this should be set to the branch name of the
926 Depending on the backend this should be set to the branch name of the
923 commit. Backends not supporting branches on commits should leave this
927 commit. Backends not supporting branches on commits should leave this
924 value as ``None``.
928 value as ``None``.
925 """
929 """
926
930
927 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
931 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
928 """
932 """
929 This template is used to generate a default prefix for repository archives
933 This template is used to generate a default prefix for repository archives
930 if no prefix has been specified.
934 if no prefix has been specified.
931 """
935 """
932
936
933 def __str__(self):
937 def __str__(self):
934 return '<%s at %s:%s>' % (
938 return '<%s at %s:%s>' % (
935 self.__class__.__name__, self.idx, self.short_id)
939 self.__class__.__name__, self.idx, self.short_id)
936
940
937 def __repr__(self):
941 def __repr__(self):
938 return self.__str__()
942 return self.__str__()
939
943
940 def __unicode__(self):
944 def __unicode__(self):
941 return u'%s:%s' % (self.idx, self.short_id)
945 return u'%s:%s' % (self.idx, self.short_id)
942
946
943 def __eq__(self, other):
947 def __eq__(self, other):
944 same_instance = isinstance(other, self.__class__)
948 same_instance = isinstance(other, self.__class__)
945 return same_instance and self.raw_id == other.raw_id
949 return same_instance and self.raw_id == other.raw_id
946
950
947 def __json__(self):
951 def __json__(self):
948 parents = []
952 parents = []
949 try:
953 try:
950 for parent in self.parents:
954 for parent in self.parents:
951 parents.append({'raw_id': parent.raw_id})
955 parents.append({'raw_id': parent.raw_id})
952 except NotImplementedError:
956 except NotImplementedError:
953 # empty commit doesn't have parents implemented
957 # empty commit doesn't have parents implemented
954 pass
958 pass
955
959
956 return {
960 return {
957 'short_id': self.short_id,
961 'short_id': self.short_id,
958 'raw_id': self.raw_id,
962 'raw_id': self.raw_id,
959 'revision': self.idx,
963 'revision': self.idx,
960 'message': self.message,
964 'message': self.message,
961 'date': self.date,
965 'date': self.date,
962 'author': self.author,
966 'author': self.author,
963 'parents': parents,
967 'parents': parents,
964 'branch': self.branch
968 'branch': self.branch
965 }
969 }
966
970
967 def __getstate__(self):
971 def __getstate__(self):
968 d = self.__dict__.copy()
972 d = self.__dict__.copy()
969 d.pop('_remote', None)
973 d.pop('_remote', None)
970 d.pop('repository', None)
974 d.pop('repository', None)
971 return d
975 return d
972
976
def serialize(self):
    """Alias for :meth:`__json__`."""
    return self.__json__()
975
979
976 def _get_refs(self):
980 def _get_refs(self):
977 return {
981 return {
978 'branches': [self.branch] if self.branch else [],
982 'branches': [self.branch] if self.branch else [],
979 'bookmarks': getattr(self, 'bookmarks', []),
983 'bookmarks': getattr(self, 'bookmarks', []),
980 'tags': self.tags
984 'tags': self.tags
981 }
985 }
982
986
@LazyProperty
def last(self):
    """
    ``True`` if this is the last commit in the repository, else ``False``.

    Raises CommitError when no repository is attached.
    (NOTE(review): the original docs mention `EmptyRepositoryError`, but
    the code raises CommitError — kept as-is.)
    """
    if self.repository is None:
        raise CommitError("Cannot check if it's most recent commit")
    return self.raw_id == self.repository.commit_ids[-1]
993
997
@LazyProperty
def parents(self):
    """
    Return the list of parent commits.

    Abstract; concrete backends must implement this.
    """
    raise NotImplementedError
1000
1004
@LazyProperty
def first_parent(self):
    """Return the first parent commit, or an EmptyCommit for a root commit."""
    if self.parents:
        return self.parents[0]
    return EmptyCommit()
1007
1011
@property
def merge(self):
    """``True`` when this commit has more than one parent (a merge)."""
    return len(self.parents) > 1
1014
1018
@LazyProperty
def children(self):
    """
    Return the list of child commits.

    Abstract; concrete backends must implement this.
    """
    raise NotImplementedError
1021
1025
@LazyProperty
def id(self):
    """
    Return a string identifying this commit.

    Abstract; concrete backends must implement this.
    """
    raise NotImplementedError
1028
1032
1029 @LazyProperty
1033 @LazyProperty
1030 def raw_id(self):
1034 def raw_id(self):
1031 """
1035 """
1032 Returns raw string identifying this commit.
1036 Returns raw string identifying this commit.
1033 """
1037 """
1034 raise NotImplementedError
1038 raise NotImplementedError
1035
1039
1036 @LazyProperty
1040 @LazyProperty
1037 def short_id(self):
1041 def short_id(self):
1038 """
1042 """
1039 Returns shortened version of ``raw_id`` attribute, as string,
1043 Returns shortened version of ``raw_id`` attribute, as string,
1040 identifying this commit, useful for presentation to users.
1044 identifying this commit, useful for presentation to users.
1041 """
1045 """
1042 raise NotImplementedError
1046 raise NotImplementedError
1043
1047
1044 @LazyProperty
1048 @LazyProperty
1045 def idx(self):
1049 def idx(self):
1046 """
1050 """
1047 Returns integer identifying this commit.
1051 Returns integer identifying this commit.
1048 """
1052 """
1049 raise NotImplementedError
1053 raise NotImplementedError
1050
1054
1051 @LazyProperty
1055 @LazyProperty
1052 def committer(self):
1056 def committer(self):
1053 """
1057 """
1054 Returns committer for this commit
1058 Returns committer for this commit
1055 """
1059 """
1056 raise NotImplementedError
1060 raise NotImplementedError
1057
1061
1058 @LazyProperty
1062 @LazyProperty
1059 def committer_name(self):
1063 def committer_name(self):
1060 """
1064 """
1061 Returns committer name for this commit
1065 Returns committer name for this commit
1062 """
1066 """
1063
1067
1064 return author_name(self.committer)
1068 return author_name(self.committer)
1065
1069
1066 @LazyProperty
1070 @LazyProperty
1067 def committer_email(self):
1071 def committer_email(self):
1068 """
1072 """
1069 Returns committer email address for this commit
1073 Returns committer email address for this commit
1070 """
1074 """
1071
1075
1072 return author_email(self.committer)
1076 return author_email(self.committer)
1073
1077
1074 @LazyProperty
1078 @LazyProperty
1075 def author(self):
1079 def author(self):
1076 """
1080 """
1077 Returns author for this commit
1081 Returns author for this commit
1078 """
1082 """
1079
1083
1080 raise NotImplementedError
1084 raise NotImplementedError
1081
1085
1082 @LazyProperty
1086 @LazyProperty
1083 def author_name(self):
1087 def author_name(self):
1084 """
1088 """
1085 Returns author name for this commit
1089 Returns author name for this commit
1086 """
1090 """
1087
1091
1088 return author_name(self.author)
1092 return author_name(self.author)
1089
1093
1090 @LazyProperty
1094 @LazyProperty
1091 def author_email(self):
1095 def author_email(self):
1092 """
1096 """
1093 Returns author email address for this commit
1097 Returns author email address for this commit
1094 """
1098 """
1095
1099
1096 return author_email(self.author)
1100 return author_email(self.author)
1097
1101
1098 def get_file_mode(self, path):
1102 def get_file_mode(self, path):
1099 """
1103 """
1100 Returns stat mode of the file at `path`.
1104 Returns stat mode of the file at `path`.
1101 """
1105 """
1102 raise NotImplementedError
1106 raise NotImplementedError
1103
1107
1104 def is_link(self, path):
1108 def is_link(self, path):
1105 """
1109 """
1106 Returns ``True`` if given `path` is a symlink
1110 Returns ``True`` if given `path` is a symlink
1107 """
1111 """
1108 raise NotImplementedError
1112 raise NotImplementedError
1109
1113
1110 def is_node_binary(self, path):
1114 def is_node_binary(self, path):
1111 """
1115 """
1112 Returns ``True`` is given path is a binary file
1116 Returns ``True`` is given path is a binary file
1113 """
1117 """
1114 raise NotImplementedError
1118 raise NotImplementedError
1115
1119
1116 def get_file_content(self, path):
1120 def get_file_content(self, path):
1117 """
1121 """
1118 Returns content of the file at the given `path`.
1122 Returns content of the file at the given `path`.
1119 """
1123 """
1120 raise NotImplementedError
1124 raise NotImplementedError
1121
1125
1122 def get_file_content_streamed(self, path):
1126 def get_file_content_streamed(self, path):
1123 """
1127 """
1124 returns a streaming response from vcsserver with file content
1128 returns a streaming response from vcsserver with file content
1125 """
1129 """
1126 raise NotImplementedError
1130 raise NotImplementedError
1127
1131
1128 def get_file_size(self, path):
1132 def get_file_size(self, path):
1129 """
1133 """
1130 Returns size of the file at the given `path`.
1134 Returns size of the file at the given `path`.
1131 """
1135 """
1132 raise NotImplementedError
1136 raise NotImplementedError
1133
1137
1134 def get_path_commit(self, path, pre_load=None):
1138 def get_path_commit(self, path, pre_load=None):
1135 """
1139 """
1136 Returns last commit of the file at the given `path`.
1140 Returns last commit of the file at the given `path`.
1137
1141
1138 :param pre_load: Optional. List of commit attributes to load.
1142 :param pre_load: Optional. List of commit attributes to load.
1139 """
1143 """
1140 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1144 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1141 if not commits:
1145 if not commits:
1142 raise RepositoryError(
1146 raise RepositoryError(
1143 'Failed to fetch history for path {}. '
1147 'Failed to fetch history for path {}. '
1144 'Please check if such path exists in your repository'.format(
1148 'Please check if such path exists in your repository'.format(
1145 path))
1149 path))
1146 return commits[0]
1150 return commits[0]
1147
1151
1148 def get_path_history(self, path, limit=None, pre_load=None):
1152 def get_path_history(self, path, limit=None, pre_load=None):
1149 """
1153 """
1150 Returns history of file as reversed list of :class:`BaseCommit`
1154 Returns history of file as reversed list of :class:`BaseCommit`
1151 objects for which file at given `path` has been modified.
1155 objects for which file at given `path` has been modified.
1152
1156
1153 :param limit: Optional. Allows to limit the size of the returned
1157 :param limit: Optional. Allows to limit the size of the returned
1154 history. This is intended as a hint to the underlying backend, so
1158 history. This is intended as a hint to the underlying backend, so
1155 that it can apply optimizations depending on the limit.
1159 that it can apply optimizations depending on the limit.
1156 :param pre_load: Optional. List of commit attributes to load.
1160 :param pre_load: Optional. List of commit attributes to load.
1157 """
1161 """
1158 raise NotImplementedError
1162 raise NotImplementedError
1159
1163
1160 def get_file_annotate(self, path, pre_load=None):
1164 def get_file_annotate(self, path, pre_load=None):
1161 """
1165 """
1162 Returns a generator of four element tuples with
1166 Returns a generator of four element tuples with
1163 lineno, sha, commit lazy loader and line
1167 lineno, sha, commit lazy loader and line
1164
1168
1165 :param pre_load: Optional. List of commit attributes to load.
1169 :param pre_load: Optional. List of commit attributes to load.
1166 """
1170 """
1167 raise NotImplementedError
1171 raise NotImplementedError
1168
1172
1169 def get_nodes(self, path):
1173 def get_nodes(self, path):
1170 """
1174 """
1171 Returns combined ``DirNode`` and ``FileNode`` objects list representing
1175 Returns combined ``DirNode`` and ``FileNode`` objects list representing
1172 state of commit at the given ``path``.
1176 state of commit at the given ``path``.
1173
1177
1174 :raises ``CommitError``: if node at the given ``path`` is not
1178 :raises ``CommitError``: if node at the given ``path`` is not
1175 instance of ``DirNode``
1179 instance of ``DirNode``
1176 """
1180 """
1177 raise NotImplementedError
1181 raise NotImplementedError
1178
1182
1179 def get_node(self, path):
1183 def get_node(self, path):
1180 """
1184 """
1181 Returns ``Node`` object from the given ``path``.
1185 Returns ``Node`` object from the given ``path``.
1182
1186
1183 :raises ``NodeDoesNotExistError``: if there is no node at the given
1187 :raises ``NodeDoesNotExistError``: if there is no node at the given
1184 ``path``
1188 ``path``
1185 """
1189 """
1186 raise NotImplementedError
1190 raise NotImplementedError
1187
1191
1188 def get_largefile_node(self, path):
1192 def get_largefile_node(self, path):
1189 """
1193 """
1190 Returns the path to largefile from Mercurial/Git-lfs storage.
1194 Returns the path to largefile from Mercurial/Git-lfs storage.
1191 or None if it's not a largefile node
1195 or None if it's not a largefile node
1192 """
1196 """
1193 return None
1197 return None
1194
1198
1195 def archive_repo(self, archive_dest_path, kind='tgz', subrepos=None,
1199 def archive_repo(self, archive_dest_path, kind='tgz', subrepos=None,
1196 archive_dir_name=None, write_metadata=False, mtime=None,
1200 archive_dir_name=None, write_metadata=False, mtime=None,
1197 archive_at_path='/'):
1201 archive_at_path='/'):
1198 """
1202 """
1199 Creates an archive containing the contents of the repository.
1203 Creates an archive containing the contents of the repository.
1200
1204
1201 :param archive_dest_path: path to the file which to create the archive.
1205 :param archive_dest_path: path to the file which to create the archive.
1202 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1206 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1203 :param archive_dir_name: name of root directory in archive.
1207 :param archive_dir_name: name of root directory in archive.
1204 Default is repository name and commit's short_id joined with dash:
1208 Default is repository name and commit's short_id joined with dash:
1205 ``"{repo_name}-{short_id}"``.
1209 ``"{repo_name}-{short_id}"``.
1206 :param write_metadata: write a metadata file into archive.
1210 :param write_metadata: write a metadata file into archive.
1207 :param mtime: custom modification time for archive creation, defaults
1211 :param mtime: custom modification time for archive creation, defaults
1208 to time.time() if not given.
1212 to time.time() if not given.
1209 :param archive_at_path: pack files at this path (default '/')
1213 :param archive_at_path: pack files at this path (default '/')
1210
1214
1211 :raise VCSError: If prefix has a problem.
1215 :raise VCSError: If prefix has a problem.
1212 """
1216 """
1213 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1217 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1214 if kind not in allowed_kinds:
1218 if kind not in allowed_kinds:
1215 raise ImproperArchiveTypeError(
1219 raise ImproperArchiveTypeError(
1216 'Archive kind (%s) not supported use one of %s' %
1220 'Archive kind (%s) not supported use one of %s' %
1217 (kind, allowed_kinds))
1221 (kind, allowed_kinds))
1218
1222
1219 archive_dir_name = self._validate_archive_prefix(archive_dir_name)
1223 archive_dir_name = self._validate_archive_prefix(archive_dir_name)
1220 mtime = mtime is not None or time.mktime(self.date.timetuple())
1224 mtime = mtime is not None or time.mktime(self.date.timetuple())
1221 commit_id = self.raw_id
1225 commit_id = self.raw_id
1222
1226
1223 return self.repository._remote.archive_repo(
1227 return self.repository._remote.archive_repo(
1224 archive_dest_path, kind, mtime, archive_at_path,
1228 archive_dest_path, kind, mtime, archive_at_path,
1225 archive_dir_name, commit_id)
1229 archive_dir_name, commit_id)
1226
1230
1227 def _validate_archive_prefix(self, archive_dir_name):
1231 def _validate_archive_prefix(self, archive_dir_name):
1228 if archive_dir_name is None:
1232 if archive_dir_name is None:
1229 archive_dir_name = self._ARCHIVE_PREFIX_TEMPLATE.format(
1233 archive_dir_name = self._ARCHIVE_PREFIX_TEMPLATE.format(
1230 repo_name=safe_str(self.repository.name),
1234 repo_name=safe_str(self.repository.name),
1231 short_id=self.short_id)
1235 short_id=self.short_id)
1232 elif not isinstance(archive_dir_name, str):
1236 elif not isinstance(archive_dir_name, str):
1233 raise ValueError("prefix not a bytes object: %s" % repr(archive_dir_name))
1237 raise ValueError("prefix not a bytes object: %s" % repr(archive_dir_name))
1234 elif archive_dir_name.startswith('/'):
1238 elif archive_dir_name.startswith('/'):
1235 raise VCSError("Prefix cannot start with leading slash")
1239 raise VCSError("Prefix cannot start with leading slash")
1236 elif archive_dir_name.strip() == '':
1240 elif archive_dir_name.strip() == '':
1237 raise VCSError("Prefix cannot be empty")
1241 raise VCSError("Prefix cannot be empty")
1238 return archive_dir_name
1242 return archive_dir_name
1239
1243
1240 @LazyProperty
1244 @LazyProperty
1241 def root(self):
1245 def root(self):
1242 """
1246 """
1243 Returns ``RootNode`` object for this commit.
1247 Returns ``RootNode`` object for this commit.
1244 """
1248 """
1245 return self.get_node('')
1249 return self.get_node('')
1246
1250
1247 def next(self, branch=None):
1251 def next(self, branch=None):
1248 """
1252 """
1249 Returns next commit from current, if branch is gives it will return
1253 Returns next commit from current, if branch is gives it will return
1250 next commit belonging to this branch
1254 next commit belonging to this branch
1251
1255
1252 :param branch: show commits within the given named branch
1256 :param branch: show commits within the given named branch
1253 """
1257 """
1254 indexes = xrange(self.idx + 1, self.repository.count())
1258 indexes = xrange(self.idx + 1, self.repository.count())
1255 return self._find_next(indexes, branch)
1259 return self._find_next(indexes, branch)
1256
1260
1257 def prev(self, branch=None):
1261 def prev(self, branch=None):
1258 """
1262 """
1259 Returns previous commit from current, if branch is gives it will
1263 Returns previous commit from current, if branch is gives it will
1260 return previous commit belonging to this branch
1264 return previous commit belonging to this branch
1261
1265
1262 :param branch: show commit within the given named branch
1266 :param branch: show commit within the given named branch
1263 """
1267 """
1264 indexes = xrange(self.idx - 1, -1, -1)
1268 indexes = xrange(self.idx - 1, -1, -1)
1265 return self._find_next(indexes, branch)
1269 return self._find_next(indexes, branch)
1266
1270
1267 def _find_next(self, indexes, branch=None):
1271 def _find_next(self, indexes, branch=None):
1268 if branch and self.branch != branch:
1272 if branch and self.branch != branch:
1269 raise VCSError('Branch option used on commit not belonging '
1273 raise VCSError('Branch option used on commit not belonging '
1270 'to that branch')
1274 'to that branch')
1271
1275
1272 for next_idx in indexes:
1276 for next_idx in indexes:
1273 commit = self.repository.get_commit(commit_idx=next_idx)
1277 commit = self.repository.get_commit(commit_idx=next_idx)
1274 if branch and branch != commit.branch:
1278 if branch and branch != commit.branch:
1275 continue
1279 continue
1276 return commit
1280 return commit
1277 raise CommitDoesNotExistError
1281 raise CommitDoesNotExistError
1278
1282
1279 def diff(self, ignore_whitespace=True, context=3):
1283 def diff(self, ignore_whitespace=True, context=3):
1280 """
1284 """
1281 Returns a `Diff` object representing the change made by this commit.
1285 Returns a `Diff` object representing the change made by this commit.
1282 """
1286 """
1283 parent = self.first_parent
1287 parent = self.first_parent
1284 diff = self.repository.get_diff(
1288 diff = self.repository.get_diff(
1285 parent, self,
1289 parent, self,
1286 ignore_whitespace=ignore_whitespace,
1290 ignore_whitespace=ignore_whitespace,
1287 context=context)
1291 context=context)
1288 return diff
1292 return diff
1289
1293
1290 @LazyProperty
1294 @LazyProperty
1291 def added(self):
1295 def added(self):
1292 """
1296 """
1293 Returns list of added ``FileNode`` objects.
1297 Returns list of added ``FileNode`` objects.
1294 """
1298 """
1295 raise NotImplementedError
1299 raise NotImplementedError
1296
1300
1297 @LazyProperty
1301 @LazyProperty
1298 def changed(self):
1302 def changed(self):
1299 """
1303 """
1300 Returns list of modified ``FileNode`` objects.
1304 Returns list of modified ``FileNode`` objects.
1301 """
1305 """
1302 raise NotImplementedError
1306 raise NotImplementedError
1303
1307
1304 @LazyProperty
1308 @LazyProperty
1305 def removed(self):
1309 def removed(self):
1306 """
1310 """
1307 Returns list of removed ``FileNode`` objects.
1311 Returns list of removed ``FileNode`` objects.
1308 """
1312 """
1309 raise NotImplementedError
1313 raise NotImplementedError
1310
1314
1311 @LazyProperty
1315 @LazyProperty
1312 def size(self):
1316 def size(self):
1313 """
1317 """
1314 Returns total number of bytes from contents of all filenodes.
1318 Returns total number of bytes from contents of all filenodes.
1315 """
1319 """
1316 return sum((node.size for node in self.get_filenodes_generator()))
1320 return sum((node.size for node in self.get_filenodes_generator()))
1317
1321
1318 def walk(self, topurl=''):
1322 def walk(self, topurl=''):
1319 """
1323 """
1320 Similar to os.walk method. Insted of filesystem it walks through
1324 Similar to os.walk method. Insted of filesystem it walks through
1321 commit starting at given ``topurl``. Returns generator of tuples
1325 commit starting at given ``topurl``. Returns generator of tuples
1322 (topnode, dirnodes, filenodes).
1326 (topnode, dirnodes, filenodes).
1323 """
1327 """
1324 topnode = self.get_node(topurl)
1328 topnode = self.get_node(topurl)
1325 if not topnode.is_dir():
1329 if not topnode.is_dir():
1326 return
1330 return
1327 yield (topnode, topnode.dirs, topnode.files)
1331 yield (topnode, topnode.dirs, topnode.files)
1328 for dirnode in topnode.dirs:
1332 for dirnode in topnode.dirs:
1329 for tup in self.walk(dirnode.path):
1333 for tup in self.walk(dirnode.path):
1330 yield tup
1334 yield tup
1331
1335
1332 def get_filenodes_generator(self):
1336 def get_filenodes_generator(self):
1333 """
1337 """
1334 Returns generator that yields *all* file nodes.
1338 Returns generator that yields *all* file nodes.
1335 """
1339 """
1336 for topnode, dirs, files in self.walk():
1340 for topnode, dirs, files in self.walk():
1337 for node in files:
1341 for node in files:
1338 yield node
1342 yield node
1339
1343
1340 #
1344 #
1341 # Utilities for sub classes to support consistent behavior
1345 # Utilities for sub classes to support consistent behavior
1342 #
1346 #
1343
1347
1344 def no_node_at_path(self, path):
1348 def no_node_at_path(self, path):
1345 return NodeDoesNotExistError(
1349 return NodeDoesNotExistError(
1346 u"There is no file nor directory at the given path: "
1350 u"There is no file nor directory at the given path: "
1347 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1351 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1348
1352
1349 def _fix_path(self, path):
1353 def _fix_path(self, path):
1350 """
1354 """
1351 Paths are stored without trailing slash so we need to get rid off it if
1355 Paths are stored without trailing slash so we need to get rid off it if
1352 needed.
1356 needed.
1353 """
1357 """
1354 return path.rstrip('/')
1358 return path.rstrip('/')
1355
1359
1356 #
1360 #
1357 # Deprecated API based on changesets
1361 # Deprecated API based on changesets
1358 #
1362 #
1359
1363
1360 @property
1364 @property
1361 def revision(self):
1365 def revision(self):
1362 warnings.warn("Use idx instead", DeprecationWarning)
1366 warnings.warn("Use idx instead", DeprecationWarning)
1363 return self.idx
1367 return self.idx
1364
1368
1365 @revision.setter
1369 @revision.setter
1366 def revision(self, value):
1370 def revision(self, value):
1367 warnings.warn("Use idx instead", DeprecationWarning)
1371 warnings.warn("Use idx instead", DeprecationWarning)
1368 self.idx = value
1372 self.idx = value
1369
1373
1370 def get_file_changeset(self, path):
1374 def get_file_changeset(self, path):
1371 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1375 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1372 return self.get_path_commit(path)
1376 return self.get_path_commit(path)
1373
1377
1374
1378
class BaseChangesetClass(type):
    """Metaclass making ``isinstance(x, BaseChangeset)`` accept commits."""

    def __instancecheck__(self, instance):
        return isinstance(instance, BaseCommit)
1379
1383
1380
1384
class BaseChangeset(BaseCommit):
    """Deprecated alias of :class:`BaseCommit`; warns at instantiation."""

    __metaclass__ = BaseChangesetClass

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
        return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1389
1393
1390
1394
1391 class BaseInMemoryCommit(object):
1395 class BaseInMemoryCommit(object):
1392 """
1396 """
1393 Represents differences between repository's state (most recent head) and
1397 Represents differences between repository's state (most recent head) and
1394 changes made *in place*.
1398 changes made *in place*.
1395
1399
1396 **Attributes**
1400 **Attributes**
1397
1401
1398 ``repository``
1402 ``repository``
1399 repository object for this in-memory-commit
1403 repository object for this in-memory-commit
1400
1404
1401 ``added``
1405 ``added``
1402 list of ``FileNode`` objects marked as *added*
1406 list of ``FileNode`` objects marked as *added*
1403
1407
1404 ``changed``
1408 ``changed``
1405 list of ``FileNode`` objects marked as *changed*
1409 list of ``FileNode`` objects marked as *changed*
1406
1410
1407 ``removed``
1411 ``removed``
1408 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1412 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1409 *removed*
1413 *removed*
1410
1414
1411 ``parents``
1415 ``parents``
1412 list of :class:`BaseCommit` instances representing parents of
1416 list of :class:`BaseCommit` instances representing parents of
1413 in-memory commit. Should always be 2-element sequence.
1417 in-memory commit. Should always be 2-element sequence.
1414
1418
1415 """
1419 """
1416
1420
1417 def __init__(self, repository):
1421 def __init__(self, repository):
1418 self.repository = repository
1422 self.repository = repository
1419 self.added = []
1423 self.added = []
1420 self.changed = []
1424 self.changed = []
1421 self.removed = []
1425 self.removed = []
1422 self.parents = []
1426 self.parents = []
1423
1427
1424 def add(self, *filenodes):
1428 def add(self, *filenodes):
1425 """
1429 """
1426 Marks given ``FileNode`` objects as *to be committed*.
1430 Marks given ``FileNode`` objects as *to be committed*.
1427
1431
1428 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1432 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1429 latest commit
1433 latest commit
1430 :raises ``NodeAlreadyAddedError``: if node with same path is already
1434 :raises ``NodeAlreadyAddedError``: if node with same path is already
1431 marked as *added*
1435 marked as *added*
1432 """
1436 """
1433 # Check if not already marked as *added* first
1437 # Check if not already marked as *added* first
1434 for node in filenodes:
1438 for node in filenodes:
1435 if node.path in (n.path for n in self.added):
1439 if node.path in (n.path for n in self.added):
1436 raise NodeAlreadyAddedError(
1440 raise NodeAlreadyAddedError(
1437 "Such FileNode %s is already marked for addition"
1441 "Such FileNode %s is already marked for addition"
1438 % node.path)
1442 % node.path)
1439 for node in filenodes:
1443 for node in filenodes:
1440 self.added.append(node)
1444 self.added.append(node)
1441
1445
1442 def change(self, *filenodes):
1446 def change(self, *filenodes):
1443 """
1447 """
1444 Marks given ``FileNode`` objects to be *changed* in next commit.
1448 Marks given ``FileNode`` objects to be *changed* in next commit.
1445
1449
1446 :raises ``EmptyRepositoryError``: if there are no commits yet
1450 :raises ``EmptyRepositoryError``: if there are no commits yet
1447 :raises ``NodeAlreadyExistsError``: if node with same path is already
1451 :raises ``NodeAlreadyExistsError``: if node with same path is already
1448 marked to be *changed*
1452 marked to be *changed*
1449 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1453 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1450 marked to be *removed*
1454 marked to be *removed*
1451 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1455 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1452 commit
1456 commit
1453 :raises ``NodeNotChangedError``: if node hasn't really be changed
1457 :raises ``NodeNotChangedError``: if node hasn't really be changed
1454 """
1458 """
1455 for node in filenodes:
1459 for node in filenodes:
1456 if node.path in (n.path for n in self.removed):
1460 if node.path in (n.path for n in self.removed):
1457 raise NodeAlreadyRemovedError(
1461 raise NodeAlreadyRemovedError(
1458 "Node at %s is already marked as removed" % node.path)
1462 "Node at %s is already marked as removed" % node.path)
1459 try:
1463 try:
1460 self.repository.get_commit()
1464 self.repository.get_commit()
1461 except EmptyRepositoryError:
1465 except EmptyRepositoryError:
1462 raise EmptyRepositoryError(
1466 raise EmptyRepositoryError(
1463 "Nothing to change - try to *add* new nodes rather than "
1467 "Nothing to change - try to *add* new nodes rather than "
1464 "changing them")
1468 "changing them")
1465 for node in filenodes:
1469 for node in filenodes:
1466 if node.path in (n.path for n in self.changed):
1470 if node.path in (n.path for n in self.changed):
1467 raise NodeAlreadyChangedError(
1471 raise NodeAlreadyChangedError(
1468 "Node at '%s' is already marked as changed" % node.path)
1472 "Node at '%s' is already marked as changed" % node.path)
1469 self.changed.append(node)
1473 self.changed.append(node)
1470
1474
1471 def remove(self, *filenodes):
1475 def remove(self, *filenodes):
1472 """
1476 """
1473 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1477 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1474 *removed* in next commit.
1478 *removed* in next commit.
1475
1479
1476 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1480 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1477 be *removed*
1481 be *removed*
1478 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1482 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1479 be *changed*
1483 be *changed*
1480 """
1484 """
1481 for node in filenodes:
1485 for node in filenodes:
1482 if node.path in (n.path for n in self.removed):
1486 if node.path in (n.path for n in self.removed):
1483 raise NodeAlreadyRemovedError(
1487 raise NodeAlreadyRemovedError(
1484 "Node is already marked to for removal at %s" % node.path)
1488 "Node is already marked to for removal at %s" % node.path)
1485 if node.path in (n.path for n in self.changed):
1489 if node.path in (n.path for n in self.changed):
1486 raise NodeAlreadyChangedError(
1490 raise NodeAlreadyChangedError(
1487 "Node is already marked to be changed at %s" % node.path)
1491 "Node is already marked to be changed at %s" % node.path)
1488 # We only mark node as *removed* - real removal is done by
1492 # We only mark node as *removed* - real removal is done by
1489 # commit method
1493 # commit method
1490 self.removed.append(node)
1494 self.removed.append(node)
1491
1495
1492 def reset(self):
1496 def reset(self):
1493 """
1497 """
1494 Resets this instance to initial state (cleans ``added``, ``changed``
1498 Resets this instance to initial state (cleans ``added``, ``changed``
1495 and ``removed`` lists).
1499 and ``removed`` lists).
1496 """
1500 """
1497 self.added = []
1501 self.added = []
1498 self.changed = []
1502 self.changed = []
1499 self.removed = []
1503 self.removed = []
1500 self.parents = []
1504 self.parents = []
1501
1505
1502 def get_ipaths(self):
1506 def get_ipaths(self):
1503 """
1507 """
1504 Returns generator of paths from nodes marked as added, changed or
1508 Returns generator of paths from nodes marked as added, changed or
1505 removed.
1509 removed.
1506 """
1510 """
1507 for node in itertools.chain(self.added, self.changed, self.removed):
1511 for node in itertools.chain(self.added, self.changed, self.removed):
1508 yield node.path
1512 yield node.path
1509
1513
1510 def get_paths(self):
1514 def get_paths(self):
1511 """
1515 """
1512 Returns list of paths from nodes marked as added, changed or removed.
1516 Returns list of paths from nodes marked as added, changed or removed.
1513 """
1517 """
1514 return list(self.get_ipaths())
1518 return list(self.get_ipaths())
1515
1519
1516 def check_integrity(self, parents=None):
1520 def check_integrity(self, parents=None):
1517 """
1521 """
1518 Checks in-memory commit's integrity. Also, sets parents if not
1522 Checks in-memory commit's integrity. Also, sets parents if not
1519 already set.
1523 already set.
1520
1524
1521 :raises CommitError: if any error occurs (i.e.
1525 :raises CommitError: if any error occurs (i.e.
1522 ``NodeDoesNotExistError``).
1526 ``NodeDoesNotExistError``).
1523 """
1527 """
1524 if not self.parents:
1528 if not self.parents:
1525 parents = parents or []
1529 parents = parents or []
1526 if len(parents) == 0:
1530 if len(parents) == 0:
1527 try:
1531 try:
1528 parents = [self.repository.get_commit(), None]
1532 parents = [self.repository.get_commit(), None]
1529 except EmptyRepositoryError:
1533 except EmptyRepositoryError:
1530 parents = [None, None]
1534 parents = [None, None]
1531 elif len(parents) == 1:
1535 elif len(parents) == 1:
1532 parents += [None]
1536 parents += [None]
1533 self.parents = parents
1537 self.parents = parents
1534
1538
1535 # Local parents, only if not None
1539 # Local parents, only if not None
1536 parents = [p for p in self.parents if p]
1540 parents = [p for p in self.parents if p]
1537
1541
1538 # Check nodes marked as added
1542 # Check nodes marked as added
1539 for p in parents:
1543 for p in parents:
1540 for node in self.added:
1544 for node in self.added:
1541 try:
1545 try:
1542 p.get_node(node.path)
1546 p.get_node(node.path)
1543 except NodeDoesNotExistError:
1547 except NodeDoesNotExistError:
1544 pass
1548 pass
1545 else:
1549 else:
1546 raise NodeAlreadyExistsError(
1550 raise NodeAlreadyExistsError(
1547 "Node `%s` already exists at %s" % (node.path, p))
1551 "Node `%s` already exists at %s" % (node.path, p))
1548
1552
1549 # Check nodes marked as changed
1553 # Check nodes marked as changed
1550 missing = set(self.changed)
1554 missing = set(self.changed)
1551 not_changed = set(self.changed)
1555 not_changed = set(self.changed)
1552 if self.changed and not parents:
1556 if self.changed and not parents:
1553 raise NodeDoesNotExistError(str(self.changed[0].path))
1557 raise NodeDoesNotExistError(str(self.changed[0].path))
1554 for p in parents:
1558 for p in parents:
1555 for node in self.changed:
1559 for node in self.changed:
1556 try:
1560 try:
1557 old = p.get_node(node.path)
1561 old = p.get_node(node.path)
1558 missing.remove(node)
1562 missing.remove(node)
1559 # if content actually changed, remove node from not_changed
1563 # if content actually changed, remove node from not_changed
1560 if old.content != node.content:
1564 if old.content != node.content:
1561 not_changed.remove(node)
1565 not_changed.remove(node)
1562 except NodeDoesNotExistError:
1566 except NodeDoesNotExistError:
1563 pass
1567 pass
1564 if self.changed and missing:
1568 if self.changed and missing:
1565 raise NodeDoesNotExistError(
1569 raise NodeDoesNotExistError(
1566 "Node `%s` marked as modified but missing in parents: %s"
1570 "Node `%s` marked as modified but missing in parents: %s"
1567 % (node.path, parents))
1571 % (node.path, parents))
1568
1572
1569 if self.changed and not_changed:
1573 if self.changed and not_changed:
1570 raise NodeNotChangedError(
1574 raise NodeNotChangedError(
1571 "Node `%s` wasn't actually changed (parents: %s)"
1575 "Node `%s` wasn't actually changed (parents: %s)"
1572 % (not_changed.pop().path, parents))
1576 % (not_changed.pop().path, parents))
1573
1577
1574 # Check nodes marked as removed
1578 # Check nodes marked as removed
1575 if self.removed and not parents:
1579 if self.removed and not parents:
1576 raise NodeDoesNotExistError(
1580 raise NodeDoesNotExistError(
1577 "Cannot remove node at %s as there "
1581 "Cannot remove node at %s as there "
1578 "were no parents specified" % self.removed[0].path)
1582 "were no parents specified" % self.removed[0].path)
1579 really_removed = set()
1583 really_removed = set()
1580 for p in parents:
1584 for p in parents:
1581 for node in self.removed:
1585 for node in self.removed:
1582 try:
1586 try:
1583 p.get_node(node.path)
1587 p.get_node(node.path)
1584 really_removed.add(node)
1588 really_removed.add(node)
1585 except CommitError:
1589 except CommitError:
1586 pass
1590 pass
1587 not_removed = set(self.removed) - really_removed
1591 not_removed = set(self.removed) - really_removed
1588 if not_removed:
1592 if not_removed:
1589 # TODO: johbo: This code branch does not seem to be covered
1593 # TODO: johbo: This code branch does not seem to be covered
1590 raise NodeDoesNotExistError(
1594 raise NodeDoesNotExistError(
1591 "Cannot remove node at %s from "
1595 "Cannot remove node at %s from "
1592 "following parents: %s" % (not_removed, parents))
1596 "following parents: %s" % (not_removed, parents))
1593
1597
1594 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
1598 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
1595 """
1599 """
1596 Performs in-memory commit (doesn't check workdir in any way) and
1600 Performs in-memory commit (doesn't check workdir in any way) and
1597 returns newly created :class:`BaseCommit`. Updates repository's
1601 returns newly created :class:`BaseCommit`. Updates repository's
1598 attribute `commits`.
1602 attribute `commits`.
1599
1603
1600 .. note::
1604 .. note::
1601
1605
1602 While overriding this method each backend's should call
1606 While overriding this method each backend's should call
1603 ``self.check_integrity(parents)`` in the first place.
1607 ``self.check_integrity(parents)`` in the first place.
1604
1608
1605 :param message: message of the commit
1609 :param message: message of the commit
1606 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1610 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1607 :param parents: single parent or sequence of parents from which commit
1611 :param parents: single parent or sequence of parents from which commit
1608 would be derived
1612 would be derived
1609 :param date: ``datetime.datetime`` instance. Defaults to
1613 :param date: ``datetime.datetime`` instance. Defaults to
1610 ``datetime.datetime.now()``.
1614 ``datetime.datetime.now()``.
1611 :param branch: branch name, as string. If none given, default backend's
1615 :param branch: branch name, as string. If none given, default backend's
1612 branch would be used.
1616 branch would be used.
1613
1617
1614 :raises ``CommitError``: if any error occurs while committing
1618 :raises ``CommitError``: if any error occurs while committing
1615 """
1619 """
1616 raise NotImplementedError
1620 raise NotImplementedError
1617
1621
1618
1622
1619 class BaseInMemoryChangesetClass(type):
1623 class BaseInMemoryChangesetClass(type):
1620
1624
1621 def __instancecheck__(self, instance):
1625 def __instancecheck__(self, instance):
1622 return isinstance(instance, BaseInMemoryCommit)
1626 return isinstance(instance, BaseInMemoryCommit)
1623
1627
1624
1628
1625 class BaseInMemoryChangeset(BaseInMemoryCommit):
1629 class BaseInMemoryChangeset(BaseInMemoryCommit):
1626
1630
1627 __metaclass__ = BaseInMemoryChangesetClass
1631 __metaclass__ = BaseInMemoryChangesetClass
1628
1632
1629 def __new__(cls, *args, **kwargs):
1633 def __new__(cls, *args, **kwargs):
1630 warnings.warn(
1634 warnings.warn(
1631 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1635 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1632 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1636 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1633
1637
1634
1638
1635 class EmptyCommit(BaseCommit):
1639 class EmptyCommit(BaseCommit):
1636 """
1640 """
1637 An dummy empty commit. It's possible to pass hash when creating
1641 An dummy empty commit. It's possible to pass hash when creating
1638 an EmptyCommit
1642 an EmptyCommit
1639 """
1643 """
1640
1644
1641 def __init__(
1645 def __init__(
1642 self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
1646 self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
1643 message='', author='', date=None):
1647 message='', author='', date=None):
1644 self._empty_commit_id = commit_id
1648 self._empty_commit_id = commit_id
1645 # TODO: johbo: Solve idx parameter, default value does not make
1649 # TODO: johbo: Solve idx parameter, default value does not make
1646 # too much sense
1650 # too much sense
1647 self.idx = idx
1651 self.idx = idx
1648 self.message = message
1652 self.message = message
1649 self.author = author
1653 self.author = author
1650 self.date = date or datetime.datetime.fromtimestamp(0)
1654 self.date = date or datetime.datetime.fromtimestamp(0)
1651 self.repository = repo
1655 self.repository = repo
1652 self.alias = alias
1656 self.alias = alias
1653
1657
1654 @LazyProperty
1658 @LazyProperty
1655 def raw_id(self):
1659 def raw_id(self):
1656 """
1660 """
1657 Returns raw string identifying this commit, useful for web
1661 Returns raw string identifying this commit, useful for web
1658 representation.
1662 representation.
1659 """
1663 """
1660
1664
1661 return self._empty_commit_id
1665 return self._empty_commit_id
1662
1666
1663 @LazyProperty
1667 @LazyProperty
1664 def branch(self):
1668 def branch(self):
1665 if self.alias:
1669 if self.alias:
1666 from rhodecode.lib.vcs.backends import get_backend
1670 from rhodecode.lib.vcs.backends import get_backend
1667 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1671 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1668
1672
1669 @LazyProperty
1673 @LazyProperty
1670 def short_id(self):
1674 def short_id(self):
1671 return self.raw_id[:12]
1675 return self.raw_id[:12]
1672
1676
1673 @LazyProperty
1677 @LazyProperty
1674 def id(self):
1678 def id(self):
1675 return self.raw_id
1679 return self.raw_id
1676
1680
1677 def get_path_commit(self, path):
1681 def get_path_commit(self, path):
1678 return self
1682 return self
1679
1683
1680 def get_file_content(self, path):
1684 def get_file_content(self, path):
1681 return u''
1685 return u''
1682
1686
1683 def get_file_content_streamed(self, path):
1687 def get_file_content_streamed(self, path):
1684 yield self.get_file_content()
1688 yield self.get_file_content()
1685
1689
1686 def get_file_size(self, path):
1690 def get_file_size(self, path):
1687 return 0
1691 return 0
1688
1692
1689
1693
1690 class EmptyChangesetClass(type):
1694 class EmptyChangesetClass(type):
1691
1695
1692 def __instancecheck__(self, instance):
1696 def __instancecheck__(self, instance):
1693 return isinstance(instance, EmptyCommit)
1697 return isinstance(instance, EmptyCommit)
1694
1698
1695
1699
1696 class EmptyChangeset(EmptyCommit):
1700 class EmptyChangeset(EmptyCommit):
1697
1701
1698 __metaclass__ = EmptyChangesetClass
1702 __metaclass__ = EmptyChangesetClass
1699
1703
1700 def __new__(cls, *args, **kwargs):
1704 def __new__(cls, *args, **kwargs):
1701 warnings.warn(
1705 warnings.warn(
1702 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1706 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1703 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1707 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1704
1708
1705 def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
1709 def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
1706 alias=None, revision=-1, message='', author='', date=None):
1710 alias=None, revision=-1, message='', author='', date=None):
1707 if requested_revision is not None:
1711 if requested_revision is not None:
1708 warnings.warn(
1712 warnings.warn(
1709 "Parameter requested_revision not supported anymore",
1713 "Parameter requested_revision not supported anymore",
1710 DeprecationWarning)
1714 DeprecationWarning)
1711 super(EmptyChangeset, self).__init__(
1715 super(EmptyChangeset, self).__init__(
1712 commit_id=cs, repo=repo, alias=alias, idx=revision,
1716 commit_id=cs, repo=repo, alias=alias, idx=revision,
1713 message=message, author=author, date=date)
1717 message=message, author=author, date=date)
1714
1718
1715 @property
1719 @property
1716 def revision(self):
1720 def revision(self):
1717 warnings.warn("Use idx instead", DeprecationWarning)
1721 warnings.warn("Use idx instead", DeprecationWarning)
1718 return self.idx
1722 return self.idx
1719
1723
1720 @revision.setter
1724 @revision.setter
1721 def revision(self, value):
1725 def revision(self, value):
1722 warnings.warn("Use idx instead", DeprecationWarning)
1726 warnings.warn("Use idx instead", DeprecationWarning)
1723 self.idx = value
1727 self.idx = value
1724
1728
1725
1729
1726 class EmptyRepository(BaseRepository):
1730 class EmptyRepository(BaseRepository):
1727 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1731 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1728 pass
1732 pass
1729
1733
1730 def get_diff(self, *args, **kwargs):
1734 def get_diff(self, *args, **kwargs):
1731 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1735 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1732 return GitDiff('')
1736 return GitDiff('')
1733
1737
1734
1738
1735 class CollectionGenerator(object):
1739 class CollectionGenerator(object):
1736
1740
1737 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
1741 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
1738 self.repo = repo
1742 self.repo = repo
1739 self.commit_ids = commit_ids
1743 self.commit_ids = commit_ids
1740 # TODO: (oliver) this isn't currently hooked up
1744 # TODO: (oliver) this isn't currently hooked up
1741 self.collection_size = None
1745 self.collection_size = None
1742 self.pre_load = pre_load
1746 self.pre_load = pre_load
1743 self.translate_tag = translate_tag
1747 self.translate_tag = translate_tag
1744
1748
1745 def __len__(self):
1749 def __len__(self):
1746 if self.collection_size is not None:
1750 if self.collection_size is not None:
1747 return self.collection_size
1751 return self.collection_size
1748 return self.commit_ids.__len__()
1752 return self.commit_ids.__len__()
1749
1753
1750 def __iter__(self):
1754 def __iter__(self):
1751 for commit_id in self.commit_ids:
1755 for commit_id in self.commit_ids:
1752 # TODO: johbo: Mercurial passes in commit indices or commit ids
1756 # TODO: johbo: Mercurial passes in commit indices or commit ids
1753 yield self._commit_factory(commit_id)
1757 yield self._commit_factory(commit_id)
1754
1758
1755 def _commit_factory(self, commit_id):
1759 def _commit_factory(self, commit_id):
1756 """
1760 """
1757 Allows backends to override the way commits are generated.
1761 Allows backends to override the way commits are generated.
1758 """
1762 """
1759 return self.repo.get_commit(
1763 return self.repo.get_commit(
1760 commit_id=commit_id, pre_load=self.pre_load,
1764 commit_id=commit_id, pre_load=self.pre_load,
1761 translate_tag=self.translate_tag)
1765 translate_tag=self.translate_tag)
1762
1766
1763 def __getslice__(self, i, j):
1767 def __getslice__(self, i, j):
1764 """
1768 """
1765 Returns an iterator of sliced repository
1769 Returns an iterator of sliced repository
1766 """
1770 """
1767 commit_ids = self.commit_ids[i:j]
1771 commit_ids = self.commit_ids[i:j]
1768 return self.__class__(
1772 return self.__class__(
1769 self.repo, commit_ids, pre_load=self.pre_load,
1773 self.repo, commit_ids, pre_load=self.pre_load,
1770 translate_tag=self.translate_tag)
1774 translate_tag=self.translate_tag)
1771
1775
1772 def __repr__(self):
1776 def __repr__(self):
1773 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1777 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1774
1778
1775
1779
1776 class Config(object):
1780 class Config(object):
1777 """
1781 """
1778 Represents the configuration for a repository.
1782 Represents the configuration for a repository.
1779
1783
1780 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1784 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1781 standard library. It implements only the needed subset.
1785 standard library. It implements only the needed subset.
1782 """
1786 """
1783
1787
1784 def __init__(self):
1788 def __init__(self):
1785 self._values = {}
1789 self._values = {}
1786
1790
1787 def copy(self):
1791 def copy(self):
1788 clone = Config()
1792 clone = Config()
1789 for section, values in self._values.items():
1793 for section, values in self._values.items():
1790 clone._values[section] = values.copy()
1794 clone._values[section] = values.copy()
1791 return clone
1795 return clone
1792
1796
1793 def __repr__(self):
1797 def __repr__(self):
1794 return '<Config(%s sections) at %s>' % (
1798 return '<Config(%s sections) at %s>' % (
1795 len(self._values), hex(id(self)))
1799 len(self._values), hex(id(self)))
1796
1800
1797 def items(self, section):
1801 def items(self, section):
1798 return self._values.get(section, {}).iteritems()
1802 return self._values.get(section, {}).iteritems()
1799
1803
1800 def get(self, section, option):
1804 def get(self, section, option):
1801 return self._values.get(section, {}).get(option)
1805 return self._values.get(section, {}).get(option)
1802
1806
1803 def set(self, section, option, value):
1807 def set(self, section, option, value):
1804 section_values = self._values.setdefault(section, {})
1808 section_values = self._values.setdefault(section, {})
1805 section_values[option] = value
1809 section_values[option] = value
1806
1810
1807 def clear_section(self, section):
1811 def clear_section(self, section):
1808 self._values[section] = {}
1812 self._values[section] = {}
1809
1813
1810 def serialize(self):
1814 def serialize(self):
1811 """
1815 """
1812 Creates a list of three tuples (section, key, value) representing
1816 Creates a list of three tuples (section, key, value) representing
1813 this config object.
1817 this config object.
1814 """
1818 """
1815 items = []
1819 items = []
1816 for section in self._values:
1820 for section in self._values:
1817 for option, value in self._values[section].items():
1821 for option, value in self._values[section].items():
1818 items.append(
1822 items.append(
1819 (safe_str(section), safe_str(option), safe_str(value)))
1823 (safe_str(section), safe_str(option), safe_str(value)))
1820 return items
1824 return items
1821
1825
1822
1826
1823 class Diff(object):
1827 class Diff(object):
1824 """
1828 """
1825 Represents a diff result from a repository backend.
1829 Represents a diff result from a repository backend.
1826
1830
1827 Subclasses have to provide a backend specific value for
1831 Subclasses have to provide a backend specific value for
1828 :attr:`_header_re` and :attr:`_meta_re`.
1832 :attr:`_header_re` and :attr:`_meta_re`.
1829 """
1833 """
1830 _meta_re = None
1834 _meta_re = None
1831 _header_re = None
1835 _header_re = None
1832
1836
1833 def __init__(self, raw_diff):
1837 def __init__(self, raw_diff):
1834 self.raw = raw_diff
1838 self.raw = raw_diff
1835
1839
1836 def chunks(self):
1840 def chunks(self):
1837 """
1841 """
1838 split the diff in chunks of separate --git a/file b/file chunks
1842 split the diff in chunks of separate --git a/file b/file chunks
1839 to make diffs consistent we must prepend with \n, and make sure
1843 to make diffs consistent we must prepend with \n, and make sure
1840 we can detect last chunk as this was also has special rule
1844 we can detect last chunk as this was also has special rule
1841 """
1845 """
1842
1846
1843 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1847 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1844 header = diff_parts[0]
1848 header = diff_parts[0]
1845
1849
1846 if self._meta_re:
1850 if self._meta_re:
1847 match = self._meta_re.match(header)
1851 match = self._meta_re.match(header)
1848
1852
1849 chunks = diff_parts[1:]
1853 chunks = diff_parts[1:]
1850 total_chunks = len(chunks)
1854 total_chunks = len(chunks)
1851
1855
1852 return (
1856 return (
1853 DiffChunk(chunk, self, cur_chunk == total_chunks)
1857 DiffChunk(chunk, self, cur_chunk == total_chunks)
1854 for cur_chunk, chunk in enumerate(chunks, start=1))
1858 for cur_chunk, chunk in enumerate(chunks, start=1))
1855
1859
1856
1860
1857 class DiffChunk(object):
1861 class DiffChunk(object):
1858
1862
1859 def __init__(self, chunk, diff, last_chunk):
1863 def __init__(self, chunk, diff, last_chunk):
1860 self._diff = diff
1864 self._diff = diff
1861
1865
1862 # since we split by \ndiff --git that part is lost from original diff
1866 # since we split by \ndiff --git that part is lost from original diff
1863 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1867 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1864 if not last_chunk:
1868 if not last_chunk:
1865 chunk += '\n'
1869 chunk += '\n'
1866
1870
1867 match = self._diff._header_re.match(chunk)
1871 match = self._diff._header_re.match(chunk)
1868 self.header = match.groupdict()
1872 self.header = match.groupdict()
1869 self.diff = chunk[match.end():]
1873 self.diff = chunk[match.end():]
1870 self.raw = chunk
1874 self.raw = chunk
1871
1875
1872
1876
1873 class BasePathPermissionChecker(object):
1877 class BasePathPermissionChecker(object):
1874
1878
1875 @staticmethod
1879 @staticmethod
1876 def create_from_patterns(includes, excludes):
1880 def create_from_patterns(includes, excludes):
1877 if includes and '*' in includes and not excludes:
1881 if includes and '*' in includes and not excludes:
1878 return AllPathPermissionChecker()
1882 return AllPathPermissionChecker()
1879 elif excludes and '*' in excludes:
1883 elif excludes and '*' in excludes:
1880 return NonePathPermissionChecker()
1884 return NonePathPermissionChecker()
1881 else:
1885 else:
1882 return PatternPathPermissionChecker(includes, excludes)
1886 return PatternPathPermissionChecker(includes, excludes)
1883
1887
1884 @property
1888 @property
1885 def has_full_access(self):
1889 def has_full_access(self):
1886 raise NotImplemented()
1890 raise NotImplemented()
1887
1891
1888 def has_access(self, path):
1892 def has_access(self, path):
1889 raise NotImplemented()
1893 raise NotImplemented()
1890
1894
1891
1895
1892 class AllPathPermissionChecker(BasePathPermissionChecker):
1896 class AllPathPermissionChecker(BasePathPermissionChecker):
1893
1897
1894 @property
1898 @property
1895 def has_full_access(self):
1899 def has_full_access(self):
1896 return True
1900 return True
1897
1901
1898 def has_access(self, path):
1902 def has_access(self, path):
1899 return True
1903 return True
1900
1904
1901
1905
1902 class NonePathPermissionChecker(BasePathPermissionChecker):
1906 class NonePathPermissionChecker(BasePathPermissionChecker):
1903
1907
1904 @property
1908 @property
1905 def has_full_access(self):
1909 def has_full_access(self):
1906 return False
1910 return False
1907
1911
1908 def has_access(self, path):
1912 def has_access(self, path):
1909 return False
1913 return False
1910
1914
1911
1915
1912 class PatternPathPermissionChecker(BasePathPermissionChecker):
1916 class PatternPathPermissionChecker(BasePathPermissionChecker):
1913
1917
1914 def __init__(self, includes, excludes):
1918 def __init__(self, includes, excludes):
1915 self.includes = includes
1919 self.includes = includes
1916 self.excludes = excludes
1920 self.excludes = excludes
1917 self.includes_re = [] if not includes else [
1921 self.includes_re = [] if not includes else [
1918 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1922 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1919 self.excludes_re = [] if not excludes else [
1923 self.excludes_re = [] if not excludes else [
1920 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1924 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1921
1925
1922 @property
1926 @property
1923 def has_full_access(self):
1927 def has_full_access(self):
1924 return '*' in self.includes and not self.excludes
1928 return '*' in self.includes and not self.excludes
1925
1929
1926 def has_access(self, path):
1930 def has_access(self, path):
1927 for regex in self.excludes_re:
1931 for regex in self.excludes_re:
1928 if regex.match(path):
1932 if regex.match(path):
1929 return False
1933 return False
1930 for regex in self.includes_re:
1934 for regex in self.includes_re:
1931 if regex.match(path):
1935 if regex.match(path):
1932 return True
1936 return True
1933 return False
1937 return False
@@ -1,1034 +1,1043 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2020 RhodeCode GmbH
3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT repository module
22 GIT repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import re
27 import re
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 utcdate_fromtimestamp, makedate, date_astimestamp)
33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.utils2 import CachedProperty
35 from rhodecode.lib.utils2 import CachedProperty
36 from rhodecode.lib.vcs import connection, path as vcspath
36 from rhodecode.lib.vcs import connection, path as vcspath
37 from rhodecode.lib.vcs.backends.base import (
37 from rhodecode.lib.vcs.backends.base import (
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 MergeFailureReason, Reference)
39 MergeFailureReason, Reference)
40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
43 from rhodecode.lib.vcs.exceptions import (
43 from rhodecode.lib.vcs.exceptions import (
44 CommitDoesNotExistError, EmptyRepositoryError,
44 CommitDoesNotExistError, EmptyRepositoryError,
45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
46
46
47
47
48 SHA_PATTERN = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')
48 SHA_PATTERN = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')
49
49
50 log = logging.getLogger(__name__)
50 log = logging.getLogger(__name__)
51
51
52
52
53 class GitRepository(BaseRepository):
53 class GitRepository(BaseRepository):
54 """
54 """
55 Git repository backend.
55 Git repository backend.
56 """
56 """
57 DEFAULT_BRANCH_NAME = 'master'
57 DEFAULT_BRANCH_NAME = 'master'
58
58
59 contact = BaseRepository.DEFAULT_CONTACT
59 contact = BaseRepository.DEFAULT_CONTACT
60
60
61 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 do_workspace_checkout=False, with_wire=None, bare=False):
62 do_workspace_checkout=False, with_wire=None, bare=False):
63
63
64 self.path = safe_str(os.path.abspath(repo_path))
64 self.path = safe_str(os.path.abspath(repo_path))
65 self.config = config if config else self.get_default_config()
65 self.config = config if config else self.get_default_config()
66 self.with_wire = with_wire or {"cache": False} # default should not use cache
66 self.with_wire = with_wire or {"cache": False} # default should not use cache
67
67
68 self._init_repo(create, src_url, do_workspace_checkout, bare)
68 self._init_repo(create, src_url, do_workspace_checkout, bare)
69
69
70 # caches
70 # caches
71 self._commit_ids = {}
71 self._commit_ids = {}
72
72
73 @LazyProperty
73 @LazyProperty
74 def _remote(self):
74 def _remote(self):
75 repo_id = self.path
75 repo_id = self.path
76 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
76 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
77
77
78 @LazyProperty
78 @LazyProperty
79 def bare(self):
79 def bare(self):
80 return self._remote.bare()
80 return self._remote.bare()
81
81
82 @LazyProperty
82 @LazyProperty
83 def head(self):
83 def head(self):
84 return self._remote.head()
84 return self._remote.head()
85
85
86 @CachedProperty
86 @CachedProperty
87 def commit_ids(self):
87 def commit_ids(self):
88 """
88 """
89 Returns list of commit ids, in ascending order. Being lazy
89 Returns list of commit ids, in ascending order. Being lazy
90 attribute allows external tools to inject commit ids from cache.
90 attribute allows external tools to inject commit ids from cache.
91 """
91 """
92 commit_ids = self._get_all_commit_ids()
92 commit_ids = self._get_all_commit_ids()
93 self._rebuild_cache(commit_ids)
93 self._rebuild_cache(commit_ids)
94 return commit_ids
94 return commit_ids
95
95
96 def _rebuild_cache(self, commit_ids):
96 def _rebuild_cache(self, commit_ids):
97 self._commit_ids = dict((commit_id, index)
97 self._commit_ids = dict((commit_id, index)
98 for index, commit_id in enumerate(commit_ids))
98 for index, commit_id in enumerate(commit_ids))
99
99
100 def run_git_command(self, cmd, **opts):
100 def run_git_command(self, cmd, **opts):
101 """
101 """
102 Runs given ``cmd`` as git command and returns tuple
102 Runs given ``cmd`` as git command and returns tuple
103 (stdout, stderr).
103 (stdout, stderr).
104
104
105 :param cmd: git command to be executed
105 :param cmd: git command to be executed
106 :param opts: env options to pass into Subprocess command
106 :param opts: env options to pass into Subprocess command
107 """
107 """
108 if not isinstance(cmd, list):
108 if not isinstance(cmd, list):
109 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
109 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
110
110
111 skip_stderr_log = opts.pop('skip_stderr_log', False)
111 skip_stderr_log = opts.pop('skip_stderr_log', False)
112 out, err = self._remote.run_git_command(cmd, **opts)
112 out, err = self._remote.run_git_command(cmd, **opts)
113 if err and not skip_stderr_log:
113 if err and not skip_stderr_log:
114 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
114 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
115 return out, err
115 return out, err
116
116
117 @staticmethod
117 @staticmethod
118 def check_url(url, config):
118 def check_url(url, config):
119 """
119 """
120 Function will check given url and try to verify if it's a valid
120 Function will check given url and try to verify if it's a valid
121 link. Sometimes it may happened that git will issue basic
121 link. Sometimes it may happened that git will issue basic
122 auth request that can cause whole API to hang when used from python
122 auth request that can cause whole API to hang when used from python
123 or other external calls.
123 or other external calls.
124
124
125 On failures it'll raise urllib2.HTTPError, exception is also thrown
125 On failures it'll raise urllib2.HTTPError, exception is also thrown
126 when the return code is non 200
126 when the return code is non 200
127 """
127 """
128 # check first if it's not an url
128 # check first if it's not an url
129 if os.path.isdir(url) or url.startswith('file:'):
129 if os.path.isdir(url) or url.startswith('file:'):
130 return True
130 return True
131
131
132 if '+' in url.split('://', 1)[0]:
132 if '+' in url.split('://', 1)[0]:
133 url = url.split('+', 1)[1]
133 url = url.split('+', 1)[1]
134
134
135 # Request the _remote to verify the url
135 # Request the _remote to verify the url
136 return connection.Git.check_url(url, config.serialize())
136 return connection.Git.check_url(url, config.serialize())
137
137
138 @staticmethod
138 @staticmethod
139 def is_valid_repository(path):
139 def is_valid_repository(path):
140 if os.path.isdir(os.path.join(path, '.git')):
140 if os.path.isdir(os.path.join(path, '.git')):
141 return True
141 return True
142 # check case of bare repository
142 # check case of bare repository
143 try:
143 try:
144 GitRepository(path)
144 GitRepository(path)
145 return True
145 return True
146 except VCSError:
146 except VCSError:
147 pass
147 pass
148 return False
148 return False
149
149
    def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
                   bare=False):
        """
        Initialize (or validate) the repository at ``self.path``.

        :param create: create a new repository on disk
        :param src_url: optional URL to clone/fetch from (validated first)
        :param do_workspace_checkout: check out a working copy after pulling
        :param bare: create a bare repository (no working copy)
        :raises RepositoryError: when the target already exists, a bare
            checkout is requested, the path is not a git repository, or an
            underlying OSError occurs
        """
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        # a bare repository has no working copy that could be checked out
        if bare and do_workspace_checkout:
            raise RepositoryError("Cannot update a bare repository")
        try:

            if src_url:
                # check URL before any actions
                GitRepository.check_url(src_url, self.config)

            if create:
                os.makedirs(self.path, mode=0o755)

                if bare:
                    self._remote.init_bare()
                else:
                    self._remote.init()

                if src_url and bare:
                    # bare repository only allows a fetch and checkout is not allowed
                    self.fetch(src_url, commit_ids=None)
                elif src_url:
                    self.pull(src_url, commit_ids=None,
                              update_after=do_workspace_checkout)

            else:
                # not creating: the existing path must hold a git repository
                if not self._remote.assert_correct_path():
                    raise RepositoryError(
                        'Path "%s" does not contain a Git repository' %
                        (self.path,))

        # TODO: johbo: check if we have to translate the OSError here
        except OSError as err:
            raise RepositoryError(err)
189
189
    def _get_all_commit_ids(self):
        # Delegate to the remote (vcsserver) to list every commit sha.
        return self._remote.get_all_commit_ids()
192
192
193 def _get_commit_ids(self, filters=None):
193 def _get_commit_ids(self, filters=None):
194 # we must check if this repo is not empty, since later command
194 # we must check if this repo is not empty, since later command
195 # fails if it is. And it's cheaper to ask than throw the subprocess
195 # fails if it is. And it's cheaper to ask than throw the subprocess
196 # errors
196 # errors
197
197
198 head = self._remote.head(show_exc=False)
198 head = self._remote.head(show_exc=False)
199
199
200 if not head:
200 if not head:
201 return []
201 return []
202
202
203 rev_filter = ['--branches', '--tags']
203 rev_filter = ['--branches', '--tags']
204 extra_filter = []
204 extra_filter = []
205
205
206 if filters:
206 if filters:
207 if filters.get('since'):
207 if filters.get('since'):
208 extra_filter.append('--since=%s' % (filters['since']))
208 extra_filter.append('--since=%s' % (filters['since']))
209 if filters.get('until'):
209 if filters.get('until'):
210 extra_filter.append('--until=%s' % (filters['until']))
210 extra_filter.append('--until=%s' % (filters['until']))
211 if filters.get('branch_name'):
211 if filters.get('branch_name'):
212 rev_filter = []
212 rev_filter = []
213 extra_filter.append(filters['branch_name'])
213 extra_filter.append(filters['branch_name'])
214 rev_filter.extend(extra_filter)
214 rev_filter.extend(extra_filter)
215
215
216 # if filters.get('start') or filters.get('end'):
216 # if filters.get('start') or filters.get('end'):
217 # # skip is offset, max-count is limit
217 # # skip is offset, max-count is limit
218 # if filters.get('start'):
218 # if filters.get('start'):
219 # extra_filter += ' --skip=%s' % filters['start']
219 # extra_filter += ' --skip=%s' % filters['start']
220 # if filters.get('end'):
220 # if filters.get('end'):
221 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
221 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
222
222
223 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
223 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
224 try:
224 try:
225 output, __ = self.run_git_command(cmd)
225 output, __ = self.run_git_command(cmd)
226 except RepositoryError:
226 except RepositoryError:
227 # Can be raised for empty repositories
227 # Can be raised for empty repositories
228 return []
228 return []
229 return output.splitlines()
229 return output.splitlines()
230
230
    def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False, reference_obj=None):
        """
        Translate a commit identifier (sha, numeric index, tag/branch name,
        or null-sha) into a full commit sha.

        :param reference_obj: when given and it points at a branch, a
            numeric value is treated as a branch NAME (e.g. a branch
            literally called "123") instead of a commit-list index.
        """

        def is_null(value):
            # true when the value is entirely '0' characters (the null sha)
            return len(value) == commit_id_or_idx.count('0')

        if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
            return self.commit_ids[-1]

        commit_missing_err = "Commit {} does not exist for `{}`".format(
            *map(safe_str, [commit_id_or_idx, self.name]))

        is_bstr = isinstance(commit_id_or_idx, (str, unicode))
        is_branch = reference_obj and reference_obj.branch
        is_numeric_idx = \
            (is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12) \
            or isinstance(commit_id_or_idx, int)

        # numeric index into the commit list -- but never for branches,
        # which may have purely numeric names
        if not is_branch and (is_numeric_idx or is_null(commit_id_or_idx)):
            try:
                commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
            except Exception:
                raise CommitDoesNotExistError(commit_missing_err)

        elif is_bstr:
            # Need to call remote to translate id for tagging scenarios,
            # or branch that are numeric
            try:
                remote_data = self._remote.get_object(commit_id_or_idx,
                                                      maybe_unreachable=maybe_unreachable)
                commit_id_or_idx = remote_data["commit_id"]
            except (CommitDoesNotExistError,):
                raise CommitDoesNotExistError(commit_missing_err)

        # Ensure we return full id
        if not SHA_PATTERN.match(str(commit_id_or_idx)):
            raise CommitDoesNotExistError(
                "Given commit id %s not recognized" % commit_id_or_idx)
        return commit_id_or_idx
263
269
264 def get_hook_location(self):
270 def get_hook_location(self):
265 """
271 """
266 returns absolute path to location where hooks are stored
272 returns absolute path to location where hooks are stored
267 """
273 """
268 loc = os.path.join(self.path, 'hooks')
274 loc = os.path.join(self.path, 'hooks')
269 if not self.bare:
275 if not self.bare:
270 loc = os.path.join(self.path, '.git', 'hooks')
276 loc = os.path.join(self.path, '.git', 'hooks')
271 return loc
277 return loc
272
278
273 @LazyProperty
279 @LazyProperty
274 def last_change(self):
280 def last_change(self):
275 """
281 """
276 Returns last change made on this repository as
282 Returns last change made on this repository as
277 `datetime.datetime` object.
283 `datetime.datetime` object.
278 """
284 """
279 try:
285 try:
280 return self.get_commit().date
286 return self.get_commit().date
281 except RepositoryError:
287 except RepositoryError:
282 tzoffset = makedate()[1]
288 tzoffset = makedate()[1]
283 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
289 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
284
290
285 def _get_fs_mtime(self):
291 def _get_fs_mtime(self):
286 idx_loc = '' if self.bare else '.git'
292 idx_loc = '' if self.bare else '.git'
287 # fallback to filesystem
293 # fallback to filesystem
288 in_path = os.path.join(self.path, idx_loc, "index")
294 in_path = os.path.join(self.path, idx_loc, "index")
289 he_path = os.path.join(self.path, idx_loc, "HEAD")
295 he_path = os.path.join(self.path, idx_loc, "HEAD")
290 if os.path.exists(in_path):
296 if os.path.exists(in_path):
291 return os.stat(in_path).st_mtime
297 return os.stat(in_path).st_mtime
292 else:
298 else:
293 return os.stat(he_path).st_mtime
299 return os.stat(he_path).st_mtime
294
300
    @LazyProperty
    def description(self):
        """Repository description, falling back to ``DEFAULT_DESCRIPTION``."""
        description = self._remote.get_description()
        return safe_unicode(description or self.DEFAULT_DESCRIPTION)
299
305
300 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
306 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
301 if self.is_empty():
307 if self.is_empty():
302 return OrderedDict()
308 return OrderedDict()
303
309
304 result = []
310 result = []
305 for ref, sha in self._refs.iteritems():
311 for ref, sha in self._refs.iteritems():
306 if ref.startswith(prefix):
312 if ref.startswith(prefix):
307 ref_name = ref
313 ref_name = ref
308 if strip_prefix:
314 if strip_prefix:
309 ref_name = ref[len(prefix):]
315 ref_name = ref[len(prefix):]
310 result.append((safe_unicode(ref_name), sha))
316 result.append((safe_unicode(ref_name), sha))
311
317
312 def get_name(entry):
318 def get_name(entry):
313 return entry[0]
319 return entry[0]
314
320
315 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
321 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
316
322
    def _get_branches(self):
        # branch refs live under refs/heads/
        return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
319
325
    @CachedProperty
    def branches(self):
        # cached mapping of branch name -> head sha
        return self._get_branches()
323
329
    @CachedProperty
    def branches_closed(self):
        # git has no notion of closed branches; kept for interface
        # compatibility with the mercurial backend
        return {}
327
333
    @CachedProperty
    def bookmarks(self):
        # git has no bookmarks; kept for interface compatibility with
        # the mercurial backend
        return {}
331
337
332 @CachedProperty
338 @CachedProperty
333 def branches_all(self):
339 def branches_all(self):
334 all_branches = {}
340 all_branches = {}
335 all_branches.update(self.branches)
341 all_branches.update(self.branches)
336 all_branches.update(self.branches_closed)
342 all_branches.update(self.branches_closed)
337 return all_branches
343 return all_branches
338
344
    @CachedProperty
    def tags(self):
        # cached mapping of tag name -> sha
        return self._get_tags()
342
348
    def _get_tags(self):
        # tag refs live under refs/tags/; listed in reverse name order
        return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
345
351
346 def tag(self, name, user, commit_id=None, message=None, date=None,
352 def tag(self, name, user, commit_id=None, message=None, date=None,
347 **kwargs):
353 **kwargs):
348 # TODO: fix this method to apply annotated tags correct with message
354 # TODO: fix this method to apply annotated tags correct with message
349 """
355 """
350 Creates and returns a tag for the given ``commit_id``.
356 Creates and returns a tag for the given ``commit_id``.
351
357
352 :param name: name for new tag
358 :param name: name for new tag
353 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
359 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
354 :param commit_id: commit id for which new tag would be created
360 :param commit_id: commit id for which new tag would be created
355 :param message: message of the tag's commit
361 :param message: message of the tag's commit
356 :param date: date of tag's commit
362 :param date: date of tag's commit
357
363
358 :raises TagAlreadyExistError: if tag with same name already exists
364 :raises TagAlreadyExistError: if tag with same name already exists
359 """
365 """
360 if name in self.tags:
366 if name in self.tags:
361 raise TagAlreadyExistError("Tag %s already exists" % name)
367 raise TagAlreadyExistError("Tag %s already exists" % name)
362 commit = self.get_commit(commit_id=commit_id)
368 commit = self.get_commit(commit_id=commit_id)
363 message = message or "Added tag %s for commit %s" % (name, commit.raw_id)
369 message = message or "Added tag %s for commit %s" % (name, commit.raw_id)
364
370
365 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
371 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
366
372
367 self._invalidate_prop_cache('tags')
373 self._invalidate_prop_cache('tags')
368 self._invalidate_prop_cache('_refs')
374 self._invalidate_prop_cache('_refs')
369
375
370 return commit
376 return commit
371
377
372 def remove_tag(self, name, user, message=None, date=None):
378 def remove_tag(self, name, user, message=None, date=None):
373 """
379 """
374 Removes tag with the given ``name``.
380 Removes tag with the given ``name``.
375
381
376 :param name: name of the tag to be removed
382 :param name: name of the tag to be removed
377 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
383 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
378 :param message: message of the tag's removal commit
384 :param message: message of the tag's removal commit
379 :param date: date of tag's removal commit
385 :param date: date of tag's removal commit
380
386
381 :raises TagDoesNotExistError: if tag with given name does not exists
387 :raises TagDoesNotExistError: if tag with given name does not exists
382 """
388 """
383 if name not in self.tags:
389 if name not in self.tags:
384 raise TagDoesNotExistError("Tag %s does not exist" % name)
390 raise TagDoesNotExistError("Tag %s does not exist" % name)
385
391
386 self._remote.tag_remove(name)
392 self._remote.tag_remove(name)
387 self._invalidate_prop_cache('tags')
393 self._invalidate_prop_cache('tags')
388 self._invalidate_prop_cache('_refs')
394 self._invalidate_prop_cache('_refs')
389
395
    def _get_refs(self):
        # full ref name -> sha mapping straight from the remote
        return self._remote.get_refs()
392
398
    @CachedProperty
    def _refs(self):
        # cached copy of all refs; invalidated whenever refs are modified
        # (see set_refs / remove_ref / tag / remove_tag / strip)
        return self._get_refs()
396
402
    @property
    def _ref_tree(self):
        # Nested-dict view of all refs,
        # e.g. {'refs': {'heads': {'master': sha}}}
        node = tree = {}
        for ref, sha in self._refs.iteritems():
            path = ref.split('/')
            # walk/create intermediate nodes for every path component
            for bit in path[:-1]:
                node = node.setdefault(bit, {})
            node[path[-1]] = sha
            node = tree  # reset the cursor to the root for the next ref
        return tree
407
413
    def get_remote_ref(self, ref_name):
        # Sha the `origin/<ref_name>` remote-tracking ref points at,
        # or None when it cannot be resolved.
        ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
        try:
            return self._refs[ref_key]
        except Exception:
            # broad on purpose: missing key OR refs unavailable -> None
            return
414
420
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
                   translate_tag=True, maybe_unreachable=False, reference_obj=None):
        """
        Returns `GitCommit` object representing commit from git repository
        at the given `commit_id` or head (most recent commit) if None given.

        :param reference_obj: optional reference; passed through to
            `_lookup_commit` so numeric branch names resolve correctly
        :raises EmptyRepositoryError: when the repository has no commits
        """

        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # we have cached idx, use it without contacting the remote
                idx = self._commit_ids[commit_id]
                return GitCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                pass

        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                _commit_id = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    # normalize a negative index to its positive position
                    commit_idx = self.commit_ids.index(_commit_id)
                return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
            except IndexError:
                commit_id = commit_idx
        else:
            commit_id = "tip"

        if translate_tag:
            # resolve tags/branches (possibly numeric names) to full shas
            commit_id = self._lookup_commit(
                commit_id, maybe_unreachable=maybe_unreachable,
                reference_obj=reference_obj)

        try:
            idx = self._commit_ids[commit_id]
        except KeyError:
            idx = -1

        return GitCommit(self, commit_id, idx, pre_load=pre_load)
454
463
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
        """
        Returns generator of `GitCommit` objects from start to end (both
        are inclusive), in ascending date order.

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
          branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
            Mercurial evolve
        :raise BranchDoesNotExistError: If given `branch_name` does not
            exist.
        :raise CommitDoesNotExistError: If commits for given `start` or
          `end` could not be found.

        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        self._validate_branch_name(branch_name)

        if start_id is not None:
            self._validate_commit_id(start_id)
        if end_id is not None:
            self._validate_commit_id(end_id)

        # translate the boundary ids into positions in the commit list
        start_raw_id = self._lookup_commit(start_id)
        start_pos = self._commit_ids[start_raw_id] if start_id else None
        end_raw_id = self._lookup_commit(end_id)
        end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            # `end` is inclusive but a slice end is exclusive
            end_pos += 1

        filter_ = []
        if branch_name:
            filter_.append({'branch_name': branch_name})
        if start_date and not end_date:
            filter_.append({'since': start_date})
        if end_date and not start_date:
            filter_.append({'until': end_date})
        if start_date and end_date:
            filter_.append({'since': start_date})
            filter_.append({'until': end_date})

        # if start_pos or end_pos:
        #     filter_.append({'start': start_pos})
        #     filter_.append({'end': end_pos})

        if filter_:
            revfilters = {
                'branch_name': branch_name,
                'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
                'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
                'start': start_pos,
                'end': end_pos,
            }
            commit_ids = self._get_commit_ids(filters=revfilters)

        else:
            commit_ids = self.commit_ids

        # positional slicing happens locally, on top of the filtered list
        if start_pos or end_pos:
            commit_ids = commit_ids[start_pos: end_pos]

        return CollectionGenerator(self, commit_ids, pre_load=pre_load,
                                   translate_tag=translate_tags)
534
543
535 def get_diff(
544 def get_diff(
536 self, commit1, commit2, path='', ignore_whitespace=False,
545 self, commit1, commit2, path='', ignore_whitespace=False,
537 context=3, path1=None):
546 context=3, path1=None):
538 """
547 """
539 Returns (git like) *diff*, as plain text. Shows changes introduced by
548 Returns (git like) *diff*, as plain text. Shows changes introduced by
540 ``commit2`` since ``commit1``.
549 ``commit2`` since ``commit1``.
541
550
542 :param commit1: Entry point from which diff is shown. Can be
551 :param commit1: Entry point from which diff is shown. Can be
543 ``self.EMPTY_COMMIT`` - in this case, patch showing all
552 ``self.EMPTY_COMMIT`` - in this case, patch showing all
544 the changes since empty state of the repository until ``commit2``
553 the changes since empty state of the repository until ``commit2``
545 :param commit2: Until which commits changes should be shown.
554 :param commit2: Until which commits changes should be shown.
546 :param ignore_whitespace: If set to ``True``, would not show whitespace
555 :param ignore_whitespace: If set to ``True``, would not show whitespace
547 changes. Defaults to ``False``.
556 changes. Defaults to ``False``.
548 :param context: How many lines before/after changed lines should be
557 :param context: How many lines before/after changed lines should be
549 shown. Defaults to ``3``.
558 shown. Defaults to ``3``.
550 """
559 """
551 self._validate_diff_commits(commit1, commit2)
560 self._validate_diff_commits(commit1, commit2)
552 if path1 is not None and path1 != path:
561 if path1 is not None and path1 != path:
553 raise ValueError("Diff of two different paths not supported.")
562 raise ValueError("Diff of two different paths not supported.")
554
563
555 if path:
564 if path:
556 file_filter = path
565 file_filter = path
557 else:
566 else:
558 file_filter = None
567 file_filter = None
559
568
560 diff = self._remote.diff(
569 diff = self._remote.diff(
561 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
570 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
562 opt_ignorews=ignore_whitespace,
571 opt_ignorews=ignore_whitespace,
563 context=context)
572 context=context)
564 return GitDiff(diff)
573 return GitDiff(diff)
565
574
566 def strip(self, commit_id, branch_name):
575 def strip(self, commit_id, branch_name):
567 commit = self.get_commit(commit_id=commit_id)
576 commit = self.get_commit(commit_id=commit_id)
568 if commit.merge:
577 if commit.merge:
569 raise Exception('Cannot reset to merge commit')
578 raise Exception('Cannot reset to merge commit')
570
579
571 # parent is going to be the new head now
580 # parent is going to be the new head now
572 commit = commit.parents[0]
581 commit = commit.parents[0]
573 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
582 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
574
583
575 # clear cached properties
584 # clear cached properties
576 self._invalidate_prop_cache('commit_ids')
585 self._invalidate_prop_cache('commit_ids')
577 self._invalidate_prop_cache('_refs')
586 self._invalidate_prop_cache('_refs')
578 self._invalidate_prop_cache('branches')
587 self._invalidate_prop_cache('branches')
579
588
580 return len(self.commit_ids)
589 return len(self.commit_ids)
581
590
    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        """
        Return the sha of the common ancestor of ``commit_id1`` (this repo)
        and ``commit_id2`` (in ``repo2``), or None when none can be found.
        """
        log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
                  self, commit_id1, repo2, commit_id2)

        if commit_id1 == commit_id2:
            return commit_id1

        if self != repo2:
            # cross-repo: the ancestor is the parent of the oldest commit
            # that is missing from this repository
            commits = self._remote.get_missing_revs(
                commit_id1, commit_id2, repo2.path)
            if commits:
                commit = repo2.get_commit(commits[-1])
                if commit.parents:
                    ancestor_id = commit.parents[0].raw_id
                else:
                    # root commit has no parent -> no common ancestor
                    ancestor_id = None
            else:
                # no commits from other repo, ancestor_id is the commit_id2
                ancestor_id = commit_id2
        else:
            # same repository: let git compute the merge base directly
            output, __ = self.run_git_command(
                ['merge-base', commit_id1, commit_id2])
            ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]

        log.debug('Found common ancestor with sha: %s', ancestor_id)

        return ancestor_id
609
618
    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Return the list of commits (oldest first) reachable from
        ``commit_id2`` but not from ``commit_id1``.

        NOTE(review): `merge` and `ancestor_id` are unused here -- possibly
        kept for interface parity with other backends; confirm before removal.
        """
        repo1 = self
        ancestor_id = None

        if commit_id1 == commit_id2:
            commits = []
        elif repo1 != repo2:
            # cross-repository: ask the remote which revisions are missing
            missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
                                                        repo2.path)
            commits = [
                repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
                for commit_id in reversed(missing_ids)]
        else:
            # same repository: use `git log commit1..commit2`
            output, __ = repo1.run_git_command(
                ['log', '--reverse', '--pretty=format: %H', '-s',
                 '%s..%s' % (commit_id1, commit_id2)])
            commits = [
                repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
                for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]

        return commits
631
640
    @LazyProperty
    def in_memory_commit(self):
        """
        Returns ``GitInMemoryCommit`` object for this repository.
        """
        # fresh in-memory commit bound to this repository instance
        return GitInMemoryCommit(self)
638
647
639 def pull(self, url, commit_ids=None, update_after=False):
648 def pull(self, url, commit_ids=None, update_after=False):
640 """
649 """
641 Pull changes from external location. Pull is different in GIT
650 Pull changes from external location. Pull is different in GIT
642 that fetch since it's doing a checkout
651 that fetch since it's doing a checkout
643
652
644 :param commit_ids: Optional. Can be set to a list of commit ids
653 :param commit_ids: Optional. Can be set to a list of commit ids
645 which shall be pulled from the other repository.
654 which shall be pulled from the other repository.
646 """
655 """
647 refs = None
656 refs = None
648 if commit_ids is not None:
657 if commit_ids is not None:
649 remote_refs = self._remote.get_remote_refs(url)
658 remote_refs = self._remote.get_remote_refs(url)
650 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
659 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
651 self._remote.pull(url, refs=refs, update_after=update_after)
660 self._remote.pull(url, refs=refs, update_after=update_after)
652 self._remote.invalidate_vcs_cache()
661 self._remote.invalidate_vcs_cache()
653
662
654 def fetch(self, url, commit_ids=None):
663 def fetch(self, url, commit_ids=None):
655 """
664 """
656 Fetch all git objects from external location.
665 Fetch all git objects from external location.
657 """
666 """
658 self._remote.sync_fetch(url, refs=commit_ids)
667 self._remote.sync_fetch(url, refs=commit_ids)
659 self._remote.invalidate_vcs_cache()
668 self._remote.invalidate_vcs_cache()
660
669
661 def push(self, url):
670 def push(self, url):
662 refs = None
671 refs = None
663 self._remote.sync_push(url, refs=refs)
672 self._remote.sync_push(url, refs=refs)
664
673
665 def set_refs(self, ref_name, commit_id):
674 def set_refs(self, ref_name, commit_id):
666 self._remote.set_refs(ref_name, commit_id)
675 self._remote.set_refs(ref_name, commit_id)
667 self._invalidate_prop_cache('_refs')
676 self._invalidate_prop_cache('_refs')
668
677
669 def remove_ref(self, ref_name):
678 def remove_ref(self, ref_name):
670 self._remote.remove_ref(ref_name)
679 self._remote.remove_ref(ref_name)
671 self._invalidate_prop_cache('_refs')
680 self._invalidate_prop_cache('_refs')
672
681
673 def run_gc(self, prune=True):
682 def run_gc(self, prune=True):
674 cmd = ['gc', '--aggressive']
683 cmd = ['gc', '--aggressive']
675 if prune:
684 if prune:
676 cmd += ['--prune=now']
685 cmd += ['--prune=now']
677 _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False)
686 _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False)
678 return stderr
687 return stderr
679
688
680 def _update_server_info(self):
689 def _update_server_info(self):
681 """
690 """
682 runs gits update-server-info command in this repo instance
691 runs gits update-server-info command in this repo instance
683 """
692 """
684 self._remote.update_server_info()
693 self._remote.update_server_info()
685
694
686 def _current_branch(self):
695 def _current_branch(self):
687 """
696 """
688 Return the name of the current branch.
697 Return the name of the current branch.
689
698
690 It only works for non bare repositories (i.e. repositories with a
699 It only works for non bare repositories (i.e. repositories with a
691 working copy)
700 working copy)
692 """
701 """
693 if self.bare:
702 if self.bare:
694 raise RepositoryError('Bare git repos do not have active branches')
703 raise RepositoryError('Bare git repos do not have active branches')
695
704
696 if self.is_empty():
705 if self.is_empty():
697 return None
706 return None
698
707
699 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
708 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
700 return stdout.strip()
709 return stdout.strip()
701
710
702 def _checkout(self, branch_name, create=False, force=False):
711 def _checkout(self, branch_name, create=False, force=False):
703 """
712 """
704 Checkout a branch in the working directory.
713 Checkout a branch in the working directory.
705
714
706 It tries to create the branch if create is True, failing if the branch
715 It tries to create the branch if create is True, failing if the branch
707 already exists.
716 already exists.
708
717
709 It only works for non bare repositories (i.e. repositories with a
718 It only works for non bare repositories (i.e. repositories with a
710 working copy)
719 working copy)
711 """
720 """
712 if self.bare:
721 if self.bare:
713 raise RepositoryError('Cannot checkout branches in a bare git repo')
722 raise RepositoryError('Cannot checkout branches in a bare git repo')
714
723
715 cmd = ['checkout']
724 cmd = ['checkout']
716 if force:
725 if force:
717 cmd.append('-f')
726 cmd.append('-f')
718 if create:
727 if create:
719 cmd.append('-b')
728 cmd.append('-b')
720 cmd.append(branch_name)
729 cmd.append(branch_name)
721 self.run_git_command(cmd, fail_on_stderr=False)
730 self.run_git_command(cmd, fail_on_stderr=False)
722
731
723 def _create_branch(self, branch_name, commit_id):
732 def _create_branch(self, branch_name, commit_id):
724 """
733 """
725 creates a branch in a GIT repo
734 creates a branch in a GIT repo
726 """
735 """
727 self._remote.create_branch(branch_name, commit_id)
736 self._remote.create_branch(branch_name, commit_id)
728
737
729 def _identify(self):
738 def _identify(self):
730 """
739 """
731 Return the current state of the working directory.
740 Return the current state of the working directory.
732 """
741 """
733 if self.bare:
742 if self.bare:
734 raise RepositoryError('Bare git repos do not have active branches')
743 raise RepositoryError('Bare git repos do not have active branches')
735
744
736 if self.is_empty():
745 if self.is_empty():
737 return None
746 return None
738
747
739 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
748 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
740 return stdout.strip()
749 return stdout.strip()
741
750
742 def _local_clone(self, clone_path, branch_name, source_branch=None):
751 def _local_clone(self, clone_path, branch_name, source_branch=None):
743 """
752 """
744 Create a local clone of the current repo.
753 Create a local clone of the current repo.
745 """
754 """
746 # N.B.(skreft): the --branch option is required as otherwise the shallow
755 # N.B.(skreft): the --branch option is required as otherwise the shallow
747 # clone will only fetch the active branch.
756 # clone will only fetch the active branch.
748 cmd = ['clone', '--branch', branch_name,
757 cmd = ['clone', '--branch', branch_name,
749 self.path, os.path.abspath(clone_path)]
758 self.path, os.path.abspath(clone_path)]
750
759
751 self.run_git_command(cmd, fail_on_stderr=False)
760 self.run_git_command(cmd, fail_on_stderr=False)
752
761
753 # if we get the different source branch, make sure we also fetch it for
762 # if we get the different source branch, make sure we also fetch it for
754 # merge conditions
763 # merge conditions
755 if source_branch and source_branch != branch_name:
764 if source_branch and source_branch != branch_name:
756 # check if the ref exists.
765 # check if the ref exists.
757 shadow_repo = GitRepository(os.path.abspath(clone_path))
766 shadow_repo = GitRepository(os.path.abspath(clone_path))
758 if shadow_repo.get_remote_ref(source_branch):
767 if shadow_repo.get_remote_ref(source_branch):
759 cmd = ['fetch', self.path, source_branch]
768 cmd = ['fetch', self.path, source_branch]
760 self.run_git_command(cmd, fail_on_stderr=False)
769 self.run_git_command(cmd, fail_on_stderr=False)
761
770
762 def _local_fetch(self, repository_path, branch_name, use_origin=False):
771 def _local_fetch(self, repository_path, branch_name, use_origin=False):
763 """
772 """
764 Fetch a branch from a local repository.
773 Fetch a branch from a local repository.
765 """
774 """
766 repository_path = os.path.abspath(repository_path)
775 repository_path = os.path.abspath(repository_path)
767 if repository_path == self.path:
776 if repository_path == self.path:
768 raise ValueError('Cannot fetch from the same repository')
777 raise ValueError('Cannot fetch from the same repository')
769
778
770 if use_origin:
779 if use_origin:
771 branch_name = '+{branch}:refs/heads/{branch}'.format(
780 branch_name = '+{branch}:refs/heads/{branch}'.format(
772 branch=branch_name)
781 branch=branch_name)
773
782
774 cmd = ['fetch', '--no-tags', '--update-head-ok',
783 cmd = ['fetch', '--no-tags', '--update-head-ok',
775 repository_path, branch_name]
784 repository_path, branch_name]
776 self.run_git_command(cmd, fail_on_stderr=False)
785 self.run_git_command(cmd, fail_on_stderr=False)
777
786
778 def _local_reset(self, branch_name):
787 def _local_reset(self, branch_name):
779 branch_name = '{}'.format(branch_name)
788 branch_name = '{}'.format(branch_name)
780 cmd = ['reset', '--hard', branch_name, '--']
789 cmd = ['reset', '--hard', branch_name, '--']
781 self.run_git_command(cmd, fail_on_stderr=False)
790 self.run_git_command(cmd, fail_on_stderr=False)
782
791
783 def _last_fetch_heads(self):
792 def _last_fetch_heads(self):
784 """
793 """
785 Return the last fetched heads that need merging.
794 Return the last fetched heads that need merging.
786
795
787 The algorithm is defined at
796 The algorithm is defined at
788 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
797 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
789 """
798 """
790 if not self.bare:
799 if not self.bare:
791 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
800 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
792 else:
801 else:
793 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
802 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
794
803
795 heads = []
804 heads = []
796 with open(fetch_heads_path) as f:
805 with open(fetch_heads_path) as f:
797 for line in f:
806 for line in f:
798 if ' not-for-merge ' in line:
807 if ' not-for-merge ' in line:
799 continue
808 continue
800 line = re.sub('\t.*', '', line, flags=re.DOTALL)
809 line = re.sub('\t.*', '', line, flags=re.DOTALL)
801 heads.append(line)
810 heads.append(line)
802
811
803 return heads
812 return heads
804
813
805 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
814 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
806 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
815 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
807
816
808 def _local_pull(self, repository_path, branch_name, ff_only=True):
817 def _local_pull(self, repository_path, branch_name, ff_only=True):
809 """
818 """
810 Pull a branch from a local repository.
819 Pull a branch from a local repository.
811 """
820 """
812 if self.bare:
821 if self.bare:
813 raise RepositoryError('Cannot pull into a bare git repository')
822 raise RepositoryError('Cannot pull into a bare git repository')
814 # N.B.(skreft): The --ff-only option is to make sure this is a
823 # N.B.(skreft): The --ff-only option is to make sure this is a
815 # fast-forward (i.e., we are only pulling new changes and there are no
824 # fast-forward (i.e., we are only pulling new changes and there are no
816 # conflicts with our current branch)
825 # conflicts with our current branch)
817 # Additionally, that option needs to go before --no-tags, otherwise git
826 # Additionally, that option needs to go before --no-tags, otherwise git
818 # pull complains about it being an unknown flag.
827 # pull complains about it being an unknown flag.
819 cmd = ['pull']
828 cmd = ['pull']
820 if ff_only:
829 if ff_only:
821 cmd.append('--ff-only')
830 cmd.append('--ff-only')
822 cmd.extend(['--no-tags', repository_path, branch_name])
831 cmd.extend(['--no-tags', repository_path, branch_name])
823 self.run_git_command(cmd, fail_on_stderr=False)
832 self.run_git_command(cmd, fail_on_stderr=False)
824
833
825 def _local_merge(self, merge_message, user_name, user_email, heads):
834 def _local_merge(self, merge_message, user_name, user_email, heads):
826 """
835 """
827 Merge the given head into the checked out branch.
836 Merge the given head into the checked out branch.
828
837
829 It will force a merge commit.
838 It will force a merge commit.
830
839
831 Currently it raises an error if the repo is empty, as it is not possible
840 Currently it raises an error if the repo is empty, as it is not possible
832 to create a merge commit in an empty repo.
841 to create a merge commit in an empty repo.
833
842
834 :param merge_message: The message to use for the merge commit.
843 :param merge_message: The message to use for the merge commit.
835 :param heads: the heads to merge.
844 :param heads: the heads to merge.
836 """
845 """
837 if self.bare:
846 if self.bare:
838 raise RepositoryError('Cannot merge into a bare git repository')
847 raise RepositoryError('Cannot merge into a bare git repository')
839
848
840 if not heads:
849 if not heads:
841 return
850 return
842
851
843 if self.is_empty():
852 if self.is_empty():
844 # TODO(skreft): do something more robust in this case.
853 # TODO(skreft): do something more robust in this case.
845 raise RepositoryError('Do not know how to merge into empty repositories yet')
854 raise RepositoryError('Do not know how to merge into empty repositories yet')
846 unresolved = None
855 unresolved = None
847
856
848 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
857 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
849 # commit message. We also specify the user who is doing the merge.
858 # commit message. We also specify the user who is doing the merge.
850 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
859 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
851 '-c', 'user.email=%s' % safe_str(user_email),
860 '-c', 'user.email=%s' % safe_str(user_email),
852 'merge', '--no-ff', '-m', safe_str(merge_message)]
861 'merge', '--no-ff', '-m', safe_str(merge_message)]
853
862
854 merge_cmd = cmd + heads
863 merge_cmd = cmd + heads
855
864
856 try:
865 try:
857 self.run_git_command(merge_cmd, fail_on_stderr=False)
866 self.run_git_command(merge_cmd, fail_on_stderr=False)
858 except RepositoryError:
867 except RepositoryError:
859 files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
868 files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
860 fail_on_stderr=False)[0].splitlines()
869 fail_on_stderr=False)[0].splitlines()
861 # NOTE(marcink): we add U notation for consistent with HG backend output
870 # NOTE(marcink): we add U notation for consistent with HG backend output
862 unresolved = ['U {}'.format(f) for f in files]
871 unresolved = ['U {}'.format(f) for f in files]
863
872
864 # Cleanup any merge leftovers
873 # Cleanup any merge leftovers
865 self._remote.invalidate_vcs_cache()
874 self._remote.invalidate_vcs_cache()
866 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
875 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
867
876
868 if unresolved:
877 if unresolved:
869 raise UnresolvedFilesInRepo(unresolved)
878 raise UnresolvedFilesInRepo(unresolved)
870 else:
879 else:
871 raise
880 raise
872
881
873 def _local_push(
882 def _local_push(
874 self, source_branch, repository_path, target_branch,
883 self, source_branch, repository_path, target_branch,
875 enable_hooks=False, rc_scm_data=None):
884 enable_hooks=False, rc_scm_data=None):
876 """
885 """
877 Push the source_branch to the given repository and target_branch.
886 Push the source_branch to the given repository and target_branch.
878
887
879 Currently it if the target_branch is not master and the target repo is
888 Currently it if the target_branch is not master and the target repo is
880 empty, the push will work, but then GitRepository won't be able to find
889 empty, the push will work, but then GitRepository won't be able to find
881 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
890 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
882 pointing to master, which does not exist).
891 pointing to master, which does not exist).
883
892
884 It does not run the hooks in the target repo.
893 It does not run the hooks in the target repo.
885 """
894 """
886 # TODO(skreft): deal with the case in which the target repo is empty,
895 # TODO(skreft): deal with the case in which the target repo is empty,
887 # and the target_branch is not master.
896 # and the target_branch is not master.
888 target_repo = GitRepository(repository_path)
897 target_repo = GitRepository(repository_path)
889 if (not target_repo.bare and
898 if (not target_repo.bare and
890 target_repo._current_branch() == target_branch):
899 target_repo._current_branch() == target_branch):
891 # Git prevents pushing to the checked out branch, so simulate it by
900 # Git prevents pushing to the checked out branch, so simulate it by
892 # pulling into the target repository.
901 # pulling into the target repository.
893 target_repo._local_pull(self.path, source_branch)
902 target_repo._local_pull(self.path, source_branch)
894 else:
903 else:
895 cmd = ['push', os.path.abspath(repository_path),
904 cmd = ['push', os.path.abspath(repository_path),
896 '%s:%s' % (source_branch, target_branch)]
905 '%s:%s' % (source_branch, target_branch)]
897 gitenv = {}
906 gitenv = {}
898 if rc_scm_data:
907 if rc_scm_data:
899 gitenv.update({'RC_SCM_DATA': rc_scm_data})
908 gitenv.update({'RC_SCM_DATA': rc_scm_data})
900
909
901 if not enable_hooks:
910 if not enable_hooks:
902 gitenv['RC_SKIP_HOOKS'] = '1'
911 gitenv['RC_SKIP_HOOKS'] = '1'
903 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
912 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
904
913
905 def _get_new_pr_branch(self, source_branch, target_branch):
914 def _get_new_pr_branch(self, source_branch, target_branch):
906 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
915 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
907 pr_branches = []
916 pr_branches = []
908 for branch in self.branches:
917 for branch in self.branches:
909 if branch.startswith(prefix):
918 if branch.startswith(prefix):
910 pr_branches.append(int(branch[len(prefix):]))
919 pr_branches.append(int(branch[len(prefix):]))
911
920
912 if not pr_branches:
921 if not pr_branches:
913 branch_id = 0
922 branch_id = 0
914 else:
923 else:
915 branch_id = max(pr_branches) + 1
924 branch_id = max(pr_branches) + 1
916
925
917 return '%s%d' % (prefix, branch_id)
926 return '%s%d' % (prefix, branch_id)
918
927
919 def _maybe_prepare_merge_workspace(
928 def _maybe_prepare_merge_workspace(
920 self, repo_id, workspace_id, target_ref, source_ref):
929 self, repo_id, workspace_id, target_ref, source_ref):
921 shadow_repository_path = self._get_shadow_repository_path(
930 shadow_repository_path = self._get_shadow_repository_path(
922 self.path, repo_id, workspace_id)
931 self.path, repo_id, workspace_id)
923 if not os.path.exists(shadow_repository_path):
932 if not os.path.exists(shadow_repository_path):
924 self._local_clone(
933 self._local_clone(
925 shadow_repository_path, target_ref.name, source_ref.name)
934 shadow_repository_path, target_ref.name, source_ref.name)
926 log.debug('Prepared %s shadow repository in %s',
935 log.debug('Prepared %s shadow repository in %s',
927 self.alias, shadow_repository_path)
936 self.alias, shadow_repository_path)
928
937
929 return shadow_repository_path
938 return shadow_repository_path
930
939
931 def _merge_repo(self, repo_id, workspace_id, target_ref,
940 def _merge_repo(self, repo_id, workspace_id, target_ref,
932 source_repo, source_ref, merge_message,
941 source_repo, source_ref, merge_message,
933 merger_name, merger_email, dry_run=False,
942 merger_name, merger_email, dry_run=False,
934 use_rebase=False, close_branch=False):
943 use_rebase=False, close_branch=False):
935
944
936 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
945 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
937 'rebase' if use_rebase else 'merge', dry_run)
946 'rebase' if use_rebase else 'merge', dry_run)
938 if target_ref.commit_id != self.branches[target_ref.name]:
947 if target_ref.commit_id != self.branches[target_ref.name]:
939 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
948 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
940 target_ref.commit_id, self.branches[target_ref.name])
949 target_ref.commit_id, self.branches[target_ref.name])
941 return MergeResponse(
950 return MergeResponse(
942 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
951 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
943 metadata={'target_ref': target_ref})
952 metadata={'target_ref': target_ref})
944
953
945 shadow_repository_path = self._maybe_prepare_merge_workspace(
954 shadow_repository_path = self._maybe_prepare_merge_workspace(
946 repo_id, workspace_id, target_ref, source_ref)
955 repo_id, workspace_id, target_ref, source_ref)
947 shadow_repo = self.get_shadow_instance(shadow_repository_path)
956 shadow_repo = self.get_shadow_instance(shadow_repository_path)
948
957
949 # checkout source, if it's different. Otherwise we could not
958 # checkout source, if it's different. Otherwise we could not
950 # fetch proper commits for merge testing
959 # fetch proper commits for merge testing
951 if source_ref.name != target_ref.name:
960 if source_ref.name != target_ref.name:
952 if shadow_repo.get_remote_ref(source_ref.name):
961 if shadow_repo.get_remote_ref(source_ref.name):
953 shadow_repo._checkout(source_ref.name, force=True)
962 shadow_repo._checkout(source_ref.name, force=True)
954
963
955 # checkout target, and fetch changes
964 # checkout target, and fetch changes
956 shadow_repo._checkout(target_ref.name, force=True)
965 shadow_repo._checkout(target_ref.name, force=True)
957
966
958 # fetch/reset pull the target, in case it is changed
967 # fetch/reset pull the target, in case it is changed
959 # this handles even force changes
968 # this handles even force changes
960 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
969 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
961 shadow_repo._local_reset(target_ref.name)
970 shadow_repo._local_reset(target_ref.name)
962
971
963 # Need to reload repo to invalidate the cache, or otherwise we cannot
972 # Need to reload repo to invalidate the cache, or otherwise we cannot
964 # retrieve the last target commit.
973 # retrieve the last target commit.
965 shadow_repo = self.get_shadow_instance(shadow_repository_path)
974 shadow_repo = self.get_shadow_instance(shadow_repository_path)
966 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
975 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
967 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
976 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
968 target_ref, target_ref.commit_id,
977 target_ref, target_ref.commit_id,
969 shadow_repo.branches[target_ref.name])
978 shadow_repo.branches[target_ref.name])
970 return MergeResponse(
979 return MergeResponse(
971 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
980 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
972 metadata={'target_ref': target_ref})
981 metadata={'target_ref': target_ref})
973
982
974 # calculate new branch
983 # calculate new branch
975 pr_branch = shadow_repo._get_new_pr_branch(
984 pr_branch = shadow_repo._get_new_pr_branch(
976 source_ref.name, target_ref.name)
985 source_ref.name, target_ref.name)
977 log.debug('using pull-request merge branch: `%s`', pr_branch)
986 log.debug('using pull-request merge branch: `%s`', pr_branch)
978 # checkout to temp branch, and fetch changes
987 # checkout to temp branch, and fetch changes
979 shadow_repo._checkout(pr_branch, create=True)
988 shadow_repo._checkout(pr_branch, create=True)
980 try:
989 try:
981 shadow_repo._local_fetch(source_repo.path, source_ref.name)
990 shadow_repo._local_fetch(source_repo.path, source_ref.name)
982 except RepositoryError:
991 except RepositoryError:
983 log.exception('Failure when doing local fetch on '
992 log.exception('Failure when doing local fetch on '
984 'shadow repo: %s', shadow_repo)
993 'shadow repo: %s', shadow_repo)
985 return MergeResponse(
994 return MergeResponse(
986 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
995 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
987 metadata={'source_ref': source_ref})
996 metadata={'source_ref': source_ref})
988
997
989 merge_ref = None
998 merge_ref = None
990 merge_failure_reason = MergeFailureReason.NONE
999 merge_failure_reason = MergeFailureReason.NONE
991 metadata = {}
1000 metadata = {}
992 try:
1001 try:
993 shadow_repo._local_merge(merge_message, merger_name, merger_email,
1002 shadow_repo._local_merge(merge_message, merger_name, merger_email,
994 [source_ref.commit_id])
1003 [source_ref.commit_id])
995 merge_possible = True
1004 merge_possible = True
996
1005
997 # Need to invalidate the cache, or otherwise we
1006 # Need to invalidate the cache, or otherwise we
998 # cannot retrieve the merge commit.
1007 # cannot retrieve the merge commit.
999 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
1008 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
1000 merge_commit_id = shadow_repo.branches[pr_branch]
1009 merge_commit_id = shadow_repo.branches[pr_branch]
1001
1010
1002 # Set a reference pointing to the merge commit. This reference may
1011 # Set a reference pointing to the merge commit. This reference may
1003 # be used to easily identify the last successful merge commit in
1012 # be used to easily identify the last successful merge commit in
1004 # the shadow repository.
1013 # the shadow repository.
1005 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
1014 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
1006 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
1015 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
1007 except RepositoryError as e:
1016 except RepositoryError as e:
1008 log.exception('Failure when doing local merge on git shadow repo')
1017 log.exception('Failure when doing local merge on git shadow repo')
1009 if isinstance(e, UnresolvedFilesInRepo):
1018 if isinstance(e, UnresolvedFilesInRepo):
1010 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
1019 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
1011
1020
1012 merge_possible = False
1021 merge_possible = False
1013 merge_failure_reason = MergeFailureReason.MERGE_FAILED
1022 merge_failure_reason = MergeFailureReason.MERGE_FAILED
1014
1023
1015 if merge_possible and not dry_run:
1024 if merge_possible and not dry_run:
1016 try:
1025 try:
1017 shadow_repo._local_push(
1026 shadow_repo._local_push(
1018 pr_branch, self.path, target_ref.name, enable_hooks=True,
1027 pr_branch, self.path, target_ref.name, enable_hooks=True,
1019 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1028 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1020 merge_succeeded = True
1029 merge_succeeded = True
1021 except RepositoryError:
1030 except RepositoryError:
1022 log.exception(
1031 log.exception(
1023 'Failure when doing local push from the shadow '
1032 'Failure when doing local push from the shadow '
1024 'repository to the target repository at %s.', self.path)
1033 'repository to the target repository at %s.', self.path)
1025 merge_succeeded = False
1034 merge_succeeded = False
1026 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1035 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1027 metadata['target'] = 'git shadow repo'
1036 metadata['target'] = 'git shadow repo'
1028 metadata['merge_commit'] = pr_branch
1037 metadata['merge_commit'] = pr_branch
1029 else:
1038 else:
1030 merge_succeeded = False
1039 merge_succeeded = False
1031
1040
1032 return MergeResponse(
1041 return MergeResponse(
1033 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1042 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1034 metadata=metadata)
1043 metadata=metadata)
@@ -1,1012 +1,1012 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2020 RhodeCode GmbH
3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG repository module
22 HG repository module
23 """
23 """
24 import os
24 import os
25 import logging
25 import logging
26 import binascii
26 import binascii
27 import urllib
27 import urllib
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.utils2 import CachedProperty
35 from rhodecode.lib.utils2 import CachedProperty
36 from rhodecode.lib.vcs import connection, exceptions
36 from rhodecode.lib.vcs import connection, exceptions
37 from rhodecode.lib.vcs.backends.base import (
37 from rhodecode.lib.vcs.backends.base import (
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 MergeFailureReason, Reference, BasePathPermissionChecker)
39 MergeFailureReason, Reference, BasePathPermissionChecker)
40 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
40 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
41 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
41 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
42 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
42 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
43 from rhodecode.lib.vcs.exceptions import (
43 from rhodecode.lib.vcs.exceptions import (
44 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
44 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
45 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
45 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
46 from rhodecode.lib.vcs.compat import configparser
46 from rhodecode.lib.vcs.compat import configparser
47
47
48 hexlify = binascii.hexlify
48 hexlify = binascii.hexlify
49 nullid = "\0" * 20
49 nullid = "\0" * 20
50
50
51 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
52
52
53
53
54 class MercurialRepository(BaseRepository):
54 class MercurialRepository(BaseRepository):
55 """
55 """
56 Mercurial repository backend
56 Mercurial repository backend
57 """
57 """
58 DEFAULT_BRANCH_NAME = 'default'
58 DEFAULT_BRANCH_NAME = 'default'
59
59
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 do_workspace_checkout=False, with_wire=None, bare=False):
61 do_workspace_checkout=False, with_wire=None, bare=False):
62 """
62 """
63 Raises RepositoryError if repository could not be find at the given
63 Raises RepositoryError if repository could not be find at the given
64 ``repo_path``.
64 ``repo_path``.
65
65
66 :param repo_path: local path of the repository
66 :param repo_path: local path of the repository
67 :param config: config object containing the repo configuration
67 :param config: config object containing the repo configuration
68 :param create=False: if set to True, would try to create repository if
68 :param create=False: if set to True, would try to create repository if
69 it does not exist rather than raising exception
69 it does not exist rather than raising exception
70 :param src_url=None: would try to clone repository from given location
70 :param src_url=None: would try to clone repository from given location
71 :param do_workspace_checkout=False: sets update of working copy after
71 :param do_workspace_checkout=False: sets update of working copy after
72 making a clone
72 making a clone
73 :param bare: not used, compatible with other VCS
73 :param bare: not used, compatible with other VCS
74 """
74 """
75
75
76 self.path = safe_str(os.path.abspath(repo_path))
76 self.path = safe_str(os.path.abspath(repo_path))
77 # mercurial since 4.4.X requires certain configuration to be present
77 # mercurial since 4.4.X requires certain configuration to be present
78 # because sometimes we init the repos with config we need to meet
78 # because sometimes we init the repos with config we need to meet
79 # special requirements
79 # special requirements
80 self.config = config if config else self.get_default_config(
80 self.config = config if config else self.get_default_config(
81 default=[('extensions', 'largefiles', '1')])
81 default=[('extensions', 'largefiles', '1')])
82 self.with_wire = with_wire or {"cache": False} # default should not use cache
82 self.with_wire = with_wire or {"cache": False} # default should not use cache
83
83
84 self._init_repo(create, src_url, do_workspace_checkout)
84 self._init_repo(create, src_url, do_workspace_checkout)
85
85
86 # caches
86 # caches
87 self._commit_ids = {}
87 self._commit_ids = {}
88
88
89 @LazyProperty
89 @LazyProperty
90 def _remote(self):
90 def _remote(self):
91 repo_id = self.path
91 repo_id = self.path
92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
93
93
94 @CachedProperty
94 @CachedProperty
95 def commit_ids(self):
95 def commit_ids(self):
96 """
96 """
97 Returns list of commit ids, in ascending order. Being lazy
97 Returns list of commit ids, in ascending order. Being lazy
98 attribute allows external tools to inject shas from cache.
98 attribute allows external tools to inject shas from cache.
99 """
99 """
100 commit_ids = self._get_all_commit_ids()
100 commit_ids = self._get_all_commit_ids()
101 self._rebuild_cache(commit_ids)
101 self._rebuild_cache(commit_ids)
102 return commit_ids
102 return commit_ids
103
103
104 def _rebuild_cache(self, commit_ids):
104 def _rebuild_cache(self, commit_ids):
105 self._commit_ids = dict((commit_id, index)
105 self._commit_ids = dict((commit_id, index)
106 for index, commit_id in enumerate(commit_ids))
106 for index, commit_id in enumerate(commit_ids))
107
107
108 @CachedProperty
108 @CachedProperty
109 def branches(self):
109 def branches(self):
110 return self._get_branches()
110 return self._get_branches()
111
111
112 @CachedProperty
112 @CachedProperty
113 def branches_closed(self):
113 def branches_closed(self):
114 return self._get_branches(active=False, closed=True)
114 return self._get_branches(active=False, closed=True)
115
115
116 @CachedProperty
116 @CachedProperty
117 def branches_all(self):
117 def branches_all(self):
118 all_branches = {}
118 all_branches = {}
119 all_branches.update(self.branches)
119 all_branches.update(self.branches)
120 all_branches.update(self.branches_closed)
120 all_branches.update(self.branches_closed)
121 return all_branches
121 return all_branches
122
122
123 def _get_branches(self, active=True, closed=False):
123 def _get_branches(self, active=True, closed=False):
124 """
124 """
125 Gets branches for this repository
125 Gets branches for this repository
126 Returns only not closed active branches by default
126 Returns only not closed active branches by default
127
127
128 :param active: return also active branches
128 :param active: return also active branches
129 :param closed: return also closed branches
129 :param closed: return also closed branches
130
130
131 """
131 """
132 if self.is_empty():
132 if self.is_empty():
133 return {}
133 return {}
134
134
135 def get_name(ctx):
135 def get_name(ctx):
136 return ctx[0]
136 return ctx[0]
137
137
138 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
138 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
139 self._remote.branches(active, closed).items()]
139 self._remote.branches(active, closed).items()]
140
140
141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
142
142
143 @CachedProperty
143 @CachedProperty
144 def tags(self):
144 def tags(self):
145 """
145 """
146 Gets tags for this repository
146 Gets tags for this repository
147 """
147 """
148 return self._get_tags()
148 return self._get_tags()
149
149
150 def _get_tags(self):
150 def _get_tags(self):
151 if self.is_empty():
151 if self.is_empty():
152 return {}
152 return {}
153
153
154 def get_name(ctx):
154 def get_name(ctx):
155 return ctx[0]
155 return ctx[0]
156
156
157 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
157 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
158 self._remote.tags().items()]
158 self._remote.tags().items()]
159
159
160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
161
161
162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
163 """
163 """
164 Creates and returns a tag for the given ``commit_id``.
164 Creates and returns a tag for the given ``commit_id``.
165
165
166 :param name: name for new tag
166 :param name: name for new tag
167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
168 :param commit_id: commit id for which new tag would be created
168 :param commit_id: commit id for which new tag would be created
169 :param message: message of the tag's commit
169 :param message: message of the tag's commit
170 :param date: date of tag's commit
170 :param date: date of tag's commit
171
171
172 :raises TagAlreadyExistError: if tag with same name already exists
172 :raises TagAlreadyExistError: if tag with same name already exists
173 """
173 """
174 if name in self.tags:
174 if name in self.tags:
175 raise TagAlreadyExistError("Tag %s already exists" % name)
175 raise TagAlreadyExistError("Tag %s already exists" % name)
176
176
177 commit = self.get_commit(commit_id=commit_id)
177 commit = self.get_commit(commit_id=commit_id)
178 local = kwargs.setdefault('local', False)
178 local = kwargs.setdefault('local', False)
179
179
180 if message is None:
180 if message is None:
181 message = "Added tag %s for commit %s" % (name, commit.short_id)
181 message = "Added tag %s for commit %s" % (name, commit.short_id)
182
182
183 date, tz = date_to_timestamp_plus_offset(date)
183 date, tz = date_to_timestamp_plus_offset(date)
184
184
185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
186 self._remote.invalidate_vcs_cache()
186 self._remote.invalidate_vcs_cache()
187
187
188 # Reinitialize tags
188 # Reinitialize tags
189 self._invalidate_prop_cache('tags')
189 self._invalidate_prop_cache('tags')
190 tag_id = self.tags[name]
190 tag_id = self.tags[name]
191
191
192 return self.get_commit(commit_id=tag_id)
192 return self.get_commit(commit_id=tag_id)
193
193
194 def remove_tag(self, name, user, message=None, date=None):
194 def remove_tag(self, name, user, message=None, date=None):
195 """
195 """
196 Removes tag with the given `name`.
196 Removes tag with the given `name`.
197
197
198 :param name: name of the tag to be removed
198 :param name: name of the tag to be removed
199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
200 :param message: message of the tag's removal commit
200 :param message: message of the tag's removal commit
201 :param date: date of tag's removal commit
201 :param date: date of tag's removal commit
202
202
203 :raises TagDoesNotExistError: if tag with given name does not exists
203 :raises TagDoesNotExistError: if tag with given name does not exists
204 """
204 """
205 if name not in self.tags:
205 if name not in self.tags:
206 raise TagDoesNotExistError("Tag %s does not exist" % name)
206 raise TagDoesNotExistError("Tag %s does not exist" % name)
207
207
208 if message is None:
208 if message is None:
209 message = "Removed tag %s" % name
209 message = "Removed tag %s" % name
210 local = False
210 local = False
211
211
212 date, tz = date_to_timestamp_plus_offset(date)
212 date, tz = date_to_timestamp_plus_offset(date)
213
213
214 self._remote.tag(name, nullid, message, local, user, date, tz)
214 self._remote.tag(name, nullid, message, local, user, date, tz)
215 self._remote.invalidate_vcs_cache()
215 self._remote.invalidate_vcs_cache()
216 self._invalidate_prop_cache('tags')
216 self._invalidate_prop_cache('tags')
217
217
218 @LazyProperty
218 @LazyProperty
219 def bookmarks(self):
219 def bookmarks(self):
220 """
220 """
221 Gets bookmarks for this repository
221 Gets bookmarks for this repository
222 """
222 """
223 return self._get_bookmarks()
223 return self._get_bookmarks()
224
224
225 def _get_bookmarks(self):
225 def _get_bookmarks(self):
226 if self.is_empty():
226 if self.is_empty():
227 return {}
227 return {}
228
228
229 def get_name(ctx):
229 def get_name(ctx):
230 return ctx[0]
230 return ctx[0]
231
231
232 _bookmarks = [
232 _bookmarks = [
233 (safe_unicode(n), hexlify(h)) for n, h in
233 (safe_unicode(n), hexlify(h)) for n, h in
234 self._remote.bookmarks().items()]
234 self._remote.bookmarks().items()]
235
235
236 return OrderedDict(sorted(_bookmarks, key=get_name))
236 return OrderedDict(sorted(_bookmarks, key=get_name))
237
237
238 def _get_all_commit_ids(self):
238 def _get_all_commit_ids(self):
239 return self._remote.get_all_commit_ids('visible')
239 return self._remote.get_all_commit_ids('visible')
240
240
241 def get_diff(
241 def get_diff(
242 self, commit1, commit2, path='', ignore_whitespace=False,
242 self, commit1, commit2, path='', ignore_whitespace=False,
243 context=3, path1=None):
243 context=3, path1=None):
244 """
244 """
245 Returns (git like) *diff*, as plain text. Shows changes introduced by
245 Returns (git like) *diff*, as plain text. Shows changes introduced by
246 `commit2` since `commit1`.
246 `commit2` since `commit1`.
247
247
248 :param commit1: Entry point from which diff is shown. Can be
248 :param commit1: Entry point from which diff is shown. Can be
249 ``self.EMPTY_COMMIT`` - in this case, patch showing all
249 ``self.EMPTY_COMMIT`` - in this case, patch showing all
250 the changes since empty state of the repository until `commit2`
250 the changes since empty state of the repository until `commit2`
251 :param commit2: Until which commit changes should be shown.
251 :param commit2: Until which commit changes should be shown.
252 :param ignore_whitespace: If set to ``True``, would not show whitespace
252 :param ignore_whitespace: If set to ``True``, would not show whitespace
253 changes. Defaults to ``False``.
253 changes. Defaults to ``False``.
254 :param context: How many lines before/after changed lines should be
254 :param context: How many lines before/after changed lines should be
255 shown. Defaults to ``3``.
255 shown. Defaults to ``3``.
256 """
256 """
257 self._validate_diff_commits(commit1, commit2)
257 self._validate_diff_commits(commit1, commit2)
258 if path1 is not None and path1 != path:
258 if path1 is not None and path1 != path:
259 raise ValueError("Diff of two different paths not supported.")
259 raise ValueError("Diff of two different paths not supported.")
260
260
261 if path:
261 if path:
262 file_filter = [self.path, path]
262 file_filter = [self.path, path]
263 else:
263 else:
264 file_filter = None
264 file_filter = None
265
265
266 diff = self._remote.diff(
266 diff = self._remote.diff(
267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
268 opt_git=True, opt_ignorews=ignore_whitespace,
268 opt_git=True, opt_ignorews=ignore_whitespace,
269 context=context)
269 context=context)
270 return MercurialDiff(diff)
270 return MercurialDiff(diff)
271
271
272 def strip(self, commit_id, branch=None):
272 def strip(self, commit_id, branch=None):
273 self._remote.strip(commit_id, update=False, backup="none")
273 self._remote.strip(commit_id, update=False, backup="none")
274
274
275 self._remote.invalidate_vcs_cache()
275 self._remote.invalidate_vcs_cache()
276 # clear cache
276 # clear cache
277 self._invalidate_prop_cache('commit_ids')
277 self._invalidate_prop_cache('commit_ids')
278
278
279 return len(self.commit_ids)
279 return len(self.commit_ids)
280
280
281 def verify(self):
281 def verify(self):
282 verify = self._remote.verify()
282 verify = self._remote.verify()
283
283
284 self._remote.invalidate_vcs_cache()
284 self._remote.invalidate_vcs_cache()
285 return verify
285 return verify
286
286
287 def hg_update_cache(self):
287 def hg_update_cache(self):
288 update_cache = self._remote.hg_update_cache()
288 update_cache = self._remote.hg_update_cache()
289
289
290 self._remote.invalidate_vcs_cache()
290 self._remote.invalidate_vcs_cache()
291 return update_cache
291 return update_cache
292
292
293 def hg_rebuild_fn_cache(self):
293 def hg_rebuild_fn_cache(self):
294 update_cache = self._remote.hg_rebuild_fn_cache()
294 update_cache = self._remote.hg_rebuild_fn_cache()
295
295
296 self._remote.invalidate_vcs_cache()
296 self._remote.invalidate_vcs_cache()
297 return update_cache
297 return update_cache
298
298
299 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
299 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
300 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
300 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
301 self, commit_id1, repo2, commit_id2)
301 self, commit_id1, repo2, commit_id2)
302
302
303 if commit_id1 == commit_id2:
303 if commit_id1 == commit_id2:
304 return commit_id1
304 return commit_id1
305
305
306 ancestors = self._remote.revs_from_revspec(
306 ancestors = self._remote.revs_from_revspec(
307 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
307 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
308 other_path=repo2.path)
308 other_path=repo2.path)
309
309
310 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
310 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
311
311
312 log.debug('Found common ancestor with sha: %s', ancestor_id)
312 log.debug('Found common ancestor with sha: %s', ancestor_id)
313 return ancestor_id
313 return ancestor_id
314
314
315 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
315 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
316 if commit_id1 == commit_id2:
316 if commit_id1 == commit_id2:
317 commits = []
317 commits = []
318 else:
318 else:
319 if merge:
319 if merge:
320 indexes = self._remote.revs_from_revspec(
320 indexes = self._remote.revs_from_revspec(
321 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
321 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
322 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
322 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
323 else:
323 else:
324 indexes = self._remote.revs_from_revspec(
324 indexes = self._remote.revs_from_revspec(
325 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
325 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
326 commit_id1, other_path=repo2.path)
326 commit_id1, other_path=repo2.path)
327
327
328 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
328 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
329 for idx in indexes]
329 for idx in indexes]
330
330
331 return commits
331 return commits
332
332
333 @staticmethod
333 @staticmethod
334 def check_url(url, config):
334 def check_url(url, config):
335 """
335 """
336 Function will check given url and try to verify if it's a valid
336 Function will check given url and try to verify if it's a valid
337 link. Sometimes it may happened that mercurial will issue basic
337 link. Sometimes it may happened that mercurial will issue basic
338 auth request that can cause whole API to hang when used from python
338 auth request that can cause whole API to hang when used from python
339 or other external calls.
339 or other external calls.
340
340
341 On failures it'll raise urllib2.HTTPError, exception is also thrown
341 On failures it'll raise urllib2.HTTPError, exception is also thrown
342 when the return code is non 200
342 when the return code is non 200
343 """
343 """
344 # check first if it's not an local url
344 # check first if it's not an local url
345 if os.path.isdir(url) or url.startswith('file:'):
345 if os.path.isdir(url) or url.startswith('file:'):
346 return True
346 return True
347
347
348 # Request the _remote to verify the url
348 # Request the _remote to verify the url
349 return connection.Hg.check_url(url, config.serialize())
349 return connection.Hg.check_url(url, config.serialize())
350
350
351 @staticmethod
351 @staticmethod
352 def is_valid_repository(path):
352 def is_valid_repository(path):
353 return os.path.isdir(os.path.join(path, '.hg'))
353 return os.path.isdir(os.path.join(path, '.hg'))
354
354
355 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
355 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
356 """
356 """
357 Function will check for mercurial repository in given path. If there
357 Function will check for mercurial repository in given path. If there
358 is no repository in that path it will raise an exception unless
358 is no repository in that path it will raise an exception unless
359 `create` parameter is set to True - in that case repository would
359 `create` parameter is set to True - in that case repository would
360 be created.
360 be created.
361
361
362 If `src_url` is given, would try to clone repository from the
362 If `src_url` is given, would try to clone repository from the
363 location at given clone_point. Additionally it'll make update to
363 location at given clone_point. Additionally it'll make update to
364 working copy accordingly to `do_workspace_checkout` flag.
364 working copy accordingly to `do_workspace_checkout` flag.
365 """
365 """
366 if create and os.path.exists(self.path):
366 if create and os.path.exists(self.path):
367 raise RepositoryError(
367 raise RepositoryError(
368 "Cannot create repository at %s, location already exist"
368 "Cannot create repository at %s, location already exist"
369 % self.path)
369 % self.path)
370
370
371 if src_url:
371 if src_url:
372 url = str(self._get_url(src_url))
372 url = str(self._get_url(src_url))
373 MercurialRepository.check_url(url, self.config)
373 MercurialRepository.check_url(url, self.config)
374
374
375 self._remote.clone(url, self.path, do_workspace_checkout)
375 self._remote.clone(url, self.path, do_workspace_checkout)
376
376
377 # Don't try to create if we've already cloned repo
377 # Don't try to create if we've already cloned repo
378 create = False
378 create = False
379
379
380 if create:
380 if create:
381 os.makedirs(self.path, mode=0o755)
381 os.makedirs(self.path, mode=0o755)
382 self._remote.localrepository(create)
382 self._remote.localrepository(create)
383
383
384 @LazyProperty
384 @LazyProperty
385 def in_memory_commit(self):
385 def in_memory_commit(self):
386 return MercurialInMemoryCommit(self)
386 return MercurialInMemoryCommit(self)
387
387
388 @LazyProperty
388 @LazyProperty
389 def description(self):
389 def description(self):
390 description = self._remote.get_config_value(
390 description = self._remote.get_config_value(
391 'web', 'description', untrusted=True)
391 'web', 'description', untrusted=True)
392 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
392 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
393
393
394 @LazyProperty
394 @LazyProperty
395 def contact(self):
395 def contact(self):
396 contact = (
396 contact = (
397 self._remote.get_config_value("web", "contact") or
397 self._remote.get_config_value("web", "contact") or
398 self._remote.get_config_value("ui", "username"))
398 self._remote.get_config_value("ui", "username"))
399 return safe_unicode(contact or self.DEFAULT_CONTACT)
399 return safe_unicode(contact or self.DEFAULT_CONTACT)
400
400
401 @LazyProperty
401 @LazyProperty
402 def last_change(self):
402 def last_change(self):
403 """
403 """
404 Returns last change made on this repository as
404 Returns last change made on this repository as
405 `datetime.datetime` object.
405 `datetime.datetime` object.
406 """
406 """
407 try:
407 try:
408 return self.get_commit().date
408 return self.get_commit().date
409 except RepositoryError:
409 except RepositoryError:
410 tzoffset = makedate()[1]
410 tzoffset = makedate()[1]
411 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
411 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
412
412
413 def _get_fs_mtime(self):
413 def _get_fs_mtime(self):
414 # fallback to filesystem
414 # fallback to filesystem
415 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
415 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
416 st_path = os.path.join(self.path, '.hg', "store")
416 st_path = os.path.join(self.path, '.hg', "store")
417 if os.path.exists(cl_path):
417 if os.path.exists(cl_path):
418 return os.stat(cl_path).st_mtime
418 return os.stat(cl_path).st_mtime
419 else:
419 else:
420 return os.stat(st_path).st_mtime
420 return os.stat(st_path).st_mtime
421
421
422 def _get_url(self, url):
422 def _get_url(self, url):
423 """
423 """
424 Returns normalized url. If schema is not given, would fall
424 Returns normalized url. If schema is not given, would fall
425 to filesystem
425 to filesystem
426 (``file:///``) schema.
426 (``file:///``) schema.
427 """
427 """
428 url = url.encode('utf8')
428 url = url.encode('utf8')
429 if url != 'default' and '://' not in url:
429 if url != 'default' and '://' not in url:
430 url = "file:" + urllib.pathname2url(url)
430 url = "file:" + urllib.pathname2url(url)
431 return url
431 return url
432
432
433 def get_hook_location(self):
433 def get_hook_location(self):
434 """
434 """
435 returns absolute path to location where hooks are stored
435 returns absolute path to location where hooks are stored
436 """
436 """
437 return os.path.join(self.path, '.hg', '.hgrc')
437 return os.path.join(self.path, '.hg', '.hgrc')
438
438
439 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
439 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
440 translate_tag=None, maybe_unreachable=False):
440 translate_tag=None, maybe_unreachable=False, reference_obj=None):
441 """
441 """
442 Returns ``MercurialCommit`` object representing repository's
442 Returns ``MercurialCommit`` object representing repository's
443 commit at the given `commit_id` or `commit_idx`.
443 commit at the given `commit_id` or `commit_idx`.
444 """
444 """
445 if self.is_empty():
445 if self.is_empty():
446 raise EmptyRepositoryError("There are no commits yet")
446 raise EmptyRepositoryError("There are no commits yet")
447
447
448 if commit_id is not None:
448 if commit_id is not None:
449 self._validate_commit_id(commit_id)
449 self._validate_commit_id(commit_id)
450 try:
450 try:
451 # we have cached idx, use it without contacting the remote
451 # we have cached idx, use it without contacting the remote
452 idx = self._commit_ids[commit_id]
452 idx = self._commit_ids[commit_id]
453 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
453 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
454 except KeyError:
454 except KeyError:
455 pass
455 pass
456
456
457 elif commit_idx is not None:
457 elif commit_idx is not None:
458 self._validate_commit_idx(commit_idx)
458 self._validate_commit_idx(commit_idx)
459 try:
459 try:
460 _commit_id = self.commit_ids[commit_idx]
460 _commit_id = self.commit_ids[commit_idx]
461 if commit_idx < 0:
461 if commit_idx < 0:
462 commit_idx = self.commit_ids.index(_commit_id)
462 commit_idx = self.commit_ids.index(_commit_id)
463
463
464 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
464 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
465 except IndexError:
465 except IndexError:
466 commit_id = commit_idx
466 commit_id = commit_idx
467 else:
467 else:
468 commit_id = "tip"
468 commit_id = "tip"
469
469
470 if isinstance(commit_id, unicode):
470 if isinstance(commit_id, unicode):
471 commit_id = safe_str(commit_id)
471 commit_id = safe_str(commit_id)
472
472
473 try:
473 try:
474 raw_id, idx = self._remote.lookup(commit_id, both=True)
474 raw_id, idx = self._remote.lookup(commit_id, both=True)
475 except CommitDoesNotExistError:
475 except CommitDoesNotExistError:
476 msg = "Commit {} does not exist for `{}`".format(
476 msg = "Commit {} does not exist for `{}`".format(
477 *map(safe_str, [commit_id, self.name]))
477 *map(safe_str, [commit_id, self.name]))
478 raise CommitDoesNotExistError(msg)
478 raise CommitDoesNotExistError(msg)
479
479
480 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
480 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
481
481
482 def get_commits(
482 def get_commits(
483 self, start_id=None, end_id=None, start_date=None, end_date=None,
483 self, start_id=None, end_id=None, start_date=None, end_date=None,
484 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
484 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
485 """
485 """
486 Returns generator of ``MercurialCommit`` objects from start to end
486 Returns generator of ``MercurialCommit`` objects from start to end
487 (both are inclusive)
487 (both are inclusive)
488
488
489 :param start_id: None, str(commit_id)
489 :param start_id: None, str(commit_id)
490 :param end_id: None, str(commit_id)
490 :param end_id: None, str(commit_id)
491 :param start_date: if specified, commits with commit date less than
491 :param start_date: if specified, commits with commit date less than
492 ``start_date`` would be filtered out from returned set
492 ``start_date`` would be filtered out from returned set
493 :param end_date: if specified, commits with commit date greater than
493 :param end_date: if specified, commits with commit date greater than
494 ``end_date`` would be filtered out from returned set
494 ``end_date`` would be filtered out from returned set
495 :param branch_name: if specified, commits not reachable from given
495 :param branch_name: if specified, commits not reachable from given
496 branch would be filtered out from returned set
496 branch would be filtered out from returned set
497 :param show_hidden: Show hidden commits such as obsolete or hidden from
497 :param show_hidden: Show hidden commits such as obsolete or hidden from
498 Mercurial evolve
498 Mercurial evolve
499 :raise BranchDoesNotExistError: If given ``branch_name`` does not
499 :raise BranchDoesNotExistError: If given ``branch_name`` does not
500 exist.
500 exist.
501 :raise CommitDoesNotExistError: If commit for given ``start`` or
501 :raise CommitDoesNotExistError: If commit for given ``start`` or
502 ``end`` could not be found.
502 ``end`` could not be found.
503 """
503 """
504 # actually we should check now if it's not an empty repo
504 # actually we should check now if it's not an empty repo
505 if self.is_empty():
505 if self.is_empty():
506 raise EmptyRepositoryError("There are no commits yet")
506 raise EmptyRepositoryError("There are no commits yet")
507 self._validate_branch_name(branch_name)
507 self._validate_branch_name(branch_name)
508
508
509 branch_ancestors = False
509 branch_ancestors = False
510 if start_id is not None:
510 if start_id is not None:
511 self._validate_commit_id(start_id)
511 self._validate_commit_id(start_id)
512 c_start = self.get_commit(commit_id=start_id)
512 c_start = self.get_commit(commit_id=start_id)
513 start_pos = self._commit_ids[c_start.raw_id]
513 start_pos = self._commit_ids[c_start.raw_id]
514 else:
514 else:
515 start_pos = None
515 start_pos = None
516
516
517 if end_id is not None:
517 if end_id is not None:
518 self._validate_commit_id(end_id)
518 self._validate_commit_id(end_id)
519 c_end = self.get_commit(commit_id=end_id)
519 c_end = self.get_commit(commit_id=end_id)
520 end_pos = max(0, self._commit_ids[c_end.raw_id])
520 end_pos = max(0, self._commit_ids[c_end.raw_id])
521 else:
521 else:
522 end_pos = None
522 end_pos = None
523
523
524 if None not in [start_id, end_id] and start_pos > end_pos:
524 if None not in [start_id, end_id] and start_pos > end_pos:
525 raise RepositoryError(
525 raise RepositoryError(
526 "Start commit '%s' cannot be after end commit '%s'" %
526 "Start commit '%s' cannot be after end commit '%s'" %
527 (start_id, end_id))
527 (start_id, end_id))
528
528
529 if end_pos is not None:
529 if end_pos is not None:
530 end_pos += 1
530 end_pos += 1
531
531
532 commit_filter = []
532 commit_filter = []
533
533
534 if branch_name and not branch_ancestors:
534 if branch_name and not branch_ancestors:
535 commit_filter.append('branch("%s")' % (branch_name,))
535 commit_filter.append('branch("%s")' % (branch_name,))
536 elif branch_name and branch_ancestors:
536 elif branch_name and branch_ancestors:
537 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
537 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
538
538
539 if start_date and not end_date:
539 if start_date and not end_date:
540 commit_filter.append('date(">%s")' % (start_date,))
540 commit_filter.append('date(">%s")' % (start_date,))
541 if end_date and not start_date:
541 if end_date and not start_date:
542 commit_filter.append('date("<%s")' % (end_date,))
542 commit_filter.append('date("<%s")' % (end_date,))
543 if start_date and end_date:
543 if start_date and end_date:
544 commit_filter.append(
544 commit_filter.append(
545 'date(">%s") and date("<%s")' % (start_date, end_date))
545 'date(">%s") and date("<%s")' % (start_date, end_date))
546
546
547 if not show_hidden:
547 if not show_hidden:
548 commit_filter.append('not obsolete()')
548 commit_filter.append('not obsolete()')
549 commit_filter.append('not hidden()')
549 commit_filter.append('not hidden()')
550
550
551 # TODO: johbo: Figure out a simpler way for this solution
551 # TODO: johbo: Figure out a simpler way for this solution
552 collection_generator = CollectionGenerator
552 collection_generator = CollectionGenerator
553 if commit_filter:
553 if commit_filter:
554 commit_filter = ' and '.join(map(safe_str, commit_filter))
554 commit_filter = ' and '.join(map(safe_str, commit_filter))
555 revisions = self._remote.rev_range([commit_filter])
555 revisions = self._remote.rev_range([commit_filter])
556 collection_generator = MercurialIndexBasedCollectionGenerator
556 collection_generator = MercurialIndexBasedCollectionGenerator
557 else:
557 else:
558 revisions = self.commit_ids
558 revisions = self.commit_ids
559
559
560 if start_pos or end_pos:
560 if start_pos or end_pos:
561 revisions = revisions[start_pos:end_pos]
561 revisions = revisions[start_pos:end_pos]
562
562
563 return collection_generator(self, revisions, pre_load=pre_load)
563 return collection_generator(self, revisions, pre_load=pre_load)
564
564
565 def pull(self, url, commit_ids=None):
565 def pull(self, url, commit_ids=None):
566 """
566 """
567 Pull changes from external location.
567 Pull changes from external location.
568
568
569 :param commit_ids: Optional. Can be set to a list of commit ids
569 :param commit_ids: Optional. Can be set to a list of commit ids
570 which shall be pulled from the other repository.
570 which shall be pulled from the other repository.
571 """
571 """
572 url = self._get_url(url)
572 url = self._get_url(url)
573 self._remote.pull(url, commit_ids=commit_ids)
573 self._remote.pull(url, commit_ids=commit_ids)
574 self._remote.invalidate_vcs_cache()
574 self._remote.invalidate_vcs_cache()
575
575
576 def fetch(self, url, commit_ids=None):
576 def fetch(self, url, commit_ids=None):
577 """
577 """
578 Backward compatibility with GIT fetch==pull
578 Backward compatibility with GIT fetch==pull
579 """
579 """
580 return self.pull(url, commit_ids=commit_ids)
580 return self.pull(url, commit_ids=commit_ids)
581
581
582 def push(self, url):
582 def push(self, url):
583 url = self._get_url(url)
583 url = self._get_url(url)
584 self._remote.sync_push(url)
584 self._remote.sync_push(url)
585
585
586 def _local_clone(self, clone_path):
586 def _local_clone(self, clone_path):
587 """
587 """
588 Create a local clone of the current repo.
588 Create a local clone of the current repo.
589 """
589 """
590 self._remote.clone(self.path, clone_path, update_after_clone=True,
590 self._remote.clone(self.path, clone_path, update_after_clone=True,
591 hooks=False)
591 hooks=False)
592
592
593 def _update(self, revision, clean=False):
593 def _update(self, revision, clean=False):
594 """
594 """
595 Update the working copy to the specified revision.
595 Update the working copy to the specified revision.
596 """
596 """
597 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
597 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
598 self._remote.update(revision, clean=clean)
598 self._remote.update(revision, clean=clean)
599
599
600 def _identify(self):
600 def _identify(self):
601 """
601 """
602 Return the current state of the working directory.
602 Return the current state of the working directory.
603 """
603 """
604 return self._remote.identify().strip().rstrip('+')
604 return self._remote.identify().strip().rstrip('+')
605
605
606 def _heads(self, branch=None):
606 def _heads(self, branch=None):
607 """
607 """
608 Return the commit ids of the repository heads.
608 Return the commit ids of the repository heads.
609 """
609 """
610 return self._remote.heads(branch=branch).strip().split(' ')
610 return self._remote.heads(branch=branch).strip().split(' ')
611
611
612 def _ancestor(self, revision1, revision2):
612 def _ancestor(self, revision1, revision2):
613 """
613 """
614 Return the common ancestor of the two revisions.
614 Return the common ancestor of the two revisions.
615 """
615 """
616 return self._remote.ancestor(revision1, revision2)
616 return self._remote.ancestor(revision1, revision2)
617
617
618 def _local_push(
618 def _local_push(
619 self, revision, repository_path, push_branches=False,
619 self, revision, repository_path, push_branches=False,
620 enable_hooks=False):
620 enable_hooks=False):
621 """
621 """
622 Push the given revision to the specified repository.
622 Push the given revision to the specified repository.
623
623
624 :param push_branches: allow to create branches in the target repo.
624 :param push_branches: allow to create branches in the target repo.
625 """
625 """
626 self._remote.push(
626 self._remote.push(
627 [revision], repository_path, hooks=enable_hooks,
627 [revision], repository_path, hooks=enable_hooks,
628 push_branches=push_branches)
628 push_branches=push_branches)
629
629
630 def _local_merge(self, target_ref, merge_message, user_name, user_email,
630 def _local_merge(self, target_ref, merge_message, user_name, user_email,
631 source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
631 source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
632 """
632 """
633 Merge the given source_revision into the checked out revision.
633 Merge the given source_revision into the checked out revision.
634
634
635 Returns the commit id of the merge and a boolean indicating if the
635 Returns the commit id of the merge and a boolean indicating if the
636 commit needs to be pushed.
636 commit needs to be pushed.
637 """
637 """
638 source_ref_commit_id = source_ref.commit_id
638 source_ref_commit_id = source_ref.commit_id
639 target_ref_commit_id = target_ref.commit_id
639 target_ref_commit_id = target_ref.commit_id
640
640
641 # update our workdir to target ref, for proper merge
641 # update our workdir to target ref, for proper merge
642 self._update(target_ref_commit_id, clean=True)
642 self._update(target_ref_commit_id, clean=True)
643
643
644 ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
644 ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
645 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
645 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
646
646
647 if close_commit_id:
647 if close_commit_id:
648 # NOTE(marcink): if we get the close commit, this is our new source
648 # NOTE(marcink): if we get the close commit, this is our new source
649 # which will include the close commit itself.
649 # which will include the close commit itself.
650 source_ref_commit_id = close_commit_id
650 source_ref_commit_id = close_commit_id
651
651
652 if ancestor == source_ref_commit_id:
652 if ancestor == source_ref_commit_id:
653 # Nothing to do, the changes were already integrated
653 # Nothing to do, the changes were already integrated
654 return target_ref_commit_id, False
654 return target_ref_commit_id, False
655
655
656 elif ancestor == target_ref_commit_id and is_the_same_branch:
656 elif ancestor == target_ref_commit_id and is_the_same_branch:
657 # In this case we should force a commit message
657 # In this case we should force a commit message
658 return source_ref_commit_id, True
658 return source_ref_commit_id, True
659
659
660 unresolved = None
660 unresolved = None
661 if use_rebase:
661 if use_rebase:
662 try:
662 try:
663 bookmark_name = 'rcbook%s%s' % (source_ref_commit_id, target_ref_commit_id)
663 bookmark_name = 'rcbook%s%s' % (source_ref_commit_id, target_ref_commit_id)
664 self.bookmark(bookmark_name, revision=source_ref.commit_id)
664 self.bookmark(bookmark_name, revision=source_ref.commit_id)
665 self._remote.rebase(
665 self._remote.rebase(
666 source=source_ref_commit_id, dest=target_ref_commit_id)
666 source=source_ref_commit_id, dest=target_ref_commit_id)
667 self._remote.invalidate_vcs_cache()
667 self._remote.invalidate_vcs_cache()
668 self._update(bookmark_name, clean=True)
668 self._update(bookmark_name, clean=True)
669 return self._identify(), True
669 return self._identify(), True
670 except RepositoryError as e:
670 except RepositoryError as e:
671 # The rebase-abort may raise another exception which 'hides'
671 # The rebase-abort may raise another exception which 'hides'
672 # the original one, therefore we log it here.
672 # the original one, therefore we log it here.
673 log.exception('Error while rebasing shadow repo during merge.')
673 log.exception('Error while rebasing shadow repo during merge.')
674 if 'unresolved conflicts' in safe_str(e):
674 if 'unresolved conflicts' in safe_str(e):
675 unresolved = self._remote.get_unresolved_files()
675 unresolved = self._remote.get_unresolved_files()
676 log.debug('unresolved files: %s', unresolved)
676 log.debug('unresolved files: %s', unresolved)
677
677
678 # Cleanup any rebase leftovers
678 # Cleanup any rebase leftovers
679 self._remote.invalidate_vcs_cache()
679 self._remote.invalidate_vcs_cache()
680 self._remote.rebase(abort=True)
680 self._remote.rebase(abort=True)
681 self._remote.invalidate_vcs_cache()
681 self._remote.invalidate_vcs_cache()
682 self._remote.update(clean=True)
682 self._remote.update(clean=True)
683 if unresolved:
683 if unresolved:
684 raise UnresolvedFilesInRepo(unresolved)
684 raise UnresolvedFilesInRepo(unresolved)
685 else:
685 else:
686 raise
686 raise
687 else:
687 else:
688 try:
688 try:
689 self._remote.merge(source_ref_commit_id)
689 self._remote.merge(source_ref_commit_id)
690 self._remote.invalidate_vcs_cache()
690 self._remote.invalidate_vcs_cache()
691 self._remote.commit(
691 self._remote.commit(
692 message=safe_str(merge_message),
692 message=safe_str(merge_message),
693 username=safe_str('%s <%s>' % (user_name, user_email)))
693 username=safe_str('%s <%s>' % (user_name, user_email)))
694 self._remote.invalidate_vcs_cache()
694 self._remote.invalidate_vcs_cache()
695 return self._identify(), True
695 return self._identify(), True
696 except RepositoryError as e:
696 except RepositoryError as e:
697 # The merge-abort may raise another exception which 'hides'
697 # The merge-abort may raise another exception which 'hides'
698 # the original one, therefore we log it here.
698 # the original one, therefore we log it here.
699 log.exception('Error while merging shadow repo during merge.')
699 log.exception('Error while merging shadow repo during merge.')
700 if 'unresolved merge conflicts' in safe_str(e):
700 if 'unresolved merge conflicts' in safe_str(e):
701 unresolved = self._remote.get_unresolved_files()
701 unresolved = self._remote.get_unresolved_files()
702 log.debug('unresolved files: %s', unresolved)
702 log.debug('unresolved files: %s', unresolved)
703
703
704 # Cleanup any merge leftovers
704 # Cleanup any merge leftovers
705 self._remote.update(clean=True)
705 self._remote.update(clean=True)
706 if unresolved:
706 if unresolved:
707 raise UnresolvedFilesInRepo(unresolved)
707 raise UnresolvedFilesInRepo(unresolved)
708 else:
708 else:
709 raise
709 raise
710
710
711 def _local_close(self, target_ref, user_name, user_email,
711 def _local_close(self, target_ref, user_name, user_email,
712 source_ref, close_message=''):
712 source_ref, close_message=''):
713 """
713 """
714 Close the branch of the given source_revision
714 Close the branch of the given source_revision
715
715
716 Returns the commit id of the close and a boolean indicating if the
716 Returns the commit id of the close and a boolean indicating if the
717 commit needs to be pushed.
717 commit needs to be pushed.
718 """
718 """
719 self._update(source_ref.commit_id)
719 self._update(source_ref.commit_id)
720 message = close_message or "Closing branch: `{}`".format(source_ref.name)
720 message = close_message or "Closing branch: `{}`".format(source_ref.name)
721 try:
721 try:
722 self._remote.commit(
722 self._remote.commit(
723 message=safe_str(message),
723 message=safe_str(message),
724 username=safe_str('%s <%s>' % (user_name, user_email)),
724 username=safe_str('%s <%s>' % (user_name, user_email)),
725 close_branch=True)
725 close_branch=True)
726 self._remote.invalidate_vcs_cache()
726 self._remote.invalidate_vcs_cache()
727 return self._identify(), True
727 return self._identify(), True
728 except RepositoryError:
728 except RepositoryError:
729 # Cleanup any commit leftovers
729 # Cleanup any commit leftovers
730 self._remote.update(clean=True)
730 self._remote.update(clean=True)
731 raise
731 raise
732
732
733 def _is_the_same_branch(self, target_ref, source_ref):
733 def _is_the_same_branch(self, target_ref, source_ref):
734 return (
734 return (
735 self._get_branch_name(target_ref) ==
735 self._get_branch_name(target_ref) ==
736 self._get_branch_name(source_ref))
736 self._get_branch_name(source_ref))
737
737
738 def _get_branch_name(self, ref):
738 def _get_branch_name(self, ref):
739 if ref.type == 'branch':
739 if ref.type == 'branch':
740 return ref.name
740 return ref.name
741 return self._remote.ctx_branch(ref.commit_id)
741 return self._remote.ctx_branch(ref.commit_id)
742
742
743 def _maybe_prepare_merge_workspace(
743 def _maybe_prepare_merge_workspace(
744 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
744 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
745 shadow_repository_path = self._get_shadow_repository_path(
745 shadow_repository_path = self._get_shadow_repository_path(
746 self.path, repo_id, workspace_id)
746 self.path, repo_id, workspace_id)
747 if not os.path.exists(shadow_repository_path):
747 if not os.path.exists(shadow_repository_path):
748 self._local_clone(shadow_repository_path)
748 self._local_clone(shadow_repository_path)
749 log.debug(
749 log.debug(
750 'Prepared shadow repository in %s', shadow_repository_path)
750 'Prepared shadow repository in %s', shadow_repository_path)
751
751
752 return shadow_repository_path
752 return shadow_repository_path
753
753
754 def _merge_repo(self, repo_id, workspace_id, target_ref,
754 def _merge_repo(self, repo_id, workspace_id, target_ref,
755 source_repo, source_ref, merge_message,
755 source_repo, source_ref, merge_message,
756 merger_name, merger_email, dry_run=False,
756 merger_name, merger_email, dry_run=False,
757 use_rebase=False, close_branch=False):
757 use_rebase=False, close_branch=False):
758
758
759 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
759 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
760 'rebase' if use_rebase else 'merge', dry_run)
760 'rebase' if use_rebase else 'merge', dry_run)
761 if target_ref.commit_id not in self._heads():
761 if target_ref.commit_id not in self._heads():
762 return MergeResponse(
762 return MergeResponse(
763 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
763 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
764 metadata={'target_ref': target_ref})
764 metadata={'target_ref': target_ref})
765
765
766 try:
766 try:
767 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
767 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
768 heads_all = self._heads(target_ref.name)
768 heads_all = self._heads(target_ref.name)
769 max_heads = 10
769 max_heads = 10
770 if len(heads_all) > max_heads:
770 if len(heads_all) > max_heads:
771 heads = '\n,'.join(
771 heads = '\n,'.join(
772 heads_all[:max_heads] +
772 heads_all[:max_heads] +
773 ['and {} more.'.format(len(heads_all)-max_heads)])
773 ['and {} more.'.format(len(heads_all)-max_heads)])
774 else:
774 else:
775 heads = '\n,'.join(heads_all)
775 heads = '\n,'.join(heads_all)
776 metadata = {
776 metadata = {
777 'target_ref': target_ref,
777 'target_ref': target_ref,
778 'source_ref': source_ref,
778 'source_ref': source_ref,
779 'heads': heads
779 'heads': heads
780 }
780 }
781 return MergeResponse(
781 return MergeResponse(
782 False, False, None,
782 False, False, None,
783 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
783 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
784 metadata=metadata)
784 metadata=metadata)
785 except CommitDoesNotExistError:
785 except CommitDoesNotExistError:
786 log.exception('Failure when looking up branch heads on hg target')
786 log.exception('Failure when looking up branch heads on hg target')
787 return MergeResponse(
787 return MergeResponse(
788 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
788 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
789 metadata={'target_ref': target_ref})
789 metadata={'target_ref': target_ref})
790
790
791 shadow_repository_path = self._maybe_prepare_merge_workspace(
791 shadow_repository_path = self._maybe_prepare_merge_workspace(
792 repo_id, workspace_id, target_ref, source_ref)
792 repo_id, workspace_id, target_ref, source_ref)
793 shadow_repo = self.get_shadow_instance(shadow_repository_path)
793 shadow_repo = self.get_shadow_instance(shadow_repository_path)
794
794
795 log.debug('Pulling in target reference %s', target_ref)
795 log.debug('Pulling in target reference %s', target_ref)
796 self._validate_pull_reference(target_ref)
796 self._validate_pull_reference(target_ref)
797 shadow_repo._local_pull(self.path, target_ref)
797 shadow_repo._local_pull(self.path, target_ref)
798
798
799 try:
799 try:
800 log.debug('Pulling in source reference %s', source_ref)
800 log.debug('Pulling in source reference %s', source_ref)
801 source_repo._validate_pull_reference(source_ref)
801 source_repo._validate_pull_reference(source_ref)
802 shadow_repo._local_pull(source_repo.path, source_ref)
802 shadow_repo._local_pull(source_repo.path, source_ref)
803 except CommitDoesNotExistError:
803 except CommitDoesNotExistError:
804 log.exception('Failure when doing local pull on hg shadow repo')
804 log.exception('Failure when doing local pull on hg shadow repo')
805 return MergeResponse(
805 return MergeResponse(
806 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
806 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
807 metadata={'source_ref': source_ref})
807 metadata={'source_ref': source_ref})
808
808
809 merge_ref = None
809 merge_ref = None
810 merge_commit_id = None
810 merge_commit_id = None
811 close_commit_id = None
811 close_commit_id = None
812 merge_failure_reason = MergeFailureReason.NONE
812 merge_failure_reason = MergeFailureReason.NONE
813 metadata = {}
813 metadata = {}
814
814
815 # enforce that close branch should be used only in case we source from
815 # enforce that close branch should be used only in case we source from
816 # an actual Branch
816 # an actual Branch
817 close_branch = close_branch and source_ref.type == 'branch'
817 close_branch = close_branch and source_ref.type == 'branch'
818
818
819 # don't allow to close branch if source and target are the same
819 # don't allow to close branch if source and target are the same
820 close_branch = close_branch and source_ref.name != target_ref.name
820 close_branch = close_branch and source_ref.name != target_ref.name
821
821
822 needs_push_on_close = False
822 needs_push_on_close = False
823 if close_branch and not use_rebase and not dry_run:
823 if close_branch and not use_rebase and not dry_run:
824 try:
824 try:
825 close_commit_id, needs_push_on_close = shadow_repo._local_close(
825 close_commit_id, needs_push_on_close = shadow_repo._local_close(
826 target_ref, merger_name, merger_email, source_ref)
826 target_ref, merger_name, merger_email, source_ref)
827 merge_possible = True
827 merge_possible = True
828 except RepositoryError:
828 except RepositoryError:
829 log.exception('Failure when doing close branch on '
829 log.exception('Failure when doing close branch on '
830 'shadow repo: %s', shadow_repo)
830 'shadow repo: %s', shadow_repo)
831 merge_possible = False
831 merge_possible = False
832 merge_failure_reason = MergeFailureReason.MERGE_FAILED
832 merge_failure_reason = MergeFailureReason.MERGE_FAILED
833 else:
833 else:
834 merge_possible = True
834 merge_possible = True
835
835
836 needs_push = False
836 needs_push = False
837 if merge_possible:
837 if merge_possible:
838
838
839 try:
839 try:
840 merge_commit_id, needs_push = shadow_repo._local_merge(
840 merge_commit_id, needs_push = shadow_repo._local_merge(
841 target_ref, merge_message, merger_name, merger_email,
841 target_ref, merge_message, merger_name, merger_email,
842 source_ref, use_rebase=use_rebase,
842 source_ref, use_rebase=use_rebase,
843 close_commit_id=close_commit_id, dry_run=dry_run)
843 close_commit_id=close_commit_id, dry_run=dry_run)
844 merge_possible = True
844 merge_possible = True
845
845
846 # read the state of the close action, if it
846 # read the state of the close action, if it
847 # maybe required a push
847 # maybe required a push
848 needs_push = needs_push or needs_push_on_close
848 needs_push = needs_push or needs_push_on_close
849
849
850 # Set a bookmark pointing to the merge commit. This bookmark
850 # Set a bookmark pointing to the merge commit. This bookmark
851 # may be used to easily identify the last successful merge
851 # may be used to easily identify the last successful merge
852 # commit in the shadow repository.
852 # commit in the shadow repository.
853 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
853 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
854 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
854 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
855 except SubrepoMergeError:
855 except SubrepoMergeError:
856 log.exception(
856 log.exception(
857 'Subrepo merge error during local merge on hg shadow repo.')
857 'Subrepo merge error during local merge on hg shadow repo.')
858 merge_possible = False
858 merge_possible = False
859 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
859 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
860 needs_push = False
860 needs_push = False
861 except RepositoryError as e:
861 except RepositoryError as e:
862 log.exception('Failure when doing local merge on hg shadow repo')
862 log.exception('Failure when doing local merge on hg shadow repo')
863 if isinstance(e, UnresolvedFilesInRepo):
863 if isinstance(e, UnresolvedFilesInRepo):
864 all_conflicts = list(e.args[0])
864 all_conflicts = list(e.args[0])
865 max_conflicts = 20
865 max_conflicts = 20
866 if len(all_conflicts) > max_conflicts:
866 if len(all_conflicts) > max_conflicts:
867 conflicts = all_conflicts[:max_conflicts] \
867 conflicts = all_conflicts[:max_conflicts] \
868 + ['and {} more.'.format(len(all_conflicts)-max_conflicts)]
868 + ['and {} more.'.format(len(all_conflicts)-max_conflicts)]
869 else:
869 else:
870 conflicts = all_conflicts
870 conflicts = all_conflicts
871 metadata['unresolved_files'] = \
871 metadata['unresolved_files'] = \
872 '\n* conflict: ' + \
872 '\n* conflict: ' + \
873 ('\n * conflict: '.join(conflicts))
873 ('\n * conflict: '.join(conflicts))
874
874
875 merge_possible = False
875 merge_possible = False
876 merge_failure_reason = MergeFailureReason.MERGE_FAILED
876 merge_failure_reason = MergeFailureReason.MERGE_FAILED
877 needs_push = False
877 needs_push = False
878
878
879 if merge_possible and not dry_run:
879 if merge_possible and not dry_run:
880 if needs_push:
880 if needs_push:
881 # In case the target is a bookmark, update it, so after pushing
881 # In case the target is a bookmark, update it, so after pushing
882 # the bookmarks is also updated in the target.
882 # the bookmarks is also updated in the target.
883 if target_ref.type == 'book':
883 if target_ref.type == 'book':
884 shadow_repo.bookmark(
884 shadow_repo.bookmark(
885 target_ref.name, revision=merge_commit_id)
885 target_ref.name, revision=merge_commit_id)
886 try:
886 try:
887 shadow_repo_with_hooks = self.get_shadow_instance(
887 shadow_repo_with_hooks = self.get_shadow_instance(
888 shadow_repository_path,
888 shadow_repository_path,
889 enable_hooks=True)
889 enable_hooks=True)
890 # This is the actual merge action, we push from shadow
890 # This is the actual merge action, we push from shadow
891 # into origin.
891 # into origin.
892 # Note: the push_branches option will push any new branch
892 # Note: the push_branches option will push any new branch
893 # defined in the source repository to the target. This may
893 # defined in the source repository to the target. This may
894 # be dangerous as branches are permanent in Mercurial.
894 # be dangerous as branches are permanent in Mercurial.
895 # This feature was requested in issue #441.
895 # This feature was requested in issue #441.
896 shadow_repo_with_hooks._local_push(
896 shadow_repo_with_hooks._local_push(
897 merge_commit_id, self.path, push_branches=True,
897 merge_commit_id, self.path, push_branches=True,
898 enable_hooks=True)
898 enable_hooks=True)
899
899
900 # maybe we also need to push the close_commit_id
900 # maybe we also need to push the close_commit_id
901 if close_commit_id:
901 if close_commit_id:
902 shadow_repo_with_hooks._local_push(
902 shadow_repo_with_hooks._local_push(
903 close_commit_id, self.path, push_branches=True,
903 close_commit_id, self.path, push_branches=True,
904 enable_hooks=True)
904 enable_hooks=True)
905 merge_succeeded = True
905 merge_succeeded = True
906 except RepositoryError:
906 except RepositoryError:
907 log.exception(
907 log.exception(
908 'Failure when doing local push from the shadow '
908 'Failure when doing local push from the shadow '
909 'repository to the target repository at %s.', self.path)
909 'repository to the target repository at %s.', self.path)
910 merge_succeeded = False
910 merge_succeeded = False
911 merge_failure_reason = MergeFailureReason.PUSH_FAILED
911 merge_failure_reason = MergeFailureReason.PUSH_FAILED
912 metadata['target'] = 'hg shadow repo'
912 metadata['target'] = 'hg shadow repo'
913 metadata['merge_commit'] = merge_commit_id
913 metadata['merge_commit'] = merge_commit_id
914 else:
914 else:
915 merge_succeeded = True
915 merge_succeeded = True
916 else:
916 else:
917 merge_succeeded = False
917 merge_succeeded = False
918
918
919 return MergeResponse(
919 return MergeResponse(
920 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
920 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
921 metadata=metadata)
921 metadata=metadata)
922
922
923 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
923 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
924 config = self.config.copy()
924 config = self.config.copy()
925 if not enable_hooks:
925 if not enable_hooks:
926 config.clear_section('hooks')
926 config.clear_section('hooks')
927 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
927 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
928
928
929 def _validate_pull_reference(self, reference):
929 def _validate_pull_reference(self, reference):
930 if not (reference.name in self.bookmarks or
930 if not (reference.name in self.bookmarks or
931 reference.name in self.branches or
931 reference.name in self.branches or
932 self.get_commit(reference.commit_id)):
932 self.get_commit(reference.commit_id)):
933 raise CommitDoesNotExistError(
933 raise CommitDoesNotExistError(
934 'Unknown branch, bookmark or commit id')
934 'Unknown branch, bookmark or commit id')
935
935
936 def _local_pull(self, repository_path, reference):
936 def _local_pull(self, repository_path, reference):
937 """
937 """
938 Fetch a branch, bookmark or commit from a local repository.
938 Fetch a branch, bookmark or commit from a local repository.
939 """
939 """
940 repository_path = os.path.abspath(repository_path)
940 repository_path = os.path.abspath(repository_path)
941 if repository_path == self.path:
941 if repository_path == self.path:
942 raise ValueError('Cannot pull from the same repository')
942 raise ValueError('Cannot pull from the same repository')
943
943
944 reference_type_to_option_name = {
944 reference_type_to_option_name = {
945 'book': 'bookmark',
945 'book': 'bookmark',
946 'branch': 'branch',
946 'branch': 'branch',
947 }
947 }
948 option_name = reference_type_to_option_name.get(
948 option_name = reference_type_to_option_name.get(
949 reference.type, 'revision')
949 reference.type, 'revision')
950
950
951 if option_name == 'revision':
951 if option_name == 'revision':
952 ref = reference.commit_id
952 ref = reference.commit_id
953 else:
953 else:
954 ref = reference.name
954 ref = reference.name
955
955
956 options = {option_name: [ref]}
956 options = {option_name: [ref]}
957 self._remote.pull_cmd(repository_path, hooks=False, **options)
957 self._remote.pull_cmd(repository_path, hooks=False, **options)
958 self._remote.invalidate_vcs_cache()
958 self._remote.invalidate_vcs_cache()
959
959
960 def bookmark(self, bookmark, revision=None):
960 def bookmark(self, bookmark, revision=None):
961 if isinstance(bookmark, unicode):
961 if isinstance(bookmark, unicode):
962 bookmark = safe_str(bookmark)
962 bookmark = safe_str(bookmark)
963 self._remote.bookmark(bookmark, revision=revision)
963 self._remote.bookmark(bookmark, revision=revision)
964 self._remote.invalidate_vcs_cache()
964 self._remote.invalidate_vcs_cache()
965
965
    def get_path_permissions(self, username):
        """
        Build a path permission checker for *username* from an optional
        ``.hg/hgacl`` file inside the repository.

        Returns a ``BasePathPermissionChecker`` created from the include and
        exclude patterns, or ``None`` when no hgacl file exists.

        :raises exceptions.RepositoryRequirementError: when the hgacl file
            exists but cannot be read or parsed.
        """
        hgacl_file = os.path.join(self.path, '.hg/hgacl')

        def read_patterns(suffix):
            # Look up the user-specific option first, then the default one,
            # in both the [narrowacl] and [narrowhgacl] sections.
            # NOTE: closes over ``hgacl``, which is assigned below before
            # this helper is ever called.
            svalue = None
            for section, option in [
                ('narrowacl', username + suffix),
                ('narrowacl', 'default' + suffix),
                ('narrowhgacl', username + suffix),
                ('narrowhgacl', 'default' + suffix)
            ]:
                try:
                    svalue = hgacl.get(section, option)
                    break  # stop at the first value we find
                except configparser.NoOptionError:
                    pass
            if not svalue:
                return None
            # Always include the repository root; for plain (non-glob)
            # patterns also include everything below that directory.
            result = ['/']
            for pattern in svalue.split():
                result.append(pattern)
                if '*' not in pattern and '?' not in pattern:
                    result.append(pattern + '/*')
            return result

        if os.path.exists(hgacl_file):
            try:
                hgacl = configparser.RawConfigParser()
                hgacl.read(hgacl_file)

                includes = read_patterns('.includes')
                excludes = read_patterns('.excludes')
                return BasePathPermissionChecker.create_from_patterns(
                    includes, excludes)
            except BaseException as e:
                # Any parsing problem makes the ACL state unknowable, so the
                # repository is treated as unusable rather than wide open.
                msg = 'Cannot read ACL settings from {} on {}: {}'.format(
                    hgacl_file, self.name, e)
                raise exceptions.RepositoryRequirementError(msg)
        else:
            return None
1006
1006
1007
1007
class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
    """Collection generator whose items are numeric commit indexes."""

    def _commit_factory(self, commit_id):
        # ``commit_id`` is an index here, not a hash.
        return self.repo.get_commit(commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,370 +1,370 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2020 RhodeCode GmbH
3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 SVN repository module
22 SVN repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import urllib
27 import urllib
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import date_astimestamp
32 from rhodecode.lib.datelib import date_astimestamp
33 from rhodecode.lib.utils import safe_str, safe_unicode
33 from rhodecode.lib.utils import safe_str, safe_unicode
34 from rhodecode.lib.utils2 import CachedProperty
34 from rhodecode.lib.utils2 import CachedProperty
35 from rhodecode.lib.vcs import connection, path as vcspath
35 from rhodecode.lib.vcs import connection, path as vcspath
36 from rhodecode.lib.vcs.backends import base
36 from rhodecode.lib.vcs.backends import base
37 from rhodecode.lib.vcs.backends.svn.commit import (
37 from rhodecode.lib.vcs.backends.svn.commit import (
38 SubversionCommit, _date_from_svn_properties)
38 SubversionCommit, _date_from_svn_properties)
39 from rhodecode.lib.vcs.backends.svn.diff import SubversionDiff
39 from rhodecode.lib.vcs.backends.svn.diff import SubversionDiff
40 from rhodecode.lib.vcs.backends.svn.inmemory import SubversionInMemoryCommit
40 from rhodecode.lib.vcs.backends.svn.inmemory import SubversionInMemoryCommit
41 from rhodecode.lib.vcs.conf import settings
41 from rhodecode.lib.vcs.conf import settings
42 from rhodecode.lib.vcs.exceptions import (
42 from rhodecode.lib.vcs.exceptions import (
43 CommitDoesNotExistError, EmptyRepositoryError, RepositoryError,
43 CommitDoesNotExistError, EmptyRepositoryError, RepositoryError,
44 VCSError, NodeDoesNotExistError)
44 VCSError, NodeDoesNotExistError)
45
45
46
46
47 log = logging.getLogger(__name__)
47 log = logging.getLogger(__name__)
48
48
49
49
50 class SubversionRepository(base.BaseRepository):
50 class SubversionRepository(base.BaseRepository):
51 """
51 """
52 Subversion backend implementation
52 Subversion backend implementation
53
53
54 .. important::
54 .. important::
55
55
56 It is very important to distinguish the commit index and the commit id
56 It is very important to distinguish the commit index and the commit id
57 which is assigned by Subversion. The first one is always handled as an
57 which is assigned by Subversion. The first one is always handled as an
58 `int` by this implementation. The commit id assigned by Subversion on
58 `int` by this implementation. The commit id assigned by Subversion on
59 the other side will always be a `str`.
59 the other side will always be a `str`.
60
60
61 There is a specific trap since the first commit will have the index
61 There is a specific trap since the first commit will have the index
62 ``0`` but the svn id will be ``"1"``.
62 ``0`` but the svn id will be ``"1"``.
63
63
64 """
64 """
65
65
66 # Note: Subversion does not really have a default branch name.
66 # Note: Subversion does not really have a default branch name.
67 DEFAULT_BRANCH_NAME = None
67 DEFAULT_BRANCH_NAME = None
68
68
69 contact = base.BaseRepository.DEFAULT_CONTACT
69 contact = base.BaseRepository.DEFAULT_CONTACT
70 description = base.BaseRepository.DEFAULT_DESCRIPTION
70 description = base.BaseRepository.DEFAULT_DESCRIPTION
71
71
72 def __init__(self, repo_path, config=None, create=False, src_url=None, with_wire=None,
72 def __init__(self, repo_path, config=None, create=False, src_url=None, with_wire=None,
73 bare=False, **kwargs):
73 bare=False, **kwargs):
74 self.path = safe_str(os.path.abspath(repo_path))
74 self.path = safe_str(os.path.abspath(repo_path))
75 self.config = config if config else self.get_default_config()
75 self.config = config if config else self.get_default_config()
76 self.with_wire = with_wire or {"cache": False} # default should not use cache
76 self.with_wire = with_wire or {"cache": False} # default should not use cache
77
77
78 self._init_repo(create, src_url)
78 self._init_repo(create, src_url)
79
79
80 # caches
80 # caches
81 self._commit_ids = {}
81 self._commit_ids = {}
82
82
83 @LazyProperty
83 @LazyProperty
84 def _remote(self):
84 def _remote(self):
85 repo_id = self.path
85 repo_id = self.path
86 return connection.Svn(self.path, repo_id, self.config, with_wire=self.with_wire)
86 return connection.Svn(self.path, repo_id, self.config, with_wire=self.with_wire)
87
87
88 def _init_repo(self, create, src_url):
88 def _init_repo(self, create, src_url):
89 if create and os.path.exists(self.path):
89 if create and os.path.exists(self.path):
90 raise RepositoryError(
90 raise RepositoryError(
91 "Cannot create repository at %s, location already exist"
91 "Cannot create repository at %s, location already exist"
92 % self.path)
92 % self.path)
93
93
94 if create:
94 if create:
95 self._remote.create_repository(settings.SVN_COMPATIBLE_VERSION)
95 self._remote.create_repository(settings.SVN_COMPATIBLE_VERSION)
96 if src_url:
96 if src_url:
97 src_url = _sanitize_url(src_url)
97 src_url = _sanitize_url(src_url)
98 self._remote.import_remote_repository(src_url)
98 self._remote.import_remote_repository(src_url)
99 else:
99 else:
100 self._check_path()
100 self._check_path()
101
101
102 @CachedProperty
102 @CachedProperty
103 def commit_ids(self):
103 def commit_ids(self):
104 head = self._remote.lookup(None)
104 head = self._remote.lookup(None)
105 return [str(r) for r in xrange(1, head + 1)]
105 return [str(r) for r in xrange(1, head + 1)]
106
106
    def _rebuild_cache(self, commit_ids):
        # Subversion commit ids are consecutive integers, so there is no
        # id -> index cache to rebuild; intentionally a no-op.
        pass
109
109
110 def run_svn_command(self, cmd, **opts):
110 def run_svn_command(self, cmd, **opts):
111 """
111 """
112 Runs given ``cmd`` as svn command and returns tuple
112 Runs given ``cmd`` as svn command and returns tuple
113 (stdout, stderr).
113 (stdout, stderr).
114
114
115 :param cmd: full svn command to be executed
115 :param cmd: full svn command to be executed
116 :param opts: env options to pass into Subprocess command
116 :param opts: env options to pass into Subprocess command
117 """
117 """
118 if not isinstance(cmd, list):
118 if not isinstance(cmd, list):
119 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
119 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
120
120
121 skip_stderr_log = opts.pop('skip_stderr_log', False)
121 skip_stderr_log = opts.pop('skip_stderr_log', False)
122 out, err = self._remote.run_svn_command(cmd, **opts)
122 out, err = self._remote.run_svn_command(cmd, **opts)
123 if err and not skip_stderr_log:
123 if err and not skip_stderr_log:
124 log.debug('Stderr output of svn command "%s":\n%s', cmd, err)
124 log.debug('Stderr output of svn command "%s":\n%s', cmd, err)
125 return out, err
125 return out, err
126
126
    @LazyProperty
    def branches(self):
        # Branch detection is pattern based; see ``_tags_or_branches``.
        return self._tags_or_branches('vcs_svn_branch')
130
130
    @LazyProperty
    def branches_closed(self):
        # Subversion has no concept of closed branches.
        return {}
134
134
    @LazyProperty
    def bookmarks(self):
        # Subversion has no concept of bookmarks.
        return {}
138
138
139 @LazyProperty
139 @LazyProperty
140 def branches_all(self):
140 def branches_all(self):
141 # TODO: johbo: Implement proper branch support
141 # TODO: johbo: Implement proper branch support
142 all_branches = {}
142 all_branches = {}
143 all_branches.update(self.branches)
143 all_branches.update(self.branches)
144 all_branches.update(self.branches_closed)
144 all_branches.update(self.branches_closed)
145 return all_branches
145 return all_branches
146
146
    @LazyProperty
    def tags(self):
        # Tag detection is pattern based; see ``_tags_or_branches``.
        return self._tags_or_branches('vcs_svn_tag')
150
150
151 def _tags_or_branches(self, config_section):
151 def _tags_or_branches(self, config_section):
152 found_items = {}
152 found_items = {}
153
153
154 if self.is_empty():
154 if self.is_empty():
155 return {}
155 return {}
156
156
157 for pattern in self._patterns_from_section(config_section):
157 for pattern in self._patterns_from_section(config_section):
158 pattern = vcspath.sanitize(pattern)
158 pattern = vcspath.sanitize(pattern)
159 tip = self.get_commit()
159 tip = self.get_commit()
160 try:
160 try:
161 if pattern.endswith('*'):
161 if pattern.endswith('*'):
162 basedir = tip.get_node(vcspath.dirname(pattern))
162 basedir = tip.get_node(vcspath.dirname(pattern))
163 directories = basedir.dirs
163 directories = basedir.dirs
164 else:
164 else:
165 directories = (tip.get_node(pattern), )
165 directories = (tip.get_node(pattern), )
166 except NodeDoesNotExistError:
166 except NodeDoesNotExistError:
167 continue
167 continue
168 found_items.update(
168 found_items.update(
169 (safe_unicode(n.path),
169 (safe_unicode(n.path),
170 self.commit_ids[-1])
170 self.commit_ids[-1])
171 for n in directories)
171 for n in directories)
172
172
173 def get_name(item):
173 def get_name(item):
174 return item[0]
174 return item[0]
175
175
176 return OrderedDict(sorted(found_items.items(), key=get_name))
176 return OrderedDict(sorted(found_items.items(), key=get_name))
177
177
    def _patterns_from_section(self, section):
        # Yield only the pattern values of the given config section.
        return (pattern for key, pattern in self.config.items(section))
180
180
181 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
181 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
182 if self != repo2:
182 if self != repo2:
183 raise ValueError(
183 raise ValueError(
184 "Subversion does not support getting common ancestor of"
184 "Subversion does not support getting common ancestor of"
185 " different repositories.")
185 " different repositories.")
186
186
187 if int(commit_id1) < int(commit_id2):
187 if int(commit_id1) < int(commit_id2):
188 return commit_id1
188 return commit_id1
189 return commit_id2
189 return commit_id2
190
190
191 def verify(self):
191 def verify(self):
192 verify = self._remote.verify()
192 verify = self._remote.verify()
193
193
194 self._remote.invalidate_vcs_cache()
194 self._remote.invalidate_vcs_cache()
195 return verify
195 return verify
196
196
197 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
197 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
198 # TODO: johbo: Implement better comparison, this is a very naive
198 # TODO: johbo: Implement better comparison, this is a very naive
199 # version which does not allow to compare branches, tags or folders
199 # version which does not allow to compare branches, tags or folders
200 # at all.
200 # at all.
201 if repo2 != self:
201 if repo2 != self:
202 raise ValueError(
202 raise ValueError(
203 "Subversion does not support comparison of of different "
203 "Subversion does not support comparison of of different "
204 "repositories.")
204 "repositories.")
205
205
206 if commit_id1 == commit_id2:
206 if commit_id1 == commit_id2:
207 return []
207 return []
208
208
209 commit_idx1 = self._get_commit_idx(commit_id1)
209 commit_idx1 = self._get_commit_idx(commit_id1)
210 commit_idx2 = self._get_commit_idx(commit_id2)
210 commit_idx2 = self._get_commit_idx(commit_id2)
211
211
212 commits = [
212 commits = [
213 self.get_commit(commit_idx=idx)
213 self.get_commit(commit_idx=idx)
214 for idx in range(commit_idx1 + 1, commit_idx2 + 1)]
214 for idx in range(commit_idx1 + 1, commit_idx2 + 1)]
215
215
216 return commits
216 return commits
217
217
218 def _get_commit_idx(self, commit_id):
218 def _get_commit_idx(self, commit_id):
219 try:
219 try:
220 svn_rev = int(commit_id)
220 svn_rev = int(commit_id)
221 except:
221 except:
222 # TODO: johbo: this might be only one case, HEAD, check this
222 # TODO: johbo: this might be only one case, HEAD, check this
223 svn_rev = self._remote.lookup(commit_id)
223 svn_rev = self._remote.lookup(commit_id)
224 commit_idx = svn_rev - 1
224 commit_idx = svn_rev - 1
225 if commit_idx >= len(self.commit_ids):
225 if commit_idx >= len(self.commit_ids):
226 raise CommitDoesNotExistError(
226 raise CommitDoesNotExistError(
227 "Commit at index %s does not exist." % (commit_idx, ))
227 "Commit at index %s does not exist." % (commit_idx, ))
228 return commit_idx
228 return commit_idx
229
229
    @staticmethod
    def check_url(url, config):
        """
        Check if `url` is a valid source to import a Subversion repository.
        """
        # convert to URL if it's a local directory
        if os.path.isdir(url):
            url = 'file://' + urllib.pathname2url(url)
        return connection.Svn.check_url(url, config.serialize())
239
239
240 @staticmethod
240 @staticmethod
241 def is_valid_repository(path):
241 def is_valid_repository(path):
242 try:
242 try:
243 SubversionRepository(path)
243 SubversionRepository(path)
244 return True
244 return True
245 except VCSError:
245 except VCSError:
246 pass
246 pass
247 return False
247 return False
248
248
    def _check_path(self):
        # Validate that ``self.path`` exists and contains an svn repository;
        # raises ``VCSError`` otherwise.
        if not os.path.exists(self.path):
            raise VCSError('Path "%s" does not exist!' % (self.path, ))
        if not self._remote.is_path_valid_repository(self.path):
            raise VCSError(
                'Path "%s" does not contain a Subversion repository' %
                (self.path, ))
256
256
    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        # Subversion always has a first commit which has id "0" and contains
        # what we are looking for.
        # len(commit_ids) equals the newest svn revision number, since
        # ``commit_ids`` runs from "1" to head.
        last_id = len(self.commit_ids)
        properties = self._remote.revision_properties(last_id)
        return _date_from_svn_properties(properties)
268
268
    @LazyProperty
    def in_memory_commit(self):
        # Fresh in-memory commit bound to this repository instance.
        return SubversionInMemoryCommit(self)
272
272
    def get_hook_location(self):
        """
        returns absolute path to location where hooks are stored
        """
        # Hooks live in the standard <repo>/hooks directory of an svn repo.
        return os.path.join(self.path, 'hooks')
278
278
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
                   translate_tag=None, maybe_unreachable=False, reference_obj=None):
        """
        Return a ``SubversionCommit`` for *commit_id* or *commit_idx*.

        ``pre_load``, ``translate_tag``, ``maybe_unreachable`` and
        ``reference_obj`` are accepted for API compatibility with the other
        backends and are not used by this implementation.

        :raises EmptyRepositoryError: when the repository has no commits
        :raises CommitDoesNotExistError: for an unknown id or index
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        if commit_id is not None:
            self._validate_commit_id(commit_id)
        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                # Indexes map directly onto the ordered revision list.
                commit_id = self.commit_ids[commit_idx]
            except IndexError:
                raise CommitDoesNotExistError('No commit with idx: {}'.format(commit_idx))

        # Resolve symbolic ids (None/HEAD/tip) to a concrete revision.
        commit_id = self._sanitize_commit_id(commit_id)
        commit = SubversionCommit(repository=self, commit_id=commit_id)
        return commit
295
295
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Return a ``CollectionGenerator`` over commits matching the given
        id, date and branch (path) filters.

        ``show_hidden`` and ``translate_tags`` are accepted for API
        compatibility with the other backends and are not used here.

        :raises EmptyRepositoryError: when the repository has no commits
        :raises RepositoryError: when ``start_id`` comes after ``end_id``
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commit_ids yet")
        self._validate_branch_name(branch_name)

        if start_id is not None:
            self._validate_commit_id(start_id)
        if end_id is not None:
            self._validate_commit_id(end_id)

        # Translate the boundary ids into list positions inside commit_ids.
        start_raw_id = self._sanitize_commit_id(start_id)
        start_pos = self.commit_ids.index(start_raw_id) if start_id else None
        end_raw_id = self._sanitize_commit_id(end_id)
        end_pos = max(0, self.commit_ids.index(end_raw_id)) if end_id else None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))
        if end_pos is not None:
            # Make the slice below include the end commit itself.
            end_pos += 1

        # Date based filtering
        if start_date or end_date:
            start_raw_id, end_raw_id = self._remote.lookup_interval(
                date_astimestamp(start_date) if start_date else None,
                date_astimestamp(end_date) if end_date else None)
            # svn revisions are 1-based; positions are 0-based.
            start_pos = start_raw_id - 1
            end_pos = end_raw_id

        commit_ids = self.commit_ids

        # TODO: johbo: Reconsider impact of DEFAULT_BRANCH_NAME here
        if branch_name not in [None, self.DEFAULT_BRANCH_NAME]:
            # "Branches" are paths in svn: restrict to the node's history.
            svn_rev = long(self.commit_ids[-1])
            commit_ids = self._remote.node_history(
                path=branch_name, revision=svn_rev, limit=None)
            commit_ids = [str(i) for i in reversed(commit_ids)]

        if start_pos or end_pos:
            commit_ids = commit_ids[start_pos:end_pos]
        return base.CollectionGenerator(self, commit_ids, pre_load=pre_load)
340
340
    def _sanitize_commit_id(self, commit_id):
        """
        Normalize *commit_id* to a plain svn revision number string.

        Numeric ids are validated against the known revision range; the
        symbolic ids ``None``, ``'HEAD'``, ``'tip'`` and the default branch
        name all resolve to the newest revision.

        :raises CommitDoesNotExistError: for out-of-range numeric ids or
            unrecognized symbolic ids
        """
        if commit_id and commit_id.isdigit():
            # len(commit_ids) is the newest revision number (ids run 1..head).
            if int(commit_id) <= len(self.commit_ids):
                return commit_id
            else:
                raise CommitDoesNotExistError(
                    "Commit %s does not exist." % (commit_id, ))
        if commit_id not in [
                None, 'HEAD', 'tip', self.DEFAULT_BRANCH_NAME]:
            raise CommitDoesNotExistError(
                "Commit id %s not understood." % (commit_id, ))
        svn_rev = self._remote.lookup('HEAD')
        return str(svn_rev)
354
354
355 def get_diff(
355 def get_diff(
356 self, commit1, commit2, path=None, ignore_whitespace=False,
356 self, commit1, commit2, path=None, ignore_whitespace=False,
357 context=3, path1=None):
357 context=3, path1=None):
358 self._validate_diff_commits(commit1, commit2)
358 self._validate_diff_commits(commit1, commit2)
359 svn_rev1 = long(commit1.raw_id)
359 svn_rev1 = long(commit1.raw_id)
360 svn_rev2 = long(commit2.raw_id)
360 svn_rev2 = long(commit2.raw_id)
361 diff = self._remote.diff(
361 diff = self._remote.diff(
362 svn_rev1, svn_rev2, path1=path1, path2=path,
362 svn_rev1, svn_rev2, path1=path1, path2=path,
363 ignore_whitespace=ignore_whitespace, context=context)
363 ignore_whitespace=ignore_whitespace, context=context)
364 return SubversionDiff(diff)
364 return SubversionDiff(diff)
365
365
366
366
367 def _sanitize_url(url):
367 def _sanitize_url(url):
368 if '://' not in url:
368 if '://' not in url:
369 url = 'file://' + urllib.pathname2url(url)
369 url = 'file://' + urllib.pathname2url(url)
370 return url
370 return url
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now