##// END OF EJS Templates
git: adjusted code for new libgit2 backend...
marcink -
r3842:8bd67598 default
parent child Browse files
Show More
@@ -1,1029 +1,1070 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2019 RhodeCode GmbH
3 # Copyright (C) 2011-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 Some simple helper functions
23 Some simple helper functions
24 """
24 """
25
25
26 import collections
26 import collections
27 import datetime
27 import datetime
28 import dateutil.relativedelta
28 import dateutil.relativedelta
29 import hashlib
29 import hashlib
30 import logging
30 import logging
31 import re
31 import re
32 import sys
32 import sys
33 import time
33 import time
34 import urllib
34 import urllib
35 import urlobject
35 import urlobject
36 import uuid
36 import uuid
37 import getpass
37 import getpass
38 from functools import update_wrapper, partial
38
39
39 import pygments.lexers
40 import pygments.lexers
40 import sqlalchemy
41 import sqlalchemy
41 import sqlalchemy.engine.url
42 import sqlalchemy.engine.url
42 import sqlalchemy.exc
43 import sqlalchemy.exc
43 import sqlalchemy.sql
44 import sqlalchemy.sql
44 import webob
45 import webob
45 import pyramid.threadlocal
46 import pyramid.threadlocal
46 from pyramid import compat
47 from pyramid import compat
47 from pyramid.settings import asbool
48 from pyramid.settings import asbool
48
49
49 import rhodecode
50 import rhodecode
50 from rhodecode.translation import _, _pluralize
51 from rhodecode.translation import _, _pluralize
51
52
52
53
def md5(s):
    """Return the hexadecimal MD5 digest of byte string *s*."""
    digest = hashlib.md5(s)
    return digest.hexdigest()
55
56
56
57
def md5_safe(s):
    """Return the hexadecimal MD5 digest of *s*, coercing it to str first."""
    as_str = safe_str(s)
    return md5(as_str)
59
60
60
61
def sha1(s):
    """Return the hexadecimal SHA1 digest of byte string *s*."""
    hasher = hashlib.sha1(s)
    return hasher.hexdigest()
63
64
64
65
def sha1_safe(s):
    """Return the hexadecimal SHA1 digest of *s*, coercing it to str first."""
    as_str = safe_str(s)
    return sha1(as_str)
67
68
68
69
def __get_lem(extra_mapping=None):
    """
    Build a map of file extension -> list of lexer names, derived from the
    lexers pygments ships with.

    :param extra_mapping: optional dict of extension -> lexer name used to
        register extensions pygments does not know about; existing pygments
        entries always win over the extras
    """
    lexer_map = collections.defaultdict(lambda: [])

    def _normalize(pattern):
        # drop the glob prefix, e.g. '*.py' -> 'py'
        pattern = pattern.lstrip('*').lstrip('.')

        # expand character classes such as 'php[345]' -> php3, php4, php5
        if pattern.find('[') != -1:
            start, stop = pattern.find('['), pattern.find(']')
            stem = pattern[:start]
            variants = [stem + ch for ch in pattern[start + 1:stop]]
            return [variant.lower() for variant in variants]
        return [pattern.lower()]

    for lexer_name, info in sorted(pygments.lexers.LEXERS.items()):
        # info[-2] holds the filename glob patterns registered for the lexer
        expanded = [_normalize(pattern) for pattern in info[-2]]
        if expanded:
            description = lexer_name.replace('Lexer', '')
            for group in expanded:
                for ext in group:
                    lexer_map[ext].append(description)

    data = dict(lexer_map)

    for ext, lexer in (extra_mapping or {}).items():
        if ext not in data:
            # register new mapping2lexer
            data[ext] = [lexer]

    return data
107
108
108
109
def str2bool(_str):
    """
    Translate the given value into a boolean.

    :param _str: string (or bool-like value) to translate into boolean
    :rtype: boolean
    :returns: True for the markers 't', 'true', 'y', 'yes', 'on', '1'
        (case-insensitive, surrounding whitespace ignored); None maps to
        False and real booleans pass straight through
    """
    if _str is None:
        return False
    # genuine booleans (and 0/1, which compare equal) pass through untouched
    if _str in (True, False):
        return _str
    normalized = str(_str).strip().lower()
    return normalized in ('t', 'true', 'y', 'yes', 'on', '1')
124
125
125
126
def aslist(obj, sep=None, strip=True):
    """
    Coerce *obj* into a list.

    Strings are split on *sep* (any whitespace when None); lists and tuples
    are returned unchanged; None becomes an empty list; any other value is
    wrapped in a single-element list.

    :param obj: value to coerce
    :param sep: separator used when *obj* is a string
    :param strip: strip whitespace around each split fragment
    """
    if isinstance(obj, (list, tuple)):
        return obj
    if obj is None:
        return []
    if isinstance(obj, (basestring,)):
        parts = obj.split(sep)
        if strip:
            parts = [part.strip() for part in parts]
        return parts
    return [obj]
145
146
146
147
def convert_line_endings(line, mode):
    """
    Convert the line endings of *line* to the requested convention.

    Available modes are::
        0 - Unix
        1 - Mac
        2 - DOS

    :param line: given line to convert
    :param mode: mode to convert to; any other value returns *line* unchanged
    :rtype: str
    :return: converted line according to mode
    """
    if mode == 0:
        # collapse DOS pairs first so '\r\n' does not become '\n\n'
        return line.replace('\r\n', '\n').replace('\r', '\n')
    if mode == 1:
        return line.replace('\r\n', '\r').replace('\n', '\r')
    if mode == 2:
        # lone '\r' or lone '\n' -> '\r\n'; existing pairs stay intact
        return re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line)
    return line
170
171
171
172
def detect_mode(line, default):
    """
    Detect the line-ending convention of *line*; when no line break is
    present the given default is returned.

    :param line: str line
    :param default: default
    :rtype: int
    :return: 0 - Unix, 1 - Mac, 2 - DOS, or *default*
    """
    # '\r\n' must be probed before its single-character components
    for ending, mode in (('\r\n', 2), ('\n', 0), ('\r', 1)):
        if line.endswith(ending):
            return mode
    return default
190
191
191
192
def safe_int(val, default=None):
    """
    Convert *val* with int(), returning *default* when the value is not
    convertible.

    :param val: value to convert
    :param default: returned when int() raises ValueError or TypeError
    """
    try:
        return int(val)
    except (ValueError, TypeError):
        return default
207
208
208
209
def safe_unicode(str_, from_encoding=None):
    """
    safe unicode function. Does few trick to turn str_ into unicode

    In case of UnicodeDecode error, we try to return it with encoding detected
    by chardet library if it fails fallback to unicode with errors replaced

    Decode attempts, in order: already-unicode passthrough; default-codec
    ``unicode()``; each candidate in *from_encoding*; chardet's guess;
    finally a lossy decode with errors='replace'.

    :param str_: string to decode
    :param from_encoding: single encoding name or list of candidates;
        defaults to the comma-separated ``default_encoding`` config value
    :rtype: unicode
    :returns: unicode object
    """
    if isinstance(str_, unicode):
        return str_

    if not from_encoding:
        # pull the candidate encodings from the app config (comma separated)
        DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
                                                        'utf8'), sep=',')
        from_encoding = DEFAULT_ENCODINGS

    # normalize to a list so the fallback loop below always works
    if not isinstance(from_encoding, (list, tuple)):
        from_encoding = [from_encoding]

    # first try the interpreter default codec
    try:
        return unicode(str_)
    except UnicodeDecodeError:
        pass

    # then each configured/requested encoding, in order
    for enc in from_encoding:
        try:
            return unicode(str_, enc)
        except UnicodeDecodeError:
            pass

    # last resort: let chardet guess; any failure (no chardet, no guess,
    # bad guess) falls through to a lossy 'replace' decode
    try:
        import chardet
        encoding = chardet.detect(str_)['encoding']
        if encoding is None:
            raise Exception()
        return str_.decode(encoding)
    except (ImportError, UnicodeDecodeError, Exception):
        return unicode(str_, from_encoding[0], 'replace')
250
251
251
252
def safe_str(unicode_, to_encoding=None):
    """
    safe str function. Does few trick to turn unicode_ into string

    In case of UnicodeEncodeError, we try to return it with encoding detected
    by chardet library if it fails fallback to string with errors replaced

    Encode attempts, in order: non-string passthrough via str(); already-str
    passthrough; each candidate in *to_encoding*; chardet's guess; finally a
    lossy encode with errors='replace'.

    :param unicode_: unicode to encode
    :param to_encoding: single encoding name or list of candidates; defaults
        to the comma-separated ``default_encoding`` config value
    :rtype: str
    :returns: str object
    """

    # if it's not basestr cast to str
    if not isinstance(unicode_, compat.string_types):
        return str(unicode_)

    if isinstance(unicode_, str):
        return unicode_

    if not to_encoding:
        DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
                                                        'utf8'), sep=',')
        to_encoding = DEFAULT_ENCODINGS

    # normalize to a list so the fallback loop below always works
    if not isinstance(to_encoding, (list, tuple)):
        to_encoding = [to_encoding]

    # try each configured/requested encoding, in order
    for enc in to_encoding:
        try:
            return unicode_.encode(enc)
        except UnicodeEncodeError:
            pass

    # Last resort: let chardet guess an encoding. BUGFIX: the previous code
    # did `raise UnicodeEncodeError()` when chardet found nothing, but that
    # constructor requires 5 arguments, so it actually raised a TypeError
    # that escaped the except clause below. We now simply fall through to
    # the lossy 'replace' encode instead.
    try:
        import chardet
        encoding = chardet.detect(unicode_)['encoding']
        if encoding is not None:
            return unicode_.encode(encoding)
    except (ImportError, UnicodeEncodeError):
        pass

    return unicode_.encode(to_encoding[0], 'replace')
294
295
295
296
def remove_suffix(s, suffix):
    """
    Return *s* with *suffix* removed from its end, if present.

    :param s: input string
    :param suffix: suffix to strip; an empty suffix leaves *s* untouched
    """
    # BUGFIX: guard against an empty suffix — s.endswith('') is always True
    # and s[:-0] evaluates to '', which destroyed the whole input before.
    if suffix and s.endswith(suffix):
        s = s[:-1 * len(suffix)]
    return s
300
301
301
302
def remove_prefix(s, prefix):
    """
    Return *s* with *prefix* removed from its start, if present.

    :param s: input string
    :param prefix: prefix to strip
    """
    if not s.startswith(prefix):
        return s
    return s[len(prefix):]
306
307
307
308
def find_calling_context(ignore_modules=None):
    """
    Look through the calling stack and return the frame which called
    this function and is part of core module ( ie. rhodecode.* )

    :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib']
    :returns: frame object, or None when no matching frame is found
    """

    ignore_modules = ignore_modules or []

    # start two frames up: skip this function and its immediate caller
    f = sys._getframe(2)
    while f.f_back is not None:
        name = f.f_globals.get('__name__')
        # match any module in the same top-level package as this one
        # (i.e. 'rhodecode.*'), unless it was explicitly ignored
        if name and name.startswith(__name__.split('.')[0]):
            if name not in ignore_modules:
                return f
        f = f.f_back
    return None
326
327
327
328
def ping_connection(connection, branch):
    """
    SQLAlchemy ``engine_connect`` listener that issues a ``SELECT 1`` on
    checkout so stale pooled connections are detected and re-validated
    before they are handed to application code.

    :param connection: the Connection being checked out
    :param branch: True when this is a sub-connection of an existing
        connection (those are not pinged)
    """
    if branch:
        # "branch" refers to a sub-connection of a connection,
        # we don't want to bother pinging on these.
        return

    # turn off "close with result". This flag is only used with
    # "connectionless" execution, otherwise will be False in any case
    save_should_close_with_result = connection.should_close_with_result
    connection.should_close_with_result = False

    try:
        # run a SELECT 1. use a core select() so that
        # the SELECT of a scalar value without a table is
        # appropriately formatted for the backend
        connection.scalar(sqlalchemy.sql.select([1]))
    except sqlalchemy.exc.DBAPIError as err:
        # catch SQLAlchemy's DBAPIError, which is a wrapper
        # for the DBAPI's exception. It includes a .connection_invalidated
        # attribute which specifies if this connection is a "disconnect"
        # condition, which is based on inspection of the original exception
        # by the dialect in use.
        if err.connection_invalidated:
            # run the same SELECT again - the connection will re-validate
            # itself and establish a new connection. The disconnect detection
            # here also causes the whole connection pool to be invalidated
            # so that all stale connections are discarded.
            connection.scalar(sqlalchemy.sql.select([1]))
        else:
            raise
    finally:
        # restore "close with result"
        connection.should_close_with_result = save_should_close_with_result
361
362
362
363
def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
    """
    Custom engine_from_config functions.

    Wraps ``sqlalchemy.engine_from_config`` and optionally attaches:

    * a ping-on-checkout listener when ``sqlalchemy.db1.ping_connection``
      is truthy (the key is popped so SQLAlchemy never sees it)
    * before/after cursor-execute listeners that log each query's start
      and calling context when ``debug`` is truthy

    :param configuration: dict-like settings passed on to SQLAlchemy
    :param prefix: key prefix selecting the engine settings
    :return: the configured SQLAlchemy engine
    """
    log = logging.getLogger('sqlalchemy.engine')
    # pop (not get): this custom key must not reach sqlalchemy itself
    use_ping_connection = asbool(configuration.pop('sqlalchemy.db1.ping_connection', None))
    debug = asbool(configuration.get('debug'))

    engine = sqlalchemy.engine_from_config(configuration, prefix, **kwargs)

    def color_sql(sql):
        # wrap the SQL in ANSI escape codes for readable console logs
        color_seq = '\033[1;33m'  # This is yellow: code 33
        normal = '\x1b[0m'
        return ''.join([color_seq, sql, normal])

    if use_ping_connection:
        log.debug('Adding ping_connection on the engine config.')
        sqlalchemy.event.listen(engine, "engine_connect", ping_connection)

    if debug:
        # attach events only for debug configuration
        def before_cursor_execute(conn, cursor, statement,
                                  parameters, context, executemany):
            # remember when the query started; paired with the delattr below
            setattr(conn, 'query_start_time', time.time())
            log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
            calling_context = find_calling_context(ignore_modules=[
                'rhodecode.lib.caching_query',
                'rhodecode.model.settings',
            ])
            if calling_context:
                log.info(color_sql('call context %s:%s' % (
                    calling_context.f_code.co_filename,
                    calling_context.f_lineno,
                )))

        def after_cursor_execute(conn, cursor, statement,
                                 parameters, context, executemany):
            delattr(conn, 'query_start_time')

        sqlalchemy.event.listen(engine, "before_cursor_execute", before_cursor_execute)
        sqlalchemy.event.listen(engine, "after_cursor_execute", after_cursor_execute)

    return engine
404
405
405
406
def get_encryption_key(config):
    """
    Resolve the secret used for encrypted configuration values.

    Prefers the dedicated ``rhodecode.encrypted_values.secret`` entry and
    falls back to the beaker session secret.

    :param config: config dict; must contain ``beaker.session.secret``
        (a missing key raises KeyError even when the explicit secret is set)
    """
    explicit = config.get('rhodecode.encrypted_values.secret')
    # eager lookup on purpose: a missing beaker secret is a config error
    fallback = config['beaker.session.secret']
    return explicit if explicit else fallback
410
411
411
412
def age(prevdate, now=None, show_short_version=False, show_suffix=True,
        short_format=False):
    """
    Turns a datetime into an age string.
    If show_short_version is True, this generates a shorter string with
    an approximate age; ex. '1 day ago', rather than '1 day and 23 hours ago'.

    * IMPORTANT*
    Code of this function is written in special way so it's easier to
    backport it to javascript. If you mean to update it, please also update
    `jquery.timeago-extension.js` file

    :param prevdate: datetime object
    :param now: get current time, if not define we use
        `datetime.datetime.now()`
    :param show_short_version: if it should approximate the date and
        return a shorter string
    :param show_suffix: append 'ago' (or prepend 'in' for future dates)
        around the rendered value
    :param short_format: show short format, eg 2D instead of 2 days
    :rtype: unicode
    :returns: unicode words describing age
    """

    def _get_relative_delta(now, prevdate):
        # split the relativedelta into a plain part-name -> value dict
        base = dateutil.relativedelta.relativedelta(now, prevdate)
        return {
            'year': base.years,
            'month': base.months,
            'day': base.days,
            'hour': base.hours,
            'minute': base.minutes,
            'second': base.seconds,
        }

    def _is_leap_year(year):
        return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)

    def get_month(prevdate):
        return prevdate.month

    def get_year(prevdate):
        return prevdate.year

    now = now or datetime.datetime.now()
    # parts ordered largest -> smallest; index math below relies on this
    order = ['year', 'month', 'day', 'hour', 'minute', 'second']
    deltas = {}
    future = False

    # for future dates, swap the operands so the delta math below always
    # measures past -> future, and remember the direction for formatting
    if prevdate > now:
        now_old = now
        now = prevdate
        prevdate = now_old
        future = True
    if future:
        prevdate = prevdate.replace(microsecond=0)
    # Get date parts deltas
    for part in order:
        rel_delta = _get_relative_delta(now, prevdate)
        deltas[part] = rel_delta[part]

    # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
    # not 1 hour, -59 minutes and -59 seconds)
    # each entry is [index into `order`, units carried into the next part]
    offsets = [[5, 60], [4, 60], [3, 24]]
    for element in offsets:  # seconds, minutes, hours
        num = element[0]
        length = element[1]

        part = order[num]
        carry_part = order[num - 1]

        if deltas[part] < 0:
            deltas[part] += length
            deltas[carry_part] -= 1

    # Same thing for days except that the increment depends on the (variable)
    # number of days in the month
    month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
    if deltas['day'] < 0:
        if get_month(prevdate) == 2 and _is_leap_year(get_year(prevdate)):
            deltas['day'] += 29
        else:
            deltas['day'] += month_lengths[get_month(prevdate) - 1]

        deltas['month'] -= 1

    if deltas['month'] < 0:
        deltas['month'] += 12
        deltas['year'] -= 1

    # Format the result
    if short_format:
        fmt_funcs = {
            'year': lambda d: u'%dy' % d,
            'month': lambda d: u'%dm' % d,
            'day': lambda d: u'%dd' % d,
            'hour': lambda d: u'%dh' % d,
            'minute': lambda d: u'%dmin' % d,
            'second': lambda d: u'%dsec' % d,
        }
    else:
        fmt_funcs = {
            'year': lambda d: _pluralize(u'${num} year', u'${num} years', d, mapping={'num': d}).interpolate(),
            'month': lambda d: _pluralize(u'${num} month', u'${num} months', d, mapping={'num': d}).interpolate(),
            'day': lambda d: _pluralize(u'${num} day', u'${num} days', d, mapping={'num': d}).interpolate(),
            'hour': lambda d: _pluralize(u'${num} hour', u'${num} hours', d, mapping={'num': d}).interpolate(),
            'minute': lambda d: _pluralize(u'${num} minute', u'${num} minutes', d, mapping={'num': d}).interpolate(),
            'second': lambda d: _pluralize(u'${num} second', u'${num} seconds', d, mapping={'num': d}).interpolate(),
        }

    # walk from the largest part down; the first non-zero part (plus its
    # immediate sub-part, when non-zero) determines the rendered string
    i = 0
    for part in order:
        value = deltas[part]
        if value != 0:

            if i < 5:
                sub_part = order[i + 1]
                sub_value = deltas[sub_part]
            else:
                sub_value = 0

            # single-part rendering: sub-part is zero, or caller asked for
            # the approximate (short) version
            if sub_value == 0 or show_short_version:
                _val = fmt_funcs[part](value)
                if future:
                    if show_suffix:
                        return _(u'in ${ago}', mapping={'ago': _val})
                    else:
                        return _(_val)

                else:
                    if show_suffix:
                        return _(u'${ago} ago', mapping={'ago': _val})
                    else:
                        return _(_val)

            # two-part rendering, e.g. '1 day and 23 hours ago'
            val = fmt_funcs[part](value)
            val_detail = fmt_funcs[sub_part](sub_value)
            mapping = {'val': val, 'detail': val_detail}

            if short_format:
                datetime_tmpl = _(u'${val}, ${detail}', mapping=mapping)
                if show_suffix:
                    datetime_tmpl = _(u'${val}, ${detail} ago', mapping=mapping)
                    if future:
                        datetime_tmpl = _(u'in ${val}, ${detail}', mapping=mapping)
            else:
                datetime_tmpl = _(u'${val} and ${detail}', mapping=mapping)
                if show_suffix:
                    datetime_tmpl = _(u'${val} and ${detail} ago', mapping=mapping)
                    if future:
                        datetime_tmpl = _(u'in ${val} and ${detail}', mapping=mapping)

            return datetime_tmpl
        i += 1
    return _(u'just now')
566
567
567
568
def age_from_seconds(seconds):
    """
    Human-readable age string for a duration given in seconds, in short
    form and without the 'ago' suffix (e.g. '2 minutes').

    :param seconds: duration in seconds; coerced via safe_int, falling
        back to 0 for non-numeric input
    """
    seconds = safe_int(seconds) or 0
    # build a datetime `seconds` away from now and reuse the age() formatter
    prevdate = time_to_datetime(time.time() + seconds)
    return age(prevdate, show_suffix=False, show_short_version=True)
572
573
573
574
def cleaned_uri(uri):
    """
    Percent-encode characters that are not allowed in a URI.

    According to RFC3986 characters such as '[' and ']' cannot appear
    raw in a URI, so everything outside the safe set is quoted.

    :param uri: uri string to quote
    :return: uri with unsafe chars percent-encoded ('@', '$', ':' and '/'
        are left untouched)
    """
    return urllib.quote(uri, safe='@$:/')
582
583
583
584
def uri_filter(uri):
    """
    Strip the ``user:password@`` credential section from a url and split
    the remainder into its parts.

    :param uri: url string
    :returns: non-empty parts of the url (scheme, host, port)
    """
    if not uri:
        return ''

    proto = ''
    # peel off a known scheme prefix, remembering it for the result
    for scheme in ('https://', 'http://'):
        if uri.startswith(scheme):
            proto = scheme
            uri = uri[len(scheme):]
            break

    # drop everything up to and including the credentials marker;
    # find() == -1 means "no credentials" and leaves uri unchanged
    uri = uri[uri.find('@') + 1:]

    # split off the port, if present
    sep = uri.find(':')
    if sep == -1:
        host, port = uri, None
    else:
        host, port = uri[:sep], uri[sep + 1:]

    # drop empty scheme/port entries
    return filter(None, [proto, host, port])
614
615
615
616
def credentials_filter(uri):
    """
    Return the given url with any ``user:password@`` credentials removed.

    :param uri: url string to clean
    """
    parts = uri_filter(uri)
    # uri_filter strips the ':' between host and port; restore it before
    # gluing the parts back together
    if len(parts) > 2 and parts[2]:
        parts[2] = ':' + parts[2]

    return ''.join(parts)
629
630
630
631
def get_clone_url(request, uri_tmpl, repo_name, repo_id, **override):
    """
    Render a clone URL from ``uri_tmpl`` by substituting ``{key}`` style
    placeholders (scheme, user, netloc, hostname, prefix, repo, repoid,
    sys_user) with values derived from the current request.

    :param request: pyramid request; used to resolve the 'home' route
    :param uri_tmpl: template string containing ``{placeholder}`` markers
    :param repo_name: substituted for ``{repo}``
    :param repo_id: substituted for ``{repoid}`` (stringified)
    :param override: overrides for any of the template arguments
    :return: rendered clone url as unicode
    """
    qualifed_home_url = request.route_url('home')
    parsed_url = urlobject.URLObject(qualifed_home_url)
    # decoded path covers proxy-prefix setups where the app is mounted
    # under a sub-path of the domain
    decoded_path = safe_unicode(urllib.unquote(parsed_url.path.rstrip('/')))

    args = {
        'scheme': parsed_url.scheme,
        'user': '',
        'sys_user': getpass.getuser(),
        # path if we use proxy-prefix
        'netloc': parsed_url.netloc+decoded_path,
        'hostname': parsed_url.hostname,
        'prefix': decoded_path,
        'repo': repo_name,
        'repoid': str(repo_id)
    }
    args.update(override)
    # the user may come from overrides; quote it so it is url-safe
    args['user'] = urllib.quote(safe_str(args['user']))

    for k, v in args.items():
        uri_tmpl = uri_tmpl.replace('{%s}' % k, v)

    # remove leading @ sign if it's present. Case of empty user
    url_obj = urlobject.URLObject(uri_tmpl)
    url = url_obj.with_netloc(url_obj.netloc.lstrip('@'))

    return safe_unicode(url)
658
659
659
660
def get_commit_safe(repo, commit_id=None, commit_idx=None, pre_load=None):
    """
    Safe version of get_commit: if the commit doesn't exist for a
    repository it returns a dummy EmptyCommit instead of raising.

    :param repo: repository instance (must be a BaseRepository subclass)
    :param commit_id: commit id as str
    :param commit_idx: numeric index of the commit
    :param pre_load: optional list of commit attributes to load
    :raises Exception: when ``repo`` is not a BaseRepository instance
    """
    # TODO(skreft): remove these circular imports
    from rhodecode.lib.vcs.backends.base import BaseRepository, EmptyCommit
    from rhodecode.lib.vcs.exceptions import RepositoryError
    if not isinstance(repo, BaseRepository):
        # NOTE: the old code passed type(repo) as a second Exception arg
        # (logging style); Exception doesn't interpolate, so the message
        # never showed the type. Format explicitly instead.
        raise Exception('You must pass an Repository '
                        'object as first argument got %s' % type(repo))

    try:
        commit = repo.get_commit(
            commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
    except (RepositoryError, LookupError):
        commit = EmptyCommit()
    return commit
682
683
683
684
def datetime_to_time(dt):
    """
    Convert a datetime into a unix timestamp (local time).
    Falsy input (e.g. None) yields None.
    """
    return time.mktime(dt.timetuple()) if dt else None
687
688
688
689
def time_to_datetime(tm):
    """
    Convert a unix timestamp (number, or numeric string) into a
    local-time datetime. Falsy input or a non-numeric string yields None.
    """
    if not tm:
        return None

    if isinstance(tm, compat.string_types):
        try:
            tm = float(tm)
        except ValueError:
            return None

    return datetime.datetime.fromtimestamp(tm)
697
698
698
699
def time_to_utcdatetime(tm):
    """
    Convert a unix timestamp (number, or numeric string) into a UTC
    datetime. Falsy input or a non-numeric string yields None.
    """
    if not tm:
        return None

    if isinstance(tm, compat.string_types):
        try:
            tm = float(tm)
        except ValueError:
            return None

    return datetime.datetime.utcfromtimestamp(tm)
707
708
708
709
# Finds @username mentions in free text; group(1) captures the username.
# The '@' must be at line start or preceded by a character that cannot be
# part of a username, so e-mail addresses are not picked up.
MENTIONS_REGEX = re.compile(
    # ^@ or @ without any special chars in front
    r'(?:^@|[^a-zA-Z0-9\-\_\.]@)'
    # main body starts with an alphanumeric char, then can be . - _
    r'([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)',
    re.VERBOSE | re.MULTILINE)
715
716
716
717
def extract_mentioned_users(s):
    """
    Return the unique usernames @mentioned in ``s``, sorted
    case-insensitively.

    :param s: string to scan for mentions
    """
    mentioned = set(MENTIONS_REGEX.findall(s))
    return sorted(mentioned, key=lambda name: name.lower())
728
729
729
730
class AttributeDictBase(dict):
    """
    Dict subclass that exposes its items as attributes: ``d.foo`` writes
    and deletes go straight to ``d['foo']``. Read behavior is provided by
    subclasses via ``__getattr__``.
    """

    def __getstate__(self):
        # pickle the instance __dict__ explicitly so attribute redirection
        # doesn't interfere with pickling
        odict = self.__dict__  # get attribute dictionary
        return odict

    def __setstate__(self, dict):
        self.__dict__ = dict

    # attribute writes/deletes are redirected to the underlying dict items
    __setattr__ = dict.__setitem__
    __delattr__ = dict.__delitem__
740
741
741
742
class StrictAttributeDict(AttributeDictBase):
    """
    Strict Version of Attribute dict which raises an Attribute error when
    requested attribute is not set
    """

    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError:
            raise AttributeError('%s object has no attribute %s' % (
                self.__class__, name))
753
754
754
755
class AttributeDict(AttributeDictBase):
    """
    Attribute dict where missing keys resolve to None instead of raising.
    """

    def __getattr__(self, name):
        return self.get(name, None)
758
759
759
760
760
761
class OrderedDefaultDict(collections.OrderedDict, collections.defaultdict):
    """
    defaultdict that also remembers insertion order.

    Relies on cooperative multiple inheritance: OrderedDict provides
    storage and ordering, defaultdict contributes ``__missing__`` driven
    by ``default_factory``.
    """
    def __init__(self, default_factory=None, *args, **kwargs):
        # in python3 you can omit the args to super
        super(OrderedDefaultDict, self).__init__(*args, **kwargs)
        self.default_factory = default_factory
766
767
767
768
def fix_PATH(os_=None):
    """
    Prepend the directory of the currently running Python interpreter to
    the PATH environment variable, so subprocess calls resolve the same
    python version as the current process.

    :param os_: optional stand-in for the ``os`` module (testing hook)
    """
    if os_ is None:
        import os
    else:
        os = os_

    cur_path = os.path.split(sys.executable)[0]
    # use .get() so environments without PATH set don't raise KeyError
    env_path = os.environ.get('PATH', '')
    if not env_path.startswith(cur_path):
        if env_path:
            os.environ['PATH'] = '%s:%s' % (cur_path, env_path)
        else:
            # avoid a trailing ':' — on POSIX it would mean "current dir"
            os.environ['PATH'] = cur_path
781
782
782
783
def obfuscate_url_pw(engine):
    """
    Return the database engine URL with any password replaced by 'XXXXX'.

    :param engine: SQLAlchemy engine URL string
    :return: obfuscated url as unicode; the raw input if parsing fails
    """
    _url = engine or ''
    try:
        _url = sqlalchemy.engine.url.make_url(engine)
        # NOTE(review): direct password assignment works on the SQLAlchemy
        # version pinned here; newer releases make URL immutable — confirm
        # before upgrading
        if _url.password:
            _url.password = 'XXXXX'
    except Exception:
        # best-effort: fall back to the raw value when the url is unparsable
        pass
    return unicode(_url)
792
793
793
794
def get_server_url(environ):
    """
    Build the externally visible base URL from a WSGI environ:
    scheme://host[:port] plus the application mount prefix.

    :param environ: WSGI environ dict
    """
    req = webob.Request(environ)
    return req.host_url + req.script_name
797
798
798
799
def unique_id(hexlen=32):
    """
    Return a random id of at most ``hexlen`` characters, drawn from an
    alphabet that avoids visually ambiguous characters (0/O, 1/I/l).

    :param hexlen: maximum length of the generated id
    """
    alphabet = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz"
    return suuid(truncate_to=hexlen, alphabet=alphabet)
802
803
803
804
def suuid(url=None, truncate_to=22, alphabet=None):
    """
    Generate and return a short URL safe UUID.

    If the url parameter is provided, set the namespace to the provided
    URL and generate a UUID (deterministic via uuid3); otherwise a random
    uuid4 is used.

    :param url: url to get the uuid for
    :param truncate_to: truncate the basic 22 UUID to shorter version
    :param alphabet: optional custom alphabet used for the encoding

    The IDs won't be universally unique any longer, but the probability of
    a collision will still be very low.
    """
    # Define our alphabet.
    _ALPHABET = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ"

    # If no URL is given, generate a random UUID.
    if url is None:
        remainder = uuid.uuid4().int
    else:
        remainder = uuid.uuid3(uuid.NAMESPACE_URL, url).int

    alphabet_length = len(_ALPHABET)
    output = []
    while remainder > 0:
        # divmod keeps exact integer math; the previous int(x / y) form
        # loses precision under Python 3's true division once the 128-bit
        # uuid integer exceeds float range (identical result on Python 2)
        remainder, digit = divmod(remainder, alphabet_length)
        output.append(_ALPHABET[digit])
    return "".join(output)[:truncate_to]
833
834
834
835
def get_current_rhodecode_user(request=None):
    """
    Return the acting rhodecode user bound to the given request (or the
    threadlocal pyramid request), or None when no user can be found.
    """
    req = request or pyramid.threadlocal.get_current_request()

    # regular web request carries `.user`
    if req and hasattr(req, 'user'):
        return req.user

    # JSON-RPC / api request carries `.rpc_user`
    if req and hasattr(req, 'rpc_user'):
        return req.rpc_user

    return None
850
851
851
852
def action_logger_generic(action, namespace=''):
    """
    A generic logger for actions useful to the system overview; tries to
    find an acting user for the context of the call, otherwise reports an
    unknown user.

    :param action: logging message eg 'comment 5 deleted'
    :param type: string

    :param namespace: namespace of the logging message eg. 'repo.comments'
    :param type: string

    """
    logger_name = 'rhodecode.actions' + ('.' + namespace if namespace else '')
    log = logging.getLogger(logger_name)

    # find out who triggered the action, if we can
    user = get_current_rhodecode_user()

    if user:
        emit = log.info
    else:
        user = '<unknown user>'
        emit = log.warning

    emit('Logging action by {}: {}'.format(user, action))
882
883
883
884
def escape_split(text, sep=',', maxsplit=-1):
    r"""
    Allows for escaping of the separator: e.g. arg='foo\, bar'

    It should be noted that the way bash et. al. do command line parsing, those
    single quotes are required.
    """
    escaped = '\\' + sep

    # fast path: nothing is escaped, a plain split does the job
    if escaped not in text:
        return text.split(sep, maxsplit)

    head, _marker, tail = text.partition(escaped)
    pieces = head.split(sep, maxsplit)  # a regular split is fine here
    pending = pieces[-1]
    pieces = pieces[:-1]

    # recurse: the tail may contain further escaped separators
    rest = escape_split(tail, sep, maxsplit)

    # rest[0] is the continuation of the element that was interrupted by
    # the escaped separator; re-join it with a literal separator
    pending = pending + sep + rest[0]

    return pieces + [pending] + rest[1:]
909
910
910
911
class OptionalAttr(object):
    """
    Special Optional Option that defines other attribute. Example::

        def test(apiuser, userid=Optional(OAttr('apiuser')):
            user = Optional.extract(userid)
            # calls

    """

    def __init__(self, attr_name):
        # name of the attribute this marker refers to (read by
        # Optional.getval)
        self.attr_name = attr_name

    def __repr__(self):
        return '<OptionalAttr:%s>' % self.attr_name

    def __call__(self):
        # calling the marker is a no-op that returns the marker itself
        return self


# alias
OAttr = OptionalAttr
933
934
934
935
class Optional(object):
    """
    Defines an optional parameter::

        param = param.getval() if isinstance(param, Optional) else param
        param = param() if isinstance(param, Optional) else param

    is equivalent of::

        param = Optional.extract(param)

    """

    def __init__(self, type_):
        self.type_ = type_

    def __repr__(self):
        return '<Optional:%s>' % repr(self.type_)

    def __call__(self):
        return self.getval()

    def getval(self):
        """
        Return the wrapped value; an OAttr wrapper yields its attribute
        name instead.
        """
        value = self.type_
        if isinstance(value, OAttr):
            # use params name
            return value.attr_name
        return value

    @classmethod
    def extract(cls, val):
        """
        Extracts value from Optional() instance

        :param val:
        :return: original value if it's not Optional instance else
            value of instance
        """
        return val.getval() if isinstance(val, cls) else val
978
979
979
980
def glob2re(pat):
    """
    Translate a shell PATTERN to a regular expression.

    There is no way to quote meta-characters.
    """
    pos, length = 0, len(pat)
    regex = ''
    while pos < length:
        ch = pat[pos]
        pos += 1
        if ch == '*':
            # unlike fnmatch, '*' must not cross path separators
            regex += '[^/]*'
        elif ch == '?':
            regex += '[^/]'
        elif ch == '[':
            # scan ahead for the closing bracket of the character class
            end = pos
            if end < length and pat[end] == '!':
                end += 1
            if end < length and pat[end] == ']':
                end += 1
            while end < length and pat[end] != ']':
                end += 1
            if end >= length:
                # unterminated class: treat '[' as a literal
                regex += '\\['
            else:
                inner = pat[pos:end].replace('\\', '\\\\')
                pos = end + 1
                if inner[0] == '!':
                    # shell negation '!' becomes regex negation '^'
                    inner = '^' + inner[1:]
                elif inner[0] == '^':
                    inner = '\\' + inner
                regex = '%s[%s]' % (regex, inner)
        else:
            regex += re.escape(ch)
    return regex + '\\Z(?ms)'
1019
1020
1020
1021
def parse_byte_string(size_str):
    """
    Convert a human-readable size string like '10MB' or '512kb' to bytes.

    :param size_str: string in ``<num>(MB|KB)`` format, case-insensitive
    :return: size in bytes as an integer
    :raises ValueError: when the string doesn't match the expected format
    """
    match = re.match(r'(\d+)(MB|KB)', size_str, re.IGNORECASE)
    if not match:
        raise ValueError('Given size:%s is invalid, please make sure '
                         'to use format of <num>(MB|KB)' % size_str)

    _parts = match.groups()
    num, type_ = _parts
    # plain int() is sufficient: Python ints have arbitrary precision,
    # so the Python-2-only long() is not needed
    return int(num) * {'mb': 1024*1024, 'kb': 1024}[type_.lower()]
1031
1032
class CachedProperty(object):
    """
    Lazy Attributes. With option to invalidate the cache by running a method

        class Foo():

            @CachedProperty
            def heavy_func():
                return 'super-calculation'

        foo = Foo()
        foo.heavy_func() # first computions
        foo.heavy_func() # fetch from cache
        foo._invalidate_prop_cache('heavy_func')
        # at this point calling foo.heavy_func() will be re-computed
    """

    def __init__(self, func, func_name=None):

        if func_name is None:
            func_name = func.__name__
        self.data = (func, func_name)
        # make the descriptor look like the wrapped function (name, doc)
        update_wrapper(self, func)

    def __get__(self, inst, class_):
        # class-level access returns the descriptor itself
        if inst is None:
            return self

        func, func_name = self.data
        value = func(inst)
        # storing the value in the instance __dict__ shadows this
        # (non-data) descriptor, so later reads skip recomputation
        inst.__dict__[func_name] = value
        if '_invalidate_prop_cache' not in inst.__dict__:
            # bind the invalidation helper to the instance on first access
            inst.__dict__['_invalidate_prop_cache'] = partial(
                self._invalidate_prop_cache, inst)
        return value

    def _invalidate_prop_cache(self, inst, name):
        # dropping the cached entry re-exposes the descriptor, forcing a
        # recomputation on the next attribute access
        inst.__dict__.pop(name, None)
@@ -1,1861 +1,1879 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Base module for all VCS systems
22 Base module for all VCS systems
23 """
23 """
24 import os
24 import os
25 import re
25 import re
26 import time
26 import time
27 import shutil
27 import shutil
28 import datetime
28 import datetime
29 import fnmatch
29 import fnmatch
30 import itertools
30 import itertools
31 import logging
31 import logging
32 import collections
32 import collections
33 import warnings
33 import warnings
34
34
35 from zope.cachedescriptors.property import Lazy as LazyProperty
35 from zope.cachedescriptors.property import Lazy as LazyProperty
36 from zope.cachedescriptors.property import CachedProperty
37
36
38 from pyramid import compat
37 from pyramid import compat
39
38
39 import rhodecode
40 from rhodecode.translation import lazy_ugettext
40 from rhodecode.translation import lazy_ugettext
41 from rhodecode.lib.utils2 import safe_str, safe_unicode
41 from rhodecode.lib.utils2 import safe_str, safe_unicode, CachedProperty
42 from rhodecode.lib.vcs import connection
42 from rhodecode.lib.vcs import connection
43 from rhodecode.lib.vcs.utils import author_name, author_email
43 from rhodecode.lib.vcs.utils import author_name, author_email
44 from rhodecode.lib.vcs.conf import settings
44 from rhodecode.lib.vcs.conf import settings
45 from rhodecode.lib.vcs.exceptions import (
45 from rhodecode.lib.vcs.exceptions import (
46 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
46 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
47 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
47 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
48 NodeDoesNotExistError, NodeNotChangedError, VCSError,
48 NodeDoesNotExistError, NodeNotChangedError, VCSError,
49 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
49 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
50 RepositoryError)
50 RepositoryError)
51
51
52
52
53 log = logging.getLogger(__name__)
53 log = logging.getLogger(__name__)
54
54
55
55
# git-style mode bits for a regular (non-executable) file
FILEMODE_DEFAULT = 0o100644
# git-style mode bits for an executable file
FILEMODE_EXECUTABLE = 0o100755
# sentinel commit id (40 zeros) used to mark the "null"/empty commit
EMPTY_COMMIT_ID = '0' * 40

# lightweight (type, name, commit_id) triple describing a VCS reference
Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
61
61
62
62
class MergeFailureReason(object):
    """
    Enumeration with all the reasons why the server side merge could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The merge was not successful, there are conflicts.
    MERGE_FAILED = 2

    # The merge succeeded but we could not push it to the target repository.
    PUSH_FAILED = 3

    # The specified target is not a head in the target repository.
    TARGET_IS_NOT_HEAD = 4

    # The source repository contains more branches than the target. Pushing
    # the merge will create additional branches in the target.
    HG_SOURCE_HAS_MORE_BRANCHES = 5

    # The target reference has multiple heads. That does not allow to correctly
    # identify the target location. This could only happen for mercurial
    # branches.
    HG_TARGET_HAS_MULTIPLE_HEADS = 6

    # The target repository is locked
    TARGET_IS_LOCKED = 7

    # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
    # An involved commit could not be found.
    _DEPRECATED_MISSING_COMMIT = 8

    # The target repo reference is missing.
    MISSING_TARGET_REF = 9

    # The source repo reference is missing.
    MISSING_SOURCE_REF = 10

    # The merge was not successful, there are conflicts related to sub
    # repositories.
    SUBREPO_MERGE_FAILED = 11
114
114
115
115
class UpdateFailureReason(object):
    """
    Enumeration with all the reasons why the pull request update could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The pull request is up to date.
    NO_CHANGE = 2

    # The pull request has a reference type that is not supported for update.
    WRONG_REF_TYPE = 3

    # Update failed because the target reference is missing.
    MISSING_TARGET_REF = 4

    # Update failed because the source reference is missing.
    MISSING_SOURCE_REF = 5
144
144
145
145
class MergeResponse(object):
    """
    Value object describing the outcome of a server side merge attempt.
    Carries whether the merge is possible/was executed, the resulting merge
    reference, a ``MergeFailureReason`` code and free-form ``metadata`` used
    to interpolate the user-facing status message.
    """

    # uses .format(**metadata) for variables
    MERGE_STATUS_MESSAGES = {
        MergeFailureReason.NONE: lazy_ugettext(
            u'This pull request can be automatically merged.'),
        MergeFailureReason.UNKNOWN: lazy_ugettext(
            u'This pull request cannot be merged because of an unhandled exception. '
            u'{exception}'),
        MergeFailureReason.MERGE_FAILED: lazy_ugettext(
            u'This pull request cannot be merged because of merge conflicts.'),
        MergeFailureReason.PUSH_FAILED: lazy_ugettext(
            u'This pull request could not be merged because push to '
            u'target:`{target}@{merge_commit}` failed.'),
        MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
            u'This pull request cannot be merged because the target '
            u'`{target_ref.name}` is not a head.'),
        MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
            u'This pull request cannot be merged because the source contains '
            u'more branches than the target.'),
        MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
            u'This pull request cannot be merged because the target `{target_ref.name}` '
            u'has multiple heads: `{heads}`.'),
        MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
            u'This pull request cannot be merged because the target repository is '
            u'locked by {locked_by}.'),

        MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            u'This pull request cannot be merged because the target '
            u'reference `{target_ref.name}` is missing.'),
        MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            u'This pull request cannot be merged because the source '
            u'reference `{source_ref.name}` is missing.'),
        MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
            u'This pull request cannot be merged because of conflicts related '
            u'to sub repositories.'),

        # Deprecations
        MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
            u'This pull request cannot be merged because the target or the '
            u'source reference is missing.'),

    }

    def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
        """
        :param possible: bool-like, whether the merge can be performed.
        :param executed: bool-like, whether the merge was actually executed.
        :param merge_ref: reference of the merge result (backend specific).
        :param failure_reason: one of the ``MergeFailureReason`` codes.
        :param metadata: optional dict of values interpolated into the
            status message via ``str.format``.
        """
        self.possible = possible
        self.executed = executed
        self.merge_ref = merge_ref
        self.failure_reason = failure_reason
        self.metadata = metadata or {}

    def __repr__(self):
        return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason)

    def __eq__(self, other):
        # NOTE: merge_ref and metadata intentionally take no part in equality
        same_instance = isinstance(other, self.__class__)
        return same_instance \
            and self.possible == other.possible \
            and self.executed == other.executed \
            and self.failure_reason == other.failure_reason

    @property
    def label(self):
        """Symbolic name of ``failure_reason`` (e.g. ``'MERGE_FAILED'``)."""
        # reverse-map the public MergeFailureReason attributes: value -> name
        label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
                          not k.startswith('_'))
        return label_dict.get(self.failure_reason)

    @property
    def merge_status_message(self):
        """
        Return a human friendly error message for the given merge status code.
        """
        msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason])
        try:
            return msg.format(**self.metadata)
        except Exception:
            # missing metadata keys must not break display; fall back to the
            # raw (un-interpolated) message
            log.exception('Failed to format %s message', self)
            return msg

    def asdict(self):
        """Return a plain dict representation (used for API/JSON output)."""
        data = {}
        for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
                  'merge_status_message']:
            data[k] = getattr(self, k)
        return data
231
231
232
232
233 class BaseRepository(object):
233 class BaseRepository(object):
234 """
234 """
235 Base Repository for final backends
235 Base Repository for final backends
236
236
237 .. attribute:: DEFAULT_BRANCH_NAME
237 .. attribute:: DEFAULT_BRANCH_NAME
238
238
239 name of default branch (i.e. "trunk" for svn, "master" for git etc.
239 name of default branch (i.e. "trunk" for svn, "master" for git etc.
240
240
241 .. attribute:: commit_ids
241 .. attribute:: commit_ids
242
242
243 list of all available commit ids, in ascending order
243 list of all available commit ids, in ascending order
244
244
245 .. attribute:: path
245 .. attribute:: path
246
246
247 absolute path to the repository
247 absolute path to the repository
248
248
249 .. attribute:: bookmarks
249 .. attribute:: bookmarks
250
250
251 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
251 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
252 there are no bookmarks or the backend implementation does not support
252 there are no bookmarks or the backend implementation does not support
253 bookmarks.
253 bookmarks.
254
254
255 .. attribute:: tags
255 .. attribute:: tags
256
256
257 Mapping from name to :term:`Commit ID` of the tag.
257 Mapping from name to :term:`Commit ID` of the tag.
258
258
259 """
259 """
260
260
261 DEFAULT_BRANCH_NAME = None
261 DEFAULT_BRANCH_NAME = None
262 DEFAULT_CONTACT = u"Unknown"
262 DEFAULT_CONTACT = u"Unknown"
263 DEFAULT_DESCRIPTION = u"unknown"
263 DEFAULT_DESCRIPTION = u"unknown"
264 EMPTY_COMMIT_ID = '0' * 40
264 EMPTY_COMMIT_ID = '0' * 40
265
265
266 path = None
266 path = None
267 _commit_ids_ver = 0
267
268 _is_empty = None
269 _commit_ids = {}
268
270
269 def __init__(self, repo_path, config=None, create=False, **kwargs):
271 def __init__(self, repo_path, config=None, create=False, **kwargs):
270 """
272 """
271 Initializes repository. Raises RepositoryError if repository could
273 Initializes repository. Raises RepositoryError if repository could
272 not be find at the given ``repo_path`` or directory at ``repo_path``
274 not be find at the given ``repo_path`` or directory at ``repo_path``
273 exists and ``create`` is set to True.
275 exists and ``create`` is set to True.
274
276
275 :param repo_path: local path of the repository
277 :param repo_path: local path of the repository
276 :param config: repository configuration
278 :param config: repository configuration
277 :param create=False: if set to True, would try to create repository.
279 :param create=False: if set to True, would try to create repository.
278 :param src_url=None: if set, should be proper url from which repository
280 :param src_url=None: if set, should be proper url from which repository
279 would be cloned; requires ``create`` parameter to be set to True -
281 would be cloned; requires ``create`` parameter to be set to True -
280 raises RepositoryError if src_url is set and create evaluates to
282 raises RepositoryError if src_url is set and create evaluates to
281 False
283 False
282 """
284 """
283 raise NotImplementedError
285 raise NotImplementedError
284
286
285 def __repr__(self):
287 def __repr__(self):
286 return '<%s at %s>' % (self.__class__.__name__, self.path)
288 return '<%s at %s>' % (self.__class__.__name__, self.path)
287
289
288 def __len__(self):
290 def __len__(self):
289 return self.count()
291 return self.count()
290
292
291 def __eq__(self, other):
293 def __eq__(self, other):
292 same_instance = isinstance(other, self.__class__)
294 same_instance = isinstance(other, self.__class__)
293 return same_instance and other.path == self.path
295 return same_instance and other.path == self.path
294
296
295 def __ne__(self, other):
297 def __ne__(self, other):
296 return not self.__eq__(other)
298 return not self.__eq__(other)
297
299
298 def get_create_shadow_cache_pr_path(self, db_repo):
300 def get_create_shadow_cache_pr_path(self, db_repo):
299 path = db_repo.cached_diffs_dir
301 path = db_repo.cached_diffs_dir
300 if not os.path.exists(path):
302 if not os.path.exists(path):
301 os.makedirs(path, 0o755)
303 os.makedirs(path, 0o755)
302 return path
304 return path
303
305
304 @classmethod
306 @classmethod
305 def get_default_config(cls, default=None):
307 def get_default_config(cls, default=None):
306 config = Config()
308 config = Config()
307 if default and isinstance(default, list):
309 if default and isinstance(default, list):
308 for section, key, val in default:
310 for section, key, val in default:
309 config.set(section, key, val)
311 config.set(section, key, val)
310 return config
312 return config
311
313
312 @LazyProperty
314 @LazyProperty
313 def _remote(self):
315 def _remote(self):
314 raise NotImplementedError
316 raise NotImplementedError
315
317
316 def _heads(self, branch=None):
318 def _heads(self, branch=None):
317 return []
319 return []
318
320
319 @LazyProperty
321 @LazyProperty
320 def EMPTY_COMMIT(self):
322 def EMPTY_COMMIT(self):
321 return EmptyCommit(self.EMPTY_COMMIT_ID)
323 return EmptyCommit(self.EMPTY_COMMIT_ID)
322
324
323 @LazyProperty
325 @LazyProperty
324 def alias(self):
326 def alias(self):
325 for k, v in settings.BACKENDS.items():
327 for k, v in settings.BACKENDS.items():
326 if v.split('.')[-1] == str(self.__class__.__name__):
328 if v.split('.')[-1] == str(self.__class__.__name__):
327 return k
329 return k
328
330
329 @LazyProperty
331 @LazyProperty
330 def name(self):
332 def name(self):
331 return safe_unicode(os.path.basename(self.path))
333 return safe_unicode(os.path.basename(self.path))
332
334
333 @LazyProperty
335 @LazyProperty
334 def description(self):
336 def description(self):
335 raise NotImplementedError
337 raise NotImplementedError
336
338
337 def refs(self):
339 def refs(self):
338 """
340 """
339 returns a `dict` with branches, bookmarks, tags, and closed_branches
341 returns a `dict` with branches, bookmarks, tags, and closed_branches
340 for this repository
342 for this repository
341 """
343 """
342 return dict(
344 return dict(
343 branches=self.branches,
345 branches=self.branches,
344 branches_closed=self.branches_closed,
346 branches_closed=self.branches_closed,
345 tags=self.tags,
347 tags=self.tags,
346 bookmarks=self.bookmarks
348 bookmarks=self.bookmarks
347 )
349 )
348
350
349 @LazyProperty
351 @LazyProperty
350 def branches(self):
352 def branches(self):
351 """
353 """
352 A `dict` which maps branch names to commit ids.
354 A `dict` which maps branch names to commit ids.
353 """
355 """
354 raise NotImplementedError
356 raise NotImplementedError
355
357
356 @LazyProperty
358 @LazyProperty
357 def branches_closed(self):
359 def branches_closed(self):
358 """
360 """
359 A `dict` which maps tags names to commit ids.
361 A `dict` which maps tags names to commit ids.
360 """
362 """
361 raise NotImplementedError
363 raise NotImplementedError
362
364
363 @LazyProperty
365 @LazyProperty
364 def bookmarks(self):
366 def bookmarks(self):
365 """
367 """
366 A `dict` which maps tags names to commit ids.
368 A `dict` which maps tags names to commit ids.
367 """
369 """
368 raise NotImplementedError
370 raise NotImplementedError
369
371
370 @LazyProperty
372 @LazyProperty
371 def tags(self):
373 def tags(self):
372 """
374 """
373 A `dict` which maps tags names to commit ids.
375 A `dict` which maps tags names to commit ids.
374 """
376 """
375 raise NotImplementedError
377 raise NotImplementedError
376
378
377 @LazyProperty
379 @LazyProperty
378 def size(self):
380 def size(self):
379 """
381 """
380 Returns combined size in bytes for all repository files
382 Returns combined size in bytes for all repository files
381 """
383 """
382 tip = self.get_commit()
384 tip = self.get_commit()
383 return tip.size
385 return tip.size
384
386
385 def size_at_commit(self, commit_id):
387 def size_at_commit(self, commit_id):
386 commit = self.get_commit(commit_id)
388 commit = self.get_commit(commit_id)
387 return commit.size
389 return commit.size
388
390
def _check_for_empty(self):
    """
    Best-effort emptiness probe: a non-empty local ``_commit_ids`` cache
    means the repo has commits; otherwise ask the remote backend to be sure.
    """
    no_commits = len(self._commit_ids) == 0
    if no_commits:
        # check on remote to be sure
        return self._remote.is_empty()
    else:
        return False
398
def is_empty(self):
    """
    Report whether this repository contains no commits.

    Under test runs the check is always performed fresh; in production the
    result is memoized on the instance (``_is_empty``) after the first call.
    """
    # tests must never observe a stale cached value
    if rhodecode.is_test:
        return self._check_for_empty()

    cached = self._is_empty
    if cached is None:
        # cache empty for production, but not tests
        cached = self._check_for_empty()
        self._is_empty = cached
    return cached
391
408
392 @staticmethod
409 @staticmethod
393 def check_url(url, config):
410 def check_url(url, config):
394 """
411 """
395 Function will check given url and try to verify if it's a valid
412 Function will check given url and try to verify if it's a valid
396 link.
413 link.
397 """
414 """
398 raise NotImplementedError
415 raise NotImplementedError
399
416
400 @staticmethod
417 @staticmethod
401 def is_valid_repository(path):
418 def is_valid_repository(path):
402 """
419 """
403 Check if given `path` contains a valid repository of this backend
420 Check if given `path` contains a valid repository of this backend
404 """
421 """
405 raise NotImplementedError
422 raise NotImplementedError
406
423
407 # ==========================================================================
424 # ==========================================================================
408 # COMMITS
425 # COMMITS
409 # ==========================================================================
426 # ==========================================================================
410
427
@CachedProperty
def commit_ids(self):
    """
    List of all commit ids in this repository, in ascending order.
    Computed once and memoized by ``CachedProperty``; refreshed via
    ``_invalidate_prop_cache('commit_ids')``. Backends must implement this.
    """
    raise NotImplementedError
414
431
def append_commit_id(self, commit_id):
    """
    Register a newly created ``commit_id`` in the cached commit id list.
    """
    if commit_id not in self.commit_ids:
        self._rebuild_cache(self.commit_ids + [commit_id])
        # clear cache so the CachedProperty re-computes commit_ids on next access
        self._invalidate_prop_cache('commit_ids')
437
420 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
438 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
421 """
439 """
422 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
440 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
423 are both None, most recent commit is returned.
441 are both None, most recent commit is returned.
424
442
425 :param pre_load: Optional. List of commit attributes to load.
443 :param pre_load: Optional. List of commit attributes to load.
426
444
427 :raises ``EmptyRepositoryError``: if there are no commits
445 :raises ``EmptyRepositoryError``: if there are no commits
428 """
446 """
429 raise NotImplementedError
447 raise NotImplementedError
430
448
431 def __iter__(self):
449 def __iter__(self):
432 for commit_id in self.commit_ids:
450 for commit_id in self.commit_ids:
433 yield self.get_commit(commit_id=commit_id)
451 yield self.get_commit(commit_id=commit_id)
434
452
435 def get_commits(
453 def get_commits(
436 self, start_id=None, end_id=None, start_date=None, end_date=None,
454 self, start_id=None, end_id=None, start_date=None, end_date=None,
437 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
455 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
438 """
456 """
439 Returns iterator of `BaseCommit` objects from start to end
457 Returns iterator of `BaseCommit` objects from start to end
440 not inclusive. This should behave just like a list, ie. end is not
458 not inclusive. This should behave just like a list, ie. end is not
441 inclusive.
459 inclusive.
442
460
443 :param start_id: None or str, must be a valid commit id
461 :param start_id: None or str, must be a valid commit id
444 :param end_id: None or str, must be a valid commit id
462 :param end_id: None or str, must be a valid commit id
445 :param start_date:
463 :param start_date:
446 :param end_date:
464 :param end_date:
447 :param branch_name:
465 :param branch_name:
448 :param show_hidden:
466 :param show_hidden:
449 :param pre_load:
467 :param pre_load:
450 :param translate_tags:
468 :param translate_tags:
451 """
469 """
452 raise NotImplementedError
470 raise NotImplementedError
453
471
def __getitem__(self, key):
    """
    Index or slice based access to the commit objects of this repository,
    e.g. ``repo[5]`` or ``repo[2:10]``.
    """
    attrs_to_load = ["author", "branch", "date", "message", "parents"]
    if not isinstance(key, slice):
        return self.get_commit(commit_idx=key, pre_load=attrs_to_load)
    return self._get_range(key, pre_load=attrs_to_load)
462
480
463 def _get_range(self, slice_obj, pre_load):
481 def _get_range(self, slice_obj, pre_load):
464 for commit_id in self.commit_ids.__getitem__(slice_obj):
482 for commit_id in self.commit_ids.__getitem__(slice_obj):
465 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
483 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
466
484
def count(self):
    """
    Return the total number of commits in this repository (backs ``__len__``).
    """
    return len(self.commit_ids)
469
487
470 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
488 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
471 """
489 """
472 Creates and returns a tag for the given ``commit_id``.
490 Creates and returns a tag for the given ``commit_id``.
473
491
474 :param name: name for new tag
492 :param name: name for new tag
475 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
493 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
476 :param commit_id: commit id for which new tag would be created
494 :param commit_id: commit id for which new tag would be created
477 :param message: message of the tag's commit
495 :param message: message of the tag's commit
478 :param date: date of tag's commit
496 :param date: date of tag's commit
479
497
480 :raises TagAlreadyExistError: if tag with same name already exists
498 :raises TagAlreadyExistError: if tag with same name already exists
481 """
499 """
482 raise NotImplementedError
500 raise NotImplementedError
483
501
484 def remove_tag(self, name, user, message=None, date=None):
502 def remove_tag(self, name, user, message=None, date=None):
485 """
503 """
486 Removes tag with the given ``name``.
504 Removes tag with the given ``name``.
487
505
488 :param name: name of the tag to be removed
506 :param name: name of the tag to be removed
489 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
507 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
490 :param message: message of the tag's removal commit
508 :param message: message of the tag's removal commit
491 :param date: date of tag's removal commit
509 :param date: date of tag's removal commit
492
510
493 :raises TagDoesNotExistError: if tag with given name does not exists
511 :raises TagDoesNotExistError: if tag with given name does not exists
494 """
512 """
495 raise NotImplementedError
513 raise NotImplementedError
496
514
497 def get_diff(
515 def get_diff(
498 self, commit1, commit2, path=None, ignore_whitespace=False,
516 self, commit1, commit2, path=None, ignore_whitespace=False,
499 context=3, path1=None):
517 context=3, path1=None):
500 """
518 """
501 Returns (git like) *diff*, as plain text. Shows changes introduced by
519 Returns (git like) *diff*, as plain text. Shows changes introduced by
502 `commit2` since `commit1`.
520 `commit2` since `commit1`.
503
521
504 :param commit1: Entry point from which diff is shown. Can be
522 :param commit1: Entry point from which diff is shown. Can be
505 ``self.EMPTY_COMMIT`` - in this case, patch showing all
523 ``self.EMPTY_COMMIT`` - in this case, patch showing all
506 the changes since empty state of the repository until `commit2`
524 the changes since empty state of the repository until `commit2`
507 :param commit2: Until which commit changes should be shown.
525 :param commit2: Until which commit changes should be shown.
508 :param path: Can be set to a path of a file to create a diff of that
526 :param path: Can be set to a path of a file to create a diff of that
509 file. If `path1` is also set, this value is only associated to
527 file. If `path1` is also set, this value is only associated to
510 `commit2`.
528 `commit2`.
511 :param ignore_whitespace: If set to ``True``, would not show whitespace
529 :param ignore_whitespace: If set to ``True``, would not show whitespace
512 changes. Defaults to ``False``.
530 changes. Defaults to ``False``.
513 :param context: How many lines before/after changed lines should be
531 :param context: How many lines before/after changed lines should be
514 shown. Defaults to ``3``.
532 shown. Defaults to ``3``.
515 :param path1: Can be set to a path to associate with `commit1`. This
533 :param path1: Can be set to a path to associate with `commit1`. This
516 parameter works only for backends which support diff generation for
534 parameter works only for backends which support diff generation for
517 different paths. Other backends will raise a `ValueError` if `path1`
535 different paths. Other backends will raise a `ValueError` if `path1`
518 is set and has a different value than `path`.
536 is set and has a different value than `path`.
519 :param file_path: filter this diff by given path pattern
537 :param file_path: filter this diff by given path pattern
520 """
538 """
521 raise NotImplementedError
539 raise NotImplementedError
522
540
523 def strip(self, commit_id, branch=None):
541 def strip(self, commit_id, branch=None):
524 """
542 """
525 Strip given commit_id from the repository
543 Strip given commit_id from the repository
526 """
544 """
527 raise NotImplementedError
545 raise NotImplementedError
528
546
529 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
547 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
530 """
548 """
531 Return a latest common ancestor commit if one exists for this repo
549 Return a latest common ancestor commit if one exists for this repo
532 `commit_id1` vs `commit_id2` from `repo2`.
550 `commit_id1` vs `commit_id2` from `repo2`.
533
551
534 :param commit_id1: Commit id from this repository to use as a
552 :param commit_id1: Commit id from this repository to use as a
535 target for the comparison.
553 target for the comparison.
536 :param commit_id2: Source commit id to use for comparison.
554 :param commit_id2: Source commit id to use for comparison.
537 :param repo2: Source repository to use for comparison.
555 :param repo2: Source repository to use for comparison.
538 """
556 """
539 raise NotImplementedError
557 raise NotImplementedError
540
558
541 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
559 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
542 """
560 """
543 Compare this repository's revision `commit_id1` with `commit_id2`.
561 Compare this repository's revision `commit_id1` with `commit_id2`.
544
562
545 Returns a tuple(commits, ancestor) that would be merged from
563 Returns a tuple(commits, ancestor) that would be merged from
546 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
564 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
547 will be returned as ancestor.
565 will be returned as ancestor.
548
566
549 :param commit_id1: Commit id from this repository to use as a
567 :param commit_id1: Commit id from this repository to use as a
550 target for the comparison.
568 target for the comparison.
551 :param commit_id2: Source commit id to use for comparison.
569 :param commit_id2: Source commit id to use for comparison.
552 :param repo2: Source repository to use for comparison.
570 :param repo2: Source repository to use for comparison.
553 :param merge: If set to ``True`` will do a merge compare which also
571 :param merge: If set to ``True`` will do a merge compare which also
554 returns the common ancestor.
572 returns the common ancestor.
555 :param pre_load: Optional. List of commit attributes to load.
573 :param pre_load: Optional. List of commit attributes to load.
556 """
574 """
557 raise NotImplementedError
575 raise NotImplementedError
558
576
559 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
577 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
560 user_name='', user_email='', message='', dry_run=False,
578 user_name='', user_email='', message='', dry_run=False,
561 use_rebase=False, close_branch=False):
579 use_rebase=False, close_branch=False):
562 """
580 """
563 Merge the revisions specified in `source_ref` from `source_repo`
581 Merge the revisions specified in `source_ref` from `source_repo`
564 onto the `target_ref` of this repository.
582 onto the `target_ref` of this repository.
565
583
566 `source_ref` and `target_ref` are named tuples with the following
584 `source_ref` and `target_ref` are named tuples with the following
567 fields `type`, `name` and `commit_id`.
585 fields `type`, `name` and `commit_id`.
568
586
569 Returns a MergeResponse named tuple with the following fields
587 Returns a MergeResponse named tuple with the following fields
570 'possible', 'executed', 'source_commit', 'target_commit',
588 'possible', 'executed', 'source_commit', 'target_commit',
571 'merge_commit'.
589 'merge_commit'.
572
590
573 :param repo_id: `repo_id` target repo id.
591 :param repo_id: `repo_id` target repo id.
574 :param workspace_id: `workspace_id` unique identifier.
592 :param workspace_id: `workspace_id` unique identifier.
575 :param target_ref: `target_ref` points to the commit on top of which
593 :param target_ref: `target_ref` points to the commit on top of which
576 the `source_ref` should be merged.
594 the `source_ref` should be merged.
577 :param source_repo: The repository that contains the commits to be
595 :param source_repo: The repository that contains the commits to be
578 merged.
596 merged.
579 :param source_ref: `source_ref` points to the topmost commit from
597 :param source_ref: `source_ref` points to the topmost commit from
580 the `source_repo` which should be merged.
598 the `source_repo` which should be merged.
581 :param user_name: Merge commit `user_name`.
599 :param user_name: Merge commit `user_name`.
582 :param user_email: Merge commit `user_email`.
600 :param user_email: Merge commit `user_email`.
583 :param message: Merge commit `message`.
601 :param message: Merge commit `message`.
584 :param dry_run: If `True` the merge will not take place.
602 :param dry_run: If `True` the merge will not take place.
585 :param use_rebase: If `True` commits from the source will be rebased
603 :param use_rebase: If `True` commits from the source will be rebased
586 on top of the target instead of being merged.
604 on top of the target instead of being merged.
587 :param close_branch: If `True` branch will be closed before merging it
605 :param close_branch: If `True` branch will be closed before merging it
588 """
606 """
589 if dry_run:
607 if dry_run:
590 message = message or settings.MERGE_DRY_RUN_MESSAGE
608 message = message or settings.MERGE_DRY_RUN_MESSAGE
591 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
609 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
592 user_name = user_name or settings.MERGE_DRY_RUN_USER
610 user_name = user_name or settings.MERGE_DRY_RUN_USER
593 else:
611 else:
594 if not user_name:
612 if not user_name:
595 raise ValueError('user_name cannot be empty')
613 raise ValueError('user_name cannot be empty')
596 if not user_email:
614 if not user_email:
597 raise ValueError('user_email cannot be empty')
615 raise ValueError('user_email cannot be empty')
598 if not message:
616 if not message:
599 raise ValueError('message cannot be empty')
617 raise ValueError('message cannot be empty')
600
618
601 try:
619 try:
602 return self._merge_repo(
620 return self._merge_repo(
603 repo_id, workspace_id, target_ref, source_repo,
621 repo_id, workspace_id, target_ref, source_repo,
604 source_ref, message, user_name, user_email, dry_run=dry_run,
622 source_ref, message, user_name, user_email, dry_run=dry_run,
605 use_rebase=use_rebase, close_branch=close_branch)
623 use_rebase=use_rebase, close_branch=close_branch)
606 except RepositoryError as exc:
624 except RepositoryError as exc:
607 log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
625 log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
608 return MergeResponse(
626 return MergeResponse(
609 False, False, None, MergeFailureReason.UNKNOWN,
627 False, False, None, MergeFailureReason.UNKNOWN,
610 metadata={'exception': str(exc)})
628 metadata={'exception': str(exc)})
611
629
612 def _merge_repo(self, repo_id, workspace_id, target_ref,
630 def _merge_repo(self, repo_id, workspace_id, target_ref,
613 source_repo, source_ref, merge_message,
631 source_repo, source_ref, merge_message,
614 merger_name, merger_email, dry_run=False,
632 merger_name, merger_email, dry_run=False,
615 use_rebase=False, close_branch=False):
633 use_rebase=False, close_branch=False):
616 """Internal implementation of merge."""
634 """Internal implementation of merge."""
617 raise NotImplementedError
635 raise NotImplementedError
618
636
619 def _maybe_prepare_merge_workspace(
637 def _maybe_prepare_merge_workspace(
620 self, repo_id, workspace_id, target_ref, source_ref):
638 self, repo_id, workspace_id, target_ref, source_ref):
621 """
639 """
622 Create the merge workspace.
640 Create the merge workspace.
623
641
624 :param workspace_id: `workspace_id` unique identifier.
642 :param workspace_id: `workspace_id` unique identifier.
625 """
643 """
626 raise NotImplementedError
644 raise NotImplementedError
627
645
628 def _get_legacy_shadow_repository_path(self, workspace_id):
646 def _get_legacy_shadow_repository_path(self, workspace_id):
629 """
647 """
630 Legacy version that was used before. We still need it for
648 Legacy version that was used before. We still need it for
631 backward compat
649 backward compat
632 """
650 """
633 return os.path.join(
651 return os.path.join(
634 os.path.dirname(self.path),
652 os.path.dirname(self.path),
635 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
653 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
636
654
637 def _get_shadow_repository_path(self, repo_id, workspace_id):
655 def _get_shadow_repository_path(self, repo_id, workspace_id):
638 # The name of the shadow repository must start with '.', so it is
656 # The name of the shadow repository must start with '.', so it is
639 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
657 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
640 legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id)
658 legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id)
641 if os.path.exists(legacy_repository_path):
659 if os.path.exists(legacy_repository_path):
642 return legacy_repository_path
660 return legacy_repository_path
643 else:
661 else:
644 return os.path.join(
662 return os.path.join(
645 os.path.dirname(self.path),
663 os.path.dirname(self.path),
646 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
664 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
647
665
648 def cleanup_merge_workspace(self, repo_id, workspace_id):
666 def cleanup_merge_workspace(self, repo_id, workspace_id):
649 """
667 """
650 Remove merge workspace.
668 Remove merge workspace.
651
669
652 This function MUST not fail in case there is no workspace associated to
670 This function MUST not fail in case there is no workspace associated to
653 the given `workspace_id`.
671 the given `workspace_id`.
654
672
655 :param workspace_id: `workspace_id` unique identifier.
673 :param workspace_id: `workspace_id` unique identifier.
656 """
674 """
657 shadow_repository_path = self._get_shadow_repository_path(repo_id, workspace_id)
675 shadow_repository_path = self._get_shadow_repository_path(repo_id, workspace_id)
658 shadow_repository_path_del = '{}.{}.delete'.format(
676 shadow_repository_path_del = '{}.{}.delete'.format(
659 shadow_repository_path, time.time())
677 shadow_repository_path, time.time())
660
678
661 # move the shadow repo, so it never conflicts with the one used.
679 # move the shadow repo, so it never conflicts with the one used.
662 # we use this method because shutil.rmtree had some edge case problems
680 # we use this method because shutil.rmtree had some edge case problems
663 # removing symlinked repositories
681 # removing symlinked repositories
664 if not os.path.isdir(shadow_repository_path):
682 if not os.path.isdir(shadow_repository_path):
665 return
683 return
666
684
667 shutil.move(shadow_repository_path, shadow_repository_path_del)
685 shutil.move(shadow_repository_path, shadow_repository_path_del)
668 try:
686 try:
669 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
687 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
670 except Exception:
688 except Exception:
671 log.exception('Failed to gracefully remove shadow repo under %s',
689 log.exception('Failed to gracefully remove shadow repo under %s',
672 shadow_repository_path_del)
690 shadow_repository_path_del)
673 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
691 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
674
692
675 # ========== #
693 # ========== #
676 # COMMIT API #
694 # COMMIT API #
677 # ========== #
695 # ========== #
678
696
679 @LazyProperty
697 @LazyProperty
680 def in_memory_commit(self):
698 def in_memory_commit(self):
681 """
699 """
682 Returns :class:`InMemoryCommit` object for this repository.
700 Returns :class:`InMemoryCommit` object for this repository.
683 """
701 """
684 raise NotImplementedError
702 raise NotImplementedError
685
703
686 # ======================== #
704 # ======================== #
687 # UTILITIES FOR SUBCLASSES #
705 # UTILITIES FOR SUBCLASSES #
688 # ======================== #
706 # ======================== #
689
707
690 def _validate_diff_commits(self, commit1, commit2):
708 def _validate_diff_commits(self, commit1, commit2):
691 """
709 """
692 Validates that the given commits are related to this repository.
710 Validates that the given commits are related to this repository.
693
711
694 Intended as a utility for sub classes to have a consistent validation
712 Intended as a utility for sub classes to have a consistent validation
695 of input parameters in methods like :meth:`get_diff`.
713 of input parameters in methods like :meth:`get_diff`.
696 """
714 """
697 self._validate_commit(commit1)
715 self._validate_commit(commit1)
698 self._validate_commit(commit2)
716 self._validate_commit(commit2)
699 if (isinstance(commit1, EmptyCommit) and
717 if (isinstance(commit1, EmptyCommit) and
700 isinstance(commit2, EmptyCommit)):
718 isinstance(commit2, EmptyCommit)):
701 raise ValueError("Cannot compare two empty commits")
719 raise ValueError("Cannot compare two empty commits")
702
720
703 def _validate_commit(self, commit):
721 def _validate_commit(self, commit):
704 if not isinstance(commit, BaseCommit):
722 if not isinstance(commit, BaseCommit):
705 raise TypeError(
723 raise TypeError(
706 "%s is not of type BaseCommit" % repr(commit))
724 "%s is not of type BaseCommit" % repr(commit))
707 if commit.repository != self and not isinstance(commit, EmptyCommit):
725 if commit.repository != self and not isinstance(commit, EmptyCommit):
708 raise ValueError(
726 raise ValueError(
709 "Commit %s must be a valid commit from this repository %s, "
727 "Commit %s must be a valid commit from this repository %s, "
710 "related to this repository instead %s." %
728 "related to this repository instead %s." %
711 (commit, self, commit.repository))
729 (commit, self, commit.repository))
712
730
713 def _validate_commit_id(self, commit_id):
731 def _validate_commit_id(self, commit_id):
714 if not isinstance(commit_id, compat.string_types):
732 if not isinstance(commit_id, compat.string_types):
715 raise TypeError("commit_id must be a string value")
733 raise TypeError("commit_id must be a string value")
716
734
717 def _validate_commit_idx(self, commit_idx):
735 def _validate_commit_idx(self, commit_idx):
718 if not isinstance(commit_idx, (int, long)):
736 if not isinstance(commit_idx, (int, long)):
719 raise TypeError("commit_idx must be a numeric value")
737 raise TypeError("commit_idx must be a numeric value")
720
738
721 def _validate_branch_name(self, branch_name):
739 def _validate_branch_name(self, branch_name):
722 if branch_name and branch_name not in self.branches_all:
740 if branch_name and branch_name not in self.branches_all:
723 msg = ("Branch %s not found in %s" % (branch_name, self))
741 msg = ("Branch %s not found in %s" % (branch_name, self))
724 raise BranchDoesNotExistError(msg)
742 raise BranchDoesNotExistError(msg)
725
743
726 #
744 #
727 # Supporting deprecated API parts
745 # Supporting deprecated API parts
728 # TODO: johbo: consider to move this into a mixin
746 # TODO: johbo: consider to move this into a mixin
729 #
747 #
730
748
731 @property
749 @property
732 def EMPTY_CHANGESET(self):
750 def EMPTY_CHANGESET(self):
733 warnings.warn(
751 warnings.warn(
734 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
752 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
735 return self.EMPTY_COMMIT_ID
753 return self.EMPTY_COMMIT_ID
736
754
737 @property
755 @property
738 def revisions(self):
756 def revisions(self):
739 warnings.warn("Use commits attribute instead", DeprecationWarning)
757 warnings.warn("Use commits attribute instead", DeprecationWarning)
740 return self.commit_ids
758 return self.commit_ids
741
759
742 @revisions.setter
760 @revisions.setter
743 def revisions(self, value):
761 def revisions(self, value):
744 warnings.warn("Use commits attribute instead", DeprecationWarning)
762 warnings.warn("Use commits attribute instead", DeprecationWarning)
745 self.commit_ids = value
763 self.commit_ids = value
746
764
747 def get_changeset(self, revision=None, pre_load=None):
765 def get_changeset(self, revision=None, pre_load=None):
748 warnings.warn("Use get_commit instead", DeprecationWarning)
766 warnings.warn("Use get_commit instead", DeprecationWarning)
749 commit_id = None
767 commit_id = None
750 commit_idx = None
768 commit_idx = None
751 if isinstance(revision, compat.string_types):
769 if isinstance(revision, compat.string_types):
752 commit_id = revision
770 commit_id = revision
753 else:
771 else:
754 commit_idx = revision
772 commit_idx = revision
755 return self.get_commit(
773 return self.get_commit(
756 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
774 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
757
775
758 def get_changesets(
776 def get_changesets(
759 self, start=None, end=None, start_date=None, end_date=None,
777 self, start=None, end=None, start_date=None, end_date=None,
760 branch_name=None, pre_load=None):
778 branch_name=None, pre_load=None):
761 warnings.warn("Use get_commits instead", DeprecationWarning)
779 warnings.warn("Use get_commits instead", DeprecationWarning)
762 start_id = self._revision_to_commit(start)
780 start_id = self._revision_to_commit(start)
763 end_id = self._revision_to_commit(end)
781 end_id = self._revision_to_commit(end)
764 return self.get_commits(
782 return self.get_commits(
765 start_id=start_id, end_id=end_id, start_date=start_date,
783 start_id=start_id, end_id=end_id, start_date=start_date,
766 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
784 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
767
785
768 def _revision_to_commit(self, revision):
786 def _revision_to_commit(self, revision):
769 """
787 """
770 Translates a revision to a commit_id
788 Translates a revision to a commit_id
771
789
772 Helps to support the old changeset based API which allows to use
790 Helps to support the old changeset based API which allows to use
773 commit ids and commit indices interchangeable.
791 commit ids and commit indices interchangeable.
774 """
792 """
775 if revision is None:
793 if revision is None:
776 return revision
794 return revision
777
795
778 if isinstance(revision, compat.string_types):
796 if isinstance(revision, compat.string_types):
779 commit_id = revision
797 commit_id = revision
780 else:
798 else:
781 commit_id = self.commit_ids[revision]
799 commit_id = self.commit_ids[revision]
782 return commit_id
800 return commit_id
783
801
784 @property
802 @property
785 def in_memory_changeset(self):
803 def in_memory_changeset(self):
786 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
804 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
787 return self.in_memory_commit
805 return self.in_memory_commit
788
806
789 def get_path_permissions(self, username):
807 def get_path_permissions(self, username):
790 """
808 """
791 Returns a path permission checker or None if not supported
809 Returns a path permission checker or None if not supported
792
810
793 :param username: session user name
811 :param username: session user name
794 :return: an instance of BasePathPermissionChecker or None
812 :return: an instance of BasePathPermissionChecker or None
795 """
813 """
796 return None
814 return None
797
815
798 def install_hooks(self, force=False):
816 def install_hooks(self, force=False):
799 return self._remote.install_hooks(force)
817 return self._remote.install_hooks(force)
800
818
801 def get_hooks_info(self):
819 def get_hooks_info(self):
802 return self._remote.get_hooks_info()
820 return self._remote.get_hooks_info()
803
821
804
822
805 class BaseCommit(object):
823 class BaseCommit(object):
806 """
824 """
807 Each backend should implement its commit representation.
825 Each backend should implement its commit representation.
808
826
809 **Attributes**
827 **Attributes**
810
828
811 ``repository``
829 ``repository``
812 repository object within which commit exists
830 repository object within which commit exists
813
831
814 ``id``
832 ``id``
815 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
833 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
816 just ``tip``.
834 just ``tip``.
817
835
818 ``raw_id``
836 ``raw_id``
819 raw commit representation (i.e. full 40 length sha for git
837 raw commit representation (i.e. full 40 length sha for git
820 backend)
838 backend)
821
839
822 ``short_id``
840 ``short_id``
823 shortened (if applicable) version of ``raw_id``; it would be a simple
841 shortened (if applicable) version of ``raw_id``; it would be a simple
824 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
842 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
825 as ``raw_id`` for subversion
843 as ``raw_id`` for subversion
826
844
827 ``idx``
845 ``idx``
828 commit index
846 commit index
829
847
830 ``files``
848 ``files``
831 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
849 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
832
850
833 ``dirs``
851 ``dirs``
834 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
852 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
835
853
836 ``nodes``
854 ``nodes``
837 combined list of ``Node`` objects
855 combined list of ``Node`` objects
838
856
839 ``author``
857 ``author``
840 author of the commit, as unicode
858 author of the commit, as unicode
841
859
842 ``message``
860 ``message``
843 message of the commit, as unicode
861 message of the commit, as unicode
844
862
845 ``parents``
863 ``parents``
846 list of parent commits
864 list of parent commits
847
865
848 """
866 """
849
867
850 branch = None
868 branch = None
851 """
869 """
852 Depending on the backend this should be set to the branch name of the
870 Depending on the backend this should be set to the branch name of the
853 commit. Backends not supporting branches on commits should leave this
871 commit. Backends not supporting branches on commits should leave this
854 value as ``None``.
872 value as ``None``.
855 """
873 """
856
874
857 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
875 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
858 """
876 """
859 This template is used to generate a default prefix for repository archives
877 This template is used to generate a default prefix for repository archives
860 if no prefix has been specified.
878 if no prefix has been specified.
861 """
879 """
862
880
863 def __str__(self):
881 def __str__(self):
864 return '<%s at %s:%s>' % (
882 return '<%s at %s:%s>' % (
865 self.__class__.__name__, self.idx, self.short_id)
883 self.__class__.__name__, self.idx, self.short_id)
866
884
867 def __repr__(self):
885 def __repr__(self):
868 return self.__str__()
886 return self.__str__()
869
887
870 def __unicode__(self):
888 def __unicode__(self):
871 return u'%s:%s' % (self.idx, self.short_id)
889 return u'%s:%s' % (self.idx, self.short_id)
872
890
873 def __eq__(self, other):
891 def __eq__(self, other):
874 same_instance = isinstance(other, self.__class__)
892 same_instance = isinstance(other, self.__class__)
875 return same_instance and self.raw_id == other.raw_id
893 return same_instance and self.raw_id == other.raw_id
876
894
877 def __json__(self):
895 def __json__(self):
878 parents = []
896 parents = []
879 try:
897 try:
880 for parent in self.parents:
898 for parent in self.parents:
881 parents.append({'raw_id': parent.raw_id})
899 parents.append({'raw_id': parent.raw_id})
882 except NotImplementedError:
900 except NotImplementedError:
883 # empty commit doesn't have parents implemented
901 # empty commit doesn't have parents implemented
884 pass
902 pass
885
903
886 return {
904 return {
887 'short_id': self.short_id,
905 'short_id': self.short_id,
888 'raw_id': self.raw_id,
906 'raw_id': self.raw_id,
889 'revision': self.idx,
907 'revision': self.idx,
890 'message': self.message,
908 'message': self.message,
891 'date': self.date,
909 'date': self.date,
892 'author': self.author,
910 'author': self.author,
893 'parents': parents,
911 'parents': parents,
894 'branch': self.branch
912 'branch': self.branch
895 }
913 }
896
914
897 def __getstate__(self):
915 def __getstate__(self):
898 d = self.__dict__.copy()
916 d = self.__dict__.copy()
899 d.pop('_remote', None)
917 d.pop('_remote', None)
900 d.pop('repository', None)
918 d.pop('repository', None)
901 return d
919 return d
902
920
903 def _get_refs(self):
921 def _get_refs(self):
904 return {
922 return {
905 'branches': [self.branch] if self.branch else [],
923 'branches': [self.branch] if self.branch else [],
906 'bookmarks': getattr(self, 'bookmarks', []),
924 'bookmarks': getattr(self, 'bookmarks', []),
907 'tags': self.tags
925 'tags': self.tags
908 }
926 }
909
927
910 @LazyProperty
928 @LazyProperty
911 def last(self):
929 def last(self):
912 """
930 """
913 ``True`` if this is last commit in repository, ``False``
931 ``True`` if this is last commit in repository, ``False``
914 otherwise; trying to access this attribute while there is no
932 otherwise; trying to access this attribute while there is no
915 commits would raise `EmptyRepositoryError`
933 commits would raise `EmptyRepositoryError`
916 """
934 """
917 if self.repository is None:
935 if self.repository is None:
918 raise CommitError("Cannot check if it's most recent commit")
936 raise CommitError("Cannot check if it's most recent commit")
919 return self.raw_id == self.repository.commit_ids[-1]
937 return self.raw_id == self.repository.commit_ids[-1]
920
938
921 @LazyProperty
939 @LazyProperty
922 def parents(self):
940 def parents(self):
923 """
941 """
924 Returns list of parent commits.
942 Returns list of parent commits.
925 """
943 """
926 raise NotImplementedError
944 raise NotImplementedError
927
945
928 @LazyProperty
946 @LazyProperty
929 def first_parent(self):
947 def first_parent(self):
930 """
948 """
931 Returns the first parent commit, or an ``EmptyCommit`` if there are no parents.
949 Returns the first parent commit, or an ``EmptyCommit`` if there are no parents.
932 """
950 """
933 return self.parents[0] if self.parents else EmptyCommit()
951 return self.parents[0] if self.parents else EmptyCommit()
934
952
935 @property
953 @property
936 def merge(self):
954 def merge(self):
937 """
955 """
938 Returns boolean if commit is a merge.
956 Returns boolean if commit is a merge.
939 """
957 """
940 return len(self.parents) > 1
958 return len(self.parents) > 1
941
959
942 @LazyProperty
960 @LazyProperty
943 def children(self):
961 def children(self):
944 """
962 """
945 Returns list of child commits.
963 Returns list of child commits.
946 """
964 """
947 raise NotImplementedError
965 raise NotImplementedError
948
966
949 @LazyProperty
967 @LazyProperty
950 def id(self):
968 def id(self):
951 """
969 """
952 Returns string identifying this commit.
970 Returns string identifying this commit.
953 """
971 """
954 raise NotImplementedError
972 raise NotImplementedError
955
973
956 @LazyProperty
974 @LazyProperty
957 def raw_id(self):
975 def raw_id(self):
958 """
976 """
959 Returns raw string identifying this commit.
977 Returns raw string identifying this commit.
960 """
978 """
961 raise NotImplementedError
979 raise NotImplementedError
962
980
963 @LazyProperty
981 @LazyProperty
964 def short_id(self):
982 def short_id(self):
965 """
983 """
966 Returns shortened version of ``raw_id`` attribute, as string,
984 Returns shortened version of ``raw_id`` attribute, as string,
967 identifying this commit, useful for presentation to users.
985 identifying this commit, useful for presentation to users.
968 """
986 """
969 raise NotImplementedError
987 raise NotImplementedError
970
988
971 @LazyProperty
989 @LazyProperty
972 def idx(self):
990 def idx(self):
973 """
991 """
974 Returns integer identifying this commit.
992 Returns integer identifying this commit.
975 """
993 """
976 raise NotImplementedError
994 raise NotImplementedError
977
995
978 @LazyProperty
996 @LazyProperty
979 def committer(self):
997 def committer(self):
980 """
998 """
981 Returns committer for this commit
999 Returns committer for this commit
982 """
1000 """
983 raise NotImplementedError
1001 raise NotImplementedError
984
1002
985 @LazyProperty
1003 @LazyProperty
986 def committer_name(self):
1004 def committer_name(self):
987 """
1005 """
988 Returns committer name for this commit
1006 Returns committer name for this commit
989 """
1007 """
990
1008
991 return author_name(self.committer)
1009 return author_name(self.committer)
992
1010
993 @LazyProperty
1011 @LazyProperty
994 def committer_email(self):
1012 def committer_email(self):
995 """
1013 """
996 Returns committer email address for this commit
1014 Returns committer email address for this commit
997 """
1015 """
998
1016
999 return author_email(self.committer)
1017 return author_email(self.committer)
1000
1018
1001 @LazyProperty
1019 @LazyProperty
1002 def author(self):
1020 def author(self):
1003 """
1021 """
1004 Returns author for this commit
1022 Returns author for this commit
1005 """
1023 """
1006
1024
1007 raise NotImplementedError
1025 raise NotImplementedError
1008
1026
1009 @LazyProperty
1027 @LazyProperty
1010 def author_name(self):
1028 def author_name(self):
1011 """
1029 """
1012 Returns author name for this commit
1030 Returns author name for this commit
1013 """
1031 """
1014
1032
1015 return author_name(self.author)
1033 return author_name(self.author)
1016
1034
1017 @LazyProperty
1035 @LazyProperty
1018 def author_email(self):
1036 def author_email(self):
1019 """
1037 """
1020 Returns author email address for this commit
1038 Returns author email address for this commit
1021 """
1039 """
1022
1040
1023 return author_email(self.author)
1041 return author_email(self.author)
1024
1042
1025 def get_file_mode(self, path):
1043 def get_file_mode(self, path):
1026 """
1044 """
1027 Returns stat mode of the file at `path`.
1045 Returns stat mode of the file at `path`.
1028 """
1046 """
1029 raise NotImplementedError
1047 raise NotImplementedError
1030
1048
1031 def is_link(self, path):
1049 def is_link(self, path):
1032 """
1050 """
1033 Returns ``True`` if given `path` is a symlink
1051 Returns ``True`` if given `path` is a symlink
1034 """
1052 """
1035 raise NotImplementedError
1053 raise NotImplementedError
1036
1054
1037 def get_file_content(self, path):
1055 def get_file_content(self, path):
1038 """
1056 """
1039 Returns content of the file at the given `path`.
1057 Returns content of the file at the given `path`.
1040 """
1058 """
1041 raise NotImplementedError
1059 raise NotImplementedError
1042
1060
1043 def get_file_size(self, path):
1061 def get_file_size(self, path):
1044 """
1062 """
1045 Returns size of the file at the given `path`.
1063 Returns size of the file at the given `path`.
1046 """
1064 """
1047 raise NotImplementedError
1065 raise NotImplementedError
1048
1066
1049 def get_path_commit(self, path, pre_load=None):
1067 def get_path_commit(self, path, pre_load=None):
1050 """
1068 """
1051 Returns last commit of the file at the given `path`.
1069 Returns last commit of the file at the given `path`.
1052
1070
1053 :param pre_load: Optional. List of commit attributes to load.
1071 :param pre_load: Optional. List of commit attributes to load.
1054 """
1072 """
1055 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1073 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1056 if not commits:
1074 if not commits:
1057 raise RepositoryError(
1075 raise RepositoryError(
1058 'Failed to fetch history for path {}. '
1076 'Failed to fetch history for path {}. '
1059 'Please check if such path exists in your repository'.format(
1077 'Please check if such path exists in your repository'.format(
1060 path))
1078 path))
1061 return commits[0]
1079 return commits[0]
1062
1080
1063 def get_path_history(self, path, limit=None, pre_load=None):
1081 def get_path_history(self, path, limit=None, pre_load=None):
1064 """
1082 """
1065 Returns history of file as reversed list of :class:`BaseCommit`
1083 Returns history of file as reversed list of :class:`BaseCommit`
1066 objects for which file at given `path` has been modified.
1084 objects for which file at given `path` has been modified.
1067
1085
1068 :param limit: Optional. Allows to limit the size of the returned
1086 :param limit: Optional. Allows to limit the size of the returned
1069 history. This is intended as a hint to the underlying backend, so
1087 history. This is intended as a hint to the underlying backend, so
1070 that it can apply optimizations depending on the limit.
1088 that it can apply optimizations depending on the limit.
1071 :param pre_load: Optional. List of commit attributes to load.
1089 :param pre_load: Optional. List of commit attributes to load.
1072 """
1090 """
1073 raise NotImplementedError
1091 raise NotImplementedError
1074
1092
1075 def get_file_annotate(self, path, pre_load=None):
1093 def get_file_annotate(self, path, pre_load=None):
1076 """
1094 """
1077 Returns a generator of four element tuples with
1095 Returns a generator of four element tuples with
1078 lineno, sha, commit lazy loader and line
1096 lineno, sha, commit lazy loader and line
1079
1097
1080 :param pre_load: Optional. List of commit attributes to load.
1098 :param pre_load: Optional. List of commit attributes to load.
1081 """
1099 """
1082 raise NotImplementedError
1100 raise NotImplementedError
1083
1101
1084 def get_nodes(self, path):
1102 def get_nodes(self, path):
1085 """
1103 """
1086 Returns combined ``DirNode`` and ``FileNode`` objects list representing
1104 Returns combined ``DirNode`` and ``FileNode`` objects list representing
1087 state of commit at the given ``path``.
1105 state of commit at the given ``path``.
1088
1106
1089 :raises ``CommitError``: if node at the given ``path`` is not
1107 :raises ``CommitError``: if node at the given ``path`` is not
1090 instance of ``DirNode``
1108 instance of ``DirNode``
1091 """
1109 """
1092 raise NotImplementedError
1110 raise NotImplementedError
1093
1111
1094 def get_node(self, path):
1112 def get_node(self, path):
1095 """
1113 """
1096 Returns ``Node`` object from the given ``path``.
1114 Returns ``Node`` object from the given ``path``.
1097
1115
1098 :raises ``NodeDoesNotExistError``: if there is no node at the given
1116 :raises ``NodeDoesNotExistError``: if there is no node at the given
1099 ``path``
1117 ``path``
1100 """
1118 """
1101 raise NotImplementedError
1119 raise NotImplementedError
1102
1120
1103 def get_largefile_node(self, path):
1121 def get_largefile_node(self, path):
1104 """
1122 """
1105 Returns the path to largefile from Mercurial/Git-lfs storage.
1123 Returns the path to largefile from Mercurial/Git-lfs storage.
1106 or None if it's not a largefile node
1124 or None if it's not a largefile node
1107 """
1125 """
1108 return None
1126 return None
1109
1127
1110 def archive_repo(self, archive_dest_path, kind='tgz', subrepos=None,
1128 def archive_repo(self, archive_dest_path, kind='tgz', subrepos=None,
1111 prefix=None, write_metadata=False, mtime=None, archive_at_path='/'):
1129 prefix=None, write_metadata=False, mtime=None, archive_at_path='/'):
1112 """
1130 """
1113 Creates an archive containing the contents of the repository.
1131 Creates an archive containing the contents of the repository.
1114
1132
1115 :param archive_dest_path: path to the file which to create the archive.
1133 :param archive_dest_path: path to the file which to create the archive.
1116 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1134 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1117 :param prefix: name of root directory in archive.
1135 :param prefix: name of root directory in archive.
1118 Default is repository name and commit's short_id joined with dash:
1136 Default is repository name and commit's short_id joined with dash:
1119 ``"{repo_name}-{short_id}"``.
1137 ``"{repo_name}-{short_id}"``.
1120 :param write_metadata: write a metadata file into archive.
1138 :param write_metadata: write a metadata file into archive.
1121 :param mtime: custom modification time for archive creation, defaults
1139 :param mtime: custom modification time for archive creation, defaults
1122 to time.time() if not given.
1140 to time.time() if not given.
1123 :param archive_at_path: pack files at this path (default '/')
1141 :param archive_at_path: pack files at this path (default '/')
1124
1142
1125 :raise VCSError: If prefix has a problem.
1143 :raise VCSError: If prefix has a problem.
1126 """
1144 """
1127 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1145 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1128 if kind not in allowed_kinds:
1146 if kind not in allowed_kinds:
1129 raise ImproperArchiveTypeError(
1147 raise ImproperArchiveTypeError(
1130 'Archive kind (%s) not supported use one of %s' %
1148 'Archive kind (%s) not supported use one of %s' %
1131 (kind, allowed_kinds))
1149 (kind, allowed_kinds))
1132
1150
1133 prefix = self._validate_archive_prefix(prefix)
1151 prefix = self._validate_archive_prefix(prefix)
1134
1152
1135 mtime = mtime is not None or time.mktime(self.date.timetuple())
1153 mtime = mtime is not None or time.mktime(self.date.timetuple())
1136
1154
1137 file_info = []
1155 file_info = []
1138 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1156 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1139 for _r, _d, files in cur_rev.walk(archive_at_path):
1157 for _r, _d, files in cur_rev.walk(archive_at_path):
1140 for f in files:
1158 for f in files:
1141 f_path = os.path.join(prefix, f.path)
1159 f_path = os.path.join(prefix, f.path)
1142 file_info.append(
1160 file_info.append(
1143 (f_path, f.mode, f.is_link(), f.raw_bytes))
1161 (f_path, f.mode, f.is_link(), f.raw_bytes))
1144
1162
1145 if write_metadata:
1163 if write_metadata:
1146 metadata = [
1164 metadata = [
1147 ('repo_name', self.repository.name),
1165 ('repo_name', self.repository.name),
1148 ('commit_id', self.raw_id),
1166 ('commit_id', self.raw_id),
1149 ('mtime', mtime),
1167 ('mtime', mtime),
1150 ('branch', self.branch),
1168 ('branch', self.branch),
1151 ('tags', ','.join(self.tags)),
1169 ('tags', ','.join(self.tags)),
1152 ]
1170 ]
1153 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1171 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1154 file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))
1172 file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))
1155
1173
1156 connection.Hg.archive_repo(archive_dest_path, mtime, file_info, kind)
1174 connection.Hg.archive_repo(archive_dest_path, mtime, file_info, kind)
1157
1175
1158 def _validate_archive_prefix(self, prefix):
1176 def _validate_archive_prefix(self, prefix):
1159 if prefix is None:
1177 if prefix is None:
1160 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1178 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1161 repo_name=safe_str(self.repository.name),
1179 repo_name=safe_str(self.repository.name),
1162 short_id=self.short_id)
1180 short_id=self.short_id)
1163 elif not isinstance(prefix, str):
1181 elif not isinstance(prefix, str):
1164 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1182 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1165 elif prefix.startswith('/'):
1183 elif prefix.startswith('/'):
1166 raise VCSError("Prefix cannot start with leading slash")
1184 raise VCSError("Prefix cannot start with leading slash")
1167 elif prefix.strip() == '':
1185 elif prefix.strip() == '':
1168 raise VCSError("Prefix cannot be empty")
1186 raise VCSError("Prefix cannot be empty")
1169 return prefix
1187 return prefix
1170
1188
1171 @LazyProperty
1189 @LazyProperty
1172 def root(self):
1190 def root(self):
1173 """
1191 """
1174 Returns ``RootNode`` object for this commit.
1192 Returns ``RootNode`` object for this commit.
1175 """
1193 """
1176 return self.get_node('')
1194 return self.get_node('')
1177
1195
1178 def next(self, branch=None):
1196 def next(self, branch=None):
1179 """
1197 """
1180 Returns next commit from current, if branch is gives it will return
1198 Returns next commit from current, if branch is gives it will return
1181 next commit belonging to this branch
1199 next commit belonging to this branch
1182
1200
1183 :param branch: show commits within the given named branch
1201 :param branch: show commits within the given named branch
1184 """
1202 """
1185 indexes = xrange(self.idx + 1, self.repository.count())
1203 indexes = xrange(self.idx + 1, self.repository.count())
1186 return self._find_next(indexes, branch)
1204 return self._find_next(indexes, branch)
1187
1205
1188 def prev(self, branch=None):
1206 def prev(self, branch=None):
1189 """
1207 """
1190 Returns previous commit from current, if branch is gives it will
1208 Returns previous commit from current, if branch is gives it will
1191 return previous commit belonging to this branch
1209 return previous commit belonging to this branch
1192
1210
1193 :param branch: show commit within the given named branch
1211 :param branch: show commit within the given named branch
1194 """
1212 """
1195 indexes = xrange(self.idx - 1, -1, -1)
1213 indexes = xrange(self.idx - 1, -1, -1)
1196 return self._find_next(indexes, branch)
1214 return self._find_next(indexes, branch)
1197
1215
1198 def _find_next(self, indexes, branch=None):
1216 def _find_next(self, indexes, branch=None):
1199 if branch and self.branch != branch:
1217 if branch and self.branch != branch:
1200 raise VCSError('Branch option used on commit not belonging '
1218 raise VCSError('Branch option used on commit not belonging '
1201 'to that branch')
1219 'to that branch')
1202
1220
1203 for next_idx in indexes:
1221 for next_idx in indexes:
1204 commit = self.repository.get_commit(commit_idx=next_idx)
1222 commit = self.repository.get_commit(commit_idx=next_idx)
1205 if branch and branch != commit.branch:
1223 if branch and branch != commit.branch:
1206 continue
1224 continue
1207 return commit
1225 return commit
1208 raise CommitDoesNotExistError
1226 raise CommitDoesNotExistError
1209
1227
1210 def diff(self, ignore_whitespace=True, context=3):
1228 def diff(self, ignore_whitespace=True, context=3):
1211 """
1229 """
1212 Returns a `Diff` object representing the change made by this commit.
1230 Returns a `Diff` object representing the change made by this commit.
1213 """
1231 """
1214 parent = self.first_parent
1232 parent = self.first_parent
1215 diff = self.repository.get_diff(
1233 diff = self.repository.get_diff(
1216 parent, self,
1234 parent, self,
1217 ignore_whitespace=ignore_whitespace,
1235 ignore_whitespace=ignore_whitespace,
1218 context=context)
1236 context=context)
1219 return diff
1237 return diff
1220
1238
1221 @LazyProperty
1239 @LazyProperty
1222 def added(self):
1240 def added(self):
1223 """
1241 """
1224 Returns list of added ``FileNode`` objects.
1242 Returns list of added ``FileNode`` objects.
1225 """
1243 """
1226 raise NotImplementedError
1244 raise NotImplementedError
1227
1245
1228 @LazyProperty
1246 @LazyProperty
1229 def changed(self):
1247 def changed(self):
1230 """
1248 """
1231 Returns list of modified ``FileNode`` objects.
1249 Returns list of modified ``FileNode`` objects.
1232 """
1250 """
1233 raise NotImplementedError
1251 raise NotImplementedError
1234
1252
1235 @LazyProperty
1253 @LazyProperty
1236 def removed(self):
1254 def removed(self):
1237 """
1255 """
1238 Returns list of removed ``FileNode`` objects.
1256 Returns list of removed ``FileNode`` objects.
1239 """
1257 """
1240 raise NotImplementedError
1258 raise NotImplementedError
1241
1259
1242 @LazyProperty
1260 @LazyProperty
1243 def size(self):
1261 def size(self):
1244 """
1262 """
1245 Returns total number of bytes from contents of all filenodes.
1263 Returns total number of bytes from contents of all filenodes.
1246 """
1264 """
1247 return sum((node.size for node in self.get_filenodes_generator()))
1265 return sum((node.size for node in self.get_filenodes_generator()))
1248
1266
1249 def walk(self, topurl=''):
1267 def walk(self, topurl=''):
1250 """
1268 """
1251 Similar to os.walk method. Insted of filesystem it walks through
1269 Similar to os.walk method. Insted of filesystem it walks through
1252 commit starting at given ``topurl``. Returns generator of tuples
1270 commit starting at given ``topurl``. Returns generator of tuples
1253 (topnode, dirnodes, filenodes).
1271 (topnode, dirnodes, filenodes).
1254 """
1272 """
1255 topnode = self.get_node(topurl)
1273 topnode = self.get_node(topurl)
1256 if not topnode.is_dir():
1274 if not topnode.is_dir():
1257 return
1275 return
1258 yield (topnode, topnode.dirs, topnode.files)
1276 yield (topnode, topnode.dirs, topnode.files)
1259 for dirnode in topnode.dirs:
1277 for dirnode in topnode.dirs:
1260 for tup in self.walk(dirnode.path):
1278 for tup in self.walk(dirnode.path):
1261 yield tup
1279 yield tup
1262
1280
1263 def get_filenodes_generator(self):
1281 def get_filenodes_generator(self):
1264 """
1282 """
1265 Returns generator that yields *all* file nodes.
1283 Returns generator that yields *all* file nodes.
1266 """
1284 """
1267 for topnode, dirs, files in self.walk():
1285 for topnode, dirs, files in self.walk():
1268 for node in files:
1286 for node in files:
1269 yield node
1287 yield node
1270
1288
1271 #
1289 #
1272 # Utilities for sub classes to support consistent behavior
1290 # Utilities for sub classes to support consistent behavior
1273 #
1291 #
1274
1292
1275 def no_node_at_path(self, path):
1293 def no_node_at_path(self, path):
1276 return NodeDoesNotExistError(
1294 return NodeDoesNotExistError(
1277 u"There is no file nor directory at the given path: "
1295 u"There is no file nor directory at the given path: "
1278 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1296 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1279
1297
1280 def _fix_path(self, path):
1298 def _fix_path(self, path):
1281 """
1299 """
1282 Paths are stored without trailing slash so we need to get rid off it if
1300 Paths are stored without trailing slash so we need to get rid off it if
1283 needed.
1301 needed.
1284 """
1302 """
1285 return path.rstrip('/')
1303 return path.rstrip('/')
1286
1304
1287 #
1305 #
1288 # Deprecated API based on changesets
1306 # Deprecated API based on changesets
1289 #
1307 #
1290
1308
1291 @property
1309 @property
1292 def revision(self):
1310 def revision(self):
1293 warnings.warn("Use idx instead", DeprecationWarning)
1311 warnings.warn("Use idx instead", DeprecationWarning)
1294 return self.idx
1312 return self.idx
1295
1313
1296 @revision.setter
1314 @revision.setter
1297 def revision(self, value):
1315 def revision(self, value):
1298 warnings.warn("Use idx instead", DeprecationWarning)
1316 warnings.warn("Use idx instead", DeprecationWarning)
1299 self.idx = value
1317 self.idx = value
1300
1318
1301 def get_file_changeset(self, path):
1319 def get_file_changeset(self, path):
1302 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1320 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1303 return self.get_path_commit(path)
1321 return self.get_path_commit(path)
1304
1322
1305
1323
class BaseChangesetClass(type):
    """
    Metaclass whose instance check delegates to :class:`BaseCommit`, so
    ``isinstance(obj, BaseChangeset)`` also matches commit objects.
    """

    def __instancecheck__(self, instance):
        return isinstance(instance, BaseCommit)
1310
1328
1311
1329
class BaseChangeset(BaseCommit):
    """
    Deprecated alias of :class:`BaseCommit`; instantiating it emits a
    ``DeprecationWarning`` pointing at the replacement.
    """

    # Py2-style metaclass hook; keeps isinstance checks matching BaseCommit.
    __metaclass__ = BaseChangesetClass

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
        return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1320
1338
1321
1339
class BaseInMemoryCommit(object):
    """
    Represents differences between repository's state (most recent head) and
    changes made *in place*.

    **Attributes**

    ``repository``
        repository object for this in-memory-commit

    ``added``
        list of ``FileNode`` objects marked as *added*

    ``changed``
        list of ``FileNode`` objects marked as *changed*

    ``removed``
        list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
        *removed*

    ``parents``
        list of :class:`BaseCommit` instances representing parents of
        in-memory commit. Should always be 2-element sequence.

    """

    def __init__(self, repository):
        self.repository = repository
        self.added = []
        self.changed = []
        self.removed = []
        self.parents = []

    def add(self, *filenodes):
        """
        Marks given ``FileNode`` objects as *to be committed*.

        :raises ``NodeAlreadyExistsError``: if node with same path exists at
            latest commit
        :raises ``NodeAlreadyAddedError``: if node with same path is already
            marked as *added*
        """
        # Check if not already marked as *added* first
        for node in filenodes:
            if node.path in (n.path for n in self.added):
                raise NodeAlreadyAddedError(
                    "Such FileNode %s is already marked for addition"
                    % node.path)
        for node in filenodes:
            self.added.append(node)

    def change(self, *filenodes):
        """
        Marks given ``FileNode`` objects to be *changed* in next commit.

        :raises ``EmptyRepositoryError``: if there are no commits yet
        :raises ``NodeAlreadyExistsError``: if node with same path is already
            marked to be *changed*
        :raises ``NodeAlreadyRemovedError``: if node with same path is already
            marked to be *removed*
        :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
            commit
        :raises ``NodeNotChangedError``: if node hasn't really be changed
        """
        # Reject before mutating: a node cannot be both removed and changed.
        for node in filenodes:
            if node.path in (n.path for n in self.removed):
                raise NodeAlreadyRemovedError(
                    "Node at %s is already marked as removed" % node.path)
        try:
            self.repository.get_commit()
        except EmptyRepositoryError:
            raise EmptyRepositoryError(
                "Nothing to change - try to *add* new nodes rather than "
                "changing them")
        for node in filenodes:
            if node.path in (n.path for n in self.changed):
                raise NodeAlreadyChangedError(
                    "Node at '%s' is already marked as changed" % node.path)
            self.changed.append(node)

    def remove(self, *filenodes):
        """
        Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
        *removed* in next commit.

        :raises ``NodeAlreadyRemovedError``: if node has been already marked to
            be *removed*
        :raises ``NodeAlreadyChangedError``: if node has been already marked to
            be *changed*
        """
        for node in filenodes:
            if node.path in (n.path for n in self.removed):
                raise NodeAlreadyRemovedError(
                    "Node is already marked to for removal at %s" % node.path)
            if node.path in (n.path for n in self.changed):
                raise NodeAlreadyChangedError(
                    "Node is already marked to be changed at %s" % node.path)
            # We only mark node as *removed* - real removal is done by
            # commit method
            self.removed.append(node)

    def reset(self):
        """
        Resets this instance to initial state (cleans ``added``, ``changed``
        and ``removed`` lists).
        """
        self.added = []
        self.changed = []
        self.removed = []
        self.parents = []

    def get_ipaths(self):
        """
        Returns generator of paths from nodes marked as added, changed or
        removed.
        """
        for node in itertools.chain(self.added, self.changed, self.removed):
            yield node.path

    def get_paths(self):
        """
        Returns list of paths from nodes marked as added, changed or removed.
        """
        return list(self.get_ipaths())

    def check_integrity(self, parents=None):
        """
        Checks in-memory commit's integrity. Also, sets parents if not
        already set.

        :raises CommitError: if any error occurs (i.e.
            ``NodeDoesNotExistError``).
        """
        if not self.parents:
            parents = parents or []
            if len(parents) == 0:
                try:
                    parents = [self.repository.get_commit(), None]
                except EmptyRepositoryError:
                    parents = [None, None]
            elif len(parents) == 1:
                parents += [None]
            self.parents = parents

        # Local parents, only if not None
        parents = [p for p in self.parents if p]

        # Check nodes marked as added
        for p in parents:
            for node in self.added:
                try:
                    p.get_node(node.path)
                except NodeDoesNotExistError:
                    pass
                else:
                    raise NodeAlreadyExistsError(
                        "Node `%s` already exists at %s" % (node.path, p))

        # Check nodes marked as changed
        missing = set(self.changed)
        not_changed = set(self.changed)
        if self.changed and not parents:
            raise NodeDoesNotExistError(str(self.changed[0].path))
        for p in parents:
            for node in self.changed:
                try:
                    old = p.get_node(node.path)
                    missing.remove(node)
                    # if content actually changed, remove node from not_changed
                    if old.content != node.content:
                        not_changed.remove(node)
                except NodeDoesNotExistError:
                    pass
        if self.changed and missing:
            # BUGFIX: previously interpolated the stale loop variable `node`
            # (an arbitrary changed node), not one that is actually missing.
            raise NodeDoesNotExistError(
                "Node `%s` marked as modified but missing in parents: %s"
                % (missing.pop().path, parents))

        if self.changed and not_changed:
            raise NodeNotChangedError(
                "Node `%s` wasn't actually changed (parents: %s)"
                % (not_changed.pop().path, parents))

        # Check nodes marked as removed
        if self.removed and not parents:
            raise NodeDoesNotExistError(
                "Cannot remove node at %s as there "
                "were no parents specified" % self.removed[0].path)
        really_removed = set()
        for p in parents:
            for node in self.removed:
                try:
                    p.get_node(node.path)
                    really_removed.add(node)
                except CommitError:
                    pass
        not_removed = set(self.removed) - really_removed
        if not_removed:
            # TODO: johbo: This code branch does not seem to be covered
            raise NodeDoesNotExistError(
                "Cannot remove node at %s from "
                "following parents: %s" % (not_removed, parents))

    def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
        """
        Performs in-memory commit (doesn't check workdir in any way) and
        returns newly created :class:`BaseCommit`. Updates repository's
        attribute `commits`.

        .. note::

            While overriding this method each backend's should call
            ``self.check_integrity(parents)`` in the first place.

        :param message: message of the commit
        :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
        :param parents: single parent or sequence of parents from which commit
            would be derived
        :param date: ``datetime.datetime`` instance. Defaults to
            ``datetime.datetime.now()``.
        :param branch: branch name, as string. If none given, default backend's
            branch would be used.

        :raises ``CommitError``: if any error occurs while committing
        """
        raise NotImplementedError
1548
1566
1549
1567
1550 class BaseInMemoryChangesetClass(type):
1568 class BaseInMemoryChangesetClass(type):
1551
1569
1552 def __instancecheck__(self, instance):
1570 def __instancecheck__(self, instance):
1553 return isinstance(instance, BaseInMemoryCommit)
1571 return isinstance(instance, BaseInMemoryCommit)
1554
1572
1555
1573
1556 class BaseInMemoryChangeset(BaseInMemoryCommit):
1574 class BaseInMemoryChangeset(BaseInMemoryCommit):
1557
1575
1558 __metaclass__ = BaseInMemoryChangesetClass
1576 __metaclass__ = BaseInMemoryChangesetClass
1559
1577
1560 def __new__(cls, *args, **kwargs):
1578 def __new__(cls, *args, **kwargs):
1561 warnings.warn(
1579 warnings.warn(
1562 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1580 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1563 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1581 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1564
1582
1565
1583
1566 class EmptyCommit(BaseCommit):
1584 class EmptyCommit(BaseCommit):
1567 """
1585 """
1568 An dummy empty commit. It's possible to pass hash when creating
1586 An dummy empty commit. It's possible to pass hash when creating
1569 an EmptyCommit
1587 an EmptyCommit
1570 """
1588 """
1571
1589
1572 def __init__(
1590 def __init__(
1573 self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
1591 self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
1574 message='', author='', date=None):
1592 message='', author='', date=None):
1575 self._empty_commit_id = commit_id
1593 self._empty_commit_id = commit_id
1576 # TODO: johbo: Solve idx parameter, default value does not make
1594 # TODO: johbo: Solve idx parameter, default value does not make
1577 # too much sense
1595 # too much sense
1578 self.idx = idx
1596 self.idx = idx
1579 self.message = message
1597 self.message = message
1580 self.author = author
1598 self.author = author
1581 self.date = date or datetime.datetime.fromtimestamp(0)
1599 self.date = date or datetime.datetime.fromtimestamp(0)
1582 self.repository = repo
1600 self.repository = repo
1583 self.alias = alias
1601 self.alias = alias
1584
1602
1585 @LazyProperty
1603 @LazyProperty
1586 def raw_id(self):
1604 def raw_id(self):
1587 """
1605 """
1588 Returns raw string identifying this commit, useful for web
1606 Returns raw string identifying this commit, useful for web
1589 representation.
1607 representation.
1590 """
1608 """
1591
1609
1592 return self._empty_commit_id
1610 return self._empty_commit_id
1593
1611
1594 @LazyProperty
1612 @LazyProperty
1595 def branch(self):
1613 def branch(self):
1596 if self.alias:
1614 if self.alias:
1597 from rhodecode.lib.vcs.backends import get_backend
1615 from rhodecode.lib.vcs.backends import get_backend
1598 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1616 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1599
1617
1600 @LazyProperty
1618 @LazyProperty
1601 def short_id(self):
1619 def short_id(self):
1602 return self.raw_id[:12]
1620 return self.raw_id[:12]
1603
1621
1604 @LazyProperty
1622 @LazyProperty
1605 def id(self):
1623 def id(self):
1606 return self.raw_id
1624 return self.raw_id
1607
1625
1608 def get_path_commit(self, path):
1626 def get_path_commit(self, path):
1609 return self
1627 return self
1610
1628
1611 def get_file_content(self, path):
1629 def get_file_content(self, path):
1612 return u''
1630 return u''
1613
1631
1614 def get_file_size(self, path):
1632 def get_file_size(self, path):
1615 return 0
1633 return 0
1616
1634
1617
1635
1618 class EmptyChangesetClass(type):
1636 class EmptyChangesetClass(type):
1619
1637
1620 def __instancecheck__(self, instance):
1638 def __instancecheck__(self, instance):
1621 return isinstance(instance, EmptyCommit)
1639 return isinstance(instance, EmptyCommit)
1622
1640
1623
1641
1624 class EmptyChangeset(EmptyCommit):
1642 class EmptyChangeset(EmptyCommit):
1625
1643
1626 __metaclass__ = EmptyChangesetClass
1644 __metaclass__ = EmptyChangesetClass
1627
1645
1628 def __new__(cls, *args, **kwargs):
1646 def __new__(cls, *args, **kwargs):
1629 warnings.warn(
1647 warnings.warn(
1630 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1648 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1631 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1649 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1632
1650
1633 def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
1651 def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
1634 alias=None, revision=-1, message='', author='', date=None):
1652 alias=None, revision=-1, message='', author='', date=None):
1635 if requested_revision is not None:
1653 if requested_revision is not None:
1636 warnings.warn(
1654 warnings.warn(
1637 "Parameter requested_revision not supported anymore",
1655 "Parameter requested_revision not supported anymore",
1638 DeprecationWarning)
1656 DeprecationWarning)
1639 super(EmptyChangeset, self).__init__(
1657 super(EmptyChangeset, self).__init__(
1640 commit_id=cs, repo=repo, alias=alias, idx=revision,
1658 commit_id=cs, repo=repo, alias=alias, idx=revision,
1641 message=message, author=author, date=date)
1659 message=message, author=author, date=date)
1642
1660
1643 @property
1661 @property
1644 def revision(self):
1662 def revision(self):
1645 warnings.warn("Use idx instead", DeprecationWarning)
1663 warnings.warn("Use idx instead", DeprecationWarning)
1646 return self.idx
1664 return self.idx
1647
1665
1648 @revision.setter
1666 @revision.setter
1649 def revision(self, value):
1667 def revision(self, value):
1650 warnings.warn("Use idx instead", DeprecationWarning)
1668 warnings.warn("Use idx instead", DeprecationWarning)
1651 self.idx = value
1669 self.idx = value
1652
1670
1653
1671
1654 class EmptyRepository(BaseRepository):
1672 class EmptyRepository(BaseRepository):
1655 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1673 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1656 pass
1674 pass
1657
1675
1658 def get_diff(self, *args, **kwargs):
1676 def get_diff(self, *args, **kwargs):
1659 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1677 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1660 return GitDiff('')
1678 return GitDiff('')
1661
1679
1662
1680
1663 class CollectionGenerator(object):
1681 class CollectionGenerator(object):
1664
1682
1665 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
1683 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
1666 self.repo = repo
1684 self.repo = repo
1667 self.commit_ids = commit_ids
1685 self.commit_ids = commit_ids
1668 # TODO: (oliver) this isn't currently hooked up
1686 # TODO: (oliver) this isn't currently hooked up
1669 self.collection_size = None
1687 self.collection_size = None
1670 self.pre_load = pre_load
1688 self.pre_load = pre_load
1671 self.translate_tag = translate_tag
1689 self.translate_tag = translate_tag
1672
1690
1673 def __len__(self):
1691 def __len__(self):
1674 if self.collection_size is not None:
1692 if self.collection_size is not None:
1675 return self.collection_size
1693 return self.collection_size
1676 return self.commit_ids.__len__()
1694 return self.commit_ids.__len__()
1677
1695
1678 def __iter__(self):
1696 def __iter__(self):
1679 for commit_id in self.commit_ids:
1697 for commit_id in self.commit_ids:
1680 # TODO: johbo: Mercurial passes in commit indices or commit ids
1698 # TODO: johbo: Mercurial passes in commit indices or commit ids
1681 yield self._commit_factory(commit_id)
1699 yield self._commit_factory(commit_id)
1682
1700
1683 def _commit_factory(self, commit_id):
1701 def _commit_factory(self, commit_id):
1684 """
1702 """
1685 Allows backends to override the way commits are generated.
1703 Allows backends to override the way commits are generated.
1686 """
1704 """
1687 return self.repo.get_commit(
1705 return self.repo.get_commit(
1688 commit_id=commit_id, pre_load=self.pre_load,
1706 commit_id=commit_id, pre_load=self.pre_load,
1689 translate_tag=self.translate_tag)
1707 translate_tag=self.translate_tag)
1690
1708
1691 def __getslice__(self, i, j):
1709 def __getslice__(self, i, j):
1692 """
1710 """
1693 Returns an iterator of sliced repository
1711 Returns an iterator of sliced repository
1694 """
1712 """
1695 commit_ids = self.commit_ids[i:j]
1713 commit_ids = self.commit_ids[i:j]
1696 return self.__class__(
1714 return self.__class__(
1697 self.repo, commit_ids, pre_load=self.pre_load,
1715 self.repo, commit_ids, pre_load=self.pre_load,
1698 translate_tag=self.translate_tag)
1716 translate_tag=self.translate_tag)
1699
1717
1700 def __repr__(self):
1718 def __repr__(self):
1701 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1719 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1702
1720
1703
1721
1704 class Config(object):
1722 class Config(object):
1705 """
1723 """
1706 Represents the configuration for a repository.
1724 Represents the configuration for a repository.
1707
1725
1708 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1726 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1709 standard library. It implements only the needed subset.
1727 standard library. It implements only the needed subset.
1710 """
1728 """
1711
1729
1712 def __init__(self):
1730 def __init__(self):
1713 self._values = {}
1731 self._values = {}
1714
1732
1715 def copy(self):
1733 def copy(self):
1716 clone = Config()
1734 clone = Config()
1717 for section, values in self._values.items():
1735 for section, values in self._values.items():
1718 clone._values[section] = values.copy()
1736 clone._values[section] = values.copy()
1719 return clone
1737 return clone
1720
1738
1721 def __repr__(self):
1739 def __repr__(self):
1722 return '<Config(%s sections) at %s>' % (
1740 return '<Config(%s sections) at %s>' % (
1723 len(self._values), hex(id(self)))
1741 len(self._values), hex(id(self)))
1724
1742
1725 def items(self, section):
1743 def items(self, section):
1726 return self._values.get(section, {}).iteritems()
1744 return self._values.get(section, {}).iteritems()
1727
1745
1728 def get(self, section, option):
1746 def get(self, section, option):
1729 return self._values.get(section, {}).get(option)
1747 return self._values.get(section, {}).get(option)
1730
1748
1731 def set(self, section, option, value):
1749 def set(self, section, option, value):
1732 section_values = self._values.setdefault(section, {})
1750 section_values = self._values.setdefault(section, {})
1733 section_values[option] = value
1751 section_values[option] = value
1734
1752
1735 def clear_section(self, section):
1753 def clear_section(self, section):
1736 self._values[section] = {}
1754 self._values[section] = {}
1737
1755
1738 def serialize(self):
1756 def serialize(self):
1739 """
1757 """
1740 Creates a list of three tuples (section, key, value) representing
1758 Creates a list of three tuples (section, key, value) representing
1741 this config object.
1759 this config object.
1742 """
1760 """
1743 items = []
1761 items = []
1744 for section in self._values:
1762 for section in self._values:
1745 for option, value in self._values[section].items():
1763 for option, value in self._values[section].items():
1746 items.append(
1764 items.append(
1747 (safe_str(section), safe_str(option), safe_str(value)))
1765 (safe_str(section), safe_str(option), safe_str(value)))
1748 return items
1766 return items
1749
1767
1750
1768
1751 class Diff(object):
1769 class Diff(object):
1752 """
1770 """
1753 Represents a diff result from a repository backend.
1771 Represents a diff result from a repository backend.
1754
1772
1755 Subclasses have to provide a backend specific value for
1773 Subclasses have to provide a backend specific value for
1756 :attr:`_header_re` and :attr:`_meta_re`.
1774 :attr:`_header_re` and :attr:`_meta_re`.
1757 """
1775 """
1758 _meta_re = None
1776 _meta_re = None
1759 _header_re = None
1777 _header_re = None
1760
1778
1761 def __init__(self, raw_diff):
1779 def __init__(self, raw_diff):
1762 self.raw = raw_diff
1780 self.raw = raw_diff
1763
1781
1764 def chunks(self):
1782 def chunks(self):
1765 """
1783 """
1766 split the diff in chunks of separate --git a/file b/file chunks
1784 split the diff in chunks of separate --git a/file b/file chunks
1767 to make diffs consistent we must prepend with \n, and make sure
1785 to make diffs consistent we must prepend with \n, and make sure
1768 we can detect last chunk as this was also has special rule
1786 we can detect last chunk as this was also has special rule
1769 """
1787 """
1770
1788
1771 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1789 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1772 header = diff_parts[0]
1790 header = diff_parts[0]
1773
1791
1774 if self._meta_re:
1792 if self._meta_re:
1775 match = self._meta_re.match(header)
1793 match = self._meta_re.match(header)
1776
1794
1777 chunks = diff_parts[1:]
1795 chunks = diff_parts[1:]
1778 total_chunks = len(chunks)
1796 total_chunks = len(chunks)
1779
1797
1780 return (
1798 return (
1781 DiffChunk(chunk, self, cur_chunk == total_chunks)
1799 DiffChunk(chunk, self, cur_chunk == total_chunks)
1782 for cur_chunk, chunk in enumerate(chunks, start=1))
1800 for cur_chunk, chunk in enumerate(chunks, start=1))
1783
1801
1784
1802
1785 class DiffChunk(object):
1803 class DiffChunk(object):
1786
1804
1787 def __init__(self, chunk, diff, last_chunk):
1805 def __init__(self, chunk, diff, last_chunk):
1788 self._diff = diff
1806 self._diff = diff
1789
1807
1790 # since we split by \ndiff --git that part is lost from original diff
1808 # since we split by \ndiff --git that part is lost from original diff
1791 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1809 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1792 if not last_chunk:
1810 if not last_chunk:
1793 chunk += '\n'
1811 chunk += '\n'
1794
1812
1795 match = self._diff._header_re.match(chunk)
1813 match = self._diff._header_re.match(chunk)
1796 self.header = match.groupdict()
1814 self.header = match.groupdict()
1797 self.diff = chunk[match.end():]
1815 self.diff = chunk[match.end():]
1798 self.raw = chunk
1816 self.raw = chunk
1799
1817
1800
1818
1801 class BasePathPermissionChecker(object):
1819 class BasePathPermissionChecker(object):
1802
1820
1803 @staticmethod
1821 @staticmethod
1804 def create_from_patterns(includes, excludes):
1822 def create_from_patterns(includes, excludes):
1805 if includes and '*' in includes and not excludes:
1823 if includes and '*' in includes and not excludes:
1806 return AllPathPermissionChecker()
1824 return AllPathPermissionChecker()
1807 elif excludes and '*' in excludes:
1825 elif excludes and '*' in excludes:
1808 return NonePathPermissionChecker()
1826 return NonePathPermissionChecker()
1809 else:
1827 else:
1810 return PatternPathPermissionChecker(includes, excludes)
1828 return PatternPathPermissionChecker(includes, excludes)
1811
1829
1812 @property
1830 @property
1813 def has_full_access(self):
1831 def has_full_access(self):
1814 raise NotImplemented()
1832 raise NotImplemented()
1815
1833
1816 def has_access(self, path):
1834 def has_access(self, path):
1817 raise NotImplemented()
1835 raise NotImplemented()
1818
1836
1819
1837
1820 class AllPathPermissionChecker(BasePathPermissionChecker):
1838 class AllPathPermissionChecker(BasePathPermissionChecker):
1821
1839
1822 @property
1840 @property
1823 def has_full_access(self):
1841 def has_full_access(self):
1824 return True
1842 return True
1825
1843
1826 def has_access(self, path):
1844 def has_access(self, path):
1827 return True
1845 return True
1828
1846
1829
1847
1830 class NonePathPermissionChecker(BasePathPermissionChecker):
1848 class NonePathPermissionChecker(BasePathPermissionChecker):
1831
1849
1832 @property
1850 @property
1833 def has_full_access(self):
1851 def has_full_access(self):
1834 return False
1852 return False
1835
1853
1836 def has_access(self, path):
1854 def has_access(self, path):
1837 return False
1855 return False
1838
1856
1839
1857
1840 class PatternPathPermissionChecker(BasePathPermissionChecker):
1858 class PatternPathPermissionChecker(BasePathPermissionChecker):
1841
1859
1842 def __init__(self, includes, excludes):
1860 def __init__(self, includes, excludes):
1843 self.includes = includes
1861 self.includes = includes
1844 self.excludes = excludes
1862 self.excludes = excludes
1845 self.includes_re = [] if not includes else [
1863 self.includes_re = [] if not includes else [
1846 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1864 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1847 self.excludes_re = [] if not excludes else [
1865 self.excludes_re = [] if not excludes else [
1848 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1866 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1849
1867
1850 @property
1868 @property
1851 def has_full_access(self):
1869 def has_full_access(self):
1852 return '*' in self.includes and not self.excludes
1870 return '*' in self.includes and not self.excludes
1853
1871
1854 def has_access(self, path):
1872 def has_access(self, path):
1855 for regex in self.excludes_re:
1873 for regex in self.excludes_re:
1856 if regex.match(path):
1874 if regex.match(path):
1857 return False
1875 return False
1858 for regex in self.includes_re:
1876 for regex in self.includes_re:
1859 if regex.match(path):
1877 if regex.match(path):
1860 return True
1878 return True
1861 return False
1879 return False
@@ -1,533 +1,507 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT commit module
22 GIT commit module
23 """
23 """
24
24
25 import re
25 import re
26 import stat
26 import stat
27 from itertools import chain
27 from itertools import chain
28 from StringIO import StringIO
28 from StringIO import StringIO
29
29
30 from zope.cachedescriptors.property import Lazy as LazyProperty
30 from zope.cachedescriptors.property import Lazy as LazyProperty
31
31
32 from rhodecode.lib.datelib import utcdate_fromtimestamp
32 from rhodecode.lib.datelib import utcdate_fromtimestamp
33 from rhodecode.lib.utils import safe_unicode, safe_str
33 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils2 import safe_int
34 from rhodecode.lib.utils2 import safe_int
35 from rhodecode.lib.vcs.conf import settings
35 from rhodecode.lib.vcs.conf import settings
36 from rhodecode.lib.vcs.backends import base
36 from rhodecode.lib.vcs.backends import base
37 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
37 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
38 from rhodecode.lib.vcs.nodes import (
38 from rhodecode.lib.vcs.nodes import (
39 FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
39 FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
40 ChangedFileNodesGenerator, AddedFileNodesGenerator,
40 ChangedFileNodesGenerator, AddedFileNodesGenerator,
41 RemovedFileNodesGenerator, LargeFileNode)
41 RemovedFileNodesGenerator, LargeFileNode)
42 from rhodecode.lib.vcs.compat import configparser
42 from rhodecode.lib.vcs.compat import configparser
43
43
44
44
45 class GitCommit(base.BaseCommit):
45 class GitCommit(base.BaseCommit):
46 """
46 """
47 Represents state of the repository at single commit id.
47 Represents state of the repository at single commit id.
48 """
48 """
49 _author_property = 'author'
50 _committer_property = 'committer'
51 _date_property = 'commit_time'
52 _date_tz_property = 'commit_timezone'
53 _message_property = 'message'
54 _parents_property = 'parents'
55
49
56 _filter_pre_load = [
50 _filter_pre_load = [
57 # done through a more complex tree walk on parents
51 # done through a more complex tree walk on parents
58 "affected_files",
52 "affected_files",
59 # based on repository cached property
53 # based on repository cached property
60 "branch",
54 "branch",
61 # done through subprocess not remote call
55 # done through subprocess not remote call
62 "children",
56 "children",
63 # done through a more complex tree walk on parents
57 # done through a more complex tree walk on parents
64 "status",
58 "status",
65 # mercurial specific property not supported here
59 # mercurial specific property not supported here
66 "_file_paths",
60 "_file_paths",
67 # mercurial specific property not supported here
61 # mercurial specific property not supported here
68 'obsolete',
62 'obsolete',
69 # mercurial specific property not supported here
63 # mercurial specific property not supported here
70 'phase',
64 'phase',
71 # mercurial specific property not supported here
65 # mercurial specific property not supported here
72 'hidden'
66 'hidden'
73 ]
67 ]
74
68
75 def __init__(self, repository, raw_id, idx, pre_load=None):
69 def __init__(self, repository, raw_id, idx, pre_load=None):
76 self.repository = repository
70 self.repository = repository
77 self._remote = repository._remote
71 self._remote = repository._remote
78 # TODO: johbo: Tweak of raw_id should not be necessary
72 # TODO: johbo: Tweak of raw_id should not be necessary
79 self.raw_id = safe_str(raw_id)
73 self.raw_id = safe_str(raw_id)
80 self.idx = idx
74 self.idx = idx
81
75
82 self._set_bulk_properties(pre_load)
76 self._set_bulk_properties(pre_load)
83
77
84 # caches
78 # caches
85 self._stat_modes = {} # stat info for paths
79 self._stat_modes = {} # stat info for paths
86 self._paths = {} # path processed with parse_tree
80 self._paths = {} # path processed with parse_tree
87 self.nodes = {}
81 self.nodes = {}
88 self._submodules = None
82 self._submodules = None
89
83
90 def _set_bulk_properties(self, pre_load):
84 def _set_bulk_properties(self, pre_load):
91 if not pre_load:
85 if not pre_load:
92 return
86 return
93 pre_load = [entry for entry in pre_load
87 pre_load = [entry for entry in pre_load
94 if entry not in self._filter_pre_load]
88 if entry not in self._filter_pre_load]
95 if not pre_load:
89 if not pre_load:
96 return
90 return
97
91
98 result = self._remote.bulk_request(self.raw_id, pre_load)
92 result = self._remote.bulk_request(self.raw_id, pre_load)
99 for attr, value in result.items():
93 for attr, value in result.items():
100 if attr in ["author", "message"]:
94 if attr in ["author", "message"]:
101 if value:
95 if value:
102 value = safe_unicode(value)
96 value = safe_unicode(value)
103 elif attr == "date":
97 elif attr == "date":
104 value = utcdate_fromtimestamp(*value)
98 value = utcdate_fromtimestamp(*value)
105 elif attr == "parents":
99 elif attr == "parents":
106 value = self._make_commits(value)
100 value = self._make_commits(value)
107 self.__dict__[attr] = value
101 self.__dict__[attr] = value
108
102
109 @LazyProperty
103 @LazyProperty
110 def _commit(self):
104 def _commit(self):
111 return self._remote[self.raw_id]
105 return self._remote[self.raw_id]
112
106
113 @LazyProperty
107 @LazyProperty
114 def _tree_id(self):
108 def _tree_id(self):
115 return self._remote[self._commit['tree']]['id']
109 return self._remote[self._commit['tree']]['id']
116
110
117 @LazyProperty
111 @LazyProperty
118 def id(self):
112 def id(self):
119 return self.raw_id
113 return self.raw_id
120
114
121 @LazyProperty
115 @LazyProperty
122 def short_id(self):
116 def short_id(self):
123 return self.raw_id[:12]
117 return self.raw_id[:12]
124
118
125 @LazyProperty
119 @LazyProperty
126 def message(self):
120 def message(self):
127 return safe_unicode(
121 return safe_unicode(self._remote.message(self.id))
128 self._remote.commit_attribute(self.id, self._message_property))
129
122
130 @LazyProperty
123 @LazyProperty
131 def committer(self):
124 def committer(self):
132 return safe_unicode(
125 return safe_unicode(self._remote.author(self.id))
133 self._remote.commit_attribute(self.id, self._committer_property))
134
126
135 @LazyProperty
127 @LazyProperty
136 def author(self):
128 def author(self):
137 return safe_unicode(
129 return safe_unicode(self._remote.author(self.id))
138 self._remote.commit_attribute(self.id, self._author_property))
139
130
140 @LazyProperty
131 @LazyProperty
141 def date(self):
132 def date(self):
142 unix_ts, tz = self._remote.get_object_attrs(
133 unix_ts, tz = self._remote.date(self.raw_id)
143 self.raw_id, self._date_property, self._date_tz_property)
144 return utcdate_fromtimestamp(unix_ts, tz)
134 return utcdate_fromtimestamp(unix_ts, tz)
145
135
146 @LazyProperty
136 @LazyProperty
147 def status(self):
137 def status(self):
148 """
138 """
149 Returns modified, added, removed, deleted files for current commit
139 Returns modified, added, removed, deleted files for current commit
150 """
140 """
151 return self.changed, self.added, self.removed
141 return self.changed, self.added, self.removed
152
142
153 @LazyProperty
143 @LazyProperty
154 def tags(self):
144 def tags(self):
155 tags = [safe_unicode(name) for name,
145 tags = [safe_unicode(name) for name,
156 commit_id in self.repository.tags.iteritems()
146 commit_id in self.repository.tags.iteritems()
157 if commit_id == self.raw_id]
147 if commit_id == self.raw_id]
158 return tags
148 return tags
159
149
160 @LazyProperty
150 @LazyProperty
161 def branch(self):
151 def commit_branches(self):
152 branches = []
162 for name, commit_id in self.repository.branches.iteritems():
153 for name, commit_id in self.repository.branches.iteritems():
163 if commit_id == self.raw_id:
154 if commit_id == self.raw_id:
164 return safe_unicode(name)
155 branches.append(name)
156 return branches
157
158 @LazyProperty
159 def branch(self):
160 # actually commit can have multiple branches
161 branches = self.commit_branches
162 if branches:
163 return branches[0]
164
165 return None
165 return None
166
166
167 def _get_id_for_path(self, path):
167 def _get_tree_id_for_path(self, path):
168 path = safe_str(path)
168 path = safe_str(path)
169 if path in self._paths:
169 if path in self._paths:
170 return self._paths[path]
170 return self._paths[path]
171
171
172 tree_id = self._tree_id
172 tree_id = self._tree_id
173
173
174 path = path.strip('/')
174 path = path.strip('/')
175 if path == '':
175 if path == '':
176 data = [tree_id, "tree"]
176 data = [tree_id, "tree"]
177 self._paths[''] = data
177 self._paths[''] = data
178 return data
178 return data
179
179
180 parts = path.split('/')
180 tree_id, tree_type, tree_mode = \
181 dirs, name = parts[:-1], parts[-1]
181 self._remote.tree_and_type_for_path(self.raw_id, path)
182 cur_dir = ''
182 if tree_id is None:
183
183 raise self.no_node_at_path(path)
184 # initially extract things from root dir
185 tree_items = self._remote.tree_items(tree_id)
186 self._process_tree_items(tree_items, cur_dir)
187
184
188 for dir in dirs:
185 self._paths[path] = [tree_id, tree_type]
189 if cur_dir:
186 self._stat_modes[path] = tree_mode
190 cur_dir = '/'.join((cur_dir, dir))
191 else:
192 cur_dir = dir
193 dir_id = None
194 for item, stat_, id_, type_ in tree_items:
195 if item == dir:
196 dir_id = id_
197 break
198 if dir_id:
199 if type_ != "tree":
200 raise CommitError('%s is not a directory' % cur_dir)
201 # update tree
202 tree_items = self._remote.tree_items(dir_id)
203 else:
204 raise CommitError('%s have not been found' % cur_dir)
205
206 # cache all items from the given traversed tree
207 self._process_tree_items(tree_items, cur_dir)
208
187
209 if path not in self._paths:
188 if path not in self._paths:
210 raise self.no_node_at_path(path)
189 raise self.no_node_at_path(path)
211
190
212 return self._paths[path]
191 return self._paths[path]
213
192
214 def _process_tree_items(self, items, cur_dir):
215 for item, stat_, id_, type_ in items:
216 if cur_dir:
217 name = '/'.join((cur_dir, item))
218 else:
219 name = item
220 self._paths[name] = [id_, type_]
221 self._stat_modes[name] = stat_
222
223 def _get_kind(self, path):
193 def _get_kind(self, path):
224 path_id, type_ = self._get_id_for_path(path)
194 tree_id, type_ = self._get_tree_id_for_path(path)
225 if type_ == 'blob':
195 if type_ == 'blob':
226 return NodeKind.FILE
196 return NodeKind.FILE
227 elif type_ == 'tree':
197 elif type_ == 'tree':
228 return NodeKind.DIR
198 return NodeKind.DIR
229 elif type == 'link':
199 elif type_ == 'link':
230 return NodeKind.SUBMODULE
200 return NodeKind.SUBMODULE
231 return None
201 return None
232
202
233 def _get_filectx(self, path):
203 def _get_filectx(self, path):
234 path = self._fix_path(path)
204 path = self._fix_path(path)
235 if self._get_kind(path) != NodeKind.FILE:
205 if self._get_kind(path) != NodeKind.FILE:
236 raise CommitError(
206 raise CommitError(
237 "File does not exist for commit %s at '%s'" % (self.raw_id, path))
207 "File does not exist for commit %s at '%s'" % (self.raw_id, path))
238 return path
208 return path
239
209
240 def _get_file_nodes(self):
210 def _get_file_nodes(self):
241 return chain(*(t[2] for t in self.walk()))
211 return chain(*(t[2] for t in self.walk()))
242
212
243 @LazyProperty
213 @LazyProperty
244 def parents(self):
214 def parents(self):
245 """
215 """
246 Returns list of parent commits.
216 Returns list of parent commits.
247 """
217 """
248 parent_ids = self._remote.commit_attribute(
218 parent_ids = self._remote.parents(self.id)
249 self.id, self._parents_property)
250 return self._make_commits(parent_ids)
219 return self._make_commits(parent_ids)
251
220
252 @LazyProperty
221 @LazyProperty
253 def children(self):
222 def children(self):
254 """
223 """
255 Returns list of child commits.
224 Returns list of child commits.
256 """
225 """
257 rev_filter = settings.GIT_REV_FILTER
226 rev_filter = settings.GIT_REV_FILTER
258 output, __ = self.repository.run_git_command(
227 output, __ = self.repository.run_git_command(
259 ['rev-list', '--children'] + rev_filter)
228 ['rev-list', '--children'] + rev_filter)
260
229
261 child_ids = []
230 child_ids = []
262 pat = re.compile(r'^%s' % self.raw_id)
231 pat = re.compile(r'^%s' % self.raw_id)
263 for l in output.splitlines():
232 for l in output.splitlines():
264 if pat.match(l):
233 if pat.match(l):
265 found_ids = l.split(' ')[1:]
234 found_ids = l.split(' ')[1:]
266 child_ids.extend(found_ids)
235 child_ids.extend(found_ids)
267 return self._make_commits(child_ids)
236 return self._make_commits(child_ids)
268
237
269 def _make_commits(self, commit_ids, pre_load=None):
238 def _make_commits(self, commit_ids):
270 return [
239 def commit_maker(_commit_id):
271 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load,
240 return self.repository.get_commit(commit_id=commit_id)
272 translate_tag=False)
241
273 for commit_id in commit_ids]
242 return [commit_maker(commit_id) for commit_id in commit_ids]
274
243
275 def get_file_mode(self, path):
244 def get_file_mode(self, path):
276 """
245 """
277 Returns stat mode of the file at the given `path`.
246 Returns stat mode of the file at the given `path`.
278 """
247 """
279 path = safe_str(path)
248 path = safe_str(path)
280 # ensure path is traversed
249 # ensure path is traversed
281 self._get_id_for_path(path)
250 self._get_tree_id_for_path(path)
282 return self._stat_modes[path]
251 return self._stat_modes[path]
283
252
284 def is_link(self, path):
253 def is_link(self, path):
285 return stat.S_ISLNK(self.get_file_mode(path))
254 return stat.S_ISLNK(self.get_file_mode(path))
286
255
287 def get_file_content(self, path):
256 def get_file_content(self, path):
288 """
257 """
289 Returns content of the file at given `path`.
258 Returns content of the file at given `path`.
290 """
259 """
291 id_, _ = self._get_id_for_path(path)
260 tree_id, _ = self._get_tree_id_for_path(path)
292 return self._remote.blob_as_pretty_string(id_)
261 return self._remote.blob_as_pretty_string(tree_id)
293
262
294 def get_file_size(self, path):
263 def get_file_size(self, path):
295 """
264 """
296 Returns size of the file at given `path`.
265 Returns size of the file at given `path`.
297 """
266 """
298 id_, _ = self._get_id_for_path(path)
267 tree_id, _ = self._get_tree_id_for_path(path)
299 return self._remote.blob_raw_length(id_)
268 return self._remote.blob_raw_length(tree_id)
300
269
301 def get_path_history(self, path, limit=None, pre_load=None):
270 def get_path_history(self, path, limit=None, pre_load=None):
302 """
271 """
303 Returns history of file as reversed list of `GitCommit` objects for
272 Returns history of file as reversed list of `GitCommit` objects for
304 which file at given `path` has been modified.
273 which file at given `path` has been modified.
305
274
306 TODO: This function now uses an underlying 'git' command which works
275 TODO: This function now uses an underlying 'git' command which works
307 quickly but ideally we should replace with an algorithm.
276 quickly but ideally we should replace with an algorithm.
308 """
277 """
309 self._get_filectx(path)
278 self._get_filectx(path)
310 f_path = safe_str(path)
279 f_path = safe_str(path)
311
280
312 # optimize for n==1, rev-list is much faster for that use-case
281 # optimize for n==1, rev-list is much faster for that use-case
313 if limit == 1:
282 if limit == 1:
314 cmd = ['rev-list', '-1', self.raw_id, '--', f_path]
283 cmd = ['rev-list', '-1', self.raw_id, '--', f_path]
315 else:
284 else:
316 cmd = ['log']
285 cmd = ['log']
317 if limit:
286 if limit:
318 cmd.extend(['-n', str(safe_int(limit, 0))])
287 cmd.extend(['-n', str(safe_int(limit, 0))])
319 cmd.extend(['--pretty=format: %H', '-s', self.raw_id, '--', f_path])
288 cmd.extend(['--pretty=format: %H', '-s', self.raw_id, '--', f_path])
320
289
321 output, __ = self.repository.run_git_command(cmd)
290 output, __ = self.repository.run_git_command(cmd)
322 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
291 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
323
292
324 return [
293 return [
325 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
294 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
326 for commit_id in commit_ids]
295 for commit_id in commit_ids]
327
296
328 def get_file_annotate(self, path, pre_load=None):
297 def get_file_annotate(self, path, pre_load=None):
329 """
298 """
330 Returns a generator of four element tuples with
299 Returns a generator of four element tuples with
331 lineno, commit_id, commit lazy loader and line
300 lineno, commit_id, commit lazy loader and line
332
301
333 TODO: This function now uses os underlying 'git' command which is
302 TODO: This function now uses os underlying 'git' command which is
334 generally not good. Should be replaced with algorithm iterating
303 generally not good. Should be replaced with algorithm iterating
335 commits.
304 commits.
336 """
305 """
337 cmd = ['blame', '-l', '--root', '-r', self.raw_id, '--', path]
306 cmd = ['blame', '-l', '--root', '-r', self.raw_id, '--', path]
338 # -l ==> outputs long shas (and we need all 40 characters)
307 # -l ==> outputs long shas (and we need all 40 characters)
339 # --root ==> doesn't put '^' character for bounderies
308 # --root ==> doesn't put '^' character for bounderies
340 # -r commit_id ==> blames for the given commit
309 # -r commit_id ==> blames for the given commit
341 output, __ = self.repository.run_git_command(cmd)
310 output, __ = self.repository.run_git_command(cmd)
342
311
343 for i, blame_line in enumerate(output.split('\n')[:-1]):
312 for i, blame_line in enumerate(output.split('\n')[:-1]):
344 line_no = i + 1
313 line_no = i + 1
345 commit_id, line = re.split(r' ', blame_line, 1)
314 commit_id, line = re.split(r' ', blame_line, 1)
346 yield (
315 yield (
347 line_no, commit_id,
316 line_no, commit_id,
348 lambda: self.repository.get_commit(commit_id=commit_id,
317 lambda: self.repository.get_commit(commit_id=commit_id,
349 pre_load=pre_load),
318 pre_load=pre_load),
350 line)
319 line)
351
320
352 def get_nodes(self, path):
321 def get_nodes(self, path):
322
353 if self._get_kind(path) != NodeKind.DIR:
323 if self._get_kind(path) != NodeKind.DIR:
354 raise CommitError(
324 raise CommitError(
355 "Directory does not exist for commit %s at '%s'" % (self.raw_id, path))
325 "Directory does not exist for commit %s at '%s'" % (self.raw_id, path))
356 path = self._fix_path(path)
326 path = self._fix_path(path)
357 id_, _ = self._get_id_for_path(path)
327
358 tree_id = self._remote[id_]['id']
328 tree_id, _ = self._get_tree_id_for_path(path)
329
359 dirnodes = []
330 dirnodes = []
360 filenodes = []
331 filenodes = []
361 alias = self.repository.alias
332
333 # extracted tree ID gives us our files...
362 for name, stat_, id_, type_ in self._remote.tree_items(tree_id):
334 for name, stat_, id_, type_ in self._remote.tree_items(tree_id):
363 if type_ == 'link':
335 if type_ == 'link':
364 url = self._get_submodule_url('/'.join((path, name)))
336 url = self._get_submodule_url('/'.join((path, name)))
365 dirnodes.append(SubModuleNode(
337 dirnodes.append(SubModuleNode(
366 name, url=url, commit=id_, alias=alias))
338 name, url=url, commit=id_, alias=self.repository.alias))
367 continue
339 continue
368
340
369 if path != '':
341 if path != '':
370 obj_path = '/'.join((path, name))
342 obj_path = '/'.join((path, name))
371 else:
343 else:
372 obj_path = name
344 obj_path = name
373 if obj_path not in self._stat_modes:
345 if obj_path not in self._stat_modes:
374 self._stat_modes[obj_path] = stat_
346 self._stat_modes[obj_path] = stat_
375
347
376 if type_ == 'tree':
348 if type_ == 'tree':
377 dirnodes.append(DirNode(obj_path, commit=self))
349 dirnodes.append(DirNode(obj_path, commit=self))
378 elif type_ == 'blob':
350 elif type_ == 'blob':
379 filenodes.append(FileNode(obj_path, commit=self, mode=stat_))
351 filenodes.append(FileNode(obj_path, commit=self, mode=stat_))
380 else:
352 else:
381 raise CommitError(
353 raise CommitError(
382 "Requested object should be Tree or Blob, is %s", type_)
354 "Requested object should be Tree or Blob, is %s", type_)
383
355
384 nodes = dirnodes + filenodes
356 nodes = dirnodes + filenodes
385 for node in nodes:
357 for node in nodes:
386 if node.path not in self.nodes:
358 if node.path not in self.nodes:
387 self.nodes[node.path] = node
359 self.nodes[node.path] = node
388 nodes.sort()
360 nodes.sort()
389 return nodes
361 return nodes
390
362
391 def get_node(self, path, pre_load=None):
363 def get_node(self, path, pre_load=None):
392 if isinstance(path, unicode):
364 if isinstance(path, unicode):
393 path = path.encode('utf-8')
365 path = path.encode('utf-8')
394 path = self._fix_path(path)
366 path = self._fix_path(path)
395 if path not in self.nodes:
367 if path not in self.nodes:
396 try:
368 try:
397 id_, type_ = self._get_id_for_path(path)
369 tree_id, type_ = self._get_tree_id_for_path(path)
398 except CommitError:
370 except CommitError:
399 raise NodeDoesNotExistError(
371 raise NodeDoesNotExistError(
400 "Cannot find one of parents' directories for a given "
372 "Cannot find one of parents' directories for a given "
401 "path: %s" % path)
373 "path: %s" % path)
402
374
403 if type_ == 'link':
375 if type_ == 'link':
404 url = self._get_submodule_url(path)
376 url = self._get_submodule_url(path)
405 node = SubModuleNode(path, url=url, commit=id_,
377 node = SubModuleNode(path, url=url, commit=tree_id,
406 alias=self.repository.alias)
378 alias=self.repository.alias)
407 elif type_ == 'tree':
379 elif type_ == 'tree':
408 if path == '':
380 if path == '':
409 node = RootNode(commit=self)
381 node = RootNode(commit=self)
410 else:
382 else:
411 node = DirNode(path, commit=self)
383 node = DirNode(path, commit=self)
412 elif type_ == 'blob':
384 elif type_ == 'blob':
413 node = FileNode(path, commit=self, pre_load=pre_load)
385 node = FileNode(path, commit=self, pre_load=pre_load)
386 self._stat_modes[path] = node.mode
414 else:
387 else:
415 raise self.no_node_at_path(path)
388 raise self.no_node_at_path(path)
416
389
417 # cache node
390 # cache node
418 self.nodes[path] = node
391 self.nodes[path] = node
392
419 return self.nodes[path]
393 return self.nodes[path]
420
394
421 def get_largefile_node(self, path):
395 def get_largefile_node(self, path):
422 id_, _ = self._get_id_for_path(path)
396 tree_id, _ = self._get_tree_id_for_path(path)
423 pointer_spec = self._remote.is_large_file(id_)
397 pointer_spec = self._remote.is_large_file(tree_id)
424
398
425 if pointer_spec:
399 if pointer_spec:
426 # content of that file regular FileNode is the hash of largefile
400 # content of that file regular FileNode is the hash of largefile
427 file_id = pointer_spec.get('oid_hash')
401 file_id = pointer_spec.get('oid_hash')
428 if self._remote.in_largefiles_store(file_id):
402 if self._remote.in_largefiles_store(file_id):
429 lf_path = self._remote.store_path(file_id)
403 lf_path = self._remote.store_path(file_id)
430 return LargeFileNode(lf_path, commit=self, org_path=path)
404 return LargeFileNode(lf_path, commit=self, org_path=path)
431
405
432 @LazyProperty
406 @LazyProperty
433 def affected_files(self):
407 def affected_files(self):
434 """
408 """
435 Gets a fast accessible file changes for given commit
409 Gets a fast accessible file changes for given commit
436 """
410 """
437 added, modified, deleted = self._changes_cache
411 added, modified, deleted = self._changes_cache
438 return list(added.union(modified).union(deleted))
412 return list(added.union(modified).union(deleted))
439
413
440 @LazyProperty
414 @LazyProperty
441 def _changes_cache(self):
415 def _changes_cache(self):
442 added = set()
416 added = set()
443 modified = set()
417 modified = set()
444 deleted = set()
418 deleted = set()
445 _r = self._remote
419 _r = self._remote
446
420
447 parents = self.parents
421 parents = self.parents
448 if not self.parents:
422 if not self.parents:
449 parents = [base.EmptyCommit()]
423 parents = [base.EmptyCommit()]
450 for parent in parents:
424 for parent in parents:
451 if isinstance(parent, base.EmptyCommit):
425 if isinstance(parent, base.EmptyCommit):
452 oid = None
426 oid = None
453 else:
427 else:
454 oid = parent.raw_id
428 oid = parent.raw_id
455 changes = _r.tree_changes(oid, self.raw_id)
429 changes = _r.tree_changes(oid, self.raw_id)
456 for (oldpath, newpath), (_, _), (_, _) in changes:
430 for (oldpath, newpath), (_, _), (_, _) in changes:
457 if newpath and oldpath:
431 if newpath and oldpath:
458 modified.add(newpath)
432 modified.add(newpath)
459 elif newpath and not oldpath:
433 elif newpath and not oldpath:
460 added.add(newpath)
434 added.add(newpath)
461 elif not newpath and oldpath:
435 elif not newpath and oldpath:
462 deleted.add(oldpath)
436 deleted.add(oldpath)
463 return added, modified, deleted
437 return added, modified, deleted
464
438
465 def _get_paths_for_status(self, status):
439 def _get_paths_for_status(self, status):
466 """
440 """
467 Returns sorted list of paths for given ``status``.
441 Returns sorted list of paths for given ``status``.
468
442
469 :param status: one of: *added*, *modified* or *deleted*
443 :param status: one of: *added*, *modified* or *deleted*
470 """
444 """
471 added, modified, deleted = self._changes_cache
445 added, modified, deleted = self._changes_cache
472 return sorted({
446 return sorted({
473 'added': list(added),
447 'added': list(added),
474 'modified': list(modified),
448 'modified': list(modified),
475 'deleted': list(deleted)}[status]
449 'deleted': list(deleted)}[status]
476 )
450 )
477
451
478 @LazyProperty
452 @LazyProperty
479 def added(self):
453 def added(self):
480 """
454 """
481 Returns list of added ``FileNode`` objects.
455 Returns list of added ``FileNode`` objects.
482 """
456 """
483 if not self.parents:
457 if not self.parents:
484 return list(self._get_file_nodes())
458 return list(self._get_file_nodes())
485 return AddedFileNodesGenerator(
459 return AddedFileNodesGenerator(
486 [n for n in self._get_paths_for_status('added')], self)
460 [n for n in self._get_paths_for_status('added')], self)
487
461
488 @LazyProperty
462 @LazyProperty
489 def changed(self):
463 def changed(self):
490 """
464 """
491 Returns list of modified ``FileNode`` objects.
465 Returns list of modified ``FileNode`` objects.
492 """
466 """
493 if not self.parents:
467 if not self.parents:
494 return []
468 return []
495 return ChangedFileNodesGenerator(
469 return ChangedFileNodesGenerator(
496 [n for n in self._get_paths_for_status('modified')], self)
470 [n for n in self._get_paths_for_status('modified')], self)
497
471
498 @LazyProperty
472 @LazyProperty
499 def removed(self):
473 def removed(self):
500 """
474 """
501 Returns list of removed ``FileNode`` objects.
475 Returns list of removed ``FileNode`` objects.
502 """
476 """
503 if not self.parents:
477 if not self.parents:
504 return []
478 return []
505 return RemovedFileNodesGenerator(
479 return RemovedFileNodesGenerator(
506 [n for n in self._get_paths_for_status('deleted')], self)
480 [n for n in self._get_paths_for_status('deleted')], self)
507
481
508 def _get_submodule_url(self, submodule_path):
482 def _get_submodule_url(self, submodule_path):
509 git_modules_path = '.gitmodules'
483 git_modules_path = '.gitmodules'
510
484
511 if self._submodules is None:
485 if self._submodules is None:
512 self._submodules = {}
486 self._submodules = {}
513
487
514 try:
488 try:
515 submodules_node = self.get_node(git_modules_path)
489 submodules_node = self.get_node(git_modules_path)
516 except NodeDoesNotExistError:
490 except NodeDoesNotExistError:
517 return None
491 return None
518
492
519 content = submodules_node.content
493 content = submodules_node.content
520
494
521 # ConfigParser fails if there are whitespaces
495 # ConfigParser fails if there are whitespaces
522 content = '\n'.join(l.strip() for l in content.split('\n'))
496 content = '\n'.join(l.strip() for l in content.split('\n'))
523
497
524 parser = configparser.ConfigParser()
498 parser = configparser.ConfigParser()
525 parser.readfp(StringIO(content))
499 parser.readfp(StringIO(content))
526
500
527 for section in parser.sections():
501 for section in parser.sections():
528 path = parser.get(section, 'path')
502 path = parser.get(section, 'path')
529 url = parser.get(section, 'url')
503 url = parser.get(section, 'url')
530 if path and url:
504 if path and url:
531 self._submodules[path.strip('/')] = url
505 self._submodules[path.strip('/')] = url
532
506
533 return self._submodules.get(submodule_path.strip('/'))
507 return self._submodules.get(submodule_path.strip('/'))
@@ -1,1037 +1,1021 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT repository module
22 GIT repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import re
27 import re
28 import time
29
28
30 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
31 from zope.cachedescriptors.property import CachedProperty
32
30
33 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
34 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
35 utcdate_fromtimestamp, makedate, date_astimestamp)
33 utcdate_fromtimestamp, makedate, date_astimestamp)
36 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.utils2 import CachedProperty
37 from rhodecode.lib.vcs import connection, path as vcspath
36 from rhodecode.lib.vcs import connection, path as vcspath
38 from rhodecode.lib.vcs.backends.base import (
37 from rhodecode.lib.vcs.backends.base import (
39 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
40 MergeFailureReason, Reference)
39 MergeFailureReason, Reference)
41 from rhodecode.lib.vcs.backends.git.commit import GitCommit
40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
42 from rhodecode.lib.vcs.backends.git.diff import GitDiff
41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
43 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
44 from rhodecode.lib.vcs.exceptions import (
43 from rhodecode.lib.vcs.exceptions import (
45 CommitDoesNotExistError, EmptyRepositoryError,
44 CommitDoesNotExistError, EmptyRepositoryError,
46 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
47
46
48
47
49 SHA_PATTERN = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')
48 SHA_PATTERN = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')
50
49
51 log = logging.getLogger(__name__)
50 log = logging.getLogger(__name__)
52
51
53
52
54 class GitRepository(BaseRepository):
53 class GitRepository(BaseRepository):
55 """
54 """
56 Git repository backend.
55 Git repository backend.
57 """
56 """
58 DEFAULT_BRANCH_NAME = 'master'
57 DEFAULT_BRANCH_NAME = 'master'
59
58
60 contact = BaseRepository.DEFAULT_CONTACT
59 contact = BaseRepository.DEFAULT_CONTACT
61
60
62 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 def __init__(self, repo_path, config=None, create=False, src_url=None,
63 do_workspace_checkout=False, with_wire=None, bare=False):
62 do_workspace_checkout=False, with_wire=None, bare=False):
64
63
65 self.path = safe_str(os.path.abspath(repo_path))
64 self.path = safe_str(os.path.abspath(repo_path))
66 self.config = config if config else self.get_default_config()
65 self.config = config if config else self.get_default_config()
67 self.with_wire = with_wire
66 self.with_wire = with_wire
68
67
69 self._init_repo(create, src_url, do_workspace_checkout, bare)
68 self._init_repo(create, src_url, do_workspace_checkout, bare)
70
69
71 # caches
70 # caches
72 self._commit_ids = {}
71 self._commit_ids = {}
73
72
74 # dependent that trigger re-computation of commit_ids
75 self._commit_ids_ver = 0
76
77 @LazyProperty
73 @LazyProperty
78 def _remote(self):
74 def _remote(self):
79 return connection.Git(self.path, self.config, with_wire=self.with_wire)
75 return connection.Git(self.path, self.config, with_wire=self.with_wire)
80
76
81 @LazyProperty
77 @LazyProperty
82 def bare(self):
78 def bare(self):
83 return self._remote.bare()
79 return self._remote.bare()
84
80
85 @LazyProperty
81 @LazyProperty
86 def head(self):
82 def head(self):
87 return self._remote.head()
83 return self._remote.head()
88
84
89 @CachedProperty('_commit_ids_ver')
85 @CachedProperty
90 def commit_ids(self):
86 def commit_ids(self):
91 """
87 """
92 Returns list of commit ids, in ascending order. Being lazy
88 Returns list of commit ids, in ascending order. Being lazy
93 attribute allows external tools to inject commit ids from cache.
89 attribute allows external tools to inject commit ids from cache.
94 """
90 """
95 commit_ids = self._get_all_commit_ids()
91 commit_ids = self._get_all_commit_ids()
96 self._rebuild_cache(commit_ids)
92 self._rebuild_cache(commit_ids)
97 return commit_ids
93 return commit_ids
98
94
99 def _rebuild_cache(self, commit_ids):
95 def _rebuild_cache(self, commit_ids):
100 self._commit_ids = dict((commit_id, index)
96 self._commit_ids = dict((commit_id, index)
101 for index, commit_id in enumerate(commit_ids))
97 for index, commit_id in enumerate(commit_ids))
102
98
103 def run_git_command(self, cmd, **opts):
99 def run_git_command(self, cmd, **opts):
104 """
100 """
105 Runs given ``cmd`` as git command and returns tuple
101 Runs given ``cmd`` as git command and returns tuple
106 (stdout, stderr).
102 (stdout, stderr).
107
103
108 :param cmd: git command to be executed
104 :param cmd: git command to be executed
109 :param opts: env options to pass into Subprocess command
105 :param opts: env options to pass into Subprocess command
110 """
106 """
111 if not isinstance(cmd, list):
107 if not isinstance(cmd, list):
112 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
108 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
113
109
114 skip_stderr_log = opts.pop('skip_stderr_log', False)
110 skip_stderr_log = opts.pop('skip_stderr_log', False)
115 out, err = self._remote.run_git_command(cmd, **opts)
111 out, err = self._remote.run_git_command(cmd, **opts)
116 if err and not skip_stderr_log:
112 if err and not skip_stderr_log:
117 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
113 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
118 return out, err
114 return out, err
119
115
120 @staticmethod
116 @staticmethod
121 def check_url(url, config):
117 def check_url(url, config):
122 """
118 """
123 Function will check given url and try to verify if it's a valid
119 Function will check given url and try to verify if it's a valid
124 link. Sometimes it may happened that git will issue basic
120 link. Sometimes it may happened that git will issue basic
125 auth request that can cause whole API to hang when used from python
121 auth request that can cause whole API to hang when used from python
126 or other external calls.
122 or other external calls.
127
123
128 On failures it'll raise urllib2.HTTPError, exception is also thrown
124 On failures it'll raise urllib2.HTTPError, exception is also thrown
129 when the return code is non 200
125 when the return code is non 200
130 """
126 """
131 # check first if it's not an url
127 # check first if it's not an url
132 if os.path.isdir(url) or url.startswith('file:'):
128 if os.path.isdir(url) or url.startswith('file:'):
133 return True
129 return True
134
130
135 if '+' in url.split('://', 1)[0]:
131 if '+' in url.split('://', 1)[0]:
136 url = url.split('+', 1)[1]
132 url = url.split('+', 1)[1]
137
133
138 # Request the _remote to verify the url
134 # Request the _remote to verify the url
139 return connection.Git.check_url(url, config.serialize())
135 return connection.Git.check_url(url, config.serialize())
140
136
141 @staticmethod
137 @staticmethod
142 def is_valid_repository(path):
138 def is_valid_repository(path):
143 if os.path.isdir(os.path.join(path, '.git')):
139 if os.path.isdir(os.path.join(path, '.git')):
144 return True
140 return True
145 # check case of bare repository
141 # check case of bare repository
146 try:
142 try:
147 GitRepository(path)
143 GitRepository(path)
148 return True
144 return True
149 except VCSError:
145 except VCSError:
150 pass
146 pass
151 return False
147 return False
152
148
153 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
149 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
154 bare=False):
150 bare=False):
155 if create and os.path.exists(self.path):
151 if create and os.path.exists(self.path):
156 raise RepositoryError(
152 raise RepositoryError(
157 "Cannot create repository at %s, location already exist"
153 "Cannot create repository at %s, location already exist"
158 % self.path)
154 % self.path)
159
155
160 if bare and do_workspace_checkout:
156 if bare and do_workspace_checkout:
161 raise RepositoryError("Cannot update a bare repository")
157 raise RepositoryError("Cannot update a bare repository")
162 try:
158 try:
163
159
164 if src_url:
160 if src_url:
165 # check URL before any actions
161 # check URL before any actions
166 GitRepository.check_url(src_url, self.config)
162 GitRepository.check_url(src_url, self.config)
167
163
168 if create:
164 if create:
169 os.makedirs(self.path, mode=0o755)
165 os.makedirs(self.path, mode=0o755)
170
166
171 if bare:
167 if bare:
172 self._remote.init_bare()
168 self._remote.init_bare()
173 else:
169 else:
174 self._remote.init()
170 self._remote.init()
175
171
176 if src_url and bare:
172 if src_url and bare:
177 # bare repository only allows a fetch and checkout is not allowed
173 # bare repository only allows a fetch and checkout is not allowed
178 self.fetch(src_url, commit_ids=None)
174 self.fetch(src_url, commit_ids=None)
179 elif src_url:
175 elif src_url:
180 self.pull(src_url, commit_ids=None,
176 self.pull(src_url, commit_ids=None,
181 update_after=do_workspace_checkout)
177 update_after=do_workspace_checkout)
182
178
183 else:
179 else:
184 if not self._remote.assert_correct_path():
180 if not self._remote.assert_correct_path():
185 raise RepositoryError(
181 raise RepositoryError(
186 'Path "%s" does not contain a Git repository' %
182 'Path "%s" does not contain a Git repository' %
187 (self.path,))
183 (self.path,))
188
184
189 # TODO: johbo: check if we have to translate the OSError here
185 # TODO: johbo: check if we have to translate the OSError here
190 except OSError as err:
186 except OSError as err:
191 raise RepositoryError(err)
187 raise RepositoryError(err)
192
188
193 def _get_all_commit_ids(self, filters=None):
189 def _get_all_commit_ids(self):
190 return self._remote.get_all_commit_ids()
191
192 def _get_commit_ids(self, filters=None):
194 # we must check if this repo is not empty, since later command
193 # we must check if this repo is not empty, since later command
195 # fails if it is. And it's cheaper to ask than throw the subprocess
194 # fails if it is. And it's cheaper to ask than throw the subprocess
196 # errors
195 # errors
197
196
198 head = self._remote.head(show_exc=False)
197 head = self._remote.head(show_exc=False)
198
199 if not head:
199 if not head:
200 return []
200 return []
201
201
202 rev_filter = ['--branches', '--tags']
202 rev_filter = ['--branches', '--tags']
203 extra_filter = []
203 extra_filter = []
204
204
205 if filters:
205 if filters:
206 if filters.get('since'):
206 if filters.get('since'):
207 extra_filter.append('--since=%s' % (filters['since']))
207 extra_filter.append('--since=%s' % (filters['since']))
208 if filters.get('until'):
208 if filters.get('until'):
209 extra_filter.append('--until=%s' % (filters['until']))
209 extra_filter.append('--until=%s' % (filters['until']))
210 if filters.get('branch_name'):
210 if filters.get('branch_name'):
211 rev_filter = ['--tags']
211 rev_filter = []
212 extra_filter.append(filters['branch_name'])
212 extra_filter.append(filters['branch_name'])
213 rev_filter.extend(extra_filter)
213 rev_filter.extend(extra_filter)
214
214
215 # if filters.get('start') or filters.get('end'):
215 # if filters.get('start') or filters.get('end'):
216 # # skip is offset, max-count is limit
216 # # skip is offset, max-count is limit
217 # if filters.get('start'):
217 # if filters.get('start'):
218 # extra_filter += ' --skip=%s' % filters['start']
218 # extra_filter += ' --skip=%s' % filters['start']
219 # if filters.get('end'):
219 # if filters.get('end'):
220 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
220 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
221
221
222 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
222 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
223 try:
223 try:
224 output, __ = self.run_git_command(cmd)
224 output, __ = self.run_git_command(cmd)
225 except RepositoryError:
225 except RepositoryError:
226 # Can be raised for empty repositories
226 # Can be raised for empty repositories
227 return []
227 return []
228 return output.splitlines()
228 return output.splitlines()
229
229
def _lookup_commit(self, commit_id_or_idx, translate_tag=True):
    """
    Resolve a commit index, numeric string, symbolic name (tip/HEAD) or
    sha-like string into a full commit id.

    :raises CommitDoesNotExistError: when the input cannot be resolved.
    """
    def is_null(value):
        # true only when the value is made up entirely of zeros (null sha)
        return len(value) == commit_id_or_idx.count('0')

    # symbolic "latest commit" aliases
    if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
        return self.commit_ids[-1]

    commit_missing_err = "Commit {} does not exist for `{}`".format(
        *map(safe_str, [commit_id_or_idx, self.name]))

    is_bstr = isinstance(commit_id_or_idx, (str, unicode))
    looks_like_index = (
        (is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
        or isinstance(commit_id_or_idx, int)
        or is_null(commit_id_or_idx))

    if looks_like_index:
        # numeric input is an index into the ordered commit id list
        try:
            commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
        except Exception:
            raise CommitDoesNotExistError(commit_missing_err)

    elif is_bstr:
        # Need to call remote to translate id for tagging scenario
        try:
            remote_data = self._remote.get_object(commit_id_or_idx)
            commit_id_or_idx = remote_data["commit_id"]
        except (CommitDoesNotExistError,):
            raise CommitDoesNotExistError(commit_missing_err)

    # Ensure we return full id
    if not SHA_PATTERN.match(str(commit_id_or_idx)):
        raise CommitDoesNotExistError(
            "Given commit id %s not recognized" % commit_id_or_idx)
    return commit_id_or_idx
273
260
def get_hook_location(self):
    """
    Return the absolute path of the directory where hooks are stored.

    Bare repositories keep hooks directly under ``<path>/hooks``;
    repositories with a working copy keep them in ``<path>/.git/hooks``.
    """
    if self.bare:
        return os.path.join(self.path, 'hooks')
    return os.path.join(self.path, '.git', 'hooks')
282
269
@LazyProperty
def last_change(self):
    """
    Returns last change made on this repository as
    `datetime.datetime` object.
    """
    try:
        return self.get_commit().date
    except RepositoryError:
        # empty/broken repo: fall back to filesystem modification time
        tz_offset = makedate()[1]
        return utcdate_fromtimestamp(self._get_fs_mtime(), tz_offset)
294
281
295 def _get_fs_mtime(self):
282 def _get_fs_mtime(self):
296 idx_loc = '' if self.bare else '.git'
283 idx_loc = '' if self.bare else '.git'
297 # fallback to filesystem
284 # fallback to filesystem
298 in_path = os.path.join(self.path, idx_loc, "index")
285 in_path = os.path.join(self.path, idx_loc, "index")
299 he_path = os.path.join(self.path, idx_loc, "HEAD")
286 he_path = os.path.join(self.path, idx_loc, "HEAD")
300 if os.path.exists(in_path):
287 if os.path.exists(in_path):
301 return os.stat(in_path).st_mtime
288 return os.stat(in_path).st_mtime
302 else:
289 else:
303 return os.stat(he_path).st_mtime
290 return os.stat(he_path).st_mtime
304
291
@LazyProperty
def description(self):
    """Repository description as stored by git, or the default one."""
    stored = self._remote.get_description()
    return safe_unicode(stored or self.DEFAULT_DESCRIPTION)
309
296
def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
    """
    Return an ``OrderedDict`` mapping ref name -> sha for every ref whose
    name starts with ``prefix``, sorted by name.

    :param prefix: filter refs by this leading string, e.g. 'refs/heads/'
    :param reverse: sort names in descending order when True
    :param strip_prefix: drop ``prefix`` from the returned names
    """
    if self.is_empty():
        return OrderedDict()

    entries = []
    for ref, sha in self._refs.items():
        if not ref.startswith(prefix):
            continue
        name = ref[len(prefix):] if strip_prefix else ref
        entries.append((safe_unicode(name), sha))

    def get_name(entry):
        return entry[0]

    return OrderedDict(sorted(entries, key=get_name, reverse=reverse))
326
313
327 def _get_branches(self):
314 def _get_branches(self):
328 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
315 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
329
316
@CachedProperty
def branches(self):
    """Cached branch name -> sha mapping; invalidated on ref changes."""
    return self._get_branches()
333
320
@CachedProperty
def branches_closed(self):
    """Git has no closed-branch concept; always an empty mapping."""
    return {}
337
324
@CachedProperty
def bookmarks(self):
    """Git has no bookmarks (Mercurial concept); always empty."""
    return {}
341
328
@CachedProperty
def branches_all(self):
    """Combined view of open and closed branches."""
    combined = {}
    combined.update(self.branches)
    combined.update(self.branches_closed)
    return combined
348
335
@CachedProperty
def tags(self):
    """Cached tag name -> sha mapping; invalidated on ref changes."""
    return self._get_tags()
352
339
353 def _get_tags(self):
340 def _get_tags(self):
354 return self._get_refs_entries(
341 return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
355 prefix='refs/tags/', strip_prefix=True, reverse=True)
356
342
def tag(self, name, user, commit_id=None, message=None, date=None,
        **kwargs):
    # TODO: fix this method to apply annotated tags correct with message
    """
    Creates and returns a tag for the given ``commit_id``.

    :param name: name for new tag
    :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
    :param commit_id: commit id for which new tag would be created
    :param message: message of the tag's commit
    :param date: date of tag's commit

    :raises TagAlreadyExistError: if tag with same name already exists
    """
    # NOTE(review): removed a leftover debug `print self._refs` statement
    # that the libgit2-backend change accidentally introduced here.
    if name in self.tags:
        raise TagAlreadyExistError("Tag %s already exists" % name)
    commit = self.get_commit(commit_id=commit_id)
    message = message or "Added tag %s for commit %s" % (name, commit.raw_id)

    self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)

    # drop cached refs/tags so the new tag becomes visible
    self._invalidate_prop_cache('tags')
    self._invalidate_prop_cache('_refs')

    return commit
381
369
def remove_tag(self, name, user, message=None, date=None):
    """
    Removes tag with the given ``name``.

    :param name: name of the tag to be removed
    :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
    :param message: message of the tag's removal commit
    :param date: date of tag's removal commit

    :raises TagDoesNotExistError: if tag with given name does not exists
    """
    if name not in self.tags:
        raise TagDoesNotExistError("Tag %s does not exist" % name)

    self._remote.tag_remove(name)
    # drop cached refs/tags so the removal becomes visible
    self._invalidate_prop_cache('tags')
    self._invalidate_prop_cache('_refs')
387
404 def _get_refs(self):
388 def _get_refs(self):
405 return self._remote.get_refs()
389 return self._remote.get_refs()
406
390
@CachedProperty
def _refs(self):
    """Cached ref mapping; invalidated whenever refs are mutated."""
    return self._get_refs()
410
394
411 @property
395 @property
412 def _ref_tree(self):
396 def _ref_tree(self):
413 node = tree = {}
397 node = tree = {}
414 for ref, sha in self._refs.iteritems():
398 for ref, sha in self._refs.iteritems():
415 path = ref.split('/')
399 path = ref.split('/')
416 for bit in path[:-1]:
400 for bit in path[:-1]:
417 node = node.setdefault(bit, {})
401 node = node.setdefault(bit, {})
418 node[path[-1]] = sha
402 node[path[-1]] = sha
419 node = tree
403 node = tree
420 return tree
404 return tree
421
405
def get_remote_ref(self, ref_name):
    """Resolve ``ref_name`` under refs/remotes/origin/; None when absent."""
    ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
    try:
        return self._refs[ref_key]
    except Exception:
        return
428
412
def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=True):
    """
    Returns `GitCommit` object representing commit from git repository
    at the given `commit_id` or head (most recent commit) if None given.
    """
    if self.is_empty():
        raise EmptyRepositoryError("There are no commits yet")

    if commit_id is not None:
        self._validate_commit_id(commit_id)
        try:
            # we have cached idx, use it without contacting the remote
            cached_idx = self._commit_ids[commit_id]
            return GitCommit(self, commit_id, cached_idx, pre_load=pre_load)
        except KeyError:
            pass

    elif commit_idx is not None:
        self._validate_commit_idx(commit_idx)
        try:
            _commit_id = self.commit_ids[commit_idx]
            if commit_idx < 0:
                # normalize negative index to its positive position
                commit_idx = self.commit_ids.index(_commit_id)
            return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
        except IndexError:
            commit_id = commit_idx
    else:
        commit_id = "tip"

    if translate_tag:
        # resolve symbolic names/short ids/tags into a full commit id
        commit_id = self._lookup_commit(commit_id)

    try:
        idx = self._commit_ids[commit_id]
    except KeyError:
        idx = -1

    return GitCommit(self, commit_id, idx, pre_load=pre_load)
472
451
def get_commits(
        self, start_id=None, end_id=None, start_date=None, end_date=None,
        branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
    """
    Returns generator of `GitCommit` objects from start to end (both
    are inclusive), in ascending date order.

    :param start_id: None, str(commit_id)
    :param end_id: None, str(commit_id)
    :param start_date: if specified, commits with commit date less than
      ``start_date`` would be filtered out from returned set
    :param end_date: if specified, commits with commit date greater than
      ``end_date`` would be filtered out from returned set
    :param branch_name: if specified, commits not reachable from given
      branch would be filtered out from returned set
    :param show_hidden: Show hidden commits such as obsolete or hidden from
      Mercurial evolve
    :raise BranchDoesNotExistError: If given `branch_name` does not
        exist.
    :raise CommitDoesNotExistError: If commits for given `start` or
      `end` could not be found.
    """
    if self.is_empty():
        raise EmptyRepositoryError("There are no commits yet")

    self._validate_branch_name(branch_name)

    if start_id is not None:
        self._validate_commit_id(start_id)
    if end_id is not None:
        self._validate_commit_id(end_id)

    start_raw_id = self._lookup_commit(start_id)
    start_pos = self._commit_ids[start_raw_id] if start_id else None
    end_raw_id = self._lookup_commit(end_id)
    end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None

    if None not in [start_id, end_id] and start_pos > end_pos:
        raise RepositoryError(
            "Start commit '%s' cannot be after end commit '%s'" %
            (start_id, end_id))

    if end_pos is not None:
        # slice upper bound is exclusive; bump to make the end inclusive
        end_pos += 1

    filter_ = []
    if branch_name:
        filter_.append({'branch_name': branch_name})
    if start_date and not end_date:
        filter_.append({'since': start_date})
    if end_date and not start_date:
        filter_.append({'until': end_date})
    if start_date and end_date:
        filter_.append({'since': start_date})
        filter_.append({'until': end_date})

    if filter_:
        revfilters = {
            'branch_name': branch_name,
            'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
            'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
            'start': start_pos,
            'end': end_pos,
        }
        commit_ids = self._get_commit_ids(filters=revfilters)
    else:
        commit_ids = self.commit_ids

    if start_pos or end_pos:
        commit_ids = commit_ids[start_pos: end_pos]

    return CollectionGenerator(self, commit_ids, pre_load=pre_load,
                               translate_tag=translate_tags)
558
531
def get_diff(
        self, commit1, commit2, path='', ignore_whitespace=False,
        context=3, path1=None):
    """
    Returns (git like) *diff*, as plain text. Shows changes introduced by
    ``commit2`` since ``commit1``.

    :param commit1: Entry point from which diff is shown. Can be
      ``self.EMPTY_COMMIT`` - in this case, patch showing all
      the changes since empty state of the repository until ``commit2``
    :param commit2: Until which commits changes should be shown.
    :param ignore_whitespace: If set to ``True``, would not show whitespace
      changes. Defaults to ``False``.
    :param context: How many lines before/after changed lines should be
      shown. Defaults to ``3``.
    """
    self._validate_diff_commits(commit1, commit2)
    if path1 is not None and path1 != path:
        raise ValueError("Diff of two different paths not supported.")

    flags = [
        '-U%s' % context, '--full-index', '--binary', '-p',
        '-M', '--abbrev=40']
    if ignore_whitespace:
        flags.append('-w')

    if commit1 == self.EMPTY_COMMIT:
        # no baseline commit: `show` prints the whole patch for commit2
        cmd = ['show'] + flags + [commit2.raw_id]
    else:
        cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]

    if path:
        cmd.extend(['--', path])

    stdout, __ = self.run_git_command(cmd)

    if commit1 == self.EMPTY_COMMIT:
        # `show` prefixes the patch with commit metadata; strip everything
        # up to the first 'diff' line so output matches `diff` format
        lines = stdout.splitlines()
        skip = 0
        for line in lines:
            if line.startswith('diff'):
                break
            skip += 1
        # Append new line just like 'diff' command do
        stdout = '\n'.join(lines[skip:]) + '\n'
    return GitDiff(stdout)
578 return GitDiff(stdout)
606
579
def strip(self, commit_id, branch_name):
    """
    Remove ``commit_id`` from ``branch_name`` by resetting the branch ref
    to the commit's first parent. Returns the new number of commits.

    :raises Exception: when the commit is a merge commit (cannot reset).
    """
    commit = self.get_commit(commit_id=commit_id)
    if commit.merge:
        raise Exception('Cannot reset to merge commit')

    # parent is going to be the new head now
    commit = commit.parents[0]
    self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)

    # clear cached properties
    self._invalidate_prop_cache('commit_ids')
    self._invalidate_prop_cache('_refs')
    self._invalidate_prop_cache('branches')

    return len(self.commit_ids)
619
595
def get_common_ancestor(self, commit_id1, commit_id2, repo2):
    """
    Return the id of the common ancestor of the two commits, where
    ``commit_id2`` lives in ``repo2`` (may be this same repository).
    """
    if commit_id1 == commit_id2:
        return commit_id1

    if self != repo2:
        # cross-repo: derive the ancestor from the missing revisions
        commits = self._remote.get_missing_revs(
            commit_id1, commit_id2, repo2.path)
        if commits:
            commit = repo2.get_commit(commits[-1])
            ancestor_id = commit.parents[0].raw_id if commit.parents else None
        else:
            # no commits from other repo, ancestor_id is the commit_id2
            ancestor_id = commit_id2
    else:
        # same repo: git can compute the merge base directly
        output, __ = self.run_git_command(
            ['merge-base', commit_id1, commit_id2])
        ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]

    return ancestor_id
642
618
def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
    """
    Return the list of `GitCommit` objects reachable from ``commit_id2``
    but not from ``commit_id1``, in ascending order. ``repo2`` may be a
    different repository.
    """
    repo1 = self

    if commit_id1 == commit_id2:
        return []

    if repo1 != repo2:
        # cross-repo comparison goes through the remote backend
        missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
                                                    repo2.path)
        return [
            repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
            for commit_id in reversed(missing_ids)]

    # same repo: ask git log for the range commit_id1..commit_id2
    output, __ = repo1.run_git_command(
        ['log', '--reverse', '--pretty=format: %H', '-s',
         '%s..%s' % (commit_id1, commit_id2)])
    return [
        repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
        for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
664
640
@LazyProperty
def in_memory_commit(self):
    """
    Returns ``GitInMemoryCommit`` object for this repository.
    """
    return GitInMemoryCommit(self)
671
647
def pull(self, url, commit_ids=None, update_after=False):
    """
    Pull changes from external location. Pull is different in GIT
    that fetch since it's doing a checkout

    :param commit_ids: Optional. Can be set to a list of commit ids
       which shall be pulled from the other repository.
    """
    refs = None
    if commit_ids is not None:
        # translate wanted commit ids into the refs that point at them
        remote_refs = self._remote.get_remote_refs(url)
        refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
    self._remote.pull(url, refs=refs, update_after=update_after)
    self._remote.invalidate_vcs_cache()
686
662
def fetch(self, url, commit_ids=None):
    """
    Fetch all git objects from external location.
    """
    self._remote.sync_fetch(url, refs=commit_ids)
    self._remote.invalidate_vcs_cache()
693
669
def push(self, url):
    """Push all refs to the external location at ``url``."""
    self._remote.sync_push(url, refs=None)
697
673
def set_refs(self, ref_name, commit_id):
    """Point ``ref_name`` at ``commit_id`` and drop the cached ref map."""
    self._remote.set_refs(ref_name, commit_id)
    self._invalidate_prop_cache('_refs')
700
677
def remove_ref(self, ref_name):
    """Delete ``ref_name`` and drop the cached ref map."""
    self._remote.remove_ref(ref_name)
    self._invalidate_prop_cache('_refs')
703
681
704 def _update_server_info(self):
682 def _update_server_info(self):
705 """
683 """
706 runs gits update-server-info command in this repo instance
684 runs gits update-server-info command in this repo instance
707 """
685 """
708 self._remote.update_server_info()
686 self._remote.update_server_info()
709
687
def _current_branch(self):
    """
    Return the name of the current branch.

    It only works for non bare repositories (i.e. repositories with a
    working copy)
    """
    if self.bare:
        raise RepositoryError('Bare git repos do not have active branches')

    if self.is_empty():
        return None

    stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
    return stdout.strip()
725
703
def _checkout(self, branch_name, create=False, force=False):
    """
    Checkout a branch in the working directory.

    It tries to create the branch if create is True, failing if the branch
    already exists.

    It only works for non bare repositories (i.e. repositories with a
    working copy)
    """
    if self.bare:
        raise RepositoryError('Cannot checkout branches in a bare git repo')

    cmd = ['checkout']
    if force:
        cmd.append('-f')
    if create:
        cmd.append('-b')
    cmd.append(branch_name)
    # git checkout writes progress to stderr, so don't treat it as failure
    self.run_git_command(cmd, fail_on_stderr=False)
746
724
725 def _create_branch(self, branch_name, commit_id):
726 """
727 creates a branch in a GIT repo
728 """
729 self._remote.create_branch(branch_name, commit_id)
730
747 def _identify(self):
731 def _identify(self):
748 """
732 """
749 Return the current state of the working directory.
733 Return the current state of the working directory.
750 """
734 """
751 if self.bare:
735 if self.bare:
752 raise RepositoryError('Bare git repos do not have active branches')
736 raise RepositoryError('Bare git repos do not have active branches')
753
737
754 if self.is_empty():
738 if self.is_empty():
755 return None
739 return None
756
740
757 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
741 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
758 return stdout.strip()
742 return stdout.strip()
759
743
760 def _local_clone(self, clone_path, branch_name, source_branch=None):
744 def _local_clone(self, clone_path, branch_name, source_branch=None):
761 """
745 """
762 Create a local clone of the current repo.
746 Create a local clone of the current repo.
763 """
747 """
764 # N.B.(skreft): the --branch option is required as otherwise the shallow
748 # N.B.(skreft): the --branch option is required as otherwise the shallow
765 # clone will only fetch the active branch.
749 # clone will only fetch the active branch.
766 cmd = ['clone', '--branch', branch_name,
750 cmd = ['clone', '--branch', branch_name,
767 self.path, os.path.abspath(clone_path)]
751 self.path, os.path.abspath(clone_path)]
768
752
769 self.run_git_command(cmd, fail_on_stderr=False)
753 self.run_git_command(cmd, fail_on_stderr=False)
770
754
771 # if we get the different source branch, make sure we also fetch it for
755 # if we get the different source branch, make sure we also fetch it for
772 # merge conditions
756 # merge conditions
773 if source_branch and source_branch != branch_name:
757 if source_branch and source_branch != branch_name:
774 # check if the ref exists.
758 # check if the ref exists.
775 shadow_repo = GitRepository(os.path.abspath(clone_path))
759 shadow_repo = GitRepository(os.path.abspath(clone_path))
776 if shadow_repo.get_remote_ref(source_branch):
760 if shadow_repo.get_remote_ref(source_branch):
777 cmd = ['fetch', self.path, source_branch]
761 cmd = ['fetch', self.path, source_branch]
778 self.run_git_command(cmd, fail_on_stderr=False)
762 self.run_git_command(cmd, fail_on_stderr=False)
779
763
780 def _local_fetch(self, repository_path, branch_name, use_origin=False):
764 def _local_fetch(self, repository_path, branch_name, use_origin=False):
781 """
765 """
782 Fetch a branch from a local repository.
766 Fetch a branch from a local repository.
783 """
767 """
784 repository_path = os.path.abspath(repository_path)
768 repository_path = os.path.abspath(repository_path)
785 if repository_path == self.path:
769 if repository_path == self.path:
786 raise ValueError('Cannot fetch from the same repository')
770 raise ValueError('Cannot fetch from the same repository')
787
771
788 if use_origin:
772 if use_origin:
789 branch_name = '+{branch}:refs/heads/{branch}'.format(
773 branch_name = '+{branch}:refs/heads/{branch}'.format(
790 branch=branch_name)
774 branch=branch_name)
791
775
792 cmd = ['fetch', '--no-tags', '--update-head-ok',
776 cmd = ['fetch', '--no-tags', '--update-head-ok',
793 repository_path, branch_name]
777 repository_path, branch_name]
794 self.run_git_command(cmd, fail_on_stderr=False)
778 self.run_git_command(cmd, fail_on_stderr=False)
795
779
796 def _local_reset(self, branch_name):
780 def _local_reset(self, branch_name):
797 branch_name = '{}'.format(branch_name)
781 branch_name = '{}'.format(branch_name)
798 cmd = ['reset', '--hard', branch_name, '--']
782 cmd = ['reset', '--hard', branch_name, '--']
799 self.run_git_command(cmd, fail_on_stderr=False)
783 self.run_git_command(cmd, fail_on_stderr=False)
800
784
801 def _last_fetch_heads(self):
785 def _last_fetch_heads(self):
802 """
786 """
803 Return the last fetched heads that need merging.
787 Return the last fetched heads that need merging.
804
788
805 The algorithm is defined at
789 The algorithm is defined at
806 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
790 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
807 """
791 """
808 if not self.bare:
792 if not self.bare:
809 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
793 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
810 else:
794 else:
811 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
795 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
812
796
813 heads = []
797 heads = []
814 with open(fetch_heads_path) as f:
798 with open(fetch_heads_path) as f:
815 for line in f:
799 for line in f:
816 if ' not-for-merge ' in line:
800 if ' not-for-merge ' in line:
817 continue
801 continue
818 line = re.sub('\t.*', '', line, flags=re.DOTALL)
802 line = re.sub('\t.*', '', line, flags=re.DOTALL)
819 heads.append(line)
803 heads.append(line)
820
804
821 return heads
805 return heads
822
806
823 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
807 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
824 return GitRepository(shadow_repository_path)
808 return GitRepository(shadow_repository_path)
825
809
826 def _local_pull(self, repository_path, branch_name, ff_only=True):
810 def _local_pull(self, repository_path, branch_name, ff_only=True):
827 """
811 """
828 Pull a branch from a local repository.
812 Pull a branch from a local repository.
829 """
813 """
830 if self.bare:
814 if self.bare:
831 raise RepositoryError('Cannot pull into a bare git repository')
815 raise RepositoryError('Cannot pull into a bare git repository')
832 # N.B.(skreft): The --ff-only option is to make sure this is a
816 # N.B.(skreft): The --ff-only option is to make sure this is a
833 # fast-forward (i.e., we are only pulling new changes and there are no
817 # fast-forward (i.e., we are only pulling new changes and there are no
834 # conflicts with our current branch)
818 # conflicts with our current branch)
835 # Additionally, that option needs to go before --no-tags, otherwise git
819 # Additionally, that option needs to go before --no-tags, otherwise git
836 # pull complains about it being an unknown flag.
820 # pull complains about it being an unknown flag.
837 cmd = ['pull']
821 cmd = ['pull']
838 if ff_only:
822 if ff_only:
839 cmd.append('--ff-only')
823 cmd.append('--ff-only')
840 cmd.extend(['--no-tags', repository_path, branch_name])
824 cmd.extend(['--no-tags', repository_path, branch_name])
841 self.run_git_command(cmd, fail_on_stderr=False)
825 self.run_git_command(cmd, fail_on_stderr=False)
842
826
843 def _local_merge(self, merge_message, user_name, user_email, heads):
827 def _local_merge(self, merge_message, user_name, user_email, heads):
844 """
828 """
845 Merge the given head into the checked out branch.
829 Merge the given head into the checked out branch.
846
830
847 It will force a merge commit.
831 It will force a merge commit.
848
832
849 Currently it raises an error if the repo is empty, as it is not possible
833 Currently it raises an error if the repo is empty, as it is not possible
850 to create a merge commit in an empty repo.
834 to create a merge commit in an empty repo.
851
835
852 :param merge_message: The message to use for the merge commit.
836 :param merge_message: The message to use for the merge commit.
853 :param heads: the heads to merge.
837 :param heads: the heads to merge.
854 """
838 """
855 if self.bare:
839 if self.bare:
856 raise RepositoryError('Cannot merge into a bare git repository')
840 raise RepositoryError('Cannot merge into a bare git repository')
857
841
858 if not heads:
842 if not heads:
859 return
843 return
860
844
861 if self.is_empty():
845 if self.is_empty():
862 # TODO(skreft): do somehting more robust in this case.
846 # TODO(skreft): do somehting more robust in this case.
863 raise RepositoryError(
847 raise RepositoryError(
864 'Do not know how to merge into empty repositories yet')
848 'Do not know how to merge into empty repositories yet')
865
849
866 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
850 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
867 # commit message. We also specify the user who is doing the merge.
851 # commit message. We also specify the user who is doing the merge.
868 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
852 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
869 '-c', 'user.email=%s' % safe_str(user_email),
853 '-c', 'user.email=%s' % safe_str(user_email),
870 'merge', '--no-ff', '-m', safe_str(merge_message)]
854 'merge', '--no-ff', '-m', safe_str(merge_message)]
871 cmd.extend(heads)
855 cmd.extend(heads)
872 try:
856 try:
873 output = self.run_git_command(cmd, fail_on_stderr=False)
857 output = self.run_git_command(cmd, fail_on_stderr=False)
874 except RepositoryError:
858 except RepositoryError:
875 # Cleanup any merge leftovers
859 # Cleanup any merge leftovers
876 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
860 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
877 raise
861 raise
878
862
879 def _local_push(
863 def _local_push(
880 self, source_branch, repository_path, target_branch,
864 self, source_branch, repository_path, target_branch,
881 enable_hooks=False, rc_scm_data=None):
865 enable_hooks=False, rc_scm_data=None):
882 """
866 """
883 Push the source_branch to the given repository and target_branch.
867 Push the source_branch to the given repository and target_branch.
884
868
885 Currently it if the target_branch is not master and the target repo is
869 Currently it if the target_branch is not master and the target repo is
886 empty, the push will work, but then GitRepository won't be able to find
870 empty, the push will work, but then GitRepository won't be able to find
887 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
871 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
888 pointing to master, which does not exist).
872 pointing to master, which does not exist).
889
873
890 It does not run the hooks in the target repo.
874 It does not run the hooks in the target repo.
891 """
875 """
892 # TODO(skreft): deal with the case in which the target repo is empty,
876 # TODO(skreft): deal with the case in which the target repo is empty,
893 # and the target_branch is not master.
877 # and the target_branch is not master.
894 target_repo = GitRepository(repository_path)
878 target_repo = GitRepository(repository_path)
895 if (not target_repo.bare and
879 if (not target_repo.bare and
896 target_repo._current_branch() == target_branch):
880 target_repo._current_branch() == target_branch):
897 # Git prevents pushing to the checked out branch, so simulate it by
881 # Git prevents pushing to the checked out branch, so simulate it by
898 # pulling into the target repository.
882 # pulling into the target repository.
899 target_repo._local_pull(self.path, source_branch)
883 target_repo._local_pull(self.path, source_branch)
900 else:
884 else:
901 cmd = ['push', os.path.abspath(repository_path),
885 cmd = ['push', os.path.abspath(repository_path),
902 '%s:%s' % (source_branch, target_branch)]
886 '%s:%s' % (source_branch, target_branch)]
903 gitenv = {}
887 gitenv = {}
904 if rc_scm_data:
888 if rc_scm_data:
905 gitenv.update({'RC_SCM_DATA': rc_scm_data})
889 gitenv.update({'RC_SCM_DATA': rc_scm_data})
906
890
907 if not enable_hooks:
891 if not enable_hooks:
908 gitenv['RC_SKIP_HOOKS'] = '1'
892 gitenv['RC_SKIP_HOOKS'] = '1'
909 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
893 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
910
894
911 def _get_new_pr_branch(self, source_branch, target_branch):
895 def _get_new_pr_branch(self, source_branch, target_branch):
912 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
896 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
913 pr_branches = []
897 pr_branches = []
914 for branch in self.branches:
898 for branch in self.branches:
915 if branch.startswith(prefix):
899 if branch.startswith(prefix):
916 pr_branches.append(int(branch[len(prefix):]))
900 pr_branches.append(int(branch[len(prefix):]))
917
901
918 if not pr_branches:
902 if not pr_branches:
919 branch_id = 0
903 branch_id = 0
920 else:
904 else:
921 branch_id = max(pr_branches) + 1
905 branch_id = max(pr_branches) + 1
922
906
923 return '%s%d' % (prefix, branch_id)
907 return '%s%d' % (prefix, branch_id)
924
908
925 def _maybe_prepare_merge_workspace(
909 def _maybe_prepare_merge_workspace(
926 self, repo_id, workspace_id, target_ref, source_ref):
910 self, repo_id, workspace_id, target_ref, source_ref):
927 shadow_repository_path = self._get_shadow_repository_path(
911 shadow_repository_path = self._get_shadow_repository_path(
928 repo_id, workspace_id)
912 repo_id, workspace_id)
929 if not os.path.exists(shadow_repository_path):
913 if not os.path.exists(shadow_repository_path):
930 self._local_clone(
914 self._local_clone(
931 shadow_repository_path, target_ref.name, source_ref.name)
915 shadow_repository_path, target_ref.name, source_ref.name)
932 log.debug(
916 log.debug(
933 'Prepared shadow repository in %s', shadow_repository_path)
917 'Prepared shadow repository in %s', shadow_repository_path)
934
918
935 return shadow_repository_path
919 return shadow_repository_path
936
920
937 def _merge_repo(self, repo_id, workspace_id, target_ref,
921 def _merge_repo(self, repo_id, workspace_id, target_ref,
938 source_repo, source_ref, merge_message,
922 source_repo, source_ref, merge_message,
939 merger_name, merger_email, dry_run=False,
923 merger_name, merger_email, dry_run=False,
940 use_rebase=False, close_branch=False):
924 use_rebase=False, close_branch=False):
941
925
942 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
926 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
943 'rebase' if use_rebase else 'merge', dry_run)
927 'rebase' if use_rebase else 'merge', dry_run)
944 if target_ref.commit_id != self.branches[target_ref.name]:
928 if target_ref.commit_id != self.branches[target_ref.name]:
945 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
929 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
946 target_ref.commit_id, self.branches[target_ref.name])
930 target_ref.commit_id, self.branches[target_ref.name])
947 return MergeResponse(
931 return MergeResponse(
948 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
932 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
949 metadata={'target_ref': target_ref})
933 metadata={'target_ref': target_ref})
950
934
951 shadow_repository_path = self._maybe_prepare_merge_workspace(
935 shadow_repository_path = self._maybe_prepare_merge_workspace(
952 repo_id, workspace_id, target_ref, source_ref)
936 repo_id, workspace_id, target_ref, source_ref)
953 shadow_repo = self._get_shadow_instance(shadow_repository_path)
937 shadow_repo = self._get_shadow_instance(shadow_repository_path)
954
938
955 # checkout source, if it's different. Otherwise we could not
939 # checkout source, if it's different. Otherwise we could not
956 # fetch proper commits for merge testing
940 # fetch proper commits for merge testing
957 if source_ref.name != target_ref.name:
941 if source_ref.name != target_ref.name:
958 if shadow_repo.get_remote_ref(source_ref.name):
942 if shadow_repo.get_remote_ref(source_ref.name):
959 shadow_repo._checkout(source_ref.name, force=True)
943 shadow_repo._checkout(source_ref.name, force=True)
960
944
961 # checkout target, and fetch changes
945 # checkout target, and fetch changes
962 shadow_repo._checkout(target_ref.name, force=True)
946 shadow_repo._checkout(target_ref.name, force=True)
963
947
964 # fetch/reset pull the target, in case it is changed
948 # fetch/reset pull the target, in case it is changed
965 # this handles even force changes
949 # this handles even force changes
966 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
950 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
967 shadow_repo._local_reset(target_ref.name)
951 shadow_repo._local_reset(target_ref.name)
968
952
969 # Need to reload repo to invalidate the cache, or otherwise we cannot
953 # Need to reload repo to invalidate the cache, or otherwise we cannot
970 # retrieve the last target commit.
954 # retrieve the last target commit.
971 shadow_repo = self._get_shadow_instance(shadow_repository_path)
955 shadow_repo = self._get_shadow_instance(shadow_repository_path)
972 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
956 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
973 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
957 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
974 target_ref, target_ref.commit_id,
958 target_ref, target_ref.commit_id,
975 shadow_repo.branches[target_ref.name])
959 shadow_repo.branches[target_ref.name])
976 return MergeResponse(
960 return MergeResponse(
977 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
961 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
978 metadata={'target_ref': target_ref})
962 metadata={'target_ref': target_ref})
979
963
980 # calculate new branch
964 # calculate new branch
981 pr_branch = shadow_repo._get_new_pr_branch(
965 pr_branch = shadow_repo._get_new_pr_branch(
982 source_ref.name, target_ref.name)
966 source_ref.name, target_ref.name)
983 log.debug('using pull-request merge branch: `%s`', pr_branch)
967 log.debug('using pull-request merge branch: `%s`', pr_branch)
984 # checkout to temp branch, and fetch changes
968 # checkout to temp branch, and fetch changes
985 shadow_repo._checkout(pr_branch, create=True)
969 shadow_repo._checkout(pr_branch, create=True)
986 try:
970 try:
987 shadow_repo._local_fetch(source_repo.path, source_ref.name)
971 shadow_repo._local_fetch(source_repo.path, source_ref.name)
988 except RepositoryError:
972 except RepositoryError:
989 log.exception('Failure when doing local fetch on '
973 log.exception('Failure when doing local fetch on '
990 'shadow repo: %s', shadow_repo)
974 'shadow repo: %s', shadow_repo)
991 return MergeResponse(
975 return MergeResponse(
992 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
976 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
993 metadata={'source_ref': source_ref})
977 metadata={'source_ref': source_ref})
994
978
995 merge_ref = None
979 merge_ref = None
996 merge_failure_reason = MergeFailureReason.NONE
980 merge_failure_reason = MergeFailureReason.NONE
997 metadata = {}
981 metadata = {}
998 try:
982 try:
999 shadow_repo._local_merge(merge_message, merger_name, merger_email,
983 shadow_repo._local_merge(merge_message, merger_name, merger_email,
1000 [source_ref.commit_id])
984 [source_ref.commit_id])
1001 merge_possible = True
985 merge_possible = True
1002
986
1003 # Need to reload repo to invalidate the cache, or otherwise we
987 # Need to reload repo to invalidate the cache, or otherwise we
1004 # cannot retrieve the merge commit.
988 # cannot retrieve the merge commit.
1005 shadow_repo = GitRepository(shadow_repository_path)
989 shadow_repo = GitRepository(shadow_repository_path)
1006 merge_commit_id = shadow_repo.branches[pr_branch]
990 merge_commit_id = shadow_repo.branches[pr_branch]
1007
991
1008 # Set a reference pointing to the merge commit. This reference may
992 # Set a reference pointing to the merge commit. This reference may
1009 # be used to easily identify the last successful merge commit in
993 # be used to easily identify the last successful merge commit in
1010 # the shadow repository.
994 # the shadow repository.
1011 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
995 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
1012 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
996 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
1013 except RepositoryError:
997 except RepositoryError:
1014 log.exception('Failure when doing local merge on git shadow repo')
998 log.exception('Failure when doing local merge on git shadow repo')
1015 merge_possible = False
999 merge_possible = False
1016 merge_failure_reason = MergeFailureReason.MERGE_FAILED
1000 merge_failure_reason = MergeFailureReason.MERGE_FAILED
1017
1001
1018 if merge_possible and not dry_run:
1002 if merge_possible and not dry_run:
1019 try:
1003 try:
1020 shadow_repo._local_push(
1004 shadow_repo._local_push(
1021 pr_branch, self.path, target_ref.name, enable_hooks=True,
1005 pr_branch, self.path, target_ref.name, enable_hooks=True,
1022 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1006 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1023 merge_succeeded = True
1007 merge_succeeded = True
1024 except RepositoryError:
1008 except RepositoryError:
1025 log.exception(
1009 log.exception(
1026 'Failure when doing local push from the shadow '
1010 'Failure when doing local push from the shadow '
1027 'repository to the target repository at %s.', self.path)
1011 'repository to the target repository at %s.', self.path)
1028 merge_succeeded = False
1012 merge_succeeded = False
1029 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1013 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1030 metadata['target'] = 'git shadow repo'
1014 metadata['target'] = 'git shadow repo'
1031 metadata['merge_commit'] = pr_branch
1015 metadata['merge_commit'] = pr_branch
1032 else:
1016 else:
1033 merge_succeeded = False
1017 merge_succeeded = False
1034
1018
1035 return MergeResponse(
1019 return MergeResponse(
1036 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1020 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1037 metadata=metadata)
1021 metadata=metadata)
@@ -1,380 +1,381 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG commit module
22 HG commit module
23 """
23 """
24
24
25 import os
25 import os
26
26
27 from zope.cachedescriptors.property import Lazy as LazyProperty
27 from zope.cachedescriptors.property import Lazy as LazyProperty
28
28
29 from rhodecode.lib.datelib import utcdate_fromtimestamp
29 from rhodecode.lib.datelib import utcdate_fromtimestamp
30 from rhodecode.lib.utils import safe_str, safe_unicode
30 from rhodecode.lib.utils import safe_str, safe_unicode
31 from rhodecode.lib.vcs import path as vcspath
31 from rhodecode.lib.vcs import path as vcspath
32 from rhodecode.lib.vcs.backends import base
32 from rhodecode.lib.vcs.backends import base
33 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
33 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
34 from rhodecode.lib.vcs.exceptions import CommitError
34 from rhodecode.lib.vcs.exceptions import CommitError
35 from rhodecode.lib.vcs.nodes import (
35 from rhodecode.lib.vcs.nodes import (
36 AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode,
36 AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode,
37 NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode,
37 NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode,
38 LargeFileNode, LARGEFILE_PREFIX)
38 LargeFileNode, LARGEFILE_PREFIX)
39 from rhodecode.lib.vcs.utils.paths import get_dirs_for_path
39 from rhodecode.lib.vcs.utils.paths import get_dirs_for_path
40
40
41
41
42 class MercurialCommit(base.BaseCommit):
42 class MercurialCommit(base.BaseCommit):
43 """
43 """
44 Represents state of the repository at the single commit.
44 Represents state of the repository at the single commit.
45 """
45 """
46
46
47 _filter_pre_load = [
47 _filter_pre_load = [
48 # git specific property not supported here
48 # git specific property not supported here
49 "_commit",
49 "_commit",
50 ]
50 ]
51
51
52 def __init__(self, repository, raw_id, idx, pre_load=None):
52 def __init__(self, repository, raw_id, idx, pre_load=None):
53 raw_id = safe_str(raw_id)
53 raw_id = safe_str(raw_id)
54
54
55 self.repository = repository
55 self.repository = repository
56 self._remote = repository._remote
56 self._remote = repository._remote
57
57
58 self.raw_id = raw_id
58 self.raw_id = raw_id
59 self.idx = idx
59 self.idx = idx
60
60
61 self._set_bulk_properties(pre_load)
61 self._set_bulk_properties(pre_load)
62
62
63 # caches
63 # caches
64 self.nodes = {}
64 self.nodes = {}
65
65
66 def _set_bulk_properties(self, pre_load):
66 def _set_bulk_properties(self, pre_load):
67 if not pre_load:
67 if not pre_load:
68 return
68 return
69 pre_load = [entry for entry in pre_load
69 pre_load = [entry for entry in pre_load
70 if entry not in self._filter_pre_load]
70 if entry not in self._filter_pre_load]
71 if not pre_load:
71 if not pre_load:
72 return
72 return
73
73
74 result = self._remote.bulk_request(self.idx, pre_load)
74 result = self._remote.bulk_request(self.idx, pre_load)
75 for attr, value in result.items():
75 for attr, value in result.items():
76 if attr in ["author", "branch", "message"]:
76 if attr in ["author", "branch", "message"]:
77 value = safe_unicode(value)
77 value = safe_unicode(value)
78 elif attr == "affected_files":
78 elif attr == "affected_files":
79 value = map(safe_unicode, value)
79 value = map(safe_unicode, value)
80 elif attr == "date":
80 elif attr == "date":
81 value = utcdate_fromtimestamp(*value)
81 value = utcdate_fromtimestamp(*value)
82 elif attr in ["children", "parents"]:
82 elif attr in ["children", "parents"]:
83 value = self._make_commits(value)
83 value = self._make_commits(value)
84 elif attr in ["phase"]:
84 elif attr in ["phase"]:
85 value = self._get_phase_text(value)
85 value = self._get_phase_text(value)
86 self.__dict__[attr] = value
86 self.__dict__[attr] = value
87
87
88 @LazyProperty
88 @LazyProperty
89 def tags(self):
89 def tags(self):
90 tags = [name for name, commit_id in self.repository.tags.iteritems()
90 tags = [name for name, commit_id in self.repository.tags.iteritems()
91 if commit_id == self.raw_id]
91 if commit_id == self.raw_id]
92 return tags
92 return tags
93
93
94 @LazyProperty
94 @LazyProperty
95 def branch(self):
95 def branch(self):
96 return safe_unicode(self._remote.ctx_branch(self.idx))
96 return safe_unicode(self._remote.ctx_branch(self.idx))
97
97
98 @LazyProperty
98 @LazyProperty
99 def bookmarks(self):
99 def bookmarks(self):
100 bookmarks = [
100 bookmarks = [
101 name for name, commit_id in self.repository.bookmarks.iteritems()
101 name for name, commit_id in self.repository.bookmarks.iteritems()
102 if commit_id == self.raw_id]
102 if commit_id == self.raw_id]
103 return bookmarks
103 return bookmarks
104
104
105 @LazyProperty
105 @LazyProperty
106 def message(self):
106 def message(self):
107 return safe_unicode(self._remote.ctx_description(self.idx))
107 return safe_unicode(self._remote.ctx_description(self.idx))
108
108
109 @LazyProperty
109 @LazyProperty
110 def committer(self):
110 def committer(self):
111 return safe_unicode(self.author)
111 return safe_unicode(self.author)
112
112
113 @LazyProperty
113 @LazyProperty
114 def author(self):
114 def author(self):
115 return safe_unicode(self._remote.ctx_user(self.idx))
115 return safe_unicode(self._remote.ctx_user(self.idx))
116
116
117 @LazyProperty
117 @LazyProperty
118 def date(self):
118 def date(self):
119 return utcdate_fromtimestamp(*self._remote.ctx_date(self.idx))
119 return utcdate_fromtimestamp(*self._remote.ctx_date(self.idx))
120
120
121 @LazyProperty
121 @LazyProperty
122 def status(self):
122 def status(self):
123 """
123 """
124 Returns modified, added, removed, deleted files for current commit
124 Returns modified, added, removed, deleted files for current commit
125 """
125 """
126 return self._remote.ctx_status(self.idx)
126 return self._remote.ctx_status(self.idx)
127
127
128 @LazyProperty
128 @LazyProperty
129 def _file_paths(self):
129 def _file_paths(self):
130 return self._remote.ctx_list(self.idx)
130 return self._remote.ctx_list(self.idx)
131
131
132 @LazyProperty
132 @LazyProperty
133 def _dir_paths(self):
133 def _dir_paths(self):
134 p = list(set(get_dirs_for_path(*self._file_paths)))
134 p = list(set(get_dirs_for_path(*self._file_paths)))
135 p.insert(0, '')
135 p.insert(0, '')
136 return p
136 return p
137
137
138 @LazyProperty
138 @LazyProperty
139 def _paths(self):
139 def _paths(self):
140 return self._dir_paths + self._file_paths
140 return self._dir_paths + self._file_paths
141
141
142 @LazyProperty
142 @LazyProperty
143 def id(self):
143 def id(self):
144 if self.last:
144 if self.last:
145 return u'tip'
145 return u'tip'
146 return self.short_id
146 return self.short_id
147
147
148 @LazyProperty
148 @LazyProperty
149 def short_id(self):
149 def short_id(self):
150 return self.raw_id[:12]
150 return self.raw_id[:12]
151
151
152 def _make_commits(self, indexes, pre_load=None):
152 def _make_commits(self, indexes, pre_load=None):
153 return [self.repository.get_commit(commit_idx=idx, pre_load=pre_load)
153 return [self.repository.get_commit(commit_idx=idx, pre_load=pre_load)
154 for idx in indexes if idx >= 0]
154 for idx in indexes if idx >= 0]
155
155
156 @LazyProperty
156 @LazyProperty
157 def parents(self):
157 def parents(self):
158 """
158 """
159 Returns list of parent commits.
159 Returns list of parent commits.
160 """
160 """
161 parents = self._remote.ctx_parents(self.idx)
161 parents = self._remote.ctx_parents(self.idx)
162 return self._make_commits(parents)
162 return self._make_commits(parents)
163
163
164 def _get_phase_text(self, phase_id):
164 def _get_phase_text(self, phase_id):
165 return {
165 return {
166 0: 'public',
166 0: 'public',
167 1: 'draft',
167 1: 'draft',
168 2: 'secret',
168 2: 'secret',
169 }.get(phase_id) or ''
169 }.get(phase_id) or ''
170
170
171 @LazyProperty
171 @LazyProperty
172 def phase(self):
172 def phase(self):
173 phase_id = self._remote.ctx_phase(self.idx)
173 phase_id = self._remote.ctx_phase(self.idx)
174 phase_text = self._get_phase_text(phase_id)
174 phase_text = self._get_phase_text(phase_id)
175
175
176 return safe_unicode(phase_text)
176 return safe_unicode(phase_text)
177
177
178 @LazyProperty
178 @LazyProperty
179 def obsolete(self):
179 def obsolete(self):
180 obsolete = self._remote.ctx_obsolete(self.idx)
180 obsolete = self._remote.ctx_obsolete(self.idx)
181 return obsolete
181 return obsolete
182
182
183 @LazyProperty
183 @LazyProperty
184 def hidden(self):
184 def hidden(self):
185 hidden = self._remote.ctx_hidden(self.idx)
185 hidden = self._remote.ctx_hidden(self.idx)
186 return hidden
186 return hidden
187
187
188 @LazyProperty
188 @LazyProperty
189 def children(self):
189 def children(self):
190 """
190 """
191 Returns list of child commits.
191 Returns list of child commits.
192 """
192 """
193 children = self._remote.ctx_children(self.idx)
193 children = self._remote.ctx_children(self.idx)
194 return self._make_commits(children)
194 return self._make_commits(children)
195
195
196 def _fix_path(self, path):
196 def _fix_path(self, path):
197 """
197 """
198 Mercurial keeps filenodes as str so we need to encode from unicode
198 Mercurial keeps filenodes as str so we need to encode from unicode
199 to str.
199 to str.
200 """
200 """
201 return safe_str(super(MercurialCommit, self)._fix_path(path))
201 return safe_str(super(MercurialCommit, self)._fix_path(path))
202
202
203 def _get_kind(self, path):
203 def _get_kind(self, path):
204 path = self._fix_path(path)
204 path = self._fix_path(path)
205 if path in self._file_paths:
205 if path in self._file_paths:
206 return NodeKind.FILE
206 return NodeKind.FILE
207 elif path in self._dir_paths:
207 elif path in self._dir_paths:
208 return NodeKind.DIR
208 return NodeKind.DIR
209 else:
209 else:
210 raise CommitError(
210 raise CommitError(
211 "Node does not exist at the given path '%s'" % (path, ))
211 "Node does not exist at the given path '%s'" % (path, ))
212
212
213 def _get_filectx(self, path):
213 def _get_filectx(self, path):
214 path = self._fix_path(path)
214 path = self._fix_path(path)
215 if self._get_kind(path) != NodeKind.FILE:
215 if self._get_kind(path) != NodeKind.FILE:
216 raise CommitError(
216 raise CommitError(
217 "File does not exist for idx %s at '%s'" % (self.raw_id, path))
217 "File does not exist for idx %s at '%s'" % (self.raw_id, path))
218 return path
218 return path
219
219
220 def get_file_mode(self, path):
220 def get_file_mode(self, path):
221 """
221 """
222 Returns stat mode of the file at the given ``path``.
222 Returns stat mode of the file at the given ``path``.
223 """
223 """
224 path = self._get_filectx(path)
224 path = self._get_filectx(path)
225 if 'x' in self._remote.fctx_flags(self.idx, path):
225 if 'x' in self._remote.fctx_flags(self.idx, path):
226 return base.FILEMODE_EXECUTABLE
226 return base.FILEMODE_EXECUTABLE
227 else:
227 else:
228 return base.FILEMODE_DEFAULT
228 return base.FILEMODE_DEFAULT
229
229
230 def is_link(self, path):
230 def is_link(self, path):
231 path = self._get_filectx(path)
231 path = self._get_filectx(path)
232 return 'l' in self._remote.fctx_flags(self.idx, path)
232 return 'l' in self._remote.fctx_flags(self.idx, path)
233
233
234 def get_file_content(self, path):
234 def get_file_content(self, path):
235 """
235 """
236 Returns content of the file at given ``path``.
236 Returns content of the file at given ``path``.
237 """
237 """
238 path = self._get_filectx(path)
238 path = self._get_filectx(path)
239 return self._remote.fctx_data(self.idx, path)
239 return self._remote.fctx_data(self.idx, path)
240
240
241 def get_file_size(self, path):
241 def get_file_size(self, path):
242 """
242 """
243 Returns size of the file at given ``path``.
243 Returns size of the file at given ``path``.
244 """
244 """
245 path = self._get_filectx(path)
245 path = self._get_filectx(path)
246 return self._remote.fctx_size(self.idx, path)
246 return self._remote.fctx_size(self.idx, path)
247
247
248 def get_path_history(self, path, limit=None, pre_load=None):
248 def get_path_history(self, path, limit=None, pre_load=None):
249 """
249 """
250 Returns history of file as reversed list of `MercurialCommit` objects
250 Returns history of file as reversed list of `MercurialCommit` objects
251 for which file at given ``path`` has been modified.
251 for which file at given ``path`` has been modified.
252 """
252 """
253 path = self._get_filectx(path)
253 path = self._get_filectx(path)
254 hist = self._remote.node_history(self.idx, path, limit)
254 hist = self._remote.node_history(self.idx, path, limit)
255 return [
255 return [
256 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
256 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
257 for commit_id in hist]
257 for commit_id in hist]
258
258
259 def get_file_annotate(self, path, pre_load=None):
259 def get_file_annotate(self, path, pre_load=None):
260 """
260 """
261 Returns a generator of four element tuples with
261 Returns a generator of four element tuples with
262 lineno, commit_id, commit lazy loader and line
262 lineno, commit_id, commit lazy loader and line
263 """
263 """
264 result = self._remote.fctx_annotate(self.idx, path)
264 result = self._remote.fctx_annotate(self.idx, path)
265
265
266 for ln_no, commit_id, content in result:
266 for ln_no, commit_id, content in result:
267 yield (
267 yield (
268 ln_no, commit_id,
268 ln_no, commit_id,
269 lambda: self.repository.get_commit(commit_id=commit_id,
269 lambda: self.repository.get_commit(commit_id=commit_id,
270 pre_load=pre_load),
270 pre_load=pre_load),
271 content)
271 content)
272
272
273 def get_nodes(self, path):
273 def get_nodes(self, path):
274 """
274 """
275 Returns combined ``DirNode`` and ``FileNode`` objects list representing
275 Returns combined ``DirNode`` and ``FileNode`` objects list representing
276 state of commit at the given ``path``. If node at the given ``path``
276 state of commit at the given ``path``. If node at the given ``path``
277 is not instance of ``DirNode``, CommitError would be raised.
277 is not instance of ``DirNode``, CommitError would be raised.
278 """
278 """
279
279
280 if self._get_kind(path) != NodeKind.DIR:
280 if self._get_kind(path) != NodeKind.DIR:
281 raise CommitError(
281 raise CommitError(
282 "Directory does not exist for idx %s at '%s'" % (self.raw_id, path))
282 "Directory does not exist for idx %s at '%s'" % (self.raw_id, path))
283 path = self._fix_path(path)
283 path = self._fix_path(path)
284
284
285 filenodes = [
285 filenodes = [
286 FileNode(f, commit=self) for f in self._file_paths
286 FileNode(f, commit=self) for f in self._file_paths
287 if os.path.dirname(f) == path]
287 if os.path.dirname(f) == path]
288 # TODO: johbo: Check if this can be done in a more obvious way
288 # TODO: johbo: Check if this can be done in a more obvious way
289 dirs = path == '' and '' or [
289 dirs = path == '' and '' or [
290 d for d in self._dir_paths
290 d for d in self._dir_paths
291 if d and vcspath.dirname(d) == path]
291 if d and vcspath.dirname(d) == path]
292 dirnodes = [
292 dirnodes = [
293 DirNode(d, commit=self) for d in dirs
293 DirNode(d, commit=self) for d in dirs
294 if os.path.dirname(d) == path]
294 if os.path.dirname(d) == path]
295
295
296 alias = self.repository.alias
296 alias = self.repository.alias
297 for k, vals in self._submodules.iteritems():
297 for k, vals in self._submodules.iteritems():
298 if vcspath.dirname(k) == path:
298 if vcspath.dirname(k) == path:
299 loc = vals[0]
299 loc = vals[0]
300 commit = vals[1]
300 commit = vals[1]
301 dirnodes.append(SubModuleNode(k, url=loc, commit=commit, alias=alias))
301 dirnodes.append(SubModuleNode(k, url=loc, commit=commit, alias=alias))
302
302 nodes = dirnodes + filenodes
303 nodes = dirnodes + filenodes
303 # cache nodes
304 for node in nodes:
304 for node in nodes:
305 if node.path not in self.nodes:
305 self.nodes[node.path] = node
306 self.nodes[node.path] = node
306 nodes.sort()
307 nodes.sort()
307
308
308 return nodes
309 return nodes
309
310
310 def get_node(self, path, pre_load=None):
311 def get_node(self, path, pre_load=None):
311 """
312 """
312 Returns `Node` object from the given `path`. If there is no node at
313 Returns `Node` object from the given `path`. If there is no node at
313 the given `path`, `NodeDoesNotExistError` would be raised.
314 the given `path`, `NodeDoesNotExistError` would be raised.
314 """
315 """
315 path = self._fix_path(path)
316 path = self._fix_path(path)
316
317
317 if path not in self.nodes:
318 if path not in self.nodes:
318 if path in self._file_paths:
319 if path in self._file_paths:
319 node = FileNode(path, commit=self, pre_load=pre_load)
320 node = FileNode(path, commit=self, pre_load=pre_load)
320 elif path in self._dir_paths:
321 elif path in self._dir_paths:
321 if path == '':
322 if path == '':
322 node = RootNode(commit=self)
323 node = RootNode(commit=self)
323 else:
324 else:
324 node = DirNode(path, commit=self)
325 node = DirNode(path, commit=self)
325 else:
326 else:
326 raise self.no_node_at_path(path)
327 raise self.no_node_at_path(path)
327
328
328 # cache node
329 # cache node
329 self.nodes[path] = node
330 self.nodes[path] = node
330 return self.nodes[path]
331 return self.nodes[path]
331
332
332 def get_largefile_node(self, path):
333 def get_largefile_node(self, path):
333
334
334 if self._remote.is_large_file(path):
335 if self._remote.is_large_file(path):
335 # content of that file regular FileNode is the hash of largefile
336 # content of that file regular FileNode is the hash of largefile
336 file_id = self.get_file_content(path).strip()
337 file_id = self.get_file_content(path).strip()
337
338
338 if self._remote.in_largefiles_store(file_id):
339 if self._remote.in_largefiles_store(file_id):
339 lf_path = self._remote.store_path(file_id)
340 lf_path = self._remote.store_path(file_id)
340 return LargeFileNode(lf_path, commit=self, org_path=path)
341 return LargeFileNode(lf_path, commit=self, org_path=path)
341 elif self._remote.in_user_cache(file_id):
342 elif self._remote.in_user_cache(file_id):
342 lf_path = self._remote.store_path(file_id)
343 lf_path = self._remote.store_path(file_id)
343 self._remote.link(file_id, path)
344 self._remote.link(file_id, path)
344 return LargeFileNode(lf_path, commit=self, org_path=path)
345 return LargeFileNode(lf_path, commit=self, org_path=path)
345
346
346 @LazyProperty
347 @LazyProperty
347 def _submodules(self):
348 def _submodules(self):
348 """
349 """
349 Returns a dictionary with submodule information from substate file
350 Returns a dictionary with submodule information from substate file
350 of hg repository.
351 of hg repository.
351 """
352 """
352 return self._remote.ctx_substate(self.idx)
353 return self._remote.ctx_substate(self.idx)
353
354
354 @LazyProperty
355 @LazyProperty
355 def affected_files(self):
356 def affected_files(self):
356 """
357 """
357 Gets a fast accessible file changes for given commit
358 Gets a fast accessible file changes for given commit
358 """
359 """
359 return self._remote.ctx_files(self.idx)
360 return self._remote.ctx_files(self.idx)
360
361
361 @property
362 @property
362 def added(self):
363 def added(self):
363 """
364 """
364 Returns list of added ``FileNode`` objects.
365 Returns list of added ``FileNode`` objects.
365 """
366 """
366 return AddedFileNodesGenerator([n for n in self.status[1]], self)
367 return AddedFileNodesGenerator([n for n in self.status[1]], self)
367
368
368 @property
369 @property
369 def changed(self):
370 def changed(self):
370 """
371 """
371 Returns list of modified ``FileNode`` objects.
372 Returns list of modified ``FileNode`` objects.
372 """
373 """
373 return ChangedFileNodesGenerator([n for n in self.status[0]], self)
374 return ChangedFileNodesGenerator([n for n in self.status[0]], self)
374
375
375 @property
376 @property
376 def removed(self):
377 def removed(self):
377 """
378 """
378 Returns list of removed ``FileNode`` objects.
379 Returns list of removed ``FileNode`` objects.
379 """
380 """
380 return RemovedFileNodesGenerator([n for n in self.status[2]], self)
381 return RemovedFileNodesGenerator([n for n in self.status[2]], self)
@@ -1,949 +1,946 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG repository module
22 HG repository module
23 """
23 """
24 import os
24 import os
25 import logging
25 import logging
26 import binascii
26 import binascii
27 import time
28 import urllib
27 import urllib
29
28
30 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
31 from zope.cachedescriptors.property import CachedProperty
32
30
33 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
34 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
35 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
36 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.utils2 import CachedProperty
37 from rhodecode.lib.vcs import connection, exceptions
36 from rhodecode.lib.vcs import connection, exceptions
38 from rhodecode.lib.vcs.backends.base import (
37 from rhodecode.lib.vcs.backends.base import (
39 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
40 MergeFailureReason, Reference, BasePathPermissionChecker)
39 MergeFailureReason, Reference, BasePathPermissionChecker)
41 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
40 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
42 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
41 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
43 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
42 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
44 from rhodecode.lib.vcs.exceptions import (
43 from rhodecode.lib.vcs.exceptions import (
45 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
44 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
46 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
45 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
47 from rhodecode.lib.vcs.compat import configparser
46 from rhodecode.lib.vcs.compat import configparser
48
47
49 hexlify = binascii.hexlify
48 hexlify = binascii.hexlify
50 nullid = "\0" * 20
49 nullid = "\0" * 20
51
50
52 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
53
52
54
53
55 class MercurialRepository(BaseRepository):
54 class MercurialRepository(BaseRepository):
56 """
55 """
57 Mercurial repository backend
56 Mercurial repository backend
58 """
57 """
59 DEFAULT_BRANCH_NAME = 'default'
58 DEFAULT_BRANCH_NAME = 'default'
60
59
61 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 do_workspace_checkout=False, with_wire=None, bare=False):
61 do_workspace_checkout=False, with_wire=None, bare=False):
63 """
62 """
64 Raises RepositoryError if repository could not be find at the given
63 Raises RepositoryError if repository could not be find at the given
65 ``repo_path``.
64 ``repo_path``.
66
65
67 :param repo_path: local path of the repository
66 :param repo_path: local path of the repository
68 :param config: config object containing the repo configuration
67 :param config: config object containing the repo configuration
69 :param create=False: if set to True, would try to create repository if
68 :param create=False: if set to True, would try to create repository if
70 it does not exist rather than raising exception
69 it does not exist rather than raising exception
71 :param src_url=None: would try to clone repository from given location
70 :param src_url=None: would try to clone repository from given location
72 :param do_workspace_checkout=False: sets update of working copy after
71 :param do_workspace_checkout=False: sets update of working copy after
73 making a clone
72 making a clone
74 :param bare: not used, compatible with other VCS
73 :param bare: not used, compatible with other VCS
75 """
74 """
76
75
77 self.path = safe_str(os.path.abspath(repo_path))
76 self.path = safe_str(os.path.abspath(repo_path))
78 # mercurial since 4.4.X requires certain configuration to be present
77 # mercurial since 4.4.X requires certain configuration to be present
79 # because sometimes we init the repos with config we need to meet
78 # because sometimes we init the repos with config we need to meet
80 # special requirements
79 # special requirements
81 self.config = config if config else self.get_default_config(
80 self.config = config if config else self.get_default_config(
82 default=[('extensions', 'largefiles', '1')])
81 default=[('extensions', 'largefiles', '1')])
83 self.with_wire = with_wire
82 self.with_wire = with_wire
84
83
85 self._init_repo(create, src_url, do_workspace_checkout)
84 self._init_repo(create, src_url, do_workspace_checkout)
86
85
87 # caches
86 # caches
88 self._commit_ids = {}
87 self._commit_ids = {}
89
88
90 # dependent that trigger re-computation of commit_ids
91 self._commit_ids_ver = 0
92
93 @LazyProperty
89 @LazyProperty
94 def _remote(self):
90 def _remote(self):
95 return connection.Hg(self.path, self.config, with_wire=self.with_wire)
91 return connection.Hg(self.path, self.config, with_wire=self.with_wire)
96
92
97 @CachedProperty('_commit_ids_ver')
93 @CachedProperty
98 def commit_ids(self):
94 def commit_ids(self):
99 """
95 """
100 Returns list of commit ids, in ascending order. Being lazy
96 Returns list of commit ids, in ascending order. Being lazy
101 attribute allows external tools to inject shas from cache.
97 attribute allows external tools to inject shas from cache.
102 """
98 """
103 commit_ids = self._get_all_commit_ids()
99 commit_ids = self._get_all_commit_ids()
104 self._rebuild_cache(commit_ids)
100 self._rebuild_cache(commit_ids)
105 return commit_ids
101 return commit_ids
106
102
107 def _rebuild_cache(self, commit_ids):
103 def _rebuild_cache(self, commit_ids):
108 self._commit_ids = dict((commit_id, index)
104 self._commit_ids = dict((commit_id, index)
109 for index, commit_id in enumerate(commit_ids))
105 for index, commit_id in enumerate(commit_ids))
110
106
111 @LazyProperty
107 @CachedProperty
112 def branches(self):
108 def branches(self):
113 return self._get_branches()
109 return self._get_branches()
114
110
115 @LazyProperty
111 @CachedProperty
116 def branches_closed(self):
112 def branches_closed(self):
117 return self._get_branches(active=False, closed=True)
113 return self._get_branches(active=False, closed=True)
118
114
119 @LazyProperty
115 @CachedProperty
120 def branches_all(self):
116 def branches_all(self):
121 all_branches = {}
117 all_branches = {}
122 all_branches.update(self.branches)
118 all_branches.update(self.branches)
123 all_branches.update(self.branches_closed)
119 all_branches.update(self.branches_closed)
124 return all_branches
120 return all_branches
125
121
126 def _get_branches(self, active=True, closed=False):
122 def _get_branches(self, active=True, closed=False):
127 """
123 """
128 Gets branches for this repository
124 Gets branches for this repository
129 Returns only not closed active branches by default
125 Returns only not closed active branches by default
130
126
131 :param active: return also active branches
127 :param active: return also active branches
132 :param closed: return also closed branches
128 :param closed: return also closed branches
133
129
134 """
130 """
135 if self.is_empty():
131 if self.is_empty():
136 return {}
132 return {}
137
133
138 def get_name(ctx):
134 def get_name(ctx):
139 return ctx[0]
135 return ctx[0]
140
136
141 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
137 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
142 self._remote.branches(active, closed).items()]
138 self._remote.branches(active, closed).items()]
143
139
144 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
140 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
145
141
146 @LazyProperty
142 @CachedProperty
147 def tags(self):
143 def tags(self):
148 """
144 """
149 Gets tags for this repository
145 Gets tags for this repository
150 """
146 """
151 return self._get_tags()
147 return self._get_tags()
152
148
153 def _get_tags(self):
149 def _get_tags(self):
154 if self.is_empty():
150 if self.is_empty():
155 return {}
151 return {}
156
152
157 def get_name(ctx):
153 def get_name(ctx):
158 return ctx[0]
154 return ctx[0]
159
155
160 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
156 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
161 self._remote.tags().items()]
157 self._remote.tags().items()]
162
158
163 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
159 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
164
160
165 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
161 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
166 """
162 """
167 Creates and returns a tag for the given ``commit_id``.
163 Creates and returns a tag for the given ``commit_id``.
168
164
169 :param name: name for new tag
165 :param name: name for new tag
170 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
166 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
171 :param commit_id: commit id for which new tag would be created
167 :param commit_id: commit id for which new tag would be created
172 :param message: message of the tag's commit
168 :param message: message of the tag's commit
173 :param date: date of tag's commit
169 :param date: date of tag's commit
174
170
175 :raises TagAlreadyExistError: if tag with same name already exists
171 :raises TagAlreadyExistError: if tag with same name already exists
176 """
172 """
177 if name in self.tags:
173 if name in self.tags:
178 raise TagAlreadyExistError("Tag %s already exists" % name)
174 raise TagAlreadyExistError("Tag %s already exists" % name)
179
175
180 commit = self.get_commit(commit_id=commit_id)
176 commit = self.get_commit(commit_id=commit_id)
181 local = kwargs.setdefault('local', False)
177 local = kwargs.setdefault('local', False)
182
178
183 if message is None:
179 if message is None:
184 message = "Added tag %s for commit %s" % (name, commit.short_id)
180 message = "Added tag %s for commit %s" % (name, commit.short_id)
185
181
186 date, tz = date_to_timestamp_plus_offset(date)
182 date, tz = date_to_timestamp_plus_offset(date)
187
183
188 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
184 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
189 self._remote.invalidate_vcs_cache()
185 self._remote.invalidate_vcs_cache()
190
186
191 # Reinitialize tags
187 # Reinitialize tags
192 self.tags = self._get_tags()
188 self.tags = self._get_tags()
193 tag_id = self.tags[name]
189 tag_id = self.tags[name]
194
190
195 return self.get_commit(commit_id=tag_id)
191 return self.get_commit(commit_id=tag_id)
196
192
197 def remove_tag(self, name, user, message=None, date=None):
193 def remove_tag(self, name, user, message=None, date=None):
198 """
194 """
199 Removes tag with the given `name`.
195 Removes tag with the given `name`.
200
196
201 :param name: name of the tag to be removed
197 :param name: name of the tag to be removed
202 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
198 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
203 :param message: message of the tag's removal commit
199 :param message: message of the tag's removal commit
204 :param date: date of tag's removal commit
200 :param date: date of tag's removal commit
205
201
206 :raises TagDoesNotExistError: if tag with given name does not exists
202 :raises TagDoesNotExistError: if tag with given name does not exists
207 """
203 """
208 if name not in self.tags:
204 if name not in self.tags:
209 raise TagDoesNotExistError("Tag %s does not exist" % name)
205 raise TagDoesNotExistError("Tag %s does not exist" % name)
210
206
211 if message is None:
207 if message is None:
212 message = "Removed tag %s" % name
208 message = "Removed tag %s" % name
213 local = False
209 local = False
214
210
215 date, tz = date_to_timestamp_plus_offset(date)
211 date, tz = date_to_timestamp_plus_offset(date)
216
212
217 self._remote.tag(name, nullid, message, local, user, date, tz)
213 self._remote.tag(name, nullid, message, local, user, date, tz)
218 self._remote.invalidate_vcs_cache()
214 self._remote.invalidate_vcs_cache()
219 self.tags = self._get_tags()
215 self.tags = self._get_tags()
220
216
221 @LazyProperty
217 @LazyProperty
222 def bookmarks(self):
218 def bookmarks(self):
223 """
219 """
224 Gets bookmarks for this repository
220 Gets bookmarks for this repository
225 """
221 """
226 return self._get_bookmarks()
222 return self._get_bookmarks()
227
223
228 def _get_bookmarks(self):
224 def _get_bookmarks(self):
229 if self.is_empty():
225 if self.is_empty():
230 return {}
226 return {}
231
227
232 def get_name(ctx):
228 def get_name(ctx):
233 return ctx[0]
229 return ctx[0]
234
230
235 _bookmarks = [
231 _bookmarks = [
236 (safe_unicode(n), hexlify(h)) for n, h in
232 (safe_unicode(n), hexlify(h)) for n, h in
237 self._remote.bookmarks().items()]
233 self._remote.bookmarks().items()]
238
234
239 return OrderedDict(sorted(_bookmarks, key=get_name))
235 return OrderedDict(sorted(_bookmarks, key=get_name))
240
236
241 def _get_all_commit_ids(self):
237 def _get_all_commit_ids(self):
242 return self._remote.get_all_commit_ids('visible')
238 return self._remote.get_all_commit_ids('visible')
243
239
244 def get_diff(
240 def get_diff(
245 self, commit1, commit2, path='', ignore_whitespace=False,
241 self, commit1, commit2, path='', ignore_whitespace=False,
246 context=3, path1=None):
242 context=3, path1=None):
247 """
243 """
248 Returns (git like) *diff*, as plain text. Shows changes introduced by
244 Returns (git like) *diff*, as plain text. Shows changes introduced by
249 `commit2` since `commit1`.
245 `commit2` since `commit1`.
250
246
251 :param commit1: Entry point from which diff is shown. Can be
247 :param commit1: Entry point from which diff is shown. Can be
252 ``self.EMPTY_COMMIT`` - in this case, patch showing all
248 ``self.EMPTY_COMMIT`` - in this case, patch showing all
253 the changes since empty state of the repository until `commit2`
249 the changes since empty state of the repository until `commit2`
254 :param commit2: Until which commit changes should be shown.
250 :param commit2: Until which commit changes should be shown.
255 :param ignore_whitespace: If set to ``True``, would not show whitespace
251 :param ignore_whitespace: If set to ``True``, would not show whitespace
256 changes. Defaults to ``False``.
252 changes. Defaults to ``False``.
257 :param context: How many lines before/after changed lines should be
253 :param context: How many lines before/after changed lines should be
258 shown. Defaults to ``3``.
254 shown. Defaults to ``3``.
259 """
255 """
260 self._validate_diff_commits(commit1, commit2)
256 self._validate_diff_commits(commit1, commit2)
261 if path1 is not None and path1 != path:
257 if path1 is not None and path1 != path:
262 raise ValueError("Diff of two different paths not supported.")
258 raise ValueError("Diff of two different paths not supported.")
263
259
264 if path:
260 if path:
265 file_filter = [self.path, path]
261 file_filter = [self.path, path]
266 else:
262 else:
267 file_filter = None
263 file_filter = None
268
264
269 diff = self._remote.diff(
265 diff = self._remote.diff(
270 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
266 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
271 opt_git=True, opt_ignorews=ignore_whitespace,
267 opt_git=True, opt_ignorews=ignore_whitespace,
272 context=context)
268 context=context)
273 return MercurialDiff(diff)
269 return MercurialDiff(diff)
274
270
275 def strip(self, commit_id, branch=None):
271 def strip(self, commit_id, branch=None):
276 self._remote.strip(commit_id, update=False, backup="none")
272 self._remote.strip(commit_id, update=False, backup="none")
277
273
278 self._remote.invalidate_vcs_cache()
274 self._remote.invalidate_vcs_cache()
279 self._commit_ids_ver = time.time()
275 # clear cache
280 # we updated _commit_ids_ver so accessing self.commit_ids will re-compute it
276 self._invalidate_prop_cache('commit_ids')
277
281 return len(self.commit_ids)
278 return len(self.commit_ids)
282
279
283 def verify(self):
280 def verify(self):
284 verify = self._remote.verify()
281 verify = self._remote.verify()
285
282
286 self._remote.invalidate_vcs_cache()
283 self._remote.invalidate_vcs_cache()
287 return verify
284 return verify
288
285
289 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
286 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
290 if commit_id1 == commit_id2:
287 if commit_id1 == commit_id2:
291 return commit_id1
288 return commit_id1
292
289
293 ancestors = self._remote.revs_from_revspec(
290 ancestors = self._remote.revs_from_revspec(
294 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
291 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
295 other_path=repo2.path)
292 other_path=repo2.path)
296 return repo2[ancestors[0]].raw_id if ancestors else None
293 return repo2[ancestors[0]].raw_id if ancestors else None
297
294
298 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
295 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
299 if commit_id1 == commit_id2:
296 if commit_id1 == commit_id2:
300 commits = []
297 commits = []
301 else:
298 else:
302 if merge:
299 if merge:
303 indexes = self._remote.revs_from_revspec(
300 indexes = self._remote.revs_from_revspec(
304 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
301 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
305 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
302 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
306 else:
303 else:
307 indexes = self._remote.revs_from_revspec(
304 indexes = self._remote.revs_from_revspec(
308 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
305 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
309 commit_id1, other_path=repo2.path)
306 commit_id1, other_path=repo2.path)
310
307
311 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
308 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
312 for idx in indexes]
309 for idx in indexes]
313
310
314 return commits
311 return commits
315
312
316 @staticmethod
313 @staticmethod
317 def check_url(url, config):
314 def check_url(url, config):
318 """
315 """
319 Function will check given url and try to verify if it's a valid
316 Function will check given url and try to verify if it's a valid
320 link. Sometimes it may happened that mercurial will issue basic
317 link. Sometimes it may happened that mercurial will issue basic
321 auth request that can cause whole API to hang when used from python
318 auth request that can cause whole API to hang when used from python
322 or other external calls.
319 or other external calls.
323
320
324 On failures it'll raise urllib2.HTTPError, exception is also thrown
321 On failures it'll raise urllib2.HTTPError, exception is also thrown
325 when the return code is non 200
322 when the return code is non 200
326 """
323 """
327 # check first if it's not an local url
324 # check first if it's not an local url
328 if os.path.isdir(url) or url.startswith('file:'):
325 if os.path.isdir(url) or url.startswith('file:'):
329 return True
326 return True
330
327
331 # Request the _remote to verify the url
328 # Request the _remote to verify the url
332 return connection.Hg.check_url(url, config.serialize())
329 return connection.Hg.check_url(url, config.serialize())
333
330
334 @staticmethod
331 @staticmethod
335 def is_valid_repository(path):
332 def is_valid_repository(path):
336 return os.path.isdir(os.path.join(path, '.hg'))
333 return os.path.isdir(os.path.join(path, '.hg'))
337
334
338 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
335 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
339 """
336 """
340 Function will check for mercurial repository in given path. If there
337 Function will check for mercurial repository in given path. If there
341 is no repository in that path it will raise an exception unless
338 is no repository in that path it will raise an exception unless
342 `create` parameter is set to True - in that case repository would
339 `create` parameter is set to True - in that case repository would
343 be created.
340 be created.
344
341
345 If `src_url` is given, would try to clone repository from the
342 If `src_url` is given, would try to clone repository from the
346 location at given clone_point. Additionally it'll make update to
343 location at given clone_point. Additionally it'll make update to
347 working copy accordingly to `do_workspace_checkout` flag.
344 working copy accordingly to `do_workspace_checkout` flag.
348 """
345 """
349 if create and os.path.exists(self.path):
346 if create and os.path.exists(self.path):
350 raise RepositoryError(
347 raise RepositoryError(
351 "Cannot create repository at %s, location already exist"
348 "Cannot create repository at %s, location already exist"
352 % self.path)
349 % self.path)
353
350
354 if src_url:
351 if src_url:
355 url = str(self._get_url(src_url))
352 url = str(self._get_url(src_url))
356 MercurialRepository.check_url(url, self.config)
353 MercurialRepository.check_url(url, self.config)
357
354
358 self._remote.clone(url, self.path, do_workspace_checkout)
355 self._remote.clone(url, self.path, do_workspace_checkout)
359
356
360 # Don't try to create if we've already cloned repo
357 # Don't try to create if we've already cloned repo
361 create = False
358 create = False
362
359
363 if create:
360 if create:
364 os.makedirs(self.path, mode=0o755)
361 os.makedirs(self.path, mode=0o755)
365
362
366 self._remote.localrepository(create)
363 self._remote.localrepository(create)
367
364
368 @LazyProperty
365 @LazyProperty
369 def in_memory_commit(self):
366 def in_memory_commit(self):
370 return MercurialInMemoryCommit(self)
367 return MercurialInMemoryCommit(self)
371
368
372 @LazyProperty
369 @LazyProperty
373 def description(self):
370 def description(self):
374 description = self._remote.get_config_value(
371 description = self._remote.get_config_value(
375 'web', 'description', untrusted=True)
372 'web', 'description', untrusted=True)
376 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
373 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
377
374
378 @LazyProperty
375 @LazyProperty
379 def contact(self):
376 def contact(self):
380 contact = (
377 contact = (
381 self._remote.get_config_value("web", "contact") or
378 self._remote.get_config_value("web", "contact") or
382 self._remote.get_config_value("ui", "username"))
379 self._remote.get_config_value("ui", "username"))
383 return safe_unicode(contact or self.DEFAULT_CONTACT)
380 return safe_unicode(contact or self.DEFAULT_CONTACT)
384
381
385 @LazyProperty
382 @LazyProperty
386 def last_change(self):
383 def last_change(self):
387 """
384 """
388 Returns last change made on this repository as
385 Returns last change made on this repository as
389 `datetime.datetime` object.
386 `datetime.datetime` object.
390 """
387 """
391 try:
388 try:
392 return self.get_commit().date
389 return self.get_commit().date
393 except RepositoryError:
390 except RepositoryError:
394 tzoffset = makedate()[1]
391 tzoffset = makedate()[1]
395 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
392 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
396
393
397 def _get_fs_mtime(self):
394 def _get_fs_mtime(self):
398 # fallback to filesystem
395 # fallback to filesystem
399 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
396 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
400 st_path = os.path.join(self.path, '.hg', "store")
397 st_path = os.path.join(self.path, '.hg', "store")
401 if os.path.exists(cl_path):
398 if os.path.exists(cl_path):
402 return os.stat(cl_path).st_mtime
399 return os.stat(cl_path).st_mtime
403 else:
400 else:
404 return os.stat(st_path).st_mtime
401 return os.stat(st_path).st_mtime
405
402
406 def _get_url(self, url):
403 def _get_url(self, url):
407 """
404 """
408 Returns normalized url. If schema is not given, would fall
405 Returns normalized url. If schema is not given, would fall
409 to filesystem
406 to filesystem
410 (``file:///``) schema.
407 (``file:///``) schema.
411 """
408 """
412 url = url.encode('utf8')
409 url = url.encode('utf8')
413 if url != 'default' and '://' not in url:
410 if url != 'default' and '://' not in url:
414 url = "file:" + urllib.pathname2url(url)
411 url = "file:" + urllib.pathname2url(url)
415 return url
412 return url
416
413
417 def get_hook_location(self):
414 def get_hook_location(self):
418 """
415 """
419 returns absolute path to location where hooks are stored
416 returns absolute path to location where hooks are stored
420 """
417 """
421 return os.path.join(self.path, '.hg', '.hgrc')
418 return os.path.join(self.path, '.hg', '.hgrc')
422
419
423 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
420 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
424 """
421 """
425 Returns ``MercurialCommit`` object representing repository's
422 Returns ``MercurialCommit`` object representing repository's
426 commit at the given `commit_id` or `commit_idx`.
423 commit at the given `commit_id` or `commit_idx`.
427 """
424 """
428 if self.is_empty():
425 if self.is_empty():
429 raise EmptyRepositoryError("There are no commits yet")
426 raise EmptyRepositoryError("There are no commits yet")
430
427
431 if commit_id is not None:
428 if commit_id is not None:
432 self._validate_commit_id(commit_id)
429 self._validate_commit_id(commit_id)
433 try:
430 try:
434 # we have cached idx, use it without contacting the remote
431 # we have cached idx, use it without contacting the remote
435 idx = self._commit_ids[commit_id]
432 idx = self._commit_ids[commit_id]
436 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
433 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
437 except KeyError:
434 except KeyError:
438 pass
435 pass
439
436
440 elif commit_idx is not None:
437 elif commit_idx is not None:
441 self._validate_commit_idx(commit_idx)
438 self._validate_commit_idx(commit_idx)
442 try:
439 try:
443 _commit_id = self.commit_ids[commit_idx]
440 _commit_id = self.commit_ids[commit_idx]
444 if commit_idx < 0:
441 if commit_idx < 0:
445 commit_idx = self.commit_ids.index(_commit_id)
442 commit_idx = self.commit_ids.index(_commit_id)
446
443
447 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
444 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
448 except IndexError:
445 except IndexError:
449 commit_id = commit_idx
446 commit_id = commit_idx
450 else:
447 else:
451 commit_id = "tip"
448 commit_id = "tip"
452
449
453 if isinstance(commit_id, unicode):
450 if isinstance(commit_id, unicode):
454 commit_id = safe_str(commit_id)
451 commit_id = safe_str(commit_id)
455
452
456 try:
453 try:
457 raw_id, idx = self._remote.lookup(commit_id, both=True)
454 raw_id, idx = self._remote.lookup(commit_id, both=True)
458 except CommitDoesNotExistError:
455 except CommitDoesNotExistError:
459 msg = "Commit {} does not exist for `{}`".format(
456 msg = "Commit {} does not exist for `{}`".format(
460 *map(safe_str, [commit_id, self.name]))
457 *map(safe_str, [commit_id, self.name]))
461 raise CommitDoesNotExistError(msg)
458 raise CommitDoesNotExistError(msg)
462
459
463 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
460 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
464
461
465 def get_commits(
462 def get_commits(
466 self, start_id=None, end_id=None, start_date=None, end_date=None,
463 self, start_id=None, end_id=None, start_date=None, end_date=None,
467 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
464 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
468 """
465 """
469 Returns generator of ``MercurialCommit`` objects from start to end
466 Returns generator of ``MercurialCommit`` objects from start to end
470 (both are inclusive)
467 (both are inclusive)
471
468
472 :param start_id: None, str(commit_id)
469 :param start_id: None, str(commit_id)
473 :param end_id: None, str(commit_id)
470 :param end_id: None, str(commit_id)
474 :param start_date: if specified, commits with commit date less than
471 :param start_date: if specified, commits with commit date less than
475 ``start_date`` would be filtered out from returned set
472 ``start_date`` would be filtered out from returned set
476 :param end_date: if specified, commits with commit date greater than
473 :param end_date: if specified, commits with commit date greater than
477 ``end_date`` would be filtered out from returned set
474 ``end_date`` would be filtered out from returned set
478 :param branch_name: if specified, commits not reachable from given
475 :param branch_name: if specified, commits not reachable from given
479 branch would be filtered out from returned set
476 branch would be filtered out from returned set
480 :param show_hidden: Show hidden commits such as obsolete or hidden from
477 :param show_hidden: Show hidden commits such as obsolete or hidden from
481 Mercurial evolve
478 Mercurial evolve
482 :raise BranchDoesNotExistError: If given ``branch_name`` does not
479 :raise BranchDoesNotExistError: If given ``branch_name`` does not
483 exist.
480 exist.
484 :raise CommitDoesNotExistError: If commit for given ``start`` or
481 :raise CommitDoesNotExistError: If commit for given ``start`` or
485 ``end`` could not be found.
482 ``end`` could not be found.
486 """
483 """
487 # actually we should check now if it's not an empty repo
484 # actually we should check now if it's not an empty repo
488 if self.is_empty():
485 if self.is_empty():
489 raise EmptyRepositoryError("There are no commits yet")
486 raise EmptyRepositoryError("There are no commits yet")
490 self._validate_branch_name(branch_name)
487 self._validate_branch_name(branch_name)
491
488
492 branch_ancestors = False
489 branch_ancestors = False
493 if start_id is not None:
490 if start_id is not None:
494 self._validate_commit_id(start_id)
491 self._validate_commit_id(start_id)
495 c_start = self.get_commit(commit_id=start_id)
492 c_start = self.get_commit(commit_id=start_id)
496 start_pos = self._commit_ids[c_start.raw_id]
493 start_pos = self._commit_ids[c_start.raw_id]
497 else:
494 else:
498 start_pos = None
495 start_pos = None
499
496
500 if end_id is not None:
497 if end_id is not None:
501 self._validate_commit_id(end_id)
498 self._validate_commit_id(end_id)
502 c_end = self.get_commit(commit_id=end_id)
499 c_end = self.get_commit(commit_id=end_id)
503 end_pos = max(0, self._commit_ids[c_end.raw_id])
500 end_pos = max(0, self._commit_ids[c_end.raw_id])
504 else:
501 else:
505 end_pos = None
502 end_pos = None
506
503
507 if None not in [start_id, end_id] and start_pos > end_pos:
504 if None not in [start_id, end_id] and start_pos > end_pos:
508 raise RepositoryError(
505 raise RepositoryError(
509 "Start commit '%s' cannot be after end commit '%s'" %
506 "Start commit '%s' cannot be after end commit '%s'" %
510 (start_id, end_id))
507 (start_id, end_id))
511
508
512 if end_pos is not None:
509 if end_pos is not None:
513 end_pos += 1
510 end_pos += 1
514
511
515 commit_filter = []
512 commit_filter = []
516
513
517 if branch_name and not branch_ancestors:
514 if branch_name and not branch_ancestors:
518 commit_filter.append('branch("%s")' % (branch_name,))
515 commit_filter.append('branch("%s")' % (branch_name,))
519 elif branch_name and branch_ancestors:
516 elif branch_name and branch_ancestors:
520 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
517 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
521
518
522 if start_date and not end_date:
519 if start_date and not end_date:
523 commit_filter.append('date(">%s")' % (start_date,))
520 commit_filter.append('date(">%s")' % (start_date,))
524 if end_date and not start_date:
521 if end_date and not start_date:
525 commit_filter.append('date("<%s")' % (end_date,))
522 commit_filter.append('date("<%s")' % (end_date,))
526 if start_date and end_date:
523 if start_date and end_date:
527 commit_filter.append(
524 commit_filter.append(
528 'date(">%s") and date("<%s")' % (start_date, end_date))
525 'date(">%s") and date("<%s")' % (start_date, end_date))
529
526
530 if not show_hidden:
527 if not show_hidden:
531 commit_filter.append('not obsolete()')
528 commit_filter.append('not obsolete()')
532 commit_filter.append('not hidden()')
529 commit_filter.append('not hidden()')
533
530
534 # TODO: johbo: Figure out a simpler way for this solution
531 # TODO: johbo: Figure out a simpler way for this solution
535 collection_generator = CollectionGenerator
532 collection_generator = CollectionGenerator
536 if commit_filter:
533 if commit_filter:
537 commit_filter = ' and '.join(map(safe_str, commit_filter))
534 commit_filter = ' and '.join(map(safe_str, commit_filter))
538 revisions = self._remote.rev_range([commit_filter])
535 revisions = self._remote.rev_range([commit_filter])
539 collection_generator = MercurialIndexBasedCollectionGenerator
536 collection_generator = MercurialIndexBasedCollectionGenerator
540 else:
537 else:
541 revisions = self.commit_ids
538 revisions = self.commit_ids
542
539
543 if start_pos or end_pos:
540 if start_pos or end_pos:
544 revisions = revisions[start_pos:end_pos]
541 revisions = revisions[start_pos:end_pos]
545
542
546 return collection_generator(self, revisions, pre_load=pre_load)
543 return collection_generator(self, revisions, pre_load=pre_load)
547
544
548 def pull(self, url, commit_ids=None):
545 def pull(self, url, commit_ids=None):
549 """
546 """
550 Pull changes from external location.
547 Pull changes from external location.
551
548
552 :param commit_ids: Optional. Can be set to a list of commit ids
549 :param commit_ids: Optional. Can be set to a list of commit ids
553 which shall be pulled from the other repository.
550 which shall be pulled from the other repository.
554 """
551 """
555 url = self._get_url(url)
552 url = self._get_url(url)
556 self._remote.pull(url, commit_ids=commit_ids)
553 self._remote.pull(url, commit_ids=commit_ids)
557 self._remote.invalidate_vcs_cache()
554 self._remote.invalidate_vcs_cache()
558
555
559 def fetch(self, url, commit_ids=None):
556 def fetch(self, url, commit_ids=None):
560 """
557 """
561 Backward compatibility with GIT fetch==pull
558 Backward compatibility with GIT fetch==pull
562 """
559 """
563 return self.pull(url, commit_ids=commit_ids)
560 return self.pull(url, commit_ids=commit_ids)
564
561
565 def push(self, url):
562 def push(self, url):
566 url = self._get_url(url)
563 url = self._get_url(url)
567 self._remote.sync_push(url)
564 self._remote.sync_push(url)
568
565
569 def _local_clone(self, clone_path):
566 def _local_clone(self, clone_path):
570 """
567 """
571 Create a local clone of the current repo.
568 Create a local clone of the current repo.
572 """
569 """
573 self._remote.clone(self.path, clone_path, update_after_clone=True,
570 self._remote.clone(self.path, clone_path, update_after_clone=True,
574 hooks=False)
571 hooks=False)
575
572
576 def _update(self, revision, clean=False):
573 def _update(self, revision, clean=False):
577 """
574 """
578 Update the working copy to the specified revision.
575 Update the working copy to the specified revision.
579 """
576 """
580 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
577 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
581 self._remote.update(revision, clean=clean)
578 self._remote.update(revision, clean=clean)
582
579
583 def _identify(self):
580 def _identify(self):
584 """
581 """
585 Return the current state of the working directory.
582 Return the current state of the working directory.
586 """
583 """
587 return self._remote.identify().strip().rstrip('+')
584 return self._remote.identify().strip().rstrip('+')
588
585
589 def _heads(self, branch=None):
586 def _heads(self, branch=None):
590 """
587 """
591 Return the commit ids of the repository heads.
588 Return the commit ids of the repository heads.
592 """
589 """
593 return self._remote.heads(branch=branch).strip().split(' ')
590 return self._remote.heads(branch=branch).strip().split(' ')
594
591
595 def _ancestor(self, revision1, revision2):
592 def _ancestor(self, revision1, revision2):
596 """
593 """
597 Return the common ancestor of the two revisions.
594 Return the common ancestor of the two revisions.
598 """
595 """
599 return self._remote.ancestor(revision1, revision2)
596 return self._remote.ancestor(revision1, revision2)
600
597
601 def _local_push(
598 def _local_push(
602 self, revision, repository_path, push_branches=False,
599 self, revision, repository_path, push_branches=False,
603 enable_hooks=False):
600 enable_hooks=False):
604 """
601 """
605 Push the given revision to the specified repository.
602 Push the given revision to the specified repository.
606
603
607 :param push_branches: allow to create branches in the target repo.
604 :param push_branches: allow to create branches in the target repo.
608 """
605 """
609 self._remote.push(
606 self._remote.push(
610 [revision], repository_path, hooks=enable_hooks,
607 [revision], repository_path, hooks=enable_hooks,
611 push_branches=push_branches)
608 push_branches=push_branches)
612
609
613 def _local_merge(self, target_ref, merge_message, user_name, user_email,
610 def _local_merge(self, target_ref, merge_message, user_name, user_email,
614 source_ref, use_rebase=False, dry_run=False):
611 source_ref, use_rebase=False, dry_run=False):
615 """
612 """
616 Merge the given source_revision into the checked out revision.
613 Merge the given source_revision into the checked out revision.
617
614
618 Returns the commit id of the merge and a boolean indicating if the
615 Returns the commit id of the merge and a boolean indicating if the
619 commit needs to be pushed.
616 commit needs to be pushed.
620 """
617 """
621 self._update(target_ref.commit_id, clean=True)
618 self._update(target_ref.commit_id, clean=True)
622
619
623 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
620 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
624 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
621 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
625
622
626 if ancestor == source_ref.commit_id:
623 if ancestor == source_ref.commit_id:
627 # Nothing to do, the changes were already integrated
624 # Nothing to do, the changes were already integrated
628 return target_ref.commit_id, False
625 return target_ref.commit_id, False
629
626
630 elif ancestor == target_ref.commit_id and is_the_same_branch:
627 elif ancestor == target_ref.commit_id and is_the_same_branch:
631 # In this case we should force a commit message
628 # In this case we should force a commit message
632 return source_ref.commit_id, True
629 return source_ref.commit_id, True
633
630
634 if use_rebase:
631 if use_rebase:
635 try:
632 try:
636 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
633 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
637 target_ref.commit_id)
634 target_ref.commit_id)
638 self.bookmark(bookmark_name, revision=source_ref.commit_id)
635 self.bookmark(bookmark_name, revision=source_ref.commit_id)
639 self._remote.rebase(
636 self._remote.rebase(
640 source=source_ref.commit_id, dest=target_ref.commit_id)
637 source=source_ref.commit_id, dest=target_ref.commit_id)
641 self._remote.invalidate_vcs_cache()
638 self._remote.invalidate_vcs_cache()
642 self._update(bookmark_name, clean=True)
639 self._update(bookmark_name, clean=True)
643 return self._identify(), True
640 return self._identify(), True
644 except RepositoryError:
641 except RepositoryError:
645 # The rebase-abort may raise another exception which 'hides'
642 # The rebase-abort may raise another exception which 'hides'
646 # the original one, therefore we log it here.
643 # the original one, therefore we log it here.
647 log.exception('Error while rebasing shadow repo during merge.')
644 log.exception('Error while rebasing shadow repo during merge.')
648
645
649 # Cleanup any rebase leftovers
646 # Cleanup any rebase leftovers
650 self._remote.invalidate_vcs_cache()
647 self._remote.invalidate_vcs_cache()
651 self._remote.rebase(abort=True)
648 self._remote.rebase(abort=True)
652 self._remote.invalidate_vcs_cache()
649 self._remote.invalidate_vcs_cache()
653 self._remote.update(clean=True)
650 self._remote.update(clean=True)
654 raise
651 raise
655 else:
652 else:
656 try:
653 try:
657 self._remote.merge(source_ref.commit_id)
654 self._remote.merge(source_ref.commit_id)
658 self._remote.invalidate_vcs_cache()
655 self._remote.invalidate_vcs_cache()
659 self._remote.commit(
656 self._remote.commit(
660 message=safe_str(merge_message),
657 message=safe_str(merge_message),
661 username=safe_str('%s <%s>' % (user_name, user_email)))
658 username=safe_str('%s <%s>' % (user_name, user_email)))
662 self._remote.invalidate_vcs_cache()
659 self._remote.invalidate_vcs_cache()
663 return self._identify(), True
660 return self._identify(), True
664 except RepositoryError:
661 except RepositoryError:
665 # Cleanup any merge leftovers
662 # Cleanup any merge leftovers
666 self._remote.update(clean=True)
663 self._remote.update(clean=True)
667 raise
664 raise
668
665
669 def _local_close(self, target_ref, user_name, user_email,
666 def _local_close(self, target_ref, user_name, user_email,
670 source_ref, close_message=''):
667 source_ref, close_message=''):
671 """
668 """
672 Close the branch of the given source_revision
669 Close the branch of the given source_revision
673
670
674 Returns the commit id of the close and a boolean indicating if the
671 Returns the commit id of the close and a boolean indicating if the
675 commit needs to be pushed.
672 commit needs to be pushed.
676 """
673 """
677 self._update(source_ref.commit_id)
674 self._update(source_ref.commit_id)
678 message = close_message or "Closing branch: `{}`".format(source_ref.name)
675 message = close_message or "Closing branch: `{}`".format(source_ref.name)
679 try:
676 try:
680 self._remote.commit(
677 self._remote.commit(
681 message=safe_str(message),
678 message=safe_str(message),
682 username=safe_str('%s <%s>' % (user_name, user_email)),
679 username=safe_str('%s <%s>' % (user_name, user_email)),
683 close_branch=True)
680 close_branch=True)
684 self._remote.invalidate_vcs_cache()
681 self._remote.invalidate_vcs_cache()
685 return self._identify(), True
682 return self._identify(), True
686 except RepositoryError:
683 except RepositoryError:
687 # Cleanup any commit leftovers
684 # Cleanup any commit leftovers
688 self._remote.update(clean=True)
685 self._remote.update(clean=True)
689 raise
686 raise
690
687
691 def _is_the_same_branch(self, target_ref, source_ref):
688 def _is_the_same_branch(self, target_ref, source_ref):
692 return (
689 return (
693 self._get_branch_name(target_ref) ==
690 self._get_branch_name(target_ref) ==
694 self._get_branch_name(source_ref))
691 self._get_branch_name(source_ref))
695
692
696 def _get_branch_name(self, ref):
693 def _get_branch_name(self, ref):
697 if ref.type == 'branch':
694 if ref.type == 'branch':
698 return ref.name
695 return ref.name
699 return self._remote.ctx_branch(ref.commit_id)
696 return self._remote.ctx_branch(ref.commit_id)
700
697
701 def _maybe_prepare_merge_workspace(
698 def _maybe_prepare_merge_workspace(
702 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
699 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
703 shadow_repository_path = self._get_shadow_repository_path(
700 shadow_repository_path = self._get_shadow_repository_path(
704 repo_id, workspace_id)
701 repo_id, workspace_id)
705 if not os.path.exists(shadow_repository_path):
702 if not os.path.exists(shadow_repository_path):
706 self._local_clone(shadow_repository_path)
703 self._local_clone(shadow_repository_path)
707 log.debug(
704 log.debug(
708 'Prepared shadow repository in %s', shadow_repository_path)
705 'Prepared shadow repository in %s', shadow_repository_path)
709
706
710 return shadow_repository_path
707 return shadow_repository_path
711
708
712 def _merge_repo(self, repo_id, workspace_id, target_ref,
709 def _merge_repo(self, repo_id, workspace_id, target_ref,
713 source_repo, source_ref, merge_message,
710 source_repo, source_ref, merge_message,
714 merger_name, merger_email, dry_run=False,
711 merger_name, merger_email, dry_run=False,
715 use_rebase=False, close_branch=False):
712 use_rebase=False, close_branch=False):
716
713
717 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
714 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
718 'rebase' if use_rebase else 'merge', dry_run)
715 'rebase' if use_rebase else 'merge', dry_run)
719 if target_ref.commit_id not in self._heads():
716 if target_ref.commit_id not in self._heads():
720 return MergeResponse(
717 return MergeResponse(
721 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
718 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
722 metadata={'target_ref': target_ref})
719 metadata={'target_ref': target_ref})
723
720
724 try:
721 try:
725 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
722 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
726 heads = '\n,'.join(self._heads(target_ref.name))
723 heads = '\n,'.join(self._heads(target_ref.name))
727 metadata = {
724 metadata = {
728 'target_ref': target_ref,
725 'target_ref': target_ref,
729 'source_ref': source_ref,
726 'source_ref': source_ref,
730 'heads': heads
727 'heads': heads
731 }
728 }
732 return MergeResponse(
729 return MergeResponse(
733 False, False, None,
730 False, False, None,
734 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
731 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
735 metadata=metadata)
732 metadata=metadata)
736 except CommitDoesNotExistError:
733 except CommitDoesNotExistError:
737 log.exception('Failure when looking up branch heads on hg target')
734 log.exception('Failure when looking up branch heads on hg target')
738 return MergeResponse(
735 return MergeResponse(
739 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
736 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
740 metadata={'target_ref': target_ref})
737 metadata={'target_ref': target_ref})
741
738
742 shadow_repository_path = self._maybe_prepare_merge_workspace(
739 shadow_repository_path = self._maybe_prepare_merge_workspace(
743 repo_id, workspace_id, target_ref, source_ref)
740 repo_id, workspace_id, target_ref, source_ref)
744 shadow_repo = self._get_shadow_instance(shadow_repository_path)
741 shadow_repo = self._get_shadow_instance(shadow_repository_path)
745
742
746 log.debug('Pulling in target reference %s', target_ref)
743 log.debug('Pulling in target reference %s', target_ref)
747 self._validate_pull_reference(target_ref)
744 self._validate_pull_reference(target_ref)
748 shadow_repo._local_pull(self.path, target_ref)
745 shadow_repo._local_pull(self.path, target_ref)
749
746
750 try:
747 try:
751 log.debug('Pulling in source reference %s', source_ref)
748 log.debug('Pulling in source reference %s', source_ref)
752 source_repo._validate_pull_reference(source_ref)
749 source_repo._validate_pull_reference(source_ref)
753 shadow_repo._local_pull(source_repo.path, source_ref)
750 shadow_repo._local_pull(source_repo.path, source_ref)
754 except CommitDoesNotExistError:
751 except CommitDoesNotExistError:
755 log.exception('Failure when doing local pull on hg shadow repo')
752 log.exception('Failure when doing local pull on hg shadow repo')
756 return MergeResponse(
753 return MergeResponse(
757 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
754 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
758 metadata={'source_ref': source_ref})
755 metadata={'source_ref': source_ref})
759
756
760 merge_ref = None
757 merge_ref = None
761 merge_commit_id = None
758 merge_commit_id = None
762 close_commit_id = None
759 close_commit_id = None
763 merge_failure_reason = MergeFailureReason.NONE
760 merge_failure_reason = MergeFailureReason.NONE
764 metadata = {}
761 metadata = {}
765
762
766 # enforce that close branch should be used only in case we source from
763 # enforce that close branch should be used only in case we source from
767 # an actual Branch
764 # an actual Branch
768 close_branch = close_branch and source_ref.type == 'branch'
765 close_branch = close_branch and source_ref.type == 'branch'
769
766
770 # don't allow to close branch if source and target are the same
767 # don't allow to close branch if source and target are the same
771 close_branch = close_branch and source_ref.name != target_ref.name
768 close_branch = close_branch and source_ref.name != target_ref.name
772
769
773 needs_push_on_close = False
770 needs_push_on_close = False
774 if close_branch and not use_rebase and not dry_run:
771 if close_branch and not use_rebase and not dry_run:
775 try:
772 try:
776 close_commit_id, needs_push_on_close = shadow_repo._local_close(
773 close_commit_id, needs_push_on_close = shadow_repo._local_close(
777 target_ref, merger_name, merger_email, source_ref)
774 target_ref, merger_name, merger_email, source_ref)
778 merge_possible = True
775 merge_possible = True
779 except RepositoryError:
776 except RepositoryError:
780 log.exception('Failure when doing close branch on '
777 log.exception('Failure when doing close branch on '
781 'shadow repo: %s', shadow_repo)
778 'shadow repo: %s', shadow_repo)
782 merge_possible = False
779 merge_possible = False
783 merge_failure_reason = MergeFailureReason.MERGE_FAILED
780 merge_failure_reason = MergeFailureReason.MERGE_FAILED
784 else:
781 else:
785 merge_possible = True
782 merge_possible = True
786
783
787 needs_push = False
784 needs_push = False
788 if merge_possible:
785 if merge_possible:
789 try:
786 try:
790 merge_commit_id, needs_push = shadow_repo._local_merge(
787 merge_commit_id, needs_push = shadow_repo._local_merge(
791 target_ref, merge_message, merger_name, merger_email,
788 target_ref, merge_message, merger_name, merger_email,
792 source_ref, use_rebase=use_rebase, dry_run=dry_run)
789 source_ref, use_rebase=use_rebase, dry_run=dry_run)
793 merge_possible = True
790 merge_possible = True
794
791
795 # read the state of the close action, if it
792 # read the state of the close action, if it
796 # maybe required a push
793 # maybe required a push
797 needs_push = needs_push or needs_push_on_close
794 needs_push = needs_push or needs_push_on_close
798
795
799 # Set a bookmark pointing to the merge commit. This bookmark
796 # Set a bookmark pointing to the merge commit. This bookmark
800 # may be used to easily identify the last successful merge
797 # may be used to easily identify the last successful merge
801 # commit in the shadow repository.
798 # commit in the shadow repository.
802 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
799 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
803 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
800 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
804 except SubrepoMergeError:
801 except SubrepoMergeError:
805 log.exception(
802 log.exception(
806 'Subrepo merge error during local merge on hg shadow repo.')
803 'Subrepo merge error during local merge on hg shadow repo.')
807 merge_possible = False
804 merge_possible = False
808 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
805 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
809 needs_push = False
806 needs_push = False
810 except RepositoryError:
807 except RepositoryError:
811 log.exception('Failure when doing local merge on hg shadow repo')
808 log.exception('Failure when doing local merge on hg shadow repo')
812 merge_possible = False
809 merge_possible = False
813 merge_failure_reason = MergeFailureReason.MERGE_FAILED
810 merge_failure_reason = MergeFailureReason.MERGE_FAILED
814 needs_push = False
811 needs_push = False
815
812
816 if merge_possible and not dry_run:
813 if merge_possible and not dry_run:
817 if needs_push:
814 if needs_push:
818 # In case the target is a bookmark, update it, so after pushing
815 # In case the target is a bookmark, update it, so after pushing
819 # the bookmarks is also updated in the target.
816 # the bookmarks is also updated in the target.
820 if target_ref.type == 'book':
817 if target_ref.type == 'book':
821 shadow_repo.bookmark(
818 shadow_repo.bookmark(
822 target_ref.name, revision=merge_commit_id)
819 target_ref.name, revision=merge_commit_id)
823 try:
820 try:
824 shadow_repo_with_hooks = self._get_shadow_instance(
821 shadow_repo_with_hooks = self._get_shadow_instance(
825 shadow_repository_path,
822 shadow_repository_path,
826 enable_hooks=True)
823 enable_hooks=True)
827 # This is the actual merge action, we push from shadow
824 # This is the actual merge action, we push from shadow
828 # into origin.
825 # into origin.
829 # Note: the push_branches option will push any new branch
826 # Note: the push_branches option will push any new branch
830 # defined in the source repository to the target. This may
827 # defined in the source repository to the target. This may
831 # be dangerous as branches are permanent in Mercurial.
828 # be dangerous as branches are permanent in Mercurial.
832 # This feature was requested in issue #441.
829 # This feature was requested in issue #441.
833 shadow_repo_with_hooks._local_push(
830 shadow_repo_with_hooks._local_push(
834 merge_commit_id, self.path, push_branches=True,
831 merge_commit_id, self.path, push_branches=True,
835 enable_hooks=True)
832 enable_hooks=True)
836
833
837 # maybe we also need to push the close_commit_id
834 # maybe we also need to push the close_commit_id
838 if close_commit_id:
835 if close_commit_id:
839 shadow_repo_with_hooks._local_push(
836 shadow_repo_with_hooks._local_push(
840 close_commit_id, self.path, push_branches=True,
837 close_commit_id, self.path, push_branches=True,
841 enable_hooks=True)
838 enable_hooks=True)
842 merge_succeeded = True
839 merge_succeeded = True
843 except RepositoryError:
840 except RepositoryError:
844 log.exception(
841 log.exception(
845 'Failure when doing local push from the shadow '
842 'Failure when doing local push from the shadow '
846 'repository to the target repository at %s.', self.path)
843 'repository to the target repository at %s.', self.path)
847 merge_succeeded = False
844 merge_succeeded = False
848 merge_failure_reason = MergeFailureReason.PUSH_FAILED
845 merge_failure_reason = MergeFailureReason.PUSH_FAILED
849 metadata['target'] = 'hg shadow repo'
846 metadata['target'] = 'hg shadow repo'
850 metadata['merge_commit'] = merge_commit_id
847 metadata['merge_commit'] = merge_commit_id
851 else:
848 else:
852 merge_succeeded = True
849 merge_succeeded = True
853 else:
850 else:
854 merge_succeeded = False
851 merge_succeeded = False
855
852
856 return MergeResponse(
853 return MergeResponse(
857 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
854 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
858 metadata=metadata)
855 metadata=metadata)
859
856
860 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
857 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
861 config = self.config.copy()
858 config = self.config.copy()
862 if not enable_hooks:
859 if not enable_hooks:
863 config.clear_section('hooks')
860 config.clear_section('hooks')
864 return MercurialRepository(shadow_repository_path, config)
861 return MercurialRepository(shadow_repository_path, config)
865
862
866 def _validate_pull_reference(self, reference):
863 def _validate_pull_reference(self, reference):
867 if not (reference.name in self.bookmarks or
864 if not (reference.name in self.bookmarks or
868 reference.name in self.branches or
865 reference.name in self.branches or
869 self.get_commit(reference.commit_id)):
866 self.get_commit(reference.commit_id)):
870 raise CommitDoesNotExistError(
867 raise CommitDoesNotExistError(
871 'Unknown branch, bookmark or commit id')
868 'Unknown branch, bookmark or commit id')
872
869
873 def _local_pull(self, repository_path, reference):
870 def _local_pull(self, repository_path, reference):
874 """
871 """
875 Fetch a branch, bookmark or commit from a local repository.
872 Fetch a branch, bookmark or commit from a local repository.
876 """
873 """
877 repository_path = os.path.abspath(repository_path)
874 repository_path = os.path.abspath(repository_path)
878 if repository_path == self.path:
875 if repository_path == self.path:
879 raise ValueError('Cannot pull from the same repository')
876 raise ValueError('Cannot pull from the same repository')
880
877
881 reference_type_to_option_name = {
878 reference_type_to_option_name = {
882 'book': 'bookmark',
879 'book': 'bookmark',
883 'branch': 'branch',
880 'branch': 'branch',
884 }
881 }
885 option_name = reference_type_to_option_name.get(
882 option_name = reference_type_to_option_name.get(
886 reference.type, 'revision')
883 reference.type, 'revision')
887
884
888 if option_name == 'revision':
885 if option_name == 'revision':
889 ref = reference.commit_id
886 ref = reference.commit_id
890 else:
887 else:
891 ref = reference.name
888 ref = reference.name
892
889
893 options = {option_name: [ref]}
890 options = {option_name: [ref]}
894 self._remote.pull_cmd(repository_path, hooks=False, **options)
891 self._remote.pull_cmd(repository_path, hooks=False, **options)
895 self._remote.invalidate_vcs_cache()
892 self._remote.invalidate_vcs_cache()
896
893
897 def bookmark(self, bookmark, revision=None):
894 def bookmark(self, bookmark, revision=None):
898 if isinstance(bookmark, unicode):
895 if isinstance(bookmark, unicode):
899 bookmark = safe_str(bookmark)
896 bookmark = safe_str(bookmark)
900 self._remote.bookmark(bookmark, revision=revision)
897 self._remote.bookmark(bookmark, revision=revision)
901 self._remote.invalidate_vcs_cache()
898 self._remote.invalidate_vcs_cache()
902
899
903 def get_path_permissions(self, username):
900 def get_path_permissions(self, username):
904 hgacl_file = os.path.join(self.path, '.hg/hgacl')
901 hgacl_file = os.path.join(self.path, '.hg/hgacl')
905
902
906 def read_patterns(suffix):
903 def read_patterns(suffix):
907 svalue = None
904 svalue = None
908 for section, option in [
905 for section, option in [
909 ('narrowacl', username + suffix),
906 ('narrowacl', username + suffix),
910 ('narrowacl', 'default' + suffix),
907 ('narrowacl', 'default' + suffix),
911 ('narrowhgacl', username + suffix),
908 ('narrowhgacl', username + suffix),
912 ('narrowhgacl', 'default' + suffix)
909 ('narrowhgacl', 'default' + suffix)
913 ]:
910 ]:
914 try:
911 try:
915 svalue = hgacl.get(section, option)
912 svalue = hgacl.get(section, option)
916 break # stop at the first value we find
913 break # stop at the first value we find
917 except configparser.NoOptionError:
914 except configparser.NoOptionError:
918 pass
915 pass
919 if not svalue:
916 if not svalue:
920 return None
917 return None
921 result = ['/']
918 result = ['/']
922 for pattern in svalue.split():
919 for pattern in svalue.split():
923 result.append(pattern)
920 result.append(pattern)
924 if '*' not in pattern and '?' not in pattern:
921 if '*' not in pattern and '?' not in pattern:
925 result.append(pattern + '/*')
922 result.append(pattern + '/*')
926 return result
923 return result
927
924
928 if os.path.exists(hgacl_file):
925 if os.path.exists(hgacl_file):
929 try:
926 try:
930 hgacl = configparser.RawConfigParser()
927 hgacl = configparser.RawConfigParser()
931 hgacl.read(hgacl_file)
928 hgacl.read(hgacl_file)
932
929
933 includes = read_patterns('.includes')
930 includes = read_patterns('.includes')
934 excludes = read_patterns('.excludes')
931 excludes = read_patterns('.excludes')
935 return BasePathPermissionChecker.create_from_patterns(
932 return BasePathPermissionChecker.create_from_patterns(
936 includes, excludes)
933 includes, excludes)
937 except BaseException as e:
934 except BaseException as e:
938 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
935 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
939 hgacl_file, self.name, e)
936 hgacl_file, self.name, e)
940 raise exceptions.RepositoryRequirementError(msg)
937 raise exceptions.RepositoryRequirementError(msg)
941 else:
938 else:
942 return None
939 return None
943
940
944
941
945 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
942 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
946
943
947 def _commit_factory(self, commit_id):
944 def _commit_factory(self, commit_id):
948 return self.repo.get_commit(
945 return self.repo.get_commit(
949 commit_idx=commit_id, pre_load=self.pre_load)
946 commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,367 +1,368 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 SVN repository module
22 SVN repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import urllib
27 import urllib
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 from zope.cachedescriptors.property import CachedProperty
31
30
32 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
33 from rhodecode.lib.datelib import date_astimestamp
32 from rhodecode.lib.datelib import date_astimestamp
34 from rhodecode.lib.utils import safe_str, safe_unicode
33 from rhodecode.lib.utils import safe_str, safe_unicode
34 from rhodecode.lib.utils2 import CachedProperty
35 from rhodecode.lib.vcs import connection, path as vcspath
35 from rhodecode.lib.vcs import connection, path as vcspath
36 from rhodecode.lib.vcs.backends import base
36 from rhodecode.lib.vcs.backends import base
37 from rhodecode.lib.vcs.backends.svn.commit import (
37 from rhodecode.lib.vcs.backends.svn.commit import (
38 SubversionCommit, _date_from_svn_properties)
38 SubversionCommit, _date_from_svn_properties)
39 from rhodecode.lib.vcs.backends.svn.diff import SubversionDiff
39 from rhodecode.lib.vcs.backends.svn.diff import SubversionDiff
40 from rhodecode.lib.vcs.backends.svn.inmemory import SubversionInMemoryCommit
40 from rhodecode.lib.vcs.backends.svn.inmemory import SubversionInMemoryCommit
41 from rhodecode.lib.vcs.conf import settings
41 from rhodecode.lib.vcs.conf import settings
42 from rhodecode.lib.vcs.exceptions import (
42 from rhodecode.lib.vcs.exceptions import (
43 CommitDoesNotExistError, EmptyRepositoryError, RepositoryError,
43 CommitDoesNotExistError, EmptyRepositoryError, RepositoryError,
44 VCSError, NodeDoesNotExistError)
44 VCSError, NodeDoesNotExistError)
45
45
46
46
47 log = logging.getLogger(__name__)
47 log = logging.getLogger(__name__)
48
48
49
49
50 class SubversionRepository(base.BaseRepository):
50 class SubversionRepository(base.BaseRepository):
51 """
51 """
52 Subversion backend implementation
52 Subversion backend implementation
53
53
54 .. important::
54 .. important::
55
55
56 It is very important to distinguish the commit index and the commit id
56 It is very important to distinguish the commit index and the commit id
57 which is assigned by Subversion. The first one is always handled as an
57 which is assigned by Subversion. The first one is always handled as an
58 `int` by this implementation. The commit id assigned by Subversion on
58 `int` by this implementation. The commit id assigned by Subversion on
59 the other side will always be a `str`.
59 the other side will always be a `str`.
60
60
61 There is a specific trap since the first commit will have the index
61 There is a specific trap since the first commit will have the index
62 ``0`` but the svn id will be ``"1"``.
62 ``0`` but the svn id will be ``"1"``.
63
63
64 """
64 """
65
65
66 # Note: Subversion does not really have a default branch name.
66 # Note: Subversion does not really have a default branch name.
67 DEFAULT_BRANCH_NAME = None
67 DEFAULT_BRANCH_NAME = None
68
68
69 contact = base.BaseRepository.DEFAULT_CONTACT
69 contact = base.BaseRepository.DEFAULT_CONTACT
70 description = base.BaseRepository.DEFAULT_DESCRIPTION
70 description = base.BaseRepository.DEFAULT_DESCRIPTION
71
71
72 def __init__(self, repo_path, config=None, create=False, src_url=None, bare=False,
72 def __init__(self, repo_path, config=None, create=False, src_url=None, bare=False,
73 **kwargs):
73 **kwargs):
74 self.path = safe_str(os.path.abspath(repo_path))
74 self.path = safe_str(os.path.abspath(repo_path))
75 self.config = config if config else self.get_default_config()
75 self.config = config if config else self.get_default_config()
76
76
77 self._init_repo(create, src_url)
77 self._init_repo(create, src_url)
78
78
79 # dependent that trigger re-computation of commit_ids
79 # caches
80 self._commit_ids_ver = 0
80 self._commit_ids = {}
81
81
82
82 @LazyProperty
83 @LazyProperty
83 def _remote(self):
84 def _remote(self):
84 return connection.Svn(self.path, self.config)
85 return connection.Svn(self.path, self.config)
85
86
86 def _init_repo(self, create, src_url):
87 def _init_repo(self, create, src_url):
87 if create and os.path.exists(self.path):
88 if create and os.path.exists(self.path):
88 raise RepositoryError(
89 raise RepositoryError(
89 "Cannot create repository at %s, location already exist"
90 "Cannot create repository at %s, location already exist"
90 % self.path)
91 % self.path)
91
92
92 if create:
93 if create:
93 self._remote.create_repository(settings.SVN_COMPATIBLE_VERSION)
94 self._remote.create_repository(settings.SVN_COMPATIBLE_VERSION)
94 if src_url:
95 if src_url:
95 src_url = _sanitize_url(src_url)
96 src_url = _sanitize_url(src_url)
96 self._remote.import_remote_repository(src_url)
97 self._remote.import_remote_repository(src_url)
97 else:
98 else:
98 self._check_path()
99 self._check_path()
99
100
100 @CachedProperty('_commit_ids_ver')
101 @CachedProperty
101 def commit_ids(self):
102 def commit_ids(self):
102 head = self._remote.lookup(None)
103 head = self._remote.lookup(None)
103 return [str(r) for r in xrange(1, head + 1)]
104 return [str(r) for r in xrange(1, head + 1)]
104
105
105 def _rebuild_cache(self, commit_ids):
106 def _rebuild_cache(self, commit_ids):
106 pass
107 pass
107
108
108 def run_svn_command(self, cmd, **opts):
109 def run_svn_command(self, cmd, **opts):
109 """
110 """
110 Runs given ``cmd`` as svn command and returns tuple
111 Runs given ``cmd`` as svn command and returns tuple
111 (stdout, stderr).
112 (stdout, stderr).
112
113
113 :param cmd: full svn command to be executed
114 :param cmd: full svn command to be executed
114 :param opts: env options to pass into Subprocess command
115 :param opts: env options to pass into Subprocess command
115 """
116 """
116 if not isinstance(cmd, list):
117 if not isinstance(cmd, list):
117 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
118 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
118
119
119 skip_stderr_log = opts.pop('skip_stderr_log', False)
120 skip_stderr_log = opts.pop('skip_stderr_log', False)
120 out, err = self._remote.run_svn_command(cmd, **opts)
121 out, err = self._remote.run_svn_command(cmd, **opts)
121 if err and not skip_stderr_log:
122 if err and not skip_stderr_log:
122 log.debug('Stderr output of svn command "%s":\n%s', cmd, err)
123 log.debug('Stderr output of svn command "%s":\n%s', cmd, err)
123 return out, err
124 return out, err
124
125
125 @LazyProperty
126 @LazyProperty
126 def branches(self):
127 def branches(self):
127 return self._tags_or_branches('vcs_svn_branch')
128 return self._tags_or_branches('vcs_svn_branch')
128
129
129 @LazyProperty
130 @LazyProperty
130 def branches_closed(self):
131 def branches_closed(self):
131 return {}
132 return {}
132
133
133 @LazyProperty
134 @LazyProperty
134 def bookmarks(self):
135 def bookmarks(self):
135 return {}
136 return {}
136
137
137 @LazyProperty
138 @LazyProperty
138 def branches_all(self):
139 def branches_all(self):
139 # TODO: johbo: Implement proper branch support
140 # TODO: johbo: Implement proper branch support
140 all_branches = {}
141 all_branches = {}
141 all_branches.update(self.branches)
142 all_branches.update(self.branches)
142 all_branches.update(self.branches_closed)
143 all_branches.update(self.branches_closed)
143 return all_branches
144 return all_branches
144
145
145 @LazyProperty
146 @LazyProperty
146 def tags(self):
147 def tags(self):
147 return self._tags_or_branches('vcs_svn_tag')
148 return self._tags_or_branches('vcs_svn_tag')
148
149
149 def _tags_or_branches(self, config_section):
150 def _tags_or_branches(self, config_section):
150 found_items = {}
151 found_items = {}
151
152
152 if self.is_empty():
153 if self.is_empty():
153 return {}
154 return {}
154
155
155 for pattern in self._patterns_from_section(config_section):
156 for pattern in self._patterns_from_section(config_section):
156 pattern = vcspath.sanitize(pattern)
157 pattern = vcspath.sanitize(pattern)
157 tip = self.get_commit()
158 tip = self.get_commit()
158 try:
159 try:
159 if pattern.endswith('*'):
160 if pattern.endswith('*'):
160 basedir = tip.get_node(vcspath.dirname(pattern))
161 basedir = tip.get_node(vcspath.dirname(pattern))
161 directories = basedir.dirs
162 directories = basedir.dirs
162 else:
163 else:
163 directories = (tip.get_node(pattern), )
164 directories = (tip.get_node(pattern), )
164 except NodeDoesNotExistError:
165 except NodeDoesNotExistError:
165 continue
166 continue
166 found_items.update(
167 found_items.update(
167 (safe_unicode(n.path),
168 (safe_unicode(n.path),
168 self.commit_ids[-1])
169 self.commit_ids[-1])
169 for n in directories)
170 for n in directories)
170
171
171 def get_name(item):
172 def get_name(item):
172 return item[0]
173 return item[0]
173
174
174 return OrderedDict(sorted(found_items.items(), key=get_name))
175 return OrderedDict(sorted(found_items.items(), key=get_name))
175
176
176 def _patterns_from_section(self, section):
177 def _patterns_from_section(self, section):
177 return (pattern for key, pattern in self.config.items(section))
178 return (pattern for key, pattern in self.config.items(section))
178
179
179 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
180 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
180 if self != repo2:
181 if self != repo2:
181 raise ValueError(
182 raise ValueError(
182 "Subversion does not support getting common ancestor of"
183 "Subversion does not support getting common ancestor of"
183 " different repositories.")
184 " different repositories.")
184
185
185 if int(commit_id1) < int(commit_id2):
186 if int(commit_id1) < int(commit_id2):
186 return commit_id1
187 return commit_id1
187 return commit_id2
188 return commit_id2
188
189
189 def verify(self):
190 def verify(self):
190 verify = self._remote.verify()
191 verify = self._remote.verify()
191
192
192 self._remote.invalidate_vcs_cache()
193 self._remote.invalidate_vcs_cache()
193 return verify
194 return verify
194
195
195 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
196 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
196 # TODO: johbo: Implement better comparison, this is a very naive
197 # TODO: johbo: Implement better comparison, this is a very naive
197 # version which does not allow to compare branches, tags or folders
198 # version which does not allow to compare branches, tags or folders
198 # at all.
199 # at all.
199 if repo2 != self:
200 if repo2 != self:
200 raise ValueError(
201 raise ValueError(
201 "Subversion does not support comparison of of different "
202 "Subversion does not support comparison of of different "
202 "repositories.")
203 "repositories.")
203
204
204 if commit_id1 == commit_id2:
205 if commit_id1 == commit_id2:
205 return []
206 return []
206
207
207 commit_idx1 = self._get_commit_idx(commit_id1)
208 commit_idx1 = self._get_commit_idx(commit_id1)
208 commit_idx2 = self._get_commit_idx(commit_id2)
209 commit_idx2 = self._get_commit_idx(commit_id2)
209
210
210 commits = [
211 commits = [
211 self.get_commit(commit_idx=idx)
212 self.get_commit(commit_idx=idx)
212 for idx in range(commit_idx1 + 1, commit_idx2 + 1)]
213 for idx in range(commit_idx1 + 1, commit_idx2 + 1)]
213
214
214 return commits
215 return commits
215
216
216 def _get_commit_idx(self, commit_id):
217 def _get_commit_idx(self, commit_id):
217 try:
218 try:
218 svn_rev = int(commit_id)
219 svn_rev = int(commit_id)
219 except:
220 except:
220 # TODO: johbo: this might be only one case, HEAD, check this
221 # TODO: johbo: this might be only one case, HEAD, check this
221 svn_rev = self._remote.lookup(commit_id)
222 svn_rev = self._remote.lookup(commit_id)
222 commit_idx = svn_rev - 1
223 commit_idx = svn_rev - 1
223 if commit_idx >= len(self.commit_ids):
224 if commit_idx >= len(self.commit_ids):
224 raise CommitDoesNotExistError(
225 raise CommitDoesNotExistError(
225 "Commit at index %s does not exist." % (commit_idx, ))
226 "Commit at index %s does not exist." % (commit_idx, ))
226 return commit_idx
227 return commit_idx
227
228
228 @staticmethod
229 @staticmethod
229 def check_url(url, config):
230 def check_url(url, config):
230 """
231 """
231 Check if `url` is a valid source to import a Subversion repository.
232 Check if `url` is a valid source to import a Subversion repository.
232 """
233 """
233 # convert to URL if it's a local directory
234 # convert to URL if it's a local directory
234 if os.path.isdir(url):
235 if os.path.isdir(url):
235 url = 'file://' + urllib.pathname2url(url)
236 url = 'file://' + urllib.pathname2url(url)
236 return connection.Svn.check_url(url, config.serialize())
237 return connection.Svn.check_url(url, config.serialize())
237
238
238 @staticmethod
239 @staticmethod
239 def is_valid_repository(path):
240 def is_valid_repository(path):
240 try:
241 try:
241 SubversionRepository(path)
242 SubversionRepository(path)
242 return True
243 return True
243 except VCSError:
244 except VCSError:
244 pass
245 pass
245 return False
246 return False
246
247
247 def _check_path(self):
248 def _check_path(self):
248 if not os.path.exists(self.path):
249 if not os.path.exists(self.path):
249 raise VCSError('Path "%s" does not exist!' % (self.path, ))
250 raise VCSError('Path "%s" does not exist!' % (self.path, ))
250 if not self._remote.is_path_valid_repository(self.path):
251 if not self._remote.is_path_valid_repository(self.path):
251 raise VCSError(
252 raise VCSError(
252 'Path "%s" does not contain a Subversion repository' %
253 'Path "%s" does not contain a Subversion repository' %
253 (self.path, ))
254 (self.path, ))
254
255
255 @LazyProperty
256 @LazyProperty
256 def last_change(self):
257 def last_change(self):
257 """
258 """
258 Returns last change made on this repository as
259 Returns last change made on this repository as
259 `datetime.datetime` object.
260 `datetime.datetime` object.
260 """
261 """
261 # Subversion always has a first commit which has id "0" and contains
262 # Subversion always has a first commit which has id "0" and contains
262 # what we are looking for.
263 # what we are looking for.
263 last_id = len(self.commit_ids)
264 last_id = len(self.commit_ids)
264 properties = self._remote.revision_properties(last_id)
265 properties = self._remote.revision_properties(last_id)
265 return _date_from_svn_properties(properties)
266 return _date_from_svn_properties(properties)
266
267
267 @LazyProperty
268 @LazyProperty
268 def in_memory_commit(self):
269 def in_memory_commit(self):
269 return SubversionInMemoryCommit(self)
270 return SubversionInMemoryCommit(self)
270
271
271 def get_hook_location(self):
272 def get_hook_location(self):
272 """
273 """
273 returns absolute path to location where hooks are stored
274 returns absolute path to location where hooks are stored
274 """
275 """
275 return os.path.join(self.path, 'hooks')
276 return os.path.join(self.path, 'hooks')
276
277
277 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
278 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
278 if self.is_empty():
279 if self.is_empty():
279 raise EmptyRepositoryError("There are no commits yet")
280 raise EmptyRepositoryError("There are no commits yet")
280 if commit_id is not None:
281 if commit_id is not None:
281 self._validate_commit_id(commit_id)
282 self._validate_commit_id(commit_id)
282 elif commit_idx is not None:
283 elif commit_idx is not None:
283 self._validate_commit_idx(commit_idx)
284 self._validate_commit_idx(commit_idx)
284 try:
285 try:
285 commit_id = self.commit_ids[commit_idx]
286 commit_id = self.commit_ids[commit_idx]
286 except IndexError:
287 except IndexError:
287 raise CommitDoesNotExistError('No commit with idx: {}'.format(commit_idx))
288 raise CommitDoesNotExistError('No commit with idx: {}'.format(commit_idx))
288
289
289 commit_id = self._sanitize_commit_id(commit_id)
290 commit_id = self._sanitize_commit_id(commit_id)
290 commit = SubversionCommit(repository=self, commit_id=commit_id)
291 commit = SubversionCommit(repository=self, commit_id=commit_id)
291 return commit
292 return commit
292
293
293 def get_commits(
294 def get_commits(
294 self, start_id=None, end_id=None, start_date=None, end_date=None,
295 self, start_id=None, end_id=None, start_date=None, end_date=None,
295 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
296 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
296 if self.is_empty():
297 if self.is_empty():
297 raise EmptyRepositoryError("There are no commit_ids yet")
298 raise EmptyRepositoryError("There are no commit_ids yet")
298 self._validate_branch_name(branch_name)
299 self._validate_branch_name(branch_name)
299
300
300 if start_id is not None:
301 if start_id is not None:
301 self._validate_commit_id(start_id)
302 self._validate_commit_id(start_id)
302 if end_id is not None:
303 if end_id is not None:
303 self._validate_commit_id(end_id)
304 self._validate_commit_id(end_id)
304
305
305 start_raw_id = self._sanitize_commit_id(start_id)
306 start_raw_id = self._sanitize_commit_id(start_id)
306 start_pos = self.commit_ids.index(start_raw_id) if start_id else None
307 start_pos = self.commit_ids.index(start_raw_id) if start_id else None
307 end_raw_id = self._sanitize_commit_id(end_id)
308 end_raw_id = self._sanitize_commit_id(end_id)
308 end_pos = max(0, self.commit_ids.index(end_raw_id)) if end_id else None
309 end_pos = max(0, self.commit_ids.index(end_raw_id)) if end_id else None
309
310
310 if None not in [start_id, end_id] and start_pos > end_pos:
311 if None not in [start_id, end_id] and start_pos > end_pos:
311 raise RepositoryError(
312 raise RepositoryError(
312 "Start commit '%s' cannot be after end commit '%s'" %
313 "Start commit '%s' cannot be after end commit '%s'" %
313 (start_id, end_id))
314 (start_id, end_id))
314 if end_pos is not None:
315 if end_pos is not None:
315 end_pos += 1
316 end_pos += 1
316
317
317 # Date based filtering
318 # Date based filtering
318 if start_date or end_date:
319 if start_date or end_date:
319 start_raw_id, end_raw_id = self._remote.lookup_interval(
320 start_raw_id, end_raw_id = self._remote.lookup_interval(
320 date_astimestamp(start_date) if start_date else None,
321 date_astimestamp(start_date) if start_date else None,
321 date_astimestamp(end_date) if end_date else None)
322 date_astimestamp(end_date) if end_date else None)
322 start_pos = start_raw_id - 1
323 start_pos = start_raw_id - 1
323 end_pos = end_raw_id
324 end_pos = end_raw_id
324
325
325 commit_ids = self.commit_ids
326 commit_ids = self.commit_ids
326
327
327 # TODO: johbo: Reconsider impact of DEFAULT_BRANCH_NAME here
328 # TODO: johbo: Reconsider impact of DEFAULT_BRANCH_NAME here
328 if branch_name not in [None, self.DEFAULT_BRANCH_NAME]:
329 if branch_name not in [None, self.DEFAULT_BRANCH_NAME]:
329 svn_rev = long(self.commit_ids[-1])
330 svn_rev = long(self.commit_ids[-1])
330 commit_ids = self._remote.node_history(
331 commit_ids = self._remote.node_history(
331 path=branch_name, revision=svn_rev, limit=None)
332 path=branch_name, revision=svn_rev, limit=None)
332 commit_ids = [str(i) for i in reversed(commit_ids)]
333 commit_ids = [str(i) for i in reversed(commit_ids)]
333
334
334 if start_pos or end_pos:
335 if start_pos or end_pos:
335 commit_ids = commit_ids[start_pos:end_pos]
336 commit_ids = commit_ids[start_pos:end_pos]
336 return base.CollectionGenerator(self, commit_ids, pre_load=pre_load)
337 return base.CollectionGenerator(self, commit_ids, pre_load=pre_load)
337
338
338 def _sanitize_commit_id(self, commit_id):
339 def _sanitize_commit_id(self, commit_id):
339 if commit_id and commit_id.isdigit():
340 if commit_id and commit_id.isdigit():
340 if int(commit_id) <= len(self.commit_ids):
341 if int(commit_id) <= len(self.commit_ids):
341 return commit_id
342 return commit_id
342 else:
343 else:
343 raise CommitDoesNotExistError(
344 raise CommitDoesNotExistError(
344 "Commit %s does not exist." % (commit_id, ))
345 "Commit %s does not exist." % (commit_id, ))
345 if commit_id not in [
346 if commit_id not in [
346 None, 'HEAD', 'tip', self.DEFAULT_BRANCH_NAME]:
347 None, 'HEAD', 'tip', self.DEFAULT_BRANCH_NAME]:
347 raise CommitDoesNotExistError(
348 raise CommitDoesNotExistError(
348 "Commit id %s not understood." % (commit_id, ))
349 "Commit id %s not understood." % (commit_id, ))
349 svn_rev = self._remote.lookup('HEAD')
350 svn_rev = self._remote.lookup('HEAD')
350 return str(svn_rev)
351 return str(svn_rev)
351
352
352 def get_diff(
353 def get_diff(
353 self, commit1, commit2, path=None, ignore_whitespace=False,
354 self, commit1, commit2, path=None, ignore_whitespace=False,
354 context=3, path1=None):
355 context=3, path1=None):
355 self._validate_diff_commits(commit1, commit2)
356 self._validate_diff_commits(commit1, commit2)
356 svn_rev1 = long(commit1.raw_id)
357 svn_rev1 = long(commit1.raw_id)
357 svn_rev2 = long(commit2.raw_id)
358 svn_rev2 = long(commit2.raw_id)
358 diff = self._remote.diff(
359 diff = self._remote.diff(
359 svn_rev1, svn_rev2, path1=path1, path2=path,
360 svn_rev1, svn_rev2, path1=path1, path2=path,
360 ignore_whitespace=ignore_whitespace, context=context)
361 ignore_whitespace=ignore_whitespace, context=context)
361 return SubversionDiff(diff)
362 return SubversionDiff(diff)
362
363
363
364
364 def _sanitize_url(url):
365 def _sanitize_url(url):
365 if '://' not in url:
366 if '://' not in url:
366 url = 'file://' + urllib.pathname2url(url)
367 url = 'file://' + urllib.pathname2url(url)
367 return url
368 return url
@@ -1,1275 +1,1276 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import datetime
21 import datetime
22 import mock
22 import mock
23 import os
23 import os
24 import sys
24 import sys
25 import shutil
25 import shutil
26
26
27 import pytest
27 import pytest
28
28
29 from rhodecode.lib.utils import make_db_config
29 from rhodecode.lib.utils import make_db_config
30 from rhodecode.lib.vcs.backends.base import Reference
30 from rhodecode.lib.vcs.backends.base import Reference
31 from rhodecode.lib.vcs.backends.git import (
31 from rhodecode.lib.vcs.backends.git import (
32 GitRepository, GitCommit, discover_git_version)
32 GitRepository, GitCommit, discover_git_version)
33 from rhodecode.lib.vcs.exceptions import (
33 from rhodecode.lib.vcs.exceptions import (
34 RepositoryError, VCSError, NodeDoesNotExistError)
34 RepositoryError, VCSError, NodeDoesNotExistError)
35 from rhodecode.lib.vcs.nodes import (
35 from rhodecode.lib.vcs.nodes import (
36 NodeKind, FileNode, DirNode, NodeState, SubModuleNode)
36 NodeKind, FileNode, DirNode, NodeState, SubModuleNode)
37 from rhodecode.tests import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
37 from rhodecode.tests import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
38 from rhodecode.tests.vcs.conftest import BackendTestMixin
38 from rhodecode.tests.vcs.conftest import BackendTestMixin
39
39
40
40
# Restrict every test in this module to the Git backend.
pytestmark = pytest.mark.backends("git")
42
42
43
43
44 class TestGitRepository(object):
44 class TestGitRepository(object):
45
45
46 @pytest.fixture(autouse=True)
46 @pytest.fixture(autouse=True)
47 def prepare(self, request, baseapp):
47 def prepare(self, request, baseapp):
48 self.repo = GitRepository(TEST_GIT_REPO, bare=True)
48 self.repo = GitRepository(TEST_GIT_REPO, bare=True)
49 self.repo.count()
49
50
50 def get_clone_repo(self, tmp_path_factory):
51 def get_clone_repo(self, tmp_path_factory):
51 """
52 """
52 Return a non bare clone of the base repo.
53 Return a non bare clone of the base repo.
53 """
54 """
54 clone_path = tmp_path_factory.mktemp('clone-url')
55 clone_path = tmp_path_factory.mktemp('clone-url')
55 repo_clone = GitRepository(
56 repo_clone = GitRepository(
56 clone_path, create=True, src_url=self.repo.path, bare=False)
57 clone_path, create=True, src_url=self.repo.path, bare=False)
57
58
58 return repo_clone
59 return repo_clone
59
60
60 def get_empty_repo(self, tmp_path_factory, bare=False):
61 def get_empty_repo(self, tmp_path_factory, bare=False):
61 """
62 """
62 Return a non bare empty repo.
63 Return a non bare empty repo.
63 """
64 """
64 clone_path = tmp_path_factory.mktemp('empty-repo')
65 clone_path = tmp_path_factory.mktemp('empty-repo')
65 return GitRepository(clone_path, create=True, bare=bare)
66 return GitRepository(clone_path, create=True, bare=bare)
66
67
67 def test_wrong_repo_path(self):
68 def test_wrong_repo_path(self):
68 wrong_repo_path = '/tmp/errorrepo_git'
69 wrong_repo_path = '/tmp/errorrepo_git'
69 with pytest.raises(RepositoryError):
70 with pytest.raises(RepositoryError):
70 GitRepository(wrong_repo_path)
71 GitRepository(wrong_repo_path)
71
72
72 def test_repo_clone(self, tmp_path_factory):
73 def test_repo_clone(self, tmp_path_factory):
73 repo = GitRepository(TEST_GIT_REPO)
74 repo = GitRepository(TEST_GIT_REPO)
74 clone_path = tmp_path_factory.mktemp('_') + '_' + TEST_GIT_REPO_CLONE
75 clone_path = tmp_path_factory.mktemp('_') + '_' + TEST_GIT_REPO_CLONE
75 repo_clone = GitRepository(
76 repo_clone = GitRepository(
76 clone_path,
77 clone_path,
77 src_url=TEST_GIT_REPO, create=True, do_workspace_checkout=True)
78 src_url=TEST_GIT_REPO, create=True, do_workspace_checkout=True)
78
79
79 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
80 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
80 # Checking hashes of commits should be enough
81 # Checking hashes of commits should be enough
81 for commit in repo.get_commits():
82 for commit in repo.get_commits():
82 raw_id = commit.raw_id
83 raw_id = commit.raw_id
83 assert raw_id == repo_clone.get_commit(raw_id).raw_id
84 assert raw_id == repo_clone.get_commit(raw_id).raw_id
84
85
85 def test_repo_clone_without_create(self):
86 def test_repo_clone_without_create(self):
86 with pytest.raises(RepositoryError):
87 with pytest.raises(RepositoryError):
87 GitRepository(
88 GitRepository(
88 TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)
89 TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)
89
90
90 def test_repo_clone_with_update(self, tmp_path_factory):
91 def test_repo_clone_with_update(self, tmp_path_factory):
91 repo = GitRepository(TEST_GIT_REPO)
92 repo = GitRepository(TEST_GIT_REPO)
92 clone_path = tmp_path_factory.mktemp('_') + '_' + TEST_GIT_REPO_CLONE + '_update'
93 clone_path = tmp_path_factory.mktemp('_') + '_' + TEST_GIT_REPO_CLONE + '_update'
93
94
94 repo_clone = GitRepository(
95 repo_clone = GitRepository(
95 clone_path,
96 clone_path,
96 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=True)
97 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=True)
97 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
98 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
98
99
99 # check if current workdir was updated
100 # check if current workdir was updated
100 fpath = os.path.join(clone_path, 'MANIFEST.in')
101 fpath = os.path.join(clone_path, 'MANIFEST.in')
101 assert os.path.isfile(fpath)
102 assert os.path.isfile(fpath)
102
103
103 def test_repo_clone_without_update(self, tmp_path_factory):
104 def test_repo_clone_without_update(self, tmp_path_factory):
104 repo = GitRepository(TEST_GIT_REPO)
105 repo = GitRepository(TEST_GIT_REPO)
105 clone_path = tmp_path_factory.mktemp('_') + '_' + TEST_GIT_REPO_CLONE + '_without_update'
106 clone_path = tmp_path_factory.mktemp('_') + '_' + TEST_GIT_REPO_CLONE + '_without_update'
106 repo_clone = GitRepository(
107 repo_clone = GitRepository(
107 clone_path,
108 clone_path,
108 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=False)
109 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=False)
109 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
110 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
110 # check if current workdir was *NOT* updated
111 # check if current workdir was *NOT* updated
111 fpath = os.path.join(clone_path, 'MANIFEST.in')
112 fpath = os.path.join(clone_path, 'MANIFEST.in')
112 # Make sure it's not bare repo
113 # Make sure it's not bare repo
113 assert not repo_clone.bare
114 assert not repo_clone.bare
114 assert not os.path.isfile(fpath)
115 assert not os.path.isfile(fpath)
115
116
116 def test_repo_clone_into_bare_repo(self, tmp_path_factory):
117 def test_repo_clone_into_bare_repo(self, tmp_path_factory):
117 repo = GitRepository(TEST_GIT_REPO)
118 repo = GitRepository(TEST_GIT_REPO)
118 clone_path = tmp_path_factory.mktemp('_') + '_' + TEST_GIT_REPO_CLONE + '_bare.git'
119 clone_path = tmp_path_factory.mktemp('_') + '_' + TEST_GIT_REPO_CLONE + '_bare.git'
119 repo_clone = GitRepository(
120 repo_clone = GitRepository(
120 clone_path, create=True, src_url=repo.path, bare=True)
121 clone_path, create=True, src_url=repo.path, bare=True)
121 assert repo_clone.bare
122 assert repo_clone.bare
122
123
123 def test_create_repo_is_not_bare_by_default(self):
124 def test_create_repo_is_not_bare_by_default(self):
124 repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
125 repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
125 assert not repo.bare
126 assert not repo.bare
126
127
127 def test_create_bare_repo(self):
128 def test_create_bare_repo(self):
128 repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True)
129 repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True)
129 assert repo.bare
130 assert repo.bare
130
131
131 def test_update_server_info(self):
132 def test_update_server_info(self):
132 self.repo._update_server_info()
133 self.repo._update_server_info()
133
134
134 def test_fetch(self, vcsbackend_git):
135 def test_fetch(self, vcsbackend_git):
135 # Note: This is a git specific part of the API, it's only implemented
136 # Note: This is a git specific part of the API, it's only implemented
136 # by the git backend.
137 # by the git backend.
137 source_repo = vcsbackend_git.repo
138 source_repo = vcsbackend_git.repo
138 target_repo = vcsbackend_git.create_repo(bare=True)
139 target_repo = vcsbackend_git.create_repo(bare=True)
139 target_repo.fetch(source_repo.path)
140 target_repo.fetch(source_repo.path)
140 # Note: Get a fresh instance, avoids caching trouble
141 # Note: Get a fresh instance, avoids caching trouble
141 target_repo = vcsbackend_git.backend(target_repo.path)
142 target_repo = vcsbackend_git.backend(target_repo.path)
142 assert len(source_repo.commit_ids) == len(target_repo.commit_ids)
143 assert len(source_repo.commit_ids) == len(target_repo.commit_ids)
143
144
144 def test_commit_ids(self):
145 def test_commit_ids(self):
145 # there are 112 commits (by now)
146 # there are 112 commits (by now)
146 # so we can assume they would be available from now on
147 # so we can assume they would be available from now on
147 subset = {'c1214f7e79e02fc37156ff215cd71275450cffc3',
148 subset = {'c1214f7e79e02fc37156ff215cd71275450cffc3',
148 '38b5fe81f109cb111f549bfe9bb6b267e10bc557',
149 '38b5fe81f109cb111f549bfe9bb6b267e10bc557',
149 'fa6600f6848800641328adbf7811fd2372c02ab2',
150 'fa6600f6848800641328adbf7811fd2372c02ab2',
150 '102607b09cdd60e2793929c4f90478be29f85a17',
151 '102607b09cdd60e2793929c4f90478be29f85a17',
151 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
152 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
152 '2d1028c054665b962fa3d307adfc923ddd528038',
153 '2d1028c054665b962fa3d307adfc923ddd528038',
153 'd7e0d30fbcae12c90680eb095a4f5f02505ce501',
154 'd7e0d30fbcae12c90680eb095a4f5f02505ce501',
154 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
155 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
155 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
156 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
156 '8430a588b43b5d6da365400117c89400326e7992',
157 '8430a588b43b5d6da365400117c89400326e7992',
157 'd955cd312c17b02143c04fa1099a352b04368118',
158 'd955cd312c17b02143c04fa1099a352b04368118',
158 'f67b87e5c629c2ee0ba58f85197e423ff28d735b',
159 'f67b87e5c629c2ee0ba58f85197e423ff28d735b',
159 'add63e382e4aabc9e1afdc4bdc24506c269b7618',
160 'add63e382e4aabc9e1afdc4bdc24506c269b7618',
160 'f298fe1189f1b69779a4423f40b48edf92a703fc',
161 'f298fe1189f1b69779a4423f40b48edf92a703fc',
161 'bd9b619eb41994cac43d67cf4ccc8399c1125808',
162 'bd9b619eb41994cac43d67cf4ccc8399c1125808',
162 '6e125e7c890379446e98980d8ed60fba87d0f6d1',
163 '6e125e7c890379446e98980d8ed60fba87d0f6d1',
163 'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd',
164 'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd',
164 '0b05e4ed56c802098dfc813cbe779b2f49e92500',
165 '0b05e4ed56c802098dfc813cbe779b2f49e92500',
165 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
166 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
166 '45223f8f114c64bf4d6f853e3c35a369a6305520',
167 '45223f8f114c64bf4d6f853e3c35a369a6305520',
167 'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
168 'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
168 'f5ea29fc42ef67a2a5a7aecff10e1566699acd68',
169 'f5ea29fc42ef67a2a5a7aecff10e1566699acd68',
169 '27d48942240f5b91dfda77accd2caac94708cc7d',
170 '27d48942240f5b91dfda77accd2caac94708cc7d',
170 '622f0eb0bafd619d2560c26f80f09e3b0b0d78af',
171 '622f0eb0bafd619d2560c26f80f09e3b0b0d78af',
171 'e686b958768ee96af8029fe19c6050b1a8dd3b2b'}
172 'e686b958768ee96af8029fe19c6050b1a8dd3b2b'}
172 assert subset.issubset(set(self.repo.commit_ids))
173 assert subset.issubset(set(self.repo.commit_ids))
173
174
174 def test_slicing(self):
175 def test_slicing(self):
175 # 4 1 5 10 95
176 # 4 1 5 10 95
176 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
177 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
177 (10, 20, 10), (5, 100, 95)]:
178 (10, 20, 10), (5, 100, 95)]:
178 commit_ids = list(self.repo[sfrom:sto])
179 commit_ids = list(self.repo[sfrom:sto])
179 assert len(commit_ids) == size
180 assert len(commit_ids) == size
180 assert commit_ids[0] == self.repo.get_commit(commit_idx=sfrom)
181 assert commit_ids[0] == self.repo.get_commit(commit_idx=sfrom)
181 assert commit_ids[-1] == self.repo.get_commit(commit_idx=sto - 1)
182 assert commit_ids[-1] == self.repo.get_commit(commit_idx=sto - 1)
182
183
183 def test_branches(self):
184 def test_branches(self):
184 # TODO: Need more tests here
185 # TODO: Need more tests here
185 # Removed (those are 'remotes' branches for cloned repo)
186 # Removed (those are 'remotes' branches for cloned repo)
186 # assert 'master' in self.repo.branches
187 # assert 'master' in self.repo.branches
187 # assert 'gittree' in self.repo.branches
188 # assert 'gittree' in self.repo.branches
188 # assert 'web-branch' in self.repo.branches
189 # assert 'web-branch' in self.repo.branches
189 for __, commit_id in self.repo.branches.items():
190 for __, commit_id in self.repo.branches.items():
190 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
191 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
191
192
192 def test_tags(self):
193 def test_tags(self):
193 # TODO: Need more tests here
194 # TODO: Need more tests here
194 assert 'v0.1.1' in self.repo.tags
195 assert 'v0.1.1' in self.repo.tags
195 assert 'v0.1.2' in self.repo.tags
196 assert 'v0.1.2' in self.repo.tags
196 for __, commit_id in self.repo.tags.items():
197 for __, commit_id in self.repo.tags.items():
197 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
198 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
198
199
199 def _test_single_commit_cache(self, commit_id):
200 def _test_single_commit_cache(self, commit_id):
200 commit = self.repo.get_commit(commit_id)
201 commit = self.repo.get_commit(commit_id)
201 assert commit_id in self.repo.commits
202 assert commit_id in self.repo.commits
202 assert commit is self.repo.commits[commit_id]
203 assert commit is self.repo.commits[commit_id]
203
204
204 def test_initial_commit(self):
205 def test_initial_commit(self):
205 commit_id = self.repo.commit_ids[0]
206 commit_id = self.repo.commit_ids[0]
206 init_commit = self.repo.get_commit(commit_id)
207 init_commit = self.repo.get_commit(commit_id)
207 init_author = init_commit.author
208 init_author = init_commit.author
208
209
209 assert init_commit.message == 'initial import\n'
210 assert init_commit.message == 'initial import\n'
210 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
211 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
211 assert init_author == init_commit.committer
212 assert init_author == init_commit.committer
212 for path in ('vcs/__init__.py',
213 for path in ('vcs/__init__.py',
213 'vcs/backends/BaseRepository.py',
214 'vcs/backends/BaseRepository.py',
214 'vcs/backends/__init__.py'):
215 'vcs/backends/__init__.py'):
215 assert isinstance(init_commit.get_node(path), FileNode)
216 assert isinstance(init_commit.get_node(path), FileNode)
216 for path in ('', 'vcs', 'vcs/backends'):
217 for path in ('', 'vcs', 'vcs/backends'):
217 assert isinstance(init_commit.get_node(path), DirNode)
218 assert isinstance(init_commit.get_node(path), DirNode)
218
219
219 with pytest.raises(NodeDoesNotExistError):
220 with pytest.raises(NodeDoesNotExistError):
220 init_commit.get_node(path='foobar')
221 init_commit.get_node(path='foobar')
221
222
222 node = init_commit.get_node('vcs/')
223 node = init_commit.get_node('vcs/')
223 assert hasattr(node, 'kind')
224 assert hasattr(node, 'kind')
224 assert node.kind == NodeKind.DIR
225 assert node.kind == NodeKind.DIR
225
226
226 node = init_commit.get_node('vcs')
227 node = init_commit.get_node('vcs')
227 assert hasattr(node, 'kind')
228 assert hasattr(node, 'kind')
228 assert node.kind == NodeKind.DIR
229 assert node.kind == NodeKind.DIR
229
230
230 node = init_commit.get_node('vcs/__init__.py')
231 node = init_commit.get_node('vcs/__init__.py')
231 assert hasattr(node, 'kind')
232 assert hasattr(node, 'kind')
232 assert node.kind == NodeKind.FILE
233 assert node.kind == NodeKind.FILE
233
234
234 def test_not_existing_commit(self):
235 def test_not_existing_commit(self):
235 with pytest.raises(RepositoryError):
236 with pytest.raises(RepositoryError):
236 self.repo.get_commit('f' * 40)
237 self.repo.get_commit('f' * 40)
237
238
238 def test_commit10(self):
239 def test_commit10(self):
239
240
240 commit10 = self.repo.get_commit(self.repo.commit_ids[9])
241 commit10 = self.repo.get_commit(self.repo.commit_ids[9])
241 README = """===
242 README = """===
242 VCS
243 VCS
243 ===
244 ===
244
245
245 Various Version Control System management abstraction layer for Python.
246 Various Version Control System management abstraction layer for Python.
246
247
247 Introduction
248 Introduction
248 ------------
249 ------------
249
250
250 TODO: To be written...
251 TODO: To be written...
251
252
252 """
253 """
253 node = commit10.get_node('README.rst')
254 node = commit10.get_node('README.rst')
254 assert node.kind == NodeKind.FILE
255 assert node.kind == NodeKind.FILE
255 assert node.content == README
256 assert node.content == README
256
257
257 def test_head(self):
258 def test_head(self):
258 assert self.repo.head == self.repo.get_commit().raw_id
259 assert self.repo.head == self.repo.get_commit().raw_id
259
260
260 def test_checkout_with_create(self, tmp_path_factory):
261 def test_checkout_with_create(self, tmp_path_factory):
261 repo_clone = self.get_clone_repo(tmp_path_factory)
262 repo_clone = self.get_clone_repo(tmp_path_factory)
262
263
263 new_branch = 'new_branch'
264 new_branch = 'new_branch'
264 assert repo_clone._current_branch() == 'master'
265 assert repo_clone._current_branch() == 'master'
265 assert set(repo_clone.branches) == {'master'}
266 assert set(repo_clone.branches) == {'master'}
266 repo_clone._checkout(new_branch, create=True)
267 repo_clone._checkout(new_branch, create=True)
267
268
268 # Branches is a lazy property so we need to recrete the Repo object.
269 # Branches is a lazy property so we need to recrete the Repo object.
269 repo_clone = GitRepository(repo_clone.path)
270 repo_clone = GitRepository(repo_clone.path)
270 assert set(repo_clone.branches) == {'master', new_branch}
271 assert set(repo_clone.branches) == {'master', new_branch}
271 assert repo_clone._current_branch() == new_branch
272 assert repo_clone._current_branch() == new_branch
272
273
273 def test_checkout(self, tmp_path_factory):
274 def test_checkout(self, tmp_path_factory):
274 repo_clone = self.get_clone_repo(tmp_path_factory)
275 repo_clone = self.get_clone_repo(tmp_path_factory)
275
276
276 repo_clone._checkout('new_branch', create=True)
277 repo_clone._checkout('new_branch', create=True)
277 repo_clone._checkout('master')
278 repo_clone._checkout('master')
278
279
279 assert repo_clone._current_branch() == 'master'
280 assert repo_clone._current_branch() == 'master'
280
281
281 def test_checkout_same_branch(self, tmp_path_factory):
282 def test_checkout_same_branch(self, tmp_path_factory):
282 repo_clone = self.get_clone_repo(tmp_path_factory)
283 repo_clone = self.get_clone_repo(tmp_path_factory)
283
284
284 repo_clone._checkout('master')
285 repo_clone._checkout('master')
285 assert repo_clone._current_branch() == 'master'
286 assert repo_clone._current_branch() == 'master'
286
287
287 def test_checkout_branch_already_exists(self, tmp_path_factory):
288 def test_checkout_branch_already_exists(self, tmp_path_factory):
288 repo_clone = self.get_clone_repo(tmp_path_factory)
289 repo_clone = self.get_clone_repo(tmp_path_factory)
289
290
290 with pytest.raises(RepositoryError):
291 with pytest.raises(RepositoryError):
291 repo_clone._checkout('master', create=True)
292 repo_clone._checkout('master', create=True)
292
293
293 def test_checkout_bare_repo(self):
294 def test_checkout_bare_repo(self):
294 with pytest.raises(RepositoryError):
295 with pytest.raises(RepositoryError):
295 self.repo._checkout('master')
296 self.repo._checkout('master')
296
297
297 def test_current_branch_bare_repo(self):
298 def test_current_branch_bare_repo(self):
298 with pytest.raises(RepositoryError):
299 with pytest.raises(RepositoryError):
299 self.repo._current_branch()
300 self.repo._current_branch()
300
301
301 def test_current_branch_empty_repo(self, tmp_path_factory):
302 def test_current_branch_empty_repo(self, tmp_path_factory):
302 repo = self.get_empty_repo(tmp_path_factory)
303 repo = self.get_empty_repo(tmp_path_factory)
303 assert repo._current_branch() is None
304 assert repo._current_branch() is None
304
305
305 def test_local_clone(self, tmp_path_factory):
306 def test_local_clone(self, tmp_path_factory):
306 clone_path = tmp_path_factory.mktemp('test-local-clone')
307 clone_path = tmp_path_factory.mktemp('test-local-clone')
307 self.repo._local_clone(clone_path, 'master')
308 self.repo._local_clone(clone_path, 'master')
308 repo_clone = GitRepository(clone_path)
309 repo_clone = GitRepository(clone_path)
309
310
310 assert self.repo.commit_ids == repo_clone.commit_ids
311 assert self.repo.commit_ids == repo_clone.commit_ids
311
312
312 def test_local_clone_with_specific_branch(self, tmp_path_factory):
313 def test_local_clone_with_specific_branch(self, tmp_path_factory):
313 source_repo = self.get_clone_repo(tmp_path_factory)
314 source_repo = self.get_clone_repo(tmp_path_factory)
314
315
315 # Create a new branch in source repo
316 # Create a new branch in source repo
316 new_branch_commit = source_repo.commit_ids[-3]
317 new_branch_commit = source_repo.commit_ids[-3]
317 source_repo._checkout(new_branch_commit)
318 source_repo._checkout(new_branch_commit)
318 source_repo._checkout('new_branch', create=True)
319 source_repo._checkout('new_branch', create=True)
319
320
320 clone_path = tmp_path_factory.mktemp('git-clone-path-1')
321 clone_path = tmp_path_factory.mktemp('git-clone-path-1')
321 source_repo._local_clone(clone_path, 'new_branch')
322 source_repo._local_clone(clone_path, 'new_branch')
322 repo_clone = GitRepository(clone_path)
323 repo_clone = GitRepository(clone_path)
323
324
324 assert source_repo.commit_ids[:-3 + 1] == repo_clone.commit_ids
325 assert source_repo.commit_ids[:-3 + 1] == repo_clone.commit_ids
325
326
326 clone_path = tmp_path_factory.mktemp('git-clone-path-2')
327 clone_path = tmp_path_factory.mktemp('git-clone-path-2')
327 source_repo._local_clone(clone_path, 'master')
328 source_repo._local_clone(clone_path, 'master')
328 repo_clone = GitRepository(clone_path)
329 repo_clone = GitRepository(clone_path)
329
330
330 assert source_repo.commit_ids == repo_clone.commit_ids
331 assert source_repo.commit_ids == repo_clone.commit_ids
331
332
332 def test_local_clone_fails_if_target_exists(self):
333 def test_local_clone_fails_if_target_exists(self):
333 with pytest.raises(RepositoryError):
334 with pytest.raises(RepositoryError):
334 self.repo._local_clone(self.repo.path, 'master')
335 self.repo._local_clone(self.repo.path, 'master')
335
336
336 def test_local_fetch(self, tmp_path_factory):
337 def test_local_fetch(self, tmp_path_factory):
337 target_repo = self.get_empty_repo(tmp_path_factory)
338 target_repo = self.get_empty_repo(tmp_path_factory)
338 source_repo = self.get_clone_repo(tmp_path_factory)
339 source_repo = self.get_clone_repo(tmp_path_factory)
339
340
340 # Create a new branch in source repo
341 # Create a new branch in source repo
341 master_commit = source_repo.commit_ids[-1]
342 master_commit = source_repo.commit_ids[-1]
342 new_branch_commit = source_repo.commit_ids[-3]
343 new_branch_commit = source_repo.commit_ids[-3]
343 source_repo._checkout(new_branch_commit)
344 source_repo._checkout(new_branch_commit)
344 source_repo._checkout('new_branch', create=True)
345 source_repo._checkout('new_branch', create=True)
345
346
346 target_repo._local_fetch(source_repo.path, 'new_branch')
347 target_repo._local_fetch(source_repo.path, 'new_branch')
347 assert target_repo._last_fetch_heads() == [new_branch_commit]
348 assert target_repo._last_fetch_heads() == [new_branch_commit]
348
349
349 target_repo._local_fetch(source_repo.path, 'master')
350 target_repo._local_fetch(source_repo.path, 'master')
350 assert target_repo._last_fetch_heads() == [master_commit]
351 assert target_repo._last_fetch_heads() == [master_commit]
351
352
352 def test_local_fetch_from_bare_repo(self, tmp_path_factory):
353 def test_local_fetch_from_bare_repo(self, tmp_path_factory):
353 target_repo = self.get_empty_repo(tmp_path_factory)
354 target_repo = self.get_empty_repo(tmp_path_factory)
354 target_repo._local_fetch(self.repo.path, 'master')
355 target_repo._local_fetch(self.repo.path, 'master')
355
356
356 master_commit = self.repo.commit_ids[-1]
357 master_commit = self.repo.commit_ids[-1]
357 assert target_repo._last_fetch_heads() == [master_commit]
358 assert target_repo._last_fetch_heads() == [master_commit]
358
359
359 def test_local_fetch_from_same_repo(self):
360 def test_local_fetch_from_same_repo(self):
360 with pytest.raises(ValueError):
361 with pytest.raises(ValueError):
361 self.repo._local_fetch(self.repo.path, 'master')
362 self.repo._local_fetch(self.repo.path, 'master')
362
363
363 def test_local_fetch_branch_does_not_exist(self, tmp_path_factory):
364 def test_local_fetch_branch_does_not_exist(self, tmp_path_factory):
364 target_repo = self.get_empty_repo(tmp_path_factory)
365 target_repo = self.get_empty_repo(tmp_path_factory)
365
366
366 with pytest.raises(RepositoryError):
367 with pytest.raises(RepositoryError):
367 target_repo._local_fetch(self.repo.path, 'new_branch')
368 target_repo._local_fetch(self.repo.path, 'new_branch')
368
369
369 def test_local_pull(self, tmp_path_factory):
370 def test_local_pull(self, tmp_path_factory):
370 target_repo = self.get_empty_repo(tmp_path_factory)
371 target_repo = self.get_empty_repo(tmp_path_factory)
371 source_repo = self.get_clone_repo(tmp_path_factory)
372 source_repo = self.get_clone_repo(tmp_path_factory)
372
373
373 # Create a new branch in source repo
374 # Create a new branch in source repo
374 master_commit = source_repo.commit_ids[-1]
375 master_commit = source_repo.commit_ids[-1]
375 new_branch_commit = source_repo.commit_ids[-3]
376 new_branch_commit = source_repo.commit_ids[-3]
376 source_repo._checkout(new_branch_commit)
377 source_repo._checkout(new_branch_commit)
377 source_repo._checkout('new_branch', create=True)
378 source_repo._checkout('new_branch', create=True)
378
379
379 target_repo._local_pull(source_repo.path, 'new_branch')
380 target_repo._local_pull(source_repo.path, 'new_branch')
380 target_repo = GitRepository(target_repo.path)
381 target_repo = GitRepository(target_repo.path)
381 assert target_repo.head == new_branch_commit
382 assert target_repo.head == new_branch_commit
382
383
383 target_repo._local_pull(source_repo.path, 'master')
384 target_repo._local_pull(source_repo.path, 'master')
384 target_repo = GitRepository(target_repo.path)
385 target_repo = GitRepository(target_repo.path)
385 assert target_repo.head == master_commit
386 assert target_repo.head == master_commit
386
387
387 def test_local_pull_in_bare_repo(self):
388 def test_local_pull_in_bare_repo(self):
388 with pytest.raises(RepositoryError):
389 with pytest.raises(RepositoryError):
389 self.repo._local_pull(self.repo.path, 'master')
390 self.repo._local_pull(self.repo.path, 'master')
390
391
391 def test_local_merge(self, tmp_path_factory):
392 def test_local_merge(self, tmp_path_factory):
392 target_repo = self.get_empty_repo(tmp_path_factory)
393 target_repo = self.get_empty_repo(tmp_path_factory)
393 source_repo = self.get_clone_repo(tmp_path_factory)
394 source_repo = self.get_clone_repo(tmp_path_factory)
394
395
395 # Create a new branch in source repo
396 # Create a new branch in source repo
396 master_commit = source_repo.commit_ids[-1]
397 master_commit = source_repo.commit_ids[-1]
397 new_branch_commit = source_repo.commit_ids[-3]
398 new_branch_commit = source_repo.commit_ids[-3]
398 source_repo._checkout(new_branch_commit)
399 source_repo._checkout(new_branch_commit)
399 source_repo._checkout('new_branch', create=True)
400 source_repo._checkout('new_branch', create=True)
400
401
401 # This is required as one cannot do a -ff-only merge in an empty repo.
402 # This is required as one cannot do a -ff-only merge in an empty repo.
402 target_repo._local_pull(source_repo.path, 'new_branch')
403 target_repo._local_pull(source_repo.path, 'new_branch')
403
404
404 target_repo._local_fetch(source_repo.path, 'master')
405 target_repo._local_fetch(source_repo.path, 'master')
405 merge_message = 'Merge message\n\nDescription:...'
406 merge_message = 'Merge message\n\nDescription:...'
406 user_name = 'Albert Einstein'
407 user_name = 'Albert Einstein'
407 user_email = 'albert@einstein.com'
408 user_email = 'albert@einstein.com'
408 target_repo._local_merge(merge_message, user_name, user_email,
409 target_repo._local_merge(merge_message, user_name, user_email,
409 target_repo._last_fetch_heads())
410 target_repo._last_fetch_heads())
410
411
411 target_repo = GitRepository(target_repo.path)
412 target_repo = GitRepository(target_repo.path)
412 assert target_repo.commit_ids[-2] == master_commit
413 assert target_repo.commit_ids[-2] == master_commit
413 last_commit = target_repo.get_commit(target_repo.head)
414 last_commit = target_repo.get_commit(target_repo.head)
414 assert last_commit.message.strip() == merge_message
415 assert last_commit.message.strip() == merge_message
415 assert last_commit.author == '%s <%s>' % (user_name, user_email)
416 assert last_commit.author == '%s <%s>' % (user_name, user_email)
416
417
417 assert not os.path.exists(
418 assert not os.path.exists(
418 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
419 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
419
420
420 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_git):
421 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_git):
421 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
422 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
422 vcsbackend_git.ensure_file('README', 'I will conflict with you!!!')
423 vcsbackend_git.ensure_file('README', 'I will conflict with you!!!')
423
424
424 target_repo._local_fetch(self.repo.path, 'master')
425 target_repo._local_fetch(self.repo.path, 'master')
425 with pytest.raises(RepositoryError):
426 with pytest.raises(RepositoryError):
426 target_repo._local_merge(
427 target_repo._local_merge(
427 'merge_message', 'user name', 'user@name.com',
428 'merge_message', 'user name', 'user@name.com',
428 target_repo._last_fetch_heads())
429 target_repo._last_fetch_heads())
429
430
430 # Check we are not left in an intermediate merge state
431 # Check we are not left in an intermediate merge state
431 assert not os.path.exists(
432 assert not os.path.exists(
432 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
433 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
433
434
434 def test_local_merge_into_empty_repo(self, tmp_path_factory):
435 def test_local_merge_into_empty_repo(self, tmp_path_factory):
435 target_repo = self.get_empty_repo(tmp_path_factory)
436 target_repo = self.get_empty_repo(tmp_path_factory)
436
437
437 # This is required as one cannot do a -ff-only merge in an empty repo.
438 # This is required as one cannot do a -ff-only merge in an empty repo.
438 target_repo._local_fetch(self.repo.path, 'master')
439 target_repo._local_fetch(self.repo.path, 'master')
439 with pytest.raises(RepositoryError):
440 with pytest.raises(RepositoryError):
440 target_repo._local_merge(
441 target_repo._local_merge(
441 'merge_message', 'user name', 'user@name.com',
442 'merge_message', 'user name', 'user@name.com',
442 target_repo._last_fetch_heads())
443 target_repo._last_fetch_heads())
443
444
444 def test_local_merge_in_bare_repo(self):
445 def test_local_merge_in_bare_repo(self):
445 with pytest.raises(RepositoryError):
446 with pytest.raises(RepositoryError):
446 self.repo._local_merge(
447 self.repo._local_merge(
447 'merge_message', 'user name', 'user@name.com', None)
448 'merge_message', 'user name', 'user@name.com', None)
448
449
449 def test_local_push_non_bare(self, tmp_path_factory):
450 def test_local_push_non_bare(self, tmp_path_factory):
450 target_repo = self.get_empty_repo(tmp_path_factory)
451 target_repo = self.get_empty_repo(tmp_path_factory)
451
452
452 pushed_branch = 'pushed_branch'
453 pushed_branch = 'pushed_branch'
453 self.repo._local_push('master', target_repo.path, pushed_branch)
454 self.repo._local_push('master', target_repo.path, pushed_branch)
454 # Fix the HEAD of the target repo, or otherwise GitRepository won't
455 # Fix the HEAD of the target repo, or otherwise GitRepository won't
455 # report any branches.
456 # report any branches.
456 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
457 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
457 f.write('ref: refs/heads/%s' % pushed_branch)
458 f.write('ref: refs/heads/%s' % pushed_branch)
458
459
459 target_repo = GitRepository(target_repo.path)
460 target_repo = GitRepository(target_repo.path)
460
461
461 assert (target_repo.branches[pushed_branch] ==
462 assert (target_repo.branches[pushed_branch] ==
462 self.repo.branches['master'])
463 self.repo.branches['master'])
463
464
464 def test_local_push_bare(self, tmp_path_factory):
465 def test_local_push_bare(self, tmp_path_factory):
465 target_repo = self.get_empty_repo(tmp_path_factory, bare=True)
466 target_repo = self.get_empty_repo(tmp_path_factory, bare=True)
466
467
467 pushed_branch = 'pushed_branch'
468 pushed_branch = 'pushed_branch'
468 self.repo._local_push('master', target_repo.path, pushed_branch)
469 self.repo._local_push('master', target_repo.path, pushed_branch)
469 # Fix the HEAD of the target repo, or otherwise GitRepository won't
470 # Fix the HEAD of the target repo, or otherwise GitRepository won't
470 # report any branches.
471 # report any branches.
471 with open(os.path.join(target_repo.path, 'HEAD'), 'w') as f:
472 with open(os.path.join(target_repo.path, 'HEAD'), 'w') as f:
472 f.write('ref: refs/heads/%s' % pushed_branch)
473 f.write('ref: refs/heads/%s' % pushed_branch)
473
474
474 target_repo = GitRepository(target_repo.path)
475 target_repo = GitRepository(target_repo.path)
475
476
476 assert (target_repo.branches[pushed_branch] ==
477 assert (target_repo.branches[pushed_branch] ==
477 self.repo.branches['master'])
478 self.repo.branches['master'])
478
479
479 def test_local_push_non_bare_target_branch_is_checked_out(self, tmp_path_factory):
480 def test_local_push_non_bare_target_branch_is_checked_out(self, tmp_path_factory):
480 target_repo = self.get_clone_repo(tmp_path_factory)
481 target_repo = self.get_clone_repo(tmp_path_factory)
481
482
482 pushed_branch = 'pushed_branch'
483 pushed_branch = 'pushed_branch'
483 # Create a new branch in source repo
484 # Create a new branch in source repo
484 new_branch_commit = target_repo.commit_ids[-3]
485 new_branch_commit = target_repo.commit_ids[-3]
485 target_repo._checkout(new_branch_commit)
486 target_repo._checkout(new_branch_commit)
486 target_repo._checkout(pushed_branch, create=True)
487 target_repo._checkout(pushed_branch, create=True)
487
488
488 self.repo._local_push('master', target_repo.path, pushed_branch)
489 self.repo._local_push('master', target_repo.path, pushed_branch)
489
490
490 target_repo = GitRepository(target_repo.path)
491 target_repo = GitRepository(target_repo.path)
491
492
492 assert (target_repo.branches[pushed_branch] ==
493 assert (target_repo.branches[pushed_branch] ==
493 self.repo.branches['master'])
494 self.repo.branches['master'])
494
495
495 def test_local_push_raises_exception_on_conflict(self, vcsbackend_git):
496 def test_local_push_raises_exception_on_conflict(self, vcsbackend_git):
496 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
497 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
497 with pytest.raises(RepositoryError):
498 with pytest.raises(RepositoryError):
498 self.repo._local_push('master', target_repo.path, 'master')
499 self.repo._local_push('master', target_repo.path, 'master')
499
500
500 def test_hooks_can_be_enabled_via_env_variable_for_local_push(self, tmp_path_factory):
501 def test_hooks_can_be_enabled_via_env_variable_for_local_push(self, tmp_path_factory):
501 target_repo = self.get_empty_repo(tmp_path_factory, bare=True)
502 target_repo = self.get_empty_repo(tmp_path_factory, bare=True)
502
503
503 with mock.patch.object(self.repo, 'run_git_command') as run_mock:
504 with mock.patch.object(self.repo, 'run_git_command') as run_mock:
504 self.repo._local_push(
505 self.repo._local_push(
505 'master', target_repo.path, 'master', enable_hooks=True)
506 'master', target_repo.path, 'master', enable_hooks=True)
506 env = run_mock.call_args[1]['extra_env']
507 env = run_mock.call_args[1]['extra_env']
507 assert 'RC_SKIP_HOOKS' not in env
508 assert 'RC_SKIP_HOOKS' not in env
508
509
509 def _add_failing_hook(self, repo_path, hook_name, bare=False):
510 def _add_failing_hook(self, repo_path, hook_name, bare=False):
510 path_components = (
511 path_components = (
511 ['hooks', hook_name] if bare else ['.git', 'hooks', hook_name])
512 ['hooks', hook_name] if bare else ['.git', 'hooks', hook_name])
512 hook_path = os.path.join(repo_path, *path_components)
513 hook_path = os.path.join(repo_path, *path_components)
513 with open(hook_path, 'w') as f:
514 with open(hook_path, 'w') as f:
514 script_lines = [
515 script_lines = [
515 '#!%s' % sys.executable,
516 '#!%s' % sys.executable,
516 'import os',
517 'import os',
517 'import sys',
518 'import sys',
518 'if os.environ.get("RC_SKIP_HOOKS"):',
519 'if os.environ.get("RC_SKIP_HOOKS"):',
519 ' sys.exit(0)',
520 ' sys.exit(0)',
520 'sys.exit(1)',
521 'sys.exit(1)',
521 ]
522 ]
522 f.write('\n'.join(script_lines))
523 f.write('\n'.join(script_lines))
523 os.chmod(hook_path, 0o755)
524 os.chmod(hook_path, 0o755)
524
525
525 def test_local_push_does_not_execute_hook(self, tmp_path_factory):
526 def test_local_push_does_not_execute_hook(self, tmp_path_factory):
526 target_repo = self.get_empty_repo(tmp_path_factory)
527 target_repo = self.get_empty_repo(tmp_path_factory)
527
528
528 pushed_branch = 'pushed_branch'
529 pushed_branch = 'pushed_branch'
529 self._add_failing_hook(target_repo.path, 'pre-receive')
530 self._add_failing_hook(target_repo.path, 'pre-receive')
530 self.repo._local_push('master', target_repo.path, pushed_branch)
531 self.repo._local_push('master', target_repo.path, pushed_branch)
531 # Fix the HEAD of the target repo, or otherwise GitRepository won't
532 # Fix the HEAD of the target repo, or otherwise GitRepository won't
532 # report any branches.
533 # report any branches.
533 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
534 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
534 f.write('ref: refs/heads/%s' % pushed_branch)
535 f.write('ref: refs/heads/%s' % pushed_branch)
535
536
536 target_repo = GitRepository(target_repo.path)
537 target_repo = GitRepository(target_repo.path)
537
538
538 assert (target_repo.branches[pushed_branch] ==
539 assert (target_repo.branches[pushed_branch] ==
539 self.repo.branches['master'])
540 self.repo.branches['master'])
540
541
541 def test_local_push_executes_hook(self, tmp_path_factory):
542 def test_local_push_executes_hook(self, tmp_path_factory):
542 target_repo = self.get_empty_repo(tmp_path_factory, bare=True)
543 target_repo = self.get_empty_repo(tmp_path_factory, bare=True)
543 self._add_failing_hook(target_repo.path, 'pre-receive', bare=True)
544 self._add_failing_hook(target_repo.path, 'pre-receive', bare=True)
544 with pytest.raises(RepositoryError):
545 with pytest.raises(RepositoryError):
545 self.repo._local_push(
546 self.repo._local_push(
546 'master', target_repo.path, 'master', enable_hooks=True)
547 'master', target_repo.path, 'master', enable_hooks=True)
547
548
548 def test_maybe_prepare_merge_workspace(self):
549 def test_maybe_prepare_merge_workspace(self):
549 workspace = self.repo._maybe_prepare_merge_workspace(
550 workspace = self.repo._maybe_prepare_merge_workspace(
550 2, 'pr2', Reference('branch', 'master', 'unused'),
551 2, 'pr2', Reference('branch', 'master', 'unused'),
551 Reference('branch', 'master', 'unused'))
552 Reference('branch', 'master', 'unused'))
552
553
553 assert os.path.isdir(workspace)
554 assert os.path.isdir(workspace)
554 workspace_repo = GitRepository(workspace)
555 workspace_repo = GitRepository(workspace)
555 assert workspace_repo.branches == self.repo.branches
556 assert workspace_repo.branches == self.repo.branches
556
557
557 # Calling it a second time should also succeed
558 # Calling it a second time should also succeed
558 workspace = self.repo._maybe_prepare_merge_workspace(
559 workspace = self.repo._maybe_prepare_merge_workspace(
559 2, 'pr2', Reference('branch', 'master', 'unused'),
560 2, 'pr2', Reference('branch', 'master', 'unused'),
560 Reference('branch', 'master', 'unused'))
561 Reference('branch', 'master', 'unused'))
561 assert os.path.isdir(workspace)
562 assert os.path.isdir(workspace)
562
563
563 def test_maybe_prepare_merge_workspace_different_refs(self):
564 def test_maybe_prepare_merge_workspace_different_refs(self):
564 workspace = self.repo._maybe_prepare_merge_workspace(
565 workspace = self.repo._maybe_prepare_merge_workspace(
565 2, 'pr2', Reference('branch', 'master', 'unused'),
566 2, 'pr2', Reference('branch', 'master', 'unused'),
566 Reference('branch', 'develop', 'unused'))
567 Reference('branch', 'develop', 'unused'))
567
568
568 assert os.path.isdir(workspace)
569 assert os.path.isdir(workspace)
569 workspace_repo = GitRepository(workspace)
570 workspace_repo = GitRepository(workspace)
570 assert workspace_repo.branches == self.repo.branches
571 assert workspace_repo.branches == self.repo.branches
571
572
572 # Calling it a second time should also succeed
573 # Calling it a second time should also succeed
573 workspace = self.repo._maybe_prepare_merge_workspace(
574 workspace = self.repo._maybe_prepare_merge_workspace(
574 2, 'pr2', Reference('branch', 'master', 'unused'),
575 2, 'pr2', Reference('branch', 'master', 'unused'),
575 Reference('branch', 'develop', 'unused'))
576 Reference('branch', 'develop', 'unused'))
576 assert os.path.isdir(workspace)
577 assert os.path.isdir(workspace)
577
578
578 def test_cleanup_merge_workspace(self):
579 def test_cleanup_merge_workspace(self):
579 workspace = self.repo._maybe_prepare_merge_workspace(
580 workspace = self.repo._maybe_prepare_merge_workspace(
580 2, 'pr3', Reference('branch', 'master', 'unused'),
581 2, 'pr3', Reference('branch', 'master', 'unused'),
581 Reference('branch', 'master', 'unused'))
582 Reference('branch', 'master', 'unused'))
582 self.repo.cleanup_merge_workspace(2, 'pr3')
583 self.repo.cleanup_merge_workspace(2, 'pr3')
583
584
584 assert not os.path.exists(workspace)
585 assert not os.path.exists(workspace)
585
586
586 def test_cleanup_merge_workspace_invalid_workspace_id(self):
587 def test_cleanup_merge_workspace_invalid_workspace_id(self):
587 # No assert: because in case of an inexistent workspace this function
588 # No assert: because in case of an inexistent workspace this function
588 # should still succeed.
589 # should still succeed.
589 self.repo.cleanup_merge_workspace(1, 'pr4')
590 self.repo.cleanup_merge_workspace(1, 'pr4')
590
591
591 def test_set_refs(self):
592 def test_set_refs(self):
592 test_ref = 'refs/test-refs/abcde'
593 test_ref = 'refs/test-refs/abcde'
593 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
594 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
594
595
595 self.repo.set_refs(test_ref, test_commit_id)
596 self.repo.set_refs(test_ref, test_commit_id)
596 stdout, _ = self.repo.run_git_command(['show-ref'])
597 stdout, _ = self.repo.run_git_command(['show-ref'])
597 assert test_ref in stdout
598 assert test_ref in stdout
598 assert test_commit_id in stdout
599 assert test_commit_id in stdout
599
600
600 def test_remove_ref(self):
601 def test_remove_ref(self):
601 test_ref = 'refs/test-refs/abcde'
602 test_ref = 'refs/test-refs/abcde'
602 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
603 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
603 self.repo.set_refs(test_ref, test_commit_id)
604 self.repo.set_refs(test_ref, test_commit_id)
604 stdout, _ = self.repo.run_git_command(['show-ref'])
605 stdout, _ = self.repo.run_git_command(['show-ref'])
605 assert test_ref in stdout
606 assert test_ref in stdout
606 assert test_commit_id in stdout
607 assert test_commit_id in stdout
607
608
608 self.repo.remove_ref(test_ref)
609 self.repo.remove_ref(test_ref)
609 stdout, _ = self.repo.run_git_command(['show-ref'])
610 stdout, _ = self.repo.run_git_command(['show-ref'])
610 assert test_ref not in stdout
611 assert test_ref not in stdout
611 assert test_commit_id not in stdout
612 assert test_commit_id not in stdout
612
613
613
614
614 class TestGitCommit(object):
615 class TestGitCommit(object):
615
616
616 @pytest.fixture(autouse=True)
617 @pytest.fixture(autouse=True)
617 def prepare(self):
618 def prepare(self):
618 self.repo = GitRepository(TEST_GIT_REPO)
619 self.repo = GitRepository(TEST_GIT_REPO)
619
620
620 def test_default_commit(self):
621 def test_default_commit(self):
621 tip = self.repo.get_commit()
622 tip = self.repo.get_commit()
622 assert tip == self.repo.get_commit(None)
623 assert tip == self.repo.get_commit(None)
623 assert tip == self.repo.get_commit('tip')
624 assert tip == self.repo.get_commit('tip')
624
625
625 def test_root_node(self):
626 def test_root_node(self):
626 tip = self.repo.get_commit()
627 tip = self.repo.get_commit()
627 assert tip.root is tip.get_node('')
628 assert tip.root is tip.get_node('')
628
629
629 def test_lazy_fetch(self):
630 def test_lazy_fetch(self):
630 """
631 """
631 Test if commit's nodes expands and are cached as we walk through
632 Test if commit's nodes expands and are cached as we walk through
632 the commit. This test is somewhat hard to write as order of tests
633 the commit. This test is somewhat hard to write as order of tests
633 is a key here. Written by running command after command in a shell.
634 is a key here. Written by running command after command in a shell.
634 """
635 """
635 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
636 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
636 assert commit_id in self.repo.commit_ids
637 assert commit_id in self.repo.commit_ids
637 commit = self.repo.get_commit(commit_id)
638 commit = self.repo.get_commit(commit_id)
638 assert len(commit.nodes) == 0
639 assert len(commit.nodes) == 0
639 root = commit.root
640 root = commit.root
640 assert len(commit.nodes) == 1
641 assert len(commit.nodes) == 1
641 assert len(root.nodes) == 8
642 assert len(root.nodes) == 8
642 # accessing root.nodes updates commit.nodes
643 # accessing root.nodes updates commit.nodes
643 assert len(commit.nodes) == 9
644 assert len(commit.nodes) == 9
644
645
645 docs = root.get_node('docs')
646 docs = root.get_node('docs')
646 # we haven't yet accessed anything new as docs dir was already cached
647 # we haven't yet accessed anything new as docs dir was already cached
647 assert len(commit.nodes) == 9
648 assert len(commit.nodes) == 9
648 assert len(docs.nodes) == 8
649 assert len(docs.nodes) == 8
649 # accessing docs.nodes updates commit.nodes
650 # accessing docs.nodes updates commit.nodes
650 assert len(commit.nodes) == 17
651 assert len(commit.nodes) == 17
651
652
652 assert docs is commit.get_node('docs')
653 assert docs is commit.get_node('docs')
653 assert docs is root.nodes[0]
654 assert docs is root.nodes[0]
654 assert docs is root.dirs[0]
655 assert docs is root.dirs[0]
655 assert docs is commit.get_node('docs')
656 assert docs is commit.get_node('docs')
656
657
657 def test_nodes_with_commit(self):
658 def test_nodes_with_commit(self):
658 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
659 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
659 commit = self.repo.get_commit(commit_id)
660 commit = self.repo.get_commit(commit_id)
660 root = commit.root
661 root = commit.root
661 docs = root.get_node('docs')
662 docs = root.get_node('docs')
662 assert docs is commit.get_node('docs')
663 assert docs is commit.get_node('docs')
663 api = docs.get_node('api')
664 api = docs.get_node('api')
664 assert api is commit.get_node('docs/api')
665 assert api is commit.get_node('docs/api')
665 index = api.get_node('index.rst')
666 index = api.get_node('index.rst')
666 assert index is commit.get_node('docs/api/index.rst')
667 assert index is commit.get_node('docs/api/index.rst')
667 assert index is commit.get_node('docs')\
668 assert index is commit.get_node('docs')\
668 .get_node('api')\
669 .get_node('api')\
669 .get_node('index.rst')
670 .get_node('index.rst')
670
671
671 def test_branch_and_tags(self):
672 def test_branch_and_tags(self):
672 """
673 """
673 rev0 = self.repo.commit_ids[0]
674 rev0 = self.repo.commit_ids[0]
674 commit0 = self.repo.get_commit(rev0)
675 commit0 = self.repo.get_commit(rev0)
675 assert commit0.branch == 'master'
676 assert commit0.branch == 'master'
676 assert commit0.tags == []
677 assert commit0.tags == []
677
678
678 rev10 = self.repo.commit_ids[10]
679 rev10 = self.repo.commit_ids[10]
679 commit10 = self.repo.get_commit(rev10)
680 commit10 = self.repo.get_commit(rev10)
680 assert commit10.branch == 'master'
681 assert commit10.branch == 'master'
681 assert commit10.tags == []
682 assert commit10.tags == []
682
683
683 rev44 = self.repo.commit_ids[44]
684 rev44 = self.repo.commit_ids[44]
684 commit44 = self.repo.get_commit(rev44)
685 commit44 = self.repo.get_commit(rev44)
685 assert commit44.branch == 'web-branch'
686 assert commit44.branch == 'web-branch'
686
687
687 tip = self.repo.get_commit('tip')
688 tip = self.repo.get_commit('tip')
688 assert 'tip' in tip.tags
689 assert 'tip' in tip.tags
689 """
690 """
690 # Those tests would fail - branches are now going
691 # Those tests would fail - branches are now going
691 # to be changed at main API in order to support git backend
692 # to be changed at main API in order to support git backend
692 pass
693 pass
693
694
694 def test_file_size(self):
695 def test_file_size(self):
695 to_check = (
696 to_check = (
696 ('c1214f7e79e02fc37156ff215cd71275450cffc3',
697 ('c1214f7e79e02fc37156ff215cd71275450cffc3',
697 'vcs/backends/BaseRepository.py', 502),
698 'vcs/backends/BaseRepository.py', 502),
698 ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
699 ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
699 'vcs/backends/hg.py', 854),
700 'vcs/backends/hg.py', 854),
700 ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
701 ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
701 'setup.py', 1068),
702 'setup.py', 1068),
702
703
703 ('d955cd312c17b02143c04fa1099a352b04368118',
704 ('d955cd312c17b02143c04fa1099a352b04368118',
704 'vcs/backends/base.py', 2921),
705 'vcs/backends/base.py', 2921),
705 ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
706 ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
706 'vcs/backends/base.py', 3936),
707 'vcs/backends/base.py', 3936),
707 ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
708 ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
708 'vcs/backends/base.py', 6189),
709 'vcs/backends/base.py', 6189),
709 )
710 )
710 for commit_id, path, size in to_check:
711 for commit_id, path, size in to_check:
711 node = self.repo.get_commit(commit_id).get_node(path)
712 node = self.repo.get_commit(commit_id).get_node(path)
712 assert node.is_file()
713 assert node.is_file()
713 assert node.size == size
714 assert node.size == size
714
715
715 def test_file_history_from_commits(self):
716 def test_file_history_from_commits(self):
716 node = self.repo[10].get_node('setup.py')
717 node = self.repo[10].get_node('setup.py')
717 commit_ids = [commit.raw_id for commit in node.history]
718 commit_ids = [commit.raw_id for commit in node.history]
718 assert ['ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == commit_ids
719 assert ['ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == commit_ids
719
720
720 node = self.repo[20].get_node('setup.py')
721 node = self.repo[20].get_node('setup.py')
721 node_ids = [commit.raw_id for commit in node.history]
722 node_ids = [commit.raw_id for commit in node.history]
722 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
723 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
723 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
724 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
724
725
725 # special case we check history from commit that has this particular
726 # special case we check history from commit that has this particular
726 # file changed this means we check if it's included as well
727 # file changed this means we check if it's included as well
727 node = self.repo.get_commit('191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e') \
728 node = self.repo.get_commit('191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e') \
728 .get_node('setup.py')
729 .get_node('setup.py')
729 node_ids = [commit.raw_id for commit in node.history]
730 node_ids = [commit.raw_id for commit in node.history]
730 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
731 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
731 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
732 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
732
733
733 def test_file_history(self):
734 def test_file_history(self):
734 # we can only check if those commits are present in the history
735 # we can only check if those commits are present in the history
735 # as we cannot update this test every time file is changed
736 # as we cannot update this test every time file is changed
736 files = {
737 files = {
737 'setup.py': [
738 'setup.py': [
738 '54386793436c938cff89326944d4c2702340037d',
739 '54386793436c938cff89326944d4c2702340037d',
739 '51d254f0ecf5df2ce50c0b115741f4cf13985dab',
740 '51d254f0ecf5df2ce50c0b115741f4cf13985dab',
740 '998ed409c795fec2012b1c0ca054d99888b22090',
741 '998ed409c795fec2012b1c0ca054d99888b22090',
741 '5e0eb4c47f56564395f76333f319d26c79e2fb09',
742 '5e0eb4c47f56564395f76333f319d26c79e2fb09',
742 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
743 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
743 '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e',
744 '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e',
744 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
745 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
745 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
746 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
746 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
747 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
747 ],
748 ],
748 'vcs/nodes.py': [
749 'vcs/nodes.py': [
749 '33fa3223355104431402a888fa77a4e9956feb3e',
750 '33fa3223355104431402a888fa77a4e9956feb3e',
750 'fa014c12c26d10ba682fadb78f2a11c24c8118e1',
751 'fa014c12c26d10ba682fadb78f2a11c24c8118e1',
751 'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
752 'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
752 'ab5721ca0a081f26bf43d9051e615af2cc99952f',
753 'ab5721ca0a081f26bf43d9051e615af2cc99952f',
753 'c877b68d18e792a66b7f4c529ea02c8f80801542',
754 'c877b68d18e792a66b7f4c529ea02c8f80801542',
754 '4313566d2e417cb382948f8d9d7c765330356054',
755 '4313566d2e417cb382948f8d9d7c765330356054',
755 '6c2303a793671e807d1cfc70134c9ca0767d98c2',
756 '6c2303a793671e807d1cfc70134c9ca0767d98c2',
756 '54386793436c938cff89326944d4c2702340037d',
757 '54386793436c938cff89326944d4c2702340037d',
757 '54000345d2e78b03a99d561399e8e548de3f3203',
758 '54000345d2e78b03a99d561399e8e548de3f3203',
758 '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b',
759 '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b',
759 '2d03ca750a44440fb5ea8b751176d1f36f8e8f46',
760 '2d03ca750a44440fb5ea8b751176d1f36f8e8f46',
760 '2a08b128c206db48c2f0b8f70df060e6db0ae4f8',
761 '2a08b128c206db48c2f0b8f70df060e6db0ae4f8',
761 '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b',
762 '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b',
762 'ac71e9503c2ca95542839af0ce7b64011b72ea7c',
763 'ac71e9503c2ca95542839af0ce7b64011b72ea7c',
763 '12669288fd13adba2a9b7dd5b870cc23ffab92d2',
764 '12669288fd13adba2a9b7dd5b870cc23ffab92d2',
764 '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382',
765 '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382',
765 '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5',
766 '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5',
766 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
767 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
767 'f50f42baeed5af6518ef4b0cb2f1423f3851a941',
768 'f50f42baeed5af6518ef4b0cb2f1423f3851a941',
768 'd7e390a45f6aa96f04f5e7f583ad4f867431aa25',
769 'd7e390a45f6aa96f04f5e7f583ad4f867431aa25',
769 'f15c21f97864b4f071cddfbf2750ec2e23859414',
770 'f15c21f97864b4f071cddfbf2750ec2e23859414',
770 'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade',
771 'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade',
771 'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b',
772 'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b',
772 '84dec09632a4458f79f50ddbbd155506c460b4f9',
773 '84dec09632a4458f79f50ddbbd155506c460b4f9',
773 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
774 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
774 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
775 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
775 '3bf1c5868e570e39569d094f922d33ced2fa3b2b',
776 '3bf1c5868e570e39569d094f922d33ced2fa3b2b',
776 'b8d04012574729d2c29886e53b1a43ef16dd00a1',
777 'b8d04012574729d2c29886e53b1a43ef16dd00a1',
777 '6970b057cffe4aab0a792aa634c89f4bebf01441',
778 '6970b057cffe4aab0a792aa634c89f4bebf01441',
778 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
779 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
779 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
780 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
780 ],
781 ],
781 'vcs/backends/git.py': [
782 'vcs/backends/git.py': [
782 '4cf116ad5a457530381135e2f4c453e68a1b0105',
783 '4cf116ad5a457530381135e2f4c453e68a1b0105',
783 '9a751d84d8e9408e736329767387f41b36935153',
784 '9a751d84d8e9408e736329767387f41b36935153',
784 'cb681fb539c3faaedbcdf5ca71ca413425c18f01',
785 'cb681fb539c3faaedbcdf5ca71ca413425c18f01',
785 '428f81bb652bcba8d631bce926e8834ff49bdcc6',
786 '428f81bb652bcba8d631bce926e8834ff49bdcc6',
786 '180ab15aebf26f98f714d8c68715e0f05fa6e1c7',
787 '180ab15aebf26f98f714d8c68715e0f05fa6e1c7',
787 '2b8e07312a2e89e92b90426ab97f349f4bce2a3a',
788 '2b8e07312a2e89e92b90426ab97f349f4bce2a3a',
788 '50e08c506174d8645a4bb517dd122ac946a0f3bf',
789 '50e08c506174d8645a4bb517dd122ac946a0f3bf',
789 '54000345d2e78b03a99d561399e8e548de3f3203',
790 '54000345d2e78b03a99d561399e8e548de3f3203',
790 ],
791 ],
791 }
792 }
792 for path, commit_ids in files.items():
793 for path, commit_ids in files.items():
793 node = self.repo.get_commit(commit_ids[0]).get_node(path)
794 node = self.repo.get_commit(commit_ids[0]).get_node(path)
794 node_ids = [commit.raw_id for commit in node.history]
795 node_ids = [commit.raw_id for commit in node.history]
795 assert set(commit_ids).issubset(set(node_ids)), (
796 assert set(commit_ids).issubset(set(node_ids)), (
796 "We assumed that %s is subset of commit_ids for which file %s "
797 "We assumed that %s is subset of commit_ids for which file %s "
797 "has been changed, and history of that node returned: %s"
798 "has been changed, and history of that node returned: %s"
798 % (commit_ids, path, node_ids))
799 % (commit_ids, path, node_ids))
799
800
800 def test_file_annotate(self):
801 def test_file_annotate(self):
801 files = {
802 files = {
802 'vcs/backends/__init__.py': {
803 'vcs/backends/__init__.py': {
803 'c1214f7e79e02fc37156ff215cd71275450cffc3': {
804 'c1214f7e79e02fc37156ff215cd71275450cffc3': {
804 'lines_no': 1,
805 'lines_no': 1,
805 'commits': [
806 'commits': [
806 'c1214f7e79e02fc37156ff215cd71275450cffc3',
807 'c1214f7e79e02fc37156ff215cd71275450cffc3',
807 ],
808 ],
808 },
809 },
809 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': {
810 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': {
810 'lines_no': 21,
811 'lines_no': 21,
811 'commits': [
812 'commits': [
812 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
813 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
813 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
814 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
814 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
815 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
815 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
816 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
816 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
817 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
817 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
818 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
818 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
819 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
819 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
820 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
820 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
821 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
821 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
822 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
822 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
823 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
823 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
824 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
824 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
825 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
825 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
826 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
826 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
827 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
827 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
828 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
828 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
829 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
829 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
830 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
830 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
831 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
831 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
832 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
832 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
833 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
833 ],
834 ],
834 },
835 },
835 'e29b67bd158580fc90fc5e9111240b90e6e86064': {
836 'e29b67bd158580fc90fc5e9111240b90e6e86064': {
836 'lines_no': 32,
837 'lines_no': 32,
837 'commits': [
838 'commits': [
838 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
839 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
839 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
840 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
840 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
841 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
841 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
842 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
842 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
843 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
843 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
844 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
844 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
845 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
845 '54000345d2e78b03a99d561399e8e548de3f3203',
846 '54000345d2e78b03a99d561399e8e548de3f3203',
846 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
847 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
847 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
848 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
848 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
849 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
849 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
850 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
850 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
851 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
851 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
852 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
852 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
853 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
853 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
854 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
854 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
855 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
855 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
856 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
856 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
857 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
857 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
858 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
858 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
859 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
859 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
860 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
860 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
861 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
861 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
862 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
862 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
863 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
863 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
864 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
864 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
865 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
865 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
866 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
866 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
867 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
867 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
868 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
868 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
869 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
869 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
870 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
870 ],
871 ],
871 },
872 },
872 },
873 },
873 }
874 }
874
875
875 for fname, commit_dict in files.items():
876 for fname, commit_dict in files.items():
876 for commit_id, __ in commit_dict.items():
877 for commit_id, __ in commit_dict.items():
877 commit = self.repo.get_commit(commit_id)
878 commit = self.repo.get_commit(commit_id)
878
879
879 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
880 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
880 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
881 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
881 assert l1_1 == l1_2
882 assert l1_1 == l1_2
882 l1 = l1_1
883 l1 = l1_1
883 l2 = files[fname][commit_id]['commits']
884 l2 = files[fname][commit_id]['commits']
884 assert l1 == l2, (
885 assert l1 == l2, (
885 "The lists of commit_ids for %s@commit_id %s"
886 "The lists of commit_ids for %s@commit_id %s"
886 "from annotation list should match each other, "
887 "from annotation list should match each other, "
887 "got \n%s \nvs \n%s " % (fname, commit_id, l1, l2))
888 "got \n%s \nvs \n%s " % (fname, commit_id, l1, l2))
888
889
889 def test_files_state(self):
890 def test_files_state(self):
890 """
891 """
891 Tests state of FileNodes.
892 Tests state of FileNodes.
892 """
893 """
893 node = self.repo\
894 node = self.repo\
894 .get_commit('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\
895 .get_commit('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\
895 .get_node('vcs/utils/diffs.py')
896 .get_node('vcs/utils/diffs.py')
896 assert node.state, NodeState.ADDED
897 assert node.state, NodeState.ADDED
897 assert node.added
898 assert node.added
898 assert not node.changed
899 assert not node.changed
899 assert not node.not_changed
900 assert not node.not_changed
900 assert not node.removed
901 assert not node.removed
901
902
902 node = self.repo\
903 node = self.repo\
903 .get_commit('33fa3223355104431402a888fa77a4e9956feb3e')\
904 .get_commit('33fa3223355104431402a888fa77a4e9956feb3e')\
904 .get_node('.hgignore')
905 .get_node('.hgignore')
905 assert node.state, NodeState.CHANGED
906 assert node.state, NodeState.CHANGED
906 assert not node.added
907 assert not node.added
907 assert node.changed
908 assert node.changed
908 assert not node.not_changed
909 assert not node.not_changed
909 assert not node.removed
910 assert not node.removed
910
911
911 node = self.repo\
912 node = self.repo\
912 .get_commit('e29b67bd158580fc90fc5e9111240b90e6e86064')\
913 .get_commit('e29b67bd158580fc90fc5e9111240b90e6e86064')\
913 .get_node('setup.py')
914 .get_node('setup.py')
914 assert node.state, NodeState.NOT_CHANGED
915 assert node.state, NodeState.NOT_CHANGED
915 assert not node.added
916 assert not node.added
916 assert not node.changed
917 assert not node.changed
917 assert node.not_changed
918 assert node.not_changed
918 assert not node.removed
919 assert not node.removed
919
920
920 # If node has REMOVED state then trying to fetch it would raise
921 # If node has REMOVED state then trying to fetch it would raise
921 # CommitError exception
922 # CommitError exception
922 commit = self.repo.get_commit(
923 commit = self.repo.get_commit(
923 'fa6600f6848800641328adbf7811fd2372c02ab2')
924 'fa6600f6848800641328adbf7811fd2372c02ab2')
924 path = 'vcs/backends/BaseRepository.py'
925 path = 'vcs/backends/BaseRepository.py'
925 with pytest.raises(NodeDoesNotExistError):
926 with pytest.raises(NodeDoesNotExistError):
926 commit.get_node(path)
927 commit.get_node(path)
927 # but it would be one of ``removed`` (commit's attribute)
928 # but it would be one of ``removed`` (commit's attribute)
928 assert path in [rf.path for rf in commit.removed]
929 assert path in [rf.path for rf in commit.removed]
929
930
930 commit = self.repo.get_commit(
931 commit = self.repo.get_commit(
931 '54386793436c938cff89326944d4c2702340037d')
932 '54386793436c938cff89326944d4c2702340037d')
932 changed = [
933 changed = [
933 'setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
934 'setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
934 'vcs/nodes.py']
935 'vcs/nodes.py']
935 assert set(changed) == set([f.path for f in commit.changed])
936 assert set(changed) == set([f.path for f in commit.changed])
936
937
937 def test_unicode_branch_refs(self):
938 def test_unicode_branch_refs(self):
938 unicode_branches = {
939 unicode_branches = {
939 'refs/heads/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
940 'refs/heads/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
940 u'refs/heads/uniΓ§ΓΆβˆ‚e': 'ΓΌrl',
941 u'refs/heads/uniΓ§ΓΆβˆ‚e': 'ΓΌrl',
941 }
942 }
942 with mock.patch(
943 with mock.patch(
943 ("rhodecode.lib.vcs.backends.git.repository"
944 ("rhodecode.lib.vcs.backends.git.repository"
944 ".GitRepository._refs"),
945 ".GitRepository._refs"),
945 unicode_branches):
946 unicode_branches):
946 branches = self.repo.branches
947 branches = self.repo.branches
947
948
948 assert 'unicode' in branches
949 assert 'unicode' in branches
949 assert u'uniΓ§ΓΆβˆ‚e' in branches
950 assert u'uniΓ§ΓΆβˆ‚e' in branches
950
951
951 def test_unicode_tag_refs(self):
952 def test_unicode_tag_refs(self):
952 unicode_tags = {
953 unicode_tags = {
953 'refs/tags/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
954 'refs/tags/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
954 u'refs/tags/uniΓ§ΓΆβˆ‚e': '6c0ce52b229aa978889e91b38777f800e85f330b',
955 u'refs/tags/uniΓ§ΓΆβˆ‚e': '6c0ce52b229aa978889e91b38777f800e85f330b',
955 }
956 }
956 with mock.patch(
957 with mock.patch(
957 ("rhodecode.lib.vcs.backends.git.repository"
958 ("rhodecode.lib.vcs.backends.git.repository"
958 ".GitRepository._refs"),
959 ".GitRepository._refs"),
959 unicode_tags):
960 unicode_tags):
960 tags = self.repo.tags
961 tags = self.repo.tags
961
962
962 assert 'unicode' in tags
963 assert 'unicode' in tags
963 assert u'uniΓ§ΓΆβˆ‚e' in tags
964 assert u'uniΓ§ΓΆβˆ‚e' in tags
964
965
965 def test_commit_message_is_unicode(self):
966 def test_commit_message_is_unicode(self):
966 for commit in self.repo:
967 for commit in self.repo:
967 assert type(commit.message) == unicode
968 assert type(commit.message) == unicode
968
969
969 def test_commit_author_is_unicode(self):
970 def test_commit_author_is_unicode(self):
970 for commit in self.repo:
971 for commit in self.repo:
971 assert type(commit.author) == unicode
972 assert type(commit.author) == unicode
972
973
973 def test_repo_files_content_is_unicode(self):
974 def test_repo_files_content_is_unicode(self):
974 commit = self.repo.get_commit()
975 commit = self.repo.get_commit()
975 for node in commit.get_node('/'):
976 for node in commit.get_node('/'):
976 if node.is_file():
977 if node.is_file():
977 assert type(node.content) == unicode
978 assert type(node.content) == unicode
978
979
979 def test_wrong_path(self):
980 def test_wrong_path(self):
980 # There is 'setup.py' in the root dir but not there:
981 # There is 'setup.py' in the root dir but not there:
981 path = 'foo/bar/setup.py'
982 path = 'foo/bar/setup.py'
982 tip = self.repo.get_commit()
983 tip = self.repo.get_commit()
983 with pytest.raises(VCSError):
984 with pytest.raises(VCSError):
984 tip.get_node(path)
985 tip.get_node(path)
985
986
986 @pytest.mark.parametrize("author_email, commit_id", [
987 @pytest.mark.parametrize("author_email, commit_id", [
987 ('marcin@python-blog.com', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
988 ('marcin@python-blog.com', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
988 ('lukasz.balcerzak@python-center.pl',
989 ('lukasz.balcerzak@python-center.pl',
989 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
990 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
990 ('none@none', '8430a588b43b5d6da365400117c89400326e7992'),
991 ('none@none', '8430a588b43b5d6da365400117c89400326e7992'),
991 ])
992 ])
992 def test_author_email(self, author_email, commit_id):
993 def test_author_email(self, author_email, commit_id):
993 commit = self.repo.get_commit(commit_id)
994 commit = self.repo.get_commit(commit_id)
994 assert author_email == commit.author_email
995 assert author_email == commit.author_email
995
996
996 @pytest.mark.parametrize("author, commit_id", [
997 @pytest.mark.parametrize("author, commit_id", [
997 ('Marcin Kuzminski', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
998 ('Marcin Kuzminski', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
998 ('Lukasz Balcerzak', 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
999 ('Lukasz Balcerzak', 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
999 ('marcink', '8430a588b43b5d6da365400117c89400326e7992'),
1000 ('marcink', '8430a588b43b5d6da365400117c89400326e7992'),
1000 ])
1001 ])
1001 def test_author_username(self, author, commit_id):
1002 def test_author_username(self, author, commit_id):
1002 commit = self.repo.get_commit(commit_id)
1003 commit = self.repo.get_commit(commit_id)
1003 assert author == commit.author_name
1004 assert author == commit.author_name
1004
1005
1005
1006
1006 class TestLargeFileRepo(object):
1007 class TestLargeFileRepo(object):
1007
1008
1008 def test_large_file(self, backend_git):
1009 def test_large_file(self, backend_git):
1009 conf = make_db_config()
1010 conf = make_db_config()
1010 repo = backend_git.create_test_repo('largefiles', conf)
1011 repo = backend_git.create_test_repo('largefiles', conf)
1011
1012
1012 tip = repo.scm_instance().get_commit()
1013 tip = repo.scm_instance().get_commit()
1013
1014
1014 # extract stored LF node into the origin cache
1015 # extract stored LF node into the origin cache
1015 lfs_store = os.path.join(repo.repo_path, repo.repo_name, 'lfs_store')
1016 lfs_store = os.path.join(repo.repo_path, repo.repo_name, 'lfs_store')
1016
1017
1017 oid = '7b331c02e313c7599d5a90212e17e6d3cb729bd2e1c9b873c302a63c95a2f9bf'
1018 oid = '7b331c02e313c7599d5a90212e17e6d3cb729bd2e1c9b873c302a63c95a2f9bf'
1018 oid_path = os.path.join(lfs_store, oid)
1019 oid_path = os.path.join(lfs_store, oid)
1019 oid_destination = os.path.join(
1020 oid_destination = os.path.join(
1020 conf.get('vcs_git_lfs', 'store_location'), oid)
1021 conf.get('vcs_git_lfs', 'store_location'), oid)
1021 shutil.copy(oid_path, oid_destination)
1022 shutil.copy(oid_path, oid_destination)
1022
1023
1023 node = tip.get_node('1MB.zip')
1024 node = tip.get_node('1MB.zip')
1024
1025
1025 lf_node = node.get_largefile_node()
1026 lf_node = node.get_largefile_node()
1026
1027
1027 assert lf_node.is_largefile() is True
1028 assert lf_node.is_largefile() is True
1028 assert lf_node.size == 1024000
1029 assert lf_node.size == 1024000
1029 assert lf_node.name == '1MB.zip'
1030 assert lf_node.name == '1MB.zip'
1030
1031
1031
1032
1032 @pytest.mark.usefixtures("vcs_repository_support")
1033 @pytest.mark.usefixtures("vcs_repository_support")
1033 class TestGitSpecificWithRepo(BackendTestMixin):
1034 class TestGitSpecificWithRepo(BackendTestMixin):
1034
1035
1035 @classmethod
1036 @classmethod
1036 def _get_commits(cls):
1037 def _get_commits(cls):
1037 return [
1038 return [
1038 {
1039 {
1039 'message': 'Initial',
1040 'message': 'Initial',
1040 'author': 'Joe Doe <joe.doe@example.com>',
1041 'author': 'Joe Doe <joe.doe@example.com>',
1041 'date': datetime.datetime(2010, 1, 1, 20),
1042 'date': datetime.datetime(2010, 1, 1, 20),
1042 'added': [
1043 'added': [
1043 FileNode('foobar/static/js/admin/base.js', content='base'),
1044 FileNode('foobar/static/js/admin/base.js', content='base'),
1044 FileNode(
1045 FileNode(
1045 'foobar/static/admin', content='admin',
1046 'foobar/static/admin', content='admin',
1046 mode=0o120000), # this is a link
1047 mode=0o120000), # this is a link
1047 FileNode('foo', content='foo'),
1048 FileNode('foo', content='foo'),
1048 ],
1049 ],
1049 },
1050 },
1050 {
1051 {
1051 'message': 'Second',
1052 'message': 'Second',
1052 'author': 'Joe Doe <joe.doe@example.com>',
1053 'author': 'Joe Doe <joe.doe@example.com>',
1053 'date': datetime.datetime(2010, 1, 1, 22),
1054 'date': datetime.datetime(2010, 1, 1, 22),
1054 'added': [
1055 'added': [
1055 FileNode('foo2', content='foo2'),
1056 FileNode('foo2', content='foo2'),
1056 ],
1057 ],
1057 },
1058 },
1058 ]
1059 ]
1059
1060
1060 def test_paths_slow_traversing(self):
1061 def test_paths_slow_traversing(self):
1061 commit = self.repo.get_commit()
1062 commit = self.repo.get_commit()
1062 assert commit.get_node('foobar').get_node('static').get_node('js')\
1063 assert commit.get_node('foobar').get_node('static').get_node('js')\
1063 .get_node('admin').get_node('base.js').content == 'base'
1064 .get_node('admin').get_node('base.js').content == 'base'
1064
1065
1065 def test_paths_fast_traversing(self):
1066 def test_paths_fast_traversing(self):
1066 commit = self.repo.get_commit()
1067 commit = self.repo.get_commit()
1067 assert (
1068 assert (
1068 commit.get_node('foobar/static/js/admin/base.js').content ==
1069 commit.get_node('foobar/static/js/admin/base.js').content ==
1069 'base')
1070 'base')
1070
1071
1071 def test_get_diff_runs_git_command_with_hashes(self):
1072 def test_get_diff_runs_git_command_with_hashes(self):
1072 comm1 = self.repo[0]
1073 comm1 = self.repo[0]
1073 comm2 = self.repo[1]
1074 comm2 = self.repo[1]
1074 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1075 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1075 self.repo.get_diff(comm1, comm2)
1076 self.repo.get_diff(comm1, comm2)
1076
1077
1077 self.repo.run_git_command.assert_called_once_with(
1078 self.repo.run_git_command.assert_called_once_with(
1078 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1079 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1079 '--abbrev=40', comm1.raw_id, comm2.raw_id])
1080 '--abbrev=40', comm1.raw_id, comm2.raw_id])
1080
1081
1081 def test_get_diff_runs_git_command_with_str_hashes(self):
1082 def test_get_diff_runs_git_command_with_str_hashes(self):
1082 comm2 = self.repo[1]
1083 comm2 = self.repo[1]
1083 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1084 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1084 self.repo.get_diff(self.repo.EMPTY_COMMIT, comm2)
1085 self.repo.get_diff(self.repo.EMPTY_COMMIT, comm2)
1085 self.repo.run_git_command.assert_called_once_with(
1086 self.repo.run_git_command.assert_called_once_with(
1086 ['show', '-U3', '--full-index', '--binary', '-p', '-M',
1087 ['show', '-U3', '--full-index', '--binary', '-p', '-M',
1087 '--abbrev=40', comm2.raw_id])
1088 '--abbrev=40', comm2.raw_id])
1088
1089
1089 def test_get_diff_runs_git_command_with_path_if_its_given(self):
1090 def test_get_diff_runs_git_command_with_path_if_its_given(self):
1090 comm1 = self.repo[0]
1091 comm1 = self.repo[0]
1091 comm2 = self.repo[1]
1092 comm2 = self.repo[1]
1092 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1093 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1093 self.repo.get_diff(comm1, comm2, 'foo')
1094 self.repo.get_diff(comm1, comm2, 'foo')
1094 self.repo.run_git_command.assert_called_once_with(
1095 self.repo.run_git_command.assert_called_once_with(
1095 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1096 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1096 '--abbrev=40', self.repo._lookup_commit(0),
1097 '--abbrev=40', self.repo._lookup_commit(0),
1097 comm2.raw_id, '--', 'foo'])
1098 comm2.raw_id, '--', 'foo'])
1098
1099
1099
1100
1100 @pytest.mark.usefixtures("vcs_repository_support")
1101 @pytest.mark.usefixtures("vcs_repository_support")
1101 class TestGitRegression(BackendTestMixin):
1102 class TestGitRegression(BackendTestMixin):
1102
1103
1103 @classmethod
1104 @classmethod
1104 def _get_commits(cls):
1105 def _get_commits(cls):
1105 return [
1106 return [
1106 {
1107 {
1107 'message': 'Initial',
1108 'message': 'Initial',
1108 'author': 'Joe Doe <joe.doe@example.com>',
1109 'author': 'Joe Doe <joe.doe@example.com>',
1109 'date': datetime.datetime(2010, 1, 1, 20),
1110 'date': datetime.datetime(2010, 1, 1, 20),
1110 'added': [
1111 'added': [
1111 FileNode('bot/__init__.py', content='base'),
1112 FileNode('bot/__init__.py', content='base'),
1112 FileNode('bot/templates/404.html', content='base'),
1113 FileNode('bot/templates/404.html', content='base'),
1113 FileNode('bot/templates/500.html', content='base'),
1114 FileNode('bot/templates/500.html', content='base'),
1114 ],
1115 ],
1115 },
1116 },
1116 {
1117 {
1117 'message': 'Second',
1118 'message': 'Second',
1118 'author': 'Joe Doe <joe.doe@example.com>',
1119 'author': 'Joe Doe <joe.doe@example.com>',
1119 'date': datetime.datetime(2010, 1, 1, 22),
1120 'date': datetime.datetime(2010, 1, 1, 22),
1120 'added': [
1121 'added': [
1121 FileNode('bot/build/migrations/1.py', content='foo2'),
1122 FileNode('bot/build/migrations/1.py', content='foo2'),
1122 FileNode('bot/build/migrations/2.py', content='foo2'),
1123 FileNode('bot/build/migrations/2.py', content='foo2'),
1123 FileNode(
1124 FileNode(
1124 'bot/build/static/templates/f.html', content='foo2'),
1125 'bot/build/static/templates/f.html', content='foo2'),
1125 FileNode(
1126 FileNode(
1126 'bot/build/static/templates/f1.html', content='foo2'),
1127 'bot/build/static/templates/f1.html', content='foo2'),
1127 FileNode('bot/build/templates/err.html', content='foo2'),
1128 FileNode('bot/build/templates/err.html', content='foo2'),
1128 FileNode('bot/build/templates/err2.html', content='foo2'),
1129 FileNode('bot/build/templates/err2.html', content='foo2'),
1129 ],
1130 ],
1130 },
1131 },
1131 ]
1132 ]
1132
1133
1133 @pytest.mark.parametrize("path, expected_paths", [
1134 @pytest.mark.parametrize("path, expected_paths", [
1134 ('bot', [
1135 ('bot', [
1135 'bot/build',
1136 'bot/build',
1136 'bot/templates',
1137 'bot/templates',
1137 'bot/__init__.py']),
1138 'bot/__init__.py']),
1138 ('bot/build', [
1139 ('bot/build', [
1139 'bot/build/migrations',
1140 'bot/build/migrations',
1140 'bot/build/static',
1141 'bot/build/static',
1141 'bot/build/templates']),
1142 'bot/build/templates']),
1142 ('bot/build/static', [
1143 ('bot/build/static', [
1143 'bot/build/static/templates']),
1144 'bot/build/static/templates']),
1144 ('bot/build/static/templates', [
1145 ('bot/build/static/templates', [
1145 'bot/build/static/templates/f.html',
1146 'bot/build/static/templates/f.html',
1146 'bot/build/static/templates/f1.html']),
1147 'bot/build/static/templates/f1.html']),
1147 ('bot/build/templates', [
1148 ('bot/build/templates', [
1148 'bot/build/templates/err.html',
1149 'bot/build/templates/err.html',
1149 'bot/build/templates/err2.html']),
1150 'bot/build/templates/err2.html']),
1150 ('bot/templates/', [
1151 ('bot/templates/', [
1151 'bot/templates/404.html',
1152 'bot/templates/404.html',
1152 'bot/templates/500.html']),
1153 'bot/templates/500.html']),
1153 ])
1154 ])
1154 def test_similar_paths(self, path, expected_paths):
1155 def test_similar_paths(self, path, expected_paths):
1155 commit = self.repo.get_commit()
1156 commit = self.repo.get_commit()
1156 paths = [n.path for n in commit.get_nodes(path)]
1157 paths = [n.path for n in commit.get_nodes(path)]
1157 assert paths == expected_paths
1158 assert paths == expected_paths
1158
1159
1159
1160
1160 class TestDiscoverGitVersion(object):
1161 class TestDiscoverGitVersion(object):
1161
1162
1162 def test_returns_git_version(self, baseapp):
1163 def test_returns_git_version(self, baseapp):
1163 version = discover_git_version()
1164 version = discover_git_version()
1164 assert version
1165 assert version
1165
1166
1166 def test_returns_empty_string_without_vcsserver(self):
1167 def test_returns_empty_string_without_vcsserver(self):
1167 mock_connection = mock.Mock()
1168 mock_connection = mock.Mock()
1168 mock_connection.discover_git_version = mock.Mock(
1169 mock_connection.discover_git_version = mock.Mock(
1169 side_effect=Exception)
1170 side_effect=Exception)
1170 with mock.patch('rhodecode.lib.vcs.connection.Git', mock_connection):
1171 with mock.patch('rhodecode.lib.vcs.connection.Git', mock_connection):
1171 version = discover_git_version()
1172 version = discover_git_version()
1172 assert version == ''
1173 assert version == ''
1173
1174
1174
1175
1175 class TestGetSubmoduleUrl(object):
1176 class TestGetSubmoduleUrl(object):
1176 def test_submodules_file_found(self):
1177 def test_submodules_file_found(self):
1177 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1178 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1178 node = mock.Mock()
1179 node = mock.Mock()
1179 with mock.patch.object(
1180 with mock.patch.object(
1180 commit, 'get_node', return_value=node) as get_node_mock:
1181 commit, 'get_node', return_value=node) as get_node_mock:
1181 node.content = (
1182 node.content = (
1182 '[submodule "subrepo1"]\n'
1183 '[submodule "subrepo1"]\n'
1183 '\tpath = subrepo1\n'
1184 '\tpath = subrepo1\n'
1184 '\turl = https://code.rhodecode.com/dulwich\n'
1185 '\turl = https://code.rhodecode.com/dulwich\n'
1185 )
1186 )
1186 result = commit._get_submodule_url('subrepo1')
1187 result = commit._get_submodule_url('subrepo1')
1187 get_node_mock.assert_called_once_with('.gitmodules')
1188 get_node_mock.assert_called_once_with('.gitmodules')
1188 assert result == 'https://code.rhodecode.com/dulwich'
1189 assert result == 'https://code.rhodecode.com/dulwich'
1189
1190
1190 def test_complex_submodule_path(self):
1191 def test_complex_submodule_path(self):
1191 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1192 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1192 node = mock.Mock()
1193 node = mock.Mock()
1193 with mock.patch.object(
1194 with mock.patch.object(
1194 commit, 'get_node', return_value=node) as get_node_mock:
1195 commit, 'get_node', return_value=node) as get_node_mock:
1195 node.content = (
1196 node.content = (
1196 '[submodule "complex/subrepo/path"]\n'
1197 '[submodule "complex/subrepo/path"]\n'
1197 '\tpath = complex/subrepo/path\n'
1198 '\tpath = complex/subrepo/path\n'
1198 '\turl = https://code.rhodecode.com/dulwich\n'
1199 '\turl = https://code.rhodecode.com/dulwich\n'
1199 )
1200 )
1200 result = commit._get_submodule_url('complex/subrepo/path')
1201 result = commit._get_submodule_url('complex/subrepo/path')
1201 get_node_mock.assert_called_once_with('.gitmodules')
1202 get_node_mock.assert_called_once_with('.gitmodules')
1202 assert result == 'https://code.rhodecode.com/dulwich'
1203 assert result == 'https://code.rhodecode.com/dulwich'
1203
1204
1204 def test_submodules_file_not_found(self):
1205 def test_submodules_file_not_found(self):
1205 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1206 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1206 with mock.patch.object(
1207 with mock.patch.object(
1207 commit, 'get_node', side_effect=NodeDoesNotExistError):
1208 commit, 'get_node', side_effect=NodeDoesNotExistError):
1208 result = commit._get_submodule_url('complex/subrepo/path')
1209 result = commit._get_submodule_url('complex/subrepo/path')
1209 assert result is None
1210 assert result is None
1210
1211
1211 def test_path_not_found(self):
1212 def test_path_not_found(self):
1212 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1213 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1213 node = mock.Mock()
1214 node = mock.Mock()
1214 with mock.patch.object(
1215 with mock.patch.object(
1215 commit, 'get_node', return_value=node) as get_node_mock:
1216 commit, 'get_node', return_value=node) as get_node_mock:
1216 node.content = (
1217 node.content = (
1217 '[submodule "subrepo1"]\n'
1218 '[submodule "subrepo1"]\n'
1218 '\tpath = subrepo1\n'
1219 '\tpath = subrepo1\n'
1219 '\turl = https://code.rhodecode.com/dulwich\n'
1220 '\turl = https://code.rhodecode.com/dulwich\n'
1220 )
1221 )
1221 result = commit._get_submodule_url('subrepo2')
1222 result = commit._get_submodule_url('subrepo2')
1222 get_node_mock.assert_called_once_with('.gitmodules')
1223 get_node_mock.assert_called_once_with('.gitmodules')
1223 assert result is None
1224 assert result is None
1224
1225
1225 def test_returns_cached_values(self):
1226 def test_returns_cached_values(self):
1226 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1227 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1227 node = mock.Mock()
1228 node = mock.Mock()
1228 with mock.patch.object(
1229 with mock.patch.object(
1229 commit, 'get_node', return_value=node) as get_node_mock:
1230 commit, 'get_node', return_value=node) as get_node_mock:
1230 node.content = (
1231 node.content = (
1231 '[submodule "subrepo1"]\n'
1232 '[submodule "subrepo1"]\n'
1232 '\tpath = subrepo1\n'
1233 '\tpath = subrepo1\n'
1233 '\turl = https://code.rhodecode.com/dulwich\n'
1234 '\turl = https://code.rhodecode.com/dulwich\n'
1234 )
1235 )
1235 for _ in range(3):
1236 for _ in range(3):
1236 commit._get_submodule_url('subrepo1')
1237 commit._get_submodule_url('subrepo1')
1237 get_node_mock.assert_called_once_with('.gitmodules')
1238 get_node_mock.assert_called_once_with('.gitmodules')
1238
1239
1239 def test_get_node_returns_a_link(self):
1240 def test_get_node_returns_a_link(self):
1240 repository = mock.Mock()
1241 repository = mock.Mock()
1241 repository.alias = 'git'
1242 repository.alias = 'git'
1242 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1243 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1243 submodule_url = 'https://code.rhodecode.com/dulwich'
1244 submodule_url = 'https://code.rhodecode.com/dulwich'
1244 get_id_patch = mock.patch.object(
1245 get_id_patch = mock.patch.object(
1245 commit, '_get_id_for_path', return_value=(1, 'link'))
1246 commit, '_get_tree_id_for_path', return_value=(1, 'link'))
1246 get_submodule_patch = mock.patch.object(
1247 get_submodule_patch = mock.patch.object(
1247 commit, '_get_submodule_url', return_value=submodule_url)
1248 commit, '_get_submodule_url', return_value=submodule_url)
1248
1249
1249 with get_id_patch, get_submodule_patch as submodule_mock:
1250 with get_id_patch, get_submodule_patch as submodule_mock:
1250 node = commit.get_node('/abcde')
1251 node = commit.get_node('/abcde')
1251
1252
1252 submodule_mock.assert_called_once_with('/abcde')
1253 submodule_mock.assert_called_once_with('/abcde')
1253 assert type(node) == SubModuleNode
1254 assert type(node) == SubModuleNode
1254 assert node.url == submodule_url
1255 assert node.url == submodule_url
1255
1256
1256 def test_get_nodes_returns_links(self):
1257 def test_get_nodes_returns_links(self):
1257 repository = mock.MagicMock()
1258 repository = mock.MagicMock()
1258 repository.alias = 'git'
1259 repository.alias = 'git'
1259 repository._remote.tree_items.return_value = [
1260 repository._remote.tree_items.return_value = [
1260 ('subrepo', 'stat', 1, 'link')
1261 ('subrepo', 'stat', 1, 'link')
1261 ]
1262 ]
1262 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1263 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1263 submodule_url = 'https://code.rhodecode.com/dulwich'
1264 submodule_url = 'https://code.rhodecode.com/dulwich'
1264 get_id_patch = mock.patch.object(
1265 get_id_patch = mock.patch.object(
1265 commit, '_get_id_for_path', return_value=(1, 'tree'))
1266 commit, '_get_tree_id_for_path', return_value=(1, 'tree'))
1266 get_submodule_patch = mock.patch.object(
1267 get_submodule_patch = mock.patch.object(
1267 commit, '_get_submodule_url', return_value=submodule_url)
1268 commit, '_get_submodule_url', return_value=submodule_url)
1268
1269
1269 with get_id_patch, get_submodule_patch as submodule_mock:
1270 with get_id_patch, get_submodule_patch as submodule_mock:
1270 nodes = commit.get_nodes('/abcde')
1271 nodes = commit.get_nodes('/abcde')
1271
1272
1272 submodule_mock.assert_called_once_with('/abcde/subrepo')
1273 submodule_mock.assert_called_once_with('/abcde/subrepo')
1273 assert len(nodes) == 1
1274 assert len(nodes) == 1
1274 assert type(nodes[0]) == SubModuleNode
1275 assert type(nodes[0]) == SubModuleNode
1275 assert nodes[0].url == submodule_url
1276 assert nodes[0].url == submodule_url
@@ -1,351 +1,349 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Tests so called "in memory commits" commit API of vcs.
22 Tests so called "in memory commits" commit API of vcs.
23 """
23 """
24 import datetime
24 import datetime
25
25
26 import pytest
26 import pytest
27
27
28 from rhodecode.lib.utils2 import safe_unicode
28 from rhodecode.lib.utils2 import safe_unicode
29 from rhodecode.lib.vcs.exceptions import (
29 from rhodecode.lib.vcs.exceptions import (
30 EmptyRepositoryError, NodeAlreadyAddedError, NodeAlreadyExistsError,
30 EmptyRepositoryError, NodeAlreadyAddedError, NodeAlreadyExistsError,
31 NodeAlreadyRemovedError, NodeAlreadyChangedError, NodeDoesNotExistError,
31 NodeAlreadyRemovedError, NodeAlreadyChangedError, NodeDoesNotExistError,
32 NodeNotChangedError)
32 NodeNotChangedError)
33 from rhodecode.lib.vcs.nodes import DirNode, FileNode
33 from rhodecode.lib.vcs.nodes import DirNode, FileNode
34 from rhodecode.tests.vcs.conftest import BackendTestMixin
34 from rhodecode.tests.vcs.conftest import BackendTestMixin
35
35
36
36
@pytest.fixture
def nodes():
    """Sample file nodes: plain files, a name with spaces, a nested path,
    and a binary blob (an OLE/compound-document header fragment)."""
    binary_payload = (
        '\xd0\xcf\x11\xe0\xa1\xb1\x1a\xe1\x00\x00\x00\x00\x00\x00'
        '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00;\x00\x03\x00\xfe'
        '\xff\t\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
        '\x01\x00\x00\x00\x1a\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00'
        '\x00\x18\x00\x00\x00\x01\x00\x00\x00\xfe\xff\xff\xff\x00\x00'
        '\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff'
        '\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
    )
    return [
        FileNode('foobar', content='Foo & bar'),
        FileNode('foobar2', content='Foo & bar, doubled!'),
        FileNode('foo bar with spaces', content=''),
        FileNode('foo/bar/baz', content='Inside'),
        FileNode('foo/bar/file.bin', content=binary_payload),
    ]
58
58
59
59
@pytest.mark.usefixtures("vcs_repository_support")
class TestInMemoryCommit(BackendTestMixin):
    """
    Backend independent tests of the in-memory commit API.

    This is a backend independent test case class which should be created
    with ``type`` method.

    It is required to set following attributes at subclass:

    - ``backend_alias``: alias of used backend (see ``vcs.BACKENDS``)
    """

    @classmethod
    def _get_commits(cls):
        # Every test starts from an empty repository.
        return []

    def test_add(self, nodes):
        for item in nodes:
            self.imc.add(item)

        self.commit()
        self.assert_succesful_commit(nodes)

    @pytest.mark.backends("hg")
    def test_add_on_branch_hg(self, nodes):
        for item in nodes:
            self.imc.add(item)
        self.commit(branch=u'stable')
        self.assert_succesful_commit(nodes)

    @pytest.mark.backends("git")
    def test_add_on_branch_git(self, nodes):
        for item in nodes:
            self.imc.add(item)
        self.commit(branch=u'stable')
        self.assert_succesful_commit(nodes)

    def test_add_in_bulk(self, nodes):
        # Same as test_add, but staging all nodes with one call.
        self.imc.add(*nodes)

        self.commit()
        self.assert_succesful_commit(nodes)

    def test_add_non_ascii_files(self):
        non_ascii_nodes = [
            FileNode('ΕΌΓ³Ε‚wik/zwierzΔ…tko_utf8_str', content='Δ‡Δ‡Δ‡Δ‡'),
            FileNode(u'ΕΌΓ³Ε‚wik/zwierzΔ…tko_unicode', content=u'Δ‡Δ‡Δ‡Δ‡'),
        ]

        for item in non_ascii_nodes:
            self.imc.add(item)

        self.commit()
        self.assert_succesful_commit(non_ascii_nodes)

    def commit(self, branch=None):
        # Helper, not a test: remembers the pre-commit state used later by
        # assert_succesful_commit, then commits the staged changes.
        # NOTE: the last assignment rebinds ``self.commit`` to the created
        # commit object, deliberately shadowing this method on the instance.
        self.old_commit_count = len(self.repo.commit_ids)
        self.commit_message = u'Test commit with unicode: ΕΌΓ³Ε‚wik'
        self.commit_author = u'{} <foo@email.com>'.format(self.__class__.__name__)
        self.commit = self.imc.commit(
            message=self.commit_message, author=self.commit_author,
            branch=branch)

    def test_add_actually_adds_all_nodes_at_second_commit_too(self):
        first_batch = [
            FileNode('foo/bar/image.png', content='\0'),
            FileNode('foo/README.txt', content='readme!'),
        ]
        self.imc.add(*first_batch)
        commit = self.imc.commit(u'Initial', u'joe doe <joe.doe@example.com>')
        # Intermediate directories must be materialized as DirNodes.
        assert isinstance(commit.get_node('foo'), DirNode)
        assert isinstance(commit.get_node('foo/bar'), DirNode)
        self.assert_nodes_in_commit(commit, first_batch)

        # Commit some more files again.
        second_batch = [
            FileNode('foo/bar/foobaz/bar', content='foo'),
            FileNode('foo/bar/another/bar', content='foo'),
            FileNode('foo/baz.txt', content='foo'),
            FileNode('foobar/foobaz/file', content='foo'),
            FileNode('foobar/barbaz', content='foo'),
        ]
        self.imc.add(*second_batch)
        commit = self.imc.commit(u'Another', u'joe doe <joe.doe@example.com>')
        self.assert_nodes_in_commit(commit, second_batch)

    def test_add_raise_already_added(self):
        duplicate = FileNode('foobar', content='baz')
        self.imc.add(duplicate)
        with pytest.raises(NodeAlreadyAddedError):
            self.imc.add(duplicate)

    def test_check_integrity_raise_already_exist(self):
        existing = FileNode('foobar', content='baz')
        self.imc.add(existing)
        self.imc.commit(message=u'Added foobar', author=u'{} <foo@bar.com>'.format(self))
        # Staging the same path again is only detected at commit time.
        self.imc.add(existing)
        with pytest.raises(NodeAlreadyExistsError):
            self.imc.commit(message='new message', author=u'{} <foo@bar.com>'.format(self))

    def test_change(self):
        self.imc.add(FileNode('foo/bar/baz', content='foo'))
        self.imc.add(FileNode('foo/fbar', content='foobar'))
        tip = self.imc.commit(u'Initial', u'joe doe <joe.doe@example.com>')

        # Change node's content
        changed = FileNode('foo/bar/baz', content='My **changed** content')
        self.imc.change(changed)
        self.imc.commit(u'Changed %s' % changed.path, u'joe doe <joe.doe@example.com>')

        newtip = self.repo.get_commit()
        assert tip != newtip
        assert tip.id != newtip.id
        self.assert_nodes_in_commit(newtip, (changed,))

    def test_change_non_ascii(self):
        initial_nodes = [
            FileNode('ΕΌΓ³Ε‚wik/zwierzΔ…tko', content='Δ‡Δ‡Δ‡Δ‡'),
            FileNode(u'ΕΌΓ³Ε‚wik/zwierzΔ…tko_uni', content=u'Δ‡Δ‡Δ‡Δ‡'),
        ]
        for item in initial_nodes:
            self.imc.add(item)

        tip = self.imc.commit(u'Initial', u'joe doe <joe.doe@example.com>')

        # Replace the content of both files, one commit each: first the
        # byte-string path, then the unicode path.
        changed_str = FileNode('ΕΌΓ³Ε‚wik/zwierzΔ…tko', content='My **changed** content')
        self.imc.change(changed_str)
        self.imc.commit(u'Changed %s' % safe_unicode(changed_str.path),
                        author=u'joe doe <joe.doe@example.com>')

        changed_uni = FileNode(
            u'ΕΌΓ³Ε‚wik/zwierzΔ…tko_uni', content=u'My **changed** content')
        self.imc.change(changed_uni)
        self.imc.commit(u'Changed %s' % safe_unicode(changed_uni.path),
                        author=u'joe doe <joe.doe@example.com>')

        newtip = self.repo.get_commit()
        assert tip != newtip
        assert tip.id != newtip.id

        self.assert_nodes_in_commit(newtip, (changed_str, changed_uni))

    def test_change_raise_empty_repository(self):
        missing = FileNode('foobar')
        with pytest.raises(EmptyRepositoryError):
            self.imc.change(missing)

    def test_check_integrity_change_raise_node_does_not_exist(self):
        tracked = FileNode('foobar', content='baz')
        self.imc.add(tracked)
        self.imc.commit(message=u'Added foobar', author=u'{} <foo@bar.com>'.format(self))
        untracked = FileNode('not-foobar', content='')
        # Staging the change succeeds; the integrity check at commit fails.
        self.imc.change(untracked)
        with pytest.raises(NodeDoesNotExistError):
            self.imc.commit(message='Changed not existing node', author=u'{} <foo@bar.com>'.format(self))

    def test_change_raise_node_already_changed(self):
        original = FileNode('foobar', content='baz')
        self.imc.add(original)
        self.imc.commit(message=u'Added foobar', author=u'{} <foo@bar.com>'.format(self))
        updated = FileNode('foobar', content='more baz')
        self.imc.change(updated)
        with pytest.raises(NodeAlreadyChangedError):
            self.imc.change(updated)

    def test_check_integrity_change_raise_node_not_changed(self, nodes):
        self.test_add(nodes)  # Performs first commit

        # Identical content: marking it changed must fail at commit time.
        unchanged = FileNode(nodes[0].path, content=nodes[0].content)
        self.imc.change(unchanged)
        with pytest.raises(NodeNotChangedError):
            self.imc.commit(
                message=u'Trying to mark node as changed without touching it',
                author=u'{} <foo@bar.com>'.format(self))

    def test_change_raise_node_already_removed(self):
        original = FileNode('foobar', content='baz')
        self.imc.add(original)
        self.imc.commit(message=u'Added foobar', author=u'{} <foo@bar.com>'.format(self))
        self.imc.remove(FileNode('foobar'))
        with pytest.raises(NodeAlreadyRemovedError):
            self.imc.change(original)

    def test_remove(self, nodes):
        self.test_add(nodes)  # Performs first commit

        tip = self.repo.get_commit()
        target = nodes[0]
        assert target.content == tip.get_node(target.path).content
        self.imc.remove(target)
        self.imc.commit(
            message=u'Removed %s' % target.path, author=u'{} <foo@bar.com>'.format(self))

        newtip = self.repo.get_commit()
        assert tip != newtip
        assert tip.id != newtip.id
        with pytest.raises(NodeDoesNotExistError):
            newtip.get_node(target.path)

    def test_remove_last_file_from_directory(self):
        nested = FileNode('omg/qwe/foo/bar', content='foobar')
        self.imc.add(nested)
        self.imc.commit(u'added', author=u'joe doe <joe@doe.com>')

        self.imc.remove(nested)
        tip = self.imc.commit(u'removed', u'joe doe <joe@doe.com>')
        with pytest.raises(NodeDoesNotExistError):
            tip.get_node('omg/qwe/foo/bar')

    def test_remove_raise_node_does_not_exist(self, nodes):
        self.imc.remove(nodes[0])
        with pytest.raises(NodeDoesNotExistError):
            self.imc.commit(
                message='Trying to remove node at empty repository',
                author=u'{} <foo@bar.com>'.format(self))

    def test_check_integrity_remove_raise_node_does_not_exist(self, nodes):
        self.test_add(nodes)  # Performs first commit

        missing = FileNode('no-such-file')
        self.imc.remove(missing)
        with pytest.raises(NodeDoesNotExistError):
            self.imc.commit(
                message=u'Trying to remove not existing node',
                author=u'{} <foo@bar.com>'.format(self))

    def test_remove_raise_node_already_removed(self, nodes):
        self.test_add(nodes)  # Performs first commit

        target = FileNode(nodes[0].path)
        self.imc.remove(target)
        with pytest.raises(NodeAlreadyRemovedError):
            self.imc.remove(target)

    def test_remove_raise_node_already_changed(self, nodes):
        self.test_add(nodes)  # Performs first commit

        modified = FileNode(nodes[0].path, content='Bending time')
        self.imc.change(modified)
        with pytest.raises(NodeAlreadyChangedError):
            self.imc.remove(modified)

    def test_reset(self):
        self.imc.add(FileNode('foo', content='bar'))
        # self.imc.change(FileNode('baz', content='new'))
        # self.imc.remove(FileNode('qwe'))
        self.imc.reset()
        assert not any((self.imc.added, self.imc.changed, self.imc.removed))

    def test_multiple_commits(self):
        num_commits = 3  # number of commits to perform
        previous = None
        for idx in xrange(num_commits):
            fname = 'file%s' % str(idx).rjust(5, '0')
            content = 'foobar\n' * idx
            self.imc.add(FileNode(fname, content=content))
            commit = self.imc.commit(u"Commit no. %s" % (idx + 1), author=u'vcs <foo@bar.com>')
            assert previous != commit
            previous = commit

        # Check commit number for same repo
        assert len(self.repo.commit_ids) == num_commits

        # Check commit number for recreated repo
        repo = self.Backend(self.repo_path)
        assert len(repo.commit_ids) == num_commits

    def test_date_attr(self, local_dt_to_utc):
        self.imc.add(FileNode('foobar.txt', content='Foobared!'))
        birthday = datetime.datetime(1985, 1, 30, 1, 45)
        commit = self.imc.commit(
            u"Committed at time when I was born ;-)",
            author=u'{} <foo@bar.com>'.format(self), date=birthday)

        # The backend stores the local date converted to UTC.
        assert commit.date == local_dt_to_utc(birthday)

    def assert_succesful_commit(self, added_nodes):
        # NOTE(review): name keeps the historical spelling ("succesful")
        # because subclasses in other backends may call it.
        newtip = self.repo.get_commit()
        assert self.commit == newtip
        assert self.old_commit_count + 1 == len(self.repo.commit_ids)
        assert newtip.message == self.commit_message
        assert newtip.author == self.commit_author
        assert not any((self.imc.added, self.imc.changed, self.imc.removed))
        self.assert_nodes_in_commit(newtip, added_nodes)

    def assert_nodes_in_commit(self, commit, nodes):
        for expected in nodes:
            assert commit.get_node(expected.path).content == expected.content
General Comments 0
You need to be logged in to leave comments. Login now