git: replaced some raw subprocess commands with dedicated GIT vcsserver commands.
marcink
r3862:44edb079 default
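
This changeset swaps several subprocess invocations (git rev-list, git log, git blame, git show/git diff) for dedicated vcsserver remote commands (children, node_history, node_annotate, diff). The public API of GitRepository/GitCommit stays the same; below is a minimal usage sketch, assuming a configured RhodeCode + vcsserver install (the repository path and file name are hypothetical):

    from rhodecode.lib.vcs.backends.git.repository import GitRepository

    repo = GitRepository('/srv/repos/example.git')            # hypothetical path
    commit = repo.get_commit(commit_id=repo.commit_ids[-1])   # tip commit

    # These calls used to shell out via run_git_command(); after this change
    # they are answered by the vcsserver remote instead.
    children = commit.children
    history = commit.get_path_history('README.rst', limit=5)  # hypothetical file
    diff = repo.get_diff(commit.parents[0], commit, path='README.rst')
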
@@ -1,507 +1,474 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 GIT commit module
23 23 """
24 24
25 25 import re
26 26 import stat
27 27 from itertools import chain
28 28 from StringIO import StringIO
29 29
30 30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 31
32 32 from rhodecode.lib.datelib import utcdate_fromtimestamp
33 33 from rhodecode.lib.utils import safe_unicode, safe_str
34 34 from rhodecode.lib.utils2 import safe_int
35 35 from rhodecode.lib.vcs.conf import settings
36 36 from rhodecode.lib.vcs.backends import base
37 37 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
38 38 from rhodecode.lib.vcs.nodes import (
39 39 FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
40 40 ChangedFileNodesGenerator, AddedFileNodesGenerator,
41 41 RemovedFileNodesGenerator, LargeFileNode)
42 42 from rhodecode.lib.vcs.compat import configparser
43 43
44 44
45 45 class GitCommit(base.BaseCommit):
46 46 """
47 47 Represents state of the repository at single commit id.
48 48 """
49 49
50 50 _filter_pre_load = [
51 51 # done through a more complex tree walk on parents
52 52 "affected_files",
53 53 # done through subprocess not remote call
54 54 "children",
55 55 # done through a more complex tree walk on parents
56 56 "status",
57 57 # mercurial specific property not supported here
58 58 "_file_paths",
59 59 # mercurial specific property not supported here
60 60 'obsolete',
61 61 # mercurial specific property not supported here
62 62 'phase',
63 63 # mercurial specific property not supported here
64 64 'hidden'
65 65 ]
66 66
67 67 def __init__(self, repository, raw_id, idx, pre_load=None):
68 68 self.repository = repository
69 69 self._remote = repository._remote
70 70 # TODO: johbo: Tweak of raw_id should not be necessary
71 71 self.raw_id = safe_str(raw_id)
72 72 self.idx = idx
73 73
74 74 self._set_bulk_properties(pre_load)
75 75
76 76 # caches
77 77 self._stat_modes = {} # stat info for paths
78 78 self._paths = {} # path processed with parse_tree
79 79 self.nodes = {}
80 80 self._submodules = None
81 81
82 82 def _set_bulk_properties(self, pre_load):
83 83
84 84 if not pre_load:
85 85 return
86 86 pre_load = [entry for entry in pre_load
87 87 if entry not in self._filter_pre_load]
88 88 if not pre_load:
89 89 return
90 90
91 91 result = self._remote.bulk_request(self.raw_id, pre_load)
92 92 for attr, value in result.items():
93 93 if attr in ["author", "message"]:
94 94 if value:
95 95 value = safe_unicode(value)
96 96 elif attr == "date":
97 97 value = utcdate_fromtimestamp(*value)
98 98 elif attr == "parents":
99 99 value = self._make_commits(value)
100 100 elif attr == "branch":
101 101 value = value[0] if value else None
102 102 self.__dict__[attr] = value
103 103
104 104 @LazyProperty
105 105 def _commit(self):
106 106 return self._remote[self.raw_id]
107 107
108 108 @LazyProperty
109 109 def _tree_id(self):
110 110 return self._remote[self._commit['tree']]['id']
111 111
112 112 @LazyProperty
113 113 def id(self):
114 114 return self.raw_id
115 115
116 116 @LazyProperty
117 117 def short_id(self):
118 118 return self.raw_id[:12]
119 119
120 120 @LazyProperty
121 121 def message(self):
122 122 return safe_unicode(self._remote.message(self.id))
123 123
124 124 @LazyProperty
125 125 def committer(self):
126 126 return safe_unicode(self._remote.author(self.id))
127 127
128 128 @LazyProperty
129 129 def author(self):
130 130 return safe_unicode(self._remote.author(self.id))
131 131
132 132 @LazyProperty
133 133 def date(self):
134 134 unix_ts, tz = self._remote.date(self.raw_id)
135 135 return utcdate_fromtimestamp(unix_ts, tz)
136 136
137 137 @LazyProperty
138 138 def status(self):
139 139 """
140 140 Returns modified, added, removed, deleted files for current commit
141 141 """
142 142 return self.changed, self.added, self.removed
143 143
144 144 @LazyProperty
145 145 def tags(self):
146 146 tags = [safe_unicode(name) for name,
147 147 commit_id in self.repository.tags.iteritems()
148 148 if commit_id == self.raw_id]
149 149 return tags
150 150
151 151 @LazyProperty
152 152 def commit_branches(self):
153 153 branches = []
154 154 for name, commit_id in self.repository.branches.iteritems():
155 155 if commit_id == self.raw_id:
156 156 branches.append(name)
157 157 return branches
158 158
159 159 @LazyProperty
160 160 def branch(self):
161 161 branches = self._remote.branch(self.raw_id)
162 162
163 163 if branches:
164 164 # actually commit can have multiple branches in git
165 165 return safe_unicode(branches[0])
166 166
167 167 def _get_tree_id_for_path(self, path):
168 168 path = safe_str(path)
169 169 if path in self._paths:
170 170 return self._paths[path]
171 171
172 172 tree_id = self._tree_id
173 173
174 174 path = path.strip('/')
175 175 if path == '':
176 176 data = [tree_id, "tree"]
177 177 self._paths[''] = data
178 178 return data
179 179
180 180 tree_id, tree_type, tree_mode = \
181 181 self._remote.tree_and_type_for_path(self.raw_id, path)
182 182 if tree_id is None:
183 183 raise self.no_node_at_path(path)
184 184
185 185 self._paths[path] = [tree_id, tree_type]
186 186 self._stat_modes[path] = tree_mode
187 187
188 188 if path not in self._paths:
189 189 raise self.no_node_at_path(path)
190 190
191 191 return self._paths[path]
192 192
193 193 def _get_kind(self, path):
194 194 tree_id, type_ = self._get_tree_id_for_path(path)
195 195 if type_ == 'blob':
196 196 return NodeKind.FILE
197 197 elif type_ == 'tree':
198 198 return NodeKind.DIR
199 199 elif type_ == 'link':
200 200 return NodeKind.SUBMODULE
201 201 return None
202 202
203 203 def _get_filectx(self, path):
204 204 path = self._fix_path(path)
205 205 if self._get_kind(path) != NodeKind.FILE:
206 206 raise CommitError(
207 207 "File does not exist for commit %s at '%s'" % (self.raw_id, path))
208 208 return path
209 209
210 210 def _get_file_nodes(self):
211 211 return chain(*(t[2] for t in self.walk()))
212 212
213 213 @LazyProperty
214 214 def parents(self):
215 215 """
216 216 Returns list of parent commits.
217 217 """
218 218 parent_ids = self._remote.parents(self.id)
219 219 return self._make_commits(parent_ids)
220 220
221 221 @LazyProperty
222 222 def children(self):
223 223 """
224 224 Returns list of child commits.
225 225 """
226 rev_filter = settings.GIT_REV_FILTER
227 output, __ = self.repository.run_git_command(
228 ['rev-list', '--children'] + rev_filter)
229 226
230 child_ids = []
231 pat = re.compile(r'^%s' % self.raw_id)
232 for l in output.splitlines():
233 if pat.match(l):
234 found_ids = l.split(' ')[1:]
235 child_ids.extend(found_ids)
236 return self._make_commits(child_ids)
227 children = self._remote.children(self.raw_id)
228 return self._make_commits(children)
237 229
238 230 def _make_commits(self, commit_ids):
239 231 def commit_maker(_commit_id):
240 232 return self.repository.get_commit(commit_id=commit_id)
241 233
242 234 return [commit_maker(commit_id) for commit_id in commit_ids]
243 235
244 236 def get_file_mode(self, path):
245 237 """
246 238 Returns stat mode of the file at the given `path`.
247 239 """
248 240 path = safe_str(path)
249 241 # ensure path is traversed
250 242 self._get_tree_id_for_path(path)
251 243 return self._stat_modes[path]
252 244
253 245 def is_link(self, path):
254 246 return stat.S_ISLNK(self.get_file_mode(path))
255 247
256 248 def get_file_content(self, path):
257 249 """
258 250 Returns content of the file at given `path`.
259 251 """
260 252 tree_id, _ = self._get_tree_id_for_path(path)
261 253 return self._remote.blob_as_pretty_string(tree_id)
262 254
263 255 def get_file_size(self, path):
264 256 """
265 257 Returns size of the file at given `path`.
266 258 """
267 259 tree_id, _ = self._get_tree_id_for_path(path)
268 260 return self._remote.blob_raw_length(tree_id)
269 261
270 262 def get_path_history(self, path, limit=None, pre_load=None):
271 263 """
272 264 Returns history of file as reversed list of `GitCommit` objects for
273 265 which file at given `path` has been modified.
274
275 TODO: This function now uses an underlying 'git' command which works
276 quickly but ideally we should replace with an algorithm.
277 266 """
278 self._get_filectx(path)
279 f_path = safe_str(path)
280 267
281 # optimize for n==1, rev-list is much faster for that use-case
282 if limit == 1:
283 cmd = ['rev-list', '-1', self.raw_id, '--', f_path]
284 else:
285 cmd = ['log']
286 if limit:
287 cmd.extend(['-n', str(safe_int(limit, 0))])
288 cmd.extend(['--pretty=format: %H', '-s', self.raw_id, '--', f_path])
289
290 output, __ = self.repository.run_git_command(cmd)
291 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
292
268 path = self._get_filectx(path)
269 hist = self._remote.node_history(self.raw_id, path, limit)
293 270 return [
294 271 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
295 for commit_id in commit_ids]
272 for commit_id in hist]
296 273
297 274 def get_file_annotate(self, path, pre_load=None):
298 275 """
299 276 Returns a generator of four element tuples with
300 277 lineno, commit_id, commit lazy loader and line
278 """
301 279
302 TODO: This function now uses os underlying 'git' command which is
303 generally not good. Should be replaced with algorithm iterating
304 commits.
305 """
306 cmd = ['blame', '-l', '--root', '-r', self.raw_id, '--', path]
307 # -l ==> outputs long shas (and we need all 40 characters)
308 # --root ==> doesn't put '^' character for bounderies
309 # -r commit_id ==> blames for the given commit
310 output, __ = self.repository.run_git_command(cmd)
280 result = self._remote.node_annotate(self.raw_id, path)
311 281
312 for i, blame_line in enumerate(output.split('\n')[:-1]):
313 line_no = i + 1
314 commit_id, line = re.split(r' ', blame_line, 1)
282 for ln_no, commit_id, content in result:
315 283 yield (
316 line_no, commit_id,
317 lambda: self.repository.get_commit(commit_id=commit_id,
318 pre_load=pre_load),
319 line)
284 ln_no, commit_id,
285 lambda: self.repository.get_commit(commit_id=commit_id, pre_load=pre_load),
286 content)
320 287
321 288 def get_nodes(self, path):
322 289
323 290 if self._get_kind(path) != NodeKind.DIR:
324 291 raise CommitError(
325 292 "Directory does not exist for commit %s at '%s'" % (self.raw_id, path))
326 293 path = self._fix_path(path)
327 294
328 295 tree_id, _ = self._get_tree_id_for_path(path)
329 296
330 297 dirnodes = []
331 298 filenodes = []
332 299
333 300 # extracted tree ID gives us our files...
334 301 for name, stat_, id_, type_ in self._remote.tree_items(tree_id):
335 302 if type_ == 'link':
336 303 url = self._get_submodule_url('/'.join((path, name)))
337 304 dirnodes.append(SubModuleNode(
338 305 name, url=url, commit=id_, alias=self.repository.alias))
339 306 continue
340 307
341 308 if path != '':
342 309 obj_path = '/'.join((path, name))
343 310 else:
344 311 obj_path = name
345 312 if obj_path not in self._stat_modes:
346 313 self._stat_modes[obj_path] = stat_
347 314
348 315 if type_ == 'tree':
349 316 dirnodes.append(DirNode(obj_path, commit=self))
350 317 elif type_ == 'blob':
351 318 filenodes.append(FileNode(obj_path, commit=self, mode=stat_))
352 319 else:
353 320 raise CommitError(
354 321 "Requested object should be Tree or Blob, is %s", type_)
355 322
356 323 nodes = dirnodes + filenodes
357 324 for node in nodes:
358 325 if node.path not in self.nodes:
359 326 self.nodes[node.path] = node
360 327 nodes.sort()
361 328 return nodes
362 329
363 330 def get_node(self, path, pre_load=None):
364 331 if isinstance(path, unicode):
365 332 path = path.encode('utf-8')
366 333 path = self._fix_path(path)
367 334 if path not in self.nodes:
368 335 try:
369 336 tree_id, type_ = self._get_tree_id_for_path(path)
370 337 except CommitError:
371 338 raise NodeDoesNotExistError(
372 339 "Cannot find one of parents' directories for a given "
373 340 "path: %s" % path)
374 341
375 342 if type_ == 'link':
376 343 url = self._get_submodule_url(path)
377 344 node = SubModuleNode(path, url=url, commit=tree_id,
378 345 alias=self.repository.alias)
379 346 elif type_ == 'tree':
380 347 if path == '':
381 348 node = RootNode(commit=self)
382 349 else:
383 350 node = DirNode(path, commit=self)
384 351 elif type_ == 'blob':
385 352 node = FileNode(path, commit=self, pre_load=pre_load)
386 353 self._stat_modes[path] = node.mode
387 354 else:
388 355 raise self.no_node_at_path(path)
389 356
390 357 # cache node
391 358 self.nodes[path] = node
392 359
393 360 return self.nodes[path]
394 361
395 362 def get_largefile_node(self, path):
396 363 tree_id, _ = self._get_tree_id_for_path(path)
397 364 pointer_spec = self._remote.is_large_file(tree_id)
398 365
399 366 if pointer_spec:
400 367 # content of that file regular FileNode is the hash of largefile
401 368 file_id = pointer_spec.get('oid_hash')
402 369 if self._remote.in_largefiles_store(file_id):
403 370 lf_path = self._remote.store_path(file_id)
404 371 return LargeFileNode(lf_path, commit=self, org_path=path)
405 372
406 373 @LazyProperty
407 374 def affected_files(self):
408 375 """
409 376 Gets a fast accessible file changes for given commit
410 377 """
411 378 added, modified, deleted = self._changes_cache
412 379 return list(added.union(modified).union(deleted))
413 380
414 381 @LazyProperty
415 382 def _changes_cache(self):
416 383 added = set()
417 384 modified = set()
418 385 deleted = set()
419 386 _r = self._remote
420 387
421 388 parents = self.parents
422 389 if not self.parents:
423 390 parents = [base.EmptyCommit()]
424 391 for parent in parents:
425 392 if isinstance(parent, base.EmptyCommit):
426 393 oid = None
427 394 else:
428 395 oid = parent.raw_id
429 396 changes = _r.tree_changes(oid, self.raw_id)
430 397 for (oldpath, newpath), (_, _), (_, _) in changes:
431 398 if newpath and oldpath:
432 399 modified.add(newpath)
433 400 elif newpath and not oldpath:
434 401 added.add(newpath)
435 402 elif not newpath and oldpath:
436 403 deleted.add(oldpath)
437 404 return added, modified, deleted
438 405
439 406 def _get_paths_for_status(self, status):
440 407 """
441 408 Returns sorted list of paths for given ``status``.
442 409
443 410 :param status: one of: *added*, *modified* or *deleted*
444 411 """
445 412 added, modified, deleted = self._changes_cache
446 413 return sorted({
447 414 'added': list(added),
448 415 'modified': list(modified),
449 416 'deleted': list(deleted)}[status]
450 417 )
451 418
452 419 @LazyProperty
453 420 def added(self):
454 421 """
455 422 Returns list of added ``FileNode`` objects.
456 423 """
457 424 if not self.parents:
458 425 return list(self._get_file_nodes())
459 426 return AddedFileNodesGenerator(
460 427 [n for n in self._get_paths_for_status('added')], self)
461 428
462 429 @LazyProperty
463 430 def changed(self):
464 431 """
465 432 Returns list of modified ``FileNode`` objects.
466 433 """
467 434 if not self.parents:
468 435 return []
469 436 return ChangedFileNodesGenerator(
470 437 [n for n in self._get_paths_for_status('modified')], self)
471 438
472 439 @LazyProperty
473 440 def removed(self):
474 441 """
475 442 Returns list of removed ``FileNode`` objects.
476 443 """
477 444 if not self.parents:
478 445 return []
479 446 return RemovedFileNodesGenerator(
480 447 [n for n in self._get_paths_for_status('deleted')], self)
481 448
482 449 def _get_submodule_url(self, submodule_path):
483 450 git_modules_path = '.gitmodules'
484 451
485 452 if self._submodules is None:
486 453 self._submodules = {}
487 454
488 455 try:
489 456 submodules_node = self.get_node(git_modules_path)
490 457 except NodeDoesNotExistError:
491 458 return None
492 459
493 460 content = submodules_node.content
494 461
495 462 # ConfigParser fails if there are whitespaces
496 463 content = '\n'.join(l.strip() for l in content.split('\n'))
497 464
498 465 parser = configparser.ConfigParser()
499 466 parser.readfp(StringIO(content))
500 467
501 468 for section in parser.sections():
502 469 path = parser.get(section, 'path')
503 470 url = parser.get(section, 'url')
504 471 if path and url:
505 472 self._submodules[path.strip('/')] = url
506 473
507 474 return self._submodules.get(submodule_path.strip('/'))
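
Note that get_file_annotate above still yields four-element tuples; only the data source moved from parsing 'git blame' output to the node_annotate remote call. A minimal consumer, assuming a GitCommit instance named 'commit' as in the sketch near the top (the file path is hypothetical):

    for line_no, commit_id, commit_loader, line in commit.get_file_annotate('setup.py'):
        # commit_loader is a no-argument callable; invoking it lazily loads the
        # full GitCommit for that line via repository.get_commit()
        print line_no, commit_id[:12], line.rstrip()
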
@@ -1,1021 +1,1004 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 GIT repository module
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import re
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.utils2 import CachedProperty
36 36 from rhodecode.lib.vcs import connection, path as vcspath
37 37 from rhodecode.lib.vcs.backends.base import (
38 38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 39 MergeFailureReason, Reference)
40 40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
41 41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
42 42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
43 43 from rhodecode.lib.vcs.exceptions import (
44 44 CommitDoesNotExistError, EmptyRepositoryError,
45 45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
46 46
47 47
48 48 SHA_PATTERN = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')
49 49
50 50 log = logging.getLogger(__name__)
51 51
52 52
53 53 class GitRepository(BaseRepository):
54 54 """
55 55 Git repository backend.
56 56 """
57 57 DEFAULT_BRANCH_NAME = 'master'
58 58
59 59 contact = BaseRepository.DEFAULT_CONTACT
60 60
61 61 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 62 do_workspace_checkout=False, with_wire=None, bare=False):
63 63
64 64 self.path = safe_str(os.path.abspath(repo_path))
65 65 self.config = config if config else self.get_default_config()
66 66 self.with_wire = with_wire or {"cache": False} # default should not use cache
67 67
68 68 self._init_repo(create, src_url, do_workspace_checkout, bare)
69 69
70 70 # caches
71 71 self._commit_ids = {}
72 72
73 73 @LazyProperty
74 74 def _remote(self):
75 75 repo_id = self.path
76 76 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
77 77
78 78 @LazyProperty
79 79 def bare(self):
80 80 return self._remote.bare()
81 81
82 82 @LazyProperty
83 83 def head(self):
84 84 return self._remote.head()
85 85
86 86 @CachedProperty
87 87 def commit_ids(self):
88 88 """
89 89 Returns list of commit ids, in ascending order. Being a lazy
90 90 attribute allows external tools to inject commit ids from cache.
91 91 """
92 92 commit_ids = self._get_all_commit_ids()
93 93 self._rebuild_cache(commit_ids)
94 94 return commit_ids
95 95
96 96 def _rebuild_cache(self, commit_ids):
97 97 self._commit_ids = dict((commit_id, index)
98 98 for index, commit_id in enumerate(commit_ids))
99 99
100 100 def run_git_command(self, cmd, **opts):
101 101 """
102 102 Runs given ``cmd`` as git command and returns tuple
103 103 (stdout, stderr).
104 104
105 105 :param cmd: git command to be executed
106 106 :param opts: env options to pass into Subprocess command
107 107 """
108 108 if not isinstance(cmd, list):
109 109 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
110 110
111 111 skip_stderr_log = opts.pop('skip_stderr_log', False)
112 112 out, err = self._remote.run_git_command(cmd, **opts)
113 113 if err and not skip_stderr_log:
114 114 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
115 115 return out, err
116 116
117 117 @staticmethod
118 118 def check_url(url, config):
119 119 """
120 120 Function will check the given url and try to verify that it is a valid
121 121 link. Sometimes it may happen that git will issue a basic
122 122 auth request, which can cause the whole API to hang when used from Python
123 123 or other external calls.
124 124
125 125 On failure it will raise urllib2.HTTPError; the exception is also raised
126 126 when the return code is not 200
127 127 """
128 128 # check first if it's not an url
129 129 if os.path.isdir(url) or url.startswith('file:'):
130 130 return True
131 131
132 132 if '+' in url.split('://', 1)[0]:
133 133 url = url.split('+', 1)[1]
134 134
135 135 # Request the _remote to verify the url
136 136 return connection.Git.check_url(url, config.serialize())
137 137
138 138 @staticmethod
139 139 def is_valid_repository(path):
140 140 if os.path.isdir(os.path.join(path, '.git')):
141 141 return True
142 142 # check case of bare repository
143 143 try:
144 144 GitRepository(path)
145 145 return True
146 146 except VCSError:
147 147 pass
148 148 return False
149 149
150 150 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
151 151 bare=False):
152 152 if create and os.path.exists(self.path):
153 153 raise RepositoryError(
154 154 "Cannot create repository at %s, location already exists"
155 155 % self.path)
156 156
157 157 if bare and do_workspace_checkout:
158 158 raise RepositoryError("Cannot update a bare repository")
159 159 try:
160 160
161 161 if src_url:
162 162 # check URL before any actions
163 163 GitRepository.check_url(src_url, self.config)
164 164
165 165 if create:
166 166 os.makedirs(self.path, mode=0o755)
167 167
168 168 if bare:
169 169 self._remote.init_bare()
170 170 else:
171 171 self._remote.init()
172 172
173 173 if src_url and bare:
174 174 # bare repository only allows a fetch and checkout is not allowed
175 175 self.fetch(src_url, commit_ids=None)
176 176 elif src_url:
177 177 self.pull(src_url, commit_ids=None,
178 178 update_after=do_workspace_checkout)
179 179
180 180 else:
181 181 if not self._remote.assert_correct_path():
182 182 raise RepositoryError(
183 183 'Path "%s" does not contain a Git repository' %
184 184 (self.path,))
185 185
186 186 # TODO: johbo: check if we have to translate the OSError here
187 187 except OSError as err:
188 188 raise RepositoryError(err)
189 189
190 190 def _get_all_commit_ids(self):
191 191 return self._remote.get_all_commit_ids()
192 192
193 193 def _get_commit_ids(self, filters=None):
194 194 # we must check if this repo is not empty, since later command
195 195 # fails if it is. And it's cheaper to ask than throw the subprocess
196 196 # errors
197 197
198 198 head = self._remote.head(show_exc=False)
199 199
200 200 if not head:
201 201 return []
202 202
203 203 rev_filter = ['--branches', '--tags']
204 204 extra_filter = []
205 205
206 206 if filters:
207 207 if filters.get('since'):
208 208 extra_filter.append('--since=%s' % (filters['since']))
209 209 if filters.get('until'):
210 210 extra_filter.append('--until=%s' % (filters['until']))
211 211 if filters.get('branch_name'):
212 212 rev_filter = []
213 213 extra_filter.append(filters['branch_name'])
214 214 rev_filter.extend(extra_filter)
215 215
216 216 # if filters.get('start') or filters.get('end'):
217 217 # # skip is offset, max-count is limit
218 218 # if filters.get('start'):
219 219 # extra_filter += ' --skip=%s' % filters['start']
220 220 # if filters.get('end'):
221 221 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
222 222
223 223 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
224 224 try:
225 225 output, __ = self.run_git_command(cmd)
226 226 except RepositoryError:
227 227 # Can be raised for empty repositories
228 228 return []
229 229 return output.splitlines()
230 230
231 231 def _lookup_commit(self, commit_id_or_idx, translate_tag=True):
232 232 def is_null(value):
233 233 return len(value) == commit_id_or_idx.count('0')
234 234
235 235 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
236 236 return self.commit_ids[-1]
237 237 commit_missing_err = "Commit {} does not exist for `{}`".format(
238 238 *map(safe_str, [commit_id_or_idx, self.name]))
239 239
240 240 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
241 241 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
242 242 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
243 243 try:
244 244 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
245 245 except Exception:
246 246 raise CommitDoesNotExistError(commit_missing_err)
247 247
248 248 elif is_bstr:
249 249 # Need to call remote to translate id for tagging scenario
250 250 try:
251 251 remote_data = self._remote.get_object(commit_id_or_idx)
252 252 commit_id_or_idx = remote_data["commit_id"]
253 253 except (CommitDoesNotExistError,):
254 254 raise CommitDoesNotExistError(commit_missing_err)
255 255
256 256 # Ensure we return full id
257 257 if not SHA_PATTERN.match(str(commit_id_or_idx)):
258 258 raise CommitDoesNotExistError(
259 259 "Given commit id %s not recognized" % commit_id_or_idx)
260 260 return commit_id_or_idx
261 261
262 262 def get_hook_location(self):
263 263 """
264 264 returns absolute path to location where hooks are stored
265 265 """
266 266 loc = os.path.join(self.path, 'hooks')
267 267 if not self.bare:
268 268 loc = os.path.join(self.path, '.git', 'hooks')
269 269 return loc
270 270
271 271 @LazyProperty
272 272 def last_change(self):
273 273 """
274 274 Returns last change made on this repository as
275 275 `datetime.datetime` object.
276 276 """
277 277 try:
278 278 return self.get_commit().date
279 279 except RepositoryError:
280 280 tzoffset = makedate()[1]
281 281 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
282 282
283 283 def _get_fs_mtime(self):
284 284 idx_loc = '' if self.bare else '.git'
285 285 # fallback to filesystem
286 286 in_path = os.path.join(self.path, idx_loc, "index")
287 287 he_path = os.path.join(self.path, idx_loc, "HEAD")
288 288 if os.path.exists(in_path):
289 289 return os.stat(in_path).st_mtime
290 290 else:
291 291 return os.stat(he_path).st_mtime
292 292
293 293 @LazyProperty
294 294 def description(self):
295 295 description = self._remote.get_description()
296 296 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
297 297
298 298 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
299 299 if self.is_empty():
300 300 return OrderedDict()
301 301
302 302 result = []
303 303 for ref, sha in self._refs.iteritems():
304 304 if ref.startswith(prefix):
305 305 ref_name = ref
306 306 if strip_prefix:
307 307 ref_name = ref[len(prefix):]
308 308 result.append((safe_unicode(ref_name), sha))
309 309
310 310 def get_name(entry):
311 311 return entry[0]
312 312
313 313 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
314 314
315 315 def _get_branches(self):
316 316 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
317 317
318 318 @CachedProperty
319 319 def branches(self):
320 320 return self._get_branches()
321 321
322 322 @CachedProperty
323 323 def branches_closed(self):
324 324 return {}
325 325
326 326 @CachedProperty
327 327 def bookmarks(self):
328 328 return {}
329 329
330 330 @CachedProperty
331 331 def branches_all(self):
332 332 all_branches = {}
333 333 all_branches.update(self.branches)
334 334 all_branches.update(self.branches_closed)
335 335 return all_branches
336 336
337 337 @CachedProperty
338 338 def tags(self):
339 339 return self._get_tags()
340 340
341 341 def _get_tags(self):
342 342 return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
343 343
344 344 def tag(self, name, user, commit_id=None, message=None, date=None,
345 345 **kwargs):
346 346 # TODO: fix this method to apply annotated tags correct with message
347 347 """
348 348 Creates and returns a tag for the given ``commit_id``.
349 349
350 350 :param name: name for new tag
351 351 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
352 352 :param commit_id: commit id for which new tag would be created
353 353 :param message: message of the tag's commit
354 354 :param date: date of tag's commit
355 355
356 356 :raises TagAlreadyExistError: if tag with same name already exists
357 357 """
358 358 if name in self.tags:
359 359 raise TagAlreadyExistError("Tag %s already exists" % name)
360 360 commit = self.get_commit(commit_id=commit_id)
361 361 message = message or "Added tag %s for commit %s" % (name, commit.raw_id)
362 362
363 363 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
364 364
365 365 self._invalidate_prop_cache('tags')
366 366 self._invalidate_prop_cache('_refs')
367 367
368 368 return commit
369 369
370 370 def remove_tag(self, name, user, message=None, date=None):
371 371 """
372 372 Removes tag with the given ``name``.
373 373
374 374 :param name: name of the tag to be removed
375 375 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
376 376 :param message: message of the tag's removal commit
377 377 :param date: date of tag's removal commit
378 378
379 379 :raises TagDoesNotExistError: if tag with given name does not exist
380 380 """
381 381 if name not in self.tags:
382 382 raise TagDoesNotExistError("Tag %s does not exist" % name)
383 383
384 384 self._remote.tag_remove(name)
385 385 self._invalidate_prop_cache('tags')
386 386 self._invalidate_prop_cache('_refs')
387 387
388 388 def _get_refs(self):
389 389 return self._remote.get_refs()
390 390
391 391 @CachedProperty
392 392 def _refs(self):
393 393 return self._get_refs()
394 394
395 395 @property
396 396 def _ref_tree(self):
397 397 node = tree = {}
398 398 for ref, sha in self._refs.iteritems():
399 399 path = ref.split('/')
400 400 for bit in path[:-1]:
401 401 node = node.setdefault(bit, {})
402 402 node[path[-1]] = sha
403 403 node = tree
404 404 return tree
405 405
406 406 def get_remote_ref(self, ref_name):
407 407 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
408 408 try:
409 409 return self._refs[ref_key]
410 410 except Exception:
411 411 return
412 412
413 413 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=True):
414 414 """
415 415 Returns `GitCommit` object representing commit from git repository
416 416 at the given `commit_id` or head (most recent commit) if None given.
417 417 """
418 418 if self.is_empty():
419 419 raise EmptyRepositoryError("There are no commits yet")
420 420
421 421 if commit_id is not None:
422 422 self._validate_commit_id(commit_id)
423 423 try:
424 424 # we have cached idx, use it without contacting the remote
425 425 idx = self._commit_ids[commit_id]
426 426 return GitCommit(self, commit_id, idx, pre_load=pre_load)
427 427 except KeyError:
428 428 pass
429 429
430 430 elif commit_idx is not None:
431 431 self._validate_commit_idx(commit_idx)
432 432 try:
433 433 _commit_id = self.commit_ids[commit_idx]
434 434 if commit_idx < 0:
435 435 commit_idx = self.commit_ids.index(_commit_id)
436 436 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
437 437 except IndexError:
438 438 commit_id = commit_idx
439 439 else:
440 440 commit_id = "tip"
441 441
442 442 if translate_tag:
443 443 commit_id = self._lookup_commit(commit_id)
444 444
445 445 try:
446 446 idx = self._commit_ids[commit_id]
447 447 except KeyError:
448 448 idx = -1
449 449
450 450 return GitCommit(self, commit_id, idx, pre_load=pre_load)
451 451
452 452 def get_commits(
453 453 self, start_id=None, end_id=None, start_date=None, end_date=None,
454 454 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
455 455 """
456 456 Returns generator of `GitCommit` objects from start to end (both
457 457 are inclusive), in ascending date order.
458 458
459 459 :param start_id: None, str(commit_id)
460 460 :param end_id: None, str(commit_id)
461 461 :param start_date: if specified, commits with commit date less than
462 462 ``start_date`` would be filtered out from returned set
463 463 :param end_date: if specified, commits with commit date greater than
464 464 ``end_date`` would be filtered out from returned set
465 465 :param branch_name: if specified, commits not reachable from given
466 466 branch would be filtered out from returned set
467 467 :param show_hidden: Show hidden commits such as obsolete or hidden from
468 468 Mercurial evolve
469 469 :raise BranchDoesNotExistError: If given `branch_name` does not
470 470 exist.
471 471 :raise CommitDoesNotExistError: If commits for given `start` or
472 472 `end` could not be found.
473 473
474 474 """
475 475 if self.is_empty():
476 476 raise EmptyRepositoryError("There are no commits yet")
477 477
478 478 self._validate_branch_name(branch_name)
479 479
480 480 if start_id is not None:
481 481 self._validate_commit_id(start_id)
482 482 if end_id is not None:
483 483 self._validate_commit_id(end_id)
484 484
485 485 start_raw_id = self._lookup_commit(start_id)
486 486 start_pos = self._commit_ids[start_raw_id] if start_id else None
487 487 end_raw_id = self._lookup_commit(end_id)
488 488 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
489 489
490 490 if None not in [start_id, end_id] and start_pos > end_pos:
491 491 raise RepositoryError(
492 492 "Start commit '%s' cannot be after end commit '%s'" %
493 493 (start_id, end_id))
494 494
495 495 if end_pos is not None:
496 496 end_pos += 1
497 497
498 498 filter_ = []
499 499 if branch_name:
500 500 filter_.append({'branch_name': branch_name})
501 501 if start_date and not end_date:
502 502 filter_.append({'since': start_date})
503 503 if end_date and not start_date:
504 504 filter_.append({'until': end_date})
505 505 if start_date and end_date:
506 506 filter_.append({'since': start_date})
507 507 filter_.append({'until': end_date})
508 508
509 509 # if start_pos or end_pos:
510 510 # filter_.append({'start': start_pos})
511 511 # filter_.append({'end': end_pos})
512 512
513 513 if filter_:
514 514 revfilters = {
515 515 'branch_name': branch_name,
516 516 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
517 517 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
518 518 'start': start_pos,
519 519 'end': end_pos,
520 520 }
521 521 commit_ids = self._get_commit_ids(filters=revfilters)
522 522
523 523 else:
524 524 commit_ids = self.commit_ids
525 525
526 526 if start_pos or end_pos:
527 527 commit_ids = commit_ids[start_pos: end_pos]
528 528
529 529 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
530 530 translate_tag=translate_tags)
531 531
532 532 def get_diff(
533 533 self, commit1, commit2, path='', ignore_whitespace=False,
534 534 context=3, path1=None):
535 535 """
536 536 Returns (git like) *diff*, as plain text. Shows changes introduced by
537 537 ``commit2`` since ``commit1``.
538 538
539 539 :param commit1: Entry point from which diff is shown. Can be
540 540 ``self.EMPTY_COMMIT`` - in this case, patch showing all
541 541 the changes since empty state of the repository until ``commit2``
542 542 :param commit2: Until which commits changes should be shown.
543 543 :param ignore_whitespace: If set to ``True``, would not show whitespace
544 544 changes. Defaults to ``False``.
545 545 :param context: How many lines before/after changed lines should be
546 546 shown. Defaults to ``3``.
547 547 """
548 548 self._validate_diff_commits(commit1, commit2)
549 549 if path1 is not None and path1 != path:
550 550 raise ValueError("Diff of two different paths not supported.")
551 551
552 flags = [
553 '-U%s' % context, '--full-index', '--binary', '-p',
554 '-M', '--abbrev=40']
555 if ignore_whitespace:
556 flags.append('-w')
557
558 if commit1 == self.EMPTY_COMMIT:
559 cmd = ['show'] + flags + [commit2.raw_id]
552 if path:
553 file_filter = path
560 554 else:
561 cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]
562
563 if path:
564 cmd.extend(['--', path])
555 file_filter = None
565 556
566 stdout, __ = self.run_git_command(cmd)
567 # If we used 'show' command, strip first few lines (until actual diff
568 # starts)
569 if commit1 == self.EMPTY_COMMIT:
570 lines = stdout.splitlines()
571 x = 0
572 for line in lines:
573 if line.startswith('diff'):
574 break
575 x += 1
576 # Append new line just like 'diff' command do
577 stdout = '\n'.join(lines[x:]) + '\n'
578 return GitDiff(stdout)
557 diff = self._remote.diff(
558 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
559 opt_ignorews=ignore_whitespace,
560 context=context)
561 return GitDiff(diff)
579 562
580 563 def strip(self, commit_id, branch_name):
581 564 commit = self.get_commit(commit_id=commit_id)
582 565 if commit.merge:
583 566 raise Exception('Cannot reset to merge commit')
584 567
585 568 # parent is going to be the new head now
586 569 commit = commit.parents[0]
587 570 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
588 571
589 572 # clear cached properties
590 573 self._invalidate_prop_cache('commit_ids')
591 574 self._invalidate_prop_cache('_refs')
592 575 self._invalidate_prop_cache('branches')
593 576
594 577 return len(self.commit_ids)
595 578
596 579 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
597 580 if commit_id1 == commit_id2:
598 581 return commit_id1
599 582
600 583 if self != repo2:
601 584 commits = self._remote.get_missing_revs(
602 585 commit_id1, commit_id2, repo2.path)
603 586 if commits:
604 587 commit = repo2.get_commit(commits[-1])
605 588 if commit.parents:
606 589 ancestor_id = commit.parents[0].raw_id
607 590 else:
608 591 ancestor_id = None
609 592 else:
610 593 # no commits from other repo, ancestor_id is the commit_id2
611 594 ancestor_id = commit_id2
612 595 else:
613 596 output, __ = self.run_git_command(
614 597 ['merge-base', commit_id1, commit_id2])
615 598 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
616 599
617 600 return ancestor_id
618 601
619 602 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
620 603 repo1 = self
621 604 ancestor_id = None
622 605
623 606 if commit_id1 == commit_id2:
624 607 commits = []
625 608 elif repo1 != repo2:
626 609 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
627 610 repo2.path)
628 611 commits = [
629 612 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
630 613 for commit_id in reversed(missing_ids)]
631 614 else:
632 615 output, __ = repo1.run_git_command(
633 616 ['log', '--reverse', '--pretty=format: %H', '-s',
634 617 '%s..%s' % (commit_id1, commit_id2)])
635 618 commits = [
636 619 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
637 620 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
638 621
639 622 return commits
640 623
641 624 @LazyProperty
642 625 def in_memory_commit(self):
643 626 """
644 627 Returns ``GitInMemoryCommit`` object for this repository.
645 628 """
646 629 return GitInMemoryCommit(self)
647 630
648 631 def pull(self, url, commit_ids=None, update_after=False):
649 632 """
650 633 Pull changes from an external location. Pull is different in GIT
651 634 than fetch, since it also does a checkout
652 635
653 636 :param commit_ids: Optional. Can be set to a list of commit ids
654 637 which shall be pulled from the other repository.
655 638 """
656 639 refs = None
657 640 if commit_ids is not None:
658 641 remote_refs = self._remote.get_remote_refs(url)
659 642 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
660 643 self._remote.pull(url, refs=refs, update_after=update_after)
661 644 self._remote.invalidate_vcs_cache()
662 645
663 646 def fetch(self, url, commit_ids=None):
664 647 """
665 648 Fetch all git objects from external location.
666 649 """
667 650 self._remote.sync_fetch(url, refs=commit_ids)
668 651 self._remote.invalidate_vcs_cache()
669 652
670 653 def push(self, url):
671 654 refs = None
672 655 self._remote.sync_push(url, refs=refs)
673 656
674 657 def set_refs(self, ref_name, commit_id):
675 658 self._remote.set_refs(ref_name, commit_id)
676 659 self._invalidate_prop_cache('_refs')
677 660
678 661 def remove_ref(self, ref_name):
679 662 self._remote.remove_ref(ref_name)
680 663 self._invalidate_prop_cache('_refs')
681 664
682 665 def _update_server_info(self):
683 666 """
684 667 runs git's update-server-info command in this repo instance
685 668 """
686 669 self._remote.update_server_info()
687 670
688 671 def _current_branch(self):
689 672 """
690 673 Return the name of the current branch.
691 674
692 675 It only works for non bare repositories (i.e. repositories with a
693 676 working copy)
694 677 """
695 678 if self.bare:
696 679 raise RepositoryError('Bare git repos do not have active branches')
697 680
698 681 if self.is_empty():
699 682 return None
700 683
701 684 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
702 685 return stdout.strip()
703 686
704 687 def _checkout(self, branch_name, create=False, force=False):
705 688 """
706 689 Checkout a branch in the working directory.
707 690
708 691 It tries to create the branch if create is True, failing if the branch
709 692 already exists.
710 693
711 694 It only works for non bare repositories (i.e. repositories with a
712 695 working copy)
713 696 """
714 697 if self.bare:
715 698 raise RepositoryError('Cannot checkout branches in a bare git repo')
716 699
717 700 cmd = ['checkout']
718 701 if force:
719 702 cmd.append('-f')
720 703 if create:
721 704 cmd.append('-b')
722 705 cmd.append(branch_name)
723 706 self.run_git_command(cmd, fail_on_stderr=False)
724 707
725 708 def _create_branch(self, branch_name, commit_id):
726 709 """
727 710 creates a branch in a GIT repo
728 711 """
729 712 self._remote.create_branch(branch_name, commit_id)
730 713
731 714 def _identify(self):
732 715 """
733 716 Return the current state of the working directory.
734 717 """
735 718 if self.bare:
736 719 raise RepositoryError('Bare git repos do not have active branches')
737 720
738 721 if self.is_empty():
739 722 return None
740 723
741 724 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
742 725 return stdout.strip()
743 726
744 727 def _local_clone(self, clone_path, branch_name, source_branch=None):
745 728 """
746 729 Create a local clone of the current repo.
747 730 """
748 731 # N.B.(skreft): the --branch option is required as otherwise the shallow
749 732 # clone will only fetch the active branch.
750 733 cmd = ['clone', '--branch', branch_name,
751 734 self.path, os.path.abspath(clone_path)]
752 735
753 736 self.run_git_command(cmd, fail_on_stderr=False)
754 737
755 738 # if we get the different source branch, make sure we also fetch it for
756 739 # merge conditions
757 740 if source_branch and source_branch != branch_name:
758 741 # check if the ref exists.
759 742 shadow_repo = GitRepository(os.path.abspath(clone_path))
760 743 if shadow_repo.get_remote_ref(source_branch):
761 744 cmd = ['fetch', self.path, source_branch]
762 745 self.run_git_command(cmd, fail_on_stderr=False)
763 746
764 747 def _local_fetch(self, repository_path, branch_name, use_origin=False):
765 748 """
766 749 Fetch a branch from a local repository.
767 750 """
768 751 repository_path = os.path.abspath(repository_path)
769 752 if repository_path == self.path:
770 753 raise ValueError('Cannot fetch from the same repository')
771 754
772 755 if use_origin:
773 756 branch_name = '+{branch}:refs/heads/{branch}'.format(
774 757 branch=branch_name)
775 758
776 759 cmd = ['fetch', '--no-tags', '--update-head-ok',
777 760 repository_path, branch_name]
778 761 self.run_git_command(cmd, fail_on_stderr=False)
779 762
780 763 def _local_reset(self, branch_name):
781 764 branch_name = '{}'.format(branch_name)
782 765 cmd = ['reset', '--hard', branch_name, '--']
783 766 self.run_git_command(cmd, fail_on_stderr=False)
784 767
785 768 def _last_fetch_heads(self):
786 769 """
787 770 Return the last fetched heads that need merging.
788 771
789 772 The algorithm is defined at
790 773 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
791 774 """
792 775 if not self.bare:
793 776 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
794 777 else:
795 778 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
796 779
797 780 heads = []
798 781 with open(fetch_heads_path) as f:
799 782 for line in f:
800 783 if ' not-for-merge ' in line:
801 784 continue
802 785 line = re.sub('\t.*', '', line, flags=re.DOTALL)
803 786 heads.append(line)
804 787
805 788 return heads
806 789
807 790 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
808 791 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
809 792
810 793 def _local_pull(self, repository_path, branch_name, ff_only=True):
811 794 """
812 795 Pull a branch from a local repository.
813 796 """
814 797 if self.bare:
815 798 raise RepositoryError('Cannot pull into a bare git repository')
816 799 # N.B.(skreft): The --ff-only option is to make sure this is a
817 800 # fast-forward (i.e., we are only pulling new changes and there are no
818 801 # conflicts with our current branch)
819 802 # Additionally, that option needs to go before --no-tags, otherwise git
820 803 # pull complains about it being an unknown flag.
821 804 cmd = ['pull']
822 805 if ff_only:
823 806 cmd.append('--ff-only')
824 807 cmd.extend(['--no-tags', repository_path, branch_name])
825 808 self.run_git_command(cmd, fail_on_stderr=False)
826 809
827 810 def _local_merge(self, merge_message, user_name, user_email, heads):
828 811 """
829 812 Merge the given head into the checked out branch.
830 813
831 814 It will force a merge commit.
832 815
833 816 Currently it raises an error if the repo is empty, as it is not possible
834 817 to create a merge commit in an empty repo.
835 818
836 819 :param merge_message: The message to use for the merge commit.
837 820 :param heads: the heads to merge.
838 821 """
839 822 if self.bare:
840 823 raise RepositoryError('Cannot merge into a bare git repository')
841 824
842 825 if not heads:
843 826 return
844 827
845 828 if self.is_empty():
846 829 # TODO(skreft): do something more robust in this case.
847 830 raise RepositoryError(
848 831 'Do not know how to merge into empty repositories yet')
849 832
850 833 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
851 834 # commit message. We also specify the user who is doing the merge.
852 835 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
853 836 '-c', 'user.email=%s' % safe_str(user_email),
854 837 'merge', '--no-ff', '-m', safe_str(merge_message)]
855 838 cmd.extend(heads)
856 839 try:
857 840 output = self.run_git_command(cmd, fail_on_stderr=False)
858 841 except RepositoryError:
859 842 # Cleanup any merge leftovers
860 843 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
861 844 raise
862 845
863 846 def _local_push(
864 847 self, source_branch, repository_path, target_branch,
865 848 enable_hooks=False, rc_scm_data=None):
866 849 """
867 850 Push the source_branch to the given repository and target_branch.
868 851
869 852 Currently, if the target_branch is not master and the target repo is
870 853 empty, the push will work, but then GitRepository won't be able to find
871 854 the pushed branch or the commits, as the HEAD will be corrupted (i.e.,
872 855 pointing to master, which does not exist).
873 856
874 857 It does not run the hooks in the target repo.
875 858 """
876 859 # TODO(skreft): deal with the case in which the target repo is empty,
877 860 # and the target_branch is not master.
878 861 target_repo = GitRepository(repository_path)
879 862 if (not target_repo.bare and
880 863 target_repo._current_branch() == target_branch):
881 864 # Git prevents pushing to the checked out branch, so simulate it by
882 865 # pulling into the target repository.
883 866 target_repo._local_pull(self.path, source_branch)
884 867 else:
885 868 cmd = ['push', os.path.abspath(repository_path),
886 869 '%s:%s' % (source_branch, target_branch)]
887 870 gitenv = {}
888 871 if rc_scm_data:
889 872 gitenv.update({'RC_SCM_DATA': rc_scm_data})
890 873
891 874 if not enable_hooks:
892 875 gitenv['RC_SKIP_HOOKS'] = '1'
893 876 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
894 877
895 878 def _get_new_pr_branch(self, source_branch, target_branch):
896 879 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
897 880 pr_branches = []
898 881 for branch in self.branches:
899 882 if branch.startswith(prefix):
900 883 pr_branches.append(int(branch[len(prefix):]))
901 884
902 885 if not pr_branches:
903 886 branch_id = 0
904 887 else:
905 888 branch_id = max(pr_branches) + 1
906 889
907 890 return '%s%d' % (prefix, branch_id)
908 891
909 892 def _maybe_prepare_merge_workspace(
910 893 self, repo_id, workspace_id, target_ref, source_ref):
911 894 shadow_repository_path = self._get_shadow_repository_path(
912 895 repo_id, workspace_id)
913 896 if not os.path.exists(shadow_repository_path):
914 897 self._local_clone(
915 898 shadow_repository_path, target_ref.name, source_ref.name)
916 899 log.debug('Prepared %s shadow repository in %s',
917 900 self.alias, shadow_repository_path)
918 901
919 902 return shadow_repository_path
920 903
921 904 def _merge_repo(self, repo_id, workspace_id, target_ref,
922 905 source_repo, source_ref, merge_message,
923 906 merger_name, merger_email, dry_run=False,
924 907 use_rebase=False, close_branch=False):
925 908
926 909 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
927 910 'rebase' if use_rebase else 'merge', dry_run)
928 911 if target_ref.commit_id != self.branches[target_ref.name]:
929 912 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
930 913 target_ref.commit_id, self.branches[target_ref.name])
931 914 return MergeResponse(
932 915 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
933 916 metadata={'target_ref': target_ref})
934 917
935 918 shadow_repository_path = self._maybe_prepare_merge_workspace(
936 919 repo_id, workspace_id, target_ref, source_ref)
937 920 shadow_repo = self.get_shadow_instance(shadow_repository_path)
938 921
939 922 # checkout source, if it's different. Otherwise we could not
940 923 # fetch proper commits for merge testing
941 924 if source_ref.name != target_ref.name:
942 925 if shadow_repo.get_remote_ref(source_ref.name):
943 926 shadow_repo._checkout(source_ref.name, force=True)
944 927
945 928 # checkout target, and fetch changes
946 929 shadow_repo._checkout(target_ref.name, force=True)
947 930
948 931 # fetch/reset pull the target, in case it is changed
949 932 # this handles even force changes
950 933 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
951 934 shadow_repo._local_reset(target_ref.name)
952 935
953 936 # Need to reload repo to invalidate the cache, or otherwise we cannot
954 937 # retrieve the last target commit.
955 938 shadow_repo = self.get_shadow_instance(shadow_repository_path)
956 939 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
957 940 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
958 941 target_ref, target_ref.commit_id,
959 942 shadow_repo.branches[target_ref.name])
960 943 return MergeResponse(
961 944 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
962 945 metadata={'target_ref': target_ref})
963 946
964 947 # calculate new branch
965 948 pr_branch = shadow_repo._get_new_pr_branch(
966 949 source_ref.name, target_ref.name)
967 950 log.debug('using pull-request merge branch: `%s`', pr_branch)
968 951 # checkout to temp branch, and fetch changes
969 952 shadow_repo._checkout(pr_branch, create=True)
970 953 try:
971 954 shadow_repo._local_fetch(source_repo.path, source_ref.name)
972 955 except RepositoryError:
973 956 log.exception('Failure when doing local fetch on '
974 957 'shadow repo: %s', shadow_repo)
975 958 return MergeResponse(
976 959 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
977 960 metadata={'source_ref': source_ref})
978 961
979 962 merge_ref = None
980 963 merge_failure_reason = MergeFailureReason.NONE
981 964 metadata = {}
982 965 try:
983 966 shadow_repo._local_merge(merge_message, merger_name, merger_email,
984 967 [source_ref.commit_id])
985 968 merge_possible = True
986 969
987 970 # Need to invalidate the cache, or otherwise we
988 971 # cannot retrieve the merge commit.
989 972 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
990 973 merge_commit_id = shadow_repo.branches[pr_branch]
991 974
992 975 # Set a reference pointing to the merge commit. This reference may
993 976 # be used to easily identify the last successful merge commit in
994 977 # the shadow repository.
995 978 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
996 979 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
997 980 except RepositoryError:
998 981 log.exception('Failure when doing local merge on git shadow repo')
999 982 merge_possible = False
1000 983 merge_failure_reason = MergeFailureReason.MERGE_FAILED
1001 984
1002 985 if merge_possible and not dry_run:
1003 986 try:
1004 987 shadow_repo._local_push(
1005 988 pr_branch, self.path, target_ref.name, enable_hooks=True,
1006 989 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1007 990 merge_succeeded = True
1008 991 except RepositoryError:
1009 992 log.exception(
1010 993 'Failure when doing local push from the shadow '
1011 994 'repository to the target repository at %s.', self.path)
1012 995 merge_succeeded = False
1013 996 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1014 997 metadata['target'] = 'git shadow repo'
1015 998 metadata['merge_commit'] = pr_branch
1016 999 else:
1017 1000 merge_succeeded = False
1018 1001
1019 1002 return MergeResponse(
1020 1003 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1021 1004 metadata=metadata)
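
The reworked get_diff above no longer assembles 'git show' / 'git diff' argument lists or strips the 'show' preamble; the optional path argument is forwarded to the remote diff call as file_filter. A usage sketch, assuming the 'repo' object from the earlier sketch (the commit ids are hypothetical placeholders):

    commit1 = repo.get_commit(commit_id='aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa')
    commit2 = repo.get_commit(commit_id='bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb')
    diff = repo.get_diff(
        commit1, commit2,
        path='docs/index.rst',      # hypothetical; limits the diff like 'git diff -- <path>'
        ignore_whitespace=True,     # maps to opt_ignorews on the remote call
        context=5)                  # lines of context around each hunk
    # 'diff' is a GitDiff object wrapping the plain-text unified diff
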
@@ -1,381 +1,380 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG commit module
23 23 """
24 24
25 25 import os
26 26
27 27 from zope.cachedescriptors.property import Lazy as LazyProperty
28 28
29 29 from rhodecode.lib.datelib import utcdate_fromtimestamp
30 30 from rhodecode.lib.utils import safe_str, safe_unicode
31 31 from rhodecode.lib.vcs import path as vcspath
32 32 from rhodecode.lib.vcs.backends import base
33 33 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
34 34 from rhodecode.lib.vcs.exceptions import CommitError
35 35 from rhodecode.lib.vcs.nodes import (
36 36 AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode,
37 37 NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode,
38 38 LargeFileNode, LARGEFILE_PREFIX)
39 39 from rhodecode.lib.vcs.utils.paths import get_dirs_for_path
40 40
41 41
42 42 class MercurialCommit(base.BaseCommit):
43 43 """
44 44 Represents the state of the repository at a single commit.
45 45 """
46 46
47 47 _filter_pre_load = [
48 48 # git specific property not supported here
49 49 "_commit",
50 50 ]
51 51
52 52 def __init__(self, repository, raw_id, idx, pre_load=None):
53 53 raw_id = safe_str(raw_id)
54 54
55 55 self.repository = repository
56 56 self._remote = repository._remote
57 57
58 58 self.raw_id = raw_id
59 59 self.idx = idx
60 60
61 61 self._set_bulk_properties(pre_load)
62 62
63 63 # caches
64 64 self.nodes = {}
65 65
66 66 def _set_bulk_properties(self, pre_load):
67 67 if not pre_load:
68 68 return
69 69 pre_load = [entry for entry in pre_load
70 70 if entry not in self._filter_pre_load]
71 71 if not pre_load:
72 72 return
73 73
74 74 result = self._remote.bulk_request(self.raw_id, pre_load)
75 75 for attr, value in result.items():
76 76 if attr in ["author", "branch", "message"]:
77 77 value = safe_unicode(value)
78 78 elif attr == "affected_files":
79 79 value = map(safe_unicode, value)
80 80 elif attr == "date":
81 81 value = utcdate_fromtimestamp(*value)
82 82 elif attr in ["children", "parents"]:
83 83 value = self._make_commits(value)
84 84 elif attr in ["phase"]:
85 85 value = self._get_phase_text(value)
86 86 self.__dict__[attr] = value
87 87
88 88 @LazyProperty
89 89 def tags(self):
90 90 tags = [name for name, commit_id in self.repository.tags.iteritems()
91 91 if commit_id == self.raw_id]
92 92 return tags
93 93
94 94 @LazyProperty
95 95 def branch(self):
96 96 return safe_unicode(self._remote.ctx_branch(self.raw_id))
97 97
98 98 @LazyProperty
99 99 def bookmarks(self):
100 100 bookmarks = [
101 101 name for name, commit_id in self.repository.bookmarks.iteritems()
102 102 if commit_id == self.raw_id]
103 103 return bookmarks
104 104
105 105 @LazyProperty
106 106 def message(self):
107 107 return safe_unicode(self._remote.ctx_description(self.raw_id))
108 108
109 109 @LazyProperty
110 110 def committer(self):
111 111 return safe_unicode(self.author)
112 112
113 113 @LazyProperty
114 114 def author(self):
115 115 return safe_unicode(self._remote.ctx_user(self.raw_id))
116 116
117 117 @LazyProperty
118 118 def date(self):
119 119 return utcdate_fromtimestamp(*self._remote.ctx_date(self.raw_id))
120 120
121 121 @LazyProperty
122 122 def status(self):
123 123 """
124 124 Returns modified, added, removed and deleted files for the current commit
125 125 """
126 126 return self._remote.ctx_status(self.raw_id)
127 127
128 128 @LazyProperty
129 129 def _file_paths(self):
130 130 return self._remote.ctx_list(self.raw_id)
131 131
132 132 @LazyProperty
133 133 def _dir_paths(self):
134 134 p = list(set(get_dirs_for_path(*self._file_paths)))
135 135 p.insert(0, '')
136 136 return p
137 137
138 138 @LazyProperty
139 139 def _paths(self):
140 140 return self._dir_paths + self._file_paths
141 141
142 142 @LazyProperty
143 143 def id(self):
144 144 if self.last:
145 145 return u'tip'
146 146 return self.short_id
147 147
148 148 @LazyProperty
149 149 def short_id(self):
150 150 return self.raw_id[:12]
151 151
152 152 def _make_commits(self, indexes, pre_load=None):
153 153 return [self.repository.get_commit(commit_idx=idx, pre_load=pre_load)
154 154 for idx in indexes if idx >= 0]
155 155
156 156 @LazyProperty
157 157 def parents(self):
158 158 """
159 159 Returns list of parent commits.
160 160 """
161 161 parents = self._remote.ctx_parents(self.raw_id)
162 162 return self._make_commits(parents)
163 163
164 164 def _get_phase_text(self, phase_id):
165 165 return {
166 166 0: 'public',
167 167 1: 'draft',
168 168 2: 'secret',
169 169 }.get(phase_id) or ''
170 170
171 171 @LazyProperty
172 172 def phase(self):
173 173 phase_id = self._remote.ctx_phase(self.raw_id)
174 174 phase_text = self._get_phase_text(phase_id)
175 175
176 176 return safe_unicode(phase_text)
177 177
178 178 @LazyProperty
179 179 def obsolete(self):
180 180 obsolete = self._remote.ctx_obsolete(self.raw_id)
181 181 return obsolete
182 182
183 183 @LazyProperty
184 184 def hidden(self):
185 185 hidden = self._remote.ctx_hidden(self.raw_id)
186 186 return hidden
187 187
188 188 @LazyProperty
189 189 def children(self):
190 190 """
191 191 Returns list of child commits.
192 192 """
193 193 children = self._remote.ctx_children(self.raw_id)
194 194 return self._make_commits(children)
195 195
196 196 def _fix_path(self, path):
197 197 """
198 198 Mercurial keeps filenodes as str so we need to encode from unicode
199 199 to str.
200 200 """
201 201 return safe_str(super(MercurialCommit, self)._fix_path(path))
202 202
203 203 def _get_kind(self, path):
204 204 path = self._fix_path(path)
205 205 if path in self._file_paths:
206 206 return NodeKind.FILE
207 207 elif path in self._dir_paths:
208 208 return NodeKind.DIR
209 209 else:
210 210 raise CommitError(
211 211 "Node does not exist at the given path '%s'" % (path, ))
212 212
213 213 def _get_filectx(self, path):
214 214 path = self._fix_path(path)
215 215 if self._get_kind(path) != NodeKind.FILE:
216 216 raise CommitError(
217 217 "File does not exist for idx %s at '%s'" % (self.raw_id, path))
218 218 return path
219 219
220 220 def get_file_mode(self, path):
221 221 """
222 222 Returns stat mode of the file at the given ``path``.
223 223 """
224 224 path = self._get_filectx(path)
225 225 if 'x' in self._remote.fctx_flags(self.raw_id, path):
226 226 return base.FILEMODE_EXECUTABLE
227 227 else:
228 228 return base.FILEMODE_DEFAULT
229 229
230 230 def is_link(self, path):
231 231 path = self._get_filectx(path)
232 232 return 'l' in self._remote.fctx_flags(self.raw_id, path)
233 233
234 234 def get_file_content(self, path):
235 235 """
236 236 Returns content of the file at given ``path``.
237 237 """
238 238 path = self._get_filectx(path)
239 239 return self._remote.fctx_node_data(self.raw_id, path)
240 240
241 241 def get_file_size(self, path):
242 242 """
243 243 Returns size of the file at given ``path``.
244 244 """
245 245 path = self._get_filectx(path)
246 246 return self._remote.fctx_size(self.raw_id, path)
247 247
248 248 def get_path_history(self, path, limit=None, pre_load=None):
249 249 """
250 250 Returns the history of a file as a reversed list of `MercurialCommit`
251 251 objects for which the file at the given ``path`` has been modified.
252 252 """
253 253 path = self._get_filectx(path)
254 254 hist = self._remote.node_history(self.raw_id, path, limit)
255 255 return [
256 256 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
257 257 for commit_id in hist]
258 258
259 259 def get_file_annotate(self, path, pre_load=None):
260 260 """
261 261 Returns a generator of four-element tuples with
262 262 lineno, commit_id, commit lazy loader and line content
263 263 """
264 264 result = self._remote.fctx_annotate(self.raw_id, path)
265 265
266 266 for ln_no, commit_id, content in result:
267 267 yield (
268 268 ln_no, commit_id,
269 lambda: self.repository.get_commit(commit_id=commit_id,
270 pre_load=pre_load),
269 lambda: self.repository.get_commit(commit_id=commit_id, pre_load=pre_load),
271 270 content)
272 271
273 272 def get_nodes(self, path):
274 273 """
275 274 Returns a combined list of ``DirNode`` and ``FileNode`` objects representing
276 275 the state of the commit at the given ``path``. If the node at the given
277 276 ``path`` is not an instance of ``DirNode``, a CommitError is raised.
278 277 """
279 278
280 279 if self._get_kind(path) != NodeKind.DIR:
281 280 raise CommitError(
282 281 "Directory does not exist for idx %s at '%s'" % (self.raw_id, path))
283 282 path = self._fix_path(path)
284 283
285 284 filenodes = [
286 285 FileNode(f, commit=self) for f in self._file_paths
287 286 if os.path.dirname(f) == path]
288 287 # TODO: johbo: Check if this can be done in a more obvious way
289 288 dirs = path == '' and '' or [
290 289 d for d in self._dir_paths
291 290 if d and vcspath.dirname(d) == path]
292 291 dirnodes = [
293 292 DirNode(d, commit=self) for d in dirs
294 293 if os.path.dirname(d) == path]
295 294
296 295 alias = self.repository.alias
297 296 for k, vals in self._submodules.iteritems():
298 297 if vcspath.dirname(k) == path:
299 298 loc = vals[0]
300 299 commit = vals[1]
301 300 dirnodes.append(SubModuleNode(k, url=loc, commit=commit, alias=alias))
302 301
303 302 nodes = dirnodes + filenodes
304 303 for node in nodes:
305 304 if node.path not in self.nodes:
306 305 self.nodes[node.path] = node
307 306 nodes.sort()
308 307
309 308 return nodes
310 309
311 310 def get_node(self, path, pre_load=None):
312 311 """
313 312 Returns the `Node` object at the given `path`. If there is no node at
314 313 the given `path`, `NodeDoesNotExistError` is raised.
315 314 """
316 315 path = self._fix_path(path)
317 316
318 317 if path not in self.nodes:
319 318 if path in self._file_paths:
320 319 node = FileNode(path, commit=self, pre_load=pre_load)
321 320 elif path in self._dir_paths:
322 321 if path == '':
323 322 node = RootNode(commit=self)
324 323 else:
325 324 node = DirNode(path, commit=self)
326 325 else:
327 326 raise self.no_node_at_path(path)
328 327
329 328 # cache node
330 329 self.nodes[path] = node
331 330 return self.nodes[path]
332 331
333 332 def get_largefile_node(self, path):
334 333
335 334 if self._remote.is_large_file(path):
336 335 # the content of the regular FileNode for that file is the hash of the largefile
337 336 file_id = self.get_file_content(path).strip()
338 337
339 338 if self._remote.in_largefiles_store(file_id):
340 339 lf_path = self._remote.store_path(file_id)
341 340 return LargeFileNode(lf_path, commit=self, org_path=path)
342 341 elif self._remote.in_user_cache(file_id):
343 342 lf_path = self._remote.store_path(file_id)
344 343 self._remote.link(file_id, path)
345 344 return LargeFileNode(lf_path, commit=self, org_path=path)
346 345
347 346 @LazyProperty
348 347 def _submodules(self):
349 348 """
350 349 Returns a dictionary with submodule information from the substate file
351 350 of the hg repository.
352 351 """
353 352 return self._remote.ctx_substate(self.raw_id)
354 353
355 354 @LazyProperty
356 355 def affected_files(self):
357 356 """
358 357 Gets a fast accessible file changes for given commit
359 358 """
360 359 return self._remote.ctx_files(self.raw_id)
361 360
362 361 @property
363 362 def added(self):
364 363 """
365 364 Returns list of added ``FileNode`` objects.
366 365 """
367 366 return AddedFileNodesGenerator([n for n in self.status[1]], self)
368 367
369 368 @property
370 369 def changed(self):
371 370 """
372 371 Returns list of modified ``FileNode`` objects.
373 372 """
374 373 return ChangedFileNodesGenerator([n for n in self.status[0]], self)
375 374
376 375 @property
377 376 def removed(self):
378 377 """
379 378 Returns list of removed ``FileNode`` objects.
380 379 """
381 380 return RemovedFileNodesGenerator([n for n in self.status[2]], self)
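The MercurialCommit class above follows the same lazy, cache-backed access pattern as its git counterpart: bulk properties can be primed via `pre_load`, while nodes, file contents and status are fetched on demand through the vcsserver remote. The short usage sketch below is an editorial illustration, not part of the diff; the repository path and the file name are assumptions, and the import location of `MercurialRepository` is assumed to be the hg backend package.

from rhodecode.lib.vcs.backends.hg import MercurialRepository  # assumed import path

repo = MercurialRepository('/path/to/hg-repo')      # assumed repository location
commit = repo.get_commit(pre_load=['author', 'date', 'message', 'parents'])

print(commit.short_id)                              # first 12 characters of raw_id
print(commit.phase)                                 # 'public', 'draft' or 'secret'

readme = commit.get_node('README.rst')              # FileNode, cached in commit.nodes
print(readme.content[:80])                          # content fetched via fctx_node_data

for node in commit.added:                           # AddedFileNodesGenerator built from commit.status
    print(node.path)

for past_commit in commit.get_path_history('README.rst', limit=5):
    print(past_commit.raw_id)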
@@ -1,1276 +1,1274 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import datetime
22 22 import mock
23 23 import os
24 24 import sys
25 25 import shutil
26 26
27 27 import pytest
28 28
29 29 from rhodecode.lib.utils import make_db_config
30 30 from rhodecode.lib.vcs.backends.base import Reference
31 31 from rhodecode.lib.vcs.backends.git import (
32 32 GitRepository, GitCommit, discover_git_version)
33 33 from rhodecode.lib.vcs.exceptions import (
34 34 RepositoryError, VCSError, NodeDoesNotExistError)
35 35 from rhodecode.lib.vcs.nodes import (
36 36 NodeKind, FileNode, DirNode, NodeState, SubModuleNode)
37 37 from rhodecode.tests import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
38 38 from rhodecode.tests.vcs.conftest import BackendTestMixin
39 39
40 40
41 41 pytestmark = pytest.mark.backends("git")
42 42
43 43
44 44 class TestGitRepository(object):
45 45
46 46 @pytest.fixture(autouse=True)
47 47 def prepare(self, request, baseapp):
48 48 self.repo = GitRepository(TEST_GIT_REPO, bare=True)
49 49 self.repo.count()
50 50
51 51 def get_clone_repo(self, tmp_path_factory):
52 52 """
53 53 Return a non-bare clone of the base repo.
54 54 """
55 55 clone_path = tmp_path_factory.mktemp('clone-url')
56 56 repo_clone = GitRepository(
57 57 clone_path, create=True, src_url=self.repo.path, bare=False)
58 58
59 59 return repo_clone
60 60
61 61 def get_empty_repo(self, tmp_path_factory, bare=False):
62 62 """
63 63 Return an empty repo, non-bare by default.
64 64 """
65 65 clone_path = tmp_path_factory.mktemp('empty-repo')
66 66 return GitRepository(clone_path, create=True, bare=bare)
67 67
68 68 def test_wrong_repo_path(self):
69 69 wrong_repo_path = '/tmp/errorrepo_git'
70 70 with pytest.raises(RepositoryError):
71 71 GitRepository(wrong_repo_path)
72 72
73 73 def test_repo_clone(self, tmp_path_factory):
74 74 repo = GitRepository(TEST_GIT_REPO)
75 75 clone_path = tmp_path_factory.mktemp('_') + '_' + TEST_GIT_REPO_CLONE
76 76 repo_clone = GitRepository(
77 77 clone_path,
78 78 src_url=TEST_GIT_REPO, create=True, do_workspace_checkout=True)
79 79
80 80 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
81 81 # Checking hashes of commits should be enough
82 82 for commit in repo.get_commits():
83 83 raw_id = commit.raw_id
84 84 assert raw_id == repo_clone.get_commit(raw_id).raw_id
85 85
86 86 def test_repo_clone_without_create(self):
87 87 with pytest.raises(RepositoryError):
88 88 GitRepository(
89 89 TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)
90 90
91 91 def test_repo_clone_with_update(self, tmp_path_factory):
92 92 repo = GitRepository(TEST_GIT_REPO)
93 93 clone_path = tmp_path_factory.mktemp('_') + '_' + TEST_GIT_REPO_CLONE + '_update'
94 94
95 95 repo_clone = GitRepository(
96 96 clone_path,
97 97 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=True)
98 98 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
99 99
100 100 # check if current workdir was updated
101 101 fpath = os.path.join(clone_path, 'MANIFEST.in')
102 102 assert os.path.isfile(fpath)
103 103
104 104 def test_repo_clone_without_update(self, tmp_path_factory):
105 105 repo = GitRepository(TEST_GIT_REPO)
106 106 clone_path = tmp_path_factory.mktemp('_') + '_' + TEST_GIT_REPO_CLONE + '_without_update'
107 107 repo_clone = GitRepository(
108 108 clone_path,
109 109 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=False)
110 110 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
111 111 # check if current workdir was *NOT* updated
112 112 fpath = os.path.join(clone_path, 'MANIFEST.in')
113 113 # Make sure it's not a bare repo
114 114 assert not repo_clone.bare
115 115 assert not os.path.isfile(fpath)
116 116
117 117 def test_repo_clone_into_bare_repo(self, tmp_path_factory):
118 118 repo = GitRepository(TEST_GIT_REPO)
119 119 clone_path = tmp_path_factory.mktemp('_') + '_' + TEST_GIT_REPO_CLONE + '_bare.git'
120 120 repo_clone = GitRepository(
121 121 clone_path, create=True, src_url=repo.path, bare=True)
122 122 assert repo_clone.bare
123 123
124 124 def test_create_repo_is_not_bare_by_default(self):
125 125 repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
126 126 assert not repo.bare
127 127
128 128 def test_create_bare_repo(self):
129 129 repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True)
130 130 assert repo.bare
131 131
132 132 def test_update_server_info(self):
133 133 self.repo._update_server_info()
134 134
135 135 def test_fetch(self, vcsbackend_git):
136 136 # Note: This is a git-specific part of the API; it's only implemented
137 137 # by the git backend.
138 138 source_repo = vcsbackend_git.repo
139 139 target_repo = vcsbackend_git.create_repo(bare=True)
140 140 target_repo.fetch(source_repo.path)
141 141 # Note: Get a fresh instance, avoids caching trouble
142 142 target_repo = vcsbackend_git.backend(target_repo.path)
143 143 assert len(source_repo.commit_ids) == len(target_repo.commit_ids)
144 144
145 145 def test_commit_ids(self):
146 146 # there are 112 commits (by now)
147 147 # so we can assume they would be available from now on
148 148 subset = {'c1214f7e79e02fc37156ff215cd71275450cffc3',
149 149 '38b5fe81f109cb111f549bfe9bb6b267e10bc557',
150 150 'fa6600f6848800641328adbf7811fd2372c02ab2',
151 151 '102607b09cdd60e2793929c4f90478be29f85a17',
152 152 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
153 153 '2d1028c054665b962fa3d307adfc923ddd528038',
154 154 'd7e0d30fbcae12c90680eb095a4f5f02505ce501',
155 155 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
156 156 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
157 157 '8430a588b43b5d6da365400117c89400326e7992',
158 158 'd955cd312c17b02143c04fa1099a352b04368118',
159 159 'f67b87e5c629c2ee0ba58f85197e423ff28d735b',
160 160 'add63e382e4aabc9e1afdc4bdc24506c269b7618',
161 161 'f298fe1189f1b69779a4423f40b48edf92a703fc',
162 162 'bd9b619eb41994cac43d67cf4ccc8399c1125808',
163 163 '6e125e7c890379446e98980d8ed60fba87d0f6d1',
164 164 'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd',
165 165 '0b05e4ed56c802098dfc813cbe779b2f49e92500',
166 166 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
167 167 '45223f8f114c64bf4d6f853e3c35a369a6305520',
168 168 'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
169 169 'f5ea29fc42ef67a2a5a7aecff10e1566699acd68',
170 170 '27d48942240f5b91dfda77accd2caac94708cc7d',
171 171 '622f0eb0bafd619d2560c26f80f09e3b0b0d78af',
172 172 'e686b958768ee96af8029fe19c6050b1a8dd3b2b'}
173 173 assert subset.issubset(set(self.repo.commit_ids))
174 174
175 175 def test_slicing(self):
176 176 # 4 1 5 10 95
177 177 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
178 178 (10, 20, 10), (5, 100, 95)]:
179 179 commit_ids = list(self.repo[sfrom:sto])
180 180 assert len(commit_ids) == size
181 181 assert commit_ids[0] == self.repo.get_commit(commit_idx=sfrom)
182 182 assert commit_ids[-1] == self.repo.get_commit(commit_idx=sto - 1)
183 183
184 184 def test_branches(self):
185 185 # TODO: Need more tests here
186 186 # Removed (those are 'remotes' branches for cloned repo)
187 187 # assert 'master' in self.repo.branches
188 188 # assert 'gittree' in self.repo.branches
189 189 # assert 'web-branch' in self.repo.branches
190 190 for __, commit_id in self.repo.branches.items():
191 191 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
192 192
193 193 def test_tags(self):
194 194 # TODO: Need more tests here
195 195 assert 'v0.1.1' in self.repo.tags
196 196 assert 'v0.1.2' in self.repo.tags
197 197 for __, commit_id in self.repo.tags.items():
198 198 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
199 199
200 200 def _test_single_commit_cache(self, commit_id):
201 201 commit = self.repo.get_commit(commit_id)
202 202 assert commit_id in self.repo.commits
203 203 assert commit is self.repo.commits[commit_id]
204 204
205 205 def test_initial_commit(self):
206 206 commit_id = self.repo.commit_ids[0]
207 207 init_commit = self.repo.get_commit(commit_id)
208 208 init_author = init_commit.author
209 209
210 210 assert init_commit.message == 'initial import\n'
211 211 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
212 212 assert init_author == init_commit.committer
213 213 for path in ('vcs/__init__.py',
214 214 'vcs/backends/BaseRepository.py',
215 215 'vcs/backends/__init__.py'):
216 216 assert isinstance(init_commit.get_node(path), FileNode)
217 217 for path in ('', 'vcs', 'vcs/backends'):
218 218 assert isinstance(init_commit.get_node(path), DirNode)
219 219
220 220 with pytest.raises(NodeDoesNotExistError):
221 221 init_commit.get_node(path='foobar')
222 222
223 223 node = init_commit.get_node('vcs/')
224 224 assert hasattr(node, 'kind')
225 225 assert node.kind == NodeKind.DIR
226 226
227 227 node = init_commit.get_node('vcs')
228 228 assert hasattr(node, 'kind')
229 229 assert node.kind == NodeKind.DIR
230 230
231 231 node = init_commit.get_node('vcs/__init__.py')
232 232 assert hasattr(node, 'kind')
233 233 assert node.kind == NodeKind.FILE
234 234
235 235 def test_not_existing_commit(self):
236 236 with pytest.raises(RepositoryError):
237 237 self.repo.get_commit('f' * 40)
238 238
239 239 def test_commit10(self):
240 240
241 241 commit10 = self.repo.get_commit(self.repo.commit_ids[9])
242 242 README = """===
243 243 VCS
244 244 ===
245 245
246 246 Various Version Control System management abstraction layer for Python.
247 247
248 248 Introduction
249 249 ------------
250 250
251 251 TODO: To be written...
252 252
253 253 """
254 254 node = commit10.get_node('README.rst')
255 255 assert node.kind == NodeKind.FILE
256 256 assert node.content == README
257 257
258 258 def test_head(self):
259 259 assert self.repo.head == self.repo.get_commit().raw_id
260 260
261 261 def test_checkout_with_create(self, tmp_path_factory):
262 262 repo_clone = self.get_clone_repo(tmp_path_factory)
263 263
264 264 new_branch = 'new_branch'
265 265 assert repo_clone._current_branch() == 'master'
266 266 assert set(repo_clone.branches) == {'master'}
267 267 repo_clone._checkout(new_branch, create=True)
268 268
269 269 # Branches is a lazy property, so we need to recreate the Repo object.
270 270 repo_clone = GitRepository(repo_clone.path)
271 271 assert set(repo_clone.branches) == {'master', new_branch}
272 272 assert repo_clone._current_branch() == new_branch
273 273
274 274 def test_checkout(self, tmp_path_factory):
275 275 repo_clone = self.get_clone_repo(tmp_path_factory)
276 276
277 277 repo_clone._checkout('new_branch', create=True)
278 278 repo_clone._checkout('master')
279 279
280 280 assert repo_clone._current_branch() == 'master'
281 281
282 282 def test_checkout_same_branch(self, tmp_path_factory):
283 283 repo_clone = self.get_clone_repo(tmp_path_factory)
284 284
285 285 repo_clone._checkout('master')
286 286 assert repo_clone._current_branch() == 'master'
287 287
288 288 def test_checkout_branch_already_exists(self, tmp_path_factory):
289 289 repo_clone = self.get_clone_repo(tmp_path_factory)
290 290
291 291 with pytest.raises(RepositoryError):
292 292 repo_clone._checkout('master', create=True)
293 293
294 294 def test_checkout_bare_repo(self):
295 295 with pytest.raises(RepositoryError):
296 296 self.repo._checkout('master')
297 297
298 298 def test_current_branch_bare_repo(self):
299 299 with pytest.raises(RepositoryError):
300 300 self.repo._current_branch()
301 301
302 302 def test_current_branch_empty_repo(self, tmp_path_factory):
303 303 repo = self.get_empty_repo(tmp_path_factory)
304 304 assert repo._current_branch() is None
305 305
306 306 def test_local_clone(self, tmp_path_factory):
307 307 clone_path = tmp_path_factory.mktemp('test-local-clone')
308 308 self.repo._local_clone(clone_path, 'master')
309 309 repo_clone = GitRepository(clone_path)
310 310
311 311 assert self.repo.commit_ids == repo_clone.commit_ids
312 312
313 313 def test_local_clone_with_specific_branch(self, tmp_path_factory):
314 314 source_repo = self.get_clone_repo(tmp_path_factory)
315 315
316 316 # Create a new branch in source repo
317 317 new_branch_commit = source_repo.commit_ids[-3]
318 318 source_repo._checkout(new_branch_commit)
319 319 source_repo._checkout('new_branch', create=True)
320 320
321 321 clone_path = tmp_path_factory.mktemp('git-clone-path-1')
322 322 source_repo._local_clone(clone_path, 'new_branch')
323 323 repo_clone = GitRepository(clone_path)
324 324
325 325 assert source_repo.commit_ids[:-3 + 1] == repo_clone.commit_ids
326 326
327 327 clone_path = tmp_path_factory.mktemp('git-clone-path-2')
328 328 source_repo._local_clone(clone_path, 'master')
329 329 repo_clone = GitRepository(clone_path)
330 330
331 331 assert source_repo.commit_ids == repo_clone.commit_ids
332 332
333 333 def test_local_clone_fails_if_target_exists(self):
334 334 with pytest.raises(RepositoryError):
335 335 self.repo._local_clone(self.repo.path, 'master')
336 336
337 337 def test_local_fetch(self, tmp_path_factory):
338 338 target_repo = self.get_empty_repo(tmp_path_factory)
339 339 source_repo = self.get_clone_repo(tmp_path_factory)
340 340
341 341 # Create a new branch in source repo
342 342 master_commit = source_repo.commit_ids[-1]
343 343 new_branch_commit = source_repo.commit_ids[-3]
344 344 source_repo._checkout(new_branch_commit)
345 345 source_repo._checkout('new_branch', create=True)
346 346
347 347 target_repo._local_fetch(source_repo.path, 'new_branch')
348 348 assert target_repo._last_fetch_heads() == [new_branch_commit]
349 349
350 350 target_repo._local_fetch(source_repo.path, 'master')
351 351 assert target_repo._last_fetch_heads() == [master_commit]
352 352
353 353 def test_local_fetch_from_bare_repo(self, tmp_path_factory):
354 354 target_repo = self.get_empty_repo(tmp_path_factory)
355 355 target_repo._local_fetch(self.repo.path, 'master')
356 356
357 357 master_commit = self.repo.commit_ids[-1]
358 358 assert target_repo._last_fetch_heads() == [master_commit]
359 359
360 360 def test_local_fetch_from_same_repo(self):
361 361 with pytest.raises(ValueError):
362 362 self.repo._local_fetch(self.repo.path, 'master')
363 363
364 364 def test_local_fetch_branch_does_not_exist(self, tmp_path_factory):
365 365 target_repo = self.get_empty_repo(tmp_path_factory)
366 366
367 367 with pytest.raises(RepositoryError):
368 368 target_repo._local_fetch(self.repo.path, 'new_branch')
369 369
370 370 def test_local_pull(self, tmp_path_factory):
371 371 target_repo = self.get_empty_repo(tmp_path_factory)
372 372 source_repo = self.get_clone_repo(tmp_path_factory)
373 373
374 374 # Create a new branch in source repo
375 375 master_commit = source_repo.commit_ids[-1]
376 376 new_branch_commit = source_repo.commit_ids[-3]
377 377 source_repo._checkout(new_branch_commit)
378 378 source_repo._checkout('new_branch', create=True)
379 379
380 380 target_repo._local_pull(source_repo.path, 'new_branch')
381 381 target_repo = GitRepository(target_repo.path)
382 382 assert target_repo.head == new_branch_commit
383 383
384 384 target_repo._local_pull(source_repo.path, 'master')
385 385 target_repo = GitRepository(target_repo.path)
386 386 assert target_repo.head == master_commit
387 387
388 388 def test_local_pull_in_bare_repo(self):
389 389 with pytest.raises(RepositoryError):
390 390 self.repo._local_pull(self.repo.path, 'master')
391 391
392 392 def test_local_merge(self, tmp_path_factory):
393 393 target_repo = self.get_empty_repo(tmp_path_factory)
394 394 source_repo = self.get_clone_repo(tmp_path_factory)
395 395
396 396 # Create a new branch in source repo
397 397 master_commit = source_repo.commit_ids[-1]
398 398 new_branch_commit = source_repo.commit_ids[-3]
399 399 source_repo._checkout(new_branch_commit)
400 400 source_repo._checkout('new_branch', create=True)
401 401
402 402 # This is required as one cannot do a --ff-only merge in an empty repo.
403 403 target_repo._local_pull(source_repo.path, 'new_branch')
404 404
405 405 target_repo._local_fetch(source_repo.path, 'master')
406 406 merge_message = 'Merge message\n\nDescription:...'
407 407 user_name = 'Albert Einstein'
408 408 user_email = 'albert@einstein.com'
409 409 target_repo._local_merge(merge_message, user_name, user_email,
410 410 target_repo._last_fetch_heads())
411 411
412 412 target_repo = GitRepository(target_repo.path)
413 413 assert target_repo.commit_ids[-2] == master_commit
414 414 last_commit = target_repo.get_commit(target_repo.head)
415 415 assert last_commit.message.strip() == merge_message
416 416 assert last_commit.author == '%s <%s>' % (user_name, user_email)
417 417
418 418 assert not os.path.exists(
419 419 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
420 420
421 421 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_git):
422 422 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
423 423 vcsbackend_git.ensure_file('README', 'I will conflict with you!!!')
424 424
425 425 target_repo._local_fetch(self.repo.path, 'master')
426 426 with pytest.raises(RepositoryError):
427 427 target_repo._local_merge(
428 428 'merge_message', 'user name', 'user@name.com',
429 429 target_repo._last_fetch_heads())
430 430
431 431 # Check we are not left in an intermediate merge state
432 432 assert not os.path.exists(
433 433 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
434 434
435 435 def test_local_merge_into_empty_repo(self, tmp_path_factory):
436 436 target_repo = self.get_empty_repo(tmp_path_factory)
437 437
438 438 # This is required as one cannot do a --ff-only merge in an empty repo.
439 439 target_repo._local_fetch(self.repo.path, 'master')
440 440 with pytest.raises(RepositoryError):
441 441 target_repo._local_merge(
442 442 'merge_message', 'user name', 'user@name.com',
443 443 target_repo._last_fetch_heads())
444 444
445 445 def test_local_merge_in_bare_repo(self):
446 446 with pytest.raises(RepositoryError):
447 447 self.repo._local_merge(
448 448 'merge_message', 'user name', 'user@name.com', None)
449 449
450 450 def test_local_push_non_bare(self, tmp_path_factory):
451 451 target_repo = self.get_empty_repo(tmp_path_factory)
452 452
453 453 pushed_branch = 'pushed_branch'
454 454 self.repo._local_push('master', target_repo.path, pushed_branch)
455 455 # Fix the HEAD of the target repo, otherwise GitRepository won't
456 456 # report any branches.
457 457 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
458 458 f.write('ref: refs/heads/%s' % pushed_branch)
459 459
460 460 target_repo = GitRepository(target_repo.path)
461 461
462 462 assert (target_repo.branches[pushed_branch] ==
463 463 self.repo.branches['master'])
464 464
465 465 def test_local_push_bare(self, tmp_path_factory):
466 466 target_repo = self.get_empty_repo(tmp_path_factory, bare=True)
467 467
468 468 pushed_branch = 'pushed_branch'
469 469 self.repo._local_push('master', target_repo.path, pushed_branch)
470 470 # Fix the HEAD of the target repo, otherwise GitRepository won't
471 471 # report any branches.
472 472 with open(os.path.join(target_repo.path, 'HEAD'), 'w') as f:
473 473 f.write('ref: refs/heads/%s' % pushed_branch)
474 474
475 475 target_repo = GitRepository(target_repo.path)
476 476
477 477 assert (target_repo.branches[pushed_branch] ==
478 478 self.repo.branches['master'])
479 479
480 480 def test_local_push_non_bare_target_branch_is_checked_out(self, tmp_path_factory):
481 481 target_repo = self.get_clone_repo(tmp_path_factory)
482 482
483 483 pushed_branch = 'pushed_branch'
484 484 # Create a new branch in source repo
485 485 new_branch_commit = target_repo.commit_ids[-3]
486 486 target_repo._checkout(new_branch_commit)
487 487 target_repo._checkout(pushed_branch, create=True)
488 488
489 489 self.repo._local_push('master', target_repo.path, pushed_branch)
490 490
491 491 target_repo = GitRepository(target_repo.path)
492 492
493 493 assert (target_repo.branches[pushed_branch] ==
494 494 self.repo.branches['master'])
495 495
496 496 def test_local_push_raises_exception_on_conflict(self, vcsbackend_git):
497 497 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
498 498 with pytest.raises(RepositoryError):
499 499 self.repo._local_push('master', target_repo.path, 'master')
500 500
501 501 def test_hooks_can_be_enabled_via_env_variable_for_local_push(self, tmp_path_factory):
502 502 target_repo = self.get_empty_repo(tmp_path_factory, bare=True)
503 503
504 504 with mock.patch.object(self.repo, 'run_git_command') as run_mock:
505 505 self.repo._local_push(
506 506 'master', target_repo.path, 'master', enable_hooks=True)
507 507 env = run_mock.call_args[1]['extra_env']
508 508 assert 'RC_SKIP_HOOKS' not in env
509 509
510 510 def _add_failing_hook(self, repo_path, hook_name, bare=False):
511 511 path_components = (
512 512 ['hooks', hook_name] if bare else ['.git', 'hooks', hook_name])
513 513 hook_path = os.path.join(repo_path, *path_components)
514 514 with open(hook_path, 'w') as f:
515 515 script_lines = [
516 516 '#!%s' % sys.executable,
517 517 'import os',
518 518 'import sys',
519 519 'if os.environ.get("RC_SKIP_HOOKS"):',
520 520 ' sys.exit(0)',
521 521 'sys.exit(1)',
522 522 ]
523 523 f.write('\n'.join(script_lines))
524 524 os.chmod(hook_path, 0o755)
525 525
526 526 def test_local_push_does_not_execute_hook(self, tmp_path_factory):
527 527 target_repo = self.get_empty_repo(tmp_path_factory)
528 528
529 529 pushed_branch = 'pushed_branch'
530 530 self._add_failing_hook(target_repo.path, 'pre-receive')
531 531 self.repo._local_push('master', target_repo.path, pushed_branch)
532 532 # Fix the HEAD of the target repo, otherwise GitRepository won't
533 533 # report any branches.
534 534 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
535 535 f.write('ref: refs/heads/%s' % pushed_branch)
536 536
537 537 target_repo = GitRepository(target_repo.path)
538 538
539 539 assert (target_repo.branches[pushed_branch] ==
540 540 self.repo.branches['master'])
541 541
542 542 def test_local_push_executes_hook(self, tmp_path_factory):
543 543 target_repo = self.get_empty_repo(tmp_path_factory, bare=True)
544 544 self._add_failing_hook(target_repo.path, 'pre-receive', bare=True)
545 545 with pytest.raises(RepositoryError):
546 546 self.repo._local_push(
547 547 'master', target_repo.path, 'master', enable_hooks=True)
548 548
549 549 def test_maybe_prepare_merge_workspace(self):
550 550 workspace = self.repo._maybe_prepare_merge_workspace(
551 551 2, 'pr2', Reference('branch', 'master', 'unused'),
552 552 Reference('branch', 'master', 'unused'))
553 553
554 554 assert os.path.isdir(workspace)
555 555 workspace_repo = GitRepository(workspace)
556 556 assert workspace_repo.branches == self.repo.branches
557 557
558 558 # Calling it a second time should also succeed
559 559 workspace = self.repo._maybe_prepare_merge_workspace(
560 560 2, 'pr2', Reference('branch', 'master', 'unused'),
561 561 Reference('branch', 'master', 'unused'))
562 562 assert os.path.isdir(workspace)
563 563
564 564 def test_maybe_prepare_merge_workspace_different_refs(self):
565 565 workspace = self.repo._maybe_prepare_merge_workspace(
566 566 2, 'pr2', Reference('branch', 'master', 'unused'),
567 567 Reference('branch', 'develop', 'unused'))
568 568
569 569 assert os.path.isdir(workspace)
570 570 workspace_repo = GitRepository(workspace)
571 571 assert workspace_repo.branches == self.repo.branches
572 572
573 573 # Calling it a second time should also succeed
574 574 workspace = self.repo._maybe_prepare_merge_workspace(
575 575 2, 'pr2', Reference('branch', 'master', 'unused'),
576 576 Reference('branch', 'develop', 'unused'))
577 577 assert os.path.isdir(workspace)
578 578
579 579 def test_cleanup_merge_workspace(self):
580 580 workspace = self.repo._maybe_prepare_merge_workspace(
581 581 2, 'pr3', Reference('branch', 'master', 'unused'),
582 582 Reference('branch', 'master', 'unused'))
583 583 self.repo.cleanup_merge_workspace(2, 'pr3')
584 584
585 585 assert not os.path.exists(workspace)
586 586
587 587 def test_cleanup_merge_workspace_invalid_workspace_id(self):
588 588 # No assert: because in case of a nonexistent workspace this function
589 589 # should still succeed.
590 590 self.repo.cleanup_merge_workspace(1, 'pr4')
591 591
592 592 def test_set_refs(self):
593 593 test_ref = 'refs/test-refs/abcde'
594 594 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
595 595
596 596 self.repo.set_refs(test_ref, test_commit_id)
597 597 stdout, _ = self.repo.run_git_command(['show-ref'])
598 598 assert test_ref in stdout
599 599 assert test_commit_id in stdout
600 600
601 601 def test_remove_ref(self):
602 602 test_ref = 'refs/test-refs/abcde'
603 603 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
604 604 self.repo.set_refs(test_ref, test_commit_id)
605 605 stdout, _ = self.repo.run_git_command(['show-ref'])
606 606 assert test_ref in stdout
607 607 assert test_commit_id in stdout
608 608
609 609 self.repo.remove_ref(test_ref)
610 610 stdout, _ = self.repo.run_git_command(['show-ref'])
611 611 assert test_ref not in stdout
612 612 assert test_commit_id not in stdout
613 613
614 614
615 615 class TestGitCommit(object):
616 616
617 617 @pytest.fixture(autouse=True)
618 618 def prepare(self):
619 619 self.repo = GitRepository(TEST_GIT_REPO)
620 620
621 621 def test_default_commit(self):
622 622 tip = self.repo.get_commit()
623 623 assert tip == self.repo.get_commit(None)
624 624 assert tip == self.repo.get_commit('tip')
625 625
626 626 def test_root_node(self):
627 627 tip = self.repo.get_commit()
628 628 assert tip.root is tip.get_node('')
629 629
630 630 def test_lazy_fetch(self):
631 631 """
632 632 Test if commit's nodes expands and are cached as we walk through
633 633 the commit. This test is somewhat hard to write as order of tests
634 634 is a key here. Written by running command after command in a shell.
635 635 """
636 636 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
637 637 assert commit_id in self.repo.commit_ids
638 638 commit = self.repo.get_commit(commit_id)
639 639 assert len(commit.nodes) == 0
640 640 root = commit.root
641 641 assert len(commit.nodes) == 1
642 642 assert len(root.nodes) == 8
643 643 # accessing root.nodes updates commit.nodes
644 644 assert len(commit.nodes) == 9
645 645
646 646 docs = root.get_node('docs')
647 647 # we haven't yet accessed anything new as docs dir was already cached
648 648 assert len(commit.nodes) == 9
649 649 assert len(docs.nodes) == 8
650 650 # accessing docs.nodes updates commit.nodes
651 651 assert len(commit.nodes) == 17
652 652
653 653 assert docs is commit.get_node('docs')
654 654 assert docs is root.nodes[0]
655 655 assert docs is root.dirs[0]
656 656 assert docs is commit.get_node('docs')
657 657
658 658 def test_nodes_with_commit(self):
659 659 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
660 660 commit = self.repo.get_commit(commit_id)
661 661 root = commit.root
662 662 docs = root.get_node('docs')
663 663 assert docs is commit.get_node('docs')
664 664 api = docs.get_node('api')
665 665 assert api is commit.get_node('docs/api')
666 666 index = api.get_node('index.rst')
667 667 assert index is commit.get_node('docs/api/index.rst')
668 668 assert index is commit.get_node('docs')\
669 669 .get_node('api')\
670 670 .get_node('index.rst')
671 671
672 672 def test_branch_and_tags(self):
673 673 """
674 674 rev0 = self.repo.commit_ids[0]
675 675 commit0 = self.repo.get_commit(rev0)
676 676 assert commit0.branch == 'master'
677 677 assert commit0.tags == []
678 678
679 679 rev10 = self.repo.commit_ids[10]
680 680 commit10 = self.repo.get_commit(rev10)
681 681 assert commit10.branch == 'master'
682 682 assert commit10.tags == []
683 683
684 684 rev44 = self.repo.commit_ids[44]
685 685 commit44 = self.repo.get_commit(rev44)
686 686 assert commit44.branch == 'web-branch'
687 687
688 688 tip = self.repo.get_commit('tip')
689 689 assert 'tip' in tip.tags
690 690 """
691 691 # Those tests would fail - branches are now going
692 692 # to be changed in the main API in order to support the git backend
693 693 pass
694 694
695 695 def test_file_size(self):
696 696 to_check = (
697 697 ('c1214f7e79e02fc37156ff215cd71275450cffc3',
698 698 'vcs/backends/BaseRepository.py', 502),
699 699 ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
700 700 'vcs/backends/hg.py', 854),
701 701 ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
702 702 'setup.py', 1068),
703 703
704 704 ('d955cd312c17b02143c04fa1099a352b04368118',
705 705 'vcs/backends/base.py', 2921),
706 706 ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
707 707 'vcs/backends/base.py', 3936),
708 708 ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
709 709 'vcs/backends/base.py', 6189),
710 710 )
711 711 for commit_id, path, size in to_check:
712 712 node = self.repo.get_commit(commit_id).get_node(path)
713 713 assert node.is_file()
714 714 assert node.size == size
715 715
716 716 def test_file_history_from_commits(self):
717 717 node = self.repo[10].get_node('setup.py')
718 718 commit_ids = [commit.raw_id for commit in node.history]
719 719 assert ['ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == commit_ids
720 720
721 721 node = self.repo[20].get_node('setup.py')
722 722 node_ids = [commit.raw_id for commit in node.history]
723 723 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
724 724 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
725 725
726 726 # special case: we check history from a commit that changed this
727 727 # particular file; this means we check whether it's included as well
728 728 node = self.repo.get_commit('191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e') \
729 729 .get_node('setup.py')
730 730 node_ids = [commit.raw_id for commit in node.history]
731 731 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
732 732 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
733 733
734 734 def test_file_history(self):
735 735 # we can only check whether those commits are present in the history,
736 736 # as we cannot update this test every time the file is changed
737 737 files = {
738 738 'setup.py': [
739 739 '54386793436c938cff89326944d4c2702340037d',
740 740 '51d254f0ecf5df2ce50c0b115741f4cf13985dab',
741 741 '998ed409c795fec2012b1c0ca054d99888b22090',
742 742 '5e0eb4c47f56564395f76333f319d26c79e2fb09',
743 743 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
744 744 '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e',
745 745 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
746 746 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
747 747 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
748 748 ],
749 749 'vcs/nodes.py': [
750 750 '33fa3223355104431402a888fa77a4e9956feb3e',
751 751 'fa014c12c26d10ba682fadb78f2a11c24c8118e1',
752 752 'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
753 753 'ab5721ca0a081f26bf43d9051e615af2cc99952f',
754 754 'c877b68d18e792a66b7f4c529ea02c8f80801542',
755 755 '4313566d2e417cb382948f8d9d7c765330356054',
756 756 '6c2303a793671e807d1cfc70134c9ca0767d98c2',
757 757 '54386793436c938cff89326944d4c2702340037d',
758 758 '54000345d2e78b03a99d561399e8e548de3f3203',
759 759 '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b',
760 760 '2d03ca750a44440fb5ea8b751176d1f36f8e8f46',
761 761 '2a08b128c206db48c2f0b8f70df060e6db0ae4f8',
762 762 '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b',
763 763 'ac71e9503c2ca95542839af0ce7b64011b72ea7c',
764 764 '12669288fd13adba2a9b7dd5b870cc23ffab92d2',
765 765 '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382',
766 766 '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5',
767 767 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
768 768 'f50f42baeed5af6518ef4b0cb2f1423f3851a941',
769 769 'd7e390a45f6aa96f04f5e7f583ad4f867431aa25',
770 770 'f15c21f97864b4f071cddfbf2750ec2e23859414',
771 771 'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade',
772 772 'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b',
773 773 '84dec09632a4458f79f50ddbbd155506c460b4f9',
774 774 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
775 775 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
776 776 '3bf1c5868e570e39569d094f922d33ced2fa3b2b',
777 777 'b8d04012574729d2c29886e53b1a43ef16dd00a1',
778 778 '6970b057cffe4aab0a792aa634c89f4bebf01441',
779 779 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
780 780 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
781 781 ],
782 782 'vcs/backends/git.py': [
783 783 '4cf116ad5a457530381135e2f4c453e68a1b0105',
784 784 '9a751d84d8e9408e736329767387f41b36935153',
785 785 'cb681fb539c3faaedbcdf5ca71ca413425c18f01',
786 786 '428f81bb652bcba8d631bce926e8834ff49bdcc6',
787 787 '180ab15aebf26f98f714d8c68715e0f05fa6e1c7',
788 788 '2b8e07312a2e89e92b90426ab97f349f4bce2a3a',
789 789 '50e08c506174d8645a4bb517dd122ac946a0f3bf',
790 790 '54000345d2e78b03a99d561399e8e548de3f3203',
791 791 ],
792 792 }
793 793 for path, commit_ids in files.items():
794 794 node = self.repo.get_commit(commit_ids[0]).get_node(path)
795 795 node_ids = [commit.raw_id for commit in node.history]
796 796 assert set(commit_ids).issubset(set(node_ids)), (
797 797 "We assumed that %s is subset of commit_ids for which file %s "
798 798 "has been changed, and history of that node returned: %s"
799 799 % (commit_ids, path, node_ids))
800 800
801 801 def test_file_annotate(self):
802 802 files = {
803 803 'vcs/backends/__init__.py': {
804 804 'c1214f7e79e02fc37156ff215cd71275450cffc3': {
805 805 'lines_no': 1,
806 806 'commits': [
807 807 'c1214f7e79e02fc37156ff215cd71275450cffc3',
808 808 ],
809 809 },
810 810 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': {
811 811 'lines_no': 21,
812 812 'commits': [
813 813 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
814 814 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
815 815 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
816 816 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
817 817 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
818 818 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
819 819 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
820 820 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
821 821 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
822 822 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
823 823 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
824 824 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
825 825 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
826 826 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
827 827 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
828 828 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
829 829 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
830 830 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
831 831 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
832 832 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
833 833 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
834 834 ],
835 835 },
836 836 'e29b67bd158580fc90fc5e9111240b90e6e86064': {
837 837 'lines_no': 32,
838 838 'commits': [
839 839 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
840 840 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
841 841 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
842 842 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
843 843 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
844 844 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
845 845 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
846 846 '54000345d2e78b03a99d561399e8e548de3f3203',
847 847 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
848 848 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
849 849 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
850 850 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
851 851 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
852 852 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
853 853 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
854 854 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
855 855 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
856 856 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
857 857 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
858 858 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
859 859 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
860 860 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
861 861 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
862 862 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
863 863 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
864 864 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
865 865 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
866 866 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
867 867 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
868 868 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
869 869 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
870 870 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
871 871 ],
872 872 },
873 873 },
874 874 }
875 875
876 876 for fname, commit_dict in files.items():
877 877 for commit_id, __ in commit_dict.items():
878 878 commit = self.repo.get_commit(commit_id)
879 879
880 880 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
881 881 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
882 882 assert l1_1 == l1_2
883 883 l1 = l1_1
884 884 l2 = files[fname][commit_id]['commits']
885 885 assert l1 == l2, (
886 886 "The lists of commit_ids for %s@commit_id %s"
887 887 "from annotation list should match each other, "
888 888 "got \n%s \nvs \n%s " % (fname, commit_id, l1, l2))
889 889
890 890 def test_files_state(self):
891 891 """
892 892 Tests state of FileNodes.
893 893 """
894 894 node = self.repo\
895 895 .get_commit('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\
896 896 .get_node('vcs/utils/diffs.py')
897 897 assert node.state == NodeState.ADDED
898 898 assert node.added
899 899 assert not node.changed
900 900 assert not node.not_changed
901 901 assert not node.removed
902 902
903 903 node = self.repo\
904 904 .get_commit('33fa3223355104431402a888fa77a4e9956feb3e')\
905 905 .get_node('.hgignore')
906 906 assert node.state == NodeState.CHANGED
907 907 assert not node.added
908 908 assert node.changed
909 909 assert not node.not_changed
910 910 assert not node.removed
911 911
912 912 node = self.repo\
913 913 .get_commit('e29b67bd158580fc90fc5e9111240b90e6e86064')\
914 914 .get_node('setup.py')
915 915 assert node.state == NodeState.NOT_CHANGED
916 916 assert not node.added
917 917 assert not node.changed
918 918 assert node.not_changed
919 919 assert not node.removed
920 920
921 921 # If a node has the REMOVED state then trying to fetch it raises a
922 922 # NodeDoesNotExistError exception
923 923 commit = self.repo.get_commit(
924 924 'fa6600f6848800641328adbf7811fd2372c02ab2')
925 925 path = 'vcs/backends/BaseRepository.py'
926 926 with pytest.raises(NodeDoesNotExistError):
927 927 commit.get_node(path)
928 928 # but it should be listed in the commit's ``removed`` attribute
929 929 assert path in [rf.path for rf in commit.removed]
930 930
931 931 commit = self.repo.get_commit(
932 932 '54386793436c938cff89326944d4c2702340037d')
933 933 changed = [
934 934 'setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
935 935 'vcs/nodes.py']
936 936 assert set(changed) == set([f.path for f in commit.changed])
937 937
938 938 def test_unicode_branch_refs(self):
939 939 unicode_branches = {
940 940 'refs/heads/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
941 941 u'refs/heads/uniçö∂e': 'ürl',
942 942 }
943 943 with mock.patch(
944 944 ("rhodecode.lib.vcs.backends.git.repository"
945 945 ".GitRepository._refs"),
946 946 unicode_branches):
947 947 branches = self.repo.branches
948 948
949 949 assert 'unicode' in branches
950 950 assert u'uniçö∂e' in branches
951 951
952 952 def test_unicode_tag_refs(self):
953 953 unicode_tags = {
954 954 'refs/tags/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
955 955 u'refs/tags/uniçö∂e': '6c0ce52b229aa978889e91b38777f800e85f330b',
956 956 }
957 957 with mock.patch(
958 958 ("rhodecode.lib.vcs.backends.git.repository"
959 959 ".GitRepository._refs"),
960 960 unicode_tags):
961 961 tags = self.repo.tags
962 962
963 963 assert 'unicode' in tags
964 964 assert u'uniçö∂e' in tags
965 965
966 966 def test_commit_message_is_unicode(self):
967 967 for commit in self.repo:
968 968 assert type(commit.message) == unicode
969 969
970 970 def test_commit_author_is_unicode(self):
971 971 for commit in self.repo:
972 972 assert type(commit.author) == unicode
973 973
974 974 def test_repo_files_content_is_unicode(self):
975 975 commit = self.repo.get_commit()
976 976 for node in commit.get_node('/'):
977 977 if node.is_file():
978 978 assert type(node.content) == unicode
979 979
980 980 def test_wrong_path(self):
981 981 # There is a 'setup.py' in the root dir, but not at this path:
982 982 path = 'foo/bar/setup.py'
983 983 tip = self.repo.get_commit()
984 984 with pytest.raises(VCSError):
985 985 tip.get_node(path)
986 986
987 987 @pytest.mark.parametrize("author_email, commit_id", [
988 988 ('marcin@python-blog.com', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
989 989 ('lukasz.balcerzak@python-center.pl',
990 990 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
991 991 ('none@none', '8430a588b43b5d6da365400117c89400326e7992'),
992 992 ])
993 993 def test_author_email(self, author_email, commit_id):
994 994 commit = self.repo.get_commit(commit_id)
995 995 assert author_email == commit.author_email
996 996
997 997 @pytest.mark.parametrize("author, commit_id", [
998 998 ('Marcin Kuzminski', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
999 999 ('Lukasz Balcerzak', 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
1000 1000 ('marcink', '8430a588b43b5d6da365400117c89400326e7992'),
1001 1001 ])
1002 1002 def test_author_username(self, author, commit_id):
1003 1003 commit = self.repo.get_commit(commit_id)
1004 1004 assert author == commit.author_name
1005 1005
1006 1006
1007 1007 class TestLargeFileRepo(object):
1008 1008
1009 1009 def test_large_file(self, backend_git):
1010 1010 conf = make_db_config()
1011 1011 repo = backend_git.create_test_repo('largefiles', conf)
1012 1012
1013 1013 tip = repo.scm_instance().get_commit()
1014 1014
1015 1015 # extract stored LF node into the origin cache
1016 1016 lfs_store = os.path.join(repo.repo_path, repo.repo_name, 'lfs_store')
1017 1017
1018 1018 oid = '7b331c02e313c7599d5a90212e17e6d3cb729bd2e1c9b873c302a63c95a2f9bf'
1019 1019 oid_path = os.path.join(lfs_store, oid)
1020 1020 oid_destination = os.path.join(
1021 1021 conf.get('vcs_git_lfs', 'store_location'), oid)
1022 1022 shutil.copy(oid_path, oid_destination)
1023 1023
1024 1024 node = tip.get_node('1MB.zip')
1025 1025
1026 1026 lf_node = node.get_largefile_node()
1027 1027
1028 1028 assert lf_node.is_largefile() is True
1029 1029 assert lf_node.size == 1024000
1030 1030 assert lf_node.name == '1MB.zip'
1031 1031
1032 1032
1033 1033 @pytest.mark.usefixtures("vcs_repository_support")
1034 1034 class TestGitSpecificWithRepo(BackendTestMixin):
1035 1035
1036 1036 @classmethod
1037 1037 def _get_commits(cls):
1038 1038 return [
1039 1039 {
1040 1040 'message': 'Initial',
1041 1041 'author': 'Joe Doe <joe.doe@example.com>',
1042 1042 'date': datetime.datetime(2010, 1, 1, 20),
1043 1043 'added': [
1044 1044 FileNode('foobar/static/js/admin/base.js', content='base'),
1045 1045 FileNode(
1046 1046 'foobar/static/admin', content='admin',
1047 1047 mode=0o120000), # this is a link
1048 1048 FileNode('foo', content='foo'),
1049 1049 ],
1050 1050 },
1051 1051 {
1052 1052 'message': 'Second',
1053 1053 'author': 'Joe Doe <joe.doe@example.com>',
1054 1054 'date': datetime.datetime(2010, 1, 1, 22),
1055 1055 'added': [
1056 1056 FileNode('foo2', content='foo2'),
1057 1057 ],
1058 1058 },
1059 1059 ]
1060 1060
1061 1061 def test_paths_slow_traversing(self):
1062 1062 commit = self.repo.get_commit()
1063 1063 assert commit.get_node('foobar').get_node('static').get_node('js')\
1064 1064 .get_node('admin').get_node('base.js').content == 'base'
1065 1065
1066 1066 def test_paths_fast_traversing(self):
1067 1067 commit = self.repo.get_commit()
1068 assert (
1069 commit.get_node('foobar/static/js/admin/base.js').content ==
1070 'base')
1068 assert commit.get_node('foobar/static/js/admin/base.js').content == 'base'
1071 1069
1072 1070 def test_get_diff_runs_git_command_with_hashes(self):
1073 1071 comm1 = self.repo[0]
1074 1072 comm2 = self.repo[1]
1075 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1073
1074 with mock.patch.object(self.repo, '_remote') as remote_mock:
1076 1075 self.repo.get_diff(comm1, comm2)
1077 1076
1078 self.repo.run_git_command.assert_called_once_with(
1079 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1080 '--abbrev=40', comm1.raw_id, comm2.raw_id])
1077 remote_mock.diff.assert_called_once_with(
1078 comm1.raw_id, comm2.raw_id,
1079 file_filter=None, opt_ignorews=False, context=3)
1081 1080
1082 1081 def test_get_diff_runs_git_command_with_str_hashes(self):
1083 1082 comm2 = self.repo[1]
1084 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1083 with mock.patch.object(self.repo, '_remote') as remote_mock:
1085 1084 self.repo.get_diff(self.repo.EMPTY_COMMIT, comm2)
1086 self.repo.run_git_command.assert_called_once_with(
1087 ['show', '-U3', '--full-index', '--binary', '-p', '-M',
1088 '--abbrev=40', comm2.raw_id])
1085 remote_mock.diff.assert_called_once_with(
1086 self.repo.EMPTY_COMMIT.raw_id, comm2.raw_id,
1087 file_filter=None, opt_ignorews=False, context=3)
1089 1088
1090 1089 def test_get_diff_runs_git_command_with_path_if_its_given(self):
1091 1090 comm1 = self.repo[0]
1092 1091 comm2 = self.repo[1]
1093 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1092 with mock.patch.object(self.repo, '_remote') as remote_mock:
1094 1093 self.repo.get_diff(comm1, comm2, 'foo')
1095 self.repo.run_git_command.assert_called_once_with(
1096 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1097 '--abbrev=40', self.repo._lookup_commit(0),
1098 comm2.raw_id, '--', 'foo'])
1094 remote_mock.diff.assert_called_once_with(
1095 self.repo._lookup_commit(0), comm2.raw_id,
1096 file_filter='foo', opt_ignorews=False, context=3)
1099 1097
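Note: the three rewritten tests above no longer pin exact git CLI arguments; they assert the call made to the vcsserver proxy instead. Reconstructed from those assertions alone, not from the backend source, the forwarding inside get_diff looks roughly like this hypothetical sketch:

# Hypothetical sketch inferred from the assertions above; the real
# GitRepository.get_diff exposes more options than shown here.
def get_diff_sketch(repo, commit1, commit2, path=None):
    # Both regular commits and EMPTY_COMMIT are forwarded as raw ids.
    return repo._remote.diff(
        commit1.raw_id, commit2.raw_id,
        file_filter=path,       # 'foo' in the path-filter test, else None
        opt_ignorews=False,     # keep whitespace changes by default
        context=3)              # default unified-diff context lines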
1100 1098
1101 1099 @pytest.mark.usefixtures("vcs_repository_support")
1102 1100 class TestGitRegression(BackendTestMixin):
1103 1101
1104 1102 @classmethod
1105 1103 def _get_commits(cls):
1106 1104 return [
1107 1105 {
1108 1106 'message': 'Initial',
1109 1107 'author': 'Joe Doe <joe.doe@example.com>',
1110 1108 'date': datetime.datetime(2010, 1, 1, 20),
1111 1109 'added': [
1112 1110 FileNode('bot/__init__.py', content='base'),
1113 1111 FileNode('bot/templates/404.html', content='base'),
1114 1112 FileNode('bot/templates/500.html', content='base'),
1115 1113 ],
1116 1114 },
1117 1115 {
1118 1116 'message': 'Second',
1119 1117 'author': 'Joe Doe <joe.doe@example.com>',
1120 1118 'date': datetime.datetime(2010, 1, 1, 22),
1121 1119 'added': [
1122 1120 FileNode('bot/build/migrations/1.py', content='foo2'),
1123 1121 FileNode('bot/build/migrations/2.py', content='foo2'),
1124 1122 FileNode(
1125 1123 'bot/build/static/templates/f.html', content='foo2'),
1126 1124 FileNode(
1127 1125 'bot/build/static/templates/f1.html', content='foo2'),
1128 1126 FileNode('bot/build/templates/err.html', content='foo2'),
1129 1127 FileNode('bot/build/templates/err2.html', content='foo2'),
1130 1128 ],
1131 1129 },
1132 1130 ]
1133 1131
1134 1132 @pytest.mark.parametrize("path, expected_paths", [
1135 1133 ('bot', [
1136 1134 'bot/build',
1137 1135 'bot/templates',
1138 1136 'bot/__init__.py']),
1139 1137 ('bot/build', [
1140 1138 'bot/build/migrations',
1141 1139 'bot/build/static',
1142 1140 'bot/build/templates']),
1143 1141 ('bot/build/static', [
1144 1142 'bot/build/static/templates']),
1145 1143 ('bot/build/static/templates', [
1146 1144 'bot/build/static/templates/f.html',
1147 1145 'bot/build/static/templates/f1.html']),
1148 1146 ('bot/build/templates', [
1149 1147 'bot/build/templates/err.html',
1150 1148 'bot/build/templates/err2.html']),
1151 1149 ('bot/templates/', [
1152 1150 'bot/templates/404.html',
1153 1151 'bot/templates/500.html']),
1154 1152 ])
1155 1153 def test_similar_paths(self, path, expected_paths):
1156 1154 commit = self.repo.get_commit()
1157 1155 paths = [n.path for n in commit.get_nodes(path)]
1158 1156 assert paths == expected_paths
1159 1157
1160 1158
1161 1159 class TestDiscoverGitVersion(object):
1162 1160
1163 1161 def test_returns_git_version(self, baseapp):
1164 1162 version = discover_git_version()
1165 1163 assert version
1166 1164
1167 1165 def test_returns_empty_string_without_vcsserver(self):
1168 1166 mock_connection = mock.Mock()
1169 1167 mock_connection.discover_git_version = mock.Mock(
1170 1168 side_effect=Exception)
1171 1169 with mock.patch('rhodecode.lib.vcs.connection.Git', mock_connection):
1172 1170 version = discover_git_version()
1173 1171 assert version == ''
1174 1172
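Note: the two tests above pin the error-handling contract of discover_git_version: a reachable vcsserver yields a non-empty version string, and any failure degrades to an empty string. A hypothetical reconstruction, inferred from the assertions only:

# Illustrative sketch; the real helper lives in rhodecode.lib.vcs and
# may differ in detail.
from rhodecode.lib.vcs import connection

def discover_git_version_sketch():
    try:
        return connection.Git.discover_git_version()
    except Exception:
        # degrade gracefully when the vcsserver is unreachable
        return ''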
1175 1173
1176 1174 class TestGetSubmoduleUrl(object):
1177 1175 def test_submodules_file_found(self):
1178 1176 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1179 1177 node = mock.Mock()
1180 1178 with mock.patch.object(
1181 1179 commit, 'get_node', return_value=node) as get_node_mock:
1182 1180 node.content = (
1183 1181 '[submodule "subrepo1"]\n'
1184 1182 '\tpath = subrepo1\n'
1185 1183 '\turl = https://code.rhodecode.com/dulwich\n'
1186 1184 )
1187 1185 result = commit._get_submodule_url('subrepo1')
1188 1186 get_node_mock.assert_called_once_with('.gitmodules')
1189 1187 assert result == 'https://code.rhodecode.com/dulwich'
1190 1188
1191 1189 def test_complex_submodule_path(self):
1192 1190 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1193 1191 node = mock.Mock()
1194 1192 with mock.patch.object(
1195 1193 commit, 'get_node', return_value=node) as get_node_mock:
1196 1194 node.content = (
1197 1195 '[submodule "complex/subrepo/path"]\n'
1198 1196 '\tpath = complex/subrepo/path\n'
1199 1197 '\turl = https://code.rhodecode.com/dulwich\n'
1200 1198 )
1201 1199 result = commit._get_submodule_url('complex/subrepo/path')
1202 1200 get_node_mock.assert_called_once_with('.gitmodules')
1203 1201 assert result == 'https://code.rhodecode.com/dulwich'
1204 1202
1205 1203 def test_submodules_file_not_found(self):
1206 1204 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1207 1205 with mock.patch.object(
1208 1206 commit, 'get_node', side_effect=NodeDoesNotExistError):
1209 1207 result = commit._get_submodule_url('complex/subrepo/path')
1210 1208 assert result is None
1211 1209
1212 1210 def test_path_not_found(self):
1213 1211 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1214 1212 node = mock.Mock()
1215 1213 with mock.patch.object(
1216 1214 commit, 'get_node', return_value=node) as get_node_mock:
1217 1215 node.content = (
1218 1216 '[submodule "subrepo1"]\n'
1219 1217 '\tpath = subrepo1\n'
1220 1218 '\turl = https://code.rhodecode.com/dulwich\n'
1221 1219 )
1222 1220 result = commit._get_submodule_url('subrepo2')
1223 1221 get_node_mock.assert_called_once_with('.gitmodules')
1224 1222 assert result is None
1225 1223
1226 1224 def test_returns_cached_values(self):
1227 1225 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1228 1226 node = mock.Mock()
1229 1227 with mock.patch.object(
1230 1228 commit, 'get_node', return_value=node) as get_node_mock:
1231 1229 node.content = (
1232 1230 '[submodule "subrepo1"]\n'
1233 1231 '\tpath = subrepo1\n'
1234 1232 '\turl = https://code.rhodecode.com/dulwich\n'
1235 1233 )
1236 1234 for _ in range(3):
1237 1235 commit._get_submodule_url('subrepo1')
1238 1236 get_node_mock.assert_called_once_with('.gitmodules')
1239 1237
1240 1238 def test_get_node_returns_a_link(self):
1241 1239 repository = mock.Mock()
1242 1240 repository.alias = 'git'
1243 1241 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1244 1242 submodule_url = 'https://code.rhodecode.com/dulwich'
1245 1243 get_id_patch = mock.patch.object(
1246 1244 commit, '_get_tree_id_for_path', return_value=(1, 'link'))
1247 1245 get_submodule_patch = mock.patch.object(
1248 1246 commit, '_get_submodule_url', return_value=submodule_url)
1249 1247
1250 1248 with get_id_patch, get_submodule_patch as submodule_mock:
1251 1249 node = commit.get_node('/abcde')
1252 1250
1253 1251 submodule_mock.assert_called_once_with('/abcde')
1254 1252 assert type(node) == SubModuleNode
1255 1253 assert node.url == submodule_url
1256 1254
1257 1255 def test_get_nodes_returns_links(self):
1258 1256 repository = mock.MagicMock()
1259 1257 repository.alias = 'git'
1260 1258 repository._remote.tree_items.return_value = [
1261 1259 ('subrepo', 'stat', 1, 'link')
1262 1260 ]
1263 1261 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1264 1262 submodule_url = 'https://code.rhodecode.com/dulwich'
1265 1263 get_id_patch = mock.patch.object(
1266 1264 commit, '_get_tree_id_for_path', return_value=(1, 'tree'))
1267 1265 get_submodule_patch = mock.patch.object(
1268 1266 commit, '_get_submodule_url', return_value=submodule_url)
1269 1267
1270 1268 with get_id_patch, get_submodule_patch as submodule_mock:
1271 1269 nodes = commit.get_nodes('/abcde')
1272 1270
1273 1271 submodule_mock.assert_called_once_with('/abcde/subrepo')
1274 1272 assert len(nodes) == 1
1275 1273 assert type(nodes[0]) == SubModuleNode
1276 1274 assert nodes[0].url == submodule_url
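Note: the TestGetSubmoduleUrl cases above feed .gitmodules content in git-config syntax through _get_submodule_url and expect the matching url back (or None, including when the file itself is missing). As an illustration only, not the backend's actual implementation, such content can be resolved with a configparser-style parser:

# Illustrative sketch assuming a configparser with read_string support
# (Python 3 stdlib, or the configparser backport used via vcs.compat).
import configparser

def submodule_url_sketch(gitmodules_content, path):
    parser = configparser.RawConfigParser()
    parser.read_string(gitmodules_content)
    for section in parser.sections():
        # sections look like: [submodule "subrepo1"]
        if parser.get(section, 'path') == path:
            return parser.get(section, 'url')
    return None  # mirrors the "path not found" expectation above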
@@ -1,1188 +1,1188 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22
23 23 import mock
24 24 import pytest
25 25
26 26 from rhodecode.lib.utils import make_db_config
27 27 from rhodecode.lib.vcs import backends
28 28 from rhodecode.lib.vcs.backends.base import (
29 29 Reference, MergeResponse, MergeFailureReason)
30 30 from rhodecode.lib.vcs.backends.hg import MercurialRepository, MercurialCommit
31 31 from rhodecode.lib.vcs.exceptions import (
32 32 RepositoryError, VCSError, NodeDoesNotExistError, CommitDoesNotExistError)
33 33 from rhodecode.lib.vcs.nodes import FileNode, NodeKind, NodeState
34 34 from rhodecode.tests import TEST_HG_REPO, TEST_HG_REPO_CLONE, repo_id_generator
35 35
36 36
37 37 pytestmark = pytest.mark.backends("hg")
38 38
39 39
40 40 def repo_path_generator():
41 41 """
42 42 Return a different path to be used for cloning repos.
43 43 """
44 44 i = 0
45 45 while True:
46 46 i += 1
47 47 yield '%s-%d' % (TEST_HG_REPO_CLONE, i)
48 48
49 49 REPO_PATH_GENERATOR = repo_path_generator()
50 50
51 51
52 52 @pytest.fixture(scope='class', autouse=True)
53 53 def repo(request, baseapp):
54 54 repo = MercurialRepository(TEST_HG_REPO)
55 55 if request.cls:
56 56 request.cls.repo = repo
57 57 return repo
58 58
59 59
60 class TestMercurialRepository:
60 class TestMercurialRepository(object):
61 61
62 62 # pylint: disable=protected-access
63 63
64 64 def get_clone_repo(self):
65 65 """
66 66 Return a clone of the base repo.
67 67 """
68 68 clone_path = next(REPO_PATH_GENERATOR)
69 69 repo_clone = MercurialRepository(
70 70 clone_path, create=True, src_url=self.repo.path)
71 71
72 72 return repo_clone
73 73
74 74 def get_empty_repo(self):
75 75 """
76 76 Return an empty repo.
77 77 """
78 78 return MercurialRepository(next(REPO_PATH_GENERATOR), create=True)
79 79
80 80 def test_wrong_repo_path(self):
81 81 wrong_repo_path = '/tmp/errorrepo_hg'
82 82 with pytest.raises(RepositoryError):
83 83 MercurialRepository(wrong_repo_path)
84 84
85 85 def test_unicode_path_repo(self):
86 86 with pytest.raises(VCSError):
87 87 MercurialRepository(u'iShouldFail')
88 88
89 89 def test_unicode_commit_id(self):
90 90 with pytest.raises(CommitDoesNotExistError):
91 91 self.repo.get_commit(u'unicode-commit-id')
92 92 with pytest.raises(CommitDoesNotExistError):
93 93 self.repo.get_commit(u'unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-commit-id')
94 94
95 95 def test_unicode_bookmark(self):
96 96 self.repo.bookmark(u'unicode-bookmark')
97 97 self.repo.bookmark(u'unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-bookmark')
98 98
99 99 def test_unicode_branch(self):
100 100 with pytest.raises(KeyError):
101 101 self.repo.branches[u'unicode-branch']
102 102 with pytest.raises(KeyError):
103 103 self.repo.branches[u'unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-branch']
104 104
105 105 def test_repo_clone(self):
106 106 if os.path.exists(TEST_HG_REPO_CLONE):
107 107 self.fail(
108 108 'Cannot test mercurial clone repo as location %s already '
109 109 'exists. You should manually remove it first.'
110 110 % TEST_HG_REPO_CLONE)
111 111
112 112 repo = MercurialRepository(TEST_HG_REPO)
113 113 repo_clone = MercurialRepository(TEST_HG_REPO_CLONE,
114 114 src_url=TEST_HG_REPO)
115 115 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
116 116 # Checking hashes of commits should be enough
117 117 for commit in repo.get_commits():
118 118 raw_id = commit.raw_id
119 119 assert raw_id == repo_clone.get_commit(raw_id).raw_id
120 120
121 121 def test_repo_clone_with_update(self):
122 122 repo = MercurialRepository(TEST_HG_REPO)
123 123 repo_clone = MercurialRepository(
124 124 TEST_HG_REPO_CLONE + '_w_update',
125 125 src_url=TEST_HG_REPO, do_workspace_checkout=True)
126 126 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
127 127
128 128 # check if current workdir was updated
129 129 assert os.path.isfile(
130 130 os.path.join(TEST_HG_REPO_CLONE + '_w_update', 'MANIFEST.in'))
131 131
132 132 def test_repo_clone_without_update(self):
133 133 repo = MercurialRepository(TEST_HG_REPO)
134 134 repo_clone = MercurialRepository(
135 135 TEST_HG_REPO_CLONE + '_wo_update',
136 136 src_url=TEST_HG_REPO, do_workspace_checkout=False)
137 137 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
138 138 assert not os.path.isfile(
139 139 os.path.join(TEST_HG_REPO_CLONE + '_wo_update', 'MANIFEST.in'))
140 140
141 141 def test_commit_ids(self):
142 142 # there are 21 commits at bitbucket now
143 143 # so we can assume they would be available from now on
144 144 subset = set([
145 145 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
146 146 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
147 147 '6cba7170863a2411822803fa77a0a264f1310b35',
148 148 '56349e29c2af3ac913b28bde9a2c6154436e615b',
149 149 '2dda4e345facb0ccff1a191052dd1606dba6781d',
150 150 '6fff84722075f1607a30f436523403845f84cd9e',
151 151 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
152 152 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
153 153 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
154 154 'be90031137367893f1c406e0a8683010fd115b79',
155 155 'db8e58be770518cbb2b1cdfa69146e47cd481481',
156 156 '84478366594b424af694a6c784cb991a16b87c21',
157 157 '17f8e105dddb9f339600389c6dc7175d395a535c',
158 158 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
159 159 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
160 160 '786facd2c61deb9cf91e9534735124fb8fc11842',
161 161 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
162 162 'aa6a0de05b7612707db567078e130a6cd114a9a7',
163 163 'eada5a770da98ab0dd7325e29d00e0714f228d09'
164 164 ])
165 165 assert subset.issubset(set(self.repo.commit_ids))
166 166
167 167 # check if we have the proper order of commits
168 168 org = [
169 169 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
170 170 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
171 171 '6cba7170863a2411822803fa77a0a264f1310b35',
172 172 '56349e29c2af3ac913b28bde9a2c6154436e615b',
173 173 '2dda4e345facb0ccff1a191052dd1606dba6781d',
174 174 '6fff84722075f1607a30f436523403845f84cd9e',
175 175 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
176 176 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
177 177 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
178 178 'be90031137367893f1c406e0a8683010fd115b79',
179 179 'db8e58be770518cbb2b1cdfa69146e47cd481481',
180 180 '84478366594b424af694a6c784cb991a16b87c21',
181 181 '17f8e105dddb9f339600389c6dc7175d395a535c',
182 182 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
183 183 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
184 184 '786facd2c61deb9cf91e9534735124fb8fc11842',
185 185 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
186 186 'aa6a0de05b7612707db567078e130a6cd114a9a7',
187 187 'eada5a770da98ab0dd7325e29d00e0714f228d09',
188 188 '2c1885c735575ca478bf9e17b0029dca68824458',
189 189 'd9bcd465040bf869799b09ad732c04e0eea99fe9',
190 190 '469e9c847fe1f6f7a697b8b25b4bc5b48780c1a7',
191 191 '4fb8326d78e5120da2c7468dcf7098997be385da',
192 192 '62b4a097164940bd66030c4db51687f3ec035eed',
193 193 '536c1a19428381cfea92ac44985304f6a8049569',
194 194 '965e8ab3c44b070cdaa5bf727ddef0ada980ecc4',
195 195 '9bb326a04ae5d98d437dece54be04f830cf1edd9',
196 196 'f8940bcb890a98c4702319fbe36db75ea309b475',
197 197 'ff5ab059786ebc7411e559a2cc309dfae3625a3b',
198 198 '6b6ad5f82ad5bb6190037671bd254bd4e1f4bf08',
199 199 'ee87846a61c12153b51543bf860e1026c6d3dcba',
200 200 ]
201 201 assert org == self.repo.commit_ids[:31]
202 202
203 203 def test_iter_slice(self):
204 204 sliced = list(self.repo[:10])
205 205 itered = list(self.repo)[:10]
206 206 assert sliced == itered
207 207
208 208 def test_slicing(self):
209 209 # expected slice sizes: 4, 1, 5, 10, 95
210 210 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
211 211 (10, 20, 10), (5, 100, 95)]:
212 212 indexes = list(self.repo[sfrom:sto])
213 213 assert len(indexes) == size
214 214 assert indexes[0] == self.repo.get_commit(commit_idx=sfrom)
215 215 assert indexes[-1] == self.repo.get_commit(commit_idx=sto - 1)
216 216
217 217 def test_branches(self):
218 218 # TODO: Need more tests here
219 219
220 220 # active branches
221 221 assert 'default' in self.repo.branches
222 222 assert 'stable' in self.repo.branches
223 223
224 224 # closed
225 225 assert 'git' in self.repo._get_branches(closed=True)
226 226 assert 'web' in self.repo._get_branches(closed=True)
227 227
228 228 for name, id in self.repo.branches.items():
229 229 assert isinstance(self.repo.get_commit(id), MercurialCommit)
230 230
231 231 def test_tip_in_tags(self):
232 232 # tip is always a tag
233 233 assert 'tip' in self.repo.tags
234 234
235 235 def test_tip_commit_in_tags(self):
236 236 tip = self.repo.get_commit()
237 237 assert self.repo.tags['tip'] == tip.raw_id
238 238
239 239 def test_initial_commit(self):
240 240 init_commit = self.repo.get_commit(commit_idx=0)
241 241 init_author = init_commit.author
242 242
243 243 assert init_commit.message == 'initial import'
244 244 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
245 245 assert init_author == init_commit.committer
246 246 assert sorted(init_commit._file_paths) == sorted([
247 247 'vcs/__init__.py',
248 248 'vcs/backends/BaseRepository.py',
249 249 'vcs/backends/__init__.py',
250 250 ])
251 251 assert sorted(init_commit._dir_paths) == sorted(
252 252 ['', 'vcs', 'vcs/backends'])
253 253
254 254 assert init_commit._dir_paths + init_commit._file_paths == \
255 255 init_commit._paths
256 256
257 257 with pytest.raises(NodeDoesNotExistError):
258 258 init_commit.get_node(path='foobar')
259 259
260 260 node = init_commit.get_node('vcs/')
261 261 assert hasattr(node, 'kind')
262 262 assert node.kind == NodeKind.DIR
263 263
264 264 node = init_commit.get_node('vcs')
265 265 assert hasattr(node, 'kind')
266 266 assert node.kind == NodeKind.DIR
267 267
268 268 node = init_commit.get_node('vcs/__init__.py')
269 269 assert hasattr(node, 'kind')
270 270 assert node.kind == NodeKind.FILE
271 271
272 272 def test_not_existing_commit(self):
273 273 # rawid
274 274 with pytest.raises(RepositoryError):
275 275 self.repo.get_commit('abcd' * 10)
276 276 # shortid
277 277 with pytest.raises(RepositoryError):
278 278 self.repo.get_commit('erro' * 4)
279 279 # numeric
280 280 with pytest.raises(RepositoryError):
281 281 self.repo.get_commit(commit_idx=self.repo.count() + 1)
282 282
283 283 # Small chance we ever get to this one
284 284 idx = pow(2, 30)
285 285 with pytest.raises(RepositoryError):
286 286 self.repo.get_commit(commit_idx=idx)
287 287
288 288 def test_commit10(self):
289 289 commit10 = self.repo.get_commit(commit_idx=10)
290 290 README = """===
291 291 VCS
292 292 ===
293 293
294 294 Various Version Control System management abstraction layer for Python.
295 295
296 296 Introduction
297 297 ------------
298 298
299 299 TODO: To be written...
300 300
301 301 """
302 302 node = commit10.get_node('README.rst')
303 303 assert node.kind == NodeKind.FILE
304 304 assert node.content == README
305 305
306 306 def test_local_clone(self):
307 307 clone_path = next(REPO_PATH_GENERATOR)
308 308 self.repo._local_clone(clone_path)
309 309 repo_clone = MercurialRepository(clone_path)
310 310
311 311 assert self.repo.commit_ids == repo_clone.commit_ids
312 312
313 313 def test_local_clone_fails_if_target_exists(self):
314 314 with pytest.raises(RepositoryError):
315 315 self.repo._local_clone(self.repo.path)
316 316
317 317 def test_update(self):
318 318 repo_clone = self.get_clone_repo()
319 319 branches = repo_clone.branches
320 320
321 321 repo_clone._update('default')
322 322 assert branches['default'] == repo_clone._identify()
323 323 repo_clone._update('stable')
324 324 assert branches['stable'] == repo_clone._identify()
325 325
326 326 def test_local_pull_branch(self):
327 327 target_repo = self.get_empty_repo()
328 328 source_repo = self.get_clone_repo()
329 329
330 330 default = Reference(
331 331 'branch', 'default', source_repo.branches['default'])
332 332 target_repo._local_pull(source_repo.path, default)
333 333 target_repo = MercurialRepository(target_repo.path)
334 334 assert (target_repo.branches['default'] ==
335 335 source_repo.branches['default'])
336 336
337 337 stable = Reference('branch', 'stable', source_repo.branches['stable'])
338 338 target_repo._local_pull(source_repo.path, stable)
339 339 target_repo = MercurialRepository(target_repo.path)
340 340 assert target_repo.branches['stable'] == source_repo.branches['stable']
341 341
342 342 def test_local_pull_bookmark(self):
343 343 target_repo = self.get_empty_repo()
344 344 source_repo = self.get_clone_repo()
345 345
346 346 commits = list(source_repo.get_commits(branch_name='default'))
347 347 foo1_id = commits[-5].raw_id
348 348 foo1 = Reference('book', 'foo1', foo1_id)
349 349 source_repo._update(foo1_id)
350 350 source_repo.bookmark('foo1')
351 351
352 352 foo2_id = commits[-3].raw_id
353 353 foo2 = Reference('book', 'foo2', foo2_id)
354 354 source_repo._update(foo2_id)
355 355 source_repo.bookmark('foo2')
356 356
357 357 target_repo._local_pull(source_repo.path, foo1)
358 358 target_repo = MercurialRepository(target_repo.path)
359 359 assert target_repo.branches['default'] == commits[-5].raw_id
360 360
361 361 target_repo._local_pull(source_repo.path, foo2)
362 362 target_repo = MercurialRepository(target_repo.path)
363 363 assert target_repo.branches['default'] == commits[-3].raw_id
364 364
365 365 def test_local_pull_commit(self):
366 366 target_repo = self.get_empty_repo()
367 367 source_repo = self.get_clone_repo()
368 368
369 369 commits = list(source_repo.get_commits(branch_name='default'))
370 370 commit_id = commits[-5].raw_id
371 371 commit = Reference('rev', commit_id, commit_id)
372 372 target_repo._local_pull(source_repo.path, commit)
373 373 target_repo = MercurialRepository(target_repo.path)
374 374 assert target_repo.branches['default'] == commit_id
375 375
376 376 commit_id = commits[-3].raw_id
377 377 commit = Reference('rev', commit_id, commit_id)
378 378 target_repo._local_pull(source_repo.path, commit)
379 379 target_repo = MercurialRepository(target_repo.path)
380 380 assert target_repo.branches['default'] == commit_id
381 381
382 382 def test_local_pull_from_same_repo(self):
383 383 reference = Reference('branch', 'default', None)
384 384 with pytest.raises(ValueError):
385 385 self.repo._local_pull(self.repo.path, reference)
386 386
387 387 def test_validate_pull_reference_raises_on_missing_reference(
388 388 self, vcsbackend_hg):
389 389 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
390 390 reference = Reference(
391 391 'book', 'invalid_reference', 'a' * 40)
392 392
393 393 with pytest.raises(CommitDoesNotExistError):
394 394 target_repo._validate_pull_reference(reference)
395 395
396 396 def test_heads(self):
397 397 assert set(self.repo._heads()) == set(self.repo.branches.values())
398 398
399 399 def test_ancestor(self):
400 400 commits = [
401 401 c.raw_id for c in self.repo.get_commits(branch_name='default')]
402 402 assert self.repo._ancestor(commits[-3], commits[-5]) == commits[-5]
403 403 assert self.repo._ancestor(commits[-5], commits[-3]) == commits[-5]
404 404
405 405 def test_local_push(self):
406 406 target_repo = self.get_empty_repo()
407 407
408 408 revisions = list(self.repo.get_commits(branch_name='default'))
409 409 revision = revisions[-5].raw_id
410 410 self.repo._local_push(revision, target_repo.path)
411 411
412 412 target_repo = MercurialRepository(target_repo.path)
413 413
414 414 assert target_repo.branches['default'] == revision
415 415
416 416 def test_hooks_can_be_enabled_for_local_push(self):
417 417 revision = 'deadbeef'
418 418 repo_path = 'test_group/test_repo'
419 419 with mock.patch.object(self.repo, '_remote') as remote_mock:
420 420 self.repo._local_push(revision, repo_path, enable_hooks=True)
421 421 remote_mock.push.assert_called_once_with(
422 422 [revision], repo_path, hooks=True, push_branches=False)
423 423
424 424 def test_local_merge(self, vcsbackend_hg):
425 425 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
426 426 source_repo = vcsbackend_hg.clone_repo(target_repo)
427 427 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
428 428 target_repo = MercurialRepository(target_repo.path)
429 429 target_rev = target_repo.branches['default']
430 430 target_ref = Reference(
431 431 type='branch', name='default', commit_id=target_rev)
432 432 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
433 433 source_repo = MercurialRepository(source_repo.path)
434 434 source_rev = source_repo.branches['default']
435 435 source_ref = Reference(
436 436 type='branch', name='default', commit_id=source_rev)
437 437
438 438 target_repo._local_pull(source_repo.path, source_ref)
439 439
440 440 merge_message = 'Merge message\n\nDescription:...'
441 441 user_name = 'Albert Einstein'
442 442 user_email = 'albert@einstein.com'
443 443 merge_commit_id, needs_push = target_repo._local_merge(
444 444 target_ref, merge_message, user_name, user_email, source_ref)
445 445 assert needs_push
446 446
447 447 target_repo = MercurialRepository(target_repo.path)
448 448 assert target_repo.commit_ids[-3] == target_rev
449 449 assert target_repo.commit_ids[-2] == source_rev
450 450 last_commit = target_repo.get_commit(merge_commit_id)
451 451 assert last_commit.message.strip() == merge_message
452 452 assert last_commit.author == '%s <%s>' % (user_name, user_email)
453 453
454 454 assert not os.path.exists(
455 455 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
456 456
457 457 def test_local_merge_source_is_fast_forward(self, vcsbackend_hg):
458 458 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
459 459 source_repo = vcsbackend_hg.clone_repo(target_repo)
460 460 target_rev = target_repo.branches['default']
461 461 target_ref = Reference(
462 462 type='branch', name='default', commit_id=target_rev)
463 463 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
464 464 source_repo = MercurialRepository(source_repo.path)
465 465 source_rev = source_repo.branches['default']
466 466 source_ref = Reference(
467 467 type='branch', name='default', commit_id=source_rev)
468 468
469 469 target_repo._local_pull(source_repo.path, source_ref)
470 470
471 471 merge_message = 'Merge message\n\nDescription:...'
472 472 user_name = 'Albert Einstein'
473 473 user_email = 'albert@einstein.com'
474 474 merge_commit_id, needs_push = target_repo._local_merge(
475 475 target_ref, merge_message, user_name, user_email, source_ref)
476 476 assert merge_commit_id == source_rev
477 477 assert needs_push
478 478
479 479 target_repo = MercurialRepository(target_repo.path)
480 480 assert target_repo.commit_ids[-2] == target_rev
481 481 assert target_repo.commit_ids[-1] == source_rev
482 482
483 483 assert not os.path.exists(
484 484 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
485 485
486 486 def test_local_merge_source_is_integrated(self, vcsbackend_hg):
487 487 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
488 488 target_rev = target_repo.branches['default']
489 489 target_ref = Reference(
490 490 type='branch', name='default', commit_id=target_rev)
491 491
492 492 merge_message = 'Merge message\n\nDescription:...'
493 493 user_name = 'Albert Einstein'
494 494 user_email = 'albert@einstein.com'
495 495 merge_commit_id, needs_push = target_repo._local_merge(
496 496 target_ref, merge_message, user_name, user_email, target_ref)
497 497 assert merge_commit_id == target_rev
498 498 assert not needs_push
499 499
500 500 target_repo = MercurialRepository(target_repo.path)
501 501 assert target_repo.commit_ids[-1] == target_rev
502 502
503 503 assert not os.path.exists(
504 504 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
505 505
506 506 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_hg):
507 507 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
508 508 source_repo = vcsbackend_hg.clone_repo(target_repo)
509 509 vcsbackend_hg.add_file(target_repo, 'README_MERGE', 'Version 1')
510 510 target_repo = MercurialRepository(target_repo.path)
511 511 target_rev = target_repo.branches['default']
512 512 target_ref = Reference(
513 513 type='branch', name='default', commit_id=target_rev)
514 514 vcsbackend_hg.add_file(source_repo, 'README_MERGE', 'Version 2')
515 515 source_repo = MercurialRepository(source_repo.path)
516 516 source_rev = source_repo.branches['default']
517 517 source_ref = Reference(
518 518 type='branch', name='default', commit_id=source_rev)
519 519
520 520 target_repo._local_pull(source_repo.path, source_ref)
521 521 with pytest.raises(RepositoryError):
522 522 target_repo._local_merge(
523 523 target_ref, 'merge_message', 'user name', 'user@name.com',
524 524 source_ref)
525 525
526 526 # Check we are not left in an intermediate merge state
527 527 assert not os.path.exists(
528 528 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
529 529
530 530 def test_local_merge_of_two_branches_of_the_same_repo(self, backend_hg):
531 531 commits = [
532 532 {'message': 'a'},
533 533 {'message': 'b', 'branch': 'b'},
534 534 ]
535 535 repo = backend_hg.create_repo(commits)
536 536 commit_ids = backend_hg.commit_ids
537 537 target_ref = Reference(
538 538 type='branch', name='default', commit_id=commit_ids['a'])
539 539 source_ref = Reference(
540 540 type='branch', name='b', commit_id=commit_ids['b'])
541 541 merge_message = 'Merge message\n\nDescription:...'
542 542 user_name = 'Albert Einstein'
543 543 user_email = 'albert@einstein.com'
544 544 vcs_repo = repo.scm_instance()
545 545 merge_commit_id, needs_push = vcs_repo._local_merge(
546 546 target_ref, merge_message, user_name, user_email, source_ref)
547 547 assert merge_commit_id != source_ref.commit_id
548 548 assert needs_push is True
549 549 commit = vcs_repo.get_commit(merge_commit_id)
550 550 assert commit.merge is True
551 551 assert commit.message == merge_message
552 552
553 553 def test_maybe_prepare_merge_workspace(self):
554 554 workspace = self.repo._maybe_prepare_merge_workspace(
555 555 1, 'pr2', 'unused', 'unused2')
556 556
557 557 assert os.path.isdir(workspace)
558 558 workspace_repo = MercurialRepository(workspace)
559 559 assert workspace_repo.branches == self.repo.branches
560 560
561 561 # Calling it a second time should also succeed
562 562 workspace = self.repo._maybe_prepare_merge_workspace(
563 563 1, 'pr2', 'unused', 'unused2')
564 564 assert os.path.isdir(workspace)
565 565
566 566 def test_cleanup_merge_workspace(self):
567 567 workspace = self.repo._maybe_prepare_merge_workspace(
568 568 1, 'pr3', 'unused', 'unused2')
569 569
570 570 assert os.path.isdir(workspace)
571 571 self.repo.cleanup_merge_workspace(1, 'pr3')
572 572
573 573 assert not os.path.exists(workspace)
574 574
575 575 def test_cleanup_merge_workspace_invalid_workspace_id(self):
576 576 # No assert: because in case of a nonexistent workspace this function
577 577 # should still succeed.
578 578 self.repo.cleanup_merge_workspace(1, 'pr4')
579 579
580 580 def test_merge_target_is_bookmark(self, vcsbackend_hg):
581 581 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
582 582 source_repo = vcsbackend_hg.clone_repo(target_repo)
583 583 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
584 584 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
585 585 imc = source_repo.in_memory_commit
586 586 imc.add(FileNode('file_x', content=source_repo.name))
587 587 imc.commit(
588 588 message=u'Automatic commit from repo merge test',
589 589 author=u'Automatic <automatic@rhodecode.com>')
590 590 target_commit = target_repo.get_commit()
591 591 source_commit = source_repo.get_commit()
592 592 default_branch = target_repo.DEFAULT_BRANCH_NAME
593 593 bookmark_name = 'bookmark'
594 594 target_repo._update(default_branch)
595 595 target_repo.bookmark(bookmark_name)
596 596 target_ref = Reference('book', bookmark_name, target_commit.raw_id)
597 597 source_ref = Reference('branch', default_branch, source_commit.raw_id)
598 598 workspace_id = 'test-merge'
599 599 repo_id = repo_id_generator(target_repo.path)
600 600 merge_response = target_repo.merge(
601 601 repo_id, workspace_id, target_ref, source_repo, source_ref,
602 602 'test user', 'test@rhodecode.com', 'merge message 1',
603 603 dry_run=False)
604 604 expected_merge_response = MergeResponse(
605 605 True, True, merge_response.merge_ref,
606 606 MergeFailureReason.NONE)
607 607 assert merge_response == expected_merge_response
608 608
609 609 target_repo = backends.get_backend(vcsbackend_hg.alias)(
610 610 target_repo.path)
611 611 target_commits = list(target_repo.get_commits())
612 612 commit_ids = [c.raw_id for c in target_commits[:-1]]
613 613 assert source_ref.commit_id in commit_ids
614 614 assert target_ref.commit_id in commit_ids
615 615
616 616 merge_commit = target_commits[-1]
617 617 assert merge_commit.raw_id == merge_response.merge_ref.commit_id
618 618 assert merge_commit.message.strip() == 'merge message 1'
619 619 assert merge_commit.author == 'test user <test@rhodecode.com>'
620 620
621 621 # Check the bookmark was updated in the target repo
622 622 assert (
623 623 target_repo.bookmarks[bookmark_name] ==
624 624 merge_response.merge_ref.commit_id)
625 625
626 626 def test_merge_source_is_bookmark(self, vcsbackend_hg):
627 627 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
628 628 source_repo = vcsbackend_hg.clone_repo(target_repo)
629 629 imc = source_repo.in_memory_commit
630 630 imc.add(FileNode('file_x', content=source_repo.name))
631 631 imc.commit(
632 632 message=u'Automatic commit from repo merge test',
633 633 author=u'Automatic <automatic@rhodecode.com>')
634 634 target_commit = target_repo.get_commit()
635 635 source_commit = source_repo.get_commit()
636 636 default_branch = target_repo.DEFAULT_BRANCH_NAME
637 637 bookmark_name = 'bookmark'
638 638 target_ref = Reference('branch', default_branch, target_commit.raw_id)
639 639 source_repo._update(default_branch)
640 640 source_repo.bookmark(bookmark_name)
641 641 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
642 642 workspace_id = 'test-merge'
643 643 repo_id = repo_id_generator(target_repo.path)
644 644 merge_response = target_repo.merge(
645 645 repo_id, workspace_id, target_ref, source_repo, source_ref,
646 646 'test user', 'test@rhodecode.com', 'merge message 1',
647 647 dry_run=False)
648 648 expected_merge_response = MergeResponse(
649 649 True, True, merge_response.merge_ref,
650 650 MergeFailureReason.NONE)
651 651 assert merge_response == expected_merge_response
652 652
653 653 target_repo = backends.get_backend(vcsbackend_hg.alias)(
654 654 target_repo.path)
655 655 target_commits = list(target_repo.get_commits())
656 656 commit_ids = [c.raw_id for c in target_commits]
657 657 assert source_ref.commit_id == commit_ids[-1]
658 658 assert target_ref.commit_id == commit_ids[-2]
659 659
660 660 def test_merge_target_has_multiple_heads(self, vcsbackend_hg):
661 661 target_repo = vcsbackend_hg.create_repo(number_of_commits=2)
662 662 source_repo = vcsbackend_hg.clone_repo(target_repo)
663 663 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
664 664 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
665 665
666 666 # add an extra head to the target repo
667 667 imc = target_repo.in_memory_commit
668 668 imc.add(FileNode('file_x', content='foo'))
669 669 commits = list(target_repo.get_commits())
670 670 imc.commit(
671 671 message=u'Automatic commit from repo merge test',
672 672 author=u'Automatic <automatic@rhodecode.com>', parents=commits[0:1])
673 673
674 674 target_commit = target_repo.get_commit()
675 675 source_commit = source_repo.get_commit()
676 676 default_branch = target_repo.DEFAULT_BRANCH_NAME
677 677 target_repo._update(default_branch)
678 678
679 679 target_ref = Reference('branch', default_branch, target_commit.raw_id)
680 680 source_ref = Reference('branch', default_branch, source_commit.raw_id)
681 681 workspace_id = 'test-merge'
682 682
683 683 assert len(target_repo._heads(branch='default')) == 2
684 684 heads = target_repo._heads(branch='default')
685 685 expected_merge_response = MergeResponse(
686 686 False, False, None,
687 687 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
688 688 metadata={'heads': heads})
689 689 repo_id = repo_id_generator(target_repo.path)
690 690 merge_response = target_repo.merge(
691 691 repo_id, workspace_id, target_ref, source_repo, source_ref,
692 692 'test user', 'test@rhodecode.com', 'merge message 1',
693 693 dry_run=False)
694 694 assert merge_response == expected_merge_response
695 695
696 696 def test_merge_rebase_source_is_updated_bookmark(self, vcsbackend_hg):
697 697 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
698 698 source_repo = vcsbackend_hg.clone_repo(target_repo)
699 699 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
700 700 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
701 701 imc = source_repo.in_memory_commit
702 702 imc.add(FileNode('file_x', content=source_repo.name))
703 703 imc.commit(
704 704 message=u'Automatic commit from repo merge test',
705 705 author=u'Automatic <automatic@rhodecode.com>')
706 706 target_commit = target_repo.get_commit()
707 707 source_commit = source_repo.get_commit()
708 708
709 709 vcsbackend_hg.add_file(source_repo, 'LICENSE', 'LICENSE Info')
710 710
711 711 default_branch = target_repo.DEFAULT_BRANCH_NAME
712 712 bookmark_name = 'bookmark'
713 713 source_repo._update(default_branch)
714 714 source_repo.bookmark(bookmark_name)
715 715
716 716 target_ref = Reference('branch', default_branch, target_commit.raw_id)
717 717 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
718 718 repo_id = repo_id_generator(target_repo.path)
719 719 workspace_id = 'test-merge'
720 720
721 721 merge_response = target_repo.merge(
722 722 repo_id, workspace_id, target_ref, source_repo, source_ref,
723 723 'test user', 'test@rhodecode.com', 'merge message 1',
724 724 dry_run=False, use_rebase=True)
725 725
726 726 expected_merge_response = MergeResponse(
727 727 True, True, merge_response.merge_ref,
728 728 MergeFailureReason.NONE)
729 729 assert merge_response == expected_merge_response
730 730
731 731 target_repo = backends.get_backend(vcsbackend_hg.alias)(
732 732 target_repo.path)
733 733 last_commit = target_repo.get_commit()
734 734 assert last_commit.message == source_commit.message
735 735 assert last_commit.author == source_commit.author
736 736 # This checks that we effectively did a rebase
737 737 assert last_commit.raw_id != source_commit.raw_id
738 738
739 739 # Check the target has only 4 commits: 2 were already in target and
740 740 # only two should have been added
741 741 assert len(target_repo.commit_ids) == 2 + 2
742 742
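Note: the merge tests above all share the same call shape: build Reference(type, name, commit_id) for both sides, call merge(...) on the target backend, and compare against a MergeResponse. A condensed usage sketch of that pattern, assuming dry_run=True only validates the merge instead of writing it back:

from rhodecode.lib.vcs.backends.base import Reference

def dry_run_merge_sketch(target_repo, source_repo, repo_id, workspace_id):
    # resolve the current default-branch heads on both sides
    branch = target_repo.DEFAULT_BRANCH_NAME
    target_ref = Reference('branch', branch, target_repo.branches[branch])
    source_ref = Reference('branch', branch, source_repo.branches[branch])
    return target_repo.merge(
        repo_id, workspace_id, target_ref, source_repo, source_ref,
        'test user', 'test@rhodecode.com', 'merge message', dry_run=True)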
743 743
744 744 class TestGetShadowInstance(object):
745 745
746 746 @pytest.fixture
747 747 def repo(self, vcsbackend_hg, monkeypatch):
748 748 repo = vcsbackend_hg.repo
749 749 monkeypatch.setattr(repo, 'config', mock.Mock())
750 750 monkeypatch.setattr('rhodecode.lib.vcs.connection.Hg', mock.Mock())
751 751 return repo
752 752
753 753 def test_passes_config(self, repo):
754 754 shadow = repo.get_shadow_instance(repo.path)
755 755 assert shadow.config == repo.config.copy()
756 756
757 757 def test_disables_hooks(self, repo):
758 758 shadow = repo.get_shadow_instance(repo.path)
759 759 shadow.config.clear_section.assert_called_once_with('hooks')
760 760
761 761 def test_allows_to_keep_hooks(self, repo):
762 762 shadow = repo.get_shadow_instance(repo.path, enable_hooks=True)
763 763 assert not shadow.config.clear_section.called
764 764
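Note: the three tests above pin the config handling of get_shadow_instance: the shadow gets a copy of the repository config, and its 'hooks' section is cleared unless enable_hooks is passed. A hypothetical reconstruction of just that part (how the shadow repository object itself is built is not shown):

# Hypothetical reconstruction based on the assertions above.
def shadow_config_sketch(repo, enable_hooks=False):
    config = repo.config.copy()
    if not enable_hooks:
        # hooks must not fire for internal shadow-repo operations
        config.clear_section('hooks')
    return config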
765 765
766 766 class TestMercurialCommit(object):
767 767
768 768 def _test_equality(self, commit):
769 769 idx = commit.idx
770 770 assert commit == self.repo.get_commit(commit_idx=idx)
771 771
772 772 def test_equality(self):
773 773 indexes = [0, 10, 20]
774 774 commits = [self.repo.get_commit(commit_idx=idx) for idx in indexes]
775 775 for commit in commits:
776 776 self._test_equality(commit)
777 777
778 778 def test_default_commit(self):
779 779 tip = self.repo.get_commit('tip')
780 780 assert tip == self.repo.get_commit()
781 781 assert tip == self.repo.get_commit(commit_id=None)
782 782 assert tip == self.repo.get_commit(commit_idx=None)
783 783 assert tip == list(self.repo[-1:])[0]
784 784
785 785 def test_root_node(self):
786 786 tip = self.repo.get_commit('tip')
787 787 assert tip.root is tip.get_node('')
788 788
789 789 def test_lazy_fetch(self):
790 790 """
791 791 Test if the commit's nodes expand and are cached as we walk through
792 792 the commit. This test is somewhat hard to write as order of tests
793 793 is a key here. Written by running command after command in a shell.
794 794 """
795 795 commit = self.repo.get_commit(commit_idx=45)
796 796 assert len(commit.nodes) == 0
797 797 root = commit.root
798 798 assert len(commit.nodes) == 1
799 799 assert len(root.nodes) == 8
800 800 # accessing root.nodes updates commit.nodes
801 801 assert len(commit.nodes) == 9
802 802
803 803 docs = root.get_node('docs')
804 804 # we haven't yet accessed anything new as docs dir was already cached
805 805 assert len(commit.nodes) == 9
806 806 assert len(docs.nodes) == 8
807 807 # accessing docs.nodes updates commit.nodes
808 808 assert len(commit.nodes) == 17
809 809
810 810 assert docs is commit.get_node('docs')
811 811 assert docs is root.nodes[0]
812 812 assert docs is root.dirs[0]
813 813 assert docs is commit.get_node('docs')
814 814
815 815 def test_nodes_with_commit(self):
816 816 commit = self.repo.get_commit(commit_idx=45)
817 817 root = commit.root
818 818 docs = root.get_node('docs')
819 819 assert docs is commit.get_node('docs')
820 820 api = docs.get_node('api')
821 821 assert api is commit.get_node('docs/api')
822 822 index = api.get_node('index.rst')
823 823 assert index is commit.get_node('docs/api/index.rst')
824 824 assert index is commit.get_node(
825 825 'docs').get_node('api').get_node('index.rst')
826 826
827 827 def test_branch_and_tags(self):
828 828 commit0 = self.repo.get_commit(commit_idx=0)
829 829 assert commit0.branch == 'default'
830 830 assert commit0.tags == []
831 831
832 832 commit10 = self.repo.get_commit(commit_idx=10)
833 833 assert commit10.branch == 'default'
834 834 assert commit10.tags == []
835 835
836 836 commit44 = self.repo.get_commit(commit_idx=44)
837 837 assert commit44.branch == 'web'
838 838
839 839 tip = self.repo.get_commit('tip')
840 840 assert 'tip' in tip.tags
841 841
842 842 def test_bookmarks(self):
843 843 commit0 = self.repo.get_commit(commit_idx=0)
844 844 assert commit0.bookmarks == []
845 845
846 846 def _test_file_size(self, idx, path, size):
847 847 node = self.repo.get_commit(commit_idx=idx).get_node(path)
848 848 assert node.is_file()
849 849 assert node.size == size
850 850
851 851 def test_file_size(self):
852 852 to_check = (
853 853 (10, 'setup.py', 1068),
854 854 (20, 'setup.py', 1106),
855 855 (60, 'setup.py', 1074),
856 856
857 857 (10, 'vcs/backends/base.py', 2921),
858 858 (20, 'vcs/backends/base.py', 3936),
859 859 (60, 'vcs/backends/base.py', 6189),
860 860 )
861 861 for idx, path, size in to_check:
862 862 self._test_file_size(idx, path, size)
863 863
864 864 def test_file_history_from_commits(self):
865 865 node = self.repo[10].get_node('setup.py')
866 866 commit_ids = [commit.raw_id for commit in node.history]
867 867 assert ['3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == commit_ids
868 868
869 869 node = self.repo[20].get_node('setup.py')
870 870 node_ids = [commit.raw_id for commit in node.history]
871 871 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
872 872 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
873 873
874 874 # special case: we check history from a commit that changed this
875 875 # particular file, which means we verify it is included as well
876 876 node = self.repo.get_commit('eada5a770da98ab0dd7325e29d00e0714f228d09')\
877 877 .get_node('setup.py')
878 878 node_ids = [commit.raw_id for commit in node.history]
879 879 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
880 880 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
881 881
882 882 def test_file_history(self):
883 883 # we can only check if those commits are present in the history
884 884 # as we cannot update this test every time the file is changed
885 885 files = {
886 886 'setup.py': [7, 18, 45, 46, 47, 69, 77],
887 887 'vcs/nodes.py': [
888 888 7, 8, 24, 26, 30, 45, 47, 49, 56, 57, 58, 59, 60, 61, 73, 76],
889 889 'vcs/backends/hg.py': [
890 890 4, 5, 6, 11, 12, 13, 14, 15, 16, 21, 22, 23, 26, 27, 28, 30,
891 891 31, 33, 35, 36, 37, 38, 39, 40, 41, 44, 45, 47, 48, 49, 53, 54,
892 892 55, 58, 60, 61, 67, 68, 69, 70, 73, 77, 78, 79, 82],
893 893 }
894 894 for path, indexes in files.items():
895 895 tip = self.repo.get_commit(commit_idx=indexes[-1])
896 896 node = tip.get_node(path)
897 897 node_indexes = [commit.idx for commit in node.history]
898 898 assert set(indexes).issubset(set(node_indexes)), (
899 899 "We assumed that %s is subset of commits for which file %s "
900 900 "has been changed, and history of that node returned: %s"
901 901 % (indexes, path, node_indexes))
902 902
903 903 def test_file_annotate(self):
904 904 files = {
905 905 'vcs/backends/__init__.py': {
906 906 89: {
907 907 'lines_no': 31,
908 908 'commits': [
909 909 32, 32, 61, 32, 32, 37, 32, 32, 32, 44,
910 910 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
911 911 32, 32, 32, 32, 37, 32, 37, 37, 32,
912 912 32, 32
913 913 ]
914 914 },
915 915 20: {
916 916 'lines_no': 1,
917 917 'commits': [4]
918 918 },
919 919 55: {
920 920 'lines_no': 31,
921 921 'commits': [
922 922 32, 32, 45, 32, 32, 37, 32, 32, 32, 44,
923 923 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
924 924 32, 32, 32, 32, 37, 32, 37, 37, 32,
925 925 32, 32
926 926 ]
927 927 }
928 928 },
929 929 'vcs/exceptions.py': {
930 930 89: {
931 931 'lines_no': 18,
932 932 'commits': [
933 933 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
934 934 16, 16, 17, 16, 16, 18, 18, 18
935 935 ]
936 936 },
937 937 20: {
938 938 'lines_no': 18,
939 939 'commits': [
940 940 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
941 941 16, 16, 17, 16, 16, 18, 18, 18
942 942 ]
943 943 },
944 944 55: {
945 945 'lines_no': 18,
946 946 'commits': [
947 947 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
948 948 17, 16, 16, 18, 18, 18
949 949 ]
950 950 }
951 951 },
952 952 'MANIFEST.in': {
953 953 89: {
954 954 'lines_no': 5,
955 955 'commits': [7, 7, 7, 71, 71]
956 956 },
957 957 20: {
958 958 'lines_no': 3,
959 959 'commits': [7, 7, 7]
960 960 },
961 961 55: {
962 962 'lines_no': 3,
963 963 'commits': [7, 7, 7]
964 964 }
965 965 }
966 966 }
967 967
968 968 for fname, commit_dict in files.items():
969 969 for idx, __ in commit_dict.items():
970 970 commit = self.repo.get_commit(commit_idx=idx)
971 971 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
972 972 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
973 973 assert l1_1 == l1_2
974 974 l1 = [
975 975 x[2]().idx for x in commit.get_file_annotate(fname)]
976 976 l2 = files[fname][idx]['commits']
977 977 assert l1 == l2, (
978 978 "The lists of commit for %s@commit_id%s"
979 979 "from annotation list should match each other,"
980 980 "got \n%s \nvs \n%s " % (fname, idx, l1, l2))
981 981
982 982 def test_commit_state(self):
983 983 """
984 984 Tests which files have been added/changed/removed at a particular commit
985 985 """
986 986
987 987 # commit_id 46ad32a4f974:
988 988 # hg st --rev 46ad32a4f974
989 989 # changed: 13
990 990 # added: 20
991 991 # removed: 1
992 992 changed = set([
993 993 '.hgignore', 'README.rst', 'docs/conf.py', 'docs/index.rst',
994 994 'setup.py', 'tests/test_hg.py', 'tests/test_nodes.py',
995 995 'vcs/__init__.py', 'vcs/backends/__init__.py',
996 996 'vcs/backends/base.py', 'vcs/backends/hg.py', 'vcs/nodes.py',
997 997 'vcs/utils/__init__.py'])
998 998
999 999 added = set([
1000 1000 'docs/api/backends/hg.rst', 'docs/api/backends/index.rst',
1001 1001 'docs/api/index.rst', 'docs/api/nodes.rst',
1002 1002 'docs/api/web/index.rst', 'docs/api/web/simplevcs.rst',
1003 1003 'docs/installation.rst', 'docs/quickstart.rst', 'setup.cfg',
1004 1004 'vcs/utils/baseui_config.py', 'vcs/utils/web.py',
1005 1005 'vcs/web/__init__.py', 'vcs/web/exceptions.py',
1006 1006 'vcs/web/simplevcs/__init__.py', 'vcs/web/simplevcs/exceptions.py',
1007 1007 'vcs/web/simplevcs/middleware.py', 'vcs/web/simplevcs/models.py',
1008 1008 'vcs/web/simplevcs/settings.py', 'vcs/web/simplevcs/utils.py',
1009 1009 'vcs/web/simplevcs/views.py'])
1010 1010
1011 1011 removed = set(['docs/api.rst'])
1012 1012
1013 1013 commit64 = self.repo.get_commit('46ad32a4f974')
1014 1014 assert set((node.path for node in commit64.added)) == added
1015 1015 assert set((node.path for node in commit64.changed)) == changed
1016 1016 assert set((node.path for node in commit64.removed)) == removed
1017 1017
1018 1018 # commit_id b090f22d27d6:
1019 1019 # hg st --rev b090f22d27d6
1020 1020 # changed: 1
1021 1021 # added: 0
1022 1022 # removed: 0
1023 1023 commit88 = self.repo.get_commit('b090f22d27d6')
1024 1024 assert set((node.path for node in commit88.added)) == set()
1025 1025 assert set((node.path for node in commit88.changed)) == \
1026 1026 set(['.hgignore'])
1027 1027 assert set((node.path for node in commit88.removed)) == set()
1028 1028
1029 1029 #
1030 1030 # 85:
1031 1031 # added: 2 [
1032 1032 # 'vcs/utils/diffs.py', 'vcs/web/simplevcs/views/diffs.py']
1033 1033 # changed: 4 ['vcs/web/simplevcs/models.py', ...]
1034 1034 # removed: 1 ['vcs/utils/web.py']
1035 1035 commit85 = self.repo.get_commit(commit_idx=85)
1036 1036 assert set((node.path for node in commit85.added)) == set([
1037 1037 'vcs/utils/diffs.py',
1038 1038 'vcs/web/simplevcs/views/diffs.py'])
1039 1039 assert set((node.path for node in commit85.changed)) == set([
1040 1040 'vcs/web/simplevcs/models.py',
1041 1041 'vcs/web/simplevcs/utils.py',
1042 1042 'vcs/web/simplevcs/views/__init__.py',
1043 1043 'vcs/web/simplevcs/views/repository.py',
1044 1044 ])
1045 1045 assert set((node.path for node in commit85.removed)) == \
1046 1046 set(['vcs/utils/web.py'])
1047 1047
1048 1048 def test_files_state(self):
1049 1049 """
1050 1050 Tests state of FileNodes.
1051 1051 """
1052 1052 commit = self.repo.get_commit(commit_idx=85)
1053 1053 node = commit.get_node('vcs/utils/diffs.py')
1054 1054 assert node.state == NodeState.ADDED
1055 1055 assert node.added
1056 1056 assert not node.changed
1057 1057 assert not node.not_changed
1058 1058 assert not node.removed
1059 1059
1060 1060 commit = self.repo.get_commit(commit_idx=88)
1061 1061 node = commit.get_node('.hgignore')
1062 1062 assert node.state == NodeState.CHANGED
1063 1063 assert not node.added
1064 1064 assert node.changed
1065 1065 assert not node.not_changed
1066 1066 assert not node.removed
1067 1067
1068 1068 commit = self.repo.get_commit(commit_idx=85)
1069 1069 node = commit.get_node('setup.py')
1070 1070 assert node.state == NodeState.NOT_CHANGED
1071 1071 assert not node.added
1072 1072 assert not node.changed
1073 1073 assert node.not_changed
1074 1074 assert not node.removed
1075 1075
1076 1076 # If a node has the REMOVED state then trying to fetch it raises
1077 1077 # NodeDoesNotExistError
1078 1078 commit = self.repo.get_commit(commit_idx=2)
1079 1079 path = 'vcs/backends/BaseRepository.py'
1080 1080 with pytest.raises(NodeDoesNotExistError):
1081 1081 commit.get_node(path)
1082 1082 # but it will be listed in the commit's ``removed`` attribute
1083 1083 assert path in [rf.path for rf in commit.removed]
1084 1084
1085 1085 def test_commit_message_is_unicode(self):
1086 1086 for cm in self.repo:
1087 1087 assert type(cm.message) == unicode
1088 1088
1089 1089 def test_commit_author_is_unicode(self):
1090 1090 for cm in self.repo:
1091 1091 assert type(cm.author) == unicode
1092 1092
1093 1093 def test_repo_files_content_is_unicode(self):
1094 1094 test_commit = self.repo.get_commit(commit_idx=100)
1095 1095 for node in test_commit.get_node('/'):
1096 1096 if node.is_file():
1097 1097 assert type(node.content) == unicode
1098 1098
1099 1099 def test_wrong_path(self):
1100 1100 # There is 'setup.py' in the root dir but not there:
1101 1101 path = 'foo/bar/setup.py'
1102 1102 with pytest.raises(VCSError):
1103 1103 self.repo.get_commit().get_node(path)
1104 1104
1105 1105 def test_author_email(self):
1106 1106 assert 'marcin@python-blog.com' == \
1107 1107 self.repo.get_commit('b986218ba1c9').author_email
1108 1108 assert 'lukasz.balcerzak@python-center.pl' == \
1109 1109 self.repo.get_commit('3803844fdbd3').author_email
1110 1110 assert '' == self.repo.get_commit('84478366594b').author_email
1111 1111
1112 1112 def test_author_username(self):
1113 1113 assert 'Marcin Kuzminski' == \
1114 1114 self.repo.get_commit('b986218ba1c9').author_name
1115 1115 assert 'Lukasz Balcerzak' == \
1116 1116 self.repo.get_commit('3803844fdbd3').author_name
1117 1117 assert 'marcink' == \
1118 1118 self.repo.get_commit('84478366594b').author_name
1119 1119
1120 1120
1121 1121 class TestLargeFileRepo(object):
1122 1122
1123 1123 def test_large_file(self, backend_hg):
1124 1124 repo = backend_hg.create_test_repo('largefiles', make_db_config())
1125 1125
1126 1126 tip = repo.scm_instance().get_commit()
1127 1127 node = tip.get_node('.hglf/thisfileislarge')
1128 1128
1129 1129 lf_node = node.get_largefile_node()
1130 1130
1131 1131 assert lf_node.is_largefile() is True
1132 1132 assert lf_node.size == 1024000
1133 1133 assert lf_node.name == '.hglf/thisfileislarge'
1134 1134
1135 1135
1136 1136 class TestGetBranchName(object):
1137 1137 def test_returns_ref_name_when_type_is_branch(self):
1138 1138 ref = self._create_ref('branch', 'fake-name')
1139 1139 result = self.repo._get_branch_name(ref)
1140 1140 assert result == ref.name
1141 1141
1142 1142 @pytest.mark.parametrize("type_", ("book", "tag"))
1143 1143 def test_queries_remote_when_type_is_not_branch(self, type_):
1144 1144 ref = self._create_ref(type_, 'wrong-fake-name')
1145 1145 with mock.patch.object(self.repo, "_remote") as remote_mock:
1146 1146 remote_mock.ctx_branch.return_value = "fake-name"
1147 1147 result = self.repo._get_branch_name(ref)
1148 1148 assert result == "fake-name"
1149 1149 remote_mock.ctx_branch.assert_called_once_with(ref.commit_id)
1150 1150
1151 1151 def _create_ref(self, type_, name):
1152 1152 ref = mock.Mock()
1153 1153 ref.type = type_
1154 1154 ref.name = 'wrong-fake-name'
1155 1155 ref.commit_id = "deadbeef"
1156 1156 return ref
1157 1157
1158 1158
1159 1159 class TestIsTheSameBranch(object):
1160 1160 def test_returns_true_when_branches_are_equal(self):
1161 1161 source_ref = mock.Mock(name="source-ref")
1162 1162 target_ref = mock.Mock(name="target-ref")
1163 1163 branch_name_patcher = mock.patch.object(
1164 1164 self.repo, "_get_branch_name", return_value="default")
1165 1165 with branch_name_patcher as branch_name_mock:
1166 1166 result = self.repo._is_the_same_branch(source_ref, target_ref)
1167 1167
1168 1168 expected_calls = [mock.call(source_ref), mock.call(target_ref)]
1169 1169 assert branch_name_mock.call_args_list == expected_calls
1170 1170 assert result is True
1171 1171
1172 1172 def test_returns_false_when_branches_are_not_equal(self):
1173 1173 source_ref = mock.Mock(name="source-ref")
1174 1174 source_ref.name = "source-branch"
1175 1175 target_ref = mock.Mock(name="target-ref")
1176 1176 source_ref.name = "target-branch"
1177 1177
1178 1178 def side_effect(ref):
1179 1179 return ref.name
1180 1180
1181 1181 branch_name_patcher = mock.patch.object(
1182 1182 self.repo, "_get_branch_name", side_effect=side_effect)
1183 1183 with branch_name_patcher as branch_name_mock:
1184 1184 result = self.repo._is_the_same_branch(source_ref, target_ref)
1185 1185
1186 1186 expected_calls = [mock.call(source_ref), mock.call(target_ref)]
1187 1187 assert branch_name_mock.call_args_list == expected_calls
1188 1188 assert result is False
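Note: the last two classes pin the helper contract: _get_branch_name resolves a ref to its branch name (asking the vcsserver via ctx_branch for bookmarks and tags), and _is_the_same_branch only compares the two resolved names. Inferred from the mocked expectations above, not from the backend source:

# Hypothetical reconstruction based solely on the assertions above.
def is_the_same_branch_sketch(repo, source_ref, target_ref):
    return (repo._get_branch_name(source_ref) ==
            repo._get_branch_name(target_ref))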