##// END OF EJS Templates
git: replaced some raw subprocess commands with dedicated GIT vcsserver commands.
marcink -
r3862:44edb079 default
parent child Browse files
Show More
@@ -1,507 +1,474 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT commit module
22 GIT commit module
23 """
23 """
24
24
25 import re
25 import re
26 import stat
26 import stat
27 from itertools import chain
27 from itertools import chain
28 from StringIO import StringIO
28 from StringIO import StringIO
29
29
30 from zope.cachedescriptors.property import Lazy as LazyProperty
30 from zope.cachedescriptors.property import Lazy as LazyProperty
31
31
32 from rhodecode.lib.datelib import utcdate_fromtimestamp
32 from rhodecode.lib.datelib import utcdate_fromtimestamp
33 from rhodecode.lib.utils import safe_unicode, safe_str
33 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils2 import safe_int
34 from rhodecode.lib.utils2 import safe_int
35 from rhodecode.lib.vcs.conf import settings
35 from rhodecode.lib.vcs.conf import settings
36 from rhodecode.lib.vcs.backends import base
36 from rhodecode.lib.vcs.backends import base
37 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
37 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
38 from rhodecode.lib.vcs.nodes import (
38 from rhodecode.lib.vcs.nodes import (
39 FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
39 FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
40 ChangedFileNodesGenerator, AddedFileNodesGenerator,
40 ChangedFileNodesGenerator, AddedFileNodesGenerator,
41 RemovedFileNodesGenerator, LargeFileNode)
41 RemovedFileNodesGenerator, LargeFileNode)
42 from rhodecode.lib.vcs.compat import configparser
42 from rhodecode.lib.vcs.compat import configparser
43
43
44
44
class GitCommit(base.BaseCommit):
    """
    Represents state of the repository at single commit id.
    """

    _filter_pre_load = [
        # done through a more complex tree walk on parents
        "affected_files",
        # done through a separate remote call, not part of bulk_request
        "children",
        # done through a more complex tree walk on parents
        "status",
        # mercurial specific property not supported here
        "_file_paths",
        # mercurial specific property not supported here
        'obsolete',
        # mercurial specific property not supported here
        'phase',
        # mercurial specific property not supported here
        'hidden'
    ]

    def __init__(self, repository, raw_id, idx, pre_load=None):
        self.repository = repository
        self._remote = repository._remote
        # TODO: johbo: Tweak of raw_id should not be necessary
        self.raw_id = safe_str(raw_id)
        self.idx = idx

        self._set_bulk_properties(pre_load)

        # caches
        self._stat_modes = {}  # stat info for paths
        self._paths = {}  # path processed with parse_tree
        self.nodes = {}
        self._submodules = None

    def _set_bulk_properties(self, pre_load):
        """
        Fetch the requested `pre_load` attributes with a single remote
        bulk call and store them directly on the instance, bypassing the
        corresponding LazyProperty descriptors.
        """
        if not pre_load:
            return
        # drop attributes that cannot be served by the remote bulk call
        pre_load = [entry for entry in pre_load
                    if entry not in self._filter_pre_load]
        if not pre_load:
            return

        result = self._remote.bulk_request(self.raw_id, pre_load)
        for attr, value in result.items():
            if attr in ["author", "message"]:
                if value:
                    value = safe_unicode(value)
            elif attr == "date":
                value = utcdate_fromtimestamp(*value)
            elif attr == "parents":
                value = self._make_commits(value)
            elif attr == "branch":
                value = value[0] if value else None
            # writing into __dict__ shadows the LazyProperty of the same name
            self.__dict__[attr] = value

    @LazyProperty
    def _commit(self):
        # raw commit object as exposed by the vcsserver remote
        return self._remote[self.raw_id]

    @LazyProperty
    def _tree_id(self):
        return self._remote[self._commit['tree']]['id']

    @LazyProperty
    def id(self):
        return self.raw_id

    @LazyProperty
    def short_id(self):
        return self.raw_id[:12]

    @LazyProperty
    def message(self):
        return safe_unicode(self._remote.message(self.id))

    @LazyProperty
    def committer(self):
        return safe_unicode(self._remote.author(self.id))

    @LazyProperty
    def author(self):
        return safe_unicode(self._remote.author(self.id))

    @LazyProperty
    def date(self):
        unix_ts, tz = self._remote.date(self.raw_id)
        return utcdate_fromtimestamp(unix_ts, tz)

    @LazyProperty
    def status(self):
        """
        Returns modified, added, removed, deleted files for current commit
        """
        return self.changed, self.added, self.removed

    @LazyProperty
    def tags(self):
        """
        Returns list of tag names pointing at this commit.
        """
        tags = [safe_unicode(name) for name,
                commit_id in self.repository.tags.iteritems()
                if commit_id == self.raw_id]
        return tags

    @LazyProperty
    def commit_branches(self):
        """
        Returns list of branch names whose head is exactly this commit.
        """
        branches = []
        for name, commit_id in self.repository.branches.iteritems():
            if commit_id == self.raw_id:
                branches.append(name)
        return branches

    @LazyProperty
    def branch(self):
        branches = self._remote.branch(self.raw_id)

        if branches:
            # actually commit can have multiple branches in git
            return safe_unicode(branches[0])

    def _get_tree_id_for_path(self, path):
        """
        Resolve `path` inside this commit's tree and return a
        ``[tree_id, tree_type]`` pair, caching the result in ``self._paths``
        and the stat mode in ``self._stat_modes``.

        :raises NodeDoesNotExistError: when `path` is not present
        """
        path = safe_str(path)
        if path in self._paths:
            return self._paths[path]

        tree_id = self._tree_id

        path = path.strip('/')
        if path == '':
            # root of the repository is the commit's tree itself
            data = [tree_id, "tree"]
            self._paths[''] = data
            return data

        tree_id, tree_type, tree_mode = \
            self._remote.tree_and_type_for_path(self.raw_id, path)
        if tree_id is None:
            raise self.no_node_at_path(path)

        self._paths[path] = [tree_id, tree_type]
        self._stat_modes[path] = tree_mode

        return self._paths[path]

    def _get_kind(self, path):
        """
        Map the git object type at `path` to a ``NodeKind`` constant,
        or ``None`` for unknown types.
        """
        tree_id, type_ = self._get_tree_id_for_path(path)
        if type_ == 'blob':
            return NodeKind.FILE
        elif type_ == 'tree':
            return NodeKind.DIR
        elif type_ == 'link':
            return NodeKind.SUBMODULE
        return None

    def _get_filectx(self, path):
        """
        Normalize `path` and ensure it points at a file in this commit.

        :raises CommitError: when `path` is not a file
        """
        path = self._fix_path(path)
        if self._get_kind(path) != NodeKind.FILE:
            raise CommitError(
                "File does not exist for commit %s at '%s'" % (self.raw_id, path))
        return path

    def _get_file_nodes(self):
        # flatten the (dirpath, dirnodes, filenodes) walk into file nodes only
        return chain(*(t[2] for t in self.walk()))

    @LazyProperty
    def parents(self):
        """
        Returns list of parent commits.
        """
        parent_ids = self._remote.parents(self.id)
        return self._make_commits(parent_ids)

    @LazyProperty
    def children(self):
        """
        Returns list of child commits.
        """
        children = self._remote.children(self.raw_id)
        return self._make_commits(children)

    def _make_commits(self, commit_ids):
        """
        Turn an iterable of raw commit ids into `GitCommit` instances.
        """
        # NOTE: the previous inner `commit_maker(_commit_id)` helper never
        # used its parameter and only worked via py2 list-comprehension
        # scope leakage; build the list directly instead.
        return [self.repository.get_commit(commit_id=commit_id)
                for commit_id in commit_ids]

    def get_file_mode(self, path):
        """
        Returns stat mode of the file at the given `path`.
        """
        path = safe_str(path)
        # ensure path is traversed, which populates self._stat_modes
        self._get_tree_id_for_path(path)
        return self._stat_modes[path]

    def is_link(self, path):
        return stat.S_ISLNK(self.get_file_mode(path))

    def get_file_content(self, path):
        """
        Returns content of the file at given `path`.
        """
        tree_id, _ = self._get_tree_id_for_path(path)
        return self._remote.blob_as_pretty_string(tree_id)

    def get_file_size(self, path):
        """
        Returns size of the file at given `path`.
        """
        tree_id, _ = self._get_tree_id_for_path(path)
        return self._remote.blob_raw_length(tree_id)

    def get_path_history(self, path, limit=None, pre_load=None):
        """
        Returns history of file as reversed list of `GitCommit` objects for
        which file at given `path` has been modified.
        """
        path = self._get_filectx(path)
        hist = self._remote.node_history(self.raw_id, path, limit)
        return [
            self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
            for commit_id in hist]

    def get_file_annotate(self, path, pre_load=None):
        """
        Returns a generator of four element tuples with
        lineno, commit_id, commit lazy loader and line
        """

        result = self._remote.node_annotate(self.raw_id, path)

        for ln_no, commit_id, content in result:
            # bind commit_id as a default argument: a plain closure would be
            # late-bound and every loader would resolve to the *last* commit
            # once the loop has finished.
            yield (
                ln_no, commit_id,
                lambda commit_id=commit_id: self.repository.get_commit(
                    commit_id=commit_id, pre_load=pre_load),
                content)

    def get_nodes(self, path):
        """
        Returns sorted list of nodes (dirs first, then files) for the
        directory at given `path`.

        :raises CommitError: when `path` is not a directory
        """
        if self._get_kind(path) != NodeKind.DIR:
            raise CommitError(
                "Directory does not exist for commit %s at '%s'" % (self.raw_id, path))
        path = self._fix_path(path)

        tree_id, _ = self._get_tree_id_for_path(path)

        dirnodes = []
        filenodes = []

        # extracted tree ID gives us our files...
        for name, stat_, id_, type_ in self._remote.tree_items(tree_id):
            if type_ == 'link':
                url = self._get_submodule_url('/'.join((path, name)))
                dirnodes.append(SubModuleNode(
                    name, url=url, commit=id_, alias=self.repository.alias))
                continue

            if path != '':
                obj_path = '/'.join((path, name))
            else:
                obj_path = name
            if obj_path not in self._stat_modes:
                self._stat_modes[obj_path] = stat_

            if type_ == 'tree':
                dirnodes.append(DirNode(obj_path, commit=self))
            elif type_ == 'blob':
                filenodes.append(FileNode(obj_path, commit=self, mode=stat_))
            else:
                # was previously `CommitError("... %s", type_)` which passed
                # the placeholder unformatted; interpolate the type properly
                raise CommitError(
                    "Requested object should be Tree or Blob, is %s" % type_)

        nodes = dirnodes + filenodes
        for node in nodes:
            if node.path not in self.nodes:
                self.nodes[node.path] = node
        nodes.sort()
        return nodes

    def get_node(self, path, pre_load=None):
        """
        Returns (and caches) the node at given `path` for this commit.

        :raises NodeDoesNotExistError: when `path` cannot be resolved
        """
        if isinstance(path, unicode):
            path = path.encode('utf-8')
        path = self._fix_path(path)
        if path not in self.nodes:
            try:
                tree_id, type_ = self._get_tree_id_for_path(path)
            except CommitError:
                raise NodeDoesNotExistError(
                    "Cannot find one of parents' directories for a given "
                    "path: %s" % path)

            if type_ == 'link':
                url = self._get_submodule_url(path)
                node = SubModuleNode(path, url=url, commit=tree_id,
                                     alias=self.repository.alias)
            elif type_ == 'tree':
                if path == '':
                    node = RootNode(commit=self)
                else:
                    node = DirNode(path, commit=self)
            elif type_ == 'blob':
                node = FileNode(path, commit=self, pre_load=pre_load)
                self._stat_modes[path] = node.mode
            else:
                raise self.no_node_at_path(path)

            # cache node
            self.nodes[path] = node

        return self.nodes[path]

    def get_largefile_node(self, path):
        """
        Returns a `LargeFileNode` for `path` when the blob is an LFS
        pointer present in the largefiles store, otherwise ``None``.
        """
        tree_id, _ = self._get_tree_id_for_path(path)
        pointer_spec = self._remote.is_large_file(tree_id)

        if pointer_spec:
            # content of that file regular FileNode is the hash of largefile
            file_id = pointer_spec.get('oid_hash')
            if self._remote.in_largefiles_store(file_id):
                lf_path = self._remote.store_path(file_id)
                return LargeFileNode(lf_path, commit=self, org_path=path)

    @LazyProperty
    def affected_files(self):
        """
        Gets a fast accessible file changes for given commit
        """
        added, modified, deleted = self._changes_cache
        return list(added.union(modified).union(deleted))

    @LazyProperty
    def _changes_cache(self):
        """
        Computes ``(added, modified, deleted)`` path sets by diffing this
        commit's tree against each parent (or the empty tree for a root
        commit).
        """
        added = set()
        modified = set()
        deleted = set()
        _r = self._remote

        parents = self.parents
        if not self.parents:
            parents = [base.EmptyCommit()]
        for parent in parents:
            if isinstance(parent, base.EmptyCommit):
                oid = None
            else:
                oid = parent.raw_id
            changes = _r.tree_changes(oid, self.raw_id)
            for (oldpath, newpath), (_, _), (_, _) in changes:
                if newpath and oldpath:
                    modified.add(newpath)
                elif newpath and not oldpath:
                    added.add(newpath)
                elif not newpath and oldpath:
                    deleted.add(oldpath)
        return added, modified, deleted

    def _get_paths_for_status(self, status):
        """
        Returns sorted list of paths for given ``status``.

        :param status: one of: *added*, *modified* or *deleted*
        """
        added, modified, deleted = self._changes_cache
        return sorted({
            'added': list(added),
            'modified': list(modified),
            'deleted': list(deleted)}[status]
        )

    @LazyProperty
    def added(self):
        """
        Returns list of added ``FileNode`` objects.
        """
        if not self.parents:
            return list(self._get_file_nodes())
        return AddedFileNodesGenerator(
            [n for n in self._get_paths_for_status('added')], self)

    @LazyProperty
    def changed(self):
        """
        Returns list of modified ``FileNode`` objects.
        """
        if not self.parents:
            return []
        return ChangedFileNodesGenerator(
            [n for n in self._get_paths_for_status('modified')], self)

    @LazyProperty
    def removed(self):
        """
        Returns list of removed ``FileNode`` objects.
        """
        if not self.parents:
            return []
        return RemovedFileNodesGenerator(
            [n for n in self._get_paths_for_status('deleted')], self)

    def _get_submodule_url(self, submodule_path):
        """
        Looks up the url of the submodule at `submodule_path` by parsing
        the commit's ``.gitmodules`` file; returns ``None`` when no
        ``.gitmodules`` exists. The parsed mapping is cached per commit.
        """
        git_modules_path = '.gitmodules'

        if self._submodules is None:
            self._submodules = {}

            try:
                submodules_node = self.get_node(git_modules_path)
            except NodeDoesNotExistError:
                return None

            content = submodules_node.content

            # ConfigParser fails if there are whitespaces
            content = '\n'.join(l.strip() for l in content.split('\n'))

            parser = configparser.ConfigParser()
            parser.readfp(StringIO(content))

            for section in parser.sections():
                path = parser.get(section, 'path')
                url = parser.get(section, 'url')
                if path and url:
                    self._submodules[path.strip('/')] = url

        return self._submodules.get(submodule_path.strip('/'))
@@ -1,1021 +1,1004 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT repository module
22 GIT repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import re
27 import re
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 utcdate_fromtimestamp, makedate, date_astimestamp)
33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.utils2 import CachedProperty
35 from rhodecode.lib.utils2 import CachedProperty
36 from rhodecode.lib.vcs import connection, path as vcspath
36 from rhodecode.lib.vcs import connection, path as vcspath
37 from rhodecode.lib.vcs.backends.base import (
37 from rhodecode.lib.vcs.backends.base import (
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 MergeFailureReason, Reference)
39 MergeFailureReason, Reference)
40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
43 from rhodecode.lib.vcs.exceptions import (
43 from rhodecode.lib.vcs.exceptions import (
44 CommitDoesNotExistError, EmptyRepositoryError,
44 CommitDoesNotExistError, EmptyRepositoryError,
45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
46
46
47
47
# Matches an abbreviated (12 hex chars) or full (40 hex chars) commit id.
# NOTE: the previous pattern '^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$' nested the
# alternation inside a character class, so '[' was accepted as a hex digit and
# the 40-char branch required a literal trailing ']'.
SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')

log = logging.getLogger(__name__)
51
51
52
52
53 class GitRepository(BaseRepository):
53 class GitRepository(BaseRepository):
54 """
54 """
55 Git repository backend.
55 Git repository backend.
56 """
56 """
57 DEFAULT_BRANCH_NAME = 'master'
57 DEFAULT_BRANCH_NAME = 'master'
58
58
59 contact = BaseRepository.DEFAULT_CONTACT
59 contact = BaseRepository.DEFAULT_CONTACT
60
60
61 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 do_workspace_checkout=False, with_wire=None, bare=False):
62 do_workspace_checkout=False, with_wire=None, bare=False):
63
63
64 self.path = safe_str(os.path.abspath(repo_path))
64 self.path = safe_str(os.path.abspath(repo_path))
65 self.config = config if config else self.get_default_config()
65 self.config = config if config else self.get_default_config()
66 self.with_wire = with_wire or {"cache": False} # default should not use cache
66 self.with_wire = with_wire or {"cache": False} # default should not use cache
67
67
68 self._init_repo(create, src_url, do_workspace_checkout, bare)
68 self._init_repo(create, src_url, do_workspace_checkout, bare)
69
69
70 # caches
70 # caches
71 self._commit_ids = {}
71 self._commit_ids = {}
72
72
73 @LazyProperty
73 @LazyProperty
74 def _remote(self):
74 def _remote(self):
75 repo_id = self.path
75 repo_id = self.path
76 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
76 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
77
77
78 @LazyProperty
78 @LazyProperty
79 def bare(self):
79 def bare(self):
80 return self._remote.bare()
80 return self._remote.bare()
81
81
82 @LazyProperty
82 @LazyProperty
83 def head(self):
83 def head(self):
84 return self._remote.head()
84 return self._remote.head()
85
85
86 @CachedProperty
86 @CachedProperty
87 def commit_ids(self):
87 def commit_ids(self):
88 """
88 """
89 Returns list of commit ids, in ascending order. Being lazy
89 Returns list of commit ids, in ascending order. Being lazy
90 attribute allows external tools to inject commit ids from cache.
90 attribute allows external tools to inject commit ids from cache.
91 """
91 """
92 commit_ids = self._get_all_commit_ids()
92 commit_ids = self._get_all_commit_ids()
93 self._rebuild_cache(commit_ids)
93 self._rebuild_cache(commit_ids)
94 return commit_ids
94 return commit_ids
95
95
96 def _rebuild_cache(self, commit_ids):
96 def _rebuild_cache(self, commit_ids):
97 self._commit_ids = dict((commit_id, index)
97 self._commit_ids = dict((commit_id, index)
98 for index, commit_id in enumerate(commit_ids))
98 for index, commit_id in enumerate(commit_ids))
99
99
100 def run_git_command(self, cmd, **opts):
100 def run_git_command(self, cmd, **opts):
101 """
101 """
102 Runs given ``cmd`` as git command and returns tuple
102 Runs given ``cmd`` as git command and returns tuple
103 (stdout, stderr).
103 (stdout, stderr).
104
104
105 :param cmd: git command to be executed
105 :param cmd: git command to be executed
106 :param opts: env options to pass into Subprocess command
106 :param opts: env options to pass into Subprocess command
107 """
107 """
108 if not isinstance(cmd, list):
108 if not isinstance(cmd, list):
109 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
109 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
110
110
111 skip_stderr_log = opts.pop('skip_stderr_log', False)
111 skip_stderr_log = opts.pop('skip_stderr_log', False)
112 out, err = self._remote.run_git_command(cmd, **opts)
112 out, err = self._remote.run_git_command(cmd, **opts)
113 if err and not skip_stderr_log:
113 if err and not skip_stderr_log:
114 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
114 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
115 return out, err
115 return out, err
116
116
117 @staticmethod
117 @staticmethod
118 def check_url(url, config):
118 def check_url(url, config):
119 """
119 """
120 Function will check given url and try to verify if it's a valid
120 Function will check given url and try to verify if it's a valid
121 link. Sometimes it may happened that git will issue basic
121 link. Sometimes it may happened that git will issue basic
122 auth request that can cause whole API to hang when used from python
122 auth request that can cause whole API to hang when used from python
123 or other external calls.
123 or other external calls.
124
124
125 On failures it'll raise urllib2.HTTPError, exception is also thrown
125 On failures it'll raise urllib2.HTTPError, exception is also thrown
126 when the return code is non 200
126 when the return code is non 200
127 """
127 """
128 # check first if it's not an url
128 # check first if it's not an url
129 if os.path.isdir(url) or url.startswith('file:'):
129 if os.path.isdir(url) or url.startswith('file:'):
130 return True
130 return True
131
131
132 if '+' in url.split('://', 1)[0]:
132 if '+' in url.split('://', 1)[0]:
133 url = url.split('+', 1)[1]
133 url = url.split('+', 1)[1]
134
134
135 # Request the _remote to verify the url
135 # Request the _remote to verify the url
136 return connection.Git.check_url(url, config.serialize())
136 return connection.Git.check_url(url, config.serialize())
137
137
138 @staticmethod
138 @staticmethod
139 def is_valid_repository(path):
139 def is_valid_repository(path):
140 if os.path.isdir(os.path.join(path, '.git')):
140 if os.path.isdir(os.path.join(path, '.git')):
141 return True
141 return True
142 # check case of bare repository
142 # check case of bare repository
143 try:
143 try:
144 GitRepository(path)
144 GitRepository(path)
145 return True
145 return True
146 except VCSError:
146 except VCSError:
147 pass
147 pass
148 return False
148 return False
149
149
150 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
150 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
151 bare=False):
151 bare=False):
152 if create and os.path.exists(self.path):
152 if create and os.path.exists(self.path):
153 raise RepositoryError(
153 raise RepositoryError(
154 "Cannot create repository at %s, location already exist"
154 "Cannot create repository at %s, location already exist"
155 % self.path)
155 % self.path)
156
156
157 if bare and do_workspace_checkout:
157 if bare and do_workspace_checkout:
158 raise RepositoryError("Cannot update a bare repository")
158 raise RepositoryError("Cannot update a bare repository")
159 try:
159 try:
160
160
161 if src_url:
161 if src_url:
162 # check URL before any actions
162 # check URL before any actions
163 GitRepository.check_url(src_url, self.config)
163 GitRepository.check_url(src_url, self.config)
164
164
165 if create:
165 if create:
166 os.makedirs(self.path, mode=0o755)
166 os.makedirs(self.path, mode=0o755)
167
167
168 if bare:
168 if bare:
169 self._remote.init_bare()
169 self._remote.init_bare()
170 else:
170 else:
171 self._remote.init()
171 self._remote.init()
172
172
173 if src_url and bare:
173 if src_url and bare:
174 # bare repository only allows a fetch and checkout is not allowed
174 # bare repository only allows a fetch and checkout is not allowed
175 self.fetch(src_url, commit_ids=None)
175 self.fetch(src_url, commit_ids=None)
176 elif src_url:
176 elif src_url:
177 self.pull(src_url, commit_ids=None,
177 self.pull(src_url, commit_ids=None,
178 update_after=do_workspace_checkout)
178 update_after=do_workspace_checkout)
179
179
180 else:
180 else:
181 if not self._remote.assert_correct_path():
181 if not self._remote.assert_correct_path():
182 raise RepositoryError(
182 raise RepositoryError(
183 'Path "%s" does not contain a Git repository' %
183 'Path "%s" does not contain a Git repository' %
184 (self.path,))
184 (self.path,))
185
185
186 # TODO: johbo: check if we have to translate the OSError here
186 # TODO: johbo: check if we have to translate the OSError here
187 except OSError as err:
187 except OSError as err:
188 raise RepositoryError(err)
188 raise RepositoryError(err)
189
189
190 def _get_all_commit_ids(self):
190 def _get_all_commit_ids(self):
191 return self._remote.get_all_commit_ids()
191 return self._remote.get_all_commit_ids()
192
192
193 def _get_commit_ids(self, filters=None):
193 def _get_commit_ids(self, filters=None):
194 # we must check if this repo is not empty, since later command
194 # we must check if this repo is not empty, since later command
195 # fails if it is. And it's cheaper to ask than throw the subprocess
195 # fails if it is. And it's cheaper to ask than throw the subprocess
196 # errors
196 # errors
197
197
198 head = self._remote.head(show_exc=False)
198 head = self._remote.head(show_exc=False)
199
199
200 if not head:
200 if not head:
201 return []
201 return []
202
202
203 rev_filter = ['--branches', '--tags']
203 rev_filter = ['--branches', '--tags']
204 extra_filter = []
204 extra_filter = []
205
205
206 if filters:
206 if filters:
207 if filters.get('since'):
207 if filters.get('since'):
208 extra_filter.append('--since=%s' % (filters['since']))
208 extra_filter.append('--since=%s' % (filters['since']))
209 if filters.get('until'):
209 if filters.get('until'):
210 extra_filter.append('--until=%s' % (filters['until']))
210 extra_filter.append('--until=%s' % (filters['until']))
211 if filters.get('branch_name'):
211 if filters.get('branch_name'):
212 rev_filter = []
212 rev_filter = []
213 extra_filter.append(filters['branch_name'])
213 extra_filter.append(filters['branch_name'])
214 rev_filter.extend(extra_filter)
214 rev_filter.extend(extra_filter)
215
215
216 # if filters.get('start') or filters.get('end'):
216 # if filters.get('start') or filters.get('end'):
217 # # skip is offset, max-count is limit
217 # # skip is offset, max-count is limit
218 # if filters.get('start'):
218 # if filters.get('start'):
219 # extra_filter += ' --skip=%s' % filters['start']
219 # extra_filter += ' --skip=%s' % filters['start']
220 # if filters.get('end'):
220 # if filters.get('end'):
221 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
221 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
222
222
223 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
223 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
224 try:
224 try:
225 output, __ = self.run_git_command(cmd)
225 output, __ = self.run_git_command(cmd)
226 except RepositoryError:
226 except RepositoryError:
227 # Can be raised for empty repositories
227 # Can be raised for empty repositories
228 return []
228 return []
229 return output.splitlines()
229 return output.splitlines()
230
230
231 def _lookup_commit(self, commit_id_or_idx, translate_tag=True):
231 def _lookup_commit(self, commit_id_or_idx, translate_tag=True):
232 def is_null(value):
232 def is_null(value):
233 return len(value) == commit_id_or_idx.count('0')
233 return len(value) == commit_id_or_idx.count('0')
234
234
235 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
235 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
236 return self.commit_ids[-1]
236 return self.commit_ids[-1]
237 commit_missing_err = "Commit {} does not exist for `{}`".format(
237 commit_missing_err = "Commit {} does not exist for `{}`".format(
238 *map(safe_str, [commit_id_or_idx, self.name]))
238 *map(safe_str, [commit_id_or_idx, self.name]))
239
239
240 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
240 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
241 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
241 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
242 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
242 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
243 try:
243 try:
244 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
244 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
245 except Exception:
245 except Exception:
246 raise CommitDoesNotExistError(commit_missing_err)
246 raise CommitDoesNotExistError(commit_missing_err)
247
247
248 elif is_bstr:
248 elif is_bstr:
249 # Need to call remote to translate id for tagging scenario
249 # Need to call remote to translate id for tagging scenario
250 try:
250 try:
251 remote_data = self._remote.get_object(commit_id_or_idx)
251 remote_data = self._remote.get_object(commit_id_or_idx)
252 commit_id_or_idx = remote_data["commit_id"]
252 commit_id_or_idx = remote_data["commit_id"]
253 except (CommitDoesNotExistError,):
253 except (CommitDoesNotExistError,):
254 raise CommitDoesNotExistError(commit_missing_err)
254 raise CommitDoesNotExistError(commit_missing_err)
255
255
256 # Ensure we return full id
256 # Ensure we return full id
257 if not SHA_PATTERN.match(str(commit_id_or_idx)):
257 if not SHA_PATTERN.match(str(commit_id_or_idx)):
258 raise CommitDoesNotExistError(
258 raise CommitDoesNotExistError(
259 "Given commit id %s not recognized" % commit_id_or_idx)
259 "Given commit id %s not recognized" % commit_id_or_idx)
260 return commit_id_or_idx
260 return commit_id_or_idx
261
261
262 def get_hook_location(self):
262 def get_hook_location(self):
263 """
263 """
264 returns absolute path to location where hooks are stored
264 returns absolute path to location where hooks are stored
265 """
265 """
266 loc = os.path.join(self.path, 'hooks')
266 loc = os.path.join(self.path, 'hooks')
267 if not self.bare:
267 if not self.bare:
268 loc = os.path.join(self.path, '.git', 'hooks')
268 loc = os.path.join(self.path, '.git', 'hooks')
269 return loc
269 return loc
270
270
271 @LazyProperty
271 @LazyProperty
272 def last_change(self):
272 def last_change(self):
273 """
273 """
274 Returns last change made on this repository as
274 Returns last change made on this repository as
275 `datetime.datetime` object.
275 `datetime.datetime` object.
276 """
276 """
277 try:
277 try:
278 return self.get_commit().date
278 return self.get_commit().date
279 except RepositoryError:
279 except RepositoryError:
280 tzoffset = makedate()[1]
280 tzoffset = makedate()[1]
281 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
281 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
282
282
283 def _get_fs_mtime(self):
283 def _get_fs_mtime(self):
284 idx_loc = '' if self.bare else '.git'
284 idx_loc = '' if self.bare else '.git'
285 # fallback to filesystem
285 # fallback to filesystem
286 in_path = os.path.join(self.path, idx_loc, "index")
286 in_path = os.path.join(self.path, idx_loc, "index")
287 he_path = os.path.join(self.path, idx_loc, "HEAD")
287 he_path = os.path.join(self.path, idx_loc, "HEAD")
288 if os.path.exists(in_path):
288 if os.path.exists(in_path):
289 return os.stat(in_path).st_mtime
289 return os.stat(in_path).st_mtime
290 else:
290 else:
291 return os.stat(he_path).st_mtime
291 return os.stat(he_path).st_mtime
292
292
293 @LazyProperty
293 @LazyProperty
294 def description(self):
294 def description(self):
295 description = self._remote.get_description()
295 description = self._remote.get_description()
296 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
296 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
297
297
298 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
298 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
299 if self.is_empty():
299 if self.is_empty():
300 return OrderedDict()
300 return OrderedDict()
301
301
302 result = []
302 result = []
303 for ref, sha in self._refs.iteritems():
303 for ref, sha in self._refs.iteritems():
304 if ref.startswith(prefix):
304 if ref.startswith(prefix):
305 ref_name = ref
305 ref_name = ref
306 if strip_prefix:
306 if strip_prefix:
307 ref_name = ref[len(prefix):]
307 ref_name = ref[len(prefix):]
308 result.append((safe_unicode(ref_name), sha))
308 result.append((safe_unicode(ref_name), sha))
309
309
310 def get_name(entry):
310 def get_name(entry):
311 return entry[0]
311 return entry[0]
312
312
313 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
313 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
314
314
315 def _get_branches(self):
315 def _get_branches(self):
316 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
316 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
317
317
318 @CachedProperty
318 @CachedProperty
319 def branches(self):
319 def branches(self):
320 return self._get_branches()
320 return self._get_branches()
321
321
322 @CachedProperty
322 @CachedProperty
323 def branches_closed(self):
323 def branches_closed(self):
324 return {}
324 return {}
325
325
326 @CachedProperty
326 @CachedProperty
327 def bookmarks(self):
327 def bookmarks(self):
328 return {}
328 return {}
329
329
330 @CachedProperty
330 @CachedProperty
331 def branches_all(self):
331 def branches_all(self):
332 all_branches = {}
332 all_branches = {}
333 all_branches.update(self.branches)
333 all_branches.update(self.branches)
334 all_branches.update(self.branches_closed)
334 all_branches.update(self.branches_closed)
335 return all_branches
335 return all_branches
336
336
337 @CachedProperty
337 @CachedProperty
338 def tags(self):
338 def tags(self):
339 return self._get_tags()
339 return self._get_tags()
340
340
341 def _get_tags(self):
341 def _get_tags(self):
342 return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
342 return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
343
343
344 def tag(self, name, user, commit_id=None, message=None, date=None,
344 def tag(self, name, user, commit_id=None, message=None, date=None,
345 **kwargs):
345 **kwargs):
346 # TODO: fix this method to apply annotated tags correct with message
346 # TODO: fix this method to apply annotated tags correct with message
347 """
347 """
348 Creates and returns a tag for the given ``commit_id``.
348 Creates and returns a tag for the given ``commit_id``.
349
349
350 :param name: name for new tag
350 :param name: name for new tag
351 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
351 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
352 :param commit_id: commit id for which new tag would be created
352 :param commit_id: commit id for which new tag would be created
353 :param message: message of the tag's commit
353 :param message: message of the tag's commit
354 :param date: date of tag's commit
354 :param date: date of tag's commit
355
355
356 :raises TagAlreadyExistError: if tag with same name already exists
356 :raises TagAlreadyExistError: if tag with same name already exists
357 """
357 """
358 if name in self.tags:
358 if name in self.tags:
359 raise TagAlreadyExistError("Tag %s already exists" % name)
359 raise TagAlreadyExistError("Tag %s already exists" % name)
360 commit = self.get_commit(commit_id=commit_id)
360 commit = self.get_commit(commit_id=commit_id)
361 message = message or "Added tag %s for commit %s" % (name, commit.raw_id)
361 message = message or "Added tag %s for commit %s" % (name, commit.raw_id)
362
362
363 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
363 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
364
364
365 self._invalidate_prop_cache('tags')
365 self._invalidate_prop_cache('tags')
366 self._invalidate_prop_cache('_refs')
366 self._invalidate_prop_cache('_refs')
367
367
368 return commit
368 return commit
369
369
370 def remove_tag(self, name, user, message=None, date=None):
370 def remove_tag(self, name, user, message=None, date=None):
371 """
371 """
372 Removes tag with the given ``name``.
372 Removes tag with the given ``name``.
373
373
374 :param name: name of the tag to be removed
374 :param name: name of the tag to be removed
375 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
375 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
376 :param message: message of the tag's removal commit
376 :param message: message of the tag's removal commit
377 :param date: date of tag's removal commit
377 :param date: date of tag's removal commit
378
378
379 :raises TagDoesNotExistError: if tag with given name does not exists
379 :raises TagDoesNotExistError: if tag with given name does not exists
380 """
380 """
381 if name not in self.tags:
381 if name not in self.tags:
382 raise TagDoesNotExistError("Tag %s does not exist" % name)
382 raise TagDoesNotExistError("Tag %s does not exist" % name)
383
383
384 self._remote.tag_remove(name)
384 self._remote.tag_remove(name)
385 self._invalidate_prop_cache('tags')
385 self._invalidate_prop_cache('tags')
386 self._invalidate_prop_cache('_refs')
386 self._invalidate_prop_cache('_refs')
387
387
388 def _get_refs(self):
388 def _get_refs(self):
389 return self._remote.get_refs()
389 return self._remote.get_refs()
390
390
391 @CachedProperty
391 @CachedProperty
392 def _refs(self):
392 def _refs(self):
393 return self._get_refs()
393 return self._get_refs()
394
394
395 @property
395 @property
396 def _ref_tree(self):
396 def _ref_tree(self):
397 node = tree = {}
397 node = tree = {}
398 for ref, sha in self._refs.iteritems():
398 for ref, sha in self._refs.iteritems():
399 path = ref.split('/')
399 path = ref.split('/')
400 for bit in path[:-1]:
400 for bit in path[:-1]:
401 node = node.setdefault(bit, {})
401 node = node.setdefault(bit, {})
402 node[path[-1]] = sha
402 node[path[-1]] = sha
403 node = tree
403 node = tree
404 return tree
404 return tree
405
405
406 def get_remote_ref(self, ref_name):
406 def get_remote_ref(self, ref_name):
407 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
407 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
408 try:
408 try:
409 return self._refs[ref_key]
409 return self._refs[ref_key]
410 except Exception:
410 except Exception:
411 return
411 return
412
412
413 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=True):
413 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=True):
414 """
414 """
415 Returns `GitCommit` object representing commit from git repository
415 Returns `GitCommit` object representing commit from git repository
416 at the given `commit_id` or head (most recent commit) if None given.
416 at the given `commit_id` or head (most recent commit) if None given.
417 """
417 """
418 if self.is_empty():
418 if self.is_empty():
419 raise EmptyRepositoryError("There are no commits yet")
419 raise EmptyRepositoryError("There are no commits yet")
420
420
421 if commit_id is not None:
421 if commit_id is not None:
422 self._validate_commit_id(commit_id)
422 self._validate_commit_id(commit_id)
423 try:
423 try:
424 # we have cached idx, use it without contacting the remote
424 # we have cached idx, use it without contacting the remote
425 idx = self._commit_ids[commit_id]
425 idx = self._commit_ids[commit_id]
426 return GitCommit(self, commit_id, idx, pre_load=pre_load)
426 return GitCommit(self, commit_id, idx, pre_load=pre_load)
427 except KeyError:
427 except KeyError:
428 pass
428 pass
429
429
430 elif commit_idx is not None:
430 elif commit_idx is not None:
431 self._validate_commit_idx(commit_idx)
431 self._validate_commit_idx(commit_idx)
432 try:
432 try:
433 _commit_id = self.commit_ids[commit_idx]
433 _commit_id = self.commit_ids[commit_idx]
434 if commit_idx < 0:
434 if commit_idx < 0:
435 commit_idx = self.commit_ids.index(_commit_id)
435 commit_idx = self.commit_ids.index(_commit_id)
436 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
436 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
437 except IndexError:
437 except IndexError:
438 commit_id = commit_idx
438 commit_id = commit_idx
439 else:
439 else:
440 commit_id = "tip"
440 commit_id = "tip"
441
441
442 if translate_tag:
442 if translate_tag:
443 commit_id = self._lookup_commit(commit_id)
443 commit_id = self._lookup_commit(commit_id)
444
444
445 try:
445 try:
446 idx = self._commit_ids[commit_id]
446 idx = self._commit_ids[commit_id]
447 except KeyError:
447 except KeyError:
448 idx = -1
448 idx = -1
449
449
450 return GitCommit(self, commit_id, idx, pre_load=pre_load)
450 return GitCommit(self, commit_id, idx, pre_load=pre_load)
451
451
452 def get_commits(
452 def get_commits(
453 self, start_id=None, end_id=None, start_date=None, end_date=None,
453 self, start_id=None, end_id=None, start_date=None, end_date=None,
454 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
454 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
455 """
455 """
456 Returns generator of `GitCommit` objects from start to end (both
456 Returns generator of `GitCommit` objects from start to end (both
457 are inclusive), in ascending date order.
457 are inclusive), in ascending date order.
458
458
459 :param start_id: None, str(commit_id)
459 :param start_id: None, str(commit_id)
460 :param end_id: None, str(commit_id)
460 :param end_id: None, str(commit_id)
461 :param start_date: if specified, commits with commit date less than
461 :param start_date: if specified, commits with commit date less than
462 ``start_date`` would be filtered out from returned set
462 ``start_date`` would be filtered out from returned set
463 :param end_date: if specified, commits with commit date greater than
463 :param end_date: if specified, commits with commit date greater than
464 ``end_date`` would be filtered out from returned set
464 ``end_date`` would be filtered out from returned set
465 :param branch_name: if specified, commits not reachable from given
465 :param branch_name: if specified, commits not reachable from given
466 branch would be filtered out from returned set
466 branch would be filtered out from returned set
467 :param show_hidden: Show hidden commits such as obsolete or hidden from
467 :param show_hidden: Show hidden commits such as obsolete or hidden from
468 Mercurial evolve
468 Mercurial evolve
469 :raise BranchDoesNotExistError: If given `branch_name` does not
469 :raise BranchDoesNotExistError: If given `branch_name` does not
470 exist.
470 exist.
471 :raise CommitDoesNotExistError: If commits for given `start` or
471 :raise CommitDoesNotExistError: If commits for given `start` or
472 `end` could not be found.
472 `end` could not be found.
473
473
474 """
474 """
475 if self.is_empty():
475 if self.is_empty():
476 raise EmptyRepositoryError("There are no commits yet")
476 raise EmptyRepositoryError("There are no commits yet")
477
477
478 self._validate_branch_name(branch_name)
478 self._validate_branch_name(branch_name)
479
479
480 if start_id is not None:
480 if start_id is not None:
481 self._validate_commit_id(start_id)
481 self._validate_commit_id(start_id)
482 if end_id is not None:
482 if end_id is not None:
483 self._validate_commit_id(end_id)
483 self._validate_commit_id(end_id)
484
484
485 start_raw_id = self._lookup_commit(start_id)
485 start_raw_id = self._lookup_commit(start_id)
486 start_pos = self._commit_ids[start_raw_id] if start_id else None
486 start_pos = self._commit_ids[start_raw_id] if start_id else None
487 end_raw_id = self._lookup_commit(end_id)
487 end_raw_id = self._lookup_commit(end_id)
488 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
488 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
489
489
490 if None not in [start_id, end_id] and start_pos > end_pos:
490 if None not in [start_id, end_id] and start_pos > end_pos:
491 raise RepositoryError(
491 raise RepositoryError(
492 "Start commit '%s' cannot be after end commit '%s'" %
492 "Start commit '%s' cannot be after end commit '%s'" %
493 (start_id, end_id))
493 (start_id, end_id))
494
494
495 if end_pos is not None:
495 if end_pos is not None:
496 end_pos += 1
496 end_pos += 1
497
497
498 filter_ = []
498 filter_ = []
499 if branch_name:
499 if branch_name:
500 filter_.append({'branch_name': branch_name})
500 filter_.append({'branch_name': branch_name})
501 if start_date and not end_date:
501 if start_date and not end_date:
502 filter_.append({'since': start_date})
502 filter_.append({'since': start_date})
503 if end_date and not start_date:
503 if end_date and not start_date:
504 filter_.append({'until': end_date})
504 filter_.append({'until': end_date})
505 if start_date and end_date:
505 if start_date and end_date:
506 filter_.append({'since': start_date})
506 filter_.append({'since': start_date})
507 filter_.append({'until': end_date})
507 filter_.append({'until': end_date})
508
508
509 # if start_pos or end_pos:
509 # if start_pos or end_pos:
510 # filter_.append({'start': start_pos})
510 # filter_.append({'start': start_pos})
511 # filter_.append({'end': end_pos})
511 # filter_.append({'end': end_pos})
512
512
513 if filter_:
513 if filter_:
514 revfilters = {
514 revfilters = {
515 'branch_name': branch_name,
515 'branch_name': branch_name,
516 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
516 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
517 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
517 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
518 'start': start_pos,
518 'start': start_pos,
519 'end': end_pos,
519 'end': end_pos,
520 }
520 }
521 commit_ids = self._get_commit_ids(filters=revfilters)
521 commit_ids = self._get_commit_ids(filters=revfilters)
522
522
523 else:
523 else:
524 commit_ids = self.commit_ids
524 commit_ids = self.commit_ids
525
525
526 if start_pos or end_pos:
526 if start_pos or end_pos:
527 commit_ids = commit_ids[start_pos: end_pos]
527 commit_ids = commit_ids[start_pos: end_pos]
528
528
529 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
529 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
530 translate_tag=translate_tags)
530 translate_tag=translate_tags)
531
531
532 def get_diff(
532 def get_diff(
533 self, commit1, commit2, path='', ignore_whitespace=False,
533 self, commit1, commit2, path='', ignore_whitespace=False,
534 context=3, path1=None):
534 context=3, path1=None):
535 """
535 """
536 Returns (git like) *diff*, as plain text. Shows changes introduced by
536 Returns (git like) *diff*, as plain text. Shows changes introduced by
537 ``commit2`` since ``commit1``.
537 ``commit2`` since ``commit1``.
538
538
539 :param commit1: Entry point from which diff is shown. Can be
539 :param commit1: Entry point from which diff is shown. Can be
540 ``self.EMPTY_COMMIT`` - in this case, patch showing all
540 ``self.EMPTY_COMMIT`` - in this case, patch showing all
541 the changes since empty state of the repository until ``commit2``
541 the changes since empty state of the repository until ``commit2``
542 :param commit2: Until which commits changes should be shown.
542 :param commit2: Until which commits changes should be shown.
543 :param ignore_whitespace: If set to ``True``, would not show whitespace
543 :param ignore_whitespace: If set to ``True``, would not show whitespace
544 changes. Defaults to ``False``.
544 changes. Defaults to ``False``.
545 :param context: How many lines before/after changed lines should be
545 :param context: How many lines before/after changed lines should be
546 shown. Defaults to ``3``.
546 shown. Defaults to ``3``.
547 """
547 """
548 self._validate_diff_commits(commit1, commit2)
548 self._validate_diff_commits(commit1, commit2)
549 if path1 is not None and path1 != path:
549 if path1 is not None and path1 != path:
550 raise ValueError("Diff of two different paths not supported.")
550 raise ValueError("Diff of two different paths not supported.")
551
551
552 flags = [
552 if path:
553 '-U%s' % context, '--full-index', '--binary', '-p',
553 file_filter = path
554 '-M', '--abbrev=40']
555 if ignore_whitespace:
556 flags.append('-w')
557
558 if commit1 == self.EMPTY_COMMIT:
559 cmd = ['show'] + flags + [commit2.raw_id]
560 else:
554 else:
561 cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]
555 file_filter = None
562
563 if path:
564 cmd.extend(['--', path])
565
556
566 stdout, __ = self.run_git_command(cmd)
557 diff = self._remote.diff(
567 # If we used 'show' command, strip first few lines (until actual diff
558 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
568 # starts)
559 opt_ignorews=ignore_whitespace,
569 if commit1 == self.EMPTY_COMMIT:
560 context=context)
570 lines = stdout.splitlines()
561 return GitDiff(diff)
571 x = 0
572 for line in lines:
573 if line.startswith('diff'):
574 break
575 x += 1
576 # Append new line just like 'diff' command do
577 stdout = '\n'.join(lines[x:]) + '\n'
578 return GitDiff(stdout)
579
562
580 def strip(self, commit_id, branch_name):
563 def strip(self, commit_id, branch_name):
581 commit = self.get_commit(commit_id=commit_id)
564 commit = self.get_commit(commit_id=commit_id)
582 if commit.merge:
565 if commit.merge:
583 raise Exception('Cannot reset to merge commit')
566 raise Exception('Cannot reset to merge commit')
584
567
585 # parent is going to be the new head now
568 # parent is going to be the new head now
586 commit = commit.parents[0]
569 commit = commit.parents[0]
587 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
570 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
588
571
589 # clear cached properties
572 # clear cached properties
590 self._invalidate_prop_cache('commit_ids')
573 self._invalidate_prop_cache('commit_ids')
591 self._invalidate_prop_cache('_refs')
574 self._invalidate_prop_cache('_refs')
592 self._invalidate_prop_cache('branches')
575 self._invalidate_prop_cache('branches')
593
576
594 return len(self.commit_ids)
577 return len(self.commit_ids)
595
578
596 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
579 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
597 if commit_id1 == commit_id2:
580 if commit_id1 == commit_id2:
598 return commit_id1
581 return commit_id1
599
582
600 if self != repo2:
583 if self != repo2:
601 commits = self._remote.get_missing_revs(
584 commits = self._remote.get_missing_revs(
602 commit_id1, commit_id2, repo2.path)
585 commit_id1, commit_id2, repo2.path)
603 if commits:
586 if commits:
604 commit = repo2.get_commit(commits[-1])
587 commit = repo2.get_commit(commits[-1])
605 if commit.parents:
588 if commit.parents:
606 ancestor_id = commit.parents[0].raw_id
589 ancestor_id = commit.parents[0].raw_id
607 else:
590 else:
608 ancestor_id = None
591 ancestor_id = None
609 else:
592 else:
610 # no commits from other repo, ancestor_id is the commit_id2
593 # no commits from other repo, ancestor_id is the commit_id2
611 ancestor_id = commit_id2
594 ancestor_id = commit_id2
612 else:
595 else:
613 output, __ = self.run_git_command(
596 output, __ = self.run_git_command(
614 ['merge-base', commit_id1, commit_id2])
597 ['merge-base', commit_id1, commit_id2])
615 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
598 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
616
599
617 return ancestor_id
600 return ancestor_id
618
601
619 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
602 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
620 repo1 = self
603 repo1 = self
621 ancestor_id = None
604 ancestor_id = None
622
605
623 if commit_id1 == commit_id2:
606 if commit_id1 == commit_id2:
624 commits = []
607 commits = []
625 elif repo1 != repo2:
608 elif repo1 != repo2:
626 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
609 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
627 repo2.path)
610 repo2.path)
628 commits = [
611 commits = [
629 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
612 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
630 for commit_id in reversed(missing_ids)]
613 for commit_id in reversed(missing_ids)]
631 else:
614 else:
632 output, __ = repo1.run_git_command(
615 output, __ = repo1.run_git_command(
633 ['log', '--reverse', '--pretty=format: %H', '-s',
616 ['log', '--reverse', '--pretty=format: %H', '-s',
634 '%s..%s' % (commit_id1, commit_id2)])
617 '%s..%s' % (commit_id1, commit_id2)])
635 commits = [
618 commits = [
636 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
619 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
637 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
620 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
638
621
639 return commits
622 return commits
640
623
641 @LazyProperty
624 @LazyProperty
642 def in_memory_commit(self):
625 def in_memory_commit(self):
643 """
626 """
644 Returns ``GitInMemoryCommit`` object for this repository.
627 Returns ``GitInMemoryCommit`` object for this repository.
645 """
628 """
646 return GitInMemoryCommit(self)
629 return GitInMemoryCommit(self)
647
630
648 def pull(self, url, commit_ids=None, update_after=False):
631 def pull(self, url, commit_ids=None, update_after=False):
649 """
632 """
650 Pull changes from external location. Pull is different in GIT
633 Pull changes from external location. Pull is different in GIT
651 that fetch since it's doing a checkout
634 that fetch since it's doing a checkout
652
635
653 :param commit_ids: Optional. Can be set to a list of commit ids
636 :param commit_ids: Optional. Can be set to a list of commit ids
654 which shall be pulled from the other repository.
637 which shall be pulled from the other repository.
655 """
638 """
656 refs = None
639 refs = None
657 if commit_ids is not None:
640 if commit_ids is not None:
658 remote_refs = self._remote.get_remote_refs(url)
641 remote_refs = self._remote.get_remote_refs(url)
659 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
642 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
660 self._remote.pull(url, refs=refs, update_after=update_after)
643 self._remote.pull(url, refs=refs, update_after=update_after)
661 self._remote.invalidate_vcs_cache()
644 self._remote.invalidate_vcs_cache()
662
645
663 def fetch(self, url, commit_ids=None):
646 def fetch(self, url, commit_ids=None):
664 """
647 """
665 Fetch all git objects from external location.
648 Fetch all git objects from external location.
666 """
649 """
667 self._remote.sync_fetch(url, refs=commit_ids)
650 self._remote.sync_fetch(url, refs=commit_ids)
668 self._remote.invalidate_vcs_cache()
651 self._remote.invalidate_vcs_cache()
669
652
670 def push(self, url):
653 def push(self, url):
671 refs = None
654 refs = None
672 self._remote.sync_push(url, refs=refs)
655 self._remote.sync_push(url, refs=refs)
673
656
674 def set_refs(self, ref_name, commit_id):
657 def set_refs(self, ref_name, commit_id):
675 self._remote.set_refs(ref_name, commit_id)
658 self._remote.set_refs(ref_name, commit_id)
676 self._invalidate_prop_cache('_refs')
659 self._invalidate_prop_cache('_refs')
677
660
678 def remove_ref(self, ref_name):
661 def remove_ref(self, ref_name):
679 self._remote.remove_ref(ref_name)
662 self._remote.remove_ref(ref_name)
680 self._invalidate_prop_cache('_refs')
663 self._invalidate_prop_cache('_refs')
681
664
682 def _update_server_info(self):
665 def _update_server_info(self):
683 """
666 """
684 runs gits update-server-info command in this repo instance
667 runs gits update-server-info command in this repo instance
685 """
668 """
686 self._remote.update_server_info()
669 self._remote.update_server_info()
687
670
688 def _current_branch(self):
671 def _current_branch(self):
689 """
672 """
690 Return the name of the current branch.
673 Return the name of the current branch.
691
674
692 It only works for non bare repositories (i.e. repositories with a
675 It only works for non bare repositories (i.e. repositories with a
693 working copy)
676 working copy)
694 """
677 """
695 if self.bare:
678 if self.bare:
696 raise RepositoryError('Bare git repos do not have active branches')
679 raise RepositoryError('Bare git repos do not have active branches')
697
680
698 if self.is_empty():
681 if self.is_empty():
699 return None
682 return None
700
683
701 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
684 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
702 return stdout.strip()
685 return stdout.strip()
703
686
704 def _checkout(self, branch_name, create=False, force=False):
687 def _checkout(self, branch_name, create=False, force=False):
705 """
688 """
706 Checkout a branch in the working directory.
689 Checkout a branch in the working directory.
707
690
708 It tries to create the branch if create is True, failing if the branch
691 It tries to create the branch if create is True, failing if the branch
709 already exists.
692 already exists.
710
693
711 It only works for non bare repositories (i.e. repositories with a
694 It only works for non bare repositories (i.e. repositories with a
712 working copy)
695 working copy)
713 """
696 """
714 if self.bare:
697 if self.bare:
715 raise RepositoryError('Cannot checkout branches in a bare git repo')
698 raise RepositoryError('Cannot checkout branches in a bare git repo')
716
699
717 cmd = ['checkout']
700 cmd = ['checkout']
718 if force:
701 if force:
719 cmd.append('-f')
702 cmd.append('-f')
720 if create:
703 if create:
721 cmd.append('-b')
704 cmd.append('-b')
722 cmd.append(branch_name)
705 cmd.append(branch_name)
723 self.run_git_command(cmd, fail_on_stderr=False)
706 self.run_git_command(cmd, fail_on_stderr=False)
724
707
725 def _create_branch(self, branch_name, commit_id):
708 def _create_branch(self, branch_name, commit_id):
726 """
709 """
727 creates a branch in a GIT repo
710 creates a branch in a GIT repo
728 """
711 """
729 self._remote.create_branch(branch_name, commit_id)
712 self._remote.create_branch(branch_name, commit_id)
730
713
731 def _identify(self):
714 def _identify(self):
732 """
715 """
733 Return the current state of the working directory.
716 Return the current state of the working directory.
734 """
717 """
735 if self.bare:
718 if self.bare:
736 raise RepositoryError('Bare git repos do not have active branches')
719 raise RepositoryError('Bare git repos do not have active branches')
737
720
738 if self.is_empty():
721 if self.is_empty():
739 return None
722 return None
740
723
741 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
724 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
742 return stdout.strip()
725 return stdout.strip()
743
726
744 def _local_clone(self, clone_path, branch_name, source_branch=None):
727 def _local_clone(self, clone_path, branch_name, source_branch=None):
745 """
728 """
746 Create a local clone of the current repo.
729 Create a local clone of the current repo.
747 """
730 """
748 # N.B.(skreft): the --branch option is required as otherwise the shallow
731 # N.B.(skreft): the --branch option is required as otherwise the shallow
749 # clone will only fetch the active branch.
732 # clone will only fetch the active branch.
750 cmd = ['clone', '--branch', branch_name,
733 cmd = ['clone', '--branch', branch_name,
751 self.path, os.path.abspath(clone_path)]
734 self.path, os.path.abspath(clone_path)]
752
735
753 self.run_git_command(cmd, fail_on_stderr=False)
736 self.run_git_command(cmd, fail_on_stderr=False)
754
737
755 # if we get the different source branch, make sure we also fetch it for
738 # if we get the different source branch, make sure we also fetch it for
756 # merge conditions
739 # merge conditions
757 if source_branch and source_branch != branch_name:
740 if source_branch and source_branch != branch_name:
758 # check if the ref exists.
741 # check if the ref exists.
759 shadow_repo = GitRepository(os.path.abspath(clone_path))
742 shadow_repo = GitRepository(os.path.abspath(clone_path))
760 if shadow_repo.get_remote_ref(source_branch):
743 if shadow_repo.get_remote_ref(source_branch):
761 cmd = ['fetch', self.path, source_branch]
744 cmd = ['fetch', self.path, source_branch]
762 self.run_git_command(cmd, fail_on_stderr=False)
745 self.run_git_command(cmd, fail_on_stderr=False)
763
746
764 def _local_fetch(self, repository_path, branch_name, use_origin=False):
747 def _local_fetch(self, repository_path, branch_name, use_origin=False):
765 """
748 """
766 Fetch a branch from a local repository.
749 Fetch a branch from a local repository.
767 """
750 """
768 repository_path = os.path.abspath(repository_path)
751 repository_path = os.path.abspath(repository_path)
769 if repository_path == self.path:
752 if repository_path == self.path:
770 raise ValueError('Cannot fetch from the same repository')
753 raise ValueError('Cannot fetch from the same repository')
771
754
772 if use_origin:
755 if use_origin:
773 branch_name = '+{branch}:refs/heads/{branch}'.format(
756 branch_name = '+{branch}:refs/heads/{branch}'.format(
774 branch=branch_name)
757 branch=branch_name)
775
758
776 cmd = ['fetch', '--no-tags', '--update-head-ok',
759 cmd = ['fetch', '--no-tags', '--update-head-ok',
777 repository_path, branch_name]
760 repository_path, branch_name]
778 self.run_git_command(cmd, fail_on_stderr=False)
761 self.run_git_command(cmd, fail_on_stderr=False)
779
762
780 def _local_reset(self, branch_name):
763 def _local_reset(self, branch_name):
781 branch_name = '{}'.format(branch_name)
764 branch_name = '{}'.format(branch_name)
782 cmd = ['reset', '--hard', branch_name, '--']
765 cmd = ['reset', '--hard', branch_name, '--']
783 self.run_git_command(cmd, fail_on_stderr=False)
766 self.run_git_command(cmd, fail_on_stderr=False)
784
767
785 def _last_fetch_heads(self):
768 def _last_fetch_heads(self):
786 """
769 """
787 Return the last fetched heads that need merging.
770 Return the last fetched heads that need merging.
788
771
789 The algorithm is defined at
772 The algorithm is defined at
790 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
773 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
791 """
774 """
792 if not self.bare:
775 if not self.bare:
793 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
776 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
794 else:
777 else:
795 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
778 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
796
779
797 heads = []
780 heads = []
798 with open(fetch_heads_path) as f:
781 with open(fetch_heads_path) as f:
799 for line in f:
782 for line in f:
800 if ' not-for-merge ' in line:
783 if ' not-for-merge ' in line:
801 continue
784 continue
802 line = re.sub('\t.*', '', line, flags=re.DOTALL)
785 line = re.sub('\t.*', '', line, flags=re.DOTALL)
803 heads.append(line)
786 heads.append(line)
804
787
805 return heads
788 return heads
806
789
807 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
790 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
808 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
791 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
809
792
810 def _local_pull(self, repository_path, branch_name, ff_only=True):
793 def _local_pull(self, repository_path, branch_name, ff_only=True):
811 """
794 """
812 Pull a branch from a local repository.
795 Pull a branch from a local repository.
813 """
796 """
814 if self.bare:
797 if self.bare:
815 raise RepositoryError('Cannot pull into a bare git repository')
798 raise RepositoryError('Cannot pull into a bare git repository')
816 # N.B.(skreft): The --ff-only option is to make sure this is a
799 # N.B.(skreft): The --ff-only option is to make sure this is a
817 # fast-forward (i.e., we are only pulling new changes and there are no
800 # fast-forward (i.e., we are only pulling new changes and there are no
818 # conflicts with our current branch)
801 # conflicts with our current branch)
819 # Additionally, that option needs to go before --no-tags, otherwise git
802 # Additionally, that option needs to go before --no-tags, otherwise git
820 # pull complains about it being an unknown flag.
803 # pull complains about it being an unknown flag.
821 cmd = ['pull']
804 cmd = ['pull']
822 if ff_only:
805 if ff_only:
823 cmd.append('--ff-only')
806 cmd.append('--ff-only')
824 cmd.extend(['--no-tags', repository_path, branch_name])
807 cmd.extend(['--no-tags', repository_path, branch_name])
825 self.run_git_command(cmd, fail_on_stderr=False)
808 self.run_git_command(cmd, fail_on_stderr=False)
826
809
827 def _local_merge(self, merge_message, user_name, user_email, heads):
810 def _local_merge(self, merge_message, user_name, user_email, heads):
828 """
811 """
829 Merge the given head into the checked out branch.
812 Merge the given head into the checked out branch.
830
813
831 It will force a merge commit.
814 It will force a merge commit.
832
815
833 Currently it raises an error if the repo is empty, as it is not possible
816 Currently it raises an error if the repo is empty, as it is not possible
834 to create a merge commit in an empty repo.
817 to create a merge commit in an empty repo.
835
818
836 :param merge_message: The message to use for the merge commit.
819 :param merge_message: The message to use for the merge commit.
837 :param heads: the heads to merge.
820 :param heads: the heads to merge.
838 """
821 """
839 if self.bare:
822 if self.bare:
840 raise RepositoryError('Cannot merge into a bare git repository')
823 raise RepositoryError('Cannot merge into a bare git repository')
841
824
842 if not heads:
825 if not heads:
843 return
826 return
844
827
845 if self.is_empty():
828 if self.is_empty():
846 # TODO(skreft): do somehting more robust in this case.
829 # TODO(skreft): do somehting more robust in this case.
847 raise RepositoryError(
830 raise RepositoryError(
848 'Do not know how to merge into empty repositories yet')
831 'Do not know how to merge into empty repositories yet')
849
832
850 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
833 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
851 # commit message. We also specify the user who is doing the merge.
834 # commit message. We also specify the user who is doing the merge.
852 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
835 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
853 '-c', 'user.email=%s' % safe_str(user_email),
836 '-c', 'user.email=%s' % safe_str(user_email),
854 'merge', '--no-ff', '-m', safe_str(merge_message)]
837 'merge', '--no-ff', '-m', safe_str(merge_message)]
855 cmd.extend(heads)
838 cmd.extend(heads)
856 try:
839 try:
857 output = self.run_git_command(cmd, fail_on_stderr=False)
840 output = self.run_git_command(cmd, fail_on_stderr=False)
858 except RepositoryError:
841 except RepositoryError:
859 # Cleanup any merge leftovers
842 # Cleanup any merge leftovers
860 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
843 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
861 raise
844 raise
862
845
863 def _local_push(
846 def _local_push(
864 self, source_branch, repository_path, target_branch,
847 self, source_branch, repository_path, target_branch,
865 enable_hooks=False, rc_scm_data=None):
848 enable_hooks=False, rc_scm_data=None):
866 """
849 """
867 Push the source_branch to the given repository and target_branch.
850 Push the source_branch to the given repository and target_branch.
868
851
869 Currently it if the target_branch is not master and the target repo is
852 Currently it if the target_branch is not master and the target repo is
870 empty, the push will work, but then GitRepository won't be able to find
853 empty, the push will work, but then GitRepository won't be able to find
871 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
854 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
872 pointing to master, which does not exist).
855 pointing to master, which does not exist).
873
856
874 It does not run the hooks in the target repo.
857 It does not run the hooks in the target repo.
875 """
858 """
876 # TODO(skreft): deal with the case in which the target repo is empty,
859 # TODO(skreft): deal with the case in which the target repo is empty,
877 # and the target_branch is not master.
860 # and the target_branch is not master.
878 target_repo = GitRepository(repository_path)
861 target_repo = GitRepository(repository_path)
879 if (not target_repo.bare and
862 if (not target_repo.bare and
880 target_repo._current_branch() == target_branch):
863 target_repo._current_branch() == target_branch):
881 # Git prevents pushing to the checked out branch, so simulate it by
864 # Git prevents pushing to the checked out branch, so simulate it by
882 # pulling into the target repository.
865 # pulling into the target repository.
883 target_repo._local_pull(self.path, source_branch)
866 target_repo._local_pull(self.path, source_branch)
884 else:
867 else:
885 cmd = ['push', os.path.abspath(repository_path),
868 cmd = ['push', os.path.abspath(repository_path),
886 '%s:%s' % (source_branch, target_branch)]
869 '%s:%s' % (source_branch, target_branch)]
887 gitenv = {}
870 gitenv = {}
888 if rc_scm_data:
871 if rc_scm_data:
889 gitenv.update({'RC_SCM_DATA': rc_scm_data})
872 gitenv.update({'RC_SCM_DATA': rc_scm_data})
890
873
891 if not enable_hooks:
874 if not enable_hooks:
892 gitenv['RC_SKIP_HOOKS'] = '1'
875 gitenv['RC_SKIP_HOOKS'] = '1'
893 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
876 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
894
877
895 def _get_new_pr_branch(self, source_branch, target_branch):
878 def _get_new_pr_branch(self, source_branch, target_branch):
896 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
879 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
897 pr_branches = []
880 pr_branches = []
898 for branch in self.branches:
881 for branch in self.branches:
899 if branch.startswith(prefix):
882 if branch.startswith(prefix):
900 pr_branches.append(int(branch[len(prefix):]))
883 pr_branches.append(int(branch[len(prefix):]))
901
884
902 if not pr_branches:
885 if not pr_branches:
903 branch_id = 0
886 branch_id = 0
904 else:
887 else:
905 branch_id = max(pr_branches) + 1
888 branch_id = max(pr_branches) + 1
906
889
907 return '%s%d' % (prefix, branch_id)
890 return '%s%d' % (prefix, branch_id)
908
891
909 def _maybe_prepare_merge_workspace(
892 def _maybe_prepare_merge_workspace(
910 self, repo_id, workspace_id, target_ref, source_ref):
893 self, repo_id, workspace_id, target_ref, source_ref):
911 shadow_repository_path = self._get_shadow_repository_path(
894 shadow_repository_path = self._get_shadow_repository_path(
912 repo_id, workspace_id)
895 repo_id, workspace_id)
913 if not os.path.exists(shadow_repository_path):
896 if not os.path.exists(shadow_repository_path):
914 self._local_clone(
897 self._local_clone(
915 shadow_repository_path, target_ref.name, source_ref.name)
898 shadow_repository_path, target_ref.name, source_ref.name)
916 log.debug('Prepared %s shadow repository in %s',
899 log.debug('Prepared %s shadow repository in %s',
917 self.alias, shadow_repository_path)
900 self.alias, shadow_repository_path)
918
901
919 return shadow_repository_path
902 return shadow_repository_path
920
903
921 def _merge_repo(self, repo_id, workspace_id, target_ref,
904 def _merge_repo(self, repo_id, workspace_id, target_ref,
922 source_repo, source_ref, merge_message,
905 source_repo, source_ref, merge_message,
923 merger_name, merger_email, dry_run=False,
906 merger_name, merger_email, dry_run=False,
924 use_rebase=False, close_branch=False):
907 use_rebase=False, close_branch=False):
925
908
926 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
909 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
927 'rebase' if use_rebase else 'merge', dry_run)
910 'rebase' if use_rebase else 'merge', dry_run)
928 if target_ref.commit_id != self.branches[target_ref.name]:
911 if target_ref.commit_id != self.branches[target_ref.name]:
929 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
912 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
930 target_ref.commit_id, self.branches[target_ref.name])
913 target_ref.commit_id, self.branches[target_ref.name])
931 return MergeResponse(
914 return MergeResponse(
932 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
915 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
933 metadata={'target_ref': target_ref})
916 metadata={'target_ref': target_ref})
934
917
935 shadow_repository_path = self._maybe_prepare_merge_workspace(
918 shadow_repository_path = self._maybe_prepare_merge_workspace(
936 repo_id, workspace_id, target_ref, source_ref)
919 repo_id, workspace_id, target_ref, source_ref)
937 shadow_repo = self.get_shadow_instance(shadow_repository_path)
920 shadow_repo = self.get_shadow_instance(shadow_repository_path)
938
921
939 # checkout source, if it's different. Otherwise we could not
922 # checkout source, if it's different. Otherwise we could not
940 # fetch proper commits for merge testing
923 # fetch proper commits for merge testing
941 if source_ref.name != target_ref.name:
924 if source_ref.name != target_ref.name:
942 if shadow_repo.get_remote_ref(source_ref.name):
925 if shadow_repo.get_remote_ref(source_ref.name):
943 shadow_repo._checkout(source_ref.name, force=True)
926 shadow_repo._checkout(source_ref.name, force=True)
944
927
945 # checkout target, and fetch changes
928 # checkout target, and fetch changes
946 shadow_repo._checkout(target_ref.name, force=True)
929 shadow_repo._checkout(target_ref.name, force=True)
947
930
948 # fetch/reset pull the target, in case it is changed
931 # fetch/reset pull the target, in case it is changed
949 # this handles even force changes
932 # this handles even force changes
950 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
933 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
951 shadow_repo._local_reset(target_ref.name)
934 shadow_repo._local_reset(target_ref.name)
952
935
953 # Need to reload repo to invalidate the cache, or otherwise we cannot
936 # Need to reload repo to invalidate the cache, or otherwise we cannot
954 # retrieve the last target commit.
937 # retrieve the last target commit.
955 shadow_repo = self.get_shadow_instance(shadow_repository_path)
938 shadow_repo = self.get_shadow_instance(shadow_repository_path)
956 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
939 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
957 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
940 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
958 target_ref, target_ref.commit_id,
941 target_ref, target_ref.commit_id,
959 shadow_repo.branches[target_ref.name])
942 shadow_repo.branches[target_ref.name])
960 return MergeResponse(
943 return MergeResponse(
961 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
944 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
962 metadata={'target_ref': target_ref})
945 metadata={'target_ref': target_ref})
963
946
964 # calculate new branch
947 # calculate new branch
965 pr_branch = shadow_repo._get_new_pr_branch(
948 pr_branch = shadow_repo._get_new_pr_branch(
966 source_ref.name, target_ref.name)
949 source_ref.name, target_ref.name)
967 log.debug('using pull-request merge branch: `%s`', pr_branch)
950 log.debug('using pull-request merge branch: `%s`', pr_branch)
968 # checkout to temp branch, and fetch changes
951 # checkout to temp branch, and fetch changes
969 shadow_repo._checkout(pr_branch, create=True)
952 shadow_repo._checkout(pr_branch, create=True)
970 try:
953 try:
971 shadow_repo._local_fetch(source_repo.path, source_ref.name)
954 shadow_repo._local_fetch(source_repo.path, source_ref.name)
972 except RepositoryError:
955 except RepositoryError:
973 log.exception('Failure when doing local fetch on '
956 log.exception('Failure when doing local fetch on '
974 'shadow repo: %s', shadow_repo)
957 'shadow repo: %s', shadow_repo)
975 return MergeResponse(
958 return MergeResponse(
976 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
959 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
977 metadata={'source_ref': source_ref})
960 metadata={'source_ref': source_ref})
978
961
979 merge_ref = None
962 merge_ref = None
980 merge_failure_reason = MergeFailureReason.NONE
963 merge_failure_reason = MergeFailureReason.NONE
981 metadata = {}
964 metadata = {}
982 try:
965 try:
983 shadow_repo._local_merge(merge_message, merger_name, merger_email,
966 shadow_repo._local_merge(merge_message, merger_name, merger_email,
984 [source_ref.commit_id])
967 [source_ref.commit_id])
985 merge_possible = True
968 merge_possible = True
986
969
987 # Need to invalidate the cache, or otherwise we
970 # Need to invalidate the cache, or otherwise we
988 # cannot retrieve the merge commit.
971 # cannot retrieve the merge commit.
989 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
972 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
990 merge_commit_id = shadow_repo.branches[pr_branch]
973 merge_commit_id = shadow_repo.branches[pr_branch]
991
974
992 # Set a reference pointing to the merge commit. This reference may
975 # Set a reference pointing to the merge commit. This reference may
993 # be used to easily identify the last successful merge commit in
976 # be used to easily identify the last successful merge commit in
994 # the shadow repository.
977 # the shadow repository.
995 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
978 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
996 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
979 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
997 except RepositoryError:
980 except RepositoryError:
998 log.exception('Failure when doing local merge on git shadow repo')
981 log.exception('Failure when doing local merge on git shadow repo')
999 merge_possible = False
982 merge_possible = False
1000 merge_failure_reason = MergeFailureReason.MERGE_FAILED
983 merge_failure_reason = MergeFailureReason.MERGE_FAILED
1001
984
1002 if merge_possible and not dry_run:
985 if merge_possible and not dry_run:
1003 try:
986 try:
1004 shadow_repo._local_push(
987 shadow_repo._local_push(
1005 pr_branch, self.path, target_ref.name, enable_hooks=True,
988 pr_branch, self.path, target_ref.name, enable_hooks=True,
1006 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
989 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1007 merge_succeeded = True
990 merge_succeeded = True
1008 except RepositoryError:
991 except RepositoryError:
1009 log.exception(
992 log.exception(
1010 'Failure when doing local push from the shadow '
993 'Failure when doing local push from the shadow '
1011 'repository to the target repository at %s.', self.path)
994 'repository to the target repository at %s.', self.path)
1012 merge_succeeded = False
995 merge_succeeded = False
1013 merge_failure_reason = MergeFailureReason.PUSH_FAILED
996 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1014 metadata['target'] = 'git shadow repo'
997 metadata['target'] = 'git shadow repo'
1015 metadata['merge_commit'] = pr_branch
998 metadata['merge_commit'] = pr_branch
1016 else:
999 else:
1017 merge_succeeded = False
1000 merge_succeeded = False
1018
1001
1019 return MergeResponse(
1002 return MergeResponse(
1020 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1003 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1021 metadata=metadata)
1004 metadata=metadata)
@@ -1,381 +1,380 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG commit module
22 HG commit module
23 """
23 """
24
24
25 import os
25 import os
26
26
27 from zope.cachedescriptors.property import Lazy as LazyProperty
27 from zope.cachedescriptors.property import Lazy as LazyProperty
28
28
29 from rhodecode.lib.datelib import utcdate_fromtimestamp
29 from rhodecode.lib.datelib import utcdate_fromtimestamp
30 from rhodecode.lib.utils import safe_str, safe_unicode
30 from rhodecode.lib.utils import safe_str, safe_unicode
31 from rhodecode.lib.vcs import path as vcspath
31 from rhodecode.lib.vcs import path as vcspath
32 from rhodecode.lib.vcs.backends import base
32 from rhodecode.lib.vcs.backends import base
33 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
33 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
34 from rhodecode.lib.vcs.exceptions import CommitError
34 from rhodecode.lib.vcs.exceptions import CommitError
35 from rhodecode.lib.vcs.nodes import (
35 from rhodecode.lib.vcs.nodes import (
36 AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode,
36 AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode,
37 NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode,
37 NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode,
38 LargeFileNode, LARGEFILE_PREFIX)
38 LargeFileNode, LARGEFILE_PREFIX)
39 from rhodecode.lib.vcs.utils.paths import get_dirs_for_path
39 from rhodecode.lib.vcs.utils.paths import get_dirs_for_path
40
40
41
41
class MercurialCommit(base.BaseCommit):
    """
    Represents state of the repository at the single commit.

    All data is fetched lazily through ``self._remote`` (the vcsserver
    proxy of the owning repository); ``LazyProperty`` caches each value
    on first access.
    """

    _filter_pre_load = [
        # git specific property not supported here
        "_commit",
    ]

    def __init__(self, repository, raw_id, idx, pre_load=None):
        raw_id = safe_str(raw_id)

        self.repository = repository
        self._remote = repository._remote

        self.raw_id = raw_id
        self.idx = idx

        self._set_bulk_properties(pre_load)

        # caches
        self.nodes = {}

    def _set_bulk_properties(self, pre_load):
        """
        Fetch the attributes named in `pre_load` in a single remote call
        and store them on the instance, bypassing the lazy properties.
        """
        if not pre_load:
            return
        # drop attributes that only exist on other backends (e.g. git)
        pre_load = [entry for entry in pre_load
                    if entry not in self._filter_pre_load]
        if not pre_load:
            return

        result = self._remote.bulk_request(self.raw_id, pre_load)
        for attr, value in result.items():
            if attr in ["author", "branch", "message"]:
                value = safe_unicode(value)
            elif attr == "affected_files":
                value = map(safe_unicode, value)
            elif attr == "date":
                value = utcdate_fromtimestamp(*value)
            elif attr in ["children", "parents"]:
                value = self._make_commits(value)
            elif attr in ["phase"]:
                value = self._get_phase_text(value)
            # write straight into __dict__ so LazyProperty sees it as cached
            self.__dict__[attr] = value

    @LazyProperty
    def tags(self):
        """Tag names of the repository that point at this commit."""
        tags = [name for name, commit_id in self.repository.tags.iteritems()
                if commit_id == self.raw_id]
        return tags

    @LazyProperty
    def branch(self):
        """Name of the branch this commit lives on."""
        return safe_unicode(self._remote.ctx_branch(self.raw_id))

    @LazyProperty
    def bookmarks(self):
        """Bookmark names of the repository that point at this commit."""
        bookmarks = [
            name for name, commit_id in self.repository.bookmarks.iteritems()
            if commit_id == self.raw_id]
        return bookmarks

    @LazyProperty
    def message(self):
        """Commit message as unicode."""
        return safe_unicode(self._remote.ctx_description(self.raw_id))

    @LazyProperty
    def committer(self):
        # Mercurial does not distinguish committer from author
        return safe_unicode(self.author)

    @LazyProperty
    def author(self):
        """Commit author as unicode."""
        return safe_unicode(self._remote.ctx_user(self.raw_id))

    @LazyProperty
    def date(self):
        """Commit date as a UTC `datetime`."""
        return utcdate_fromtimestamp(*self._remote.ctx_date(self.raw_id))

    @LazyProperty
    def status(self):
        """
        Returns modified, added, removed, deleted files for current commit
        """
        return self._remote.ctx_status(self.raw_id)

    @LazyProperty
    def _file_paths(self):
        # flat list of all file paths tracked in this commit
        return self._remote.ctx_list(self.raw_id)

    @LazyProperty
    def _dir_paths(self):
        # unique directory paths derived from the file list; '' is the root
        p = list(set(get_dirs_for_path(*self._file_paths)))
        p.insert(0, '')
        return p

    @LazyProperty
    def _paths(self):
        return self._dir_paths + self._file_paths

    @LazyProperty
    def id(self):
        if self.last:
            return u'tip'
        return self.short_id

    @LazyProperty
    def short_id(self):
        return self.raw_id[:12]

    def _make_commits(self, indexes, pre_load=None):
        """Map revision indexes to commit objects, skipping the -1 nullrev."""
        return [self.repository.get_commit(commit_idx=idx, pre_load=pre_load)
                for idx in indexes if idx >= 0]

    @LazyProperty
    def parents(self):
        """
        Returns list of parent commits.
        """
        parents = self._remote.ctx_parents(self.raw_id)
        return self._make_commits(parents)

    def _get_phase_text(self, phase_id):
        # translate mercurial's numeric phase into its canonical name
        return {
            0: 'public',
            1: 'draft',
            2: 'secret',
        }.get(phase_id) or ''

    @LazyProperty
    def phase(self):
        phase_id = self._remote.ctx_phase(self.raw_id)
        phase_text = self._get_phase_text(phase_id)

        return safe_unicode(phase_text)

    @LazyProperty
    def obsolete(self):
        obsolete = self._remote.ctx_obsolete(self.raw_id)
        return obsolete

    @LazyProperty
    def hidden(self):
        hidden = self._remote.ctx_hidden(self.raw_id)
        return hidden

    @LazyProperty
    def children(self):
        """
        Returns list of child commits.
        """
        children = self._remote.ctx_children(self.raw_id)
        return self._make_commits(children)

    def _fix_path(self, path):
        """
        Mercurial keeps filenodes as str so we need to encode from unicode
        to str.
        """
        return safe_str(super(MercurialCommit, self)._fix_path(path))

    def _get_kind(self, path):
        """
        Return `NodeKind.FILE` or `NodeKind.DIR` for `path`; raise
        `CommitError` when the path does not exist in this commit.
        """
        path = self._fix_path(path)
        if path in self._file_paths:
            return NodeKind.FILE
        elif path in self._dir_paths:
            return NodeKind.DIR
        else:
            raise CommitError(
                "Node does not exist at the given path '%s'" % (path, ))

    def _get_filectx(self, path):
        """Normalize `path` and assert it is a file; raise `CommitError` otherwise."""
        path = self._fix_path(path)
        if self._get_kind(path) != NodeKind.FILE:
            raise CommitError(
                "File does not exist for idx %s at '%s'" % (self.raw_id, path))
        return path

    def get_file_mode(self, path):
        """
        Returns stat mode of the file at the given ``path``.
        """
        path = self._get_filectx(path)
        if 'x' in self._remote.fctx_flags(self.raw_id, path):
            return base.FILEMODE_EXECUTABLE
        else:
            return base.FILEMODE_DEFAULT

    def is_link(self, path):
        """Return True when the file at ``path`` is a symlink ('l' flag)."""
        path = self._get_filectx(path)
        return 'l' in self._remote.fctx_flags(self.raw_id, path)

    def get_file_content(self, path):
        """
        Returns content of the file at given ``path``.
        """
        path = self._get_filectx(path)
        return self._remote.fctx_node_data(self.raw_id, path)

    def get_file_size(self, path):
        """
        Returns size of the file at given ``path``.
        """
        path = self._get_filectx(path)
        return self._remote.fctx_size(self.raw_id, path)

    def get_path_history(self, path, limit=None, pre_load=None):
        """
        Returns history of file as reversed list of `MercurialCommit` objects
        for which file at given ``path`` has been modified.
        """
        path = self._get_filectx(path)
        hist = self._remote.node_history(self.raw_id, path, limit)
        return [
            self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
            for commit_id in hist]

    def get_file_annotate(self, path, pre_load=None):
        """
        Returns a generator of four element tuples with
        lineno, commit_id, commit lazy loader and line
        """
        result = self._remote.fctx_annotate(self.raw_id, path)

        for ln_no, commit_id, content in result:
            yield (
                ln_no, commit_id,
                # NOTE: bind commit_id as a default argument; a plain
                # closure late-binds and every loader would resolve to the
                # commit_id of the *last* annotated line once the generator
                # has been fully consumed.
                lambda commit_id=commit_id: self.repository.get_commit(
                    commit_id=commit_id, pre_load=pre_load),
                content)

    def get_nodes(self, path):
        """
        Returns combined ``DirNode`` and ``FileNode`` objects list representing
        state of commit at the given ``path``. If node at the given ``path``
        is not instance of ``DirNode``, CommitError would be raised.
        """

        if self._get_kind(path) != NodeKind.DIR:
            raise CommitError(
                "Directory does not exist for idx %s at '%s'" % (self.raw_id, path))
        path = self._fix_path(path)

        filenodes = [
            FileNode(f, commit=self) for f in self._file_paths
            if os.path.dirname(f) == path]
        # NOTE: this used to be the broken pseudo-ternary
        # ``path == '' and '' or [...]`` -- since '' is falsy, the ``or``
        # always produced the list comprehension, so the '' branch was dead.
        dirs = [
            d for d in self._dir_paths
            if d and vcspath.dirname(d) == path]
        dirnodes = [
            DirNode(d, commit=self) for d in dirs
            if os.path.dirname(d) == path]

        alias = self.repository.alias
        for k, vals in self._submodules.iteritems():
            if vcspath.dirname(k) == path:
                loc = vals[0]
                commit = vals[1]
                dirnodes.append(SubModuleNode(k, url=loc, commit=commit, alias=alias))

        nodes = dirnodes + filenodes
        for node in nodes:
            if node.path not in self.nodes:
                self.nodes[node.path] = node
        nodes.sort()

        return nodes

    def get_node(self, path, pre_load=None):
        """
        Returns `Node` object from the given `path`. If there is no node at
        the given `path`, `NodeDoesNotExistError` would be raised.
        """
        path = self._fix_path(path)

        if path not in self.nodes:
            if path in self._file_paths:
                node = FileNode(path, commit=self, pre_load=pre_load)
            elif path in self._dir_paths:
                if path == '':
                    node = RootNode(commit=self)
                else:
                    node = DirNode(path, commit=self)
            else:
                raise self.no_node_at_path(path)

            # cache node
            self.nodes[path] = node
        return self.nodes[path]

    def get_largefile_node(self, path):
        """
        Return a `LargeFileNode` for `path` when it is tracked by the
        largefiles extension; returns None otherwise.
        """
        if self._remote.is_large_file(path):
            # content of that file regular FileNode is the hash of largefile
            file_id = self.get_file_content(path).strip()

            if self._remote.in_largefiles_store(file_id):
                lf_path = self._remote.store_path(file_id)
                return LargeFileNode(lf_path, commit=self, org_path=path)
            elif self._remote.in_user_cache(file_id):
                lf_path = self._remote.store_path(file_id)
                # promote the user-cache copy into the store via a hardlink
                self._remote.link(file_id, path)
                return LargeFileNode(lf_path, commit=self, org_path=path)

    @LazyProperty
    def _submodules(self):
        """
        Returns a dictionary with submodule information from substate file
        of hg repository.
        """
        return self._remote.ctx_substate(self.raw_id)

    @LazyProperty
    def affected_files(self):
        """
        Gets a fast accessible file changes for given commit
        """
        return self._remote.ctx_files(self.raw_id)

    @property
    def added(self):
        """
        Returns list of added ``FileNode`` objects.
        """
        return AddedFileNodesGenerator([n for n in self.status[1]], self)

    @property
    def changed(self):
        """
        Returns list of modified ``FileNode`` objects.
        """
        return ChangedFileNodesGenerator([n for n in self.status[0]], self)

    @property
    def removed(self):
        """
        Returns list of removed ``FileNode`` objects.
        """
        return RemovedFileNodesGenerator([n for n in self.status[2]], self)
@@ -1,1276 +1,1274 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import datetime
21 import datetime
22 import mock
22 import mock
23 import os
23 import os
24 import sys
24 import sys
25 import shutil
25 import shutil
26
26
27 import pytest
27 import pytest
28
28
29 from rhodecode.lib.utils import make_db_config
29 from rhodecode.lib.utils import make_db_config
30 from rhodecode.lib.vcs.backends.base import Reference
30 from rhodecode.lib.vcs.backends.base import Reference
31 from rhodecode.lib.vcs.backends.git import (
31 from rhodecode.lib.vcs.backends.git import (
32 GitRepository, GitCommit, discover_git_version)
32 GitRepository, GitCommit, discover_git_version)
33 from rhodecode.lib.vcs.exceptions import (
33 from rhodecode.lib.vcs.exceptions import (
34 RepositoryError, VCSError, NodeDoesNotExistError)
34 RepositoryError, VCSError, NodeDoesNotExistError)
35 from rhodecode.lib.vcs.nodes import (
35 from rhodecode.lib.vcs.nodes import (
36 NodeKind, FileNode, DirNode, NodeState, SubModuleNode)
36 NodeKind, FileNode, DirNode, NodeState, SubModuleNode)
37 from rhodecode.tests import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
37 from rhodecode.tests import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
38 from rhodecode.tests.vcs.conftest import BackendTestMixin
38 from rhodecode.tests.vcs.conftest import BackendTestMixin
39
39
40
40
41 pytestmark = pytest.mark.backends("git")
41 pytestmark = pytest.mark.backends("git")
42
42
43
43
44 class TestGitRepository(object):
44 class TestGitRepository(object):
45
45
46 @pytest.fixture(autouse=True)
46 @pytest.fixture(autouse=True)
47 def prepare(self, request, baseapp):
47 def prepare(self, request, baseapp):
48 self.repo = GitRepository(TEST_GIT_REPO, bare=True)
48 self.repo = GitRepository(TEST_GIT_REPO, bare=True)
49 self.repo.count()
49 self.repo.count()
50
50
51 def get_clone_repo(self, tmp_path_factory):
51 def get_clone_repo(self, tmp_path_factory):
52 """
52 """
53 Return a non bare clone of the base repo.
53 Return a non bare clone of the base repo.
54 """
54 """
55 clone_path = tmp_path_factory.mktemp('clone-url')
55 clone_path = tmp_path_factory.mktemp('clone-url')
56 repo_clone = GitRepository(
56 repo_clone = GitRepository(
57 clone_path, create=True, src_url=self.repo.path, bare=False)
57 clone_path, create=True, src_url=self.repo.path, bare=False)
58
58
59 return repo_clone
59 return repo_clone
60
60
61 def get_empty_repo(self, tmp_path_factory, bare=False):
61 def get_empty_repo(self, tmp_path_factory, bare=False):
62 """
62 """
63 Return a non bare empty repo.
63 Return a non bare empty repo.
64 """
64 """
65 clone_path = tmp_path_factory.mktemp('empty-repo')
65 clone_path = tmp_path_factory.mktemp('empty-repo')
66 return GitRepository(clone_path, create=True, bare=bare)
66 return GitRepository(clone_path, create=True, bare=bare)
67
67
68 def test_wrong_repo_path(self):
68 def test_wrong_repo_path(self):
69 wrong_repo_path = '/tmp/errorrepo_git'
69 wrong_repo_path = '/tmp/errorrepo_git'
70 with pytest.raises(RepositoryError):
70 with pytest.raises(RepositoryError):
71 GitRepository(wrong_repo_path)
71 GitRepository(wrong_repo_path)
72
72
73 def test_repo_clone(self, tmp_path_factory):
73 def test_repo_clone(self, tmp_path_factory):
74 repo = GitRepository(TEST_GIT_REPO)
74 repo = GitRepository(TEST_GIT_REPO)
75 clone_path = tmp_path_factory.mktemp('_') + '_' + TEST_GIT_REPO_CLONE
75 clone_path = tmp_path_factory.mktemp('_') + '_' + TEST_GIT_REPO_CLONE
76 repo_clone = GitRepository(
76 repo_clone = GitRepository(
77 clone_path,
77 clone_path,
78 src_url=TEST_GIT_REPO, create=True, do_workspace_checkout=True)
78 src_url=TEST_GIT_REPO, create=True, do_workspace_checkout=True)
79
79
80 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
80 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
81 # Checking hashes of commits should be enough
81 # Checking hashes of commits should be enough
82 for commit in repo.get_commits():
82 for commit in repo.get_commits():
83 raw_id = commit.raw_id
83 raw_id = commit.raw_id
84 assert raw_id == repo_clone.get_commit(raw_id).raw_id
84 assert raw_id == repo_clone.get_commit(raw_id).raw_id
85
85
86 def test_repo_clone_without_create(self):
86 def test_repo_clone_without_create(self):
87 with pytest.raises(RepositoryError):
87 with pytest.raises(RepositoryError):
88 GitRepository(
88 GitRepository(
89 TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)
89 TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)
90
90
91 def test_repo_clone_with_update(self, tmp_path_factory):
91 def test_repo_clone_with_update(self, tmp_path_factory):
92 repo = GitRepository(TEST_GIT_REPO)
92 repo = GitRepository(TEST_GIT_REPO)
93 clone_path = tmp_path_factory.mktemp('_') + '_' + TEST_GIT_REPO_CLONE + '_update'
93 clone_path = tmp_path_factory.mktemp('_') + '_' + TEST_GIT_REPO_CLONE + '_update'
94
94
95 repo_clone = GitRepository(
95 repo_clone = GitRepository(
96 clone_path,
96 clone_path,
97 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=True)
97 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=True)
98 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
98 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
99
99
100 # check if current workdir was updated
100 # check if current workdir was updated
101 fpath = os.path.join(clone_path, 'MANIFEST.in')
101 fpath = os.path.join(clone_path, 'MANIFEST.in')
102 assert os.path.isfile(fpath)
102 assert os.path.isfile(fpath)
103
103
104 def test_repo_clone_without_update(self, tmp_path_factory):
104 def test_repo_clone_without_update(self, tmp_path_factory):
105 repo = GitRepository(TEST_GIT_REPO)
105 repo = GitRepository(TEST_GIT_REPO)
106 clone_path = tmp_path_factory.mktemp('_') + '_' + TEST_GIT_REPO_CLONE + '_without_update'
106 clone_path = tmp_path_factory.mktemp('_') + '_' + TEST_GIT_REPO_CLONE + '_without_update'
107 repo_clone = GitRepository(
107 repo_clone = GitRepository(
108 clone_path,
108 clone_path,
109 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=False)
109 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=False)
110 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
110 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
111 # check if current workdir was *NOT* updated
111 # check if current workdir was *NOT* updated
112 fpath = os.path.join(clone_path, 'MANIFEST.in')
112 fpath = os.path.join(clone_path, 'MANIFEST.in')
113 # Make sure it's not bare repo
113 # Make sure it's not bare repo
114 assert not repo_clone.bare
114 assert not repo_clone.bare
115 assert not os.path.isfile(fpath)
115 assert not os.path.isfile(fpath)
116
116
117 def test_repo_clone_into_bare_repo(self, tmp_path_factory):
117 def test_repo_clone_into_bare_repo(self, tmp_path_factory):
118 repo = GitRepository(TEST_GIT_REPO)
118 repo = GitRepository(TEST_GIT_REPO)
119 clone_path = tmp_path_factory.mktemp('_') + '_' + TEST_GIT_REPO_CLONE + '_bare.git'
119 clone_path = tmp_path_factory.mktemp('_') + '_' + TEST_GIT_REPO_CLONE + '_bare.git'
120 repo_clone = GitRepository(
120 repo_clone = GitRepository(
121 clone_path, create=True, src_url=repo.path, bare=True)
121 clone_path, create=True, src_url=repo.path, bare=True)
122 assert repo_clone.bare
122 assert repo_clone.bare
123
123
124 def test_create_repo_is_not_bare_by_default(self):
124 def test_create_repo_is_not_bare_by_default(self):
125 repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
125 repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
126 assert not repo.bare
126 assert not repo.bare
127
127
def test_create_bare_repo(self):
    """Passing bare=True at creation time yields a bare repository."""
    bare_repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True)
    assert bare_repo.bare
131
131
def test_update_server_info(self):
    """_update_server_info must complete without raising."""
    self.repo._update_server_info()
134
134
def test_fetch(self, vcsbackend_git):
    """fetch() mirrors all commits into an empty bare target.

    Note: fetch() is a git-specific part of the API; only the git
    backend implements it.
    """
    origin = vcsbackend_git.repo
    destination = vcsbackend_git.create_repo(bare=True)
    destination.fetch(origin.path)
    # Re-open the target to sidestep any cached commit state.
    destination = vcsbackend_git.backend(destination.path)
    assert len(origin.commit_ids) == len(destination.commit_ids)
144
144
def test_commit_ids(self):
    """A known sample of commit hashes must all be present.

    The fixture repo has 112 commits by now, so these ids can be
    assumed to stay available.
    """
    known_ids = {
        'c1214f7e79e02fc37156ff215cd71275450cffc3',
        '38b5fe81f109cb111f549bfe9bb6b267e10bc557',
        'fa6600f6848800641328adbf7811fd2372c02ab2',
        '102607b09cdd60e2793929c4f90478be29f85a17',
        '49d3fd156b6f7db46313fac355dca1a0b94a0017',
        '2d1028c054665b962fa3d307adfc923ddd528038',
        'd7e0d30fbcae12c90680eb095a4f5f02505ce501',
        'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
        'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
        '8430a588b43b5d6da365400117c89400326e7992',
        'd955cd312c17b02143c04fa1099a352b04368118',
        'f67b87e5c629c2ee0ba58f85197e423ff28d735b',
        'add63e382e4aabc9e1afdc4bdc24506c269b7618',
        'f298fe1189f1b69779a4423f40b48edf92a703fc',
        'bd9b619eb41994cac43d67cf4ccc8399c1125808',
        '6e125e7c890379446e98980d8ed60fba87d0f6d1',
        'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd',
        '0b05e4ed56c802098dfc813cbe779b2f49e92500',
        '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
        '45223f8f114c64bf4d6f853e3c35a369a6305520',
        'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
        'f5ea29fc42ef67a2a5a7aecff10e1566699acd68',
        '27d48942240f5b91dfda77accd2caac94708cc7d',
        '622f0eb0bafd619d2560c26f80f09e3b0b0d78af',
        'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
    }
    assert known_ids.issubset(set(self.repo.commit_ids))
174
174
def test_slicing(self):
    """Slicing the repo must agree with index-based get_commit lookups."""
    # (start, stop, expected slice length)
    slice_cases = [
        (0, 4, 4), (1, 2, 1), (10, 15, 5), (10, 20, 10), (5, 100, 95)]
    for start, stop, expected_len in slice_cases:
        sliced = list(self.repo[start:stop])
        assert len(sliced) == expected_len
        assert sliced[0] == self.repo.get_commit(commit_idx=start)
        assert sliced[-1] == self.repo.get_commit(commit_idx=stop - 1)
183
183
def test_branches(self):
    """Each branch head must resolve to a GitCommit.

    TODO: Needs more tests. Direct membership asserts for 'master',
    'gittree' and 'web-branch' were removed: those are 'remotes'
    branches for a cloned repo.
    """
    for branch_head in self.repo.branches.values():
        assert isinstance(self.repo.get_commit(branch_head), GitCommit)
192
192
def test_tags(self):
    """Known tags exist and every tag resolves to a GitCommit.

    TODO: Needs more tests.
    """
    assert 'v0.1.1' in self.repo.tags
    assert 'v0.1.2' in self.repo.tags
    for tag_head in self.repo.tags.values():
        assert isinstance(self.repo.get_commit(tag_head), GitCommit)
199
199
def _test_single_commit_cache(self, commit_id):
    """Helper: fetching a commit must populate the repo's commit cache."""
    fetched = self.repo.get_commit(commit_id)
    assert commit_id in self.repo.commits
    # Identity check: the cache must hand back the very same object.
    assert fetched is self.repo.commits[commit_id]
204
204
def test_initial_commit(self):
    """Sanity checks on the very first commit of the fixture repository."""
    first_id = self.repo.commit_ids[0]
    first_commit = self.repo.get_commit(first_id)
    author = first_commit.author

    assert first_commit.message == 'initial import\n'
    assert author == 'Marcin Kuzminski <marcin@python-blog.com>'
    assert author == first_commit.committer

    file_paths = (
        'vcs/__init__.py',
        'vcs/backends/BaseRepository.py',
        'vcs/backends/__init__.py',
    )
    for file_path in file_paths:
        assert isinstance(first_commit.get_node(file_path), FileNode)
    for dir_path in ('', 'vcs', 'vcs/backends'):
        assert isinstance(first_commit.get_node(dir_path), DirNode)

    with pytest.raises(NodeDoesNotExistError):
        first_commit.get_node(path='foobar')

    # Node kind must match whether the path names a dir (with or
    # without trailing slash) or a file.
    for node_path, expected_kind in (
            ('vcs/', NodeKind.DIR),
            ('vcs', NodeKind.DIR),
            ('vcs/__init__.py', NodeKind.FILE)):
        node = first_commit.get_node(node_path)
        assert hasattr(node, 'kind')
        assert node.kind == expected_kind
234
234
def test_not_existing_commit(self):
    """Looking up a bogus commit id must raise RepositoryError."""
    with pytest.raises(RepositoryError):
        self.repo.get_commit('f' * 40)
238
238
def test_commit10(self):
    """The tenth commit carries README.rst with the expected content."""
    tenth_commit = self.repo.get_commit(self.repo.commit_ids[9])
    expected_readme = """===
VCS
===

Various Version Control System management abstraction layer for Python.

Introduction
------------

TODO: To be written...

"""
    readme_node = tenth_commit.get_node('README.rst')
    assert readme_node.kind == NodeKind.FILE
    assert readme_node.content == expected_readme
257
257
def test_head(self):
    """repo.head must equal the raw id of the default (tip) commit."""
    assert self.repo.head == self.repo.get_commit().raw_id
260
260
def test_checkout_with_create(self, tmp_path_factory):
    """_checkout(create=True) creates and switches to a new branch."""
    repo_clone = self.get_clone_repo(tmp_path_factory)

    new_branch = 'new_branch'
    assert repo_clone._current_branch() == 'master'
    assert set(repo_clone.branches) == {'master'}
    repo_clone._checkout(new_branch, create=True)

    # Branches is a lazy property, so recreate the Repo object to see
    # the new branch.
    repo_clone = GitRepository(repo_clone.path)
    assert set(repo_clone.branches) == {'master', new_branch}
    assert repo_clone._current_branch() == new_branch
273
273
def test_checkout(self, tmp_path_factory):
    """Switching back to an existing branch updates the current branch."""
    repo_clone = self.get_clone_repo(tmp_path_factory)

    repo_clone._checkout('new_branch', create=True)
    repo_clone._checkout('master')

    assert repo_clone._current_branch() == 'master'
281
281
def test_checkout_same_branch(self, tmp_path_factory):
    """Checking out the branch that is already current is a no-op."""
    repo_clone = self.get_clone_repo(tmp_path_factory)

    repo_clone._checkout('master')
    assert repo_clone._current_branch() == 'master'
287
287
def test_checkout_branch_already_exists(self, tmp_path_factory):
    """Creating a branch that already exists must raise RepositoryError."""
    repo_clone = self.get_clone_repo(tmp_path_factory)

    with pytest.raises(RepositoryError):
        repo_clone._checkout('master', create=True)
293
293
def test_checkout_bare_repo(self):
    """Checkout is meaningless on a bare repo and must raise."""
    with pytest.raises(RepositoryError):
        self.repo._checkout('master')
297
297
def test_current_branch_bare_repo(self):
    """Querying the current branch of a bare repo must raise."""
    with pytest.raises(RepositoryError):
        self.repo._current_branch()
301
301
def test_current_branch_empty_repo(self, tmp_path_factory):
    """An empty repo has no commits yet, hence no current branch."""
    empty_repo = self.get_empty_repo(tmp_path_factory)
    assert empty_repo._current_branch() is None
305
305
def test_local_clone(self, tmp_path_factory):
    """_local_clone of 'master' reproduces the full commit history."""
    clone_path = tmp_path_factory.mktemp('test-local-clone')
    self.repo._local_clone(clone_path, 'master')
    cloned = GitRepository(clone_path)

    assert self.repo.commit_ids == cloned.commit_ids
312
312
def test_local_clone_with_specific_branch(self, tmp_path_factory):
    """_local_clone honours the requested branch head."""
    source_repo = self.get_clone_repo(tmp_path_factory)

    # Branch the source repo off an older commit (3rd from the tip).
    source_repo._checkout(source_repo.commit_ids[-3])
    source_repo._checkout('new_branch', create=True)

    branch_clone_path = tmp_path_factory.mktemp('git-clone-path-1')
    source_repo._local_clone(branch_clone_path, 'new_branch')
    branch_clone = GitRepository(branch_clone_path)

    # The branch clone stops at the branched-off commit.
    assert source_repo.commit_ids[:-3 + 1] == branch_clone.commit_ids

    master_clone_path = tmp_path_factory.mktemp('git-clone-path-2')
    source_repo._local_clone(master_clone_path, 'master')
    master_clone = GitRepository(master_clone_path)

    assert source_repo.commit_ids == master_clone.commit_ids
332
332
def test_local_clone_fails_if_target_exists(self):
    """Cloning onto an existing path must raise RepositoryError."""
    with pytest.raises(RepositoryError):
        self.repo._local_clone(self.repo.path, 'master')
336
336
def test_local_fetch(self, tmp_path_factory):
    """_local_fetch records the requested branch tip as last fetch head."""
    target_repo = self.get_empty_repo(tmp_path_factory)
    source_repo = self.get_clone_repo(tmp_path_factory)

    # Branch the source repo off an older commit.
    master_tip = source_repo.commit_ids[-1]
    branch_tip = source_repo.commit_ids[-3]
    source_repo._checkout(branch_tip)
    source_repo._checkout('new_branch', create=True)

    target_repo._local_fetch(source_repo.path, 'new_branch')
    assert target_repo._last_fetch_heads() == [branch_tip]

    target_repo._local_fetch(source_repo.path, 'master')
    assert target_repo._last_fetch_heads() == [master_tip]
352
352
def test_local_fetch_from_bare_repo(self, tmp_path_factory):
    """Fetching from a bare source works like fetching from any other."""
    target_repo = self.get_empty_repo(tmp_path_factory)
    target_repo._local_fetch(self.repo.path, 'master')

    master_tip = self.repo.commit_ids[-1]
    assert target_repo._last_fetch_heads() == [master_tip]
359
359
def test_local_fetch_from_same_repo(self):
    """A repo must refuse to fetch from itself."""
    with pytest.raises(ValueError):
        self.repo._local_fetch(self.repo.path, 'master')
363
363
def test_local_fetch_branch_does_not_exist(self, tmp_path_factory):
    """Fetching a non-existent branch must raise RepositoryError."""
    target_repo = self.get_empty_repo(tmp_path_factory)

    with pytest.raises(RepositoryError):
        target_repo._local_fetch(self.repo.path, 'new_branch')
369
369
def test_local_pull(self, tmp_path_factory):
    """_local_pull moves the target's HEAD to the pulled branch tip."""
    target_repo = self.get_empty_repo(tmp_path_factory)
    source_repo = self.get_clone_repo(tmp_path_factory)

    # Branch the source repo off an older commit.
    master_tip = source_repo.commit_ids[-1]
    branch_tip = source_repo.commit_ids[-3]
    source_repo._checkout(branch_tip)
    source_repo._checkout('new_branch', create=True)

    target_repo._local_pull(source_repo.path, 'new_branch')
    # Re-open the target to bypass cached state.
    target_repo = GitRepository(target_repo.path)
    assert target_repo.head == branch_tip

    target_repo._local_pull(source_repo.path, 'master')
    target_repo = GitRepository(target_repo.path)
    assert target_repo.head == master_tip
387
387
def test_local_pull_in_bare_repo(self):
    """Pulling into a bare repo is not supported and must raise."""
    with pytest.raises(RepositoryError):
        self.repo._local_pull(self.repo.path, 'master')
391
391
def test_local_merge(self, tmp_path_factory):
    """A local merge creates a merge commit and leaves no MERGE_HEAD."""
    target_repo = self.get_empty_repo(tmp_path_factory)
    source_repo = self.get_clone_repo(tmp_path_factory)

    # Branch the source repo off an older commit.
    master_tip = source_repo.commit_ids[-1]
    source_repo._checkout(source_repo.commit_ids[-3])
    source_repo._checkout('new_branch', create=True)

    # This is required as one cannot do a -ff-only merge in an empty repo.
    target_repo._local_pull(source_repo.path, 'new_branch')

    target_repo._local_fetch(source_repo.path, 'master')
    merge_message = 'Merge message\n\nDescription:...'
    user_name = 'Albert Einstein'
    user_email = 'albert@einstein.com'
    target_repo._local_merge(
        merge_message, user_name, user_email,
        target_repo._last_fetch_heads())

    # Re-open the target to bypass cached state.
    target_repo = GitRepository(target_repo.path)
    assert target_repo.commit_ids[-2] == master_tip
    merge_commit = target_repo.get_commit(target_repo.head)
    assert merge_commit.message.strip() == merge_message
    assert merge_commit.author == '%s <%s>' % (user_name, user_email)

    # No intermediate merge state may be left behind.
    assert not os.path.exists(
        os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
420
420
def test_local_merge_raises_exception_on_conflict(self, vcsbackend_git):
    """A conflicting merge raises and is rolled back cleanly."""
    target_repo = vcsbackend_git.create_repo(number_of_commits=1)
    vcsbackend_git.ensure_file('README', 'I will conflict with you!!!')

    target_repo._local_fetch(self.repo.path, 'master')
    with pytest.raises(RepositoryError):
        target_repo._local_merge(
            'merge_message', 'user name', 'user@name.com',
            target_repo._last_fetch_heads())

    # Check we are not left in an intermediate merge state.
    assert not os.path.exists(
        os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
434
434
def test_local_merge_into_empty_repo(self, tmp_path_factory):
    """Merging into a repo with no commits must raise RepositoryError."""
    target_repo = self.get_empty_repo(tmp_path_factory)

    # This is required as one cannot do a -ff-only merge in an empty repo.
    target_repo._local_fetch(self.repo.path, 'master')
    with pytest.raises(RepositoryError):
        target_repo._local_merge(
            'merge_message', 'user name', 'user@name.com',
            target_repo._last_fetch_heads())
444
444
def test_local_merge_in_bare_repo(self):
    """Merging inside a bare repo is not supported and must raise."""
    with pytest.raises(RepositoryError):
        self.repo._local_merge(
            'merge_message', 'user name', 'user@name.com', None)
449
449
def test_local_push_non_bare(self, tmp_path_factory):
    """Pushing into a non-bare target creates the branch there."""
    target_repo = self.get_empty_repo(tmp_path_factory)

    pushed_branch = 'pushed_branch'
    self.repo._local_push('master', target_repo.path, pushed_branch)
    # Fix the HEAD of the target repo, or otherwise GitRepository won't
    # report any branches.
    head_path = os.path.join(target_repo.path, '.git', 'HEAD')
    with open(head_path, 'w') as head_file:
        head_file.write('ref: refs/heads/%s' % pushed_branch)

    target_repo = GitRepository(target_repo.path)

    assert target_repo.branches[pushed_branch] == self.repo.branches['master']
464
464
def test_local_push_bare(self, tmp_path_factory):
    """Pushing into a bare target creates the branch there."""
    target_repo = self.get_empty_repo(tmp_path_factory, bare=True)

    pushed_branch = 'pushed_branch'
    self.repo._local_push('master', target_repo.path, pushed_branch)
    # Fix the HEAD of the target repo, or otherwise GitRepository won't
    # report any branches. A bare repo keeps HEAD at its top level.
    head_path = os.path.join(target_repo.path, 'HEAD')
    with open(head_path, 'w') as head_file:
        head_file.write('ref: refs/heads/%s' % pushed_branch)

    target_repo = GitRepository(target_repo.path)

    assert target_repo.branches[pushed_branch] == self.repo.branches['master']
479
479
def test_local_push_non_bare_target_branch_is_checked_out(self, tmp_path_factory):
    """Pushing to the currently checked-out branch of a non-bare target works."""
    target_repo = self.get_clone_repo(tmp_path_factory)

    pushed_branch = 'pushed_branch'
    # Create and check out a branch at an older commit in the target.
    target_repo._checkout(target_repo.commit_ids[-3])
    target_repo._checkout(pushed_branch, create=True)

    self.repo._local_push('master', target_repo.path, pushed_branch)

    target_repo = GitRepository(target_repo.path)

    assert target_repo.branches[pushed_branch] == self.repo.branches['master']
495
495
def test_local_push_raises_exception_on_conflict(self, vcsbackend_git):
    """A non-fast-forward push must raise RepositoryError."""
    target_repo = vcsbackend_git.create_repo(number_of_commits=1)
    with pytest.raises(RepositoryError):
        self.repo._local_push('master', target_repo.path, 'master')
500
500
def test_hooks_can_be_enabled_via_env_variable_for_local_push(self, tmp_path_factory):
    """enable_hooks=True must not inject RC_SKIP_HOOKS into the git env."""
    target_repo = self.get_empty_repo(tmp_path_factory, bare=True)

    with mock.patch.object(self.repo, 'run_git_command') as run_mock:
        self.repo._local_push(
            'master', target_repo.path, 'master', enable_hooks=True)
        extra_env = run_mock.call_args[1]['extra_env']
    assert 'RC_SKIP_HOOKS' not in extra_env
509
509
def _add_failing_hook(self, repo_path, hook_name, bare=False):
    """Install an executable git hook that fails unless RC_SKIP_HOOKS is set.

    :param repo_path: root of the repository to install the hook into.
    :param hook_name: e.g. ``'pre-receive'``.
    :param bare: bare repos keep hooks at the top level, non-bare
        under ``.git/``.
    """
    if bare:
        hook_path = os.path.join(repo_path, 'hooks', hook_name)
    else:
        hook_path = os.path.join(repo_path, '.git', 'hooks', hook_name)
    script_lines = [
        '#!%s' % sys.executable,
        'import os',
        'import sys',
        'if os.environ.get("RC_SKIP_HOOKS"):',
        ' sys.exit(0)',
        'sys.exit(1)',
    ]
    with open(hook_path, 'w') as hook_file:
        hook_file.write('\n'.join(script_lines))
    # Hooks must be executable for git to run them.
    os.chmod(hook_path, 0o755)
525
525
def test_local_push_does_not_execute_hook(self, tmp_path_factory):
    """By default _local_push skips hooks, so a failing hook cannot block it."""
    target_repo = self.get_empty_repo(tmp_path_factory)

    pushed_branch = 'pushed_branch'
    self._add_failing_hook(target_repo.path, 'pre-receive')
    self.repo._local_push('master', target_repo.path, pushed_branch)
    # Fix the HEAD of the target repo, or otherwise GitRepository won't
    # report any branches.
    head_path = os.path.join(target_repo.path, '.git', 'HEAD')
    with open(head_path, 'w') as head_file:
        head_file.write('ref: refs/heads/%s' % pushed_branch)

    target_repo = GitRepository(target_repo.path)

    assert target_repo.branches[pushed_branch] == self.repo.branches['master']
541
541
def test_local_push_executes_hook(self, tmp_path_factory):
    """With enable_hooks=True a failing pre-receive hook aborts the push."""
    target_repo = self.get_empty_repo(tmp_path_factory, bare=True)
    self._add_failing_hook(target_repo.path, 'pre-receive', bare=True)
    with pytest.raises(RepositoryError):
        self.repo._local_push(
            'master', target_repo.path, 'master', enable_hooks=True)
548
548
549 def test_maybe_prepare_merge_workspace(self):
549 def test_maybe_prepare_merge_workspace(self):
550 workspace = self.repo._maybe_prepare_merge_workspace(
550 workspace = self.repo._maybe_prepare_merge_workspace(
551 2, 'pr2', Reference('branch', 'master', 'unused'),
551 2, 'pr2', Reference('branch', 'master', 'unused'),
552 Reference('branch', 'master', 'unused'))
552 Reference('branch', 'master', 'unused'))
553
553
554 assert os.path.isdir(workspace)
554 assert os.path.isdir(workspace)
555 workspace_repo = GitRepository(workspace)
555 workspace_repo = GitRepository(workspace)
556 assert workspace_repo.branches == self.repo.branches
556 assert workspace_repo.branches == self.repo.branches
557
557
558 # Calling it a second time should also succeed
558 # Calling it a second time should also succeed
559 workspace = self.repo._maybe_prepare_merge_workspace(
559 workspace = self.repo._maybe_prepare_merge_workspace(
560 2, 'pr2', Reference('branch', 'master', 'unused'),
560 2, 'pr2', Reference('branch', 'master', 'unused'),
561 Reference('branch', 'master', 'unused'))
561 Reference('branch', 'master', 'unused'))
562 assert os.path.isdir(workspace)
562 assert os.path.isdir(workspace)
563
563
564 def test_maybe_prepare_merge_workspace_different_refs(self):
564 def test_maybe_prepare_merge_workspace_different_refs(self):
565 workspace = self.repo._maybe_prepare_merge_workspace(
565 workspace = self.repo._maybe_prepare_merge_workspace(
566 2, 'pr2', Reference('branch', 'master', 'unused'),
566 2, 'pr2', Reference('branch', 'master', 'unused'),
567 Reference('branch', 'develop', 'unused'))
567 Reference('branch', 'develop', 'unused'))
568
568
569 assert os.path.isdir(workspace)
569 assert os.path.isdir(workspace)
570 workspace_repo = GitRepository(workspace)
570 workspace_repo = GitRepository(workspace)
571 assert workspace_repo.branches == self.repo.branches
571 assert workspace_repo.branches == self.repo.branches
572
572
573 # Calling it a second time should also succeed
573 # Calling it a second time should also succeed
574 workspace = self.repo._maybe_prepare_merge_workspace(
574 workspace = self.repo._maybe_prepare_merge_workspace(
575 2, 'pr2', Reference('branch', 'master', 'unused'),
575 2, 'pr2', Reference('branch', 'master', 'unused'),
576 Reference('branch', 'develop', 'unused'))
576 Reference('branch', 'develop', 'unused'))
577 assert os.path.isdir(workspace)
577 assert os.path.isdir(workspace)
578
578
579 def test_cleanup_merge_workspace(self):
579 def test_cleanup_merge_workspace(self):
580 workspace = self.repo._maybe_prepare_merge_workspace(
580 workspace = self.repo._maybe_prepare_merge_workspace(
581 2, 'pr3', Reference('branch', 'master', 'unused'),
581 2, 'pr3', Reference('branch', 'master', 'unused'),
582 Reference('branch', 'master', 'unused'))
582 Reference('branch', 'master', 'unused'))
583 self.repo.cleanup_merge_workspace(2, 'pr3')
583 self.repo.cleanup_merge_workspace(2, 'pr3')
584
584
585 assert not os.path.exists(workspace)
585 assert not os.path.exists(workspace)
586
586
587 def test_cleanup_merge_workspace_invalid_workspace_id(self):
587 def test_cleanup_merge_workspace_invalid_workspace_id(self):
588 # No assert: because in case of an inexistent workspace this function
588 # No assert: because in case of an inexistent workspace this function
589 # should still succeed.
589 # should still succeed.
590 self.repo.cleanup_merge_workspace(1, 'pr4')
590 self.repo.cleanup_merge_workspace(1, 'pr4')
591
591
592 def test_set_refs(self):
592 def test_set_refs(self):
593 test_ref = 'refs/test-refs/abcde'
593 test_ref = 'refs/test-refs/abcde'
594 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
594 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
595
595
596 self.repo.set_refs(test_ref, test_commit_id)
596 self.repo.set_refs(test_ref, test_commit_id)
597 stdout, _ = self.repo.run_git_command(['show-ref'])
597 stdout, _ = self.repo.run_git_command(['show-ref'])
598 assert test_ref in stdout
598 assert test_ref in stdout
599 assert test_commit_id in stdout
599 assert test_commit_id in stdout
600
600
601 def test_remove_ref(self):
601 def test_remove_ref(self):
602 test_ref = 'refs/test-refs/abcde'
602 test_ref = 'refs/test-refs/abcde'
603 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
603 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
604 self.repo.set_refs(test_ref, test_commit_id)
604 self.repo.set_refs(test_ref, test_commit_id)
605 stdout, _ = self.repo.run_git_command(['show-ref'])
605 stdout, _ = self.repo.run_git_command(['show-ref'])
606 assert test_ref in stdout
606 assert test_ref in stdout
607 assert test_commit_id in stdout
607 assert test_commit_id in stdout
608
608
609 self.repo.remove_ref(test_ref)
609 self.repo.remove_ref(test_ref)
610 stdout, _ = self.repo.run_git_command(['show-ref'])
610 stdout, _ = self.repo.run_git_command(['show-ref'])
611 assert test_ref not in stdout
611 assert test_ref not in stdout
612 assert test_commit_id not in stdout
612 assert test_commit_id not in stdout
613
613
614
614
615 class TestGitCommit(object):
615 class TestGitCommit(object):
616
616
617 @pytest.fixture(autouse=True)
617 @pytest.fixture(autouse=True)
618 def prepare(self):
618 def prepare(self):
619 self.repo = GitRepository(TEST_GIT_REPO)
619 self.repo = GitRepository(TEST_GIT_REPO)
620
620
621 def test_default_commit(self):
621 def test_default_commit(self):
622 tip = self.repo.get_commit()
622 tip = self.repo.get_commit()
623 assert tip == self.repo.get_commit(None)
623 assert tip == self.repo.get_commit(None)
624 assert tip == self.repo.get_commit('tip')
624 assert tip == self.repo.get_commit('tip')
625
625
626 def test_root_node(self):
626 def test_root_node(self):
627 tip = self.repo.get_commit()
627 tip = self.repo.get_commit()
628 assert tip.root is tip.get_node('')
628 assert tip.root is tip.get_node('')
629
629
630 def test_lazy_fetch(self):
630 def test_lazy_fetch(self):
631 """
631 """
632 Test if commit's nodes expands and are cached as we walk through
632 Test if commit's nodes expands and are cached as we walk through
633 the commit. This test is somewhat hard to write as order of tests
633 the commit. This test is somewhat hard to write as order of tests
634 is a key here. Written by running command after command in a shell.
634 is a key here. Written by running command after command in a shell.
635 """
635 """
636 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
636 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
637 assert commit_id in self.repo.commit_ids
637 assert commit_id in self.repo.commit_ids
638 commit = self.repo.get_commit(commit_id)
638 commit = self.repo.get_commit(commit_id)
639 assert len(commit.nodes) == 0
639 assert len(commit.nodes) == 0
640 root = commit.root
640 root = commit.root
641 assert len(commit.nodes) == 1
641 assert len(commit.nodes) == 1
642 assert len(root.nodes) == 8
642 assert len(root.nodes) == 8
643 # accessing root.nodes updates commit.nodes
643 # accessing root.nodes updates commit.nodes
644 assert len(commit.nodes) == 9
644 assert len(commit.nodes) == 9
645
645
646 docs = root.get_node('docs')
646 docs = root.get_node('docs')
647 # we haven't yet accessed anything new as docs dir was already cached
647 # we haven't yet accessed anything new as docs dir was already cached
648 assert len(commit.nodes) == 9
648 assert len(commit.nodes) == 9
649 assert len(docs.nodes) == 8
649 assert len(docs.nodes) == 8
650 # accessing docs.nodes updates commit.nodes
650 # accessing docs.nodes updates commit.nodes
651 assert len(commit.nodes) == 17
651 assert len(commit.nodes) == 17
652
652
653 assert docs is commit.get_node('docs')
653 assert docs is commit.get_node('docs')
654 assert docs is root.nodes[0]
654 assert docs is root.nodes[0]
655 assert docs is root.dirs[0]
655 assert docs is root.dirs[0]
656 assert docs is commit.get_node('docs')
656 assert docs is commit.get_node('docs')
657
657
658 def test_nodes_with_commit(self):
658 def test_nodes_with_commit(self):
659 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
659 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
660 commit = self.repo.get_commit(commit_id)
660 commit = self.repo.get_commit(commit_id)
661 root = commit.root
661 root = commit.root
662 docs = root.get_node('docs')
662 docs = root.get_node('docs')
663 assert docs is commit.get_node('docs')
663 assert docs is commit.get_node('docs')
664 api = docs.get_node('api')
664 api = docs.get_node('api')
665 assert api is commit.get_node('docs/api')
665 assert api is commit.get_node('docs/api')
666 index = api.get_node('index.rst')
666 index = api.get_node('index.rst')
667 assert index is commit.get_node('docs/api/index.rst')
667 assert index is commit.get_node('docs/api/index.rst')
668 assert index is commit.get_node('docs')\
668 assert index is commit.get_node('docs')\
669 .get_node('api')\
669 .get_node('api')\
670 .get_node('index.rst')
670 .get_node('index.rst')
671
671
    def test_branch_and_tags(self):
        # NOTE(review): the whole original test body is intentionally
        # disabled by being wrapped in this docstring; only ``pass`` runs.
        # The trailing comment below explains why it was turned off.
        """
        rev0 = self.repo.commit_ids[0]
        commit0 = self.repo.get_commit(rev0)
        assert commit0.branch == 'master'
        assert commit0.tags == []

        rev10 = self.repo.commit_ids[10]
        commit10 = self.repo.get_commit(rev10)
        assert commit10.branch == 'master'
        assert commit10.tags == []

        rev44 = self.repo.commit_ids[44]
        commit44 = self.repo.get_commit(rev44)
        assert commit44.branch == 'web-branch'

        tip = self.repo.get_commit('tip')
        assert 'tip' in tip.tags
        """
        # Those tests would fail - branches are now going
        # to be changed at main API in order to support git backend
        pass
694
694
695 def test_file_size(self):
695 def test_file_size(self):
696 to_check = (
696 to_check = (
697 ('c1214f7e79e02fc37156ff215cd71275450cffc3',
697 ('c1214f7e79e02fc37156ff215cd71275450cffc3',
698 'vcs/backends/BaseRepository.py', 502),
698 'vcs/backends/BaseRepository.py', 502),
699 ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
699 ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
700 'vcs/backends/hg.py', 854),
700 'vcs/backends/hg.py', 854),
701 ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
701 ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
702 'setup.py', 1068),
702 'setup.py', 1068),
703
703
704 ('d955cd312c17b02143c04fa1099a352b04368118',
704 ('d955cd312c17b02143c04fa1099a352b04368118',
705 'vcs/backends/base.py', 2921),
705 'vcs/backends/base.py', 2921),
706 ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
706 ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
707 'vcs/backends/base.py', 3936),
707 'vcs/backends/base.py', 3936),
708 ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
708 ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
709 'vcs/backends/base.py', 6189),
709 'vcs/backends/base.py', 6189),
710 )
710 )
711 for commit_id, path, size in to_check:
711 for commit_id, path, size in to_check:
712 node = self.repo.get_commit(commit_id).get_node(path)
712 node = self.repo.get_commit(commit_id).get_node(path)
713 assert node.is_file()
713 assert node.is_file()
714 assert node.size == size
714 assert node.size == size
715
715
716 def test_file_history_from_commits(self):
716 def test_file_history_from_commits(self):
717 node = self.repo[10].get_node('setup.py')
717 node = self.repo[10].get_node('setup.py')
718 commit_ids = [commit.raw_id for commit in node.history]
718 commit_ids = [commit.raw_id for commit in node.history]
719 assert ['ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == commit_ids
719 assert ['ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == commit_ids
720
720
721 node = self.repo[20].get_node('setup.py')
721 node = self.repo[20].get_node('setup.py')
722 node_ids = [commit.raw_id for commit in node.history]
722 node_ids = [commit.raw_id for commit in node.history]
723 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
723 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
724 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
724 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
725
725
726 # special case we check history from commit that has this particular
726 # special case we check history from commit that has this particular
727 # file changed this means we check if it's included as well
727 # file changed this means we check if it's included as well
728 node = self.repo.get_commit('191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e') \
728 node = self.repo.get_commit('191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e') \
729 .get_node('setup.py')
729 .get_node('setup.py')
730 node_ids = [commit.raw_id for commit in node.history]
730 node_ids = [commit.raw_id for commit in node.history]
731 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
731 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
732 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
732 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
733
733
734 def test_file_history(self):
734 def test_file_history(self):
735 # we can only check if those commits are present in the history
735 # we can only check if those commits are present in the history
736 # as we cannot update this test every time file is changed
736 # as we cannot update this test every time file is changed
737 files = {
737 files = {
738 'setup.py': [
738 'setup.py': [
739 '54386793436c938cff89326944d4c2702340037d',
739 '54386793436c938cff89326944d4c2702340037d',
740 '51d254f0ecf5df2ce50c0b115741f4cf13985dab',
740 '51d254f0ecf5df2ce50c0b115741f4cf13985dab',
741 '998ed409c795fec2012b1c0ca054d99888b22090',
741 '998ed409c795fec2012b1c0ca054d99888b22090',
742 '5e0eb4c47f56564395f76333f319d26c79e2fb09',
742 '5e0eb4c47f56564395f76333f319d26c79e2fb09',
743 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
743 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
744 '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e',
744 '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e',
745 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
745 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
746 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
746 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
747 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
747 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
748 ],
748 ],
749 'vcs/nodes.py': [
749 'vcs/nodes.py': [
750 '33fa3223355104431402a888fa77a4e9956feb3e',
750 '33fa3223355104431402a888fa77a4e9956feb3e',
751 'fa014c12c26d10ba682fadb78f2a11c24c8118e1',
751 'fa014c12c26d10ba682fadb78f2a11c24c8118e1',
752 'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
752 'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
753 'ab5721ca0a081f26bf43d9051e615af2cc99952f',
753 'ab5721ca0a081f26bf43d9051e615af2cc99952f',
754 'c877b68d18e792a66b7f4c529ea02c8f80801542',
754 'c877b68d18e792a66b7f4c529ea02c8f80801542',
755 '4313566d2e417cb382948f8d9d7c765330356054',
755 '4313566d2e417cb382948f8d9d7c765330356054',
756 '6c2303a793671e807d1cfc70134c9ca0767d98c2',
756 '6c2303a793671e807d1cfc70134c9ca0767d98c2',
757 '54386793436c938cff89326944d4c2702340037d',
757 '54386793436c938cff89326944d4c2702340037d',
758 '54000345d2e78b03a99d561399e8e548de3f3203',
758 '54000345d2e78b03a99d561399e8e548de3f3203',
759 '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b',
759 '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b',
760 '2d03ca750a44440fb5ea8b751176d1f36f8e8f46',
760 '2d03ca750a44440fb5ea8b751176d1f36f8e8f46',
761 '2a08b128c206db48c2f0b8f70df060e6db0ae4f8',
761 '2a08b128c206db48c2f0b8f70df060e6db0ae4f8',
762 '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b',
762 '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b',
763 'ac71e9503c2ca95542839af0ce7b64011b72ea7c',
763 'ac71e9503c2ca95542839af0ce7b64011b72ea7c',
764 '12669288fd13adba2a9b7dd5b870cc23ffab92d2',
764 '12669288fd13adba2a9b7dd5b870cc23ffab92d2',
765 '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382',
765 '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382',
766 '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5',
766 '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5',
767 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
767 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
768 'f50f42baeed5af6518ef4b0cb2f1423f3851a941',
768 'f50f42baeed5af6518ef4b0cb2f1423f3851a941',
769 'd7e390a45f6aa96f04f5e7f583ad4f867431aa25',
769 'd7e390a45f6aa96f04f5e7f583ad4f867431aa25',
770 'f15c21f97864b4f071cddfbf2750ec2e23859414',
770 'f15c21f97864b4f071cddfbf2750ec2e23859414',
771 'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade',
771 'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade',
772 'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b',
772 'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b',
773 '84dec09632a4458f79f50ddbbd155506c460b4f9',
773 '84dec09632a4458f79f50ddbbd155506c460b4f9',
774 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
774 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
775 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
775 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
776 '3bf1c5868e570e39569d094f922d33ced2fa3b2b',
776 '3bf1c5868e570e39569d094f922d33ced2fa3b2b',
777 'b8d04012574729d2c29886e53b1a43ef16dd00a1',
777 'b8d04012574729d2c29886e53b1a43ef16dd00a1',
778 '6970b057cffe4aab0a792aa634c89f4bebf01441',
778 '6970b057cffe4aab0a792aa634c89f4bebf01441',
779 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
779 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
780 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
780 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
781 ],
781 ],
782 'vcs/backends/git.py': [
782 'vcs/backends/git.py': [
783 '4cf116ad5a457530381135e2f4c453e68a1b0105',
783 '4cf116ad5a457530381135e2f4c453e68a1b0105',
784 '9a751d84d8e9408e736329767387f41b36935153',
784 '9a751d84d8e9408e736329767387f41b36935153',
785 'cb681fb539c3faaedbcdf5ca71ca413425c18f01',
785 'cb681fb539c3faaedbcdf5ca71ca413425c18f01',
786 '428f81bb652bcba8d631bce926e8834ff49bdcc6',
786 '428f81bb652bcba8d631bce926e8834ff49bdcc6',
787 '180ab15aebf26f98f714d8c68715e0f05fa6e1c7',
787 '180ab15aebf26f98f714d8c68715e0f05fa6e1c7',
788 '2b8e07312a2e89e92b90426ab97f349f4bce2a3a',
788 '2b8e07312a2e89e92b90426ab97f349f4bce2a3a',
789 '50e08c506174d8645a4bb517dd122ac946a0f3bf',
789 '50e08c506174d8645a4bb517dd122ac946a0f3bf',
790 '54000345d2e78b03a99d561399e8e548de3f3203',
790 '54000345d2e78b03a99d561399e8e548de3f3203',
791 ],
791 ],
792 }
792 }
793 for path, commit_ids in files.items():
793 for path, commit_ids in files.items():
794 node = self.repo.get_commit(commit_ids[0]).get_node(path)
794 node = self.repo.get_commit(commit_ids[0]).get_node(path)
795 node_ids = [commit.raw_id for commit in node.history]
795 node_ids = [commit.raw_id for commit in node.history]
796 assert set(commit_ids).issubset(set(node_ids)), (
796 assert set(commit_ids).issubset(set(node_ids)), (
797 "We assumed that %s is subset of commit_ids for which file %s "
797 "We assumed that %s is subset of commit_ids for which file %s "
798 "has been changed, and history of that node returned: %s"
798 "has been changed, and history of that node returned: %s"
799 % (commit_ids, path, node_ids))
799 % (commit_ids, path, node_ids))
800
800
801 def test_file_annotate(self):
801 def test_file_annotate(self):
802 files = {
802 files = {
803 'vcs/backends/__init__.py': {
803 'vcs/backends/__init__.py': {
804 'c1214f7e79e02fc37156ff215cd71275450cffc3': {
804 'c1214f7e79e02fc37156ff215cd71275450cffc3': {
805 'lines_no': 1,
805 'lines_no': 1,
806 'commits': [
806 'commits': [
807 'c1214f7e79e02fc37156ff215cd71275450cffc3',
807 'c1214f7e79e02fc37156ff215cd71275450cffc3',
808 ],
808 ],
809 },
809 },
810 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': {
810 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': {
811 'lines_no': 21,
811 'lines_no': 21,
812 'commits': [
812 'commits': [
813 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
813 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
814 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
814 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
815 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
815 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
816 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
816 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
817 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
817 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
818 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
818 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
819 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
819 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
820 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
820 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
821 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
821 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
822 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
822 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
823 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
823 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
824 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
824 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
825 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
825 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
826 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
826 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
827 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
827 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
828 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
828 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
829 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
829 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
830 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
830 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
831 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
831 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
832 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
832 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
833 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
833 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
834 ],
834 ],
835 },
835 },
836 'e29b67bd158580fc90fc5e9111240b90e6e86064': {
836 'e29b67bd158580fc90fc5e9111240b90e6e86064': {
837 'lines_no': 32,
837 'lines_no': 32,
838 'commits': [
838 'commits': [
839 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
839 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
840 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
840 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
841 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
841 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
842 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
842 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
843 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
843 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
844 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
844 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
845 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
845 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
846 '54000345d2e78b03a99d561399e8e548de3f3203',
846 '54000345d2e78b03a99d561399e8e548de3f3203',
847 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
847 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
848 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
848 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
849 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
849 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
850 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
850 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
851 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
851 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
852 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
852 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
853 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
853 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
854 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
854 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
855 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
855 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
856 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
856 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
857 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
857 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
858 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
858 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
859 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
859 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
860 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
860 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
861 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
861 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
862 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
862 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
863 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
863 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
864 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
864 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
865 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
865 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
866 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
866 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
867 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
867 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
868 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
868 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
869 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
869 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
870 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
870 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
871 ],
871 ],
872 },
872 },
873 },
873 },
874 }
874 }
875
875
876 for fname, commit_dict in files.items():
876 for fname, commit_dict in files.items():
877 for commit_id, __ in commit_dict.items():
877 for commit_id, __ in commit_dict.items():
878 commit = self.repo.get_commit(commit_id)
878 commit = self.repo.get_commit(commit_id)
879
879
880 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
880 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
881 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
881 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
882 assert l1_1 == l1_2
882 assert l1_1 == l1_2
883 l1 = l1_1
883 l1 = l1_1
884 l2 = files[fname][commit_id]['commits']
884 l2 = files[fname][commit_id]['commits']
885 assert l1 == l2, (
885 assert l1 == l2, (
886 "The lists of commit_ids for %s@commit_id %s"
886 "The lists of commit_ids for %s@commit_id %s"
887 "from annotation list should match each other, "
887 "from annotation list should match each other, "
888 "got \n%s \nvs \n%s " % (fname, commit_id, l1, l2))
888 "got \n%s \nvs \n%s " % (fname, commit_id, l1, l2))
889
889
890 def test_files_state(self):
890 def test_files_state(self):
891 """
891 """
892 Tests state of FileNodes.
892 Tests state of FileNodes.
893 """
893 """
894 node = self.repo\
894 node = self.repo\
895 .get_commit('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\
895 .get_commit('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\
896 .get_node('vcs/utils/diffs.py')
896 .get_node('vcs/utils/diffs.py')
897 assert node.state, NodeState.ADDED
897 assert node.state, NodeState.ADDED
898 assert node.added
898 assert node.added
899 assert not node.changed
899 assert not node.changed
900 assert not node.not_changed
900 assert not node.not_changed
901 assert not node.removed
901 assert not node.removed
902
902
903 node = self.repo\
903 node = self.repo\
904 .get_commit('33fa3223355104431402a888fa77a4e9956feb3e')\
904 .get_commit('33fa3223355104431402a888fa77a4e9956feb3e')\
905 .get_node('.hgignore')
905 .get_node('.hgignore')
906 assert node.state, NodeState.CHANGED
906 assert node.state, NodeState.CHANGED
907 assert not node.added
907 assert not node.added
908 assert node.changed
908 assert node.changed
909 assert not node.not_changed
909 assert not node.not_changed
910 assert not node.removed
910 assert not node.removed
911
911
912 node = self.repo\
912 node = self.repo\
913 .get_commit('e29b67bd158580fc90fc5e9111240b90e6e86064')\
913 .get_commit('e29b67bd158580fc90fc5e9111240b90e6e86064')\
914 .get_node('setup.py')
914 .get_node('setup.py')
915 assert node.state, NodeState.NOT_CHANGED
915 assert node.state, NodeState.NOT_CHANGED
916 assert not node.added
916 assert not node.added
917 assert not node.changed
917 assert not node.changed
918 assert node.not_changed
918 assert node.not_changed
919 assert not node.removed
919 assert not node.removed
920
920
921 # If node has REMOVED state then trying to fetch it would raise
921 # If node has REMOVED state then trying to fetch it would raise
922 # CommitError exception
922 # CommitError exception
923 commit = self.repo.get_commit(
923 commit = self.repo.get_commit(
924 'fa6600f6848800641328adbf7811fd2372c02ab2')
924 'fa6600f6848800641328adbf7811fd2372c02ab2')
925 path = 'vcs/backends/BaseRepository.py'
925 path = 'vcs/backends/BaseRepository.py'
926 with pytest.raises(NodeDoesNotExistError):
926 with pytest.raises(NodeDoesNotExistError):
927 commit.get_node(path)
927 commit.get_node(path)
928 # but it would be one of ``removed`` (commit's attribute)
928 # but it would be one of ``removed`` (commit's attribute)
929 assert path in [rf.path for rf in commit.removed]
929 assert path in [rf.path for rf in commit.removed]
930
930
931 commit = self.repo.get_commit(
931 commit = self.repo.get_commit(
932 '54386793436c938cff89326944d4c2702340037d')
932 '54386793436c938cff89326944d4c2702340037d')
933 changed = [
933 changed = [
934 'setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
934 'setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
935 'vcs/nodes.py']
935 'vcs/nodes.py']
936 assert set(changed) == set([f.path for f in commit.changed])
936 assert set(changed) == set([f.path for f in commit.changed])
937
937
938 def test_unicode_branch_refs(self):
938 def test_unicode_branch_refs(self):
939 unicode_branches = {
939 unicode_branches = {
940 'refs/heads/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
940 'refs/heads/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
941 u'refs/heads/uniΓ§ΓΆβˆ‚e': 'ΓΌrl',
941 u'refs/heads/uniΓ§ΓΆβˆ‚e': 'ΓΌrl',
942 }
942 }
943 with mock.patch(
943 with mock.patch(
944 ("rhodecode.lib.vcs.backends.git.repository"
944 ("rhodecode.lib.vcs.backends.git.repository"
945 ".GitRepository._refs"),
945 ".GitRepository._refs"),
946 unicode_branches):
946 unicode_branches):
947 branches = self.repo.branches
947 branches = self.repo.branches
948
948
949 assert 'unicode' in branches
949 assert 'unicode' in branches
950 assert u'uniΓ§ΓΆβˆ‚e' in branches
950 assert u'uniΓ§ΓΆβˆ‚e' in branches
951
951
952 def test_unicode_tag_refs(self):
952 def test_unicode_tag_refs(self):
953 unicode_tags = {
953 unicode_tags = {
954 'refs/tags/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
954 'refs/tags/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
955 u'refs/tags/uniΓ§ΓΆβˆ‚e': '6c0ce52b229aa978889e91b38777f800e85f330b',
955 u'refs/tags/uniΓ§ΓΆβˆ‚e': '6c0ce52b229aa978889e91b38777f800e85f330b',
956 }
956 }
957 with mock.patch(
957 with mock.patch(
958 ("rhodecode.lib.vcs.backends.git.repository"
958 ("rhodecode.lib.vcs.backends.git.repository"
959 ".GitRepository._refs"),
959 ".GitRepository._refs"),
960 unicode_tags):
960 unicode_tags):
961 tags = self.repo.tags
961 tags = self.repo.tags
962
962
963 assert 'unicode' in tags
963 assert 'unicode' in tags
964 assert u'uniΓ§ΓΆβˆ‚e' in tags
964 assert u'uniΓ§ΓΆβˆ‚e' in tags
965
965
966 def test_commit_message_is_unicode(self):
966 def test_commit_message_is_unicode(self):
967 for commit in self.repo:
967 for commit in self.repo:
968 assert type(commit.message) == unicode
968 assert type(commit.message) == unicode
969
969
970 def test_commit_author_is_unicode(self):
970 def test_commit_author_is_unicode(self):
971 for commit in self.repo:
971 for commit in self.repo:
972 assert type(commit.author) == unicode
972 assert type(commit.author) == unicode
973
973
974 def test_repo_files_content_is_unicode(self):
974 def test_repo_files_content_is_unicode(self):
975 commit = self.repo.get_commit()
975 commit = self.repo.get_commit()
976 for node in commit.get_node('/'):
976 for node in commit.get_node('/'):
977 if node.is_file():
977 if node.is_file():
978 assert type(node.content) == unicode
978 assert type(node.content) == unicode
979
979
980 def test_wrong_path(self):
980 def test_wrong_path(self):
981 # There is 'setup.py' in the root dir but not there:
981 # There is 'setup.py' in the root dir but not there:
982 path = 'foo/bar/setup.py'
982 path = 'foo/bar/setup.py'
983 tip = self.repo.get_commit()
983 tip = self.repo.get_commit()
984 with pytest.raises(VCSError):
984 with pytest.raises(VCSError):
985 tip.get_node(path)
985 tip.get_node(path)
986
986
987 @pytest.mark.parametrize("author_email, commit_id", [
987 @pytest.mark.parametrize("author_email, commit_id", [
988 ('marcin@python-blog.com', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
988 ('marcin@python-blog.com', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
989 ('lukasz.balcerzak@python-center.pl',
989 ('lukasz.balcerzak@python-center.pl',
990 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
990 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
991 ('none@none', '8430a588b43b5d6da365400117c89400326e7992'),
991 ('none@none', '8430a588b43b5d6da365400117c89400326e7992'),
992 ])
992 ])
993 def test_author_email(self, author_email, commit_id):
993 def test_author_email(self, author_email, commit_id):
994 commit = self.repo.get_commit(commit_id)
994 commit = self.repo.get_commit(commit_id)
995 assert author_email == commit.author_email
995 assert author_email == commit.author_email
996
996
997 @pytest.mark.parametrize("author, commit_id", [
997 @pytest.mark.parametrize("author, commit_id", [
998 ('Marcin Kuzminski', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
998 ('Marcin Kuzminski', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
999 ('Lukasz Balcerzak', 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
999 ('Lukasz Balcerzak', 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
1000 ('marcink', '8430a588b43b5d6da365400117c89400326e7992'),
1000 ('marcink', '8430a588b43b5d6da365400117c89400326e7992'),
1001 ])
1001 ])
1002 def test_author_username(self, author, commit_id):
1002 def test_author_username(self, author, commit_id):
1003 commit = self.repo.get_commit(commit_id)
1003 commit = self.repo.get_commit(commit_id)
1004 assert author == commit.author_name
1004 assert author == commit.author_name
1005
1005
1006
1006
1007 class TestLargeFileRepo(object):
1007 class TestLargeFileRepo(object):
1008
1008
1009 def test_large_file(self, backend_git):
1009 def test_large_file(self, backend_git):
1010 conf = make_db_config()
1010 conf = make_db_config()
1011 repo = backend_git.create_test_repo('largefiles', conf)
1011 repo = backend_git.create_test_repo('largefiles', conf)
1012
1012
1013 tip = repo.scm_instance().get_commit()
1013 tip = repo.scm_instance().get_commit()
1014
1014
1015 # extract stored LF node into the origin cache
1015 # extract stored LF node into the origin cache
1016 lfs_store = os.path.join(repo.repo_path, repo.repo_name, 'lfs_store')
1016 lfs_store = os.path.join(repo.repo_path, repo.repo_name, 'lfs_store')
1017
1017
1018 oid = '7b331c02e313c7599d5a90212e17e6d3cb729bd2e1c9b873c302a63c95a2f9bf'
1018 oid = '7b331c02e313c7599d5a90212e17e6d3cb729bd2e1c9b873c302a63c95a2f9bf'
1019 oid_path = os.path.join(lfs_store, oid)
1019 oid_path = os.path.join(lfs_store, oid)
1020 oid_destination = os.path.join(
1020 oid_destination = os.path.join(
1021 conf.get('vcs_git_lfs', 'store_location'), oid)
1021 conf.get('vcs_git_lfs', 'store_location'), oid)
1022 shutil.copy(oid_path, oid_destination)
1022 shutil.copy(oid_path, oid_destination)
1023
1023
1024 node = tip.get_node('1MB.zip')
1024 node = tip.get_node('1MB.zip')
1025
1025
1026 lf_node = node.get_largefile_node()
1026 lf_node = node.get_largefile_node()
1027
1027
1028 assert lf_node.is_largefile() is True
1028 assert lf_node.is_largefile() is True
1029 assert lf_node.size == 1024000
1029 assert lf_node.size == 1024000
1030 assert lf_node.name == '1MB.zip'
1030 assert lf_node.name == '1MB.zip'
1031
1031
1032
1032
1033 @pytest.mark.usefixtures("vcs_repository_support")
1033 @pytest.mark.usefixtures("vcs_repository_support")
1034 class TestGitSpecificWithRepo(BackendTestMixin):
1034 class TestGitSpecificWithRepo(BackendTestMixin):
1035
1035
1036 @classmethod
1036 @classmethod
1037 def _get_commits(cls):
1037 def _get_commits(cls):
1038 return [
1038 return [
1039 {
1039 {
1040 'message': 'Initial',
1040 'message': 'Initial',
1041 'author': 'Joe Doe <joe.doe@example.com>',
1041 'author': 'Joe Doe <joe.doe@example.com>',
1042 'date': datetime.datetime(2010, 1, 1, 20),
1042 'date': datetime.datetime(2010, 1, 1, 20),
1043 'added': [
1043 'added': [
1044 FileNode('foobar/static/js/admin/base.js', content='base'),
1044 FileNode('foobar/static/js/admin/base.js', content='base'),
1045 FileNode(
1045 FileNode(
1046 'foobar/static/admin', content='admin',
1046 'foobar/static/admin', content='admin',
1047 mode=0o120000), # this is a link
1047 mode=0o120000), # this is a link
1048 FileNode('foo', content='foo'),
1048 FileNode('foo', content='foo'),
1049 ],
1049 ],
1050 },
1050 },
1051 {
1051 {
1052 'message': 'Second',
1052 'message': 'Second',
1053 'author': 'Joe Doe <joe.doe@example.com>',
1053 'author': 'Joe Doe <joe.doe@example.com>',
1054 'date': datetime.datetime(2010, 1, 1, 22),
1054 'date': datetime.datetime(2010, 1, 1, 22),
1055 'added': [
1055 'added': [
1056 FileNode('foo2', content='foo2'),
1056 FileNode('foo2', content='foo2'),
1057 ],
1057 ],
1058 },
1058 },
1059 ]
1059 ]
1060
1060
1061 def test_paths_slow_traversing(self):
1061 def test_paths_slow_traversing(self):
1062 commit = self.repo.get_commit()
1062 commit = self.repo.get_commit()
1063 assert commit.get_node('foobar').get_node('static').get_node('js')\
1063 assert commit.get_node('foobar').get_node('static').get_node('js')\
1064 .get_node('admin').get_node('base.js').content == 'base'
1064 .get_node('admin').get_node('base.js').content == 'base'
1065
1065
1066 def test_paths_fast_traversing(self):
1066 def test_paths_fast_traversing(self):
1067 commit = self.repo.get_commit()
1067 commit = self.repo.get_commit()
1068 assert (
1068 assert commit.get_node('foobar/static/js/admin/base.js').content == 'base'
1069 commit.get_node('foobar/static/js/admin/base.js').content ==
1070 'base')
1071
1069
1072 def test_get_diff_runs_git_command_with_hashes(self):
1070 def test_get_diff_runs_git_command_with_hashes(self):
1073 comm1 = self.repo[0]
1071 comm1 = self.repo[0]
1074 comm2 = self.repo[1]
1072 comm2 = self.repo[1]
1075 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1073
1076 self.repo.get_diff(comm1, comm2)
1074 with mock.patch.object(self.repo, '_remote') as remote_mock:
1075 self.repo.get_diff(comm1, comm2)
1077
1076
1078 self.repo.run_git_command.assert_called_once_with(
1077 remote_mock.diff.assert_called_once_with(
1079 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1078 comm1.raw_id, comm2.raw_id,
1080 '--abbrev=40', comm1.raw_id, comm2.raw_id])
1079 file_filter=None, opt_ignorews=False, context=3)
1081
1080
1082 def test_get_diff_runs_git_command_with_str_hashes(self):
1081 def test_get_diff_runs_git_command_with_str_hashes(self):
1083 comm2 = self.repo[1]
1082 comm2 = self.repo[1]
1084 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1083 with mock.patch.object(self.repo, '_remote') as remote_mock:
1085 self.repo.get_diff(self.repo.EMPTY_COMMIT, comm2)
1084 self.repo.get_diff(self.repo.EMPTY_COMMIT, comm2)
1086 self.repo.run_git_command.assert_called_once_with(
1085 remote_mock.diff.assert_called_once_with(
1087 ['show', '-U3', '--full-index', '--binary', '-p', '-M',
1086 self.repo.EMPTY_COMMIT.raw_id, comm2.raw_id,
1088 '--abbrev=40', comm2.raw_id])
1087 file_filter=None, opt_ignorews=False, context=3)
1089
1088
1090 def test_get_diff_runs_git_command_with_path_if_its_given(self):
1089 def test_get_diff_runs_git_command_with_path_if_its_given(self):
1091 comm1 = self.repo[0]
1090 comm1 = self.repo[0]
1092 comm2 = self.repo[1]
1091 comm2 = self.repo[1]
1093 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1092 with mock.patch.object(self.repo, '_remote') as remote_mock:
1094 self.repo.get_diff(comm1, comm2, 'foo')
1093 self.repo.get_diff(comm1, comm2, 'foo')
1095 self.repo.run_git_command.assert_called_once_with(
1094 remote_mock.diff.assert_called_once_with(
1096 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1095 self.repo._lookup_commit(0), comm2.raw_id,
1097 '--abbrev=40', self.repo._lookup_commit(0),
1096 file_filter='foo', opt_ignorews=False, context=3)
1098 comm2.raw_id, '--', 'foo'])
1099
1097
1100
1098
1101 @pytest.mark.usefixtures("vcs_repository_support")
1099 @pytest.mark.usefixtures("vcs_repository_support")
1102 class TestGitRegression(BackendTestMixin):
1100 class TestGitRegression(BackendTestMixin):
1103
1101
1104 @classmethod
1102 @classmethod
1105 def _get_commits(cls):
1103 def _get_commits(cls):
1106 return [
1104 return [
1107 {
1105 {
1108 'message': 'Initial',
1106 'message': 'Initial',
1109 'author': 'Joe Doe <joe.doe@example.com>',
1107 'author': 'Joe Doe <joe.doe@example.com>',
1110 'date': datetime.datetime(2010, 1, 1, 20),
1108 'date': datetime.datetime(2010, 1, 1, 20),
1111 'added': [
1109 'added': [
1112 FileNode('bot/__init__.py', content='base'),
1110 FileNode('bot/__init__.py', content='base'),
1113 FileNode('bot/templates/404.html', content='base'),
1111 FileNode('bot/templates/404.html', content='base'),
1114 FileNode('bot/templates/500.html', content='base'),
1112 FileNode('bot/templates/500.html', content='base'),
1115 ],
1113 ],
1116 },
1114 },
1117 {
1115 {
1118 'message': 'Second',
1116 'message': 'Second',
1119 'author': 'Joe Doe <joe.doe@example.com>',
1117 'author': 'Joe Doe <joe.doe@example.com>',
1120 'date': datetime.datetime(2010, 1, 1, 22),
1118 'date': datetime.datetime(2010, 1, 1, 22),
1121 'added': [
1119 'added': [
1122 FileNode('bot/build/migrations/1.py', content='foo2'),
1120 FileNode('bot/build/migrations/1.py', content='foo2'),
1123 FileNode('bot/build/migrations/2.py', content='foo2'),
1121 FileNode('bot/build/migrations/2.py', content='foo2'),
1124 FileNode(
1122 FileNode(
1125 'bot/build/static/templates/f.html', content='foo2'),
1123 'bot/build/static/templates/f.html', content='foo2'),
1126 FileNode(
1124 FileNode(
1127 'bot/build/static/templates/f1.html', content='foo2'),
1125 'bot/build/static/templates/f1.html', content='foo2'),
1128 FileNode('bot/build/templates/err.html', content='foo2'),
1126 FileNode('bot/build/templates/err.html', content='foo2'),
1129 FileNode('bot/build/templates/err2.html', content='foo2'),
1127 FileNode('bot/build/templates/err2.html', content='foo2'),
1130 ],
1128 ],
1131 },
1129 },
1132 ]
1130 ]
1133
1131
1134 @pytest.mark.parametrize("path, expected_paths", [
1132 @pytest.mark.parametrize("path, expected_paths", [
1135 ('bot', [
1133 ('bot', [
1136 'bot/build',
1134 'bot/build',
1137 'bot/templates',
1135 'bot/templates',
1138 'bot/__init__.py']),
1136 'bot/__init__.py']),
1139 ('bot/build', [
1137 ('bot/build', [
1140 'bot/build/migrations',
1138 'bot/build/migrations',
1141 'bot/build/static',
1139 'bot/build/static',
1142 'bot/build/templates']),
1140 'bot/build/templates']),
1143 ('bot/build/static', [
1141 ('bot/build/static', [
1144 'bot/build/static/templates']),
1142 'bot/build/static/templates']),
1145 ('bot/build/static/templates', [
1143 ('bot/build/static/templates', [
1146 'bot/build/static/templates/f.html',
1144 'bot/build/static/templates/f.html',
1147 'bot/build/static/templates/f1.html']),
1145 'bot/build/static/templates/f1.html']),
1148 ('bot/build/templates', [
1146 ('bot/build/templates', [
1149 'bot/build/templates/err.html',
1147 'bot/build/templates/err.html',
1150 'bot/build/templates/err2.html']),
1148 'bot/build/templates/err2.html']),
1151 ('bot/templates/', [
1149 ('bot/templates/', [
1152 'bot/templates/404.html',
1150 'bot/templates/404.html',
1153 'bot/templates/500.html']),
1151 'bot/templates/500.html']),
1154 ])
1152 ])
1155 def test_similar_paths(self, path, expected_paths):
1153 def test_similar_paths(self, path, expected_paths):
1156 commit = self.repo.get_commit()
1154 commit = self.repo.get_commit()
1157 paths = [n.path for n in commit.get_nodes(path)]
1155 paths = [n.path for n in commit.get_nodes(path)]
1158 assert paths == expected_paths
1156 assert paths == expected_paths
1159
1157
1160
1158
1161 class TestDiscoverGitVersion(object):
1159 class TestDiscoverGitVersion(object):
1162
1160
1163 def test_returns_git_version(self, baseapp):
1161 def test_returns_git_version(self, baseapp):
1164 version = discover_git_version()
1162 version = discover_git_version()
1165 assert version
1163 assert version
1166
1164
1167 def test_returns_empty_string_without_vcsserver(self):
1165 def test_returns_empty_string_without_vcsserver(self):
1168 mock_connection = mock.Mock()
1166 mock_connection = mock.Mock()
1169 mock_connection.discover_git_version = mock.Mock(
1167 mock_connection.discover_git_version = mock.Mock(
1170 side_effect=Exception)
1168 side_effect=Exception)
1171 with mock.patch('rhodecode.lib.vcs.connection.Git', mock_connection):
1169 with mock.patch('rhodecode.lib.vcs.connection.Git', mock_connection):
1172 version = discover_git_version()
1170 version = discover_git_version()
1173 assert version == ''
1171 assert version == ''
1174
1172
1175
1173
1176 class TestGetSubmoduleUrl(object):
1174 class TestGetSubmoduleUrl(object):
1177 def test_submodules_file_found(self):
1175 def test_submodules_file_found(self):
1178 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1176 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1179 node = mock.Mock()
1177 node = mock.Mock()
1180 with mock.patch.object(
1178 with mock.patch.object(
1181 commit, 'get_node', return_value=node) as get_node_mock:
1179 commit, 'get_node', return_value=node) as get_node_mock:
1182 node.content = (
1180 node.content = (
1183 '[submodule "subrepo1"]\n'
1181 '[submodule "subrepo1"]\n'
1184 '\tpath = subrepo1\n'
1182 '\tpath = subrepo1\n'
1185 '\turl = https://code.rhodecode.com/dulwich\n'
1183 '\turl = https://code.rhodecode.com/dulwich\n'
1186 )
1184 )
1187 result = commit._get_submodule_url('subrepo1')
1185 result = commit._get_submodule_url('subrepo1')
1188 get_node_mock.assert_called_once_with('.gitmodules')
1186 get_node_mock.assert_called_once_with('.gitmodules')
1189 assert result == 'https://code.rhodecode.com/dulwich'
1187 assert result == 'https://code.rhodecode.com/dulwich'
1190
1188
1191 def test_complex_submodule_path(self):
1189 def test_complex_submodule_path(self):
1192 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1190 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1193 node = mock.Mock()
1191 node = mock.Mock()
1194 with mock.patch.object(
1192 with mock.patch.object(
1195 commit, 'get_node', return_value=node) as get_node_mock:
1193 commit, 'get_node', return_value=node) as get_node_mock:
1196 node.content = (
1194 node.content = (
1197 '[submodule "complex/subrepo/path"]\n'
1195 '[submodule "complex/subrepo/path"]\n'
1198 '\tpath = complex/subrepo/path\n'
1196 '\tpath = complex/subrepo/path\n'
1199 '\turl = https://code.rhodecode.com/dulwich\n'
1197 '\turl = https://code.rhodecode.com/dulwich\n'
1200 )
1198 )
1201 result = commit._get_submodule_url('complex/subrepo/path')
1199 result = commit._get_submodule_url('complex/subrepo/path')
1202 get_node_mock.assert_called_once_with('.gitmodules')
1200 get_node_mock.assert_called_once_with('.gitmodules')
1203 assert result == 'https://code.rhodecode.com/dulwich'
1201 assert result == 'https://code.rhodecode.com/dulwich'
1204
1202
1205 def test_submodules_file_not_found(self):
1203 def test_submodules_file_not_found(self):
1206 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1204 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1207 with mock.patch.object(
1205 with mock.patch.object(
1208 commit, 'get_node', side_effect=NodeDoesNotExistError):
1206 commit, 'get_node', side_effect=NodeDoesNotExistError):
1209 result = commit._get_submodule_url('complex/subrepo/path')
1207 result = commit._get_submodule_url('complex/subrepo/path')
1210 assert result is None
1208 assert result is None
1211
1209
1212 def test_path_not_found(self):
1210 def test_path_not_found(self):
1213 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1211 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1214 node = mock.Mock()
1212 node = mock.Mock()
1215 with mock.patch.object(
1213 with mock.patch.object(
1216 commit, 'get_node', return_value=node) as get_node_mock:
1214 commit, 'get_node', return_value=node) as get_node_mock:
1217 node.content = (
1215 node.content = (
1218 '[submodule "subrepo1"]\n'
1216 '[submodule "subrepo1"]\n'
1219 '\tpath = subrepo1\n'
1217 '\tpath = subrepo1\n'
1220 '\turl = https://code.rhodecode.com/dulwich\n'
1218 '\turl = https://code.rhodecode.com/dulwich\n'
1221 )
1219 )
1222 result = commit._get_submodule_url('subrepo2')
1220 result = commit._get_submodule_url('subrepo2')
1223 get_node_mock.assert_called_once_with('.gitmodules')
1221 get_node_mock.assert_called_once_with('.gitmodules')
1224 assert result is None
1222 assert result is None
1225
1223
1226 def test_returns_cached_values(self):
1224 def test_returns_cached_values(self):
1227 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1225 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1228 node = mock.Mock()
1226 node = mock.Mock()
1229 with mock.patch.object(
1227 with mock.patch.object(
1230 commit, 'get_node', return_value=node) as get_node_mock:
1228 commit, 'get_node', return_value=node) as get_node_mock:
1231 node.content = (
1229 node.content = (
1232 '[submodule "subrepo1"]\n'
1230 '[submodule "subrepo1"]\n'
1233 '\tpath = subrepo1\n'
1231 '\tpath = subrepo1\n'
1234 '\turl = https://code.rhodecode.com/dulwich\n'
1232 '\turl = https://code.rhodecode.com/dulwich\n'
1235 )
1233 )
1236 for _ in range(3):
1234 for _ in range(3):
1237 commit._get_submodule_url('subrepo1')
1235 commit._get_submodule_url('subrepo1')
1238 get_node_mock.assert_called_once_with('.gitmodules')
1236 get_node_mock.assert_called_once_with('.gitmodules')
1239
1237
1240 def test_get_node_returns_a_link(self):
1238 def test_get_node_returns_a_link(self):
1241 repository = mock.Mock()
1239 repository = mock.Mock()
1242 repository.alias = 'git'
1240 repository.alias = 'git'
1243 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1241 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1244 submodule_url = 'https://code.rhodecode.com/dulwich'
1242 submodule_url = 'https://code.rhodecode.com/dulwich'
1245 get_id_patch = mock.patch.object(
1243 get_id_patch = mock.patch.object(
1246 commit, '_get_tree_id_for_path', return_value=(1, 'link'))
1244 commit, '_get_tree_id_for_path', return_value=(1, 'link'))
1247 get_submodule_patch = mock.patch.object(
1245 get_submodule_patch = mock.patch.object(
1248 commit, '_get_submodule_url', return_value=submodule_url)
1246 commit, '_get_submodule_url', return_value=submodule_url)
1249
1247
1250 with get_id_patch, get_submodule_patch as submodule_mock:
1248 with get_id_patch, get_submodule_patch as submodule_mock:
1251 node = commit.get_node('/abcde')
1249 node = commit.get_node('/abcde')
1252
1250
1253 submodule_mock.assert_called_once_with('/abcde')
1251 submodule_mock.assert_called_once_with('/abcde')
1254 assert type(node) == SubModuleNode
1252 assert type(node) == SubModuleNode
1255 assert node.url == submodule_url
1253 assert node.url == submodule_url
1256
1254
1257 def test_get_nodes_returns_links(self):
1255 def test_get_nodes_returns_links(self):
1258 repository = mock.MagicMock()
1256 repository = mock.MagicMock()
1259 repository.alias = 'git'
1257 repository.alias = 'git'
1260 repository._remote.tree_items.return_value = [
1258 repository._remote.tree_items.return_value = [
1261 ('subrepo', 'stat', 1, 'link')
1259 ('subrepo', 'stat', 1, 'link')
1262 ]
1260 ]
1263 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1261 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1264 submodule_url = 'https://code.rhodecode.com/dulwich'
1262 submodule_url = 'https://code.rhodecode.com/dulwich'
1265 get_id_patch = mock.patch.object(
1263 get_id_patch = mock.patch.object(
1266 commit, '_get_tree_id_for_path', return_value=(1, 'tree'))
1264 commit, '_get_tree_id_for_path', return_value=(1, 'tree'))
1267 get_submodule_patch = mock.patch.object(
1265 get_submodule_patch = mock.patch.object(
1268 commit, '_get_submodule_url', return_value=submodule_url)
1266 commit, '_get_submodule_url', return_value=submodule_url)
1269
1267
1270 with get_id_patch, get_submodule_patch as submodule_mock:
1268 with get_id_patch, get_submodule_patch as submodule_mock:
1271 nodes = commit.get_nodes('/abcde')
1269 nodes = commit.get_nodes('/abcde')
1272
1270
1273 submodule_mock.assert_called_once_with('/abcde/subrepo')
1271 submodule_mock.assert_called_once_with('/abcde/subrepo')
1274 assert len(nodes) == 1
1272 assert len(nodes) == 1
1275 assert type(nodes[0]) == SubModuleNode
1273 assert type(nodes[0]) == SubModuleNode
1276 assert nodes[0].url == submodule_url
1274 assert nodes[0].url == submodule_url
@@ -1,1188 +1,1188 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22
22
23 import mock
23 import mock
24 import pytest
24 import pytest
25
25
26 from rhodecode.lib.utils import make_db_config
26 from rhodecode.lib.utils import make_db_config
27 from rhodecode.lib.vcs import backends
27 from rhodecode.lib.vcs import backends
28 from rhodecode.lib.vcs.backends.base import (
28 from rhodecode.lib.vcs.backends.base import (
29 Reference, MergeResponse, MergeFailureReason)
29 Reference, MergeResponse, MergeFailureReason)
30 from rhodecode.lib.vcs.backends.hg import MercurialRepository, MercurialCommit
30 from rhodecode.lib.vcs.backends.hg import MercurialRepository, MercurialCommit
31 from rhodecode.lib.vcs.exceptions import (
31 from rhodecode.lib.vcs.exceptions import (
32 RepositoryError, VCSError, NodeDoesNotExistError, CommitDoesNotExistError)
32 RepositoryError, VCSError, NodeDoesNotExistError, CommitDoesNotExistError)
33 from rhodecode.lib.vcs.nodes import FileNode, NodeKind, NodeState
33 from rhodecode.lib.vcs.nodes import FileNode, NodeKind, NodeState
34 from rhodecode.tests import TEST_HG_REPO, TEST_HG_REPO_CLONE, repo_id_generator
34 from rhodecode.tests import TEST_HG_REPO, TEST_HG_REPO_CLONE, repo_id_generator
35
35
36
36
37 pytestmark = pytest.mark.backends("hg")
37 pytestmark = pytest.mark.backends("hg")
38
38
39
39
40 def repo_path_generator():
40 def repo_path_generator():
41 """
41 """
42 Return a different path to be used for cloning repos.
42 Return a different path to be used for cloning repos.
43 """
43 """
44 i = 0
44 i = 0
45 while True:
45 while True:
46 i += 1
46 i += 1
47 yield '%s-%d' % (TEST_HG_REPO_CLONE, i)
47 yield '%s-%d' % (TEST_HG_REPO_CLONE, i)
48
48
49 REPO_PATH_GENERATOR = repo_path_generator()
49 REPO_PATH_GENERATOR = repo_path_generator()
50
50
51
51
52 @pytest.fixture(scope='class', autouse=True)
52 @pytest.fixture(scope='class', autouse=True)
53 def repo(request, baseapp):
53 def repo(request, baseapp):
54 repo = MercurialRepository(TEST_HG_REPO)
54 repo = MercurialRepository(TEST_HG_REPO)
55 if request.cls:
55 if request.cls:
56 request.cls.repo = repo
56 request.cls.repo = repo
57 return repo
57 return repo
58
58
59
59
60 class TestMercurialRepository:
60 class TestMercurialRepository(object):
61
61
62 # pylint: disable=protected-access
62 # pylint: disable=protected-access
63
63
64 def get_clone_repo(self):
64 def get_clone_repo(self):
65 """
65 """
66 Return a clone of the base repo.
66 Return a clone of the base repo.
67 """
67 """
68 clone_path = next(REPO_PATH_GENERATOR)
68 clone_path = next(REPO_PATH_GENERATOR)
69 repo_clone = MercurialRepository(
69 repo_clone = MercurialRepository(
70 clone_path, create=True, src_url=self.repo.path)
70 clone_path, create=True, src_url=self.repo.path)
71
71
72 return repo_clone
72 return repo_clone
73
73
74 def get_empty_repo(self):
74 def get_empty_repo(self):
75 """
75 """
76 Return an empty repo.
76 Return an empty repo.
77 """
77 """
78 return MercurialRepository(next(REPO_PATH_GENERATOR), create=True)
78 return MercurialRepository(next(REPO_PATH_GENERATOR), create=True)
79
79
80 def test_wrong_repo_path(self):
80 def test_wrong_repo_path(self):
81 wrong_repo_path = '/tmp/errorrepo_hg'
81 wrong_repo_path = '/tmp/errorrepo_hg'
82 with pytest.raises(RepositoryError):
82 with pytest.raises(RepositoryError):
83 MercurialRepository(wrong_repo_path)
83 MercurialRepository(wrong_repo_path)
84
84
85 def test_unicode_path_repo(self):
85 def test_unicode_path_repo(self):
86 with pytest.raises(VCSError):
86 with pytest.raises(VCSError):
87 MercurialRepository(u'iShouldFail')
87 MercurialRepository(u'iShouldFail')
88
88
89 def test_unicode_commit_id(self):
89 def test_unicode_commit_id(self):
90 with pytest.raises(CommitDoesNotExistError):
90 with pytest.raises(CommitDoesNotExistError):
91 self.repo.get_commit(u'unicode-commit-id')
91 self.repo.get_commit(u'unicode-commit-id')
92 with pytest.raises(CommitDoesNotExistError):
92 with pytest.raises(CommitDoesNotExistError):
93 self.repo.get_commit(u'unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-commit-id')
93 self.repo.get_commit(u'unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-commit-id')
94
94
95 def test_unicode_bookmark(self):
95 def test_unicode_bookmark(self):
96 self.repo.bookmark(u'unicode-bookmark')
96 self.repo.bookmark(u'unicode-bookmark')
97 self.repo.bookmark(u'unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-bookmark')
97 self.repo.bookmark(u'unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-bookmark')
98
98
99 def test_unicode_branch(self):
99 def test_unicode_branch(self):
100 with pytest.raises(KeyError):
100 with pytest.raises(KeyError):
101 self.repo.branches[u'unicode-branch']
101 self.repo.branches[u'unicode-branch']
102 with pytest.raises(KeyError):
102 with pytest.raises(KeyError):
103 self.repo.branches[u'unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-branch']
103 self.repo.branches[u'unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-branch']
104
104
105 def test_repo_clone(self):
105 def test_repo_clone(self):
106 if os.path.exists(TEST_HG_REPO_CLONE):
106 if os.path.exists(TEST_HG_REPO_CLONE):
107 self.fail(
107 self.fail(
108 'Cannot test mercurial clone repo as location %s already '
108 'Cannot test mercurial clone repo as location %s already '
109 'exists. You should manually remove it first.'
109 'exists. You should manually remove it first.'
110 % TEST_HG_REPO_CLONE)
110 % TEST_HG_REPO_CLONE)
111
111
112 repo = MercurialRepository(TEST_HG_REPO)
112 repo = MercurialRepository(TEST_HG_REPO)
113 repo_clone = MercurialRepository(TEST_HG_REPO_CLONE,
113 repo_clone = MercurialRepository(TEST_HG_REPO_CLONE,
114 src_url=TEST_HG_REPO)
114 src_url=TEST_HG_REPO)
115 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
115 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
116 # Checking hashes of commits should be enough
116 # Checking hashes of commits should be enough
117 for commit in repo.get_commits():
117 for commit in repo.get_commits():
118 raw_id = commit.raw_id
118 raw_id = commit.raw_id
119 assert raw_id == repo_clone.get_commit(raw_id).raw_id
119 assert raw_id == repo_clone.get_commit(raw_id).raw_id
120
120
121 def test_repo_clone_with_update(self):
121 def test_repo_clone_with_update(self):
122 repo = MercurialRepository(TEST_HG_REPO)
122 repo = MercurialRepository(TEST_HG_REPO)
123 repo_clone = MercurialRepository(
123 repo_clone = MercurialRepository(
124 TEST_HG_REPO_CLONE + '_w_update',
124 TEST_HG_REPO_CLONE + '_w_update',
125 src_url=TEST_HG_REPO, do_workspace_checkout=True)
125 src_url=TEST_HG_REPO, do_workspace_checkout=True)
126 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
126 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
127
127
128 # check if current workdir was updated
128 # check if current workdir was updated
129 assert os.path.isfile(
129 assert os.path.isfile(
130 os.path.join(TEST_HG_REPO_CLONE + '_w_update', 'MANIFEST.in'))
130 os.path.join(TEST_HG_REPO_CLONE + '_w_update', 'MANIFEST.in'))
131
131
132 def test_repo_clone_without_update(self):
132 def test_repo_clone_without_update(self):
133 repo = MercurialRepository(TEST_HG_REPO)
133 repo = MercurialRepository(TEST_HG_REPO)
134 repo_clone = MercurialRepository(
134 repo_clone = MercurialRepository(
135 TEST_HG_REPO_CLONE + '_wo_update',
135 TEST_HG_REPO_CLONE + '_wo_update',
136 src_url=TEST_HG_REPO, do_workspace_checkout=False)
136 src_url=TEST_HG_REPO, do_workspace_checkout=False)
137 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
137 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
138 assert not os.path.isfile(
138 assert not os.path.isfile(
139 os.path.join(TEST_HG_REPO_CLONE + '_wo_update', 'MANIFEST.in'))
139 os.path.join(TEST_HG_REPO_CLONE + '_wo_update', 'MANIFEST.in'))
140
140
141 def test_commit_ids(self):
141 def test_commit_ids(self):
142 # there are 21 commits at bitbucket now
142 # there are 21 commits at bitbucket now
143 # so we can assume they would be available from now on
143 # so we can assume they would be available from now on
144 subset = set([
144 subset = set([
145 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
145 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
146 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
146 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
147 '6cba7170863a2411822803fa77a0a264f1310b35',
147 '6cba7170863a2411822803fa77a0a264f1310b35',
148 '56349e29c2af3ac913b28bde9a2c6154436e615b',
148 '56349e29c2af3ac913b28bde9a2c6154436e615b',
149 '2dda4e345facb0ccff1a191052dd1606dba6781d',
149 '2dda4e345facb0ccff1a191052dd1606dba6781d',
150 '6fff84722075f1607a30f436523403845f84cd9e',
150 '6fff84722075f1607a30f436523403845f84cd9e',
151 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
151 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
152 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
152 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
153 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
153 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
154 'be90031137367893f1c406e0a8683010fd115b79',
154 'be90031137367893f1c406e0a8683010fd115b79',
155 'db8e58be770518cbb2b1cdfa69146e47cd481481',
155 'db8e58be770518cbb2b1cdfa69146e47cd481481',
156 '84478366594b424af694a6c784cb991a16b87c21',
156 '84478366594b424af694a6c784cb991a16b87c21',
157 '17f8e105dddb9f339600389c6dc7175d395a535c',
157 '17f8e105dddb9f339600389c6dc7175d395a535c',
158 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
158 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
159 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
159 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
160 '786facd2c61deb9cf91e9534735124fb8fc11842',
160 '786facd2c61deb9cf91e9534735124fb8fc11842',
161 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
161 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
162 'aa6a0de05b7612707db567078e130a6cd114a9a7',
162 'aa6a0de05b7612707db567078e130a6cd114a9a7',
163 'eada5a770da98ab0dd7325e29d00e0714f228d09'
163 'eada5a770da98ab0dd7325e29d00e0714f228d09'
164 ])
164 ])
165 assert subset.issubset(set(self.repo.commit_ids))
165 assert subset.issubset(set(self.repo.commit_ids))
166
166
167 # check if we have the proper order of commits
167 # check if we have the proper order of commits
168 org = [
168 org = [
169 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
169 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
170 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
170 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
171 '6cba7170863a2411822803fa77a0a264f1310b35',
171 '6cba7170863a2411822803fa77a0a264f1310b35',
172 '56349e29c2af3ac913b28bde9a2c6154436e615b',
172 '56349e29c2af3ac913b28bde9a2c6154436e615b',
173 '2dda4e345facb0ccff1a191052dd1606dba6781d',
173 '2dda4e345facb0ccff1a191052dd1606dba6781d',
174 '6fff84722075f1607a30f436523403845f84cd9e',
174 '6fff84722075f1607a30f436523403845f84cd9e',
175 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
175 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
176 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
176 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
177 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
177 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
178 'be90031137367893f1c406e0a8683010fd115b79',
178 'be90031137367893f1c406e0a8683010fd115b79',
179 'db8e58be770518cbb2b1cdfa69146e47cd481481',
179 'db8e58be770518cbb2b1cdfa69146e47cd481481',
180 '84478366594b424af694a6c784cb991a16b87c21',
180 '84478366594b424af694a6c784cb991a16b87c21',
181 '17f8e105dddb9f339600389c6dc7175d395a535c',
181 '17f8e105dddb9f339600389c6dc7175d395a535c',
182 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
182 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
183 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
183 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
184 '786facd2c61deb9cf91e9534735124fb8fc11842',
184 '786facd2c61deb9cf91e9534735124fb8fc11842',
185 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
185 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
186 'aa6a0de05b7612707db567078e130a6cd114a9a7',
186 'aa6a0de05b7612707db567078e130a6cd114a9a7',
187 'eada5a770da98ab0dd7325e29d00e0714f228d09',
187 'eada5a770da98ab0dd7325e29d00e0714f228d09',
188 '2c1885c735575ca478bf9e17b0029dca68824458',
188 '2c1885c735575ca478bf9e17b0029dca68824458',
189 'd9bcd465040bf869799b09ad732c04e0eea99fe9',
189 'd9bcd465040bf869799b09ad732c04e0eea99fe9',
190 '469e9c847fe1f6f7a697b8b25b4bc5b48780c1a7',
190 '469e9c847fe1f6f7a697b8b25b4bc5b48780c1a7',
191 '4fb8326d78e5120da2c7468dcf7098997be385da',
191 '4fb8326d78e5120da2c7468dcf7098997be385da',
192 '62b4a097164940bd66030c4db51687f3ec035eed',
192 '62b4a097164940bd66030c4db51687f3ec035eed',
193 '536c1a19428381cfea92ac44985304f6a8049569',
193 '536c1a19428381cfea92ac44985304f6a8049569',
194 '965e8ab3c44b070cdaa5bf727ddef0ada980ecc4',
194 '965e8ab3c44b070cdaa5bf727ddef0ada980ecc4',
195 '9bb326a04ae5d98d437dece54be04f830cf1edd9',
195 '9bb326a04ae5d98d437dece54be04f830cf1edd9',
196 'f8940bcb890a98c4702319fbe36db75ea309b475',
196 'f8940bcb890a98c4702319fbe36db75ea309b475',
197 'ff5ab059786ebc7411e559a2cc309dfae3625a3b',
197 'ff5ab059786ebc7411e559a2cc309dfae3625a3b',
198 '6b6ad5f82ad5bb6190037671bd254bd4e1f4bf08',
198 '6b6ad5f82ad5bb6190037671bd254bd4e1f4bf08',
199 'ee87846a61c12153b51543bf860e1026c6d3dcba',
199 'ee87846a61c12153b51543bf860e1026c6d3dcba',
200 ]
200 ]
201 assert org == self.repo.commit_ids[:31]
201 assert org == self.repo.commit_ids[:31]
202
202
203 def test_iter_slice(self):
203 def test_iter_slice(self):
204 sliced = list(self.repo[:10])
204 sliced = list(self.repo[:10])
205 itered = list(self.repo)[:10]
205 itered = list(self.repo)[:10]
206 assert sliced == itered
206 assert sliced == itered
207
207
208 def test_slicing(self):
208 def test_slicing(self):
209 # 4 1 5 10 95
209 # 4 1 5 10 95
210 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
210 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
211 (10, 20, 10), (5, 100, 95)]:
211 (10, 20, 10), (5, 100, 95)]:
212 indexes = list(self.repo[sfrom:sto])
212 indexes = list(self.repo[sfrom:sto])
213 assert len(indexes) == size
213 assert len(indexes) == size
214 assert indexes[0] == self.repo.get_commit(commit_idx=sfrom)
214 assert indexes[0] == self.repo.get_commit(commit_idx=sfrom)
215 assert indexes[-1] == self.repo.get_commit(commit_idx=sto - 1)
215 assert indexes[-1] == self.repo.get_commit(commit_idx=sto - 1)
216
216
217 def test_branches(self):
217 def test_branches(self):
218 # TODO: Need more tests here
218 # TODO: Need more tests here
219
219
220 # active branches
220 # active branches
221 assert 'default' in self.repo.branches
221 assert 'default' in self.repo.branches
222 assert 'stable' in self.repo.branches
222 assert 'stable' in self.repo.branches
223
223
224 # closed
224 # closed
225 assert 'git' in self.repo._get_branches(closed=True)
225 assert 'git' in self.repo._get_branches(closed=True)
226 assert 'web' in self.repo._get_branches(closed=True)
226 assert 'web' in self.repo._get_branches(closed=True)
227
227
228 for name, id in self.repo.branches.items():
228 for name, id in self.repo.branches.items():
229 assert isinstance(self.repo.get_commit(id), MercurialCommit)
229 assert isinstance(self.repo.get_commit(id), MercurialCommit)
230
230
231 def test_tip_in_tags(self):
231 def test_tip_in_tags(self):
232 # tip is always a tag
232 # tip is always a tag
233 assert 'tip' in self.repo.tags
233 assert 'tip' in self.repo.tags
234
234
235 def test_tip_commit_in_tags(self):
235 def test_tip_commit_in_tags(self):
236 tip = self.repo.get_commit()
236 tip = self.repo.get_commit()
237 assert self.repo.tags['tip'] == tip.raw_id
237 assert self.repo.tags['tip'] == tip.raw_id
238
238
239 def test_initial_commit(self):
239 def test_initial_commit(self):
240 init_commit = self.repo.get_commit(commit_idx=0)
240 init_commit = self.repo.get_commit(commit_idx=0)
241 init_author = init_commit.author
241 init_author = init_commit.author
242
242
243 assert init_commit.message == 'initial import'
243 assert init_commit.message == 'initial import'
244 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
244 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
245 assert init_author == init_commit.committer
245 assert init_author == init_commit.committer
246 assert sorted(init_commit._file_paths) == sorted([
246 assert sorted(init_commit._file_paths) == sorted([
247 'vcs/__init__.py',
247 'vcs/__init__.py',
248 'vcs/backends/BaseRepository.py',
248 'vcs/backends/BaseRepository.py',
249 'vcs/backends/__init__.py',
249 'vcs/backends/__init__.py',
250 ])
250 ])
251 assert sorted(init_commit._dir_paths) == sorted(
251 assert sorted(init_commit._dir_paths) == sorted(
252 ['', 'vcs', 'vcs/backends'])
252 ['', 'vcs', 'vcs/backends'])
253
253
254 assert init_commit._dir_paths + init_commit._file_paths == \
254 assert init_commit._dir_paths + init_commit._file_paths == \
255 init_commit._paths
255 init_commit._paths
256
256
257 with pytest.raises(NodeDoesNotExistError):
257 with pytest.raises(NodeDoesNotExistError):
258 init_commit.get_node(path='foobar')
258 init_commit.get_node(path='foobar')
259
259
260 node = init_commit.get_node('vcs/')
260 node = init_commit.get_node('vcs/')
261 assert hasattr(node, 'kind')
261 assert hasattr(node, 'kind')
262 assert node.kind == NodeKind.DIR
262 assert node.kind == NodeKind.DIR
263
263
264 node = init_commit.get_node('vcs')
264 node = init_commit.get_node('vcs')
265 assert hasattr(node, 'kind')
265 assert hasattr(node, 'kind')
266 assert node.kind == NodeKind.DIR
266 assert node.kind == NodeKind.DIR
267
267
268 node = init_commit.get_node('vcs/__init__.py')
268 node = init_commit.get_node('vcs/__init__.py')
269 assert hasattr(node, 'kind')
269 assert hasattr(node, 'kind')
270 assert node.kind == NodeKind.FILE
270 assert node.kind == NodeKind.FILE
271
271
272 def test_not_existing_commit(self):
272 def test_not_existing_commit(self):
273 # rawid
273 # rawid
274 with pytest.raises(RepositoryError):
274 with pytest.raises(RepositoryError):
275 self.repo.get_commit('abcd' * 10)
275 self.repo.get_commit('abcd' * 10)
276 # shortid
276 # shortid
277 with pytest.raises(RepositoryError):
277 with pytest.raises(RepositoryError):
278 self.repo.get_commit('erro' * 4)
278 self.repo.get_commit('erro' * 4)
279 # numeric
279 # numeric
280 with pytest.raises(RepositoryError):
280 with pytest.raises(RepositoryError):
281 self.repo.get_commit(commit_idx=self.repo.count() + 1)
281 self.repo.get_commit(commit_idx=self.repo.count() + 1)
282
282
283 # Small chance we ever get to this one
283 # Small chance we ever get to this one
284 idx = pow(2, 30)
284 idx = pow(2, 30)
285 with pytest.raises(RepositoryError):
285 with pytest.raises(RepositoryError):
286 self.repo.get_commit(commit_idx=idx)
286 self.repo.get_commit(commit_idx=idx)
287
287
288 def test_commit10(self):
288 def test_commit10(self):
289 commit10 = self.repo.get_commit(commit_idx=10)
289 commit10 = self.repo.get_commit(commit_idx=10)
290 README = """===
290 README = """===
291 VCS
291 VCS
292 ===
292 ===
293
293
294 Various Version Control System management abstraction layer for Python.
294 Various Version Control System management abstraction layer for Python.
295
295
296 Introduction
296 Introduction
297 ------------
297 ------------
298
298
299 TODO: To be written...
299 TODO: To be written...
300
300
301 """
301 """
302 node = commit10.get_node('README.rst')
302 node = commit10.get_node('README.rst')
303 assert node.kind == NodeKind.FILE
303 assert node.kind == NodeKind.FILE
304 assert node.content == README
304 assert node.content == README
305
305
306 def test_local_clone(self):
306 def test_local_clone(self):
307 clone_path = next(REPO_PATH_GENERATOR)
307 clone_path = next(REPO_PATH_GENERATOR)
308 self.repo._local_clone(clone_path)
308 self.repo._local_clone(clone_path)
309 repo_clone = MercurialRepository(clone_path)
309 repo_clone = MercurialRepository(clone_path)
310
310
311 assert self.repo.commit_ids == repo_clone.commit_ids
311 assert self.repo.commit_ids == repo_clone.commit_ids
312
312
313 def test_local_clone_fails_if_target_exists(self):
313 def test_local_clone_fails_if_target_exists(self):
314 with pytest.raises(RepositoryError):
314 with pytest.raises(RepositoryError):
315 self.repo._local_clone(self.repo.path)
315 self.repo._local_clone(self.repo.path)
316
316
317 def test_update(self):
317 def test_update(self):
318 repo_clone = self.get_clone_repo()
318 repo_clone = self.get_clone_repo()
319 branches = repo_clone.branches
319 branches = repo_clone.branches
320
320
321 repo_clone._update('default')
321 repo_clone._update('default')
322 assert branches['default'] == repo_clone._identify()
322 assert branches['default'] == repo_clone._identify()
323 repo_clone._update('stable')
323 repo_clone._update('stable')
324 assert branches['stable'] == repo_clone._identify()
324 assert branches['stable'] == repo_clone._identify()
325
325
326 def test_local_pull_branch(self):
326 def test_local_pull_branch(self):
327 target_repo = self.get_empty_repo()
327 target_repo = self.get_empty_repo()
328 source_repo = self.get_clone_repo()
328 source_repo = self.get_clone_repo()
329
329
330 default = Reference(
330 default = Reference(
331 'branch', 'default', source_repo.branches['default'])
331 'branch', 'default', source_repo.branches['default'])
332 target_repo._local_pull(source_repo.path, default)
332 target_repo._local_pull(source_repo.path, default)
333 target_repo = MercurialRepository(target_repo.path)
333 target_repo = MercurialRepository(target_repo.path)
334 assert (target_repo.branches['default'] ==
334 assert (target_repo.branches['default'] ==
335 source_repo.branches['default'])
335 source_repo.branches['default'])
336
336
337 stable = Reference('branch', 'stable', source_repo.branches['stable'])
337 stable = Reference('branch', 'stable', source_repo.branches['stable'])
338 target_repo._local_pull(source_repo.path, stable)
338 target_repo._local_pull(source_repo.path, stable)
339 target_repo = MercurialRepository(target_repo.path)
339 target_repo = MercurialRepository(target_repo.path)
340 assert target_repo.branches['stable'] == source_repo.branches['stable']
340 assert target_repo.branches['stable'] == source_repo.branches['stable']
341
341
342 def test_local_pull_bookmark(self):
342 def test_local_pull_bookmark(self):
343 target_repo = self.get_empty_repo()
343 target_repo = self.get_empty_repo()
344 source_repo = self.get_clone_repo()
344 source_repo = self.get_clone_repo()
345
345
346 commits = list(source_repo.get_commits(branch_name='default'))
346 commits = list(source_repo.get_commits(branch_name='default'))
347 foo1_id = commits[-5].raw_id
347 foo1_id = commits[-5].raw_id
348 foo1 = Reference('book', 'foo1', foo1_id)
348 foo1 = Reference('book', 'foo1', foo1_id)
349 source_repo._update(foo1_id)
349 source_repo._update(foo1_id)
350 source_repo.bookmark('foo1')
350 source_repo.bookmark('foo1')
351
351
352 foo2_id = commits[-3].raw_id
352 foo2_id = commits[-3].raw_id
353 foo2 = Reference('book', 'foo2', foo2_id)
353 foo2 = Reference('book', 'foo2', foo2_id)
354 source_repo._update(foo2_id)
354 source_repo._update(foo2_id)
355 source_repo.bookmark('foo2')
355 source_repo.bookmark('foo2')
356
356
357 target_repo._local_pull(source_repo.path, foo1)
357 target_repo._local_pull(source_repo.path, foo1)
358 target_repo = MercurialRepository(target_repo.path)
358 target_repo = MercurialRepository(target_repo.path)
359 assert target_repo.branches['default'] == commits[-5].raw_id
359 assert target_repo.branches['default'] == commits[-5].raw_id
360
360
361 target_repo._local_pull(source_repo.path, foo2)
361 target_repo._local_pull(source_repo.path, foo2)
362 target_repo = MercurialRepository(target_repo.path)
362 target_repo = MercurialRepository(target_repo.path)
363 assert target_repo.branches['default'] == commits[-3].raw_id
363 assert target_repo.branches['default'] == commits[-3].raw_id
364
364
365 def test_local_pull_commit(self):
365 def test_local_pull_commit(self):
366 target_repo = self.get_empty_repo()
366 target_repo = self.get_empty_repo()
367 source_repo = self.get_clone_repo()
367 source_repo = self.get_clone_repo()
368
368
369 commits = list(source_repo.get_commits(branch_name='default'))
369 commits = list(source_repo.get_commits(branch_name='default'))
370 commit_id = commits[-5].raw_id
370 commit_id = commits[-5].raw_id
371 commit = Reference('rev', commit_id, commit_id)
371 commit = Reference('rev', commit_id, commit_id)
372 target_repo._local_pull(source_repo.path, commit)
372 target_repo._local_pull(source_repo.path, commit)
373 target_repo = MercurialRepository(target_repo.path)
373 target_repo = MercurialRepository(target_repo.path)
374 assert target_repo.branches['default'] == commit_id
374 assert target_repo.branches['default'] == commit_id
375
375
376 commit_id = commits[-3].raw_id
376 commit_id = commits[-3].raw_id
377 commit = Reference('rev', commit_id, commit_id)
377 commit = Reference('rev', commit_id, commit_id)
378 target_repo._local_pull(source_repo.path, commit)
378 target_repo._local_pull(source_repo.path, commit)
379 target_repo = MercurialRepository(target_repo.path)
379 target_repo = MercurialRepository(target_repo.path)
380 assert target_repo.branches['default'] == commit_id
380 assert target_repo.branches['default'] == commit_id
381
381
382 def test_local_pull_from_same_repo(self):
382 def test_local_pull_from_same_repo(self):
383 reference = Reference('branch', 'default', None)
383 reference = Reference('branch', 'default', None)
384 with pytest.raises(ValueError):
384 with pytest.raises(ValueError):
385 self.repo._local_pull(self.repo.path, reference)
385 self.repo._local_pull(self.repo.path, reference)
386
386
387 def test_validate_pull_reference_raises_on_missing_reference(
387 def test_validate_pull_reference_raises_on_missing_reference(
388 self, vcsbackend_hg):
388 self, vcsbackend_hg):
389 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
389 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
390 reference = Reference(
390 reference = Reference(
391 'book', 'invalid_reference', 'a' * 40)
391 'book', 'invalid_reference', 'a' * 40)
392
392
393 with pytest.raises(CommitDoesNotExistError):
393 with pytest.raises(CommitDoesNotExistError):
394 target_repo._validate_pull_reference(reference)
394 target_repo._validate_pull_reference(reference)
395
395
396 def test_heads(self):
396 def test_heads(self):
397 assert set(self.repo._heads()) == set(self.repo.branches.values())
397 assert set(self.repo._heads()) == set(self.repo.branches.values())
398
398
399 def test_ancestor(self):
399 def test_ancestor(self):
400 commits = [
400 commits = [
401 c.raw_id for c in self.repo.get_commits(branch_name='default')]
401 c.raw_id for c in self.repo.get_commits(branch_name='default')]
402 assert self.repo._ancestor(commits[-3], commits[-5]) == commits[-5]
402 assert self.repo._ancestor(commits[-3], commits[-5]) == commits[-5]
403 assert self.repo._ancestor(commits[-5], commits[-3]) == commits[-5]
403 assert self.repo._ancestor(commits[-5], commits[-3]) == commits[-5]
404
404
405 def test_local_push(self):
405 def test_local_push(self):
406 target_repo = self.get_empty_repo()
406 target_repo = self.get_empty_repo()
407
407
408 revisions = list(self.repo.get_commits(branch_name='default'))
408 revisions = list(self.repo.get_commits(branch_name='default'))
409 revision = revisions[-5].raw_id
409 revision = revisions[-5].raw_id
410 self.repo._local_push(revision, target_repo.path)
410 self.repo._local_push(revision, target_repo.path)
411
411
412 target_repo = MercurialRepository(target_repo.path)
412 target_repo = MercurialRepository(target_repo.path)
413
413
414 assert target_repo.branches['default'] == revision
414 assert target_repo.branches['default'] == revision
415
415
416 def test_hooks_can_be_enabled_for_local_push(self):
416 def test_hooks_can_be_enabled_for_local_push(self):
417 revision = 'deadbeef'
417 revision = 'deadbeef'
418 repo_path = 'test_group/test_repo'
418 repo_path = 'test_group/test_repo'
419 with mock.patch.object(self.repo, '_remote') as remote_mock:
419 with mock.patch.object(self.repo, '_remote') as remote_mock:
420 self.repo._local_push(revision, repo_path, enable_hooks=True)
420 self.repo._local_push(revision, repo_path, enable_hooks=True)
421 remote_mock.push.assert_called_once_with(
421 remote_mock.push.assert_called_once_with(
422 [revision], repo_path, hooks=True, push_branches=False)
422 [revision], repo_path, hooks=True, push_branches=False)
423
423
424 def test_local_merge(self, vcsbackend_hg):
424 def test_local_merge(self, vcsbackend_hg):
425 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
425 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
426 source_repo = vcsbackend_hg.clone_repo(target_repo)
426 source_repo = vcsbackend_hg.clone_repo(target_repo)
427 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
427 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
428 target_repo = MercurialRepository(target_repo.path)
428 target_repo = MercurialRepository(target_repo.path)
429 target_rev = target_repo.branches['default']
429 target_rev = target_repo.branches['default']
430 target_ref = Reference(
430 target_ref = Reference(
431 type='branch', name='default', commit_id=target_rev)
431 type='branch', name='default', commit_id=target_rev)
432 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
432 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
433 source_repo = MercurialRepository(source_repo.path)
433 source_repo = MercurialRepository(source_repo.path)
434 source_rev = source_repo.branches['default']
434 source_rev = source_repo.branches['default']
435 source_ref = Reference(
435 source_ref = Reference(
436 type='branch', name='default', commit_id=source_rev)
436 type='branch', name='default', commit_id=source_rev)
437
437
438 target_repo._local_pull(source_repo.path, source_ref)
438 target_repo._local_pull(source_repo.path, source_ref)
439
439
440 merge_message = 'Merge message\n\nDescription:...'
440 merge_message = 'Merge message\n\nDescription:...'
441 user_name = 'Albert Einstein'
441 user_name = 'Albert Einstein'
442 user_email = 'albert@einstein.com'
442 user_email = 'albert@einstein.com'
443 merge_commit_id, needs_push = target_repo._local_merge(
443 merge_commit_id, needs_push = target_repo._local_merge(
444 target_ref, merge_message, user_name, user_email, source_ref)
444 target_ref, merge_message, user_name, user_email, source_ref)
445 assert needs_push
445 assert needs_push
446
446
447 target_repo = MercurialRepository(target_repo.path)
447 target_repo = MercurialRepository(target_repo.path)
448 assert target_repo.commit_ids[-3] == target_rev
448 assert target_repo.commit_ids[-3] == target_rev
449 assert target_repo.commit_ids[-2] == source_rev
449 assert target_repo.commit_ids[-2] == source_rev
450 last_commit = target_repo.get_commit(merge_commit_id)
450 last_commit = target_repo.get_commit(merge_commit_id)
451 assert last_commit.message.strip() == merge_message
451 assert last_commit.message.strip() == merge_message
452 assert last_commit.author == '%s <%s>' % (user_name, user_email)
452 assert last_commit.author == '%s <%s>' % (user_name, user_email)
453
453
454 assert not os.path.exists(
454 assert not os.path.exists(
455 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
455 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
456
456
457 def test_local_merge_source_is_fast_forward(self, vcsbackend_hg):
457 def test_local_merge_source_is_fast_forward(self, vcsbackend_hg):
458 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
458 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
459 source_repo = vcsbackend_hg.clone_repo(target_repo)
459 source_repo = vcsbackend_hg.clone_repo(target_repo)
460 target_rev = target_repo.branches['default']
460 target_rev = target_repo.branches['default']
461 target_ref = Reference(
461 target_ref = Reference(
462 type='branch', name='default', commit_id=target_rev)
462 type='branch', name='default', commit_id=target_rev)
463 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
463 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
464 source_repo = MercurialRepository(source_repo.path)
464 source_repo = MercurialRepository(source_repo.path)
465 source_rev = source_repo.branches['default']
465 source_rev = source_repo.branches['default']
466 source_ref = Reference(
466 source_ref = Reference(
467 type='branch', name='default', commit_id=source_rev)
467 type='branch', name='default', commit_id=source_rev)
468
468
469 target_repo._local_pull(source_repo.path, source_ref)
469 target_repo._local_pull(source_repo.path, source_ref)
470
470
471 merge_message = 'Merge message\n\nDescription:...'
471 merge_message = 'Merge message\n\nDescription:...'
472 user_name = 'Albert Einstein'
472 user_name = 'Albert Einstein'
473 user_email = 'albert@einstein.com'
473 user_email = 'albert@einstein.com'
474 merge_commit_id, needs_push = target_repo._local_merge(
474 merge_commit_id, needs_push = target_repo._local_merge(
475 target_ref, merge_message, user_name, user_email, source_ref)
475 target_ref, merge_message, user_name, user_email, source_ref)
476 assert merge_commit_id == source_rev
476 assert merge_commit_id == source_rev
477 assert needs_push
477 assert needs_push
478
478
479 target_repo = MercurialRepository(target_repo.path)
479 target_repo = MercurialRepository(target_repo.path)
480 assert target_repo.commit_ids[-2] == target_rev
480 assert target_repo.commit_ids[-2] == target_rev
481 assert target_repo.commit_ids[-1] == source_rev
481 assert target_repo.commit_ids[-1] == source_rev
482
482
483 assert not os.path.exists(
483 assert not os.path.exists(
484 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
484 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
485
485
486 def test_local_merge_source_is_integrated(self, vcsbackend_hg):
486 def test_local_merge_source_is_integrated(self, vcsbackend_hg):
487 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
487 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
488 target_rev = target_repo.branches['default']
488 target_rev = target_repo.branches['default']
489 target_ref = Reference(
489 target_ref = Reference(
490 type='branch', name='default', commit_id=target_rev)
490 type='branch', name='default', commit_id=target_rev)
491
491
492 merge_message = 'Merge message\n\nDescription:...'
492 merge_message = 'Merge message\n\nDescription:...'
493 user_name = 'Albert Einstein'
493 user_name = 'Albert Einstein'
494 user_email = 'albert@einstein.com'
494 user_email = 'albert@einstein.com'
495 merge_commit_id, needs_push = target_repo._local_merge(
495 merge_commit_id, needs_push = target_repo._local_merge(
496 target_ref, merge_message, user_name, user_email, target_ref)
496 target_ref, merge_message, user_name, user_email, target_ref)
497 assert merge_commit_id == target_rev
497 assert merge_commit_id == target_rev
498 assert not needs_push
498 assert not needs_push
499
499
500 target_repo = MercurialRepository(target_repo.path)
500 target_repo = MercurialRepository(target_repo.path)
501 assert target_repo.commit_ids[-1] == target_rev
501 assert target_repo.commit_ids[-1] == target_rev
502
502
503 assert not os.path.exists(
503 assert not os.path.exists(
504 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
504 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
505
505
506 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_hg):
506 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_hg):
507 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
507 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
508 source_repo = vcsbackend_hg.clone_repo(target_repo)
508 source_repo = vcsbackend_hg.clone_repo(target_repo)
509 vcsbackend_hg.add_file(target_repo, 'README_MERGE', 'Version 1')
509 vcsbackend_hg.add_file(target_repo, 'README_MERGE', 'Version 1')
510 target_repo = MercurialRepository(target_repo.path)
510 target_repo = MercurialRepository(target_repo.path)
511 target_rev = target_repo.branches['default']
511 target_rev = target_repo.branches['default']
512 target_ref = Reference(
512 target_ref = Reference(
513 type='branch', name='default', commit_id=target_rev)
513 type='branch', name='default', commit_id=target_rev)
514 vcsbackend_hg.add_file(source_repo, 'README_MERGE', 'Version 2')
514 vcsbackend_hg.add_file(source_repo, 'README_MERGE', 'Version 2')
515 source_repo = MercurialRepository(source_repo.path)
515 source_repo = MercurialRepository(source_repo.path)
516 source_rev = source_repo.branches['default']
516 source_rev = source_repo.branches['default']
517 source_ref = Reference(
517 source_ref = Reference(
518 type='branch', name='default', commit_id=source_rev)
518 type='branch', name='default', commit_id=source_rev)
519
519
520 target_repo._local_pull(source_repo.path, source_ref)
520 target_repo._local_pull(source_repo.path, source_ref)
521 with pytest.raises(RepositoryError):
521 with pytest.raises(RepositoryError):
522 target_repo._local_merge(
522 target_repo._local_merge(
523 target_ref, 'merge_message', 'user name', 'user@name.com',
523 target_ref, 'merge_message', 'user name', 'user@name.com',
524 source_ref)
524 source_ref)
525
525
526 # Check we are not left in an intermediate merge state
526 # Check we are not left in an intermediate merge state
527 assert not os.path.exists(
527 assert not os.path.exists(
528 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
528 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
529
529
530 def test_local_merge_of_two_branches_of_the_same_repo(self, backend_hg):
530 def test_local_merge_of_two_branches_of_the_same_repo(self, backend_hg):
531 commits = [
531 commits = [
532 {'message': 'a'},
532 {'message': 'a'},
533 {'message': 'b', 'branch': 'b'},
533 {'message': 'b', 'branch': 'b'},
534 ]
534 ]
535 repo = backend_hg.create_repo(commits)
535 repo = backend_hg.create_repo(commits)
536 commit_ids = backend_hg.commit_ids
536 commit_ids = backend_hg.commit_ids
537 target_ref = Reference(
537 target_ref = Reference(
538 type='branch', name='default', commit_id=commit_ids['a'])
538 type='branch', name='default', commit_id=commit_ids['a'])
539 source_ref = Reference(
539 source_ref = Reference(
540 type='branch', name='b', commit_id=commit_ids['b'])
540 type='branch', name='b', commit_id=commit_ids['b'])
541 merge_message = 'Merge message\n\nDescription:...'
541 merge_message = 'Merge message\n\nDescription:...'
542 user_name = 'Albert Einstein'
542 user_name = 'Albert Einstein'
543 user_email = 'albert@einstein.com'
543 user_email = 'albert@einstein.com'
544 vcs_repo = repo.scm_instance()
544 vcs_repo = repo.scm_instance()
545 merge_commit_id, needs_push = vcs_repo._local_merge(
545 merge_commit_id, needs_push = vcs_repo._local_merge(
546 target_ref, merge_message, user_name, user_email, source_ref)
546 target_ref, merge_message, user_name, user_email, source_ref)
547 assert merge_commit_id != source_ref.commit_id
547 assert merge_commit_id != source_ref.commit_id
548 assert needs_push is True
548 assert needs_push is True
549 commit = vcs_repo.get_commit(merge_commit_id)
549 commit = vcs_repo.get_commit(merge_commit_id)
550 assert commit.merge is True
550 assert commit.merge is True
551 assert commit.message == merge_message
551 assert commit.message == merge_message
552
552
553 def test_maybe_prepare_merge_workspace(self):
553 def test_maybe_prepare_merge_workspace(self):
554 workspace = self.repo._maybe_prepare_merge_workspace(
554 workspace = self.repo._maybe_prepare_merge_workspace(
555 1, 'pr2', 'unused', 'unused2')
555 1, 'pr2', 'unused', 'unused2')
556
556
557 assert os.path.isdir(workspace)
557 assert os.path.isdir(workspace)
558 workspace_repo = MercurialRepository(workspace)
558 workspace_repo = MercurialRepository(workspace)
559 assert workspace_repo.branches == self.repo.branches
559 assert workspace_repo.branches == self.repo.branches
560
560
561 # Calling it a second time should also succeed
561 # Calling it a second time should also succeed
562 workspace = self.repo._maybe_prepare_merge_workspace(
562 workspace = self.repo._maybe_prepare_merge_workspace(
563 1, 'pr2', 'unused', 'unused2')
563 1, 'pr2', 'unused', 'unused2')
564 assert os.path.isdir(workspace)
564 assert os.path.isdir(workspace)
565
565
566 def test_cleanup_merge_workspace(self):
566 def test_cleanup_merge_workspace(self):
567 workspace = self.repo._maybe_prepare_merge_workspace(
567 workspace = self.repo._maybe_prepare_merge_workspace(
568 1, 'pr3', 'unused', 'unused2')
568 1, 'pr3', 'unused', 'unused2')
569
569
570 assert os.path.isdir(workspace)
570 assert os.path.isdir(workspace)
571 self.repo.cleanup_merge_workspace(1, 'pr3')
571 self.repo.cleanup_merge_workspace(1, 'pr3')
572
572
573 assert not os.path.exists(workspace)
573 assert not os.path.exists(workspace)
574
574
575 def test_cleanup_merge_workspace_invalid_workspace_id(self):
575 def test_cleanup_merge_workspace_invalid_workspace_id(self):
576 # No assert: because in case of an inexistent workspace this function
576 # No assert: because in case of an inexistent workspace this function
577 # should still succeed.
577 # should still succeed.
578 self.repo.cleanup_merge_workspace(1, 'pr4')
578 self.repo.cleanup_merge_workspace(1, 'pr4')
579
579
580 def test_merge_target_is_bookmark(self, vcsbackend_hg):
580 def test_merge_target_is_bookmark(self, vcsbackend_hg):
581 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
581 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
582 source_repo = vcsbackend_hg.clone_repo(target_repo)
582 source_repo = vcsbackend_hg.clone_repo(target_repo)
583 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
583 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
584 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
584 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
585 imc = source_repo.in_memory_commit
585 imc = source_repo.in_memory_commit
586 imc.add(FileNode('file_x', content=source_repo.name))
586 imc.add(FileNode('file_x', content=source_repo.name))
587 imc.commit(
587 imc.commit(
588 message=u'Automatic commit from repo merge test',
588 message=u'Automatic commit from repo merge test',
589 author=u'Automatic <automatic@rhodecode.com>')
589 author=u'Automatic <automatic@rhodecode.com>')
590 target_commit = target_repo.get_commit()
590 target_commit = target_repo.get_commit()
591 source_commit = source_repo.get_commit()
591 source_commit = source_repo.get_commit()
592 default_branch = target_repo.DEFAULT_BRANCH_NAME
592 default_branch = target_repo.DEFAULT_BRANCH_NAME
593 bookmark_name = 'bookmark'
593 bookmark_name = 'bookmark'
594 target_repo._update(default_branch)
594 target_repo._update(default_branch)
595 target_repo.bookmark(bookmark_name)
595 target_repo.bookmark(bookmark_name)
596 target_ref = Reference('book', bookmark_name, target_commit.raw_id)
596 target_ref = Reference('book', bookmark_name, target_commit.raw_id)
597 source_ref = Reference('branch', default_branch, source_commit.raw_id)
597 source_ref = Reference('branch', default_branch, source_commit.raw_id)
598 workspace_id = 'test-merge'
598 workspace_id = 'test-merge'
599 repo_id = repo_id_generator(target_repo.path)
599 repo_id = repo_id_generator(target_repo.path)
600 merge_response = target_repo.merge(
600 merge_response = target_repo.merge(
601 repo_id, workspace_id, target_ref, source_repo, source_ref,
601 repo_id, workspace_id, target_ref, source_repo, source_ref,
602 'test user', 'test@rhodecode.com', 'merge message 1',
602 'test user', 'test@rhodecode.com', 'merge message 1',
603 dry_run=False)
603 dry_run=False)
604 expected_merge_response = MergeResponse(
604 expected_merge_response = MergeResponse(
605 True, True, merge_response.merge_ref,
605 True, True, merge_response.merge_ref,
606 MergeFailureReason.NONE)
606 MergeFailureReason.NONE)
607 assert merge_response == expected_merge_response
607 assert merge_response == expected_merge_response
608
608
609 target_repo = backends.get_backend(vcsbackend_hg.alias)(
609 target_repo = backends.get_backend(vcsbackend_hg.alias)(
610 target_repo.path)
610 target_repo.path)
611 target_commits = list(target_repo.get_commits())
611 target_commits = list(target_repo.get_commits())
612 commit_ids = [c.raw_id for c in target_commits[:-1]]
612 commit_ids = [c.raw_id for c in target_commits[:-1]]
613 assert source_ref.commit_id in commit_ids
613 assert source_ref.commit_id in commit_ids
614 assert target_ref.commit_id in commit_ids
614 assert target_ref.commit_id in commit_ids
615
615
616 merge_commit = target_commits[-1]
616 merge_commit = target_commits[-1]
617 assert merge_commit.raw_id == merge_response.merge_ref.commit_id
617 assert merge_commit.raw_id == merge_response.merge_ref.commit_id
618 assert merge_commit.message.strip() == 'merge message 1'
618 assert merge_commit.message.strip() == 'merge message 1'
619 assert merge_commit.author == 'test user <test@rhodecode.com>'
619 assert merge_commit.author == 'test user <test@rhodecode.com>'
620
620
621 # Check the bookmark was updated in the target repo
621 # Check the bookmark was updated in the target repo
622 assert (
622 assert (
623 target_repo.bookmarks[bookmark_name] ==
623 target_repo.bookmarks[bookmark_name] ==
624 merge_response.merge_ref.commit_id)
624 merge_response.merge_ref.commit_id)
625
625
626 def test_merge_source_is_bookmark(self, vcsbackend_hg):
626 def test_merge_source_is_bookmark(self, vcsbackend_hg):
627 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
627 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
628 source_repo = vcsbackend_hg.clone_repo(target_repo)
628 source_repo = vcsbackend_hg.clone_repo(target_repo)
629 imc = source_repo.in_memory_commit
629 imc = source_repo.in_memory_commit
630 imc.add(FileNode('file_x', content=source_repo.name))
630 imc.add(FileNode('file_x', content=source_repo.name))
631 imc.commit(
631 imc.commit(
632 message=u'Automatic commit from repo merge test',
632 message=u'Automatic commit from repo merge test',
633 author=u'Automatic <automatic@rhodecode.com>')
633 author=u'Automatic <automatic@rhodecode.com>')
634 target_commit = target_repo.get_commit()
634 target_commit = target_repo.get_commit()
635 source_commit = source_repo.get_commit()
635 source_commit = source_repo.get_commit()
636 default_branch = target_repo.DEFAULT_BRANCH_NAME
636 default_branch = target_repo.DEFAULT_BRANCH_NAME
637 bookmark_name = 'bookmark'
637 bookmark_name = 'bookmark'
638 target_ref = Reference('branch', default_branch, target_commit.raw_id)
638 target_ref = Reference('branch', default_branch, target_commit.raw_id)
639 source_repo._update(default_branch)
639 source_repo._update(default_branch)
640 source_repo.bookmark(bookmark_name)
640 source_repo.bookmark(bookmark_name)
641 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
641 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
642 workspace_id = 'test-merge'
642 workspace_id = 'test-merge'
643 repo_id = repo_id_generator(target_repo.path)
643 repo_id = repo_id_generator(target_repo.path)
644 merge_response = target_repo.merge(
644 merge_response = target_repo.merge(
645 repo_id, workspace_id, target_ref, source_repo, source_ref,
645 repo_id, workspace_id, target_ref, source_repo, source_ref,
646 'test user', 'test@rhodecode.com', 'merge message 1',
646 'test user', 'test@rhodecode.com', 'merge message 1',
647 dry_run=False)
647 dry_run=False)
648 expected_merge_response = MergeResponse(
648 expected_merge_response = MergeResponse(
649 True, True, merge_response.merge_ref,
649 True, True, merge_response.merge_ref,
650 MergeFailureReason.NONE)
650 MergeFailureReason.NONE)
651 assert merge_response == expected_merge_response
651 assert merge_response == expected_merge_response
652
652
653 target_repo = backends.get_backend(vcsbackend_hg.alias)(
653 target_repo = backends.get_backend(vcsbackend_hg.alias)(
654 target_repo.path)
654 target_repo.path)
655 target_commits = list(target_repo.get_commits())
655 target_commits = list(target_repo.get_commits())
656 commit_ids = [c.raw_id for c in target_commits]
656 commit_ids = [c.raw_id for c in target_commits]
657 assert source_ref.commit_id == commit_ids[-1]
657 assert source_ref.commit_id == commit_ids[-1]
658 assert target_ref.commit_id == commit_ids[-2]
658 assert target_ref.commit_id == commit_ids[-2]
659
659
660 def test_merge_target_has_multiple_heads(self, vcsbackend_hg):
660 def test_merge_target_has_multiple_heads(self, vcsbackend_hg):
661 target_repo = vcsbackend_hg.create_repo(number_of_commits=2)
661 target_repo = vcsbackend_hg.create_repo(number_of_commits=2)
662 source_repo = vcsbackend_hg.clone_repo(target_repo)
662 source_repo = vcsbackend_hg.clone_repo(target_repo)
663 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
663 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
664 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
664 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
665
665
666 # add an extra head to the target repo
666 # add an extra head to the target repo
667 imc = target_repo.in_memory_commit
667 imc = target_repo.in_memory_commit
668 imc.add(FileNode('file_x', content='foo'))
668 imc.add(FileNode('file_x', content='foo'))
669 commits = list(target_repo.get_commits())
669 commits = list(target_repo.get_commits())
670 imc.commit(
670 imc.commit(
671 message=u'Automatic commit from repo merge test',
671 message=u'Automatic commit from repo merge test',
672 author=u'Automatic <automatic@rhodecode.com>', parents=commits[0:1])
672 author=u'Automatic <automatic@rhodecode.com>', parents=commits[0:1])
673
673
674 target_commit = target_repo.get_commit()
674 target_commit = target_repo.get_commit()
675 source_commit = source_repo.get_commit()
675 source_commit = source_repo.get_commit()
676 default_branch = target_repo.DEFAULT_BRANCH_NAME
676 default_branch = target_repo.DEFAULT_BRANCH_NAME
677 target_repo._update(default_branch)
677 target_repo._update(default_branch)
678
678
679 target_ref = Reference('branch', default_branch, target_commit.raw_id)
679 target_ref = Reference('branch', default_branch, target_commit.raw_id)
680 source_ref = Reference('branch', default_branch, source_commit.raw_id)
680 source_ref = Reference('branch', default_branch, source_commit.raw_id)
681 workspace_id = 'test-merge'
681 workspace_id = 'test-merge'
682
682
683 assert len(target_repo._heads(branch='default')) == 2
683 assert len(target_repo._heads(branch='default')) == 2
684 heads = target_repo._heads(branch='default')
684 heads = target_repo._heads(branch='default')
685 expected_merge_response = MergeResponse(
685 expected_merge_response = MergeResponse(
686 False, False, None,
686 False, False, None,
687 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
687 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
688 metadata={'heads': heads})
688 metadata={'heads': heads})
689 repo_id = repo_id_generator(target_repo.path)
689 repo_id = repo_id_generator(target_repo.path)
690 merge_response = target_repo.merge(
690 merge_response = target_repo.merge(
691 repo_id, workspace_id, target_ref, source_repo, source_ref,
691 repo_id, workspace_id, target_ref, source_repo, source_ref,
692 'test user', 'test@rhodecode.com', 'merge message 1',
692 'test user', 'test@rhodecode.com', 'merge message 1',
693 dry_run=False)
693 dry_run=False)
694 assert merge_response == expected_merge_response
694 assert merge_response == expected_merge_response
695
695
696 def test_merge_rebase_source_is_updated_bookmark(self, vcsbackend_hg):
696 def test_merge_rebase_source_is_updated_bookmark(self, vcsbackend_hg):
697 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
697 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
698 source_repo = vcsbackend_hg.clone_repo(target_repo)
698 source_repo = vcsbackend_hg.clone_repo(target_repo)
699 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
699 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
700 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
700 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
701 imc = source_repo.in_memory_commit
701 imc = source_repo.in_memory_commit
702 imc.add(FileNode('file_x', content=source_repo.name))
702 imc.add(FileNode('file_x', content=source_repo.name))
703 imc.commit(
703 imc.commit(
704 message=u'Automatic commit from repo merge test',
704 message=u'Automatic commit from repo merge test',
705 author=u'Automatic <automatic@rhodecode.com>')
705 author=u'Automatic <automatic@rhodecode.com>')
706 target_commit = target_repo.get_commit()
706 target_commit = target_repo.get_commit()
707 source_commit = source_repo.get_commit()
707 source_commit = source_repo.get_commit()
708
708
709 vcsbackend_hg.add_file(source_repo, 'LICENSE', 'LICENSE Info')
709 vcsbackend_hg.add_file(source_repo, 'LICENSE', 'LICENSE Info')
710
710
711 default_branch = target_repo.DEFAULT_BRANCH_NAME
711 default_branch = target_repo.DEFAULT_BRANCH_NAME
712 bookmark_name = 'bookmark'
712 bookmark_name = 'bookmark'
713 source_repo._update(default_branch)
713 source_repo._update(default_branch)
714 source_repo.bookmark(bookmark_name)
714 source_repo.bookmark(bookmark_name)
715
715
716 target_ref = Reference('branch', default_branch, target_commit.raw_id)
716 target_ref = Reference('branch', default_branch, target_commit.raw_id)
717 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
717 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
718 repo_id = repo_id_generator(target_repo.path)
718 repo_id = repo_id_generator(target_repo.path)
719 workspace_id = 'test-merge'
719 workspace_id = 'test-merge'
720
720
721 merge_response = target_repo.merge(
721 merge_response = target_repo.merge(
722 repo_id, workspace_id, target_ref, source_repo, source_ref,
722 repo_id, workspace_id, target_ref, source_repo, source_ref,
723 'test user', 'test@rhodecode.com', 'merge message 1',
723 'test user', 'test@rhodecode.com', 'merge message 1',
724 dry_run=False, use_rebase=True)
724 dry_run=False, use_rebase=True)
725
725
726 expected_merge_response = MergeResponse(
726 expected_merge_response = MergeResponse(
727 True, True, merge_response.merge_ref,
727 True, True, merge_response.merge_ref,
728 MergeFailureReason.NONE)
728 MergeFailureReason.NONE)
729 assert merge_response == expected_merge_response
729 assert merge_response == expected_merge_response
730
730
731 target_repo = backends.get_backend(vcsbackend_hg.alias)(
731 target_repo = backends.get_backend(vcsbackend_hg.alias)(
732 target_repo.path)
732 target_repo.path)
733 last_commit = target_repo.get_commit()
733 last_commit = target_repo.get_commit()
734 assert last_commit.message == source_commit.message
734 assert last_commit.message == source_commit.message
735 assert last_commit.author == source_commit.author
735 assert last_commit.author == source_commit.author
736 # This checks that we effectively did a rebase
736 # This checks that we effectively did a rebase
737 assert last_commit.raw_id != source_commit.raw_id
737 assert last_commit.raw_id != source_commit.raw_id
738
738
739 # Check the target has only 4 commits: 2 were already in target and
739 # Check the target has only 4 commits: 2 were already in target and
740 # only two should have been added
740 # only two should have been added
741 assert len(target_repo.commit_ids) == 2 + 2
741 assert len(target_repo.commit_ids) == 2 + 2
742
742
743
743
class TestGetShadowInstance(object):
    """Behavior of MercurialRepository.get_shadow_instance()."""

    @pytest.fixture
    def repo(self, vcsbackend_hg, monkeypatch):
        # Replace the repo config and the remote Hg connection with mocks
        # so no real vcsserver communication takes place.
        hg_repo = vcsbackend_hg.repo
        monkeypatch.setattr(hg_repo, 'config', mock.Mock())
        monkeypatch.setattr('rhodecode.lib.vcs.connection.Hg', mock.Mock())
        return hg_repo

    def test_passes_config(self, repo):
        # The shadow instance must carry over the parent's configuration.
        shadow_repo = repo.get_shadow_instance(repo.path)
        assert shadow_repo.config == repo.config.copy()

    def test_disables_hooks(self, repo):
        # By default the hooks section is stripped from the shadow config.
        shadow_repo = repo.get_shadow_instance(repo.path)
        shadow_repo.config.clear_section.assert_called_once_with('hooks')

    def test_allows_to_keep_hooks(self, repo):
        # With enable_hooks=True the hooks section must stay untouched.
        shadow_repo = repo.get_shadow_instance(repo.path, enable_hooks=True)
        assert not shadow_repo.config.clear_section.called
764
764
765
765
766 class TestMercurialCommit(object):
766 class TestMercurialCommit(object):
767
767
768 def _test_equality(self, commit):
768 def _test_equality(self, commit):
769 idx = commit.idx
769 idx = commit.idx
770 assert commit == self.repo.get_commit(commit_idx=idx)
770 assert commit == self.repo.get_commit(commit_idx=idx)
771
771
772 def test_equality(self):
772 def test_equality(self):
773 indexes = [0, 10, 20]
773 indexes = [0, 10, 20]
774 commits = [self.repo.get_commit(commit_idx=idx) for idx in indexes]
774 commits = [self.repo.get_commit(commit_idx=idx) for idx in indexes]
775 for commit in commits:
775 for commit in commits:
776 self._test_equality(commit)
776 self._test_equality(commit)
777
777
778 def test_default_commit(self):
778 def test_default_commit(self):
779 tip = self.repo.get_commit('tip')
779 tip = self.repo.get_commit('tip')
780 assert tip == self.repo.get_commit()
780 assert tip == self.repo.get_commit()
781 assert tip == self.repo.get_commit(commit_id=None)
781 assert tip == self.repo.get_commit(commit_id=None)
782 assert tip == self.repo.get_commit(commit_idx=None)
782 assert tip == self.repo.get_commit(commit_idx=None)
783 assert tip == list(self.repo[-1:])[0]
783 assert tip == list(self.repo[-1:])[0]
784
784
785 def test_root_node(self):
785 def test_root_node(self):
786 tip = self.repo.get_commit('tip')
786 tip = self.repo.get_commit('tip')
787 assert tip.root is tip.get_node('')
787 assert tip.root is tip.get_node('')
788
788
789 def test_lazy_fetch(self):
789 def test_lazy_fetch(self):
790 """
790 """
791 Test if commit's nodes expands and are cached as we walk through
791 Test if commit's nodes expands and are cached as we walk through
792 the commit. This test is somewhat hard to write as order of tests
792 the commit. This test is somewhat hard to write as order of tests
793 is a key here. Written by running command after command in a shell.
793 is a key here. Written by running command after command in a shell.
794 """
794 """
795 commit = self.repo.get_commit(commit_idx=45)
795 commit = self.repo.get_commit(commit_idx=45)
796 assert len(commit.nodes) == 0
796 assert len(commit.nodes) == 0
797 root = commit.root
797 root = commit.root
798 assert len(commit.nodes) == 1
798 assert len(commit.nodes) == 1
799 assert len(root.nodes) == 8
799 assert len(root.nodes) == 8
800 # accessing root.nodes updates commit.nodes
800 # accessing root.nodes updates commit.nodes
801 assert len(commit.nodes) == 9
801 assert len(commit.nodes) == 9
802
802
803 docs = root.get_node('docs')
803 docs = root.get_node('docs')
804 # we haven't yet accessed anything new as docs dir was already cached
804 # we haven't yet accessed anything new as docs dir was already cached
805 assert len(commit.nodes) == 9
805 assert len(commit.nodes) == 9
806 assert len(docs.nodes) == 8
806 assert len(docs.nodes) == 8
807 # accessing docs.nodes updates commit.nodes
807 # accessing docs.nodes updates commit.nodes
808 assert len(commit.nodes) == 17
808 assert len(commit.nodes) == 17
809
809
810 assert docs is commit.get_node('docs')
810 assert docs is commit.get_node('docs')
811 assert docs is root.nodes[0]
811 assert docs is root.nodes[0]
812 assert docs is root.dirs[0]
812 assert docs is root.dirs[0]
813 assert docs is commit.get_node('docs')
813 assert docs is commit.get_node('docs')
814
814
815 def test_nodes_with_commit(self):
815 def test_nodes_with_commit(self):
816 commit = self.repo.get_commit(commit_idx=45)
816 commit = self.repo.get_commit(commit_idx=45)
817 root = commit.root
817 root = commit.root
818 docs = root.get_node('docs')
818 docs = root.get_node('docs')
819 assert docs is commit.get_node('docs')
819 assert docs is commit.get_node('docs')
820 api = docs.get_node('api')
820 api = docs.get_node('api')
821 assert api is commit.get_node('docs/api')
821 assert api is commit.get_node('docs/api')
822 index = api.get_node('index.rst')
822 index = api.get_node('index.rst')
823 assert index is commit.get_node('docs/api/index.rst')
823 assert index is commit.get_node('docs/api/index.rst')
824 assert index is commit.get_node(
824 assert index is commit.get_node(
825 'docs').get_node('api').get_node('index.rst')
825 'docs').get_node('api').get_node('index.rst')
826
826
827 def test_branch_and_tags(self):
827 def test_branch_and_tags(self):
828 commit0 = self.repo.get_commit(commit_idx=0)
828 commit0 = self.repo.get_commit(commit_idx=0)
829 assert commit0.branch == 'default'
829 assert commit0.branch == 'default'
830 assert commit0.tags == []
830 assert commit0.tags == []
831
831
832 commit10 = self.repo.get_commit(commit_idx=10)
832 commit10 = self.repo.get_commit(commit_idx=10)
833 assert commit10.branch == 'default'
833 assert commit10.branch == 'default'
834 assert commit10.tags == []
834 assert commit10.tags == []
835
835
836 commit44 = self.repo.get_commit(commit_idx=44)
836 commit44 = self.repo.get_commit(commit_idx=44)
837 assert commit44.branch == 'web'
837 assert commit44.branch == 'web'
838
838
839 tip = self.repo.get_commit('tip')
839 tip = self.repo.get_commit('tip')
840 assert 'tip' in tip.tags
840 assert 'tip' in tip.tags
841
841
842 def test_bookmarks(self):
842 def test_bookmarks(self):
843 commit0 = self.repo.get_commit(commit_idx=0)
843 commit0 = self.repo.get_commit(commit_idx=0)
844 assert commit0.bookmarks == []
844 assert commit0.bookmarks == []
845
845
846 def _test_file_size(self, idx, path, size):
846 def _test_file_size(self, idx, path, size):
847 node = self.repo.get_commit(commit_idx=idx).get_node(path)
847 node = self.repo.get_commit(commit_idx=idx).get_node(path)
848 assert node.is_file()
848 assert node.is_file()
849 assert node.size == size
849 assert node.size == size
850
850
851 def test_file_size(self):
851 def test_file_size(self):
852 to_check = (
852 to_check = (
853 (10, 'setup.py', 1068),
853 (10, 'setup.py', 1068),
854 (20, 'setup.py', 1106),
854 (20, 'setup.py', 1106),
855 (60, 'setup.py', 1074),
855 (60, 'setup.py', 1074),
856
856
857 (10, 'vcs/backends/base.py', 2921),
857 (10, 'vcs/backends/base.py', 2921),
858 (20, 'vcs/backends/base.py', 3936),
858 (20, 'vcs/backends/base.py', 3936),
859 (60, 'vcs/backends/base.py', 6189),
859 (60, 'vcs/backends/base.py', 6189),
860 )
860 )
861 for idx, path, size in to_check:
861 for idx, path, size in to_check:
862 self._test_file_size(idx, path, size)
862 self._test_file_size(idx, path, size)
863
863
864 def test_file_history_from_commits(self):
864 def test_file_history_from_commits(self):
865 node = self.repo[10].get_node('setup.py')
865 node = self.repo[10].get_node('setup.py')
866 commit_ids = [commit.raw_id for commit in node.history]
866 commit_ids = [commit.raw_id for commit in node.history]
867 assert ['3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == commit_ids
867 assert ['3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == commit_ids
868
868
869 node = self.repo[20].get_node('setup.py')
869 node = self.repo[20].get_node('setup.py')
870 node_ids = [commit.raw_id for commit in node.history]
870 node_ids = [commit.raw_id for commit in node.history]
871 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
871 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
872 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
872 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
873
873
874 # special case we check history from commit that has this particular
874 # special case we check history from commit that has this particular
875 # file changed this means we check if it's included as well
875 # file changed this means we check if it's included as well
876 node = self.repo.get_commit('eada5a770da98ab0dd7325e29d00e0714f228d09')\
876 node = self.repo.get_commit('eada5a770da98ab0dd7325e29d00e0714f228d09')\
877 .get_node('setup.py')
877 .get_node('setup.py')
878 node_ids = [commit.raw_id for commit in node.history]
878 node_ids = [commit.raw_id for commit in node.history]
879 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
879 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
880 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
880 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
881
881
882 def test_file_history(self):
882 def test_file_history(self):
883 # we can only check if those commits are present in the history
883 # we can only check if those commits are present in the history
884 # as we cannot update this test every time file is changed
884 # as we cannot update this test every time file is changed
885 files = {
885 files = {
886 'setup.py': [7, 18, 45, 46, 47, 69, 77],
886 'setup.py': [7, 18, 45, 46, 47, 69, 77],
887 'vcs/nodes.py': [
887 'vcs/nodes.py': [
888 7, 8, 24, 26, 30, 45, 47, 49, 56, 57, 58, 59, 60, 61, 73, 76],
888 7, 8, 24, 26, 30, 45, 47, 49, 56, 57, 58, 59, 60, 61, 73, 76],
889 'vcs/backends/hg.py': [
889 'vcs/backends/hg.py': [
890 4, 5, 6, 11, 12, 13, 14, 15, 16, 21, 22, 23, 26, 27, 28, 30,
890 4, 5, 6, 11, 12, 13, 14, 15, 16, 21, 22, 23, 26, 27, 28, 30,
891 31, 33, 35, 36, 37, 38, 39, 40, 41, 44, 45, 47, 48, 49, 53, 54,
891 31, 33, 35, 36, 37, 38, 39, 40, 41, 44, 45, 47, 48, 49, 53, 54,
892 55, 58, 60, 61, 67, 68, 69, 70, 73, 77, 78, 79, 82],
892 55, 58, 60, 61, 67, 68, 69, 70, 73, 77, 78, 79, 82],
893 }
893 }
894 for path, indexes in files.items():
894 for path, indexes in files.items():
895 tip = self.repo.get_commit(commit_idx=indexes[-1])
895 tip = self.repo.get_commit(commit_idx=indexes[-1])
896 node = tip.get_node(path)
896 node = tip.get_node(path)
897 node_indexes = [commit.idx for commit in node.history]
897 node_indexes = [commit.idx for commit in node.history]
898 assert set(indexes).issubset(set(node_indexes)), (
898 assert set(indexes).issubset(set(node_indexes)), (
899 "We assumed that %s is subset of commits for which file %s "
899 "We assumed that %s is subset of commits for which file %s "
900 "has been changed, and history of that node returned: %s"
900 "has been changed, and history of that node returned: %s"
901 % (indexes, path, node_indexes))
901 % (indexes, path, node_indexes))
902
902
903 def test_file_annotate(self):
903 def test_file_annotate(self):
904 files = {
904 files = {
905 'vcs/backends/__init__.py': {
905 'vcs/backends/__init__.py': {
906 89: {
906 89: {
907 'lines_no': 31,
907 'lines_no': 31,
908 'commits': [
908 'commits': [
909 32, 32, 61, 32, 32, 37, 32, 32, 32, 44,
909 32, 32, 61, 32, 32, 37, 32, 32, 32, 44,
910 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
910 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
911 32, 32, 32, 32, 37, 32, 37, 37, 32,
911 32, 32, 32, 32, 37, 32, 37, 37, 32,
912 32, 32
912 32, 32
913 ]
913 ]
914 },
914 },
915 20: {
915 20: {
916 'lines_no': 1,
916 'lines_no': 1,
917 'commits': [4]
917 'commits': [4]
918 },
918 },
919 55: {
919 55: {
920 'lines_no': 31,
920 'lines_no': 31,
921 'commits': [
921 'commits': [
922 32, 32, 45, 32, 32, 37, 32, 32, 32, 44,
922 32, 32, 45, 32, 32, 37, 32, 32, 32, 44,
923 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
923 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
924 32, 32, 32, 32, 37, 32, 37, 37, 32,
924 32, 32, 32, 32, 37, 32, 37, 37, 32,
925 32, 32
925 32, 32
926 ]
926 ]
927 }
927 }
928 },
928 },
929 'vcs/exceptions.py': {
929 'vcs/exceptions.py': {
930 89: {
930 89: {
931 'lines_no': 18,
931 'lines_no': 18,
932 'commits': [
932 'commits': [
933 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
933 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
934 16, 16, 17, 16, 16, 18, 18, 18
934 16, 16, 17, 16, 16, 18, 18, 18
935 ]
935 ]
936 },
936 },
937 20: {
937 20: {
938 'lines_no': 18,
938 'lines_no': 18,
939 'commits': [
939 'commits': [
940 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
940 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
941 16, 16, 17, 16, 16, 18, 18, 18
941 16, 16, 17, 16, 16, 18, 18, 18
942 ]
942 ]
943 },
943 },
944 55: {
944 55: {
945 'lines_no': 18,
945 'lines_no': 18,
946 'commits': [
946 'commits': [
947 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
947 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
948 17, 16, 16, 18, 18, 18
948 17, 16, 16, 18, 18, 18
949 ]
949 ]
950 }
950 }
951 },
951 },
952 'MANIFEST.in': {
952 'MANIFEST.in': {
953 89: {
953 89: {
954 'lines_no': 5,
954 'lines_no': 5,
955 'commits': [7, 7, 7, 71, 71]
955 'commits': [7, 7, 7, 71, 71]
956 },
956 },
957 20: {
957 20: {
958 'lines_no': 3,
958 'lines_no': 3,
959 'commits': [7, 7, 7]
959 'commits': [7, 7, 7]
960 },
960 },
961 55: {
961 55: {
962 'lines_no': 3,
962 'lines_no': 3,
963 'commits': [7, 7, 7]
963 'commits': [7, 7, 7]
964 }
964 }
965 }
965 }
966 }
966 }
967
967
968 for fname, commit_dict in files.items():
968 for fname, commit_dict in files.items():
969 for idx, __ in commit_dict.items():
969 for idx, __ in commit_dict.items():
970 commit = self.repo.get_commit(commit_idx=idx)
970 commit = self.repo.get_commit(commit_idx=idx)
971 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
971 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
972 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
972 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
973 assert l1_1 == l1_2
973 assert l1_1 == l1_2
974 l1 = l1_2 = [
974 l1 = l1_2 = [
975 x[2]().idx for x in commit.get_file_annotate(fname)]
975 x[2]().idx for x in commit.get_file_annotate(fname)]
976 l2 = files[fname][idx]['commits']
976 l2 = files[fname][idx]['commits']
977 assert l1 == l2, (
977 assert l1 == l2, (
978 "The lists of commit for %s@commit_id%s"
978 "The lists of commit for %s@commit_id%s"
979 "from annotation list should match each other,"
979 "from annotation list should match each other,"
980 "got \n%s \nvs \n%s " % (fname, idx, l1, l2))
980 "got \n%s \nvs \n%s " % (fname, idx, l1, l2))
981
981
982 def test_commit_state(self):
982 def test_commit_state(self):
983 """
983 """
984 Tests which files have been added/changed/removed at particular commit
984 Tests which files have been added/changed/removed at particular commit
985 """
985 """
986
986
987 # commit_id 46ad32a4f974:
987 # commit_id 46ad32a4f974:
988 # hg st --rev 46ad32a4f974
988 # hg st --rev 46ad32a4f974
989 # changed: 13
989 # changed: 13
990 # added: 20
990 # added: 20
991 # removed: 1
991 # removed: 1
992 changed = set([
992 changed = set([
993 '.hgignore', 'README.rst', 'docs/conf.py', 'docs/index.rst',
993 '.hgignore', 'README.rst', 'docs/conf.py', 'docs/index.rst',
994 'setup.py', 'tests/test_hg.py', 'tests/test_nodes.py',
994 'setup.py', 'tests/test_hg.py', 'tests/test_nodes.py',
995 'vcs/__init__.py', 'vcs/backends/__init__.py',
995 'vcs/__init__.py', 'vcs/backends/__init__.py',
996 'vcs/backends/base.py', 'vcs/backends/hg.py', 'vcs/nodes.py',
996 'vcs/backends/base.py', 'vcs/backends/hg.py', 'vcs/nodes.py',
997 'vcs/utils/__init__.py'])
997 'vcs/utils/__init__.py'])
998
998
999 added = set([
999 added = set([
1000 'docs/api/backends/hg.rst', 'docs/api/backends/index.rst',
1000 'docs/api/backends/hg.rst', 'docs/api/backends/index.rst',
1001 'docs/api/index.rst', 'docs/api/nodes.rst',
1001 'docs/api/index.rst', 'docs/api/nodes.rst',
1002 'docs/api/web/index.rst', 'docs/api/web/simplevcs.rst',
1002 'docs/api/web/index.rst', 'docs/api/web/simplevcs.rst',
1003 'docs/installation.rst', 'docs/quickstart.rst', 'setup.cfg',
1003 'docs/installation.rst', 'docs/quickstart.rst', 'setup.cfg',
1004 'vcs/utils/baseui_config.py', 'vcs/utils/web.py',
1004 'vcs/utils/baseui_config.py', 'vcs/utils/web.py',
1005 'vcs/web/__init__.py', 'vcs/web/exceptions.py',
1005 'vcs/web/__init__.py', 'vcs/web/exceptions.py',
1006 'vcs/web/simplevcs/__init__.py', 'vcs/web/simplevcs/exceptions.py',
1006 'vcs/web/simplevcs/__init__.py', 'vcs/web/simplevcs/exceptions.py',
1007 'vcs/web/simplevcs/middleware.py', 'vcs/web/simplevcs/models.py',
1007 'vcs/web/simplevcs/middleware.py', 'vcs/web/simplevcs/models.py',
1008 'vcs/web/simplevcs/settings.py', 'vcs/web/simplevcs/utils.py',
1008 'vcs/web/simplevcs/settings.py', 'vcs/web/simplevcs/utils.py',
1009 'vcs/web/simplevcs/views.py'])
1009 'vcs/web/simplevcs/views.py'])
1010
1010
1011 removed = set(['docs/api.rst'])
1011 removed = set(['docs/api.rst'])
1012
1012
1013 commit64 = self.repo.get_commit('46ad32a4f974')
1013 commit64 = self.repo.get_commit('46ad32a4f974')
1014 assert set((node.path for node in commit64.added)) == added
1014 assert set((node.path for node in commit64.added)) == added
1015 assert set((node.path for node in commit64.changed)) == changed
1015 assert set((node.path for node in commit64.changed)) == changed
1016 assert set((node.path for node in commit64.removed)) == removed
1016 assert set((node.path for node in commit64.removed)) == removed
1017
1017
1018 # commit_id b090f22d27d6:
1018 # commit_id b090f22d27d6:
1019 # hg st --rev b090f22d27d6
1019 # hg st --rev b090f22d27d6
1020 # changed: 13
1020 # changed: 13
1021 # added: 20
1021 # added: 20
1022 # removed: 1
1022 # removed: 1
1023 commit88 = self.repo.get_commit('b090f22d27d6')
1023 commit88 = self.repo.get_commit('b090f22d27d6')
1024 assert set((node.path for node in commit88.added)) == set()
1024 assert set((node.path for node in commit88.added)) == set()
1025 assert set((node.path for node in commit88.changed)) == \
1025 assert set((node.path for node in commit88.changed)) == \
1026 set(['.hgignore'])
1026 set(['.hgignore'])
1027 assert set((node.path for node in commit88.removed)) == set()
1027 assert set((node.path for node in commit88.removed)) == set()
1028
1028
1029 #
1029 #
1030 # 85:
1030 # 85:
1031 # added: 2 [
1031 # added: 2 [
1032 # 'vcs/utils/diffs.py', 'vcs/web/simplevcs/views/diffs.py']
1032 # 'vcs/utils/diffs.py', 'vcs/web/simplevcs/views/diffs.py']
1033 # changed: 4 ['vcs/web/simplevcs/models.py', ...]
1033 # changed: 4 ['vcs/web/simplevcs/models.py', ...]
1034 # removed: 1 ['vcs/utils/web.py']
1034 # removed: 1 ['vcs/utils/web.py']
1035 commit85 = self.repo.get_commit(commit_idx=85)
1035 commit85 = self.repo.get_commit(commit_idx=85)
1036 assert set((node.path for node in commit85.added)) == set([
1036 assert set((node.path for node in commit85.added)) == set([
1037 'vcs/utils/diffs.py',
1037 'vcs/utils/diffs.py',
1038 'vcs/web/simplevcs/views/diffs.py'])
1038 'vcs/web/simplevcs/views/diffs.py'])
1039 assert set((node.path for node in commit85.changed)) == set([
1039 assert set((node.path for node in commit85.changed)) == set([
1040 'vcs/web/simplevcs/models.py',
1040 'vcs/web/simplevcs/models.py',
1041 'vcs/web/simplevcs/utils.py',
1041 'vcs/web/simplevcs/utils.py',
1042 'vcs/web/simplevcs/views/__init__.py',
1042 'vcs/web/simplevcs/views/__init__.py',
1043 'vcs/web/simplevcs/views/repository.py',
1043 'vcs/web/simplevcs/views/repository.py',
1044 ])
1044 ])
1045 assert set((node.path for node in commit85.removed)) == \
1045 assert set((node.path for node in commit85.removed)) == \
1046 set(['vcs/utils/web.py'])
1046 set(['vcs/utils/web.py'])
1047
1047
1048 def test_files_state(self):
1048 def test_files_state(self):
1049 """
1049 """
1050 Tests state of FileNodes.
1050 Tests state of FileNodes.
1051 """
1051 """
1052 commit = self.repo.get_commit(commit_idx=85)
1052 commit = self.repo.get_commit(commit_idx=85)
1053 node = commit.get_node('vcs/utils/diffs.py')
1053 node = commit.get_node('vcs/utils/diffs.py')
1054 assert node.state, NodeState.ADDED
1054 assert node.state, NodeState.ADDED
1055 assert node.added
1055 assert node.added
1056 assert not node.changed
1056 assert not node.changed
1057 assert not node.not_changed
1057 assert not node.not_changed
1058 assert not node.removed
1058 assert not node.removed
1059
1059
1060 commit = self.repo.get_commit(commit_idx=88)
1060 commit = self.repo.get_commit(commit_idx=88)
1061 node = commit.get_node('.hgignore')
1061 node = commit.get_node('.hgignore')
1062 assert node.state, NodeState.CHANGED
1062 assert node.state, NodeState.CHANGED
1063 assert not node.added
1063 assert not node.added
1064 assert node.changed
1064 assert node.changed
1065 assert not node.not_changed
1065 assert not node.not_changed
1066 assert not node.removed
1066 assert not node.removed
1067
1067
1068 commit = self.repo.get_commit(commit_idx=85)
1068 commit = self.repo.get_commit(commit_idx=85)
1069 node = commit.get_node('setup.py')
1069 node = commit.get_node('setup.py')
1070 assert node.state, NodeState.NOT_CHANGED
1070 assert node.state, NodeState.NOT_CHANGED
1071 assert not node.added
1071 assert not node.added
1072 assert not node.changed
1072 assert not node.changed
1073 assert node.not_changed
1073 assert node.not_changed
1074 assert not node.removed
1074 assert not node.removed
1075
1075
1076 # If node has REMOVED state then trying to fetch it would raise
1076 # If node has REMOVED state then trying to fetch it would raise
1077 # CommitError exception
1077 # CommitError exception
1078 commit = self.repo.get_commit(commit_idx=2)
1078 commit = self.repo.get_commit(commit_idx=2)
1079 path = 'vcs/backends/BaseRepository.py'
1079 path = 'vcs/backends/BaseRepository.py'
1080 with pytest.raises(NodeDoesNotExistError):
1080 with pytest.raises(NodeDoesNotExistError):
1081 commit.get_node(path)
1081 commit.get_node(path)
1082 # but it would be one of ``removed`` (commit's attribute)
1082 # but it would be one of ``removed`` (commit's attribute)
1083 assert path in [rf.path for rf in commit.removed]
1083 assert path in [rf.path for rf in commit.removed]
1084
1084
1085 def test_commit_message_is_unicode(self):
1085 def test_commit_message_is_unicode(self):
1086 for cm in self.repo:
1086 for cm in self.repo:
1087 assert type(cm.message) == unicode
1087 assert type(cm.message) == unicode
1088
1088
1089 def test_commit_author_is_unicode(self):
1089 def test_commit_author_is_unicode(self):
1090 for cm in self.repo:
1090 for cm in self.repo:
1091 assert type(cm.author) == unicode
1091 assert type(cm.author) == unicode
1092
1092
1093 def test_repo_files_content_is_unicode(self):
1093 def test_repo_files_content_is_unicode(self):
1094 test_commit = self.repo.get_commit(commit_idx=100)
1094 test_commit = self.repo.get_commit(commit_idx=100)
1095 for node in test_commit.get_node('/'):
1095 for node in test_commit.get_node('/'):
1096 if node.is_file():
1096 if node.is_file():
1097 assert type(node.content) == unicode
1097 assert type(node.content) == unicode
1098
1098
1099 def test_wrong_path(self):
1099 def test_wrong_path(self):
1100 # There is 'setup.py' in the root dir but not there:
1100 # There is 'setup.py' in the root dir but not there:
1101 path = 'foo/bar/setup.py'
1101 path = 'foo/bar/setup.py'
1102 with pytest.raises(VCSError):
1102 with pytest.raises(VCSError):
1103 self.repo.get_commit().get_node(path)
1103 self.repo.get_commit().get_node(path)
1104
1104
1105 def test_author_email(self):
1105 def test_author_email(self):
1106 assert 'marcin@python-blog.com' == \
1106 assert 'marcin@python-blog.com' == \
1107 self.repo.get_commit('b986218ba1c9').author_email
1107 self.repo.get_commit('b986218ba1c9').author_email
1108 assert 'lukasz.balcerzak@python-center.pl' == \
1108 assert 'lukasz.balcerzak@python-center.pl' == \
1109 self.repo.get_commit('3803844fdbd3').author_email
1109 self.repo.get_commit('3803844fdbd3').author_email
1110 assert '' == self.repo.get_commit('84478366594b').author_email
1110 assert '' == self.repo.get_commit('84478366594b').author_email
1111
1111
1112 def test_author_username(self):
1112 def test_author_username(self):
1113 assert 'Marcin Kuzminski' == \
1113 assert 'Marcin Kuzminski' == \
1114 self.repo.get_commit('b986218ba1c9').author_name
1114 self.repo.get_commit('b986218ba1c9').author_name
1115 assert 'Lukasz Balcerzak' == \
1115 assert 'Lukasz Balcerzak' == \
1116 self.repo.get_commit('3803844fdbd3').author_name
1116 self.repo.get_commit('3803844fdbd3').author_name
1117 assert 'marcink' == \
1117 assert 'marcink' == \
1118 self.repo.get_commit('84478366594b').author_name
1118 self.repo.get_commit('84478366594b').author_name
1119
1119
1120
1120
class TestLargeFileRepo(object):
    """Mercurial largefiles: pointer nodes must resolve to the real file."""

    def test_large_file(self, backend_hg):
        repo = backend_hg.create_test_repo('largefiles', make_db_config())

        tip_commit = repo.scm_instance().get_commit()
        pointer_node = tip_commit.get_node('.hglf/thisfileislarge')

        # Resolve the .hglf pointer into the actual largefile node.
        largefile_node = pointer_node.get_largefile_node()

        assert largefile_node.is_largefile() is True
        assert largefile_node.size == 1024000
        assert largefile_node.name == '.hglf/thisfileislarge'
1134
1134
1135
1135
1136 class TestGetBranchName(object):
1136 class TestGetBranchName(object):
1137 def test_returns_ref_name_when_type_is_branch(self):
1137 def test_returns_ref_name_when_type_is_branch(self):
1138 ref = self._create_ref('branch', 'fake-name')
1138 ref = self._create_ref('branch', 'fake-name')
1139 result = self.repo._get_branch_name(ref)
1139 result = self.repo._get_branch_name(ref)
1140 assert result == ref.name
1140 assert result == ref.name
1141
1141
1142 @pytest.mark.parametrize("type_", ("book", "tag"))
1142 @pytest.mark.parametrize("type_", ("book", "tag"))
1143 def test_queries_remote_when_type_is_not_branch(self, type_):
1143 def test_queries_remote_when_type_is_not_branch(self, type_):
1144 ref = self._create_ref(type_, 'wrong-fake-name')
1144 ref = self._create_ref(type_, 'wrong-fake-name')
1145 with mock.patch.object(self.repo, "_remote") as remote_mock:
1145 with mock.patch.object(self.repo, "_remote") as remote_mock:
1146 remote_mock.ctx_branch.return_value = "fake-name"
1146 remote_mock.ctx_branch.return_value = "fake-name"
1147 result = self.repo._get_branch_name(ref)
1147 result = self.repo._get_branch_name(ref)
1148 assert result == "fake-name"
1148 assert result == "fake-name"
1149 remote_mock.ctx_branch.assert_called_once_with(ref.commit_id)
1149 remote_mock.ctx_branch.assert_called_once_with(ref.commit_id)
1150
1150
1151 def _create_ref(self, type_, name):
1151 def _create_ref(self, type_, name):
1152 ref = mock.Mock()
1152 ref = mock.Mock()
1153 ref.type = type_
1153 ref.type = type_
1154 ref.name = 'wrong-fake-name'
1154 ref.name = 'wrong-fake-name'
1155 ref.commit_id = "deadbeef"
1155 ref.commit_id = "deadbeef"
1156 return ref
1156 return ref
1157
1157
1158
1158
class TestIsTheSameBranch(object):
    """Tests for ``MercurialRepository._is_the_same_branch``."""

    def test_returns_true_when_branches_are_equal(self):
        source_ref = mock.Mock(name="source-ref")
        target_ref = mock.Mock(name="target-ref")
        branch_name_patcher = mock.patch.object(
            self.repo, "_get_branch_name", return_value="default")
        with branch_name_patcher as branch_name_mock:
            result = self.repo._is_the_same_branch(source_ref, target_ref)

        expected_calls = [mock.call(source_ref), mock.call(target_ref)]
        assert branch_name_mock.call_args_list == expected_calls
        assert result is True

    def test_returns_false_when_branches_are_not_equal(self):
        source_ref = mock.Mock(name="source-ref")
        source_ref.name = "source-branch"
        target_ref = mock.Mock(name="target-ref")
        # BUG FIX: this assignment previously re-assigned
        # ``source_ref.name`` (a copy-paste slip), leaving
        # ``target_ref.name`` an auto-created Mock so the test passed
        # for the wrong reason (string != Mock). Set the target's name.
        target_ref.name = "target-branch"

        def side_effect(ref):
            return ref.name

        branch_name_patcher = mock.patch.object(
            self.repo, "_get_branch_name", side_effect=side_effect)
        with branch_name_patcher as branch_name_mock:
            result = self.repo._is_the_same_branch(source_ref, target_ref)

        expected_calls = [mock.call(source_ref), mock.call(target_ref)]
        assert branch_name_mock.call_args_list == expected_calls
        assert result is False
General Comments 0
You need to be logged in to leave comments. Login now