vcs: make commit datetimes utc
dan
r154:3953c69f default
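This changeset renames `date_fromtimestamp` to `utcdate_fromtimestamp` and switches its implementation from `datetime.datetime.fromtimestamp` to `datetime.datetime.utcfromtimestamp`, so commit dates are interpreted as UTC rather than in the server's local timezone. A minimal sketch of the behavioural difference (not part of the diff; the timestamp is only an example):

    import datetime

    ts = 1420070400  # 2015-01-01 00:00:00 UTC

    # Old behaviour: interpreted in the server's local timezone, e.g. on a
    # UTC-5 machine this yields 2014-12-31 19:00:00.
    local_dt = datetime.datetime.fromtimestamp(ts)

    # New behaviour: interpreted as UTC regardless of the server's timezone,
    # always 2015-01-01 00:00:00 (a naive datetime, no tzinfo attached).
    utc_dt = datetime.datetime.utcfromtimestamp(ts)

    print local_dt, utc_dt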
rhodecode/lib/datelib.py
@@ -1,79 +1,79 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Provides utilities around date and time handling
22 Provides utilities around date and time handling
23 """
23 """
24
24
25 import datetime
25 import datetime
26 import time
26 import time
27
27
28
28
29 def makedate():
29 def makedate():
30 lt = time.localtime()
30 lt = time.localtime()
31 if lt[8] == 1 and time.daylight:
31 if lt[8] == 1 and time.daylight:
32 tz = time.altzone
32 tz = time.altzone
33 else:
33 else:
34 tz = time.timezone
34 tz = time.timezone
35 return time.mktime(lt), tz
35 return time.mktime(lt), tz
36
36
37
37
38 def date_fromtimestamp(unixts, tzoffset=0):
38 def utcdate_fromtimestamp(unixts, tzoffset=0):
39 """
39 """
40 Makes a local datetime object out of unix timestamp
40 Makes a UTC (timezone-naive) datetime object out of a unix timestamp
41
41
42 :param unixts:
42 :param unixts: unix timestamp in seconds
43 :param tzoffset:
43 :param tzoffset: timezone offset in seconds (accepted but not applied here)
44 """
44 """
45
45
46 return datetime.datetime.fromtimestamp(float(unixts))
46 return datetime.datetime.utcfromtimestamp(float(unixts))
47
47
48
48
49 def date_astimestamp(value):
49 def date_astimestamp(value):
50 """
50 """
51 Convert a given `datetime.datetime` into a `float` like `time.time`
51 Convert a given `datetime.datetime` into a `float` like `time.time`
52 """
52 """
53 return time.mktime(value.timetuple()) + value.microsecond / 1E6
53 return time.mktime(value.timetuple()) + value.microsecond / 1E6
54
54
55
55
56 def date_to_timestamp_plus_offset(value):
56 def date_to_timestamp_plus_offset(value):
57 """
57 """
58 Convert a given `datetime.datetime` into a unix timestamp and offset.
58 Convert a given `datetime.datetime` into a unix timestamp and offset.
59 """
59 """
60 # TODO: johbo: The time handling looks quite fragile here since we mix
60 # TODO: johbo: The time handling looks quite fragile here since we mix
61 # system time zones with naive datetime instances.
61 # system time zones with naive datetime instances.
62 if value is None:
62 if value is None:
63 value = time.time()
63 value = time.time()
64 elif isinstance(value, datetime.datetime):
64 elif isinstance(value, datetime.datetime):
65 assert not is_aware(value), (
65 assert not is_aware(value), (
66 "This code is not prepared to handle aware datetime instances")
66 "This code is not prepared to handle aware datetime instances")
67 value = date_astimestamp(value)
67 value = date_astimestamp(value)
68 return (value, time.timezone)
68 return (value, time.timezone)
69
69
70
70
71 def is_aware(value):
71 def is_aware(value):
72 """
72 """
73 Determines if a given datetime object is aware.
73 Determines if a given datetime object is aware.
74
74
75 The logic is described in Python's docs:
75 The logic is described in Python's docs:
76 http://docs.python.org/library/datetime.html#datetime.tzinfo
76 http://docs.python.org/library/datetime.html#datetime.tzinfo
77 """
77 """
78 return (value.tzinfo is not None
78 return (value.tzinfo is not None
79 and value.tzinfo.utcoffset(value) is not None)
79 and value.tzinfo.utcoffset(value) is not None)
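For orientation (not part of the changeset), a small sketch of how the helpers above combine, assuming it runs in an environment where `rhodecode.lib.datelib` is importable:

    from rhodecode.lib.datelib import (
        utcdate_fromtimestamp, date_astimestamp, is_aware, makedate)

    # utcdate_fromtimestamp() now returns a naive datetime in UTC; the
    # tzoffset argument is accepted but not applied to the result.
    dt = utcdate_fromtimestamp(1420070400)
    assert not is_aware(dt)  # naive: no tzinfo attached

    # date_astimestamp() goes the other way via time.mktime(), i.e. it treats
    # the naive datetime as *local* time, so the two calls only form a true
    # round trip on a server whose local timezone is UTC.
    ts = date_astimestamp(dt)

    # makedate() returns the current local timestamp and the active UTC
    # offset in seconds (time.altzone or time.timezone), as used by
    # last_change in the git repository backend below.
    stamp, offset = makedate()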
rhodecode/lib/vcs/backends/git/commit.py
@@ -1,527 +1,527 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2016 RhodeCode GmbH
3 # Copyright (C) 2014-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT commit module
22 GIT commit module
23 """
23 """
24
24
25 import re
25 import re
26 import stat
26 import stat
27 from ConfigParser import ConfigParser
27 from ConfigParser import ConfigParser
28 from itertools import chain
28 from itertools import chain
29 from StringIO import StringIO
29 from StringIO import StringIO
30
30
31 from zope.cachedescriptors.property import Lazy as LazyProperty
31 from zope.cachedescriptors.property import Lazy as LazyProperty
32
32
33 from rhodecode.lib.datelib import date_fromtimestamp
33 from rhodecode.lib.datelib import utcdate_fromtimestamp
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.utils2 import safe_int
35 from rhodecode.lib.utils2 import safe_int
36 from rhodecode.lib.vcs.conf import settings
36 from rhodecode.lib.vcs.conf import settings
37 from rhodecode.lib.vcs.backends import base
37 from rhodecode.lib.vcs.backends import base
38 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
38 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
39 from rhodecode.lib.vcs.nodes import (
39 from rhodecode.lib.vcs.nodes import (
40 FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
40 FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
41 ChangedFileNodesGenerator, AddedFileNodesGenerator,
41 ChangedFileNodesGenerator, AddedFileNodesGenerator,
42 RemovedFileNodesGenerator)
42 RemovedFileNodesGenerator)
43
43
44
44
45 class GitCommit(base.BaseCommit):
45 class GitCommit(base.BaseCommit):
46 """
46 """
47 Represents the state of the repository at a single commit id.
47 Represents the state of the repository at a single commit id.
48 """
48 """
49 _author_property = 'author'
49 _author_property = 'author'
50 _committer_property = 'committer'
50 _committer_property = 'committer'
51 _date_property = 'commit_time'
51 _date_property = 'commit_time'
52 _date_tz_property = 'commit_timezone'
52 _date_tz_property = 'commit_timezone'
53 _message_property = 'message'
53 _message_property = 'message'
54 _parents_property = 'parents'
54 _parents_property = 'parents'
55
55
56 _filter_pre_load = [
56 _filter_pre_load = [
57 # done through a more complex tree walk on parents
57 # done through a more complex tree walk on parents
58 "affected_files",
58 "affected_files",
59 # based on repository cached property
59 # based on repository cached property
60 "branch",
60 "branch",
61 # done through subprocess not remote call
61 # done through subprocess not remote call
62 "children",
62 "children",
63 # done through a more complex tree walk on parents
63 # done through a more complex tree walk on parents
64 "status",
64 "status",
65 # mercurial specific property not supported here
65 # mercurial specific property not supported here
66 "_file_paths",
66 "_file_paths",
67 ]
67 ]
68
68
69 def __init__(self, repository, raw_id, idx, pre_load=None):
69 def __init__(self, repository, raw_id, idx, pre_load=None):
70 self.repository = repository
70 self.repository = repository
71 self._remote = repository._remote
71 self._remote = repository._remote
72 # TODO: johbo: Tweak of raw_id should not be necessary
72 # TODO: johbo: Tweak of raw_id should not be necessary
73 self.raw_id = safe_str(raw_id)
73 self.raw_id = safe_str(raw_id)
74 self.idx = idx
74 self.idx = idx
75
75
76 self._set_bulk_properties(pre_load)
76 self._set_bulk_properties(pre_load)
77
77
78 # caches
78 # caches
79 self._stat_modes = {} # stat info for paths
79 self._stat_modes = {} # stat info for paths
80 self._paths = {} # path processed with parse_tree
80 self._paths = {} # path processed with parse_tree
81 self.nodes = {}
81 self.nodes = {}
82 self._submodules = None
82 self._submodules = None
83
83
84 def _set_bulk_properties(self, pre_load):
84 def _set_bulk_properties(self, pre_load):
85 if not pre_load:
85 if not pre_load:
86 return
86 return
87 pre_load = [entry for entry in pre_load
87 pre_load = [entry for entry in pre_load
88 if entry not in self._filter_pre_load]
88 if entry not in self._filter_pre_load]
89 if not pre_load:
89 if not pre_load:
90 return
90 return
91
91
92 result = self._remote.bulk_request(self.raw_id, pre_load)
92 result = self._remote.bulk_request(self.raw_id, pre_load)
93 for attr, value in result.items():
93 for attr, value in result.items():
94 if attr in ["author", "message"]:
94 if attr in ["author", "message"]:
95 if value:
95 if value:
96 value = safe_unicode(value)
96 value = safe_unicode(value)
97 elif attr == "date":
97 elif attr == "date":
98 value = date_fromtimestamp(*value)
98 value = utcdate_fromtimestamp(*value)
99 elif attr == "parents":
99 elif attr == "parents":
100 value = self._make_commits(value)
100 value = self._make_commits(value)
101 self.__dict__[attr] = value
101 self.__dict__[attr] = value
102
102
103 @LazyProperty
103 @LazyProperty
104 def _commit(self):
104 def _commit(self):
105 return self._remote[self.raw_id]
105 return self._remote[self.raw_id]
106
106
107 @LazyProperty
107 @LazyProperty
108 def _tree_id(self):
108 def _tree_id(self):
109 return self._remote[self._commit['tree']]['id']
109 return self._remote[self._commit['tree']]['id']
110
110
111 @LazyProperty
111 @LazyProperty
112 def id(self):
112 def id(self):
113 return self.raw_id
113 return self.raw_id
114
114
115 @LazyProperty
115 @LazyProperty
116 def short_id(self):
116 def short_id(self):
117 return self.raw_id[:12]
117 return self.raw_id[:12]
118
118
119 @LazyProperty
119 @LazyProperty
120 def message(self):
120 def message(self):
121 return safe_unicode(
121 return safe_unicode(
122 self._remote.commit_attribute(self.id, self._message_property))
122 self._remote.commit_attribute(self.id, self._message_property))
123
123
124 @LazyProperty
124 @LazyProperty
125 def committer(self):
125 def committer(self):
126 return safe_unicode(
126 return safe_unicode(
127 self._remote.commit_attribute(self.id, self._committer_property))
127 self._remote.commit_attribute(self.id, self._committer_property))
128
128
129 @LazyProperty
129 @LazyProperty
130 def author(self):
130 def author(self):
131 return safe_unicode(
131 return safe_unicode(
132 self._remote.commit_attribute(self.id, self._author_property))
132 self._remote.commit_attribute(self.id, self._author_property))
133
133
134 @LazyProperty
134 @LazyProperty
135 def date(self):
135 def date(self):
136 unix_ts, tz = self._remote.get_object_attrs(
136 unix_ts, tz = self._remote.get_object_attrs(
137 self.raw_id, self._date_property, self._date_tz_property)
137 self.raw_id, self._date_property, self._date_tz_property)
138 return date_fromtimestamp(unix_ts, tz)
138 return utcdate_fromtimestamp(unix_ts, tz)
139
139
140 @LazyProperty
140 @LazyProperty
141 def status(self):
141 def status(self):
142 """
142 """
143 Returns modified, added, removed, deleted files for current commit
143 Returns modified, added, removed, deleted files for current commit
144 """
144 """
145 return self.changed, self.added, self.removed
145 return self.changed, self.added, self.removed
146
146
147 @LazyProperty
147 @LazyProperty
148 def tags(self):
148 def tags(self):
149 tags = [safe_unicode(name) for name,
149 tags = [safe_unicode(name) for name,
150 commit_id in self.repository.tags.iteritems()
150 commit_id in self.repository.tags.iteritems()
151 if commit_id == self.raw_id]
151 if commit_id == self.raw_id]
152 return tags
152 return tags
153
153
154 @LazyProperty
154 @LazyProperty
155 def branch(self):
155 def branch(self):
156 for name, commit_id in self.repository.branches.iteritems():
156 for name, commit_id in self.repository.branches.iteritems():
157 if commit_id == self.raw_id:
157 if commit_id == self.raw_id:
158 return safe_unicode(name)
158 return safe_unicode(name)
159 return None
159 return None
160
160
161 def _get_id_for_path(self, path):
161 def _get_id_for_path(self, path):
162 path = safe_str(path)
162 path = safe_str(path)
163 if path in self._paths:
163 if path in self._paths:
164 return self._paths[path]
164 return self._paths[path]
165
165
166 tree_id = self._tree_id
166 tree_id = self._tree_id
167
167
168 path = path.strip('/')
168 path = path.strip('/')
169 if path == '':
169 if path == '':
170 data = [tree_id, "tree"]
170 data = [tree_id, "tree"]
171 self._paths[''] = data
171 self._paths[''] = data
172 return data
172 return data
173
173
174 parts = path.split('/')
174 parts = path.split('/')
175 dirs, name = parts[:-1], parts[-1]
175 dirs, name = parts[:-1], parts[-1]
176 cur_dir = ''
176 cur_dir = ''
177
177
178 # initially extract things from root dir
178 # initially extract things from root dir
179 tree_items = self._remote.tree_items(tree_id)
179 tree_items = self._remote.tree_items(tree_id)
180 self._process_tree_items(tree_items, cur_dir)
180 self._process_tree_items(tree_items, cur_dir)
181
181
182 for dir in dirs:
182 for dir in dirs:
183 if cur_dir:
183 if cur_dir:
184 cur_dir = '/'.join((cur_dir, dir))
184 cur_dir = '/'.join((cur_dir, dir))
185 else:
185 else:
186 cur_dir = dir
186 cur_dir = dir
187 dir_id = None
187 dir_id = None
188 for item, stat_, id_, type_ in tree_items:
188 for item, stat_, id_, type_ in tree_items:
189 if item == dir:
189 if item == dir:
190 dir_id = id_
190 dir_id = id_
191 break
191 break
192 if dir_id:
192 if dir_id:
193 if type_ != "tree":
193 if type_ != "tree":
194 raise CommitError('%s is not a directory' % cur_dir)
194 raise CommitError('%s is not a directory' % cur_dir)
195 # update tree
195 # update tree
196 tree_items = self._remote.tree_items(dir_id)
196 tree_items = self._remote.tree_items(dir_id)
197 else:
197 else:
198 raise CommitError('%s has not been found' % cur_dir)
198 raise CommitError('%s has not been found' % cur_dir)
199
199
200 # cache all items from the given traversed tree
200 # cache all items from the given traversed tree
201 self._process_tree_items(tree_items, cur_dir)
201 self._process_tree_items(tree_items, cur_dir)
202
202
203 if path not in self._paths:
203 if path not in self._paths:
204 raise self.no_node_at_path(path)
204 raise self.no_node_at_path(path)
205
205
206 return self._paths[path]
206 return self._paths[path]
207
207
208 def _process_tree_items(self, items, cur_dir):
208 def _process_tree_items(self, items, cur_dir):
209 for item, stat_, id_, type_ in items:
209 for item, stat_, id_, type_ in items:
210 if cur_dir:
210 if cur_dir:
211 name = '/'.join((cur_dir, item))
211 name = '/'.join((cur_dir, item))
212 else:
212 else:
213 name = item
213 name = item
214 self._paths[name] = [id_, type_]
214 self._paths[name] = [id_, type_]
215 self._stat_modes[name] = stat_
215 self._stat_modes[name] = stat_
216
216
217 def _get_kind(self, path):
217 def _get_kind(self, path):
218 path_id, type_ = self._get_id_for_path(path)
218 path_id, type_ = self._get_id_for_path(path)
219 if type_ == 'blob':
219 if type_ == 'blob':
220 return NodeKind.FILE
220 return NodeKind.FILE
221 elif type_ == 'tree':
221 elif type_ == 'tree':
222 return NodeKind.DIR
222 return NodeKind.DIR
223 elif type_ == 'link':
223 elif type_ == 'link':
224 return NodeKind.SUBMODULE
224 return NodeKind.SUBMODULE
225 return None
225 return None
226
226
227 def _get_filectx(self, path):
227 def _get_filectx(self, path):
228 path = self._fix_path(path)
228 path = self._fix_path(path)
229 if self._get_kind(path) != NodeKind.FILE:
229 if self._get_kind(path) != NodeKind.FILE:
230 raise CommitError(
230 raise CommitError(
231 "File does not exist for commit %s at '%s'" %
231 "File does not exist for commit %s at '%s'" %
232 (self.raw_id, path))
232 (self.raw_id, path))
233 return path
233 return path
234
234
235 def _get_file_nodes(self):
235 def _get_file_nodes(self):
236 return chain(*(t[2] for t in self.walk()))
236 return chain(*(t[2] for t in self.walk()))
237
237
238 @LazyProperty
238 @LazyProperty
239 def parents(self):
239 def parents(self):
240 """
240 """
241 Returns list of parent commits.
241 Returns list of parent commits.
242 """
242 """
243 parent_ids = self._remote.commit_attribute(
243 parent_ids = self._remote.commit_attribute(
244 self.id, self._parents_property)
244 self.id, self._parents_property)
245 return self._make_commits(parent_ids)
245 return self._make_commits(parent_ids)
246
246
247 @LazyProperty
247 @LazyProperty
248 def children(self):
248 def children(self):
249 """
249 """
250 Returns list of child commits.
250 Returns list of child commits.
251 """
251 """
252 rev_filter = settings.GIT_REV_FILTER
252 rev_filter = settings.GIT_REV_FILTER
253 output, __ = self.repository.run_git_command(
253 output, __ = self.repository.run_git_command(
254 ['rev-list', '--children'] + rev_filter)
254 ['rev-list', '--children'] + rev_filter)
255
255
256 child_ids = []
256 child_ids = []
257 pat = re.compile(r'^%s' % self.raw_id)
257 pat = re.compile(r'^%s' % self.raw_id)
258 for l in output.splitlines():
258 for l in output.splitlines():
259 if pat.match(l):
259 if pat.match(l):
260 found_ids = l.split(' ')[1:]
260 found_ids = l.split(' ')[1:]
261 child_ids.extend(found_ids)
261 child_ids.extend(found_ids)
262 return self._make_commits(child_ids)
262 return self._make_commits(child_ids)
263
263
264 def _make_commits(self, commit_ids):
264 def _make_commits(self, commit_ids):
265 return [self.repository.get_commit(commit_id=commit_id)
265 return [self.repository.get_commit(commit_id=commit_id)
266 for commit_id in commit_ids]
266 for commit_id in commit_ids]
267
267
268 def get_file_mode(self, path):
268 def get_file_mode(self, path):
269 """
269 """
270 Returns stat mode of the file at the given `path`.
270 Returns stat mode of the file at the given `path`.
271 """
271 """
272 path = safe_str(path)
272 path = safe_str(path)
273 # ensure path is traversed
273 # ensure path is traversed
274 self._get_id_for_path(path)
274 self._get_id_for_path(path)
275 return self._stat_modes[path]
275 return self._stat_modes[path]
276
276
277 def is_link(self, path):
277 def is_link(self, path):
278 return stat.S_ISLNK(self.get_file_mode(path))
278 return stat.S_ISLNK(self.get_file_mode(path))
279
279
280 def get_file_content(self, path):
280 def get_file_content(self, path):
281 """
281 """
282 Returns content of the file at given `path`.
282 Returns content of the file at given `path`.
283 """
283 """
284 id_, _ = self._get_id_for_path(path)
284 id_, _ = self._get_id_for_path(path)
285 return self._remote.blob_as_pretty_string(id_)
285 return self._remote.blob_as_pretty_string(id_)
286
286
287 def get_file_size(self, path):
287 def get_file_size(self, path):
288 """
288 """
289 Returns size of the file at given `path`.
289 Returns size of the file at given `path`.
290 """
290 """
291 id_, _ = self._get_id_for_path(path)
291 id_, _ = self._get_id_for_path(path)
292 return self._remote.blob_raw_length(id_)
292 return self._remote.blob_raw_length(id_)
293
293
294 def get_file_history(self, path, limit=None, pre_load=None):
294 def get_file_history(self, path, limit=None, pre_load=None):
295 """
295 """
296 Returns history of file as reversed list of `GitCommit` objects for
296 Returns history of file as reversed list of `GitCommit` objects for
297 which file at given `path` has been modified.
297 which file at given `path` has been modified.
298
298
299 TODO: This function now uses an underlying 'git' command which works
299 TODO: This function now uses an underlying 'git' command which works
300 quickly but ideally we should replace it with an algorithm.
300 quickly but ideally we should replace it with an algorithm.
301 """
301 """
302 self._get_filectx(path)
302 self._get_filectx(path)
303 f_path = safe_str(path)
303 f_path = safe_str(path)
304
304
305 cmd = ['log']
305 cmd = ['log']
306 if limit:
306 if limit:
307 cmd.extend(['-n', str(safe_int(limit, 0))])
307 cmd.extend(['-n', str(safe_int(limit, 0))])
308 cmd.extend(['--pretty=format: %H', '-s', self.raw_id, '--', f_path])
308 cmd.extend(['--pretty=format: %H', '-s', self.raw_id, '--', f_path])
309
309
310 output, __ = self.repository.run_git_command(cmd)
310 output, __ = self.repository.run_git_command(cmd)
311 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
311 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
312
312
313 return [
313 return [
314 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
314 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
315 for commit_id in commit_ids]
315 for commit_id in commit_ids]
316
316
317 # TODO: unused for now potential replacement for subprocess
317 # TODO: unused for now potential replacement for subprocess
318 def get_file_history_2(self, path, limit=None, pre_load=None):
318 def get_file_history_2(self, path, limit=None, pre_load=None):
319 """
319 """
320 Returns history of file as reversed list of `Commit` objects for
320 Returns history of file as reversed list of `Commit` objects for
321 which file at given `path` has been modified.
321 which file at given `path` has been modified.
322 """
322 """
323 self._get_filectx(path)
323 self._get_filectx(path)
324 f_path = safe_str(path)
324 f_path = safe_str(path)
325
325
326 commit_ids = self._remote.get_file_history(f_path, self.id, limit)
326 commit_ids = self._remote.get_file_history(f_path, self.id, limit)
327
327
328 return [
328 return [
329 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
329 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
330 for commit_id in commit_ids]
330 for commit_id in commit_ids]
331
331
332 def get_file_annotate(self, path, pre_load=None):
332 def get_file_annotate(self, path, pre_load=None):
333 """
333 """
334 Returns a generator of four-element tuples with
334 Returns a generator of four-element tuples with
335 lineno, commit_id, commit lazy loader and line
335 lineno, commit_id, commit lazy loader and line
336
336
337 TODO: This function now uses the underlying 'git' command, which is
337 TODO: This function now uses the underlying 'git' command, which is
338 generally not good. It should be replaced with an algorithm iterating
338 generally not good. It should be replaced with an algorithm iterating
339 commits.
339 commits.
340 """
340 """
341 cmd = ['blame', '-l', '--root', '-r', self.raw_id, '--', path]
341 cmd = ['blame', '-l', '--root', '-r', self.raw_id, '--', path]
342 # -l ==> outputs long shas (and we need all 40 characters)
342 # -l ==> outputs long shas (and we need all 40 characters)
343 # --root ==> doesn't put '^' character for boundaries
343 # --root ==> doesn't put '^' character for boundaries
344 # -r commit_id ==> blames for the given commit
344 # -r commit_id ==> blames for the given commit
345 output, __ = self.repository.run_git_command(cmd)
345 output, __ = self.repository.run_git_command(cmd)
346
346
347 for i, blame_line in enumerate(output.split('\n')[:-1]):
347 for i, blame_line in enumerate(output.split('\n')[:-1]):
348 line_no = i + 1
348 line_no = i + 1
349 commit_id, line = re.split(r' ', blame_line, 1)
349 commit_id, line = re.split(r' ', blame_line, 1)
350 yield (
350 yield (
351 line_no, commit_id,
351 line_no, commit_id,
352 lambda commit_id=commit_id: self.repository.get_commit(commit_id=commit_id,
352 lambda commit_id=commit_id: self.repository.get_commit(commit_id=commit_id,
353 pre_load=pre_load),
353 pre_load=pre_load),
354 line)
354 line)
355
355
356 def get_nodes(self, path):
356 def get_nodes(self, path):
357 if self._get_kind(path) != NodeKind.DIR:
357 if self._get_kind(path) != NodeKind.DIR:
358 raise CommitError(
358 raise CommitError(
359 "Directory does not exist for commit %s at "
359 "Directory does not exist for commit %s at "
360 " '%s'" % (self.raw_id, path))
360 " '%s'" % (self.raw_id, path))
361 path = self._fix_path(path)
361 path = self._fix_path(path)
362 id_, _ = self._get_id_for_path(path)
362 id_, _ = self._get_id_for_path(path)
363 tree_id = self._remote[id_]['id']
363 tree_id = self._remote[id_]['id']
364 dirnodes = []
364 dirnodes = []
365 filenodes = []
365 filenodes = []
366 alias = self.repository.alias
366 alias = self.repository.alias
367 for name, stat_, id_, type_ in self._remote.tree_items(tree_id):
367 for name, stat_, id_, type_ in self._remote.tree_items(tree_id):
368 if type_ == 'link':
368 if type_ == 'link':
369 url = self._get_submodule_url('/'.join((path, name)))
369 url = self._get_submodule_url('/'.join((path, name)))
370 dirnodes.append(SubModuleNode(
370 dirnodes.append(SubModuleNode(
371 name, url=url, commit=id_, alias=alias))
371 name, url=url, commit=id_, alias=alias))
372 continue
372 continue
373
373
374 if path != '':
374 if path != '':
375 obj_path = '/'.join((path, name))
375 obj_path = '/'.join((path, name))
376 else:
376 else:
377 obj_path = name
377 obj_path = name
378 if obj_path not in self._stat_modes:
378 if obj_path not in self._stat_modes:
379 self._stat_modes[obj_path] = stat_
379 self._stat_modes[obj_path] = stat_
380
380
381 if type_ == 'tree':
381 if type_ == 'tree':
382 dirnodes.append(DirNode(obj_path, commit=self))
382 dirnodes.append(DirNode(obj_path, commit=self))
383 elif type_ == 'blob':
383 elif type_ == 'blob':
384 filenodes.append(FileNode(obj_path, commit=self, mode=stat_))
384 filenodes.append(FileNode(obj_path, commit=self, mode=stat_))
385 else:
385 else:
386 raise CommitError(
386 raise CommitError(
387 "Requested object should be Tree or Blob, is %s" % type_)
387 "Requested object should be Tree or Blob, is %s" % type_)
388
388
389 nodes = dirnodes + filenodes
389 nodes = dirnodes + filenodes
390 for node in nodes:
390 for node in nodes:
391 if node.path not in self.nodes:
391 if node.path not in self.nodes:
392 self.nodes[node.path] = node
392 self.nodes[node.path] = node
393 nodes.sort()
393 nodes.sort()
394 return nodes
394 return nodes
395
395
396 def get_node(self, path):
396 def get_node(self, path):
397 if isinstance(path, unicode):
397 if isinstance(path, unicode):
398 path = path.encode('utf-8')
398 path = path.encode('utf-8')
399 path = self._fix_path(path)
399 path = self._fix_path(path)
400 if path not in self.nodes:
400 if path not in self.nodes:
401 try:
401 try:
402 id_, type_ = self._get_id_for_path(path)
402 id_, type_ = self._get_id_for_path(path)
403 except CommitError:
403 except CommitError:
404 raise NodeDoesNotExistError(
404 raise NodeDoesNotExistError(
405 "Cannot find one of parents' directories for a given "
405 "Cannot find one of parents' directories for a given "
406 "path: %s" % path)
406 "path: %s" % path)
407
407
408 if type_ == 'link':
408 if type_ == 'link':
409 url = self._get_submodule_url(path)
409 url = self._get_submodule_url(path)
410 node = SubModuleNode(path, url=url, commit=id_,
410 node = SubModuleNode(path, url=url, commit=id_,
411 alias=self.repository.alias)
411 alias=self.repository.alias)
412 elif type_ == 'tree':
412 elif type_ == 'tree':
413 if path == '':
413 if path == '':
414 node = RootNode(commit=self)
414 node = RootNode(commit=self)
415 else:
415 else:
416 node = DirNode(path, commit=self)
416 node = DirNode(path, commit=self)
417 elif type_ == 'blob':
417 elif type_ == 'blob':
418 node = FileNode(path, commit=self)
418 node = FileNode(path, commit=self)
419 else:
419 else:
420 raise self.no_node_at_path(path)
420 raise self.no_node_at_path(path)
421
421
422 # cache node
422 # cache node
423 self.nodes[path] = node
423 self.nodes[path] = node
424 return self.nodes[path]
424 return self.nodes[path]
425
425
426 @LazyProperty
426 @LazyProperty
427 def affected_files(self):
427 def affected_files(self):
428 """
428 """
429 Returns a quickly accessible list of files affected by the given commit
429 Returns a quickly accessible list of files affected by the given commit
430 """
430 """
431 added, modified, deleted = self._changes_cache
431 added, modified, deleted = self._changes_cache
432 return list(added.union(modified).union(deleted))
432 return list(added.union(modified).union(deleted))
433
433
434 @LazyProperty
434 @LazyProperty
435 def _changes_cache(self):
435 def _changes_cache(self):
436 added = set()
436 added = set()
437 modified = set()
437 modified = set()
438 deleted = set()
438 deleted = set()
439 _r = self._remote
439 _r = self._remote
440
440
441 parents = self.parents
441 parents = self.parents
442 if not self.parents:
442 if not self.parents:
443 parents = [base.EmptyCommit()]
443 parents = [base.EmptyCommit()]
444 for parent in parents:
444 for parent in parents:
445 if isinstance(parent, base.EmptyCommit):
445 if isinstance(parent, base.EmptyCommit):
446 oid = None
446 oid = None
447 else:
447 else:
448 oid = parent.raw_id
448 oid = parent.raw_id
449 changes = _r.tree_changes(oid, self.raw_id)
449 changes = _r.tree_changes(oid, self.raw_id)
450 for (oldpath, newpath), (_, _), (_, _) in changes:
450 for (oldpath, newpath), (_, _), (_, _) in changes:
451 if newpath and oldpath:
451 if newpath and oldpath:
452 modified.add(newpath)
452 modified.add(newpath)
453 elif newpath and not oldpath:
453 elif newpath and not oldpath:
454 added.add(newpath)
454 added.add(newpath)
455 elif not newpath and oldpath:
455 elif not newpath and oldpath:
456 deleted.add(oldpath)
456 deleted.add(oldpath)
457 return added, modified, deleted
457 return added, modified, deleted
458
458
459 def _get_paths_for_status(self, status):
459 def _get_paths_for_status(self, status):
460 """
460 """
461 Returns sorted list of paths for given ``status``.
461 Returns sorted list of paths for given ``status``.
462
462
463 :param status: one of: *added*, *modified* or *deleted*
463 :param status: one of: *added*, *modified* or *deleted*
464 """
464 """
465 added, modified, deleted = self._changes_cache
465 added, modified, deleted = self._changes_cache
466 return sorted({
466 return sorted({
467 'added': list(added),
467 'added': list(added),
468 'modified': list(modified),
468 'modified': list(modified),
469 'deleted': list(deleted)}[status]
469 'deleted': list(deleted)}[status]
470 )
470 )
471
471
472 @LazyProperty
472 @LazyProperty
473 def added(self):
473 def added(self):
474 """
474 """
475 Returns list of added ``FileNode`` objects.
475 Returns list of added ``FileNode`` objects.
476 """
476 """
477 if not self.parents:
477 if not self.parents:
478 return list(self._get_file_nodes())
478 return list(self._get_file_nodes())
479 return AddedFileNodesGenerator(
479 return AddedFileNodesGenerator(
480 [n for n in self._get_paths_for_status('added')], self)
480 [n for n in self._get_paths_for_status('added')], self)
481
481
482 @LazyProperty
482 @LazyProperty
483 def changed(self):
483 def changed(self):
484 """
484 """
485 Returns list of modified ``FileNode`` objects.
485 Returns list of modified ``FileNode`` objects.
486 """
486 """
487 if not self.parents:
487 if not self.parents:
488 return []
488 return []
489 return ChangedFileNodesGenerator(
489 return ChangedFileNodesGenerator(
490 [n for n in self._get_paths_for_status('modified')], self)
490 [n for n in self._get_paths_for_status('modified')], self)
491
491
492 @LazyProperty
492 @LazyProperty
493 def removed(self):
493 def removed(self):
494 """
494 """
495 Returns list of removed ``FileNode`` objects.
495 Returns list of removed ``FileNode`` objects.
496 """
496 """
497 if not self.parents:
497 if not self.parents:
498 return []
498 return []
499 return RemovedFileNodesGenerator(
499 return RemovedFileNodesGenerator(
500 [n for n in self._get_paths_for_status('deleted')], self)
500 [n for n in self._get_paths_for_status('deleted')], self)
501
501
502 def _get_submodule_url(self, submodule_path):
502 def _get_submodule_url(self, submodule_path):
503 git_modules_path = '.gitmodules'
503 git_modules_path = '.gitmodules'
504
504
505 if self._submodules is None:
505 if self._submodules is None:
506 self._submodules = {}
506 self._submodules = {}
507
507
508 try:
508 try:
509 submodules_node = self.get_node(git_modules_path)
509 submodules_node = self.get_node(git_modules_path)
510 except NodeDoesNotExistError:
510 except NodeDoesNotExistError:
511 return None
511 return None
512
512
513 content = submodules_node.content
513 content = submodules_node.content
514
514
515 # ConfigParser fails if there is leading whitespace
515 # ConfigParser fails if there is leading whitespace
516 content = '\n'.join(l.strip() for l in content.split('\n'))
516 content = '\n'.join(l.strip() for l in content.split('\n'))
517
517
518 parser = ConfigParser()
518 parser = ConfigParser()
519 parser.readfp(StringIO(content))
519 parser.readfp(StringIO(content))
520
520
521 for section in parser.sections():
521 for section in parser.sections():
522 path = parser.get(section, 'path')
522 path = parser.get(section, 'path')
523 url = parser.get(section, 'url')
523 url = parser.get(section, 'url')
524 if path and url:
524 if path and url:
525 self._submodules[path.strip('/')] = url
525 self._submodules[path.strip('/')] = url
526
526
527 return self._submodules.get(submodule_path.strip('/'))
527 return self._submodules.get(submodule_path.strip('/'))
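To make the effect of the changed `date` property above concrete: git reports a commit's timestamp as seconds since the epoch together with the author's offset, and after this changeset the property yields the naive UTC wall-clock time regardless of that offset or of the server's timezone. A hedged sketch (the values are illustrative only, not taken from a real repository):

    from rhodecode.lib.datelib import utcdate_fromtimestamp

    # Roughly what the remote returns for commit_time / commit_timezone:
    # seconds since the epoch plus an offset in seconds (here, a commit
    # made from a UTC-5 machine).
    unix_ts, tz = 1420070400, -5 * 3600

    # date_fromtimestamp() used to shift this into the RhodeCode server's
    # local timezone; utcdate_fromtimestamp() always gives
    # datetime(2015, 1, 1, 0, 0), i.e. naive UTC.
    commit_date = utcdate_fromtimestamp(unix_ts, tz)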
rhodecode/lib/vcs/backends/git/repository.py
@@ -1,910 +1,910 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2016 RhodeCode GmbH
3 # Copyright (C) 2014-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT repository module
22 GIT repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import re
27 import re
28 import shutil
28 import shutil
29 import time
29 import time
30
30
31 from zope.cachedescriptors.property import Lazy as LazyProperty
31 from zope.cachedescriptors.property import Lazy as LazyProperty
32
32
33 from rhodecode.lib.compat import OrderedDict
33 from rhodecode.lib.compat import OrderedDict
34 from rhodecode.lib.datelib import makedate, date_fromtimestamp
34 from rhodecode.lib.datelib import makedate, utcdate_fromtimestamp
35 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.utils import safe_unicode, safe_str
36 from rhodecode.lib.vcs import connection, path as vcspath
36 from rhodecode.lib.vcs import connection, path as vcspath
37 from rhodecode.lib.vcs.backends.base import (
37 from rhodecode.lib.vcs.backends.base import (
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 MergeFailureReason)
39 MergeFailureReason)
40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
43 from rhodecode.lib.vcs.conf import settings
43 from rhodecode.lib.vcs.conf import settings
44 from rhodecode.lib.vcs.exceptions import (
44 from rhodecode.lib.vcs.exceptions import (
45 CommitDoesNotExistError, EmptyRepositoryError,
45 CommitDoesNotExistError, EmptyRepositoryError,
46 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
46 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
47
47
48
48
49 SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
49 SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
50
50
51 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
52
52
53
53
54 class GitRepository(BaseRepository):
54 class GitRepository(BaseRepository):
55 """
55 """
56 Git repository backend.
56 Git repository backend.
57 """
57 """
58 DEFAULT_BRANCH_NAME = 'master'
58 DEFAULT_BRANCH_NAME = 'master'
59
59
60 contact = BaseRepository.DEFAULT_CONTACT
60 contact = BaseRepository.DEFAULT_CONTACT
61
61
62 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 def __init__(self, repo_path, config=None, create=False, src_url=None,
63 update_after_clone=False, with_wire=None, bare=False):
63 update_after_clone=False, with_wire=None, bare=False):
64
64
65 self.path = safe_str(os.path.abspath(repo_path))
65 self.path = safe_str(os.path.abspath(repo_path))
66 self.config = config if config else Config()
66 self.config = config if config else Config()
67 self._remote = connection.Git(
67 self._remote = connection.Git(
68 self.path, self.config, with_wire=with_wire)
68 self.path, self.config, with_wire=with_wire)
69
69
70 self._init_repo(create, src_url, update_after_clone, bare)
70 self._init_repo(create, src_url, update_after_clone, bare)
71
71
72 # caches
72 # caches
73 self._commit_ids = {}
73 self._commit_ids = {}
74
74
75 self.bookmarks = {}
75 self.bookmarks = {}
76
76
77 @LazyProperty
77 @LazyProperty
78 def bare(self):
78 def bare(self):
79 return self._remote.bare()
79 return self._remote.bare()
80
80
81 @LazyProperty
81 @LazyProperty
82 def head(self):
82 def head(self):
83 return self._remote.head()
83 return self._remote.head()
84
84
85 @LazyProperty
85 @LazyProperty
86 def commit_ids(self):
86 def commit_ids(self):
87 """
87 """
88 Returns a list of commit ids, in ascending order. Being a lazy
88 Returns a list of commit ids, in ascending order. Being a lazy
89 attribute allows external tools to inject commit ids from a cache.
89 attribute allows external tools to inject commit ids from a cache.
90 """
90 """
91 commit_ids = self._get_all_commit_ids()
91 commit_ids = self._get_all_commit_ids()
92 self._rebuild_cache(commit_ids)
92 self._rebuild_cache(commit_ids)
93 return commit_ids
93 return commit_ids
94
94
95 def _rebuild_cache(self, commit_ids):
95 def _rebuild_cache(self, commit_ids):
96 self._commit_ids = dict((commit_id, index)
96 self._commit_ids = dict((commit_id, index)
97 for index, commit_id in enumerate(commit_ids))
97 for index, commit_id in enumerate(commit_ids))
98
98
99 def run_git_command(self, cmd, **opts):
99 def run_git_command(self, cmd, **opts):
100 """
100 """
101 Runs given ``cmd`` as git command and returns tuple
101 Runs given ``cmd`` as git command and returns tuple
102 (stdout, stderr).
102 (stdout, stderr).
103
103
104 :param cmd: git command to be executed
104 :param cmd: git command to be executed
105 :param opts: env options to pass into Subprocess command
105 :param opts: env options to pass into Subprocess command
106 """
106 """
107 if not isinstance(cmd, list):
107 if not isinstance(cmd, list):
108 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
108 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
109
109
110 out, err = self._remote.run_git_command(cmd, **opts)
110 out, err = self._remote.run_git_command(cmd, **opts)
111 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
111 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
112 return out, err
112 return out, err
113
113
114 @staticmethod
114 @staticmethod
115 def check_url(url, config):
115 def check_url(url, config):
116 """
116 """
117 Checks the given url and tries to verify that it points to a valid
117 Checks the given url and tries to verify that it points to a valid
118 link. Sometimes git may issue a basic auth request, which can cause
118 link. Sometimes git may issue a basic auth request, which can cause
119 the whole API to hang when it is used from python or other
119 the whole API to hang when it is used from python or other
120 external calls.
120 external calls.
121
121
122 On failure it raises urllib2.HTTPError; the exception is also raised
122 On failure it raises urllib2.HTTPError; the exception is also raised
123 when the return code is not 200.
123 when the return code is not 200.
124 """
124 """
125 # check first if it's not a url
125 # check first if it's not a url
126 if os.path.isdir(url) or url.startswith('file:'):
126 if os.path.isdir(url) or url.startswith('file:'):
127 return True
127 return True
128
128
129 if '+' in url.split('://', 1)[0]:
129 if '+' in url.split('://', 1)[0]:
130 url = url.split('+', 1)[1]
130 url = url.split('+', 1)[1]
131
131
132 # Request the _remote to verify the url
132 # Request the _remote to verify the url
133 return connection.Git.check_url(url, config.serialize())
133 return connection.Git.check_url(url, config.serialize())
134
134
135 @staticmethod
135 @staticmethod
136 def is_valid_repository(path):
136 def is_valid_repository(path):
137 if os.path.isdir(os.path.join(path, '.git')):
137 if os.path.isdir(os.path.join(path, '.git')):
138 return True
138 return True
139 # check case of bare repository
139 # check case of bare repository
140 try:
140 try:
141 GitRepository(path)
141 GitRepository(path)
142 return True
142 return True
143 except VCSError:
143 except VCSError:
144 pass
144 pass
145 return False
145 return False
146
146
147 def _init_repo(self, create, src_url=None, update_after_clone=False,
147 def _init_repo(self, create, src_url=None, update_after_clone=False,
148 bare=False):
148 bare=False):
149 if create and os.path.exists(self.path):
149 if create and os.path.exists(self.path):
150 raise RepositoryError(
150 raise RepositoryError(
151 "Cannot create repository at %s, location already exists"
151 "Cannot create repository at %s, location already exists"
152 % self.path)
152 % self.path)
153
153
154 try:
154 try:
155 if create and src_url:
155 if create and src_url:
156 GitRepository.check_url(src_url, self.config)
156 GitRepository.check_url(src_url, self.config)
157 self.clone(src_url, update_after_clone, bare)
157 self.clone(src_url, update_after_clone, bare)
158 elif create:
158 elif create:
159 os.makedirs(self.path, mode=0755)
159 os.makedirs(self.path, mode=0755)
160
160
161 if bare:
161 if bare:
162 self._remote.init_bare()
162 self._remote.init_bare()
163 else:
163 else:
164 self._remote.init()
164 self._remote.init()
165 else:
165 else:
166 self._remote.assert_correct_path()
166 self._remote.assert_correct_path()
167 # TODO: johbo: check if we have to translate the OSError here
167 # TODO: johbo: check if we have to translate the OSError here
168 except OSError as err:
168 except OSError as err:
169 raise RepositoryError(err)
169 raise RepositoryError(err)
170
170
171 def _get_all_commit_ids(self, filters=None):
171 def _get_all_commit_ids(self, filters=None):
172 # we must check if this repo is not empty, since the later command
172 # we must check if this repo is not empty, since the later command
173 # fails if it is. And it's cheaper to ask than to handle the subprocess
173 # fails if it is. And it's cheaper to ask than to handle the subprocess
174 # errors
174 # errors
175 try:
175 try:
176 self._remote.head()
176 self._remote.head()
177 except KeyError:
177 except KeyError:
178 return []
178 return []
179
179
180 rev_filter = ['--branches', '--tags']
180 rev_filter = ['--branches', '--tags']
181 extra_filter = []
181 extra_filter = []
182
182
183 if filters:
183 if filters:
184 if filters.get('since'):
184 if filters.get('since'):
185 extra_filter.append('--since=%s' % (filters['since']))
185 extra_filter.append('--since=%s' % (filters['since']))
186 if filters.get('until'):
186 if filters.get('until'):
187 extra_filter.append('--until=%s' % (filters['until']))
187 extra_filter.append('--until=%s' % (filters['until']))
188 if filters.get('branch_name'):
188 if filters.get('branch_name'):
189 rev_filter = ['--tags']
189 rev_filter = ['--tags']
190 extra_filter.append(filters['branch_name'])
190 extra_filter.append(filters['branch_name'])
191 rev_filter.extend(extra_filter)
191 rev_filter.extend(extra_filter)
192
192
193 # if filters.get('start') or filters.get('end'):
193 # if filters.get('start') or filters.get('end'):
194 # # skip is offset, max-count is limit
194 # # skip is offset, max-count is limit
195 # if filters.get('start'):
195 # if filters.get('start'):
196 # extra_filter += ' --skip=%s' % filters['start']
196 # extra_filter += ' --skip=%s' % filters['start']
197 # if filters.get('end'):
197 # if filters.get('end'):
198 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
198 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
199
199
200 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
200 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
201 try:
201 try:
202 output, __ = self.run_git_command(cmd)
202 output, __ = self.run_git_command(cmd)
203 except RepositoryError:
203 except RepositoryError:
204 # Can be raised for empty repositories
204 # Can be raised for empty repositories
205 return []
205 return []
206 return output.splitlines()
206 return output.splitlines()
207
207
208 def _get_all_commit_ids2(self):
208 def _get_all_commit_ids2(self):
209 # alternate implementation
209 # alternate implementation
210 includes = [x[1][0] for x in self._parsed_refs.iteritems()
210 includes = [x[1][0] for x in self._parsed_refs.iteritems()
211 if x[1][1] != 'T']
211 if x[1][1] != 'T']
212 return [c.commit.id for c in self._remote.get_walker(include=includes)]
212 return [c.commit.id for c in self._remote.get_walker(include=includes)]
213
213
214 def _get_commit_id(self, commit_id_or_idx):
214 def _get_commit_id(self, commit_id_or_idx):
215 def is_null(value):
215 def is_null(value):
216 return len(value) == commit_id_or_idx.count('0')
216 return len(value) == commit_id_or_idx.count('0')
217
217
218 if self.is_empty():
218 if self.is_empty():
219 raise EmptyRepositoryError("There are no commits yet")
219 raise EmptyRepositoryError("There are no commits yet")
220
220
221 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
221 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
222 return self.commit_ids[-1]
222 return self.commit_ids[-1]
223
223
224 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
224 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
225 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
225 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
226 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
226 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
227 try:
227 try:
228 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
228 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
229 except Exception:
229 except Exception:
230 msg = "Commit %s does not exist for %s" % (
230 msg = "Commit %s does not exist for %s" % (
231 commit_id_or_idx, self)
231 commit_id_or_idx, self)
232 raise CommitDoesNotExistError(msg)
232 raise CommitDoesNotExistError(msg)
233
233
234 elif is_bstr:
234 elif is_bstr:
235 # get by branch/tag name
235 # get by branch/tag name
236 ref_id = self._parsed_refs.get(commit_id_or_idx)
236 ref_id = self._parsed_refs.get(commit_id_or_idx)
237 if ref_id: # and ref_id[1] in ['H', 'RH', 'T']:
237 if ref_id: # and ref_id[1] in ['H', 'RH', 'T']:
238 return ref_id[0]
238 return ref_id[0]
239
239
240 tag_ids = self.tags.values()
240 tag_ids = self.tags.values()
241 # maybe it's a tag ? we don't have them in self.commit_ids
241 # maybe it's a tag ? we don't have them in self.commit_ids
242 if commit_id_or_idx in tag_ids:
242 if commit_id_or_idx in tag_ids:
243 return commit_id_or_idx
243 return commit_id_or_idx
244
244
245 elif (not SHA_PATTERN.match(commit_id_or_idx) or
245 elif (not SHA_PATTERN.match(commit_id_or_idx) or
246 commit_id_or_idx not in self.commit_ids):
246 commit_id_or_idx not in self.commit_ids):
247 msg = "Commit %s does not exist for %s" % (
247 msg = "Commit %s does not exist for %s" % (
248 commit_id_or_idx, self)
248 commit_id_or_idx, self)
249 raise CommitDoesNotExistError(msg)
249 raise CommitDoesNotExistError(msg)
250
250
251 # Ensure we return full id
251 # Ensure we return full id
252 if not SHA_PATTERN.match(str(commit_id_or_idx)):
252 if not SHA_PATTERN.match(str(commit_id_or_idx)):
253 raise CommitDoesNotExistError(
253 raise CommitDoesNotExistError(
254 "Given commit id %s not recognized" % commit_id_or_idx)
254 "Given commit id %s not recognized" % commit_id_or_idx)
255 return commit_id_or_idx
255 return commit_id_or_idx
256
256
257 def get_hook_location(self):
257 def get_hook_location(self):
258 """
258 """
259 Returns the absolute path to the location where hooks are stored
259 Returns the absolute path to the location where hooks are stored
260 """
260 """
261 loc = os.path.join(self.path, 'hooks')
261 loc = os.path.join(self.path, 'hooks')
262 if not self.bare:
262 if not self.bare:
263 loc = os.path.join(self.path, '.git', 'hooks')
263 loc = os.path.join(self.path, '.git', 'hooks')
264 return loc
264 return loc
265
265
266 @LazyProperty
266 @LazyProperty
267 def last_change(self):
267 def last_change(self):
268 """
268 """
269 Returns the last change made on this repository as a
269 Returns the last change made on this repository as a
270 `datetime.datetime` object.
270 `datetime.datetime` object.
271 """
271 """
272 return date_fromtimestamp(self._get_mtime(), makedate()[1])
272 return utcdate_fromtimestamp(self._get_mtime(), makedate()[1])
273
273
274 def _get_mtime(self):
274 def _get_mtime(self):
275 try:
275 try:
276 return time.mktime(self.get_commit().date.timetuple())
276 return time.mktime(self.get_commit().date.timetuple())
277 except RepositoryError:
277 except RepositoryError:
278 idx_loc = '' if self.bare else '.git'
278 idx_loc = '' if self.bare else '.git'
279 # fallback to filesystem
279 # fallback to filesystem
280 in_path = os.path.join(self.path, idx_loc, "index")
280 in_path = os.path.join(self.path, idx_loc, "index")
281 he_path = os.path.join(self.path, idx_loc, "HEAD")
281 he_path = os.path.join(self.path, idx_loc, "HEAD")
282 if os.path.exists(in_path):
282 if os.path.exists(in_path):
283 return os.stat(in_path).st_mtime
283 return os.stat(in_path).st_mtime
284 else:
284 else:
285 return os.stat(he_path).st_mtime
285 return os.stat(he_path).st_mtime
286
286
287 @LazyProperty
287 @LazyProperty
288 def description(self):
288 def description(self):
289 description = self._remote.get_description()
289 description = self._remote.get_description()
290 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
290 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
291
291
292 def _get_refs_entry(self, value, reverse):
292 def _get_refs_entry(self, value, reverse):
293 if self.is_empty():
293 if self.is_empty():
294 return {}
294 return {}
295
295
296 def get_name(ctx):
296 def get_name(ctx):
297 return ctx[0]
297 return ctx[0]
298
298
299 _branches = [
299 _branches = [
300 (safe_unicode(x[0]), x[1][0])
300 (safe_unicode(x[0]), x[1][0])
301 for x in self._parsed_refs.iteritems() if x[1][1] == value]
301 for x in self._parsed_refs.iteritems() if x[1][1] == value]
302 return OrderedDict(sorted(_branches, key=get_name, reverse=reverse))
302 return OrderedDict(sorted(_branches, key=get_name, reverse=reverse))
303
303
304 def _get_branches(self):
304 def _get_branches(self):
305 return self._get_refs_entry('H', False)
305 return self._get_refs_entry('H', False)
306
306
307 @LazyProperty
307 @LazyProperty
308 def branches(self):
308 def branches(self):
309 return self._get_branches()
309 return self._get_branches()
310
310
311 @LazyProperty
311 @LazyProperty
312 def branches_closed(self):
312 def branches_closed(self):
313 return {}
313 return {}
314
314
315 @LazyProperty
315 @LazyProperty
316 def branches_all(self):
316 def branches_all(self):
317 all_branches = {}
317 all_branches = {}
318 all_branches.update(self.branches)
318 all_branches.update(self.branches)
319 all_branches.update(self.branches_closed)
319 all_branches.update(self.branches_closed)
320 return all_branches
320 return all_branches
321
321
322 @LazyProperty
322 @LazyProperty
323 def tags(self):
323 def tags(self):
324 return self._get_tags()
324 return self._get_tags()
325
325
326 def _get_tags(self):
326 def _get_tags(self):
327 return self._get_refs_entry('T', True)
327 return self._get_refs_entry('T', True)
328
328
329 def tag(self, name, user, commit_id=None, message=None, date=None,
329 def tag(self, name, user, commit_id=None, message=None, date=None,
330 **kwargs):
330 **kwargs):
331 """
331 """
332 Creates and returns a tag for the given ``commit_id``.
332 Creates and returns a tag for the given ``commit_id``.
333
333
334 :param name: name for new tag
334 :param name: name for new tag
335 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
335 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
336 :param commit_id: commit id for which new tag would be created
336 :param commit_id: commit id for which new tag would be created
337 :param message: message of the tag's commit
337 :param message: message of the tag's commit
338 :param date: date of tag's commit
338 :param date: date of tag's commit
339
339
340 :raises TagAlreadyExistError: if tag with same name already exists
340 :raises TagAlreadyExistError: if tag with same name already exists
341 """
341 """
342 if name in self.tags:
342 if name in self.tags:
343 raise TagAlreadyExistError("Tag %s already exists" % name)
343 raise TagAlreadyExistError("Tag %s already exists" % name)
344 commit = self.get_commit(commit_id=commit_id)
344 commit = self.get_commit(commit_id=commit_id)
345 message = message or "Added tag %s for commit %s" % (
345 message = message or "Added tag %s for commit %s" % (
346 name, commit.raw_id)
346 name, commit.raw_id)
347 self._remote.set_refs('refs/tags/%s' % name, commit._commit['id'])
347 self._remote.set_refs('refs/tags/%s' % name, commit._commit['id'])
348
348
349 self._parsed_refs = self._get_parsed_refs()
349 self._parsed_refs = self._get_parsed_refs()
350 self.tags = self._get_tags()
350 self.tags = self._get_tags()
351 return commit
351 return commit
352
352
353 def remove_tag(self, name, user, message=None, date=None):
353 def remove_tag(self, name, user, message=None, date=None):
354 """
354 """
355 Removes tag with the given ``name``.
355 Removes tag with the given ``name``.
356
356
357 :param name: name of the tag to be removed
357 :param name: name of the tag to be removed
358 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
358 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
359 :param message: message of the tag's removal commit
359 :param message: message of the tag's removal commit
360 :param date: date of tag's removal commit
360 :param date: date of tag's removal commit
361
361
362 :raises TagDoesNotExistError: if a tag with the given name does not exist
362 :raises TagDoesNotExistError: if a tag with the given name does not exist
363 """
363 """
364 if name not in self.tags:
364 if name not in self.tags:
365 raise TagDoesNotExistError("Tag %s does not exist" % name)
365 raise TagDoesNotExistError("Tag %s does not exist" % name)
366 tagpath = vcspath.join(
366 tagpath = vcspath.join(
367 self._remote.get_refs_path(), 'refs', 'tags', name)
367 self._remote.get_refs_path(), 'refs', 'tags', name)
368 try:
368 try:
369 os.remove(tagpath)
369 os.remove(tagpath)
370 self._parsed_refs = self._get_parsed_refs()
370 self._parsed_refs = self._get_parsed_refs()
371 self.tags = self._get_tags()
371 self.tags = self._get_tags()
372 except OSError as e:
372 except OSError as e:
373 raise RepositoryError(e.strerror)
373 raise RepositoryError(e.strerror)
374
374
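A quick usage sketch for the two tag helpers above. Everything here is hypothetical (repository path, tag name, user string) and only illustrates the calling convention visible in the signatures:

# Hypothetical usage; the GitRepository path and tag name are made up.
repo = GitRepository('/srv/repos/example.git')
tip = repo.get_commit()  # head commit when no commit_id is given
repo.tag('v1.0.0', user='Joe Doe <joe.doe@example.com>', commit_id=tip.raw_id)
assert 'v1.0.0' in repo.tags
repo.remove_tag('v1.0.0', user='Joe Doe <joe.doe@example.com>')
assert 'v1.0.0' not in repo.tags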
375 @LazyProperty
375 @LazyProperty
376 def _parsed_refs(self):
376 def _parsed_refs(self):
377 return self._get_parsed_refs()
377 return self._get_parsed_refs()
378
378
379 def _get_parsed_refs(self):
379 def _get_parsed_refs(self):
380 # TODO: (oliver) who needs RH; branches?
380 # TODO: (oliver) who needs RH; branches?
381 # Remote Heads were commented out, as they may overwrite local branches
381 # Remote Heads were commented out, as they may overwrite local branches
382 # See the TODO note in rhodecode.lib.vcs.remote.git:get_refs for more
382 # See the TODO note in rhodecode.lib.vcs.remote.git:get_refs for more
383 # details.
383 # details.
384 keys = [('refs/heads/', 'H'),
384 keys = [('refs/heads/', 'H'),
385 #('refs/remotes/origin/', 'RH'),
385 #('refs/remotes/origin/', 'RH'),
386 ('refs/tags/', 'T')]
386 ('refs/tags/', 'T')]
387 return self._remote.get_refs(keys=keys)
387 return self._remote.get_refs(keys=keys)
388
388
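The one-letter markers consumed by ``_get_refs_entry`` come from the key list above: 'H' selects ``refs/heads/*`` and 'T' selects ``refs/tags/*``. As a rough illustration only (the exact payload is produced by the remote layer, so treat the shape below as an assumption consistent with how ``_get_refs_entry`` reads it):

# Assumed shape of _parsed_refs: short ref name -> (commit sha, type marker).
parsed_refs_example = {
    'master': ('1a2b3c4d' * 5, 'H'),  # a branch head (placeholder sha)
    'v1.0.0': ('4d5e6f7a' * 5, 'T'),  # a tag (placeholder sha)
}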
389 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
389 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
390 """
390 """
391 Returns `GitCommit` object representing commit from git repository
391 Returns `GitCommit` object representing commit from git repository
392 at the given `commit_id` or head (most recent commit) if None given.
392 at the given `commit_id` or head (most recent commit) if None given.
393 """
393 """
394 if commit_id is not None:
394 if commit_id is not None:
395 self._validate_commit_id(commit_id)
395 self._validate_commit_id(commit_id)
396 elif commit_idx is not None:
396 elif commit_idx is not None:
397 self._validate_commit_idx(commit_idx)
397 self._validate_commit_idx(commit_idx)
398 commit_id = commit_idx
398 commit_id = commit_idx
399 commit_id = self._get_commit_id(commit_id)
399 commit_id = self._get_commit_id(commit_id)
400 try:
400 try:
401 # Need to call remote to translate id for tagging scenario
401 # Need to call remote to translate id for tagging scenario
402 commit_id = self._remote.get_object(commit_id)["commit_id"]
402 commit_id = self._remote.get_object(commit_id)["commit_id"]
403 idx = self._commit_ids[commit_id]
403 idx = self._commit_ids[commit_id]
404 except KeyError:
404 except KeyError:
405 raise RepositoryError("Cannot get object with id %s" % commit_id)
405 raise RepositoryError("Cannot get object with id %s" % commit_id)
406
406
407 return GitCommit(self, commit_id, idx, pre_load=pre_load)
407 return GitCommit(self, commit_id, idx, pre_load=pre_load)
408
408
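For orientation, a minimal sketch of the lookup styles ``get_commit`` supports; the repository path is made up:

# Hypothetical lookups against an existing repository object.
repo = GitRepository('/srv/repos/example.git')
head = repo.get_commit()                        # most recent commit
first = repo.get_commit(commit_idx=0)           # by position in commit_ids
again = repo.get_commit(commit_id=head.raw_id)  # by full sha
assert again.raw_id == head.raw_id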
409 def get_commits(
409 def get_commits(
410 self, start_id=None, end_id=None, start_date=None, end_date=None,
410 self, start_id=None, end_id=None, start_date=None, end_date=None,
411 branch_name=None, pre_load=None):
411 branch_name=None, pre_load=None):
412 """
412 """
413 Returns generator of `GitCommit` objects from start to end (both
413 Returns generator of `GitCommit` objects from start to end (both
414 are inclusive), in ascending date order.
414 are inclusive), in ascending date order.
415
415
416 :param start_id: None, str(commit_id)
416 :param start_id: None, str(commit_id)
417 :param end_id: None, str(commit_id)
417 :param end_id: None, str(commit_id)
418 :param start_date: if specified, commits with commit date less than
418 :param start_date: if specified, commits with commit date less than
419 ``start_date`` would be filtered out from returned set
419 ``start_date`` would be filtered out from returned set
420 :param end_date: if specified, commits with commit date greater than
420 :param end_date: if specified, commits with commit date greater than
421 ``end_date`` would be filtered out from returned set
421 ``end_date`` would be filtered out from returned set
422 :param branch_name: if specified, commits not reachable from given
422 :param branch_name: if specified, commits not reachable from given
423 branch would be filtered out from returned set
423 branch would be filtered out from returned set
424
424
425 :raise BranchDoesNotExistError: If given `branch_name` does not
425 :raise BranchDoesNotExistError: If given `branch_name` does not
426 exist.
426 exist.
427 :raise CommitDoesNotExistError: If commits for given `start` or
427 :raise CommitDoesNotExistError: If commits for given `start` or
428 `end` could not be found.
428 `end` could not be found.
429
429
430 """
430 """
431 if self.is_empty():
431 if self.is_empty():
432 raise EmptyRepositoryError("There are no commits yet")
432 raise EmptyRepositoryError("There are no commits yet")
433 self._validate_branch_name(branch_name)
433 self._validate_branch_name(branch_name)
434
434
435 if start_id is not None:
435 if start_id is not None:
436 self._validate_commit_id(start_id)
436 self._validate_commit_id(start_id)
437 if end_id is not None:
437 if end_id is not None:
438 self._validate_commit_id(end_id)
438 self._validate_commit_id(end_id)
439
439
440 start_raw_id = self._get_commit_id(start_id)
440 start_raw_id = self._get_commit_id(start_id)
441 start_pos = self._commit_ids[start_raw_id] if start_id else None
441 start_pos = self._commit_ids[start_raw_id] if start_id else None
442 end_raw_id = self._get_commit_id(end_id)
442 end_raw_id = self._get_commit_id(end_id)
443 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
443 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
444
444
445 if None not in [start_id, end_id] and start_pos > end_pos:
445 if None not in [start_id, end_id] and start_pos > end_pos:
446 raise RepositoryError(
446 raise RepositoryError(
447 "Start commit '%s' cannot be after end commit '%s'" %
447 "Start commit '%s' cannot be after end commit '%s'" %
448 (start_id, end_id))
448 (start_id, end_id))
449
449
450 if end_pos is not None:
450 if end_pos is not None:
451 end_pos += 1
451 end_pos += 1
452
452
453 filter_ = []
453 filter_ = []
454 if branch_name:
454 if branch_name:
455 filter_.append({'branch_name': branch_name})
455 filter_.append({'branch_name': branch_name})
456 if start_date and not end_date:
456 if start_date and not end_date:
457 filter_.append({'since': start_date})
457 filter_.append({'since': start_date})
458 if end_date and not start_date:
458 if end_date and not start_date:
459 filter_.append({'until': end_date})
459 filter_.append({'until': end_date})
460 if start_date and end_date:
460 if start_date and end_date:
461 filter_.append({'since': start_date})
461 filter_.append({'since': start_date})
462 filter_.append({'until': end_date})
462 filter_.append({'until': end_date})
463
463
464 # if start_pos or end_pos:
464 # if start_pos or end_pos:
465 # filter_.append({'start': start_pos})
465 # filter_.append({'start': start_pos})
466 # filter_.append({'end': end_pos})
466 # filter_.append({'end': end_pos})
467
467
468 if filter_:
468 if filter_:
469 revfilters = {
469 revfilters = {
470 'branch_name': branch_name,
470 'branch_name': branch_name,
471 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
471 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
472 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
472 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
473 'start': start_pos,
473 'start': start_pos,
474 'end': end_pos,
474 'end': end_pos,
475 }
475 }
476 commit_ids = self._get_all_commit_ids(filters=revfilters)
476 commit_ids = self._get_all_commit_ids(filters=revfilters)
477
477
478 # pure python stuff, it's slow due to walker walking whole repo
478 # pure python stuff, it's slow due to walker walking whole repo
479 # def get_revs(walker):
479 # def get_revs(walker):
480 # for walker_entry in walker:
480 # for walker_entry in walker:
481 # yield walker_entry.commit.id
481 # yield walker_entry.commit.id
482 # revfilters = {}
482 # revfilters = {}
483 # commit_ids = list(reversed(list(get_revs(self._repo.get_walker(**revfilters)))))
483 # commit_ids = list(reversed(list(get_revs(self._repo.get_walker(**revfilters)))))
484 else:
484 else:
485 commit_ids = self.commit_ids
485 commit_ids = self.commit_ids
486
486
487 if start_pos or end_pos:
487 if start_pos or end_pos:
488 commit_ids = commit_ids[start_pos: end_pos]
488 commit_ids = commit_ids[start_pos: end_pos]
489
489
490 return CollectionGenerator(self, commit_ids, pre_load=pre_load)
490 return CollectionGenerator(self, commit_ids, pre_load=pre_load)
491
491
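As a sketch of how the date filters above are typically used (branch and dates are hypothetical); note that ``since``/``until`` are forwarded to the remote call formatted as ``'%m/%d/%y %H:%M:%S'``:

import datetime

# Hypothetical: commits on 'master' within a one-month window.
repo = GitRepository('/srv/repos/example.git')
commits = repo.get_commits(
    branch_name='master',
    start_date=datetime.datetime(2016, 1, 1),
    end_date=datetime.datetime(2016, 2, 1))
commit_shas = [c.raw_id for c in commits]  # CollectionGenerator is lazy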
492 def get_diff(
492 def get_diff(
493 self, commit1, commit2, path='', ignore_whitespace=False,
493 self, commit1, commit2, path='', ignore_whitespace=False,
494 context=3, path1=None):
494 context=3, path1=None):
495 """
495 """
496 Returns (git like) *diff*, as plain text. Shows changes introduced by
496 Returns (git like) *diff*, as plain text. Shows changes introduced by
497 ``commit2`` since ``commit1``.
497 ``commit2`` since ``commit1``.
498
498
499 :param commit1: Entry point from which diff is shown. Can be
499 :param commit1: Entry point from which diff is shown. Can be
500 ``self.EMPTY_COMMIT`` - in this case, patch showing all
500 ``self.EMPTY_COMMIT`` - in this case, patch showing all
501 the changes since empty state of the repository until ``commit2``
501 the changes since empty state of the repository until ``commit2``
502 :param commit2: Commit until which the changes should be shown.
502 :param commit2: Commit until which the changes should be shown.
503 :param ignore_whitespace: If set to ``True``, would not show whitespace
503 :param ignore_whitespace: If set to ``True``, would not show whitespace
504 changes. Defaults to ``False``.
504 changes. Defaults to ``False``.
505 :param context: How many lines before/after changed lines should be
505 :param context: How many lines before/after changed lines should be
506 shown. Defaults to ``3``.
506 shown. Defaults to ``3``.
507 """
507 """
508 self._validate_diff_commits(commit1, commit2)
508 self._validate_diff_commits(commit1, commit2)
509 if path1 is not None and path1 != path:
509 if path1 is not None and path1 != path:
510 raise ValueError("Diff of two different paths not supported.")
510 raise ValueError("Diff of two different paths not supported.")
511
511
512 flags = [
512 flags = [
513 '-U%s' % context, '--full-index', '--binary', '-p',
513 '-U%s' % context, '--full-index', '--binary', '-p',
514 '-M', '--abbrev=40']
514 '-M', '--abbrev=40']
515 if ignore_whitespace:
515 if ignore_whitespace:
516 flags.append('-w')
516 flags.append('-w')
517
517
518 if commit1 == self.EMPTY_COMMIT:
518 if commit1 == self.EMPTY_COMMIT:
519 cmd = ['show'] + flags + [commit2.raw_id]
519 cmd = ['show'] + flags + [commit2.raw_id]
520 else:
520 else:
521 cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]
521 cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]
522
522
523 if path:
523 if path:
524 cmd.extend(['--', path])
524 cmd.extend(['--', path])
525
525
526 stdout, __ = self.run_git_command(cmd)
526 stdout, __ = self.run_git_command(cmd)
527 # If we used 'show' command, strip first few lines (until actual diff
527 # If we used 'show' command, strip first few lines (until actual diff
528 # starts)
528 # starts)
529 if commit1 == self.EMPTY_COMMIT:
529 if commit1 == self.EMPTY_COMMIT:
530 lines = stdout.splitlines()
530 lines = stdout.splitlines()
531 x = 0
531 x = 0
532 for line in lines:
532 for line in lines:
533 if line.startswith('diff'):
533 if line.startswith('diff'):
534 break
534 break
535 x += 1
535 x += 1
536 # Append a trailing newline, just like the 'diff' command does
536 # Append a trailing newline, just like the 'diff' command does
537 stdout = '\n'.join(lines[x:]) + '\n'
537 stdout = '\n'.join(lines[x:]) + '\n'
538 return GitDiff(stdout)
538 return GitDiff(stdout)
539
539
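A minimal sketch of both diff modes, assuming a populated repository (the path and file name are made up):

# Hypothetical: diff between two commits, plus the EMPTY_COMMIT variant that
# is routed through `git show` and stripped of its header above.
repo = GitRepository('/srv/repos/example.git')
old = repo.get_commit(commit_idx=0)
new = repo.get_commit()
partial = repo.get_diff(old, new, path='README.rst', ignore_whitespace=True)
everything = repo.get_diff(repo.EMPTY_COMMIT, new)  # all changes since empty state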
540 def strip(self, commit_id, branch_name):
540 def strip(self, commit_id, branch_name):
541 commit = self.get_commit(commit_id=commit_id)
541 commit = self.get_commit(commit_id=commit_id)
542 if commit.merge:
542 if commit.merge:
543 raise Exception('Cannot reset to merge commit')
543 raise Exception('Cannot reset to merge commit')
544
544
545 # parent is going to be the new head now
545 # parent is going to be the new head now
546 commit = commit.parents[0]
546 commit = commit.parents[0]
547 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
547 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
548
548
549 self.commit_ids = self._get_all_commit_ids()
549 self.commit_ids = self._get_all_commit_ids()
550 self._rebuild_cache(self.commit_ids)
550 self._rebuild_cache(self.commit_ids)
551
551
552 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
552 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
553 if commit_id1 == commit_id2:
553 if commit_id1 == commit_id2:
554 return commit_id1
554 return commit_id1
555
555
556 if self != repo2:
556 if self != repo2:
557 commits = self._remote.get_missing_revs(
557 commits = self._remote.get_missing_revs(
558 commit_id1, commit_id2, repo2.path)
558 commit_id1, commit_id2, repo2.path)
559 if commits:
559 if commits:
560 commit = repo2.get_commit(commits[-1])
560 commit = repo2.get_commit(commits[-1])
561 if commit.parents:
561 if commit.parents:
562 ancestor_id = commit.parents[0].raw_id
562 ancestor_id = commit.parents[0].raw_id
563 else:
563 else:
564 ancestor_id = None
564 ancestor_id = None
565 else:
565 else:
566 # no commits from other repo, ancestor_id is the commit_id2
566 # no commits from other repo, ancestor_id is the commit_id2
567 ancestor_id = commit_id2
567 ancestor_id = commit_id2
568 else:
568 else:
569 output, __ = self.run_git_command(
569 output, __ = self.run_git_command(
570 ['merge-base', commit_id1, commit_id2])
570 ['merge-base', commit_id1, commit_id2])
571 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
571 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
572
572
573 return ancestor_id
573 return ancestor_id
574
574
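When both commits live in the same repository, the lookup above boils down to ``git merge-base``; a hypothetical call (branch names are assumptions):

# Hypothetical: common ancestor of two heads of the same repository.
repo = GitRepository('/srv/repos/example.git')
master_head = repo.branches['master']
feature_head = repo.branches.get('feature', master_head)  # fall back if absent
ancestor = repo.get_common_ancestor(master_head, feature_head, repo)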
575 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
575 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
576 repo1 = self
576 repo1 = self
577 ancestor_id = None
577 ancestor_id = None
578
578
579 if commit_id1 == commit_id2:
579 if commit_id1 == commit_id2:
580 commits = []
580 commits = []
581 elif repo1 != repo2:
581 elif repo1 != repo2:
582 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
582 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
583 repo2.path)
583 repo2.path)
584 commits = [
584 commits = [
585 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
585 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
586 for commit_id in reversed(missing_ids)]
586 for commit_id in reversed(missing_ids)]
587 else:
587 else:
588 output, __ = repo1.run_git_command(
588 output, __ = repo1.run_git_command(
589 ['log', '--reverse', '--pretty=format: %H', '-s',
589 ['log', '--reverse', '--pretty=format: %H', '-s',
590 '%s..%s' % (commit_id1, commit_id2)])
590 '%s..%s' % (commit_id1, commit_id2)])
591 commits = [
591 commits = [
592 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
592 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
593 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
593 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
594
594
595 return commits
595 return commits
596
596
597 @LazyProperty
597 @LazyProperty
598 def in_memory_commit(self):
598 def in_memory_commit(self):
599 """
599 """
600 Returns ``GitInMemoryCommit`` object for this repository.
600 Returns ``GitInMemoryCommit`` object for this repository.
601 """
601 """
602 return GitInMemoryCommit(self)
602 return GitInMemoryCommit(self)
603
603
604 def clone(self, url, update_after_clone=True, bare=False):
604 def clone(self, url, update_after_clone=True, bare=False):
605 """
605 """
606 Tries to clone commits from external location.
606 Tries to clone commits from external location.
607
607
608 :param update_after_clone: If set to ``False``, git won't check out
608 :param update_after_clone: If set to ``False``, git won't check out
609 the working directory
609 the working directory
610 :param bare: If set to ``True``, the repository will be cloned as a
610 :param bare: If set to ``True``, the repository will be cloned as a
611 *bare* git repository (no working directory at all).
611 *bare* git repository (no working directory at all).
612 """
612 """
613 # init_bare and init expect empty dir created to proceed
613 # init_bare and init expect empty dir created to proceed
614 if not os.path.exists(self.path):
614 if not os.path.exists(self.path):
615 os.mkdir(self.path)
615 os.mkdir(self.path)
616
616
617 if bare:
617 if bare:
618 self._remote.init_bare()
618 self._remote.init_bare()
619 else:
619 else:
620 self._remote.init()
620 self._remote.init()
621
621
622 deferred = '^{}'
622 deferred = '^{}'
623 valid_refs = ('refs/heads', 'refs/tags', 'HEAD')
623 valid_refs = ('refs/heads', 'refs/tags', 'HEAD')
624
624
625 return self._remote.clone(
625 return self._remote.clone(
626 url, deferred, valid_refs, update_after_clone)
626 url, deferred, valid_refs, update_after_clone)
627
627
628 def pull(self, url, commit_ids=None):
628 def pull(self, url, commit_ids=None):
629 """
629 """
630 Tries to pull changes from an external location. We use fetch here since
630 Tries to pull changes from an external location. We use fetch here since
631 pull in git does merges, and we want to stay compatible with the hg backend,
631 pull in git does merges, and we want to stay compatible with the hg backend,
632 so pull == fetch in this case.
632 so pull == fetch in this case.
633 """
633 """
634 self.fetch(url, commit_ids=commit_ids)
634 self.fetch(url, commit_ids=commit_ids)
635
635
636 def fetch(self, url, commit_ids=None):
636 def fetch(self, url, commit_ids=None):
637 """
637 """
638 Tries to fetch changes from external location.
638 Tries to fetch changes from external location.
639 """
639 """
640 refs = None
640 refs = None
641
641
642 if commit_ids is not None:
642 if commit_ids is not None:
643 remote_refs = self._remote.get_remote_refs(url)
643 remote_refs = self._remote.get_remote_refs(url)
644 refs = [
644 refs = [
645 ref for ref in remote_refs if remote_refs[ref] in commit_ids]
645 ref for ref in remote_refs if remote_refs[ref] in commit_ids]
646 self._remote.fetch(url, refs=refs)
646 self._remote.fetch(url, refs=refs)
647
647
648 def set_refs(self, ref_name, commit_id):
648 def set_refs(self, ref_name, commit_id):
649 self._remote.set_refs(ref_name, commit_id)
649 self._remote.set_refs(ref_name, commit_id)
650
650
651 def remove_ref(self, ref_name):
651 def remove_ref(self, ref_name):
652 self._remote.remove_ref(ref_name)
652 self._remote.remove_ref(ref_name)
653
653
654 def _update_server_info(self):
654 def _update_server_info(self):
655 """
655 """
656 Runs git's update-server-info command in this repo instance.
656 Runs git's update-server-info command in this repo instance.
657 """
657 """
658 self._remote.update_server_info()
658 self._remote.update_server_info()
659
659
660 def _current_branch(self):
660 def _current_branch(self):
661 """
661 """
662 Return the name of the current branch.
662 Return the name of the current branch.
663
663
664 It only works for non-bare repositories (i.e. repositories with a
664 It only works for non-bare repositories (i.e. repositories with a
665 working copy)
665 working copy)
666 """
666 """
667 if self.bare:
667 if self.bare:
668 raise RepositoryError('Bare git repos do not have active branches')
668 raise RepositoryError('Bare git repos do not have active branches')
669
669
670 if self.is_empty():
670 if self.is_empty():
671 return None
671 return None
672
672
673 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
673 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
674 return stdout.strip()
674 return stdout.strip()
675
675
676 def _checkout(self, branch_name, create=False):
676 def _checkout(self, branch_name, create=False):
677 """
677 """
678 Checkout a branch in the working directory.
678 Checkout a branch in the working directory.
679
679
680 It tries to create the branch if create is True, failing if the branch
680 It tries to create the branch if create is True, failing if the branch
681 already exists.
681 already exists.
682
682
683 It only works for non-bare repositories (i.e. repositories with a
683 It only works for non-bare repositories (i.e. repositories with a
684 working copy)
684 working copy)
685 """
685 """
686 if self.bare:
686 if self.bare:
687 raise RepositoryError('Cannot checkout branches in a bare git repo')
687 raise RepositoryError('Cannot checkout branches in a bare git repo')
688
688
689 cmd = ['checkout']
689 cmd = ['checkout']
690 if create:
690 if create:
691 cmd.append('-b')
691 cmd.append('-b')
692 cmd.append(branch_name)
692 cmd.append(branch_name)
693 self.run_git_command(cmd, fail_on_stderr=False)
693 self.run_git_command(cmd, fail_on_stderr=False)
694
694
695 def _local_clone(self, clone_path, branch_name):
695 def _local_clone(self, clone_path, branch_name):
696 """
696 """
697 Create a local clone of the current repo.
697 Create a local clone of the current repo.
698 """
698 """
699 # N.B.(skreft): the --branch option is required as otherwise the shallow
699 # N.B.(skreft): the --branch option is required as otherwise the shallow
700 # clone will only fetch the active branch.
700 # clone will only fetch the active branch.
701 cmd = ['clone', '--branch', branch_name, '--single-branch',
701 cmd = ['clone', '--branch', branch_name, '--single-branch',
702 self.path, os.path.abspath(clone_path)]
702 self.path, os.path.abspath(clone_path)]
703 self.run_git_command(cmd, fail_on_stderr=False)
703 self.run_git_command(cmd, fail_on_stderr=False)
704
704
705 def _local_fetch(self, repository_path, branch_name):
705 def _local_fetch(self, repository_path, branch_name):
706 """
706 """
707 Fetch a branch from a local repository.
707 Fetch a branch from a local repository.
708 """
708 """
709 repository_path = os.path.abspath(repository_path)
709 repository_path = os.path.abspath(repository_path)
710 if repository_path == self.path:
710 if repository_path == self.path:
711 raise ValueError('Cannot fetch from the same repository')
711 raise ValueError('Cannot fetch from the same repository')
712
712
713 cmd = ['fetch', '--no-tags', repository_path, branch_name]
713 cmd = ['fetch', '--no-tags', repository_path, branch_name]
714 self.run_git_command(cmd, fail_on_stderr=False)
714 self.run_git_command(cmd, fail_on_stderr=False)
715
715
716 def _last_fetch_heads(self):
716 def _last_fetch_heads(self):
717 """
717 """
718 Return the last fetched heads that need merging.
718 Return the last fetched heads that need merging.
719
719
720 The algorithm is defined at
720 The algorithm is defined at
721 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
721 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
722 """
722 """
723 if not self.bare:
723 if not self.bare:
724 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
724 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
725 else:
725 else:
726 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
726 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
727
727
728 heads = []
728 heads = []
729 with open(fetch_heads_path) as f:
729 with open(fetch_heads_path) as f:
730 for line in f:
730 for line in f:
731 if ' not-for-merge ' in line:
731 if ' not-for-merge ' in line:
732 continue
732 continue
733 line = re.sub('\t.*', '', line, flags=re.DOTALL)
733 line = re.sub('\t.*', '', line, flags=re.DOTALL)
734 heads.append(line)
734 heads.append(line)
735
735
736 return heads
736 return heads
737
737
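The ``re.sub`` above keeps only the text before the first tab on each FETCH_HEAD line, i.e. the commit sha; a tiny self-contained illustration with a made-up line:

import re

# Sample FETCH_HEAD line (sha and path are placeholders).
line = "1a2b3c\t\tbranch 'feature' of /srv/repos/other\n"
sha = re.sub('\t.*', '', line, flags=re.DOTALL)  # DOTALL also eats the newline
# sha == '1a2b3c'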
738 def _local_pull(self, repository_path, branch_name):
738 def _local_pull(self, repository_path, branch_name):
739 """
739 """
740 Pull a branch from a local repository.
740 Pull a branch from a local repository.
741 """
741 """
742 if self.bare:
742 if self.bare:
743 raise RepositoryError('Cannot pull into a bare git repository')
743 raise RepositoryError('Cannot pull into a bare git repository')
744 # N.B.(skreft): The --ff-only option is to make sure this is a
744 # N.B.(skreft): The --ff-only option is to make sure this is a
745 # fast-forward (i.e., we are only pulling new changes and there are no
745 # fast-forward (i.e., we are only pulling new changes and there are no
746 # conflicts with our current branch)
746 # conflicts with our current branch)
747 # Additionally, that option needs to go before --no-tags, otherwise git
747 # Additionally, that option needs to go before --no-tags, otherwise git
748 # pull complains about it being an unknown flag.
748 # pull complains about it being an unknown flag.
749 cmd = ['pull', '--ff-only', '--no-tags', repository_path, branch_name]
749 cmd = ['pull', '--ff-only', '--no-tags', repository_path, branch_name]
750 self.run_git_command(cmd, fail_on_stderr=False)
750 self.run_git_command(cmd, fail_on_stderr=False)
751
751
752 def _local_merge(self, merge_message, user_name, user_email, heads):
752 def _local_merge(self, merge_message, user_name, user_email, heads):
753 """
753 """
754 Merge the given head into the checked out branch.
754 Merge the given head into the checked out branch.
755
755
756 It will force a merge commit.
756 It will force a merge commit.
757
757
758 Currently it raises an error if the repo is empty, as it is not possible
758 Currently it raises an error if the repo is empty, as it is not possible
759 to create a merge commit in an empty repo.
759 to create a merge commit in an empty repo.
760
760
761 :param merge_message: The message to use for the merge commit.
761 :param merge_message: The message to use for the merge commit.
762 :param heads: the heads to merge.
762 :param heads: the heads to merge.
763 """
763 """
764 if self.bare:
764 if self.bare:
765 raise RepositoryError('Cannot merge into a bare git repository')
765 raise RepositoryError('Cannot merge into a bare git repository')
766
766
767 if not heads:
767 if not heads:
768 return
768 return
769
769
770 if self.is_empty():
770 if self.is_empty():
771 # TODO(skreft): do something more robust in this case.
771 # TODO(skreft): do something more robust in this case.
772 raise RepositoryError(
772 raise RepositoryError(
773 'Do not know how to merge into empty repositories yet')
773 'Do not know how to merge into empty repositories yet')
774
774
775 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
775 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
776 # merge commit. We also specify the user who is doing the merge.
776 # merge commit. We also specify the user who is doing the merge.
777 cmd = ['-c', 'user.name=%s' % safe_str(user_name),
777 cmd = ['-c', 'user.name=%s' % safe_str(user_name),
778 '-c', 'user.email=%s' % safe_str(user_email),
778 '-c', 'user.email=%s' % safe_str(user_email),
779 'merge', '--no-ff', '-m', safe_str(merge_message)]
779 'merge', '--no-ff', '-m', safe_str(merge_message)]
780 cmd.extend(heads)
780 cmd.extend(heads)
781 try:
781 try:
782 self.run_git_command(cmd, fail_on_stderr=False)
782 self.run_git_command(cmd, fail_on_stderr=False)
783 except RepositoryError:
783 except RepositoryError:
784 # Cleanup any merge leftovers
784 # Cleanup any merge leftovers
785 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
785 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
786 raise
786 raise
787
787
788 def _local_push(
788 def _local_push(
789 self, source_branch, repository_path, target_branch,
789 self, source_branch, repository_path, target_branch,
790 enable_hooks=False, rc_scm_data=None):
790 enable_hooks=False, rc_scm_data=None):
791 """
791 """
792 Push the source_branch to the given repository and target_branch.
792 Push the source_branch to the given repository and target_branch.
793
793
794 Currently, if the target_branch is not master and the target repo is
794 Currently, if the target_branch is not master and the target repo is
795 empty, the push will work, but then GitRepository won't be able to find
795 empty, the push will work, but then GitRepository won't be able to find
796 the pushed branch or the commits, as the HEAD will be corrupted (i.e.,
796 the pushed branch or the commits, as the HEAD will be corrupted (i.e.,
797 pointing to master, which does not exist).
797 pointing to master, which does not exist).
798
798
799 It does not run the hooks in the target repo.
799 It does not run the hooks in the target repo.
800 """
800 """
801 # TODO(skreft): deal with the case in which the target repo is empty,
801 # TODO(skreft): deal with the case in which the target repo is empty,
802 # and the target_branch is not master.
802 # and the target_branch is not master.
803 target_repo = GitRepository(repository_path)
803 target_repo = GitRepository(repository_path)
804 if (not target_repo.bare and
804 if (not target_repo.bare and
805 target_repo._current_branch() == target_branch):
805 target_repo._current_branch() == target_branch):
806 # Git prevents pushing to the checked out branch, so simulate it by
806 # Git prevents pushing to the checked out branch, so simulate it by
807 # pulling into the target repository.
807 # pulling into the target repository.
808 target_repo._local_pull(self.path, source_branch)
808 target_repo._local_pull(self.path, source_branch)
809 else:
809 else:
810 cmd = ['push', os.path.abspath(repository_path),
810 cmd = ['push', os.path.abspath(repository_path),
811 '%s:%s' % (source_branch, target_branch)]
811 '%s:%s' % (source_branch, target_branch)]
812 gitenv = {}
812 gitenv = {}
813 if rc_scm_data:
813 if rc_scm_data:
814 gitenv.update({'RC_SCM_DATA': rc_scm_data})
814 gitenv.update({'RC_SCM_DATA': rc_scm_data})
815
815
816 if not enable_hooks:
816 if not enable_hooks:
817 gitenv['RC_SKIP_HOOKS'] = '1'
817 gitenv['RC_SKIP_HOOKS'] = '1'
818 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
818 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
819
819
820 def _get_new_pr_branch(self, source_branch, target_branch):
820 def _get_new_pr_branch(self, source_branch, target_branch):
821 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
821 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
822 pr_branches = []
822 pr_branches = []
823 for branch in self.branches:
823 for branch in self.branches:
824 if branch.startswith(prefix):
824 if branch.startswith(prefix):
825 pr_branches.append(int(branch[len(prefix):]))
825 pr_branches.append(int(branch[len(prefix):]))
826
826
827 if not pr_branches:
827 if not pr_branches:
828 branch_id = 0
828 branch_id = 0
829 else:
829 else:
830 branch_id = max(pr_branches) + 1
830 branch_id = max(pr_branches) + 1
831
831
832 return '%s%d' % (prefix, branch_id)
832 return '%s%d' % (prefix, branch_id)
833
833
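The helper above simply appends the next free integer to a ``pr_<source>-<target>_`` prefix; a compact sketch of the same arithmetic with made-up branch names:

# Hypothetical: existing PR branches for feature -> master.
prefix = 'pr_%s-%s_' % ('feature', 'master')
existing = ['master', 'pr_feature-master_0', 'pr_feature-master_1']
ids = [int(b[len(prefix):]) for b in existing if b.startswith(prefix)]
next_branch = '%s%d' % (prefix, max(ids) + 1 if ids else 0)
# next_branch == 'pr_feature-master_2'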
834 def _merge_repo(self, shadow_repository_path, target_ref,
834 def _merge_repo(self, shadow_repository_path, target_ref,
835 source_repo, source_ref, merge_message,
835 source_repo, source_ref, merge_message,
836 merger_name, merger_email, dry_run=False):
836 merger_name, merger_email, dry_run=False):
837 if target_ref.commit_id != self.branches[target_ref.name]:
837 if target_ref.commit_id != self.branches[target_ref.name]:
838 return MergeResponse(
838 return MergeResponse(
839 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
839 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
840
840
841 shadow_repo = GitRepository(shadow_repository_path)
841 shadow_repo = GitRepository(shadow_repository_path)
842 shadow_repo._checkout(target_ref.name)
842 shadow_repo._checkout(target_ref.name)
843 shadow_repo._local_pull(self.path, target_ref.name)
843 shadow_repo._local_pull(self.path, target_ref.name)
844 # Need to reload repo to invalidate the cache, or otherwise we cannot
844 # Need to reload repo to invalidate the cache, or otherwise we cannot
845 # retrieve the last target commit.
845 # retrieve the last target commit.
846 shadow_repo = GitRepository(shadow_repository_path)
846 shadow_repo = GitRepository(shadow_repository_path)
847 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
847 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
848 return MergeResponse(
848 return MergeResponse(
849 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
849 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
850
850
851 pr_branch = shadow_repo._get_new_pr_branch(
851 pr_branch = shadow_repo._get_new_pr_branch(
852 source_ref.name, target_ref.name)
852 source_ref.name, target_ref.name)
853 shadow_repo._checkout(pr_branch, create=True)
853 shadow_repo._checkout(pr_branch, create=True)
854 try:
854 try:
855 shadow_repo._local_fetch(source_repo.path, source_ref.name)
855 shadow_repo._local_fetch(source_repo.path, source_ref.name)
856 except RepositoryError as e:
856 except RepositoryError as e:
857 log.exception('Failure when doing local fetch on git shadow repo')
857 log.exception('Failure when doing local fetch on git shadow repo')
858 return MergeResponse(
858 return MergeResponse(
859 False, False, None, MergeFailureReason.MISSING_COMMIT)
859 False, False, None, MergeFailureReason.MISSING_COMMIT)
860
860
861 merge_commit_id = None
861 merge_commit_id = None
862 merge_failure_reason = MergeFailureReason.NONE
862 merge_failure_reason = MergeFailureReason.NONE
863 try:
863 try:
864 shadow_repo._local_merge(merge_message, merger_name, merger_email,
864 shadow_repo._local_merge(merge_message, merger_name, merger_email,
865 [source_ref.commit_id])
865 [source_ref.commit_id])
866 merge_possible = True
866 merge_possible = True
867 except RepositoryError as e:
867 except RepositoryError as e:
868 log.exception('Failure when doing local merge on git shadow repo')
868 log.exception('Failure when doing local merge on git shadow repo')
869 merge_possible = False
869 merge_possible = False
870 merge_failure_reason = MergeFailureReason.MERGE_FAILED
870 merge_failure_reason = MergeFailureReason.MERGE_FAILED
871
871
872 if merge_possible and not dry_run:
872 if merge_possible and not dry_run:
873 try:
873 try:
874 shadow_repo._local_push(
874 shadow_repo._local_push(
875 pr_branch, self.path, target_ref.name, enable_hooks=True,
875 pr_branch, self.path, target_ref.name, enable_hooks=True,
876 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
876 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
877 merge_succeeded = True
877 merge_succeeded = True
878 # Need to reload repo to invalidate the cache, or otherwise we
878 # Need to reload repo to invalidate the cache, or otherwise we
879 # cannot retrieve the merge commit.
879 # cannot retrieve the merge commit.
880 shadow_repo = GitRepository(shadow_repository_path)
880 shadow_repo = GitRepository(shadow_repository_path)
881 merge_commit_id = shadow_repo.branches[pr_branch]
881 merge_commit_id = shadow_repo.branches[pr_branch]
882 except RepositoryError as e:
882 except RepositoryError as e:
883 log.exception(
883 log.exception(
884 'Failure when doing local push on git shadow repo')
884 'Failure when doing local push on git shadow repo')
885 merge_succeeded = False
885 merge_succeeded = False
886 merge_failure_reason = MergeFailureReason.PUSH_FAILED
886 merge_failure_reason = MergeFailureReason.PUSH_FAILED
887 else:
887 else:
888 merge_succeeded = False
888 merge_succeeded = False
889
889
890 return MergeResponse(
890 return MergeResponse(
891 merge_possible, merge_succeeded, merge_commit_id,
891 merge_possible, merge_succeeded, merge_commit_id,
892 merge_failure_reason)
892 merge_failure_reason)
893
893
894 def _get_shadow_repository_path(self, workspace_id):
894 def _get_shadow_repository_path(self, workspace_id):
895 # The name of the shadow repository must start with '.', so it is
895 # The name of the shadow repository must start with '.', so it is
896 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
896 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
897 return os.path.join(
897 return os.path.join(
898 os.path.dirname(self.path),
898 os.path.dirname(self.path),
899 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
899 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
900
900
901 def _maybe_prepare_merge_workspace(self, workspace_id, target_ref):
901 def _maybe_prepare_merge_workspace(self, workspace_id, target_ref):
902 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
902 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
903 if not os.path.exists(shadow_repository_path):
903 if not os.path.exists(shadow_repository_path):
904 self._local_clone(shadow_repository_path, target_ref.name)
904 self._local_clone(shadow_repository_path, target_ref.name)
905
905
906 return shadow_repository_path
906 return shadow_repository_path
907
907
908 def cleanup_merge_workspace(self, workspace_id):
908 def cleanup_merge_workspace(self, workspace_id):
909 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
909 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
910 shutil.rmtree(shadow_repository_path, ignore_errors=True)
910 shutil.rmtree(shadow_repository_path, ignore_errors=True)
@@ -1,362 +1,362 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2016 RhodeCode GmbH
3 # Copyright (C) 2014-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG commit module
22 HG commit module
23 """
23 """
24
24
25 import os
25 import os
26
26
27 from zope.cachedescriptors.property import Lazy as LazyProperty
27 from zope.cachedescriptors.property import Lazy as LazyProperty
28
28
29 from rhodecode.lib.datelib import date_fromtimestamp
29 from rhodecode.lib.datelib import utcdate_fromtimestamp
30 from rhodecode.lib.utils import safe_str, safe_unicode
30 from rhodecode.lib.utils import safe_str, safe_unicode
31 from rhodecode.lib.vcs import path as vcspath
31 from rhodecode.lib.vcs import path as vcspath
32 from rhodecode.lib.vcs.backends import base
32 from rhodecode.lib.vcs.backends import base
33 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
33 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
34 from rhodecode.lib.vcs.exceptions import CommitError
34 from rhodecode.lib.vcs.exceptions import CommitError
35 from rhodecode.lib.vcs.nodes import (
35 from rhodecode.lib.vcs.nodes import (
36 AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode,
36 AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode,
37 NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode,
37 NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode,
38 LargeFileNode, LARGEFILE_PREFIX)
38 LargeFileNode, LARGEFILE_PREFIX)
39 from rhodecode.lib.vcs.utils.paths import get_dirs_for_path
39 from rhodecode.lib.vcs.utils.paths import get_dirs_for_path
40
40
41
41
42 class MercurialCommit(base.BaseCommit):
42 class MercurialCommit(base.BaseCommit):
43 """
43 """
44 Represents the state of the repository at a single commit.
44 Represents the state of the repository at a single commit.
45 """
45 """
46
46
47 _filter_pre_load = [
47 _filter_pre_load = [
48 # git specific property not supported here
48 # git specific property not supported here
49 "_commit",
49 "_commit",
50 ]
50 ]
51
51
52 def __init__(self, repository, raw_id, idx, pre_load=None):
52 def __init__(self, repository, raw_id, idx, pre_load=None):
53 raw_id = safe_str(raw_id)
53 raw_id = safe_str(raw_id)
54
54
55 self.repository = repository
55 self.repository = repository
56 self._remote = repository._remote
56 self._remote = repository._remote
57
57
58 self.raw_id = raw_id
58 self.raw_id = raw_id
59 self.idx = repository._sanitize_commit_idx(idx)
59 self.idx = repository._sanitize_commit_idx(idx)
60
60
61 self._set_bulk_properties(pre_load)
61 self._set_bulk_properties(pre_load)
62
62
63 # caches
63 # caches
64 self.nodes = {}
64 self.nodes = {}
65
65
66 def _set_bulk_properties(self, pre_load):
66 def _set_bulk_properties(self, pre_load):
67 if not pre_load:
67 if not pre_load:
68 return
68 return
69 pre_load = [entry for entry in pre_load
69 pre_load = [entry for entry in pre_load
70 if entry not in self._filter_pre_load]
70 if entry not in self._filter_pre_load]
71 if not pre_load:
71 if not pre_load:
72 return
72 return
73
73
74 result = self._remote.bulk_request(self.idx, pre_load)
74 result = self._remote.bulk_request(self.idx, pre_load)
75 for attr, value in result.items():
75 for attr, value in result.items():
76 if attr in ["author", "branch", "message"]:
76 if attr in ["author", "branch", "message"]:
77 value = safe_unicode(value)
77 value = safe_unicode(value)
78 elif attr == "affected_files":
78 elif attr == "affected_files":
79 value = map(safe_unicode, value)
79 value = map(safe_unicode, value)
80 elif attr == "date":
80 elif attr == "date":
81 value = date_fromtimestamp(*value)
81 value = utcdate_fromtimestamp(*value)
82 elif attr in ["children", "parents"]:
82 elif attr in ["children", "parents"]:
83 value = self._make_commits(value)
83 value = self._make_commits(value)
84 self.__dict__[attr] = value
84 self.__dict__[attr] = value
85
85
86 @LazyProperty
86 @LazyProperty
87 def tags(self):
87 def tags(self):
88 tags = [name for name, commit_id in self.repository.tags.iteritems()
88 tags = [name for name, commit_id in self.repository.tags.iteritems()
89 if commit_id == self.raw_id]
89 if commit_id == self.raw_id]
90 return tags
90 return tags
91
91
92 @LazyProperty
92 @LazyProperty
93 def branch(self):
93 def branch(self):
94 return safe_unicode(self._remote.ctx_branch(self.idx))
94 return safe_unicode(self._remote.ctx_branch(self.idx))
95
95
96 @LazyProperty
96 @LazyProperty
97 def bookmarks(self):
97 def bookmarks(self):
98 bookmarks = [
98 bookmarks = [
99 name for name, commit_id in self.repository.bookmarks.iteritems()
99 name for name, commit_id in self.repository.bookmarks.iteritems()
100 if commit_id == self.raw_id]
100 if commit_id == self.raw_id]
101 return bookmarks
101 return bookmarks
102
102
103 @LazyProperty
103 @LazyProperty
104 def message(self):
104 def message(self):
105 return safe_unicode(self._remote.ctx_description(self.idx))
105 return safe_unicode(self._remote.ctx_description(self.idx))
106
106
107 @LazyProperty
107 @LazyProperty
108 def committer(self):
108 def committer(self):
109 return safe_unicode(self.author)
109 return safe_unicode(self.author)
110
110
111 @LazyProperty
111 @LazyProperty
112 def author(self):
112 def author(self):
113 return safe_unicode(self._remote.ctx_user(self.idx))
113 return safe_unicode(self._remote.ctx_user(self.idx))
114
114
115 @LazyProperty
115 @LazyProperty
116 def date(self):
116 def date(self):
117 return date_fromtimestamp(*self._remote.ctx_date(self.idx))
117 return utcdate_fromtimestamp(*self._remote.ctx_date(self.idx))
118
118
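This is the point of the changeset: ``ctx_date`` appears to return Mercurial's ``(timestamp, offset)`` pair, and switching from ``date_fromtimestamp`` to ``utcdate_fromtimestamp`` means the stored time is now interpreted as UTC rather than server-local time (the helper currently ignores the offset it is passed). A hedged example with a made-up timestamp:

from rhodecode.lib.datelib import utcdate_fromtimestamp

# 1451606400.0 is 2016-01-01 00:00:00 UTC; the second value stands in for a
# Mercurial tz offset and is not applied by the helper.
utcdate_fromtimestamp(1451606400.0, -3600)
# -> datetime.datetime(2016, 1, 1, 0, 0), a naive UTC datetime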
119 @LazyProperty
119 @LazyProperty
120 def status(self):
120 def status(self):
121 """
121 """
122 Returns modified, added, removed, deleted files for current commit
122 Returns modified, added, removed, deleted files for current commit
123 """
123 """
124 return self._remote.ctx_status(self.idx)
124 return self._remote.ctx_status(self.idx)
125
125
126 @LazyProperty
126 @LazyProperty
127 def _file_paths(self):
127 def _file_paths(self):
128 return self._remote.ctx_list(self.idx)
128 return self._remote.ctx_list(self.idx)
129
129
130 @LazyProperty
130 @LazyProperty
131 def _dir_paths(self):
131 def _dir_paths(self):
132 p = list(set(get_dirs_for_path(*self._file_paths)))
132 p = list(set(get_dirs_for_path(*self._file_paths)))
133 p.insert(0, '')
133 p.insert(0, '')
134 return p
134 return p
135
135
136 @LazyProperty
136 @LazyProperty
137 def _paths(self):
137 def _paths(self):
138 return self._dir_paths + self._file_paths
138 return self._dir_paths + self._file_paths
139
139
140 @LazyProperty
140 @LazyProperty
141 def id(self):
141 def id(self):
142 if self.last:
142 if self.last:
143 return u'tip'
143 return u'tip'
144 return self.short_id
144 return self.short_id
145
145
146 @LazyProperty
146 @LazyProperty
147 def short_id(self):
147 def short_id(self):
148 return self.raw_id[:12]
148 return self.raw_id[:12]
149
149
150 def _make_commits(self, indexes):
150 def _make_commits(self, indexes):
151 return [self.repository.get_commit(commit_idx=idx)
151 return [self.repository.get_commit(commit_idx=idx)
152 for idx in indexes if idx >= 0]
152 for idx in indexes if idx >= 0]
153
153
154 @LazyProperty
154 @LazyProperty
155 def parents(self):
155 def parents(self):
156 """
156 """
157 Returns list of parent commits.
157 Returns list of parent commits.
158 """
158 """
159 parents = self._remote.ctx_parents(self.idx)
159 parents = self._remote.ctx_parents(self.idx)
160 return self._make_commits(parents)
160 return self._make_commits(parents)
161
161
162 @LazyProperty
162 @LazyProperty
163 def children(self):
163 def children(self):
164 """
164 """
165 Returns list of child commits.
165 Returns list of child commits.
166 """
166 """
167 children = self._remote.ctx_children(self.idx)
167 children = self._remote.ctx_children(self.idx)
168 return self._make_commits(children)
168 return self._make_commits(children)
169
169
170 def diff(self, ignore_whitespace=True, context=3):
170 def diff(self, ignore_whitespace=True, context=3):
171 result = self._remote.ctx_diff(
171 result = self._remote.ctx_diff(
172 self.idx,
172 self.idx,
173 git=True, ignore_whitespace=ignore_whitespace, context=context)
173 git=True, ignore_whitespace=ignore_whitespace, context=context)
174 diff = ''.join(result)
174 diff = ''.join(result)
175 return MercurialDiff(diff)
175 return MercurialDiff(diff)
176
176
177 def _fix_path(self, path):
177 def _fix_path(self, path):
178 """
178 """
179 Mercurial keeps filenodes as str so we need to encode from unicode
179 Mercurial keeps filenodes as str so we need to encode from unicode
180 to str.
180 to str.
181 """
181 """
182 return safe_str(super(MercurialCommit, self)._fix_path(path))
182 return safe_str(super(MercurialCommit, self)._fix_path(path))
183
183
184 def _get_kind(self, path):
184 def _get_kind(self, path):
185 path = self._fix_path(path)
185 path = self._fix_path(path)
186 if path in self._file_paths:
186 if path in self._file_paths:
187 return NodeKind.FILE
187 return NodeKind.FILE
188 elif path in self._dir_paths:
188 elif path in self._dir_paths:
189 return NodeKind.DIR
189 return NodeKind.DIR
190 else:
190 else:
191 raise CommitError(
191 raise CommitError(
192 "Node does not exist at the given path '%s'" % (path, ))
192 "Node does not exist at the given path '%s'" % (path, ))
193
193
194 def _get_filectx(self, path):
194 def _get_filectx(self, path):
195 path = self._fix_path(path)
195 path = self._fix_path(path)
196 if self._get_kind(path) != NodeKind.FILE:
196 if self._get_kind(path) != NodeKind.FILE:
197 raise CommitError(
197 raise CommitError(
198 "File does not exist for idx %s at '%s'" % (self.raw_id, path))
198 "File does not exist for idx %s at '%s'" % (self.raw_id, path))
199 return path
199 return path
200
200
201 def get_file_mode(self, path):
201 def get_file_mode(self, path):
202 """
202 """
203 Returns stat mode of the file at the given ``path``.
203 Returns stat mode of the file at the given ``path``.
204 """
204 """
205 path = self._get_filectx(path)
205 path = self._get_filectx(path)
206 if 'x' in self._remote.fctx_flags(self.idx, path):
206 if 'x' in self._remote.fctx_flags(self.idx, path):
207 return base.FILEMODE_EXECUTABLE
207 return base.FILEMODE_EXECUTABLE
208 else:
208 else:
209 return base.FILEMODE_DEFAULT
209 return base.FILEMODE_DEFAULT
210
210
211 def is_link(self, path):
211 def is_link(self, path):
212 path = self._get_filectx(path)
212 path = self._get_filectx(path)
213 return 'l' in self._remote.fctx_flags(self.idx, path)
213 return 'l' in self._remote.fctx_flags(self.idx, path)
214
214
215 def get_file_content(self, path):
215 def get_file_content(self, path):
216 """
216 """
217 Returns content of the file at given ``path``.
217 Returns content of the file at given ``path``.
218 """
218 """
219 path = self._get_filectx(path)
219 path = self._get_filectx(path)
220 return self._remote.fctx_data(self.idx, path)
220 return self._remote.fctx_data(self.idx, path)
221
221
222 def get_file_size(self, path):
222 def get_file_size(self, path):
223 """
223 """
224 Returns size of the file at given ``path``.
224 Returns size of the file at given ``path``.
225 """
225 """
226 path = self._get_filectx(path)
226 path = self._get_filectx(path)
227 return self._remote.fctx_size(self.idx, path)
227 return self._remote.fctx_size(self.idx, path)
228
228
229 def get_file_history(self, path, limit=None, pre_load=None):
229 def get_file_history(self, path, limit=None, pre_load=None):
230 """
230 """
231 Returns history of file as reversed list of `MercurialCommit` objects
231 Returns history of file as reversed list of `MercurialCommit` objects
232 for which file at given ``path`` has been modified.
232 for which file at given ``path`` has been modified.
233 """
233 """
234 path = self._get_filectx(path)
234 path = self._get_filectx(path)
235 hist = self._remote.file_history(self.idx, path, limit)
235 hist = self._remote.file_history(self.idx, path, limit)
236 return [
236 return [
237 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
237 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
238 for commit_id in hist]
238 for commit_id in hist]
239
239
240 def get_file_annotate(self, path, pre_load=None):
240 def get_file_annotate(self, path, pre_load=None):
241 """
241 """
242 Returns a generator of four-element tuples with
242 Returns a generator of four-element tuples with
243 lineno, commit_id, a lazy commit loader and the line content
243 lineno, commit_id, a lazy commit loader and the line content
244 """
244 """
245 result = self._remote.fctx_annotate(self.idx, path)
245 result = self._remote.fctx_annotate(self.idx, path)
246
246
247 for ln_no, commit_id, content in result:
247 for ln_no, commit_id, content in result:
248 yield (
248 yield (
249 ln_no, commit_id,
249 ln_no, commit_id,
250 lambda commit_id=commit_id: self.repository.get_commit(
250 lambda commit_id=commit_id: self.repository.get_commit(
251 commit_id=commit_id, pre_load=pre_load),
251 commit_id=commit_id, pre_load=pre_load),
252 content)
252 content)
253
253
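A usage sketch for the annotate generator (``hg_commit`` stands in for a ``MercurialCommit`` obtained elsewhere; the file path is made up):

# Hypothetical: blame a single file; the third tuple element is a lazy loader.
for line_no, commit_id, loader, line in hg_commit.get_file_annotate('setup.py'):
    blame = loader()  # resolves the MercurialCommit that last touched the line
    # line_no, blame.author and line are now available for rendering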
254 def get_nodes(self, path):
254 def get_nodes(self, path):
255 """
255 """
256 Returns a combined list of ``DirNode`` and ``FileNode`` objects representing
256 Returns a combined list of ``DirNode`` and ``FileNode`` objects representing
257 the state of the commit at the given ``path``. If the node at the given
257 the state of the commit at the given ``path``. If the node at the given
258 ``path`` is not an instance of ``DirNode``, ``CommitError`` is raised.
258 ``path`` is not an instance of ``DirNode``, ``CommitError`` is raised.
259 """
259 """
260
260
261 if self._get_kind(path) != NodeKind.DIR:
261 if self._get_kind(path) != NodeKind.DIR:
262 raise CommitError(
262 raise CommitError(
263 "Directory does not exist for idx %s at '%s'" %
263 "Directory does not exist for idx %s at '%s'" %
264 (self.idx, path))
264 (self.idx, path))
265 path = self._fix_path(path)
265 path = self._fix_path(path)
266
266
267 filenodes = [
267 filenodes = [
268 FileNode(f, commit=self) for f in self._file_paths
268 FileNode(f, commit=self) for f in self._file_paths
269 if os.path.dirname(f) == path]
269 if os.path.dirname(f) == path]
270 # TODO: johbo: Check if this can be done in a more obvious way
270 # TODO: johbo: Check if this can be done in a more obvious way
271 dirs = path == '' and '' or [
271 dirs = path == '' and '' or [
272 d for d in self._dir_paths
272 d for d in self._dir_paths
273 if d and vcspath.dirname(d) == path]
273 if d and vcspath.dirname(d) == path]
274 dirnodes = [
274 dirnodes = [
275 DirNode(d, commit=self) for d in dirs
275 DirNode(d, commit=self) for d in dirs
276 if os.path.dirname(d) == path]
276 if os.path.dirname(d) == path]
277
277
278 alias = self.repository.alias
278 alias = self.repository.alias
279 for k, vals in self._submodules.iteritems():
279 for k, vals in self._submodules.iteritems():
280 loc = vals[0]
280 loc = vals[0]
281 commit = vals[1]
281 commit = vals[1]
282 dirnodes.append(
282 dirnodes.append(
283 SubModuleNode(k, url=loc, commit=commit, alias=alias))
283 SubModuleNode(k, url=loc, commit=commit, alias=alias))
284 nodes = dirnodes + filenodes
284 nodes = dirnodes + filenodes
285 # cache nodes
285 # cache nodes
286 for node in nodes:
286 for node in nodes:
287 self.nodes[node.path] = node
287 self.nodes[node.path] = node
288 nodes.sort()
288 nodes.sort()
289
289
290 return nodes
290 return nodes
291
291
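# A minimal sketch of browsing a commit tree with ``get_nodes`` above. The
# import path and repository location are assumptions for illustration only.
from rhodecode.lib.vcs.backends.hg.repository import MercurialRepository

repo = MercurialRepository('/tmp/example-hg-repo')  # hypothetical path
tip = repo.get_commit()
for node in tip.get_nodes(''):  # '' addresses the repository root
    # each entry is a DirNode, FileNode or SubModuleNode cached on the commit
    print node.path, type(node).__name__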
292 def get_node(self, path):
292 def get_node(self, path):
293 """
293 """
294 Returns the `Node` object at the given `path`. If there is no node at
294 Returns the `Node` object at the given `path`. If there is no node at
295 the given `path`, `NodeDoesNotExistError` is raised.
295 the given `path`, `NodeDoesNotExistError` is raised.
296 """
296 """
297 path = self._fix_path(path)
297 path = self._fix_path(path)
298
298
299 if path not in self.nodes:
299 if path not in self.nodes:
300 if path in self._file_paths:
300 if path in self._file_paths:
301 node = FileNode(path, commit=self)
301 node = FileNode(path, commit=self)
302 elif path in self._dir_paths:
302 elif path in self._dir_paths:
303 if path == '':
303 if path == '':
304 node = RootNode(commit=self)
304 node = RootNode(commit=self)
305 else:
305 else:
306 node = DirNode(path, commit=self)
306 node = DirNode(path, commit=self)
307 else:
307 else:
308 raise self.no_node_at_path(path)
308 raise self.no_node_at_path(path)
309
309
310 # cache node
310 # cache node
311 self.nodes[path] = node
311 self.nodes[path] = node
312 return self.nodes[path]
312 return self.nodes[path]
313
313
314 def get_largefile_node(self, path):
314 def get_largefile_node(self, path):
315 path = os.path.join(LARGEFILE_PREFIX, path)
315 path = os.path.join(LARGEFILE_PREFIX, path)
316
316
317 if self._remote.is_large_file(path):
317 if self._remote.is_large_file(path):
318 # the content of this regular FileNode is the hash of the largefile
318 # the content of this regular FileNode is the hash of the largefile
319 file_id = self.get_file_content(path).strip()
319 file_id = self.get_file_content(path).strip()
320 if self._remote.in_store(file_id):
320 if self._remote.in_store(file_id):
321 path = self._remote.store_path(file_id)
321 path = self._remote.store_path(file_id)
322 return LargeFileNode(path, commit=self)
322 return LargeFileNode(path, commit=self)
323 elif self._remote.in_user_cache(file_id):
323 elif self._remote.in_user_cache(file_id):
324 path = self._remote.store_path(file_id)
324 path = self._remote.store_path(file_id)
325 self._remote.link(file_id, path)
325 self._remote.link(file_id, path)
326 return LargeFileNode(path, commit=self)
326 return LargeFileNode(path, commit=self)
327
327
328 @LazyProperty
328 @LazyProperty
329 def _submodules(self):
329 def _submodules(self):
330 """
330 """
331 Returns a dictionary with submodule information from the substate file
331 Returns a dictionary with submodule information from the substate file
332 of the hg repository.
332 of the hg repository.
333 """
333 """
334 return self._remote.ctx_substate(self.idx)
334 return self._remote.ctx_substate(self.idx)
335
335
336 @LazyProperty
336 @LazyProperty
337 def affected_files(self):
337 def affected_files(self):
338 """
338 """
339 Gets a fast-accessible list of file changes for the given commit
339 Gets a fast-accessible list of file changes for the given commit
340 """
340 """
341 return self._remote.ctx_files(self.idx)
341 return self._remote.ctx_files(self.idx)
342
342
343 @property
343 @property
344 def added(self):
344 def added(self):
345 """
345 """
346 Returns list of added ``FileNode`` objects.
346 Returns list of added ``FileNode`` objects.
347 """
347 """
348 return AddedFileNodesGenerator([n for n in self.status[1]], self)
348 return AddedFileNodesGenerator([n for n in self.status[1]], self)
349
349
350 @property
350 @property
351 def changed(self):
351 def changed(self):
352 """
352 """
353 Returns list of modified ``FileNode`` objects.
353 Returns list of modified ``FileNode`` objects.
354 """
354 """
355 return ChangedFileNodesGenerator([n for n in self.status[0]], self)
355 return ChangedFileNodesGenerator([n for n in self.status[0]], self)
356
356
357 @property
357 @property
358 def removed(self):
358 def removed(self):
359 """
359 """
360 Returns list of removed ``FileNode`` objects.
360 Returns list of removed ``FileNode`` objects.
361 """
361 """
362 return RemovedFileNodesGenerator([n for n in self.status[2]], self)
362 return RemovedFileNodesGenerator([n for n in self.status[2]], self)
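# A short sketch of the status-based properties above (``added``, ``changed``,
# ``removed``); the import path and repository location are hypothetical, and
# the node generators are assumed to yield nodes exposing ``path``.
from rhodecode.lib.vcs.backends.hg.repository import MercurialRepository

repo = MercurialRepository('/tmp/example-hg-repo')  # hypothetical path
commit = repo.get_commit()
print 'added:  ', [node.path for node in commit.added]
print 'changed:', [node.path for node in commit.changed]
print 'removed:', [node.path for node in commit.removed]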
@@ -1,782 +1,781 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2016 RhodeCode GmbH
3 # Copyright (C) 2014-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG repository module
22 HG repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import binascii
26 import binascii
27 import os
27 import os
28 import re
28 import re
29 import shutil
29 import shutil
30 import urllib
30 import urllib
31
31
32 from zope.cachedescriptors.property import Lazy as LazyProperty
32 from zope.cachedescriptors.property import Lazy as LazyProperty
33
33
34 from rhodecode.lib.compat import OrderedDict
34 from rhodecode.lib.compat import OrderedDict
35 from rhodecode.lib.datelib import (
35 from rhodecode.lib.datelib import (
36 date_fromtimestamp, makedate, date_to_timestamp_plus_offset,
36 utcdate_fromtimestamp, makedate, date_astimestamp)
37 date_astimestamp)
38 from rhodecode.lib.utils import safe_unicode, safe_str
37 from rhodecode.lib.utils import safe_unicode, safe_str
39 from rhodecode.lib.vcs import connection
38 from rhodecode.lib.vcs import connection
40 from rhodecode.lib.vcs.backends.base import (
39 from rhodecode.lib.vcs.backends.base import (
41 BaseRepository, CollectionGenerator, Config, MergeResponse,
40 BaseRepository, CollectionGenerator, Config, MergeResponse,
42 MergeFailureReason)
41 MergeFailureReason)
43 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
42 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
44 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
43 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
45 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
44 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
46 from rhodecode.lib.vcs.conf import settings
45 from rhodecode.lib.vcs.conf import settings
47 from rhodecode.lib.vcs.exceptions import (
46 from rhodecode.lib.vcs.exceptions import (
48 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
47 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
49 TagDoesNotExistError, CommitDoesNotExistError)
48 TagDoesNotExistError, CommitDoesNotExistError)
50
49
51 hexlify = binascii.hexlify
50 hexlify = binascii.hexlify
52 nullid = "\0" * 20
51 nullid = "\0" * 20
53
52
54 log = logging.getLogger(__name__)
53 log = logging.getLogger(__name__)
55
54
56
55
57 class MercurialRepository(BaseRepository):
56 class MercurialRepository(BaseRepository):
58 """
57 """
59 Mercurial repository backend
58 Mercurial repository backend
60 """
59 """
61 DEFAULT_BRANCH_NAME = 'default'
60 DEFAULT_BRANCH_NAME = 'default'
62
61
63 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 def __init__(self, repo_path, config=None, create=False, src_url=None,
64 update_after_clone=False, with_wire=None):
63 update_after_clone=False, with_wire=None):
65 """
64 """
66 Raises RepositoryError if repository could not be found at the given
65 Raises RepositoryError if repository could not be found at the given
67 ``repo_path``.
66 ``repo_path``.
68
67
69 :param repo_path: local path of the repository
68 :param repo_path: local path of the repository
70 :param config: config object containing the repo configuration
69 :param config: config object containing the repo configuration
71 :param create=False: if set to True, would try to create repository if
70 :param create=False: if set to True, would try to create repository if
72 it does not exist rather than raising exception
71 it does not exist rather than raising exception
73 :param src_url=None: would try to clone repository from given location
72 :param src_url=None: would try to clone repository from given location
74 :param update_after_clone=False: sets update of working copy after
73 :param update_after_clone=False: sets update of working copy after
75 making a clone
74 making a clone
76 """
75 """
77 self.path = safe_str(os.path.abspath(repo_path))
76 self.path = safe_str(os.path.abspath(repo_path))
78 self.config = config if config else Config()
77 self.config = config if config else Config()
79 self._remote = connection.Hg(
78 self._remote = connection.Hg(
80 self.path, self.config, with_wire=with_wire)
79 self.path, self.config, with_wire=with_wire)
81
80
82 self._init_repo(create, src_url, update_after_clone)
81 self._init_repo(create, src_url, update_after_clone)
83
82
84 # caches
83 # caches
85 self._commit_ids = {}
84 self._commit_ids = {}
86
85
87 @LazyProperty
86 @LazyProperty
88 def commit_ids(self):
87 def commit_ids(self):
89 """
88 """
90 Returns a list of commit ids in ascending order. Being a lazy
89 Returns a list of commit ids in ascending order. Being a lazy
91 attribute, it allows external tools to inject shas from cache.
90 attribute, it allows external tools to inject shas from cache.
92 """
91 """
93 commit_ids = self._get_all_commit_ids()
92 commit_ids = self._get_all_commit_ids()
94 self._rebuild_cache(commit_ids)
93 self._rebuild_cache(commit_ids)
95 return commit_ids
94 return commit_ids
96
95
97 def _rebuild_cache(self, commit_ids):
96 def _rebuild_cache(self, commit_ids):
98 self._commit_ids = dict((commit_id, index)
97 self._commit_ids = dict((commit_id, index)
99 for index, commit_id in enumerate(commit_ids))
98 for index, commit_id in enumerate(commit_ids))
100
99
101 @LazyProperty
100 @LazyProperty
102 def branches(self):
101 def branches(self):
103 return self._get_branches()
102 return self._get_branches()
104
103
105 @LazyProperty
104 @LazyProperty
106 def branches_closed(self):
105 def branches_closed(self):
107 return self._get_branches(active=False, closed=True)
106 return self._get_branches(active=False, closed=True)
108
107
109 @LazyProperty
108 @LazyProperty
110 def branches_all(self):
109 def branches_all(self):
111 all_branches = {}
110 all_branches = {}
112 all_branches.update(self.branches)
111 all_branches.update(self.branches)
113 all_branches.update(self.branches_closed)
112 all_branches.update(self.branches_closed)
114 return all_branches
113 return all_branches
115
114
116 def _get_branches(self, active=True, closed=False):
115 def _get_branches(self, active=True, closed=False):
117 """
116 """
118 Gets branches for this repository
117 Gets branches for this repository
119 Returns only active (not closed) branches by default
118 Returns only active (not closed) branches by default
120
119
121 :param active: if True, return active (open) branches
120 :param active: if True, return active (open) branches
122 :param closed: if True, also return closed branches
121 :param closed: if True, also return closed branches
123
122
124 """
123 """
125 if self.is_empty():
124 if self.is_empty():
126 return {}
125 return {}
127
126
128 def get_name(ctx):
127 def get_name(ctx):
129 return ctx[0]
128 return ctx[0]
130
129
131 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
130 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
132 self._remote.branches(active, closed).items()]
131 self._remote.branches(active, closed).items()]
133
132
134 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
133 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
135
134
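# A minimal sketch of the branch accessors built on ``_get_branches`` above;
# the import path and repository location are hypothetical.
from rhodecode.lib.vcs.backends.hg.repository import MercurialRepository

repo = MercurialRepository('/tmp/example-hg-repo')  # hypothetical path
print repo.branches.keys()         # open branches only
print repo.branches_closed.keys()  # closed branches only
print repo.branches_all.keys()     # both merged into one OrderedDict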
136 @LazyProperty
135 @LazyProperty
137 def tags(self):
136 def tags(self):
138 """
137 """
139 Gets tags for this repository
138 Gets tags for this repository
140 """
139 """
141 return self._get_tags()
140 return self._get_tags()
142
141
143 def _get_tags(self):
142 def _get_tags(self):
144 if self.is_empty():
143 if self.is_empty():
145 return {}
144 return {}
146
145
147 def get_name(ctx):
146 def get_name(ctx):
148 return ctx[0]
147 return ctx[0]
149
148
150 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
149 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
151 self._remote.tags().items()]
150 self._remote.tags().items()]
152
151
153 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
152 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
154
153
155 def tag(self, name, user, commit_id=None, message=None, date=None,
154 def tag(self, name, user, commit_id=None, message=None, date=None,
156 **kwargs):
155 **kwargs):
157 """
156 """
158 Creates and returns a tag for the given ``commit_id``.
157 Creates and returns a tag for the given ``commit_id``.
159
158
160 :param name: name for new tag
159 :param name: name for new tag
161 :param user: full username, e.g.: "Joe Doe <joe.doe@example.com>"
160 :param user: full username, e.g.: "Joe Doe <joe.doe@example.com>"
162 :param commit_id: commit id for which new tag would be created
161 :param commit_id: commit id for which new tag would be created
163 :param message: message of the tag's commit
162 :param message: message of the tag's commit
164 :param date: date of tag's commit
163 :param date: date of tag's commit
165
164
166 :raises TagAlreadyExistError: if tag with same name already exists
165 :raises TagAlreadyExistError: if tag with same name already exists
167 """
166 """
168 if name in self.tags:
167 if name in self.tags:
169 raise TagAlreadyExistError("Tag %s already exists" % name)
168 raise TagAlreadyExistError("Tag %s already exists" % name)
170 commit = self.get_commit(commit_id=commit_id)
169 commit = self.get_commit(commit_id=commit_id)
171 local = kwargs.setdefault('local', False)
170 local = kwargs.setdefault('local', False)
172
171
173 if message is None:
172 if message is None:
174 message = "Added tag %s for commit %s" % (name, commit.short_id)
173 message = "Added tag %s for commit %s" % (name, commit.short_id)
175
174
176 date, tz = date_to_timestamp_plus_offset(date)
175 date, tz = date_to_timestamp_plus_offset(date)
177
176
178 self._remote.tag(
177 self._remote.tag(
179 name, commit.raw_id, message, local, user, date, tz)
178 name, commit.raw_id, message, local, user, date, tz)
180
179
181 # Reinitialize tags
180 # Reinitialize tags
182 self.tags = self._get_tags()
181 self.tags = self._get_tags()
183 tag_id = self.tags[name]
182 tag_id = self.tags[name]
184
183
185 return self.get_commit(commit_id=tag_id)
184 return self.get_commit(commit_id=tag_id)
186
185
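# A minimal sketch of tagging through the method above; the repository
# location, tag name and user string are hypothetical example values.
from rhodecode.lib.vcs.backends.hg.repository import MercurialRepository

repo = MercurialRepository('/tmp/example-hg-repo')  # hypothetical path
tip = repo.get_commit()
tag_commit = repo.tag(
    name='v1.0.0', user='Joe Doe <joe.doe@example.com>',
    commit_id=tip.raw_id, message='Added tag v1.0.0')
print tag_commit.raw_id  # the commit created by the tag operation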
187 def remove_tag(self, name, user, message=None, date=None):
186 def remove_tag(self, name, user, message=None, date=None):
188 """
187 """
189 Removes tag with the given `name`.
188 Removes tag with the given `name`.
190
189
191 :param name: name of the tag to be removed
190 :param name: name of the tag to be removed
192 :param user: full username, e.g.: "Joe Doe <joe.doe@example.com>"
191 :param user: full username, e.g.: "Joe Doe <joe.doe@example.com>"
193 :param message: message of the tag's removal commit
192 :param message: message of the tag's removal commit
194 :param date: date of tag's removal commit
193 :param date: date of tag's removal commit
195
194
196 :raises TagDoesNotExistError: if tag with given name does not exist
195 :raises TagDoesNotExistError: if tag with given name does not exist
197 """
196 """
198 if name not in self.tags:
197 if name not in self.tags:
199 raise TagDoesNotExistError("Tag %s does not exist" % name)
198 raise TagDoesNotExistError("Tag %s does not exist" % name)
200 if message is None:
199 if message is None:
201 message = "Removed tag %s" % name
200 message = "Removed tag %s" % name
202 local = False
201 local = False
203
202
204 date, tz = date_to_timestamp_plus_offset(date)
203 date, tz = date_to_timestamp_plus_offset(date)
205
204
206 self._remote.tag(name, nullid, message, local, user, date, tz)
205 self._remote.tag(name, nullid, message, local, user, date, tz)
207 self.tags = self._get_tags()
206 self.tags = self._get_tags()
208
207
209 @LazyProperty
208 @LazyProperty
210 def bookmarks(self):
209 def bookmarks(self):
211 """
210 """
212 Gets bookmarks for this repository
211 Gets bookmarks for this repository
213 """
212 """
214 return self._get_bookmarks()
213 return self._get_bookmarks()
215
214
216 def _get_bookmarks(self):
215 def _get_bookmarks(self):
217 if self.is_empty():
216 if self.is_empty():
218 return {}
217 return {}
219
218
220 def get_name(ctx):
219 def get_name(ctx):
221 return ctx[0]
220 return ctx[0]
222
221
223 _bookmarks = [
222 _bookmarks = [
224 (safe_unicode(n), hexlify(h)) for n, h in
223 (safe_unicode(n), hexlify(h)) for n, h in
225 self._remote.bookmarks().items()]
224 self._remote.bookmarks().items()]
226
225
227 return OrderedDict(sorted(_bookmarks, key=get_name))
226 return OrderedDict(sorted(_bookmarks, key=get_name))
228
227
229 def _get_all_commit_ids(self):
228 def _get_all_commit_ids(self):
230 return self._remote.get_all_commit_ids('visible')
229 return self._remote.get_all_commit_ids('visible')
231
230
232 def get_diff(
231 def get_diff(
233 self, commit1, commit2, path='', ignore_whitespace=False,
232 self, commit1, commit2, path='', ignore_whitespace=False,
234 context=3, path1=None):
233 context=3, path1=None):
235 """
234 """
236 Returns (git like) *diff*, as plain text. Shows changes introduced by
235 Returns (git like) *diff*, as plain text. Shows changes introduced by
237 `commit2` since `commit1`.
236 `commit2` since `commit1`.
238
237
239 :param commit1: Entry point from which diff is shown. Can be
238 :param commit1: Entry point from which diff is shown. Can be
240 ``self.EMPTY_COMMIT`` - in this case the patch shows all
239 ``self.EMPTY_COMMIT`` - in this case the patch shows all
241 changes from the empty state of the repository up to `commit2`
240 changes from the empty state of the repository up to `commit2`
242 :param commit2: Until which commit changes should be shown.
241 :param commit2: Until which commit changes should be shown.
243 :param ignore_whitespace: If set to ``True``, would not show whitespace
242 :param ignore_whitespace: If set to ``True``, would not show whitespace
244 changes. Defaults to ``False``.
243 changes. Defaults to ``False``.
245 :param context: How many lines before/after changed lines should be
244 :param context: How many lines before/after changed lines should be
246 shown. Defaults to ``3``.
245 shown. Defaults to ``3``.
247 """
246 """
248 self._validate_diff_commits(commit1, commit2)
247 self._validate_diff_commits(commit1, commit2)
249 if path1 is not None and path1 != path:
248 if path1 is not None and path1 != path:
250 raise ValueError("Diff of two different paths not supported.")
249 raise ValueError("Diff of two different paths not supported.")
251
250
252 if path:
251 if path:
253 file_filter = [self.path, path]
252 file_filter = [self.path, path]
254 else:
253 else:
255 file_filter = None
254 file_filter = None
256
255
257 diff = self._remote.diff(
256 diff = self._remote.diff(
258 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
257 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
259 opt_git=True, opt_ignorews=ignore_whitespace,
258 opt_git=True, opt_ignorews=ignore_whitespace,
260 context=context)
259 context=context)
261 return MercurialDiff(diff)
260 return MercurialDiff(diff)
262
261
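# A minimal sketch of producing a diff with the method above; the repository
# location is hypothetical and the diff object is assumed to expose the raw
# git-style text via its ``raw`` attribute.
from rhodecode.lib.vcs.backends.hg.repository import MercurialRepository

repo = MercurialRepository('/tmp/example-hg-repo')  # hypothetical path
commit1 = repo.get_commit(commit_idx=0)
commit2 = repo.get_commit()  # defaults to tip
diff = repo.get_diff(commit1, commit2, ignore_whitespace=True, context=5)
print diff.raw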
263 def strip(self, commit_id, branch=None):
262 def strip(self, commit_id, branch=None):
264 self._remote.strip(commit_id, update=False, backup="none")
263 self._remote.strip(commit_id, update=False, backup="none")
265
264
266 self.commit_ids = self._get_all_commit_ids()
265 self.commit_ids = self._get_all_commit_ids()
267 self._rebuild_cache(self.commit_ids)
266 self._rebuild_cache(self.commit_ids)
268
267
269 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
268 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
270 if commit_id1 == commit_id2:
269 if commit_id1 == commit_id2:
271 return commit_id1
270 return commit_id1
272
271
273 ancestors = self._remote.revs_from_revspec(
272 ancestors = self._remote.revs_from_revspec(
274 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
273 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
275 other_path=repo2.path)
274 other_path=repo2.path)
276 return repo2[ancestors[0]].raw_id if ancestors else None
275 return repo2[ancestors[0]].raw_id if ancestors else None
277
276
278 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
277 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
279 if commit_id1 == commit_id2:
278 if commit_id1 == commit_id2:
280 commits = []
279 commits = []
281 else:
280 else:
282 if merge:
281 if merge:
283 indexes = self._remote.revs_from_revspec(
282 indexes = self._remote.revs_from_revspec(
284 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
283 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
285 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
284 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
286 else:
285 else:
287 indexes = self._remote.revs_from_revspec(
286 indexes = self._remote.revs_from_revspec(
288 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
287 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
289 commit_id1, other_path=repo2.path)
288 commit_id1, other_path=repo2.path)
290
289
291 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
290 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
292 for idx in indexes]
291 for idx in indexes]
293
292
294 return commits
293 return commits
295
294
296 @staticmethod
295 @staticmethod
297 def check_url(url, config):
296 def check_url(url, config):
298 """
297 """
299 Function will check given url and try to verify if it's a valid
298 Function will check given url and try to verify if it's a valid
300 link. Sometimes it may happen that mercurial will issue a basic
299 link. Sometimes it may happen that mercurial will issue a basic
301 auth request that can cause the whole API to hang when used from python
300 auth request that can cause the whole API to hang when used from python
302 or other external calls.
301 or other external calls.
303
302
304 On failure it'll raise urllib2.HTTPError; the exception is also thrown
303 On failure it'll raise urllib2.HTTPError; the exception is also thrown
305 when the return code is not 200
304 when the return code is not 200
306 """
305 """
307 # check first if it's not a local url
306 # check first if it's not a local url
308 if os.path.isdir(url) or url.startswith('file:'):
307 if os.path.isdir(url) or url.startswith('file:'):
309 return True
308 return True
310
309
311 # Request the _remote to verify the url
310 # Request the _remote to verify the url
312 return connection.Hg.check_url(url, config.serialize())
311 return connection.Hg.check_url(url, config.serialize())
313
312
314 @staticmethod
313 @staticmethod
315 def is_valid_repository(path):
314 def is_valid_repository(path):
316 return os.path.isdir(os.path.join(path, '.hg'))
315 return os.path.isdir(os.path.join(path, '.hg'))
317
316
318 def _init_repo(self, create, src_url=None, update_after_clone=False):
317 def _init_repo(self, create, src_url=None, update_after_clone=False):
319 """
318 """
320 Function will check for mercurial repository in given path. If there
319 Function will check for mercurial repository in given path. If there
321 is no repository in that path it will raise an exception unless
320 is no repository in that path it will raise an exception unless
322 `create` parameter is set to True - in that case repository would
321 `create` parameter is set to True - in that case repository would
323 be created.
322 be created.
324
323
325 If `src_url` is given, would try to clone repository from the
324 If `src_url` is given, would try to clone repository from the
326 location at given clone_point. Additionally it'll make update to
325 location at given clone_point. Additionally it'll make update to
327 working copy accordingly to `update_after_clone` flag.
326 working copy accordingly to `update_after_clone` flag.
328 """
327 """
329 if create and os.path.exists(self.path):
328 if create and os.path.exists(self.path):
330 raise RepositoryError(
329 raise RepositoryError(
331 "Cannot create repository at %s, location already exist"
330 "Cannot create repository at %s, location already exist"
332 % self.path)
331 % self.path)
333
332
334 if src_url:
333 if src_url:
335 url = str(self._get_url(src_url))
334 url = str(self._get_url(src_url))
336 MercurialRepository.check_url(url, self.config)
335 MercurialRepository.check_url(url, self.config)
337
336
338 self._remote.clone(url, self.path, update_after_clone)
337 self._remote.clone(url, self.path, update_after_clone)
339
338
340 # Don't try to create if we've already cloned repo
339 # Don't try to create if we've already cloned repo
341 create = False
340 create = False
342
341
343 if create:
342 if create:
344 os.makedirs(self.path, mode=0755)
343 os.makedirs(self.path, mode=0755)
345
344
346 self._remote.localrepository(create)
345 self._remote.localrepository(create)
347
346
348 @LazyProperty
347 @LazyProperty
349 def in_memory_commit(self):
348 def in_memory_commit(self):
350 return MercurialInMemoryCommit(self)
349 return MercurialInMemoryCommit(self)
351
350
352 @LazyProperty
351 @LazyProperty
353 def description(self):
352 def description(self):
354 description = self._remote.get_config_value(
353 description = self._remote.get_config_value(
355 'web', 'description', untrusted=True)
354 'web', 'description', untrusted=True)
356 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
355 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
357
356
358 @LazyProperty
357 @LazyProperty
359 def contact(self):
358 def contact(self):
360 contact = (
359 contact = (
361 self._remote.get_config_value("web", "contact") or
360 self._remote.get_config_value("web", "contact") or
362 self._remote.get_config_value("ui", "username"))
361 self._remote.get_config_value("ui", "username"))
363 return safe_unicode(contact or self.DEFAULT_CONTACT)
362 return safe_unicode(contact or self.DEFAULT_CONTACT)
364
363
365 @LazyProperty
364 @LazyProperty
366 def last_change(self):
365 def last_change(self):
367 """
366 """
368 Returns last change made on this repository as
367 Returns last change made on this repository as
369 `datetime.datetime` object
368 `datetime.datetime` object
370 """
369 """
371 return date_fromtimestamp(self._get_mtime(), makedate()[1])
370 return utcdate_fromtimestamp(self._get_mtime(), makedate()[1])
372
371
373 def _get_mtime(self):
372 def _get_mtime(self):
374 try:
373 try:
375 return date_astimestamp(self.get_commit().date)
374 return date_astimestamp(self.get_commit().date)
376 except RepositoryError:
375 except RepositoryError:
377 # fallback to filesystem
376 # fallback to filesystem
378 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
377 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
379 st_path = os.path.join(self.path, '.hg', "store")
378 st_path = os.path.join(self.path, '.hg', "store")
380 if os.path.exists(cl_path):
379 if os.path.exists(cl_path):
381 return os.stat(cl_path).st_mtime
380 return os.stat(cl_path).st_mtime
382 else:
381 else:
383 return os.stat(st_path).st_mtime
382 return os.stat(st_path).st_mtime
384
383
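# ``last_change`` combines the helpers above: the modification time is taken
# from the latest commit (or the filesystem as a fallback) and converted with
# ``utcdate_fromtimestamp`` into a naive UTC datetime. A minimal sketch, with
# a hypothetical repository location:
from rhodecode.lib.vcs.backends.hg.repository import MercurialRepository

repo = MercurialRepository('/tmp/example-hg-repo')  # hypothetical path
print repo.last_change  # datetime.datetime of the last change, in UTC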
385 def _sanitize_commit_idx(self, idx):
384 def _sanitize_commit_idx(self, idx):
386 # Note: Mercurial has ``int(-1)`` reserved as not existing id_or_idx
385 # Note: Mercurial has ``int(-1)`` reserved as not existing id_or_idx
387 # number. A `long` is treated in the correct way though. So we convert
386 # number. A `long` is treated in the correct way though. So we convert
388 # `int` to `long` here to make sure it is handled correctly.
387 # `int` to `long` here to make sure it is handled correctly.
389 if isinstance(idx, int):
388 if isinstance(idx, int):
390 return long(idx)
389 return long(idx)
391 return idx
390 return idx
392
391
393 def _get_url(self, url):
392 def _get_url(self, url):
394 """
393 """
395 Returns a normalized url. If no scheme is given, falls back
394 Returns a normalized url. If no scheme is given, falls back
396 to the filesystem
395 to the filesystem
397 (``file:///``) scheme.
396 (``file:///``) scheme.
398 """
397 """
399 url = url.encode('utf8')
398 url = url.encode('utf8')
400 if url != 'default' and '://' not in url:
399 if url != 'default' and '://' not in url:
401 url = "file:" + urllib.pathname2url(url)
400 url = "file:" + urllib.pathname2url(url)
402 return url
401 return url
403
402
404 def get_hook_location(self):
403 def get_hook_location(self):
405 """
404 """
406 Returns the absolute path to the location where hooks are stored
405 Returns the absolute path to the location where hooks are stored
407 """
406 """
408 return os.path.join(self.path, '.hg', '.hgrc')
407 return os.path.join(self.path, '.hg', '.hgrc')
409
408
410 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
409 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
411 """
410 """
412 Returns ``MercurialCommit`` object representing repository's
411 Returns ``MercurialCommit`` object representing repository's
413 commit at the given `commit_id` or `commit_idx`.
412 commit at the given `commit_id` or `commit_idx`.
414 """
413 """
415 if self.is_empty():
414 if self.is_empty():
416 raise EmptyRepositoryError("There are no commits yet")
415 raise EmptyRepositoryError("There are no commits yet")
417
416
418 if commit_id is not None:
417 if commit_id is not None:
419 self._validate_commit_id(commit_id)
418 self._validate_commit_id(commit_id)
420 try:
419 try:
421 idx = self._commit_ids[commit_id]
420 idx = self._commit_ids[commit_id]
422 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
421 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
423 except KeyError:
422 except KeyError:
424 pass
423 pass
425 elif commit_idx is not None:
424 elif commit_idx is not None:
426 self._validate_commit_idx(commit_idx)
425 self._validate_commit_idx(commit_idx)
427 commit_idx = self._sanitize_commit_idx(commit_idx)
426 commit_idx = self._sanitize_commit_idx(commit_idx)
428 try:
427 try:
429 id_ = self.commit_ids[commit_idx]
428 id_ = self.commit_ids[commit_idx]
430 if commit_idx < 0:
429 if commit_idx < 0:
431 commit_idx += len(self.commit_ids)
430 commit_idx += len(self.commit_ids)
432 return MercurialCommit(
431 return MercurialCommit(
433 self, id_, commit_idx, pre_load=pre_load)
432 self, id_, commit_idx, pre_load=pre_load)
434 except IndexError:
433 except IndexError:
435 commit_id = commit_idx
434 commit_id = commit_idx
436 else:
435 else:
437 commit_id = "tip"
436 commit_id = "tip"
438
437
439 # TODO Paris: Ugly hack to "serialize" long for msgpack
438 # TODO Paris: Ugly hack to "serialize" long for msgpack
440 if isinstance(commit_id, long):
439 if isinstance(commit_id, long):
441 commit_id = float(commit_id)
440 commit_id = float(commit_id)
442
441
443 if isinstance(commit_id, unicode):
442 if isinstance(commit_id, unicode):
444 commit_id = safe_str(commit_id)
443 commit_id = safe_str(commit_id)
445
444
446 raw_id, idx = self._remote.lookup(commit_id, both=True)
445 raw_id, idx = self._remote.lookup(commit_id, both=True)
447
446
448 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
447 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
449
448
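# A minimal sketch of the lookups supported by ``get_commit`` above; the
# repository location is hypothetical.
from rhodecode.lib.vcs.backends.hg.repository import MercurialRepository

repo = MercurialRepository('/tmp/example-hg-repo')  # hypothetical path
tip = repo.get_commit()                            # defaults to "tip"
first = repo.get_commit(commit_idx=0)              # lookup by index
same_tip = repo.get_commit(commit_id=tip.raw_id)   # lookup by full commit id
assert tip.raw_id == same_tip.raw_id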
450 def get_commits(
449 def get_commits(
451 self, start_id=None, end_id=None, start_date=None, end_date=None,
450 self, start_id=None, end_id=None, start_date=None, end_date=None,
452 branch_name=None, pre_load=None):
451 branch_name=None, pre_load=None):
453 """
452 """
454 Returns generator of ``MercurialCommit`` objects from start to end
453 Returns generator of ``MercurialCommit`` objects from start to end
455 (both are inclusive)
454 (both are inclusive)
456
455
457 :param start_id: None, str(commit_id)
456 :param start_id: None, str(commit_id)
458 :param end_id: None, str(commit_id)
457 :param end_id: None, str(commit_id)
459 :param start_date: if specified, commits with commit date less than
458 :param start_date: if specified, commits with commit date less than
460 ``start_date`` would be filtered out from returned set
459 ``start_date`` would be filtered out from returned set
461 :param end_date: if specified, commits with commit date greater than
460 :param end_date: if specified, commits with commit date greater than
462 ``end_date`` would be filtered out from returned set
461 ``end_date`` would be filtered out from returned set
463 :param branch_name: if specified, commits not reachable from given
462 :param branch_name: if specified, commits not reachable from given
464 branch would be filtered out from returned set
463 branch would be filtered out from returned set
465
464
466 :raise BranchDoesNotExistError: If given ``branch_name`` does not
465 :raise BranchDoesNotExistError: If given ``branch_name`` does not
467 exist.
466 exist.
468 :raise CommitDoesNotExistError: If commit for given ``start`` or
467 :raise CommitDoesNotExistError: If commit for given ``start`` or
469 ``end`` could not be found.
468 ``end`` could not be found.
470 """
469 """
471 # actually we should check now if it's not an empty repo
470 # actually we should check now if it's not an empty repo
472 branch_ancestors = False
471 branch_ancestors = False
473 if self.is_empty():
472 if self.is_empty():
474 raise EmptyRepositoryError("There are no commits yet")
473 raise EmptyRepositoryError("There are no commits yet")
475 self._validate_branch_name(branch_name)
474 self._validate_branch_name(branch_name)
476
475
477 if start_id is not None:
476 if start_id is not None:
478 self._validate_commit_id(start_id)
477 self._validate_commit_id(start_id)
479 c_start = self.get_commit(commit_id=start_id)
478 c_start = self.get_commit(commit_id=start_id)
480 start_pos = self._commit_ids[c_start.raw_id]
479 start_pos = self._commit_ids[c_start.raw_id]
481 else:
480 else:
482 start_pos = None
481 start_pos = None
483
482
484 if end_id is not None:
483 if end_id is not None:
485 self._validate_commit_id(end_id)
484 self._validate_commit_id(end_id)
486 c_end = self.get_commit(commit_id=end_id)
485 c_end = self.get_commit(commit_id=end_id)
487 end_pos = max(0, self._commit_ids[c_end.raw_id])
486 end_pos = max(0, self._commit_ids[c_end.raw_id])
488 else:
487 else:
489 end_pos = None
488 end_pos = None
490
489
491 if None not in [start_id, end_id] and start_pos > end_pos:
490 if None not in [start_id, end_id] and start_pos > end_pos:
492 raise RepositoryError(
491 raise RepositoryError(
493 "Start commit '%s' cannot be after end commit '%s'" %
492 "Start commit '%s' cannot be after end commit '%s'" %
494 (start_id, end_id))
493 (start_id, end_id))
495
494
496 if end_pos is not None:
495 if end_pos is not None:
497 end_pos += 1
496 end_pos += 1
498
497
499 commit_filter = []
498 commit_filter = []
500 if branch_name and not branch_ancestors:
499 if branch_name and not branch_ancestors:
501 commit_filter.append('branch("%s")' % branch_name)
500 commit_filter.append('branch("%s")' % branch_name)
502 elif branch_name and branch_ancestors:
501 elif branch_name and branch_ancestors:
503 commit_filter.append('ancestors(branch("%s"))' % branch_name)
502 commit_filter.append('ancestors(branch("%s"))' % branch_name)
504 if start_date and not end_date:
503 if start_date and not end_date:
505 commit_filter.append('date(">%s")' % start_date)
504 commit_filter.append('date(">%s")' % start_date)
506 if end_date and not start_date:
505 if end_date and not start_date:
507 commit_filter.append('date("<%s")' % end_date)
506 commit_filter.append('date("<%s")' % end_date)
508 if start_date and end_date:
507 if start_date and end_date:
509 commit_filter.append(
508 commit_filter.append(
510 'date(">%s") and date("<%s")' % (start_date, end_date))
509 'date(">%s") and date("<%s")' % (start_date, end_date))
511
510
512 # TODO: johbo: Figure out a simpler way for this solution
511 # TODO: johbo: Figure out a simpler way for this solution
513 collection_generator = CollectionGenerator
512 collection_generator = CollectionGenerator
514 if commit_filter:
513 if commit_filter:
515 commit_filter = map(safe_str, commit_filter)
514 commit_filter = map(safe_str, commit_filter)
516 revisions = self._remote.rev_range(commit_filter)
515 revisions = self._remote.rev_range(commit_filter)
517 collection_generator = MercurialIndexBasedCollectionGenerator
516 collection_generator = MercurialIndexBasedCollectionGenerator
518 else:
517 else:
519 revisions = self.commit_ids
518 revisions = self.commit_ids
520
519
521 if start_pos or end_pos:
520 if start_pos or end_pos:
522 revisions = revisions[start_pos:end_pos]
521 revisions = revisions[start_pos:end_pos]
523
522
524 return collection_generator(self, revisions, pre_load=pre_load)
523 return collection_generator(self, revisions, pre_load=pre_load)
525
524
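# A minimal sketch of iterating commits with the filters described above;
# the repository location, branch name and date range are hypothetical values.
import datetime
from rhodecode.lib.vcs.backends.hg.repository import MercurialRepository

repo = MercurialRepository('/tmp/example-hg-repo')  # hypothetical path
commits = repo.get_commits(
    branch_name='default',
    start_date=datetime.datetime(2016, 1, 1),
    end_date=datetime.datetime(2016, 12, 31))
for commit in commits:
    print commit.short_id, commit.message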
526 def pull(self, url, commit_ids=None):
525 def pull(self, url, commit_ids=None):
527 """
526 """
528 Tries to pull changes from external location.
527 Tries to pull changes from external location.
529
528
530 :param commit_ids: Optional. Can be set to a list of commit ids
529 :param commit_ids: Optional. Can be set to a list of commit ids
531 which shall be pulled from the other repository.
530 which shall be pulled from the other repository.
532 """
531 """
533 url = self._get_url(url)
532 url = self._get_url(url)
534 self._remote.pull(url, commit_ids=commit_ids)
533 self._remote.pull(url, commit_ids=commit_ids)
535
534
536 def _local_clone(self, clone_path):
535 def _local_clone(self, clone_path):
537 """
536 """
538 Create a local clone of the current repo.
537 Create a local clone of the current repo.
539 """
538 """
540 self._remote.clone(self.path, clone_path, update_after_clone=True,
539 self._remote.clone(self.path, clone_path, update_after_clone=True,
541 hooks=False)
540 hooks=False)
542
541
543 def _update(self, revision, clean=False):
542 def _update(self, revision, clean=False):
544 """
543 """
545 Update the working copy to the specified revision.
544 Update the working copy to the specified revision.
546 """
545 """
547 self._remote.update(revision, clean=clean)
546 self._remote.update(revision, clean=clean)
548
547
549 def _identify(self):
548 def _identify(self):
550 """
549 """
551 Return the current state of the working directory.
550 Return the current state of the working directory.
552 """
551 """
553 return self._remote.identify().strip().rstrip('+')
552 return self._remote.identify().strip().rstrip('+')
554
553
555 def _heads(self, branch=None):
554 def _heads(self, branch=None):
556 """
555 """
557 Return the commit ids of the repository heads.
556 Return the commit ids of the repository heads.
558 """
557 """
559 return self._remote.heads(branch=branch).strip().split(' ')
558 return self._remote.heads(branch=branch).strip().split(' ')
560
559
561 def _ancestor(self, revision1, revision2):
560 def _ancestor(self, revision1, revision2):
562 """
561 """
563 Return the common ancestor of the two revisions.
562 Return the common ancestor of the two revisions.
564 """
563 """
565 return self._remote.ancestor(
564 return self._remote.ancestor(
566 revision1, revision2).strip().split(':')[-1]
565 revision1, revision2).strip().split(':')[-1]
567
566
568 def _local_push(
567 def _local_push(
569 self, revision, repository_path, push_branches=False,
568 self, revision, repository_path, push_branches=False,
570 enable_hooks=False):
569 enable_hooks=False):
571 """
570 """
572 Push the given revision to the specified repository.
571 Push the given revision to the specified repository.
573
572
574 :param push_branches: allow to create branches in the target repo.
573 :param push_branches: allow to create branches in the target repo.
575 """
574 """
576 self._remote.push(
575 self._remote.push(
577 [revision], repository_path, hooks=enable_hooks,
576 [revision], repository_path, hooks=enable_hooks,
578 push_branches=push_branches)
577 push_branches=push_branches)
579
578
580 def _local_merge(self, target_ref, merge_message, user_name, user_email,
579 def _local_merge(self, target_ref, merge_message, user_name, user_email,
581 source_ref):
580 source_ref):
582 """
581 """
583 Merge the given source_revision into the checked out revision.
582 Merge the given source_revision into the checked out revision.
584
583
585 Returns the commit id of the merge and a boolean indicating if the
584 Returns the commit id of the merge and a boolean indicating if the
586 commit needs to be pushed.
585 commit needs to be pushed.
587 """
586 """
588 self._update(target_ref.commit_id)
587 self._update(target_ref.commit_id)
589
588
590 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
589 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
591 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
590 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
592
591
593 if ancestor == source_ref.commit_id:
592 if ancestor == source_ref.commit_id:
594 # Nothing to do, the changes were already integrated
593 # Nothing to do, the changes were already integrated
595 return target_ref.commit_id, False
594 return target_ref.commit_id, False
596
595
597 elif ancestor == target_ref.commit_id and is_the_same_branch:
596 elif ancestor == target_ref.commit_id and is_the_same_branch:
598 # In this case we should force a commit message
597 # In this case we should force a commit message
599 return source_ref.commit_id, True
598 return source_ref.commit_id, True
600
599
601 if settings.HG_USE_REBASE_FOR_MERGING:
600 if settings.HG_USE_REBASE_FOR_MERGING:
602 try:
601 try:
603 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
602 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
604 target_ref.commit_id)
603 target_ref.commit_id)
605 self.bookmark(bookmark_name, revision=source_ref.commit_id)
604 self.bookmark(bookmark_name, revision=source_ref.commit_id)
606 self._remote.rebase(
605 self._remote.rebase(
607 source=source_ref.commit_id, dest=target_ref.commit_id)
606 source=source_ref.commit_id, dest=target_ref.commit_id)
608 self._update(bookmark_name)
607 self._update(bookmark_name)
609 return self._identify(), True
608 return self._identify(), True
610 except RepositoryError:
609 except RepositoryError:
611 # Cleanup any rebase leftovers
610 # Cleanup any rebase leftovers
612 self._remote.rebase(abort=True)
611 self._remote.rebase(abort=True)
613 self._remote.update(clean=True)
612 self._remote.update(clean=True)
614 raise
613 raise
615 else:
614 else:
616 try:
615 try:
617 self._remote.merge(source_ref.commit_id)
616 self._remote.merge(source_ref.commit_id)
618 self._remote.commit(
617 self._remote.commit(
619 message=safe_str(merge_message),
618 message=safe_str(merge_message),
620 username=safe_str('%s <%s>' % (user_name, user_email)))
619 username=safe_str('%s <%s>' % (user_name, user_email)))
621 return self._identify(), True
620 return self._identify(), True
622 except RepositoryError:
621 except RepositoryError:
623 # Cleanup any merge leftovers
622 # Cleanup any merge leftovers
624 self._remote.update(clean=True)
623 self._remote.update(clean=True)
625 raise
624 raise
626
625
627 def _is_the_same_branch(self, target_ref, source_ref):
626 def _is_the_same_branch(self, target_ref, source_ref):
628 return (
627 return (
629 self._get_branch_name(target_ref) ==
628 self._get_branch_name(target_ref) ==
630 self._get_branch_name(source_ref))
629 self._get_branch_name(source_ref))
631
630
632 def _get_branch_name(self, ref):
631 def _get_branch_name(self, ref):
633 if ref.type == 'branch':
632 if ref.type == 'branch':
634 return ref.name
633 return ref.name
635 return self._remote.ctx_branch(ref.commit_id)
634 return self._remote.ctx_branch(ref.commit_id)
636
635
637 def _get_shadow_repository_path(self, workspace_id):
636 def _get_shadow_repository_path(self, workspace_id):
638 # The name of the shadow repository must start with '.', so it is
637 # The name of the shadow repository must start with '.', so it is
639 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
638 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
640 return os.path.join(
639 return os.path.join(
641 os.path.dirname(self.path),
640 os.path.dirname(self.path),
642 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
641 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
643
642
644 def _maybe_prepare_merge_workspace(self, workspace_id, unused_target_ref):
643 def _maybe_prepare_merge_workspace(self, workspace_id, unused_target_ref):
645 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
644 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
646 if not os.path.exists(shadow_repository_path):
645 if not os.path.exists(shadow_repository_path):
647 self._local_clone(shadow_repository_path)
646 self._local_clone(shadow_repository_path)
648 log.debug(
647 log.debug(
649 'Prepared shadow repository in %s', shadow_repository_path)
648 'Prepared shadow repository in %s', shadow_repository_path)
650
649
651 return shadow_repository_path
650 return shadow_repository_path
652
651
653 def cleanup_merge_workspace(self, workspace_id):
652 def cleanup_merge_workspace(self, workspace_id):
654 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
653 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
655 shutil.rmtree(shadow_repository_path, ignore_errors=True)
654 shutil.rmtree(shadow_repository_path, ignore_errors=True)
656
655
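# A minimal sketch of the shadow-workspace lifecycle used for server-side
# merges above; the repository location and workspace id are hypothetical.
from rhodecode.lib.vcs.backends.hg.repository import MercurialRepository

repo = MercurialRepository('/tmp/example-hg-repo')  # hypothetical path
shadow_path = repo._maybe_prepare_merge_workspace('pr-42', None)
print shadow_path  # e.g. /tmp/.__shadow_example-hg-repo_pr-42
repo.cleanup_merge_workspace('pr-42')  # removes the shadow clone again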
657 def _merge_repo(self, shadow_repository_path, target_ref,
656 def _merge_repo(self, shadow_repository_path, target_ref,
658 source_repo, source_ref, merge_message,
657 source_repo, source_ref, merge_message,
659 merger_name, merger_email, dry_run=False):
658 merger_name, merger_email, dry_run=False):
660 if target_ref.commit_id not in self._heads():
659 if target_ref.commit_id not in self._heads():
661 return MergeResponse(
660 return MergeResponse(
662 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
661 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
663
662
664 if (target_ref.type == 'branch' and
663 if (target_ref.type == 'branch' and
665 len(self._heads(target_ref.name)) != 1):
664 len(self._heads(target_ref.name)) != 1):
666 return MergeResponse(
665 return MergeResponse(
667 False, False, None,
666 False, False, None,
668 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
667 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
669
668
670 shadow_repo = self._get_shadow_instance(shadow_repository_path)
669 shadow_repo = self._get_shadow_instance(shadow_repository_path)
671
670
672 log.debug('Pulling in target reference %s', target_ref)
671 log.debug('Pulling in target reference %s', target_ref)
673 self._validate_pull_reference(target_ref)
672 self._validate_pull_reference(target_ref)
674 shadow_repo._local_pull(self.path, target_ref)
673 shadow_repo._local_pull(self.path, target_ref)
675 try:
674 try:
676 log.debug('Pulling in source reference %s', source_ref)
675 log.debug('Pulling in source reference %s', source_ref)
677 source_repo._validate_pull_reference(source_ref)
676 source_repo._validate_pull_reference(source_ref)
678 shadow_repo._local_pull(source_repo.path, source_ref)
677 shadow_repo._local_pull(source_repo.path, source_ref)
679 except CommitDoesNotExistError as e:
678 except CommitDoesNotExistError as e:
680 log.exception('Failure when doing local pull on hg shadow repo')
679 log.exception('Failure when doing local pull on hg shadow repo')
681 return MergeResponse(
680 return MergeResponse(
682 False, False, None, MergeFailureReason.MISSING_COMMIT)
681 False, False, None, MergeFailureReason.MISSING_COMMIT)
683
682
684 merge_commit_id = None
683 merge_commit_id = None
685 merge_failure_reason = MergeFailureReason.NONE
684 merge_failure_reason = MergeFailureReason.NONE
686
685
687 try:
686 try:
688 merge_commit_id, needs_push = shadow_repo._local_merge(
687 merge_commit_id, needs_push = shadow_repo._local_merge(
689 target_ref, merge_message, merger_name, merger_email,
688 target_ref, merge_message, merger_name, merger_email,
690 source_ref)
689 source_ref)
691 merge_possible = True
690 merge_possible = True
692 except RepositoryError as e:
691 except RepositoryError as e:
693 log.exception('Failure when doing local merge on hg shadow repo')
692 log.exception('Failure when doing local merge on hg shadow repo')
694 merge_possible = False
693 merge_possible = False
695 merge_failure_reason = MergeFailureReason.MERGE_FAILED
694 merge_failure_reason = MergeFailureReason.MERGE_FAILED
696
695
697 if merge_possible and not dry_run:
696 if merge_possible and not dry_run:
698 if needs_push:
697 if needs_push:
699 # In case the target is a bookmark, update it, so after pushing
698 # In case the target is a bookmark, update it, so after pushing
700 # the bookmark is also updated in the target.
699 # the bookmark is also updated in the target.
701 if target_ref.type == 'book':
700 if target_ref.type == 'book':
702 shadow_repo.bookmark(
701 shadow_repo.bookmark(
703 target_ref.name, revision=merge_commit_id)
702 target_ref.name, revision=merge_commit_id)
704
703
705 try:
704 try:
706 shadow_repo_with_hooks = self._get_shadow_instance(
705 shadow_repo_with_hooks = self._get_shadow_instance(
707 shadow_repository_path,
706 shadow_repository_path,
708 enable_hooks=True)
707 enable_hooks=True)
709 # Note: the push_branches option will push any new branch
708 # Note: the push_branches option will push any new branch
710 # defined in the source repository to the target. This may
709 # defined in the source repository to the target. This may
711 # be dangerous as branches are permanent in Mercurial.
710 # be dangerous as branches are permanent in Mercurial.
712 # This feature was requested in issue #441.
711 # This feature was requested in issue #441.
713 shadow_repo_with_hooks._local_push(
712 shadow_repo_with_hooks._local_push(
714 merge_commit_id, self.path, push_branches=True,
713 merge_commit_id, self.path, push_branches=True,
715 enable_hooks=True)
714 enable_hooks=True)
716 merge_succeeded = True
715 merge_succeeded = True
717 except RepositoryError:
716 except RepositoryError:
718 log.exception(
717 log.exception(
719 'Failure when doing local push from the shadow repository '
718 'Failure when doing local push from the shadow repository '
720 'to the target repository.')
719 'to the target repository.')
721 merge_succeeded = False
720 merge_succeeded = False
722 merge_failure_reason = MergeFailureReason.PUSH_FAILED
721 merge_failure_reason = MergeFailureReason.PUSH_FAILED
723 else:
722 else:
724 merge_succeeded = True
723 merge_succeeded = True
725 else:
724 else:
726 merge_succeeded = False
725 merge_succeeded = False
727
726
728 if dry_run:
727 if dry_run:
729 merge_commit_id = None
728 merge_commit_id = None
730
729
731 return MergeResponse(
730 return MergeResponse(
732 merge_possible, merge_succeeded, merge_commit_id,
731 merge_possible, merge_succeeded, merge_commit_id,
733 merge_failure_reason)
732 merge_failure_reason)
734
733
735 def _get_shadow_instance(
734 def _get_shadow_instance(
736 self, shadow_repository_path, enable_hooks=False):
735 self, shadow_repository_path, enable_hooks=False):
737 config = self.config.copy()
736 config = self.config.copy()
738 if not enable_hooks:
737 if not enable_hooks:
739 config.clear_section('hooks')
738 config.clear_section('hooks')
740 return MercurialRepository(shadow_repository_path, config)
739 return MercurialRepository(shadow_repository_path, config)
741
740
742 def _validate_pull_reference(self, reference):
741 def _validate_pull_reference(self, reference):
743 if not (reference.name in self.bookmarks or
742 if not (reference.name in self.bookmarks or
744 reference.name in self.branches or
743 reference.name in self.branches or
745 self.get_commit(reference.commit_id)):
744 self.get_commit(reference.commit_id)):
746 raise CommitDoesNotExistError(
745 raise CommitDoesNotExistError(
747 'Unknown branch, bookmark or commit id')
746 'Unknown branch, bookmark or commit id')
748
747
749 def _local_pull(self, repository_path, reference):
748 def _local_pull(self, repository_path, reference):
750 """
749 """
751 Fetch a branch, bookmark or commit from a local repository.
750 Fetch a branch, bookmark or commit from a local repository.
752 """
751 """
753 repository_path = os.path.abspath(repository_path)
752 repository_path = os.path.abspath(repository_path)
754 if repository_path == self.path:
753 if repository_path == self.path:
755 raise ValueError('Cannot pull from the same repository')
754 raise ValueError('Cannot pull from the same repository')
756
755
757 reference_type_to_option_name = {
756 reference_type_to_option_name = {
758 'book': 'bookmark',
757 'book': 'bookmark',
759 'branch': 'branch',
758 'branch': 'branch',
760 }
759 }
761 option_name = reference_type_to_option_name.get(
760 option_name = reference_type_to_option_name.get(
762 reference.type, 'revision')
761 reference.type, 'revision')
763
762
764 if option_name == 'revision':
763 if option_name == 'revision':
765 ref = reference.commit_id
764 ref = reference.commit_id
766 else:
765 else:
767 ref = reference.name
766 ref = reference.name
768
767
769 options = {option_name: [ref]}
768 options = {option_name: [ref]}
770 self._remote.pull_cmd(repository_path, hooks=False, **options)
769 self._remote.pull_cmd(repository_path, hooks=False, **options)
771
770
772 def bookmark(self, bookmark, revision=None):
771 def bookmark(self, bookmark, revision=None):
773 if isinstance(bookmark, unicode):
772 if isinstance(bookmark, unicode):
774 bookmark = safe_str(bookmark)
773 bookmark = safe_str(bookmark)
775 self._remote.bookmark(bookmark, revision=revision)
774 self._remote.bookmark(bookmark, revision=revision)
776
775
777
776
778 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
777 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
779
778
780 def _commit_factory(self, commit_id):
779 def _commit_factory(self, commit_id):
781 return self.repo.get_commit(
780 return self.repo.get_commit(
782 commit_idx=commit_id, pre_load=self.pre_load)
781 commit_idx=commit_id, pre_load=self.pre_load)