mercurial: compatibility changes for mercurial 4.6.0 release
marcink
r2735:a8937381 default
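
For orientation: the compatibility change visible in the two hunks below removes the special-case handling of commit indexes. MercurialCommit.__init__ now stores the index as given instead of routing it through repository._sanitize_commit_idx(), and get_commit() in the repository backend drops both that helper and the long -> float workaround that was used to serialize indexes for msgpack. A minimal before/after sketch, condensed from the diff (the *Before/*After class names are illustrative only, not part of the codebase):

    # Sketch only -- condensed from the hunks below, not part of the commit.

    class MercurialCommitBefore(object):
        """Pre-4.6 behaviour: indexes were coerced by a repository helper."""
        def __init__(self, repository, raw_id, idx):
            # _sanitize_commit_idx() turned `int` into `long`, because Mercurial
            # reserved int(-1) as the "does not exist" marker.
            self.idx = repository._sanitize_commit_idx(idx)

    class MercurialCommitAfter(object):
        """With the 4.6.0 compatibility change: the index is stored as given;
        the helper and the long -> float msgpack workaround in get_commit()
        are removed."""
        def __init__(self, repository, raw_id, idx):
            self.idx = idx
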
@@ -1,388 +1,388 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG commit module
22 HG commit module
23 """
23 """
24
24
25 import os
25 import os
26
26
27 from zope.cachedescriptors.property import Lazy as LazyProperty
27 from zope.cachedescriptors.property import Lazy as LazyProperty
28
28
29 from rhodecode.lib.datelib import utcdate_fromtimestamp
29 from rhodecode.lib.datelib import utcdate_fromtimestamp
30 from rhodecode.lib.utils import safe_str, safe_unicode
30 from rhodecode.lib.utils import safe_str, safe_unicode
31 from rhodecode.lib.vcs import path as vcspath
31 from rhodecode.lib.vcs import path as vcspath
32 from rhodecode.lib.vcs.backends import base
32 from rhodecode.lib.vcs.backends import base
33 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
33 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
34 from rhodecode.lib.vcs.exceptions import CommitError
34 from rhodecode.lib.vcs.exceptions import CommitError
35 from rhodecode.lib.vcs.nodes import (
35 from rhodecode.lib.vcs.nodes import (
36 AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode,
36 AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode,
37 NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode,
37 NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode,
38 LargeFileNode, LARGEFILE_PREFIX)
38 LargeFileNode, LARGEFILE_PREFIX)
39 from rhodecode.lib.vcs.utils.paths import get_dirs_for_path
39 from rhodecode.lib.vcs.utils.paths import get_dirs_for_path
40
40
41
41
42 class MercurialCommit(base.BaseCommit):
42 class MercurialCommit(base.BaseCommit):
43 """
43 """
44 Represents state of the repository at the single commit.
44 Represents state of the repository at the single commit.
45 """
45 """
46
46
47 _filter_pre_load = [
47 _filter_pre_load = [
48 # git specific property not supported here
48 # git specific property not supported here
49 "_commit",
49 "_commit",
50 ]
50 ]
51
51
52 def __init__(self, repository, raw_id, idx, pre_load=None):
52 def __init__(self, repository, raw_id, idx, pre_load=None):
53 raw_id = safe_str(raw_id)
53 raw_id = safe_str(raw_id)
54
54
55 self.repository = repository
55 self.repository = repository
56 self._remote = repository._remote
56 self._remote = repository._remote
57
57
58 self.raw_id = raw_id
58 self.raw_id = raw_id
59 self.idx = repository._sanitize_commit_idx(idx)
59 self.idx = idx
60
60
61 self._set_bulk_properties(pre_load)
61 self._set_bulk_properties(pre_load)
62
62
63 # caches
63 # caches
64 self.nodes = {}
64 self.nodes = {}
65
65
66 def _set_bulk_properties(self, pre_load):
66 def _set_bulk_properties(self, pre_load):
67 if not pre_load:
67 if not pre_load:
68 return
68 return
69 pre_load = [entry for entry in pre_load
69 pre_load = [entry for entry in pre_load
70 if entry not in self._filter_pre_load]
70 if entry not in self._filter_pre_load]
71 if not pre_load:
71 if not pre_load:
72 return
72 return
73
73
74 result = self._remote.bulk_request(self.idx, pre_load)
74 result = self._remote.bulk_request(self.idx, pre_load)
75 for attr, value in result.items():
75 for attr, value in result.items():
76 if attr in ["author", "branch", "message"]:
76 if attr in ["author", "branch", "message"]:
77 value = safe_unicode(value)
77 value = safe_unicode(value)
78 elif attr == "affected_files":
78 elif attr == "affected_files":
79 value = map(safe_unicode, value)
79 value = map(safe_unicode, value)
80 elif attr == "date":
80 elif attr == "date":
81 value = utcdate_fromtimestamp(*value)
81 value = utcdate_fromtimestamp(*value)
82 elif attr in ["children", "parents"]:
82 elif attr in ["children", "parents"]:
83 value = self._make_commits(value)
83 value = self._make_commits(value)
84 elif attr in ["phase"]:
84 elif attr in ["phase"]:
85 value = self._get_phase_text(value)
85 value = self._get_phase_text(value)
86 self.__dict__[attr] = value
86 self.__dict__[attr] = value
87
87
88 @LazyProperty
88 @LazyProperty
89 def tags(self):
89 def tags(self):
90 tags = [name for name, commit_id in self.repository.tags.iteritems()
90 tags = [name for name, commit_id in self.repository.tags.iteritems()
91 if commit_id == self.raw_id]
91 if commit_id == self.raw_id]
92 return tags
92 return tags
93
93
94 @LazyProperty
94 @LazyProperty
95 def branch(self):
95 def branch(self):
96 return safe_unicode(self._remote.ctx_branch(self.idx))
96 return safe_unicode(self._remote.ctx_branch(self.idx))
97
97
98 @LazyProperty
98 @LazyProperty
99 def bookmarks(self):
99 def bookmarks(self):
100 bookmarks = [
100 bookmarks = [
101 name for name, commit_id in self.repository.bookmarks.iteritems()
101 name for name, commit_id in self.repository.bookmarks.iteritems()
102 if commit_id == self.raw_id]
102 if commit_id == self.raw_id]
103 return bookmarks
103 return bookmarks
104
104
105 @LazyProperty
105 @LazyProperty
106 def message(self):
106 def message(self):
107 return safe_unicode(self._remote.ctx_description(self.idx))
107 return safe_unicode(self._remote.ctx_description(self.idx))
108
108
109 @LazyProperty
109 @LazyProperty
110 def committer(self):
110 def committer(self):
111 return safe_unicode(self.author)
111 return safe_unicode(self.author)
112
112
113 @LazyProperty
113 @LazyProperty
114 def author(self):
114 def author(self):
115 return safe_unicode(self._remote.ctx_user(self.idx))
115 return safe_unicode(self._remote.ctx_user(self.idx))
116
116
117 @LazyProperty
117 @LazyProperty
118 def date(self):
118 def date(self):
119 return utcdate_fromtimestamp(*self._remote.ctx_date(self.idx))
119 return utcdate_fromtimestamp(*self._remote.ctx_date(self.idx))
120
120
121 @LazyProperty
121 @LazyProperty
122 def status(self):
122 def status(self):
123 """
123 """
124 Returns modified, added, removed, deleted files for current commit
124 Returns modified, added, removed, deleted files for current commit
125 """
125 """
126 return self._remote.ctx_status(self.idx)
126 return self._remote.ctx_status(self.idx)
127
127
128 @LazyProperty
128 @LazyProperty
129 def _file_paths(self):
129 def _file_paths(self):
130 return self._remote.ctx_list(self.idx)
130 return self._remote.ctx_list(self.idx)
131
131
132 @LazyProperty
132 @LazyProperty
133 def _dir_paths(self):
133 def _dir_paths(self):
134 p = list(set(get_dirs_for_path(*self._file_paths)))
134 p = list(set(get_dirs_for_path(*self._file_paths)))
135 p.insert(0, '')
135 p.insert(0, '')
136 return p
136 return p
137
137
138 @LazyProperty
138 @LazyProperty
139 def _paths(self):
139 def _paths(self):
140 return self._dir_paths + self._file_paths
140 return self._dir_paths + self._file_paths
141
141
142 @LazyProperty
142 @LazyProperty
143 def id(self):
143 def id(self):
144 if self.last:
144 if self.last:
145 return u'tip'
145 return u'tip'
146 return self.short_id
146 return self.short_id
147
147
148 @LazyProperty
148 @LazyProperty
149 def short_id(self):
149 def short_id(self):
150 return self.raw_id[:12]
150 return self.raw_id[:12]
151
151
152 def _make_commits(self, indexes, pre_load=None):
152 def _make_commits(self, indexes, pre_load=None):
153 return [self.repository.get_commit(commit_idx=idx, pre_load=pre_load)
153 return [self.repository.get_commit(commit_idx=idx, pre_load=pre_load)
154 for idx in indexes if idx >= 0]
154 for idx in indexes if idx >= 0]
155
155
156 @LazyProperty
156 @LazyProperty
157 def parents(self):
157 def parents(self):
158 """
158 """
159 Returns list of parent commits.
159 Returns list of parent commits.
160 """
160 """
161 parents = self._remote.ctx_parents(self.idx)
161 parents = self._remote.ctx_parents(self.idx)
162 return self._make_commits(parents)
162 return self._make_commits(parents)
163
163
164 def _get_phase_text(self, phase_id):
164 def _get_phase_text(self, phase_id):
165 return {
165 return {
166 0: 'public',
166 0: 'public',
167 1: 'draft',
167 1: 'draft',
168 2: 'secret',
168 2: 'secret',
169 }.get(phase_id) or ''
169 }.get(phase_id) or ''
170
170
171 @LazyProperty
171 @LazyProperty
172 def phase(self):
172 def phase(self):
173 phase_id = self._remote.ctx_phase(self.idx)
173 phase_id = self._remote.ctx_phase(self.idx)
174 phase_text = self._get_phase_text(phase_id)
174 phase_text = self._get_phase_text(phase_id)
175
175
176 return safe_unicode(phase_text)
176 return safe_unicode(phase_text)
177
177
178 @LazyProperty
178 @LazyProperty
179 def obsolete(self):
179 def obsolete(self):
180 obsolete = self._remote.ctx_obsolete(self.idx)
180 obsolete = self._remote.ctx_obsolete(self.idx)
181 return obsolete
181 return obsolete
182
182
183 @LazyProperty
183 @LazyProperty
184 def hidden(self):
184 def hidden(self):
185 hidden = self._remote.ctx_hidden(self.idx)
185 hidden = self._remote.ctx_hidden(self.idx)
186 return hidden
186 return hidden
187
187
188 @LazyProperty
188 @LazyProperty
189 def children(self):
189 def children(self):
190 """
190 """
191 Returns list of child commits.
191 Returns list of child commits.
192 """
192 """
193 children = self._remote.ctx_children(self.idx)
193 children = self._remote.ctx_children(self.idx)
194 return self._make_commits(children)
194 return self._make_commits(children)
195
195
196 def diff(self, ignore_whitespace=True, context=3):
196 def diff(self, ignore_whitespace=True, context=3):
197 result = self._remote.ctx_diff(
197 result = self._remote.ctx_diff(
198 self.idx,
198 self.idx,
199 git=True, ignore_whitespace=ignore_whitespace, context=context)
199 git=True, ignore_whitespace=ignore_whitespace, context=context)
200 diff = ''.join(result)
200 diff = ''.join(result)
201 return MercurialDiff(diff)
201 return MercurialDiff(diff)
202
202
203 def _fix_path(self, path):
203 def _fix_path(self, path):
204 """
204 """
205 Mercurial keeps filenodes as str so we need to encode from unicode
205 Mercurial keeps filenodes as str so we need to encode from unicode
206 to str.
206 to str.
207 """
207 """
208 return safe_str(super(MercurialCommit, self)._fix_path(path))
208 return safe_str(super(MercurialCommit, self)._fix_path(path))
209
209
210 def _get_kind(self, path):
210 def _get_kind(self, path):
211 path = self._fix_path(path)
211 path = self._fix_path(path)
212 if path in self._file_paths:
212 if path in self._file_paths:
213 return NodeKind.FILE
213 return NodeKind.FILE
214 elif path in self._dir_paths:
214 elif path in self._dir_paths:
215 return NodeKind.DIR
215 return NodeKind.DIR
216 else:
216 else:
217 raise CommitError(
217 raise CommitError(
218 "Node does not exist at the given path '%s'" % (path, ))
218 "Node does not exist at the given path '%s'" % (path, ))
219
219
220 def _get_filectx(self, path):
220 def _get_filectx(self, path):
221 path = self._fix_path(path)
221 path = self._fix_path(path)
222 if self._get_kind(path) != NodeKind.FILE:
222 if self._get_kind(path) != NodeKind.FILE:
223 raise CommitError(
223 raise CommitError(
224 "File does not exist for idx %s at '%s'" % (self.raw_id, path))
224 "File does not exist for idx %s at '%s'" % (self.raw_id, path))
225 return path
225 return path
226
226
227 def get_file_mode(self, path):
227 def get_file_mode(self, path):
228 """
228 """
229 Returns stat mode of the file at the given ``path``.
229 Returns stat mode of the file at the given ``path``.
230 """
230 """
231 path = self._get_filectx(path)
231 path = self._get_filectx(path)
232 if 'x' in self._remote.fctx_flags(self.idx, path):
232 if 'x' in self._remote.fctx_flags(self.idx, path):
233 return base.FILEMODE_EXECUTABLE
233 return base.FILEMODE_EXECUTABLE
234 else:
234 else:
235 return base.FILEMODE_DEFAULT
235 return base.FILEMODE_DEFAULT
236
236
237 def is_link(self, path):
237 def is_link(self, path):
238 path = self._get_filectx(path)
238 path = self._get_filectx(path)
239 return 'l' in self._remote.fctx_flags(self.idx, path)
239 return 'l' in self._remote.fctx_flags(self.idx, path)
240
240
241 def get_file_content(self, path):
241 def get_file_content(self, path):
242 """
242 """
243 Returns content of the file at given ``path``.
243 Returns content of the file at given ``path``.
244 """
244 """
245 path = self._get_filectx(path)
245 path = self._get_filectx(path)
246 return self._remote.fctx_data(self.idx, path)
246 return self._remote.fctx_data(self.idx, path)
247
247
248 def get_file_size(self, path):
248 def get_file_size(self, path):
249 """
249 """
250 Returns size of the file at given ``path``.
250 Returns size of the file at given ``path``.
251 """
251 """
252 path = self._get_filectx(path)
252 path = self._get_filectx(path)
253 return self._remote.fctx_size(self.idx, path)
253 return self._remote.fctx_size(self.idx, path)
254
254
255 def get_file_history(self, path, limit=None, pre_load=None):
255 def get_file_history(self, path, limit=None, pre_load=None):
256 """
256 """
257 Returns history of file as reversed list of `MercurialCommit` objects
257 Returns history of file as reversed list of `MercurialCommit` objects
258 for which file at given ``path`` has been modified.
258 for which file at given ``path`` has been modified.
259 """
259 """
260 path = self._get_filectx(path)
260 path = self._get_filectx(path)
261 hist = self._remote.file_history(self.idx, path, limit)
261 hist = self._remote.file_history(self.idx, path, limit)
262 return [
262 return [
263 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
263 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
264 for commit_id in hist]
264 for commit_id in hist]
265
265
266 def get_file_annotate(self, path, pre_load=None):
266 def get_file_annotate(self, path, pre_load=None):
267 """
267 """
268 Returns a generator of four element tuples with
268 Returns a generator of four element tuples with
269 lineno, commit_id, commit lazy loader and line
269 lineno, commit_id, commit lazy loader and line
270 """
270 """
271 result = self._remote.fctx_annotate(self.idx, path)
271 result = self._remote.fctx_annotate(self.idx, path)
272
272
273 for ln_no, commit_id, content in result:
273 for ln_no, commit_id, content in result:
274 yield (
274 yield (
275 ln_no, commit_id,
275 ln_no, commit_id,
276 lambda: self.repository.get_commit(commit_id=commit_id,
276 lambda: self.repository.get_commit(commit_id=commit_id,
277 pre_load=pre_load),
277 pre_load=pre_load),
278 content)
278 content)
279
279
280 def get_nodes(self, path):
280 def get_nodes(self, path):
281 """
281 """
282 Returns combined ``DirNode`` and ``FileNode`` objects list representing
282 Returns combined ``DirNode`` and ``FileNode`` objects list representing
283 state of commit at the given ``path``. If node at the given ``path``
283 state of commit at the given ``path``. If node at the given ``path``
284 is not instance of ``DirNode``, CommitError would be raised.
284 is not instance of ``DirNode``, CommitError would be raised.
285 """
285 """
286
286
287 if self._get_kind(path) != NodeKind.DIR:
287 if self._get_kind(path) != NodeKind.DIR:
288 raise CommitError(
288 raise CommitError(
289 "Directory does not exist for idx %s at '%s'" %
289 "Directory does not exist for idx %s at '%s'" %
290 (self.idx, path))
290 (self.idx, path))
291 path = self._fix_path(path)
291 path = self._fix_path(path)
292
292
293 filenodes = [
293 filenodes = [
294 FileNode(f, commit=self) for f in self._file_paths
294 FileNode(f, commit=self) for f in self._file_paths
295 if os.path.dirname(f) == path]
295 if os.path.dirname(f) == path]
296 # TODO: johbo: Check if this can be done in a more obvious way
296 # TODO: johbo: Check if this can be done in a more obvious way
297 dirs = path == '' and '' or [
297 dirs = path == '' and '' or [
298 d for d in self._dir_paths
298 d for d in self._dir_paths
299 if d and vcspath.dirname(d) == path]
299 if d and vcspath.dirname(d) == path]
300 dirnodes = [
300 dirnodes = [
301 DirNode(d, commit=self) for d in dirs
301 DirNode(d, commit=self) for d in dirs
302 if os.path.dirname(d) == path]
302 if os.path.dirname(d) == path]
303
303
304 alias = self.repository.alias
304 alias = self.repository.alias
305 for k, vals in self._submodules.iteritems():
305 for k, vals in self._submodules.iteritems():
306 loc = vals[0]
306 loc = vals[0]
307 commit = vals[1]
307 commit = vals[1]
308 dirnodes.append(
308 dirnodes.append(
309 SubModuleNode(k, url=loc, commit=commit, alias=alias))
309 SubModuleNode(k, url=loc, commit=commit, alias=alias))
310 nodes = dirnodes + filenodes
310 nodes = dirnodes + filenodes
311 # cache nodes
311 # cache nodes
312 for node in nodes:
312 for node in nodes:
313 self.nodes[node.path] = node
313 self.nodes[node.path] = node
314 nodes.sort()
314 nodes.sort()
315
315
316 return nodes
316 return nodes
317
317
318 def get_node(self, path, pre_load=None):
318 def get_node(self, path, pre_load=None):
319 """
319 """
320 Returns `Node` object from the given `path`. If there is no node at
320 Returns `Node` object from the given `path`. If there is no node at
321 the given `path`, `NodeDoesNotExistError` would be raised.
321 the given `path`, `NodeDoesNotExistError` would be raised.
322 """
322 """
323 path = self._fix_path(path)
323 path = self._fix_path(path)
324
324
325 if path not in self.nodes:
325 if path not in self.nodes:
326 if path in self._file_paths:
326 if path in self._file_paths:
327 node = FileNode(path, commit=self, pre_load=pre_load)
327 node = FileNode(path, commit=self, pre_load=pre_load)
328 elif path in self._dir_paths:
328 elif path in self._dir_paths:
329 if path == '':
329 if path == '':
330 node = RootNode(commit=self)
330 node = RootNode(commit=self)
331 else:
331 else:
332 node = DirNode(path, commit=self)
332 node = DirNode(path, commit=self)
333 else:
333 else:
334 raise self.no_node_at_path(path)
334 raise self.no_node_at_path(path)
335
335
336 # cache node
336 # cache node
337 self.nodes[path] = node
337 self.nodes[path] = node
338 return self.nodes[path]
338 return self.nodes[path]
339
339
340 def get_largefile_node(self, path):
340 def get_largefile_node(self, path):
341
341
342 if self._remote.is_large_file(path):
342 if self._remote.is_large_file(path):
343 # content of that file regular FileNode is the hash of largefile
343 # content of that file regular FileNode is the hash of largefile
344 file_id = self.get_file_content(path).strip()
344 file_id = self.get_file_content(path).strip()
345
345
346 if self._remote.in_largefiles_store(file_id):
346 if self._remote.in_largefiles_store(file_id):
347 lf_path = self._remote.store_path(file_id)
347 lf_path = self._remote.store_path(file_id)
348 return LargeFileNode(lf_path, commit=self, org_path=path)
348 return LargeFileNode(lf_path, commit=self, org_path=path)
349 elif self._remote.in_user_cache(file_id):
349 elif self._remote.in_user_cache(file_id):
350 lf_path = self._remote.store_path(file_id)
350 lf_path = self._remote.store_path(file_id)
351 self._remote.link(file_id, path)
351 self._remote.link(file_id, path)
352 return LargeFileNode(lf_path, commit=self, org_path=path)
352 return LargeFileNode(lf_path, commit=self, org_path=path)
353
353
354 @LazyProperty
354 @LazyProperty
355 def _submodules(self):
355 def _submodules(self):
356 """
356 """
357 Returns a dictionary with submodule information from substate file
357 Returns a dictionary with submodule information from substate file
358 of hg repository.
358 of hg repository.
359 """
359 """
360 return self._remote.ctx_substate(self.idx)
360 return self._remote.ctx_substate(self.idx)
361
361
362 @LazyProperty
362 @LazyProperty
363 def affected_files(self):
363 def affected_files(self):
364 """
364 """
365 Gets a fast accessible file changes for given commit
365 Gets a fast accessible file changes for given commit
366 """
366 """
367 return self._remote.ctx_files(self.idx)
367 return self._remote.ctx_files(self.idx)
368
368
369 @property
369 @property
370 def added(self):
370 def added(self):
371 """
371 """
372 Returns list of added ``FileNode`` objects.
372 Returns list of added ``FileNode`` objects.
373 """
373 """
374 return AddedFileNodesGenerator([n for n in self.status[1]], self)
374 return AddedFileNodesGenerator([n for n in self.status[1]], self)
375
375
376 @property
376 @property
377 def changed(self):
377 def changed(self):
378 """
378 """
379 Returns list of modified ``FileNode`` objects.
379 Returns list of modified ``FileNode`` objects.
380 """
380 """
381 return ChangedFileNodesGenerator([n for n in self.status[0]], self)
381 return ChangedFileNodesGenerator([n for n in self.status[0]], self)
382
382
383 @property
383 @property
384 def removed(self):
384 def removed(self):
385 """
385 """
386 Returns list of removed ``FileNode`` objects.
386 Returns list of removed ``FileNode`` objects.
387 """
387 """
388 return RemovedFileNodesGenerator([n for n in self.status[2]], self)
388 return RemovedFileNodesGenerator([n for n in self.status[2]], self)
@@ -1,936 +1,923 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG repository module
22 HG repository module
23 """
23 """
24 import os
24 import os
25 import logging
25 import logging
26 import binascii
26 import binascii
27 import shutil
27 import shutil
28 import urllib
28 import urllib
29
29
30 from zope.cachedescriptors.property import Lazy as LazyProperty
30 from zope.cachedescriptors.property import Lazy as LazyProperty
31
31
32 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.compat import OrderedDict
33 from rhodecode.lib.datelib import (
33 from rhodecode.lib.datelib import (
34 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
35 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.utils import safe_unicode, safe_str
36 from rhodecode.lib.vcs import connection, exceptions
36 from rhodecode.lib.vcs import connection, exceptions
37 from rhodecode.lib.vcs.backends.base import (
37 from rhodecode.lib.vcs.backends.base import (
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 MergeFailureReason, Reference, BasePathPermissionChecker)
39 MergeFailureReason, Reference, BasePathPermissionChecker)
40 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
40 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
41 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
41 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
42 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
42 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
43 from rhodecode.lib.vcs.exceptions import (
43 from rhodecode.lib.vcs.exceptions import (
44 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
44 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
45 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
45 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
46 from rhodecode.lib.vcs.compat import configparser
46 from rhodecode.lib.vcs.compat import configparser
47
47
48 hexlify = binascii.hexlify
48 hexlify = binascii.hexlify
49 nullid = "\0" * 20
49 nullid = "\0" * 20
50
50
51 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
52
52
53
53
54 class MercurialRepository(BaseRepository):
54 class MercurialRepository(BaseRepository):
55 """
55 """
56 Mercurial repository backend
56 Mercurial repository backend
57 """
57 """
58 DEFAULT_BRANCH_NAME = 'default'
58 DEFAULT_BRANCH_NAME = 'default'
59
59
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 update_after_clone=False, with_wire=None):
61 update_after_clone=False, with_wire=None):
62 """
62 """
63 Raises RepositoryError if repository could not be find at the given
63 Raises RepositoryError if repository could not be find at the given
64 ``repo_path``.
64 ``repo_path``.
65
65
66 :param repo_path: local path of the repository
66 :param repo_path: local path of the repository
67 :param config: config object containing the repo configuration
67 :param config: config object containing the repo configuration
68 :param create=False: if set to True, would try to create repository if
68 :param create=False: if set to True, would try to create repository if
69 it does not exist rather than raising exception
69 it does not exist rather than raising exception
70 :param src_url=None: would try to clone repository from given location
70 :param src_url=None: would try to clone repository from given location
71 :param update_after_clone=False: sets update of working copy after
71 :param update_after_clone=False: sets update of working copy after
72 making a clone
72 making a clone
73 """
73 """
74
74
75 self.path = safe_str(os.path.abspath(repo_path))
75 self.path = safe_str(os.path.abspath(repo_path))
76 # mercurial since 4.4.X requires certain configuration to be present
76 # mercurial since 4.4.X requires certain configuration to be present
77 # because sometimes we init the repos with config we need to meet
77 # because sometimes we init the repos with config we need to meet
78 # special requirements
78 # special requirements
79 self.config = config if config else self.get_default_config(
79 self.config = config if config else self.get_default_config(
80 default=[('extensions', 'largefiles', '1')])
80 default=[('extensions', 'largefiles', '1')])
81
81
82 self._remote = connection.Hg(
82 self._remote = connection.Hg(
83 self.path, self.config, with_wire=with_wire)
83 self.path, self.config, with_wire=with_wire)
84
84
85 self._init_repo(create, src_url, update_after_clone)
85 self._init_repo(create, src_url, update_after_clone)
86
86
87 # caches
87 # caches
88 self._commit_ids = {}
88 self._commit_ids = {}
89
89
90 @LazyProperty
90 @LazyProperty
91 def commit_ids(self):
91 def commit_ids(self):
92 """
92 """
93 Returns list of commit ids, in ascending order. Being lazy
93 Returns list of commit ids, in ascending order. Being lazy
94 attribute allows external tools to inject shas from cache.
94 attribute allows external tools to inject shas from cache.
95 """
95 """
96 commit_ids = self._get_all_commit_ids()
96 commit_ids = self._get_all_commit_ids()
97 self._rebuild_cache(commit_ids)
97 self._rebuild_cache(commit_ids)
98 return commit_ids
98 return commit_ids
99
99
100 def _rebuild_cache(self, commit_ids):
100 def _rebuild_cache(self, commit_ids):
101 self._commit_ids = dict((commit_id, index)
101 self._commit_ids = dict((commit_id, index)
102 for index, commit_id in enumerate(commit_ids))
102 for index, commit_id in enumerate(commit_ids))
103
103
104 @LazyProperty
104 @LazyProperty
105 def branches(self):
105 def branches(self):
106 return self._get_branches()
106 return self._get_branches()
107
107
108 @LazyProperty
108 @LazyProperty
109 def branches_closed(self):
109 def branches_closed(self):
110 return self._get_branches(active=False, closed=True)
110 return self._get_branches(active=False, closed=True)
111
111
112 @LazyProperty
112 @LazyProperty
113 def branches_all(self):
113 def branches_all(self):
114 all_branches = {}
114 all_branches = {}
115 all_branches.update(self.branches)
115 all_branches.update(self.branches)
116 all_branches.update(self.branches_closed)
116 all_branches.update(self.branches_closed)
117 return all_branches
117 return all_branches
118
118
119 def _get_branches(self, active=True, closed=False):
119 def _get_branches(self, active=True, closed=False):
120 """
120 """
121 Gets branches for this repository
121 Gets branches for this repository
122 Returns only not closed active branches by default
122 Returns only not closed active branches by default
123
123
124 :param active: return also active branches
124 :param active: return also active branches
125 :param closed: return also closed branches
125 :param closed: return also closed branches
126
126
127 """
127 """
128 if self.is_empty():
128 if self.is_empty():
129 return {}
129 return {}
130
130
131 def get_name(ctx):
131 def get_name(ctx):
132 return ctx[0]
132 return ctx[0]
133
133
134 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
134 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
135 self._remote.branches(active, closed).items()]
135 self._remote.branches(active, closed).items()]
136
136
137 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
137 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
138
138
139 @LazyProperty
139 @LazyProperty
140 def tags(self):
140 def tags(self):
141 """
141 """
142 Gets tags for this repository
142 Gets tags for this repository
143 """
143 """
144 return self._get_tags()
144 return self._get_tags()
145
145
146 def _get_tags(self):
146 def _get_tags(self):
147 if self.is_empty():
147 if self.is_empty():
148 return {}
148 return {}
149
149
150 def get_name(ctx):
150 def get_name(ctx):
151 return ctx[0]
151 return ctx[0]
152
152
153 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
153 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
154 self._remote.tags().items()]
154 self._remote.tags().items()]
155
155
156 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
156 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
157
157
158 def tag(self, name, user, commit_id=None, message=None, date=None,
158 def tag(self, name, user, commit_id=None, message=None, date=None,
159 **kwargs):
159 **kwargs):
160 """
160 """
161 Creates and returns a tag for the given ``commit_id``.
161 Creates and returns a tag for the given ``commit_id``.
162
162
163 :param name: name for new tag
163 :param name: name for new tag
164 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
164 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
165 :param commit_id: commit id for which new tag would be created
165 :param commit_id: commit id for which new tag would be created
166 :param message: message of the tag's commit
166 :param message: message of the tag's commit
167 :param date: date of tag's commit
167 :param date: date of tag's commit
168
168
169 :raises TagAlreadyExistError: if tag with same name already exists
169 :raises TagAlreadyExistError: if tag with same name already exists
170 """
170 """
171 if name in self.tags:
171 if name in self.tags:
172 raise TagAlreadyExistError("Tag %s already exists" % name)
172 raise TagAlreadyExistError("Tag %s already exists" % name)
173 commit = self.get_commit(commit_id=commit_id)
173 commit = self.get_commit(commit_id=commit_id)
174 local = kwargs.setdefault('local', False)
174 local = kwargs.setdefault('local', False)
175
175
176 if message is None:
176 if message is None:
177 message = "Added tag %s for commit %s" % (name, commit.short_id)
177 message = "Added tag %s for commit %s" % (name, commit.short_id)
178
178
179 date, tz = date_to_timestamp_plus_offset(date)
179 date, tz = date_to_timestamp_plus_offset(date)
180
180
181 self._remote.tag(
181 self._remote.tag(
182 name, commit.raw_id, message, local, user, date, tz)
182 name, commit.raw_id, message, local, user, date, tz)
183 self._remote.invalidate_vcs_cache()
183 self._remote.invalidate_vcs_cache()
184
184
185 # Reinitialize tags
185 # Reinitialize tags
186 self.tags = self._get_tags()
186 self.tags = self._get_tags()
187 tag_id = self.tags[name]
187 tag_id = self.tags[name]
188
188
189 return self.get_commit(commit_id=tag_id)
189 return self.get_commit(commit_id=tag_id)
190
190
191 def remove_tag(self, name, user, message=None, date=None):
191 def remove_tag(self, name, user, message=None, date=None):
192 """
192 """
193 Removes tag with the given `name`.
193 Removes tag with the given `name`.
194
194
195 :param name: name of the tag to be removed
195 :param name: name of the tag to be removed
196 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
196 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
197 :param message: message of the tag's removal commit
197 :param message: message of the tag's removal commit
198 :param date: date of tag's removal commit
198 :param date: date of tag's removal commit
199
199
200 :raises TagDoesNotExistError: if tag with given name does not exists
200 :raises TagDoesNotExistError: if tag with given name does not exists
201 """
201 """
202 if name not in self.tags:
202 if name not in self.tags:
203 raise TagDoesNotExistError("Tag %s does not exist" % name)
203 raise TagDoesNotExistError("Tag %s does not exist" % name)
204 if message is None:
204 if message is None:
205 message = "Removed tag %s" % name
205 message = "Removed tag %s" % name
206 local = False
206 local = False
207
207
208 date, tz = date_to_timestamp_plus_offset(date)
208 date, tz = date_to_timestamp_plus_offset(date)
209
209
210 self._remote.tag(name, nullid, message, local, user, date, tz)
210 self._remote.tag(name, nullid, message, local, user, date, tz)
211 self._remote.invalidate_vcs_cache()
211 self._remote.invalidate_vcs_cache()
212 self.tags = self._get_tags()
212 self.tags = self._get_tags()
213
213
214 @LazyProperty
214 @LazyProperty
215 def bookmarks(self):
215 def bookmarks(self):
216 """
216 """
217 Gets bookmarks for this repository
217 Gets bookmarks for this repository
218 """
218 """
219 return self._get_bookmarks()
219 return self._get_bookmarks()
220
220
221 def _get_bookmarks(self):
221 def _get_bookmarks(self):
222 if self.is_empty():
222 if self.is_empty():
223 return {}
223 return {}
224
224
225 def get_name(ctx):
225 def get_name(ctx):
226 return ctx[0]
226 return ctx[0]
227
227
228 _bookmarks = [
228 _bookmarks = [
229 (safe_unicode(n), hexlify(h)) for n, h in
229 (safe_unicode(n), hexlify(h)) for n, h in
230 self._remote.bookmarks().items()]
230 self._remote.bookmarks().items()]
231
231
232 return OrderedDict(sorted(_bookmarks, key=get_name))
232 return OrderedDict(sorted(_bookmarks, key=get_name))
233
233
234 def _get_all_commit_ids(self):
234 def _get_all_commit_ids(self):
235 return self._remote.get_all_commit_ids('visible')
235 return self._remote.get_all_commit_ids('visible')
236
236
237 def get_diff(
237 def get_diff(
238 self, commit1, commit2, path='', ignore_whitespace=False,
238 self, commit1, commit2, path='', ignore_whitespace=False,
239 context=3, path1=None):
239 context=3, path1=None):
240 """
240 """
241 Returns (git like) *diff*, as plain text. Shows changes introduced by
241 Returns (git like) *diff*, as plain text. Shows changes introduced by
242 `commit2` since `commit1`.
242 `commit2` since `commit1`.
243
243
244 :param commit1: Entry point from which diff is shown. Can be
244 :param commit1: Entry point from which diff is shown. Can be
245 ``self.EMPTY_COMMIT`` - in this case, patch showing all
245 ``self.EMPTY_COMMIT`` - in this case, patch showing all
246 the changes since empty state of the repository until `commit2`
246 the changes since empty state of the repository until `commit2`
247 :param commit2: Until which commit changes should be shown.
247 :param commit2: Until which commit changes should be shown.
248 :param ignore_whitespace: If set to ``True``, would not show whitespace
248 :param ignore_whitespace: If set to ``True``, would not show whitespace
249 changes. Defaults to ``False``.
249 changes. Defaults to ``False``.
250 :param context: How many lines before/after changed lines should be
250 :param context: How many lines before/after changed lines should be
251 shown. Defaults to ``3``.
251 shown. Defaults to ``3``.
252 """
252 """
253 self._validate_diff_commits(commit1, commit2)
253 self._validate_diff_commits(commit1, commit2)
254 if path1 is not None and path1 != path:
254 if path1 is not None and path1 != path:
255 raise ValueError("Diff of two different paths not supported.")
255 raise ValueError("Diff of two different paths not supported.")
256
256
257 if path:
257 if path:
258 file_filter = [self.path, path]
258 file_filter = [self.path, path]
259 else:
259 else:
260 file_filter = None
260 file_filter = None
261
261
262 diff = self._remote.diff(
262 diff = self._remote.diff(
263 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
263 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
264 opt_git=True, opt_ignorews=ignore_whitespace,
264 opt_git=True, opt_ignorews=ignore_whitespace,
265 context=context)
265 context=context)
266 return MercurialDiff(diff)
266 return MercurialDiff(diff)
267
267
268 def strip(self, commit_id, branch=None):
268 def strip(self, commit_id, branch=None):
269 self._remote.strip(commit_id, update=False, backup="none")
269 self._remote.strip(commit_id, update=False, backup="none")
270
270
271 self._remote.invalidate_vcs_cache()
271 self._remote.invalidate_vcs_cache()
272 self.commit_ids = self._get_all_commit_ids()
272 self.commit_ids = self._get_all_commit_ids()
273 self._rebuild_cache(self.commit_ids)
273 self._rebuild_cache(self.commit_ids)
274
274
275 def verify(self):
275 def verify(self):
276 verify = self._remote.verify()
276 verify = self._remote.verify()
277
277
278 self._remote.invalidate_vcs_cache()
278 self._remote.invalidate_vcs_cache()
279 return verify
279 return verify
280
280
281 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
281 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
282 if commit_id1 == commit_id2:
282 if commit_id1 == commit_id2:
283 return commit_id1
283 return commit_id1
284
284
285 ancestors = self._remote.revs_from_revspec(
285 ancestors = self._remote.revs_from_revspec(
286 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
286 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
287 other_path=repo2.path)
287 other_path=repo2.path)
288 return repo2[ancestors[0]].raw_id if ancestors else None
288 return repo2[ancestors[0]].raw_id if ancestors else None
289
289
290 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
290 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
291 if commit_id1 == commit_id2:
291 if commit_id1 == commit_id2:
292 commits = []
292 commits = []
293 else:
293 else:
294 if merge:
294 if merge:
295 indexes = self._remote.revs_from_revspec(
295 indexes = self._remote.revs_from_revspec(
296 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
296 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
297 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
297 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
298 else:
298 else:
299 indexes = self._remote.revs_from_revspec(
299 indexes = self._remote.revs_from_revspec(
300 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
300 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
301 commit_id1, other_path=repo2.path)
301 commit_id1, other_path=repo2.path)
302
302
303 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
303 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
304 for idx in indexes]
304 for idx in indexes]
305
305
306 return commits
306 return commits
307
307
308 @staticmethod
308 @staticmethod
309 def check_url(url, config):
309 def check_url(url, config):
310 """
310 """
311 Function will check given url and try to verify if it's a valid
311 Function will check given url and try to verify if it's a valid
312 link. Sometimes it may happened that mercurial will issue basic
312 link. Sometimes it may happened that mercurial will issue basic
313 auth request that can cause whole API to hang when used from python
313 auth request that can cause whole API to hang when used from python
314 or other external calls.
314 or other external calls.
315
315
316 On failures it'll raise urllib2.HTTPError, exception is also thrown
316 On failures it'll raise urllib2.HTTPError, exception is also thrown
317 when the return code is non 200
317 when the return code is non 200
318 """
318 """
319 # check first if it's not an local url
319 # check first if it's not an local url
320 if os.path.isdir(url) or url.startswith('file:'):
320 if os.path.isdir(url) or url.startswith('file:'):
321 return True
321 return True
322
322
323 # Request the _remote to verify the url
323 # Request the _remote to verify the url
324 return connection.Hg.check_url(url, config.serialize())
324 return connection.Hg.check_url(url, config.serialize())
325
325
326 @staticmethod
326 @staticmethod
327 def is_valid_repository(path):
327 def is_valid_repository(path):
328 return os.path.isdir(os.path.join(path, '.hg'))
328 return os.path.isdir(os.path.join(path, '.hg'))
329
329
330 def _init_repo(self, create, src_url=None, update_after_clone=False):
330 def _init_repo(self, create, src_url=None, update_after_clone=False):
331 """
331 """
332 Function will check for mercurial repository in given path. If there
332 Function will check for mercurial repository in given path. If there
333 is no repository in that path it will raise an exception unless
333 is no repository in that path it will raise an exception unless
334 `create` parameter is set to True - in that case repository would
334 `create` parameter is set to True - in that case repository would
335 be created.
335 be created.
336
336
337 If `src_url` is given, would try to clone repository from the
337 If `src_url` is given, would try to clone repository from the
338 location at given clone_point. Additionally it'll make update to
338 location at given clone_point. Additionally it'll make update to
339 working copy accordingly to `update_after_clone` flag.
339 working copy accordingly to `update_after_clone` flag.
340 """
340 """
341 if create and os.path.exists(self.path):
341 if create and os.path.exists(self.path):
342 raise RepositoryError(
342 raise RepositoryError(
343 "Cannot create repository at %s, location already exist"
343 "Cannot create repository at %s, location already exist"
344 % self.path)
344 % self.path)
345
345
346 if src_url:
346 if src_url:
347 url = str(self._get_url(src_url))
347 url = str(self._get_url(src_url))
348 MercurialRepository.check_url(url, self.config)
348 MercurialRepository.check_url(url, self.config)
349
349
350 self._remote.clone(url, self.path, update_after_clone)
350 self._remote.clone(url, self.path, update_after_clone)
351
351
352 # Don't try to create if we've already cloned repo
352 # Don't try to create if we've already cloned repo
353 create = False
353 create = False
354
354
355 if create:
355 if create:
356 os.makedirs(self.path, mode=0755)
356 os.makedirs(self.path, mode=0755)
357
357
358 self._remote.localrepository(create)
358 self._remote.localrepository(create)
359
359
360 @LazyProperty
360 @LazyProperty
361 def in_memory_commit(self):
361 def in_memory_commit(self):
362 return MercurialInMemoryCommit(self)
362 return MercurialInMemoryCommit(self)
363
363
364 @LazyProperty
364 @LazyProperty
365 def description(self):
365 def description(self):
366 description = self._remote.get_config_value(
366 description = self._remote.get_config_value(
367 'web', 'description', untrusted=True)
367 'web', 'description', untrusted=True)
368 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
368 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
369
369
370 @LazyProperty
370 @LazyProperty
371 def contact(self):
371 def contact(self):
372 contact = (
372 contact = (
373 self._remote.get_config_value("web", "contact") or
373 self._remote.get_config_value("web", "contact") or
374 self._remote.get_config_value("ui", "username"))
374 self._remote.get_config_value("ui", "username"))
375 return safe_unicode(contact or self.DEFAULT_CONTACT)
375 return safe_unicode(contact or self.DEFAULT_CONTACT)
376
376
377 @LazyProperty
377 @LazyProperty
378 def last_change(self):
378 def last_change(self):
379 """
379 """
380 Returns last change made on this repository as
380 Returns last change made on this repository as
381 `datetime.datetime` object.
381 `datetime.datetime` object.
382 """
382 """
383 try:
383 try:
384 return self.get_commit().date
384 return self.get_commit().date
385 except RepositoryError:
385 except RepositoryError:
386 tzoffset = makedate()[1]
386 tzoffset = makedate()[1]
387 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
387 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
388
388
389 def _get_fs_mtime(self):
389 def _get_fs_mtime(self):
390 # fallback to filesystem
390 # fallback to filesystem
391 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
391 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
392 st_path = os.path.join(self.path, '.hg', "store")
392 st_path = os.path.join(self.path, '.hg', "store")
393 if os.path.exists(cl_path):
393 if os.path.exists(cl_path):
394 return os.stat(cl_path).st_mtime
394 return os.stat(cl_path).st_mtime
395 else:
395 else:
396 return os.stat(st_path).st_mtime
396 return os.stat(st_path).st_mtime
397
397
398 def _sanitize_commit_idx(self, idx):
399 # Note: Mercurial has ``int(-1)`` reserved as not existing id_or_idx
400 # number. A `long` is treated in the correct way though. So we convert
401 # `int` to `long` here to make sure it is handled correctly.
402 if isinstance(idx, int):
403 return long(idx)
404 return idx
405
406 def _get_url(self, url):
398 def _get_url(self, url):
407 """
399 """
408 Returns normalized url. If schema is not given, would fall
400 Returns normalized url. If schema is not given, would fall
409 to filesystem
401 to filesystem
410 (``file:///``) schema.
402 (``file:///``) schema.
411 """
403 """
412 url = url.encode('utf8')
404 url = url.encode('utf8')
413 if url != 'default' and '://' not in url:
405 if url != 'default' and '://' not in url:
414 url = "file:" + urllib.pathname2url(url)
406 url = "file:" + urllib.pathname2url(url)
415 return url
407 return url
416
408
417 def get_hook_location(self):
409 def get_hook_location(self):
418 """
410 """
419 returns absolute path to location where hooks are stored
411 returns absolute path to location where hooks are stored
420 """
412 """
421 return os.path.join(self.path, '.hg', '.hgrc')
413 return os.path.join(self.path, '.hg', '.hgrc')
422
414
423 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
415 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
424 """
416 """
425 Returns ``MercurialCommit`` object representing repository's
417 Returns ``MercurialCommit`` object representing repository's
426 commit at the given `commit_id` or `commit_idx`.
418 commit at the given `commit_id` or `commit_idx`.
427 """
419 """
428 if self.is_empty():
420 if self.is_empty():
429 raise EmptyRepositoryError("There are no commits yet")
421 raise EmptyRepositoryError("There are no commits yet")
430
422
431 if commit_id is not None:
423 if commit_id is not None:
432 self._validate_commit_id(commit_id)
424 self._validate_commit_id(commit_id)
433 try:
425 try:
434 idx = self._commit_ids[commit_id]
426 idx = self._commit_ids[commit_id]
435 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
427 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
436 except KeyError:
428 except KeyError:
437 pass
429 pass
438 elif commit_idx is not None:
430 elif commit_idx is not None:
439 self._validate_commit_idx(commit_idx)
431 self._validate_commit_idx(commit_idx)
440 commit_idx = self._sanitize_commit_idx(commit_idx)
441 try:
432 try:
442 id_ = self.commit_ids[commit_idx]
433 id_ = self.commit_ids[commit_idx]
443 if commit_idx < 0:
434 if commit_idx < 0:
444 commit_idx += len(self.commit_ids)
435 commit_idx += len(self.commit_ids)
445 return MercurialCommit(
436 return MercurialCommit(
446 self, id_, commit_idx, pre_load=pre_load)
437 self, id_, commit_idx, pre_load=pre_load)
447 except IndexError:
438 except IndexError:
448 commit_id = commit_idx
439 commit_id = commit_idx
449 else:
440 else:
450 commit_id = "tip"
441 commit_id = "tip"
451
442
452 # TODO Paris: Ugly hack to "serialize" long for msgpack
453 if isinstance(commit_id, long):
454 commit_id = float(commit_id)
455
456 if isinstance(commit_id, unicode):
443 if isinstance(commit_id, unicode):
457 commit_id = safe_str(commit_id)
444 commit_id = safe_str(commit_id)
458
445
459 try:
446 try:
460 raw_id, idx = self._remote.lookup(commit_id, both=True)
447 raw_id, idx = self._remote.lookup(commit_id, both=True)
461 except CommitDoesNotExistError:
448 except CommitDoesNotExistError:
462 msg = "Commit %s does not exist for %s" % (
449 msg = "Commit %s does not exist for %s" % (
463 commit_id, self)
450 commit_id, self)
464 raise CommitDoesNotExistError(msg)
451 raise CommitDoesNotExistError(msg)
465
452
466 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
453 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
467
454
468 def get_commits(
455 def get_commits(
469 self, start_id=None, end_id=None, start_date=None, end_date=None,
456 self, start_id=None, end_id=None, start_date=None, end_date=None,
470 branch_name=None, show_hidden=False, pre_load=None):
457 branch_name=None, show_hidden=False, pre_load=None):
471 """
458 """
472 Returns generator of ``MercurialCommit`` objects from start to end
459 Returns generator of ``MercurialCommit`` objects from start to end
473 (both are inclusive)
460 (both are inclusive)
474
461
475 :param start_id: None, str(commit_id)
462 :param start_id: None, str(commit_id)
476 :param end_id: None, str(commit_id)
463 :param end_id: None, str(commit_id)
477 :param start_date: if specified, commits with commit date less than
464 :param start_date: if specified, commits with commit date less than
478 ``start_date`` would be filtered out from returned set
465 ``start_date`` would be filtered out from returned set
479 :param end_date: if specified, commits with commit date greater than
466 :param end_date: if specified, commits with commit date greater than
480 ``end_date`` would be filtered out from returned set
467 ``end_date`` would be filtered out from returned set
481 :param branch_name: if specified, commits not reachable from given
468 :param branch_name: if specified, commits not reachable from given
482 branch would be filtered out from returned set
469 branch would be filtered out from returned set
483 :param show_hidden: Show hidden commits such as obsolete or hidden from
470 :param show_hidden: Show hidden commits such as obsolete or hidden from
484 Mercurial evolve
471 Mercurial evolve
485 :raise BranchDoesNotExistError: If given ``branch_name`` does not
472 :raise BranchDoesNotExistError: If given ``branch_name`` does not
486 exist.
473 exist.
487 :raise CommitDoesNotExistError: If commit for given ``start`` or
474 :raise CommitDoesNotExistError: If commit for given ``start`` or
488 ``end`` could not be found.
475 ``end`` could not be found.
489 """
476 """
        # actually we should check now if it's not an empty repo
        branch_ancestors = False
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        if start_id is not None:
            self._validate_commit_id(start_id)
            c_start = self.get_commit(commit_id=start_id)
            start_pos = self._commit_ids[c_start.raw_id]
        else:
            start_pos = None

        if end_id is not None:
            self._validate_commit_id(end_id)
            c_end = self.get_commit(commit_id=end_id)
            end_pos = max(0, self._commit_ids[c_end.raw_id])
        else:
            end_pos = None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            end_pos += 1

        commit_filter = []

        if branch_name and not branch_ancestors:
            commit_filter.append('branch("%s")' % (branch_name,))
        elif branch_name and branch_ancestors:
            commit_filter.append('ancestors(branch("%s"))' % (branch_name,))

        if start_date and not end_date:
            commit_filter.append('date(">%s")' % (start_date,))
        if end_date and not start_date:
            commit_filter.append('date("<%s")' % (end_date,))
        if start_date and end_date:
            commit_filter.append(
                'date(">%s") and date("<%s")' % (start_date, end_date))

        if not show_hidden:
            commit_filter.append('not obsolete()')
            commit_filter.append('not hidden()')

        # TODO: johbo: Figure out a simpler way for this solution
        collection_generator = CollectionGenerator
        if commit_filter:
            commit_filter = ' and '.join(map(safe_str, commit_filter))
            revisions = self._remote.rev_range([commit_filter])
            collection_generator = MercurialIndexBasedCollectionGenerator
        else:
            revisions = self.commit_ids

        if start_pos or end_pos:
            revisions = revisions[start_pos:end_pos]

        return collection_generator(self, revisions, pre_load=pre_load)
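
    # Note on the revset built above (a sketch with illustrative values, not
    # taken from real data): for branch_name='default', both dates set and
    # show_hidden=False, the joined filter handed to rev_range() would look
    # roughly like:
    #   branch("default") and date(">2018-01-01") and date("<2018-02-01")
    #   and not obsolete() and not hidden()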

    def pull(self, url, commit_ids=None):
        """
        Tries to pull changes from an external location.

        :param commit_ids: Optional. Can be set to a list of commit ids
            which shall be pulled from the other repository.
        """
        url = self._get_url(url)
        self._remote.pull(url, commit_ids=commit_ids)
        self._remote.invalidate_vcs_cache()
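
    # Illustrative usage (hypothetical URL and ids):
    #   repo.pull('https://code.example.com/hg/project',
    #             commit_ids=['d4f2a9c1e3b0'])
    # restricts the pull to the listed commits; with commit_ids=None the
    # remote pull presumably fetches everything new from the other side.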

    def push(self, url):
        url = self._get_url(url)
        self._remote.sync_push(url)

    def _local_clone(self, clone_path):
        """
        Create a local clone of the current repo.
        """
        self._remote.clone(self.path, clone_path, update_after_clone=True,
                           hooks=False)

    def _update(self, revision, clean=False):
        """
        Update the working copy to the specified revision.
        """
        log.debug('Doing checkout to commit: `%s` for %s', revision, self)
        self._remote.update(revision, clean=clean)

    def _identify(self):
        """
        Return the current state of the working directory.
        """
        return self._remote.identify().strip().rstrip('+')

    def _heads(self, branch=None):
        """
        Return the commit ids of the repository heads.
        """
        return self._remote.heads(branch=branch).strip().split(' ')

    def _ancestor(self, revision1, revision2):
        """
        Return the common ancestor of the two revisions.
        """
        return self._remote.ancestor(revision1, revision2)

    def _local_push(
            self, revision, repository_path, push_branches=False,
            enable_hooks=False):
        """
        Push the given revision to the specified repository.

        :param push_branches: allow creating new branches in the target repo.
        """
        self._remote.push(
            [revision], repository_path, hooks=enable_hooks,
            push_branches=push_branches)

    def _local_merge(self, target_ref, merge_message, user_name, user_email,
                     source_ref, use_rebase=False, dry_run=False):
        """
        Merge the given source_ref into the checked out revision.

        Returns the commit id of the merge and a boolean indicating if the
        commit needs to be pushed.
        """
        self._update(target_ref.commit_id)

        ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
        is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)

        if ancestor == source_ref.commit_id:
            # Nothing to do, the changes were already integrated
            return target_ref.commit_id, False

        elif ancestor == target_ref.commit_id and is_the_same_branch:
            # In this case we should force a commit message
            return source_ref.commit_id, True

        if use_rebase:
            try:
                bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
                                                target_ref.commit_id)
                self.bookmark(bookmark_name, revision=source_ref.commit_id)
                self._remote.rebase(
                    source=source_ref.commit_id, dest=target_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._update(bookmark_name)
                return self._identify(), True
            except RepositoryError:
                # The rebase-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while rebasing shadow repo during merge.')

                # Cleanup any rebase leftovers
                self._remote.invalidate_vcs_cache()
                self._remote.rebase(abort=True)
                self._remote.invalidate_vcs_cache()
                self._remote.update(clean=True)
                raise
        else:
            try:
                self._remote.merge(source_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._remote.commit(
                    message=safe_str(merge_message),
                    username=safe_str('%s <%s>' % (user_name, user_email)))
                self._remote.invalidate_vcs_cache()
                return self._identify(), True
            except RepositoryError:
                # Cleanup any merge leftovers
                self._remote.update(clean=True)
                raise
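
    # Sketch of the rebase path above, with illustrative commit ids: for a
    # source commit 'abc...' and target commit 'def...', a temporary bookmark
    # named 'rcbookabc...def...' is placed on the source commit, the source is
    # rebased onto the target, the working copy is updated to that bookmark,
    # and the resulting head (via identify()) is returned with needs_push=True.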

    def _local_close(self, target_ref, user_name, user_email,
                     source_ref, close_message=''):
        """
        Close the branch of the given source_ref.

        Returns the commit id of the close and a boolean indicating if the
        commit needs to be pushed.
        """
        self._update(source_ref.commit_id)
        message = close_message or "Closing branch: `{}`".format(source_ref.name)
        try:
            self._remote.commit(
                message=safe_str(message),
                username=safe_str('%s <%s>' % (user_name, user_email)),
                close_branch=True)
            self._remote.invalidate_vcs_cache()
            return self._identify(), True
        except RepositoryError:
            # Cleanup any commit leftovers
            self._remote.update(clean=True)
            raise
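
    # Note: committing with close_branch=True records a closing changeset, so
    # the source branch no longer appears as an open head in Mercurial. For a
    # hypothetical branch 'feature-x' the default message above would read
    # "Closing branch: `feature-x`".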

    def _is_the_same_branch(self, target_ref, source_ref):
        return (
            self._get_branch_name(target_ref) ==
            self._get_branch_name(source_ref))

    def _get_branch_name(self, ref):
        if ref.type == 'branch':
            return ref.name
        return self._remote.ctx_branch(ref.commit_id)

    def _get_shadow_repository_path(self, workspace_id):
        # The name of the shadow repository must start with '.', so it is
        # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
        return os.path.join(
            os.path.dirname(self.path),
            '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
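
    # Example with hypothetical values: for self.path '/srv/repos/project' and
    # workspace_id 'pr-42' this yields '/srv/repos/.__shadow_project_pr-42'.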

    def _maybe_prepare_merge_workspace(self, workspace_id, unused_target_ref, unused_source_ref):
        shadow_repository_path = self._get_shadow_repository_path(workspace_id)
        if not os.path.exists(shadow_repository_path):
            self._local_clone(shadow_repository_path)
            log.debug(
                'Prepared shadow repository in %s', shadow_repository_path)

        return shadow_repository_path

    def cleanup_merge_workspace(self, workspace_id):
        shadow_repository_path = self._get_shadow_repository_path(workspace_id)
        shutil.rmtree(shadow_repository_path, ignore_errors=True)

    def _merge_repo(self, shadow_repository_path, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):

        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        if target_ref.commit_id not in self._heads():
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)

        try:
            if (target_ref.type == 'branch' and
                    len(self._heads(target_ref.name)) != 1):
                return MergeResponse(
                    False, False, None,
                    MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
        except CommitDoesNotExistError:
            log.exception('Failure when looking up branch heads on hg target')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF)

        shadow_repo = self._get_shadow_instance(shadow_repository_path)

        log.debug('Pulling in target reference %s', target_ref)
        self._validate_pull_reference(target_ref)
        shadow_repo._local_pull(self.path, target_ref)
        try:
            log.debug('Pulling in source reference %s', source_ref)
            source_repo._validate_pull_reference(source_ref)
            shadow_repo._local_pull(source_repo.path, source_ref)
        except CommitDoesNotExistError:
            log.exception('Failure when doing local pull on hg shadow repo')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF)

        merge_ref = None
        merge_commit_id = None
        close_commit_id = None
        merge_failure_reason = MergeFailureReason.NONE

        # enforce that close_branch is used only when we source from an
        # actual branch
        close_branch = close_branch and source_ref.type == 'branch'

        # don't allow closing the branch if source and target are the same
        close_branch = close_branch and source_ref.name != target_ref.name

        needs_push_on_close = False
        if close_branch and not use_rebase and not dry_run:
            try:
                close_commit_id, needs_push_on_close = shadow_repo._local_close(
                    target_ref, merger_name, merger_email, source_ref)
                merge_possible = True
            except RepositoryError:
                log.exception(
                    'Failure when doing close branch on hg shadow repo')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
        else:
            merge_possible = True

        needs_push = False
        if merge_possible:
            try:
                merge_commit_id, needs_push = shadow_repo._local_merge(
                    target_ref, merge_message, merger_name, merger_email,
                    source_ref, use_rebase=use_rebase, dry_run=dry_run)
                merge_possible = True

                # read the state of the close action, in case it also
                # requires a push
                needs_push = needs_push or needs_push_on_close

                # Set a bookmark pointing to the merge commit. This bookmark
                # may be used to easily identify the last successful merge
                # commit in the shadow repository.
                shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
                merge_ref = Reference('book', 'pr-merge', merge_commit_id)
            except SubrepoMergeError:
                log.exception(
                    'Subrepo merge error during local merge on hg shadow repo.')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
                needs_push = False
            except RepositoryError:
                log.exception('Failure when doing local merge on hg shadow repo')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
                needs_push = False

        if merge_possible and not dry_run:
            if needs_push:
                # In case the target is a bookmark, update it, so after pushing
                # the bookmark is also updated in the target.
                if target_ref.type == 'book':
                    shadow_repo.bookmark(
                        target_ref.name, revision=merge_commit_id)
                try:
                    shadow_repo_with_hooks = self._get_shadow_instance(
                        shadow_repository_path,
                        enable_hooks=True)
                    # This is the actual merge action, we push from shadow
                    # into origin.
                    # Note: the push_branches option will push any new branch
                    # defined in the source repository to the target. This may
                    # be dangerous as branches are permanent in Mercurial.
                    # This feature was requested in issue #441.
                    shadow_repo_with_hooks._local_push(
                        merge_commit_id, self.path, push_branches=True,
                        enable_hooks=True)

                    # maybe we also need to push the close_commit_id
                    if close_commit_id:
                        shadow_repo_with_hooks._local_push(
                            close_commit_id, self.path, push_branches=True,
                            enable_hooks=True)
                    merge_succeeded = True
                except RepositoryError:
                    log.exception(
                        'Failure when doing local push from the shadow '
                        'repository to the target repository.')
                    merge_succeeded = False
                    merge_failure_reason = MergeFailureReason.PUSH_FAILED
            else:
                merge_succeeded = True
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason)
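
    # Rough guide to the MergeResponse returned above: merge_possible is False
    # when a precondition or the merge itself failed (missing ref, multiple
    # heads, merge conflict); merge_succeeded is True only when this was not a
    # dry run and the merge, plus any required push back to this repository,
    # completed; merge_ref points at the 'pr-merge' bookmark in the shadow
    # repo; merge_failure_reason carries one of the MergeFailureReason values
    # seen above.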

    def _get_shadow_instance(
            self, shadow_repository_path, enable_hooks=False):
        config = self.config.copy()
        if not enable_hooks:
            config.clear_section('hooks')
        return MercurialRepository(shadow_repository_path, config)

    def _validate_pull_reference(self, reference):
        if not (reference.name in self.bookmarks or
                reference.name in self.branches or
                self.get_commit(reference.commit_id)):
            raise CommitDoesNotExistError(
                'Unknown branch, bookmark or commit id')

    def _local_pull(self, repository_path, reference):
        """
        Fetch a branch, bookmark or commit from a local repository.
        """
        repository_path = os.path.abspath(repository_path)
        if repository_path == self.path:
            raise ValueError('Cannot pull from the same repository')

        reference_type_to_option_name = {
            'book': 'bookmark',
            'branch': 'branch',
        }
        option_name = reference_type_to_option_name.get(
            reference.type, 'revision')

        if option_name == 'revision':
            ref = reference.commit_id
        else:
            ref = reference.name

        options = {option_name: [ref]}
        self._remote.pull_cmd(repository_path, hooks=False, **options)
        self._remote.invalidate_vcs_cache()
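
    # Example of the option mapping above, with a hypothetical reference: a
    # Reference('book', 'feature-x', '<commit id>') becomes
    #   pull_cmd(repository_path, hooks=False, bookmark=['feature-x'])
    # a 'branch' reference becomes branch=['<name>'], and any other reference
    # type falls back to revision=['<commit id>'].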

    def bookmark(self, bookmark, revision=None):
        if isinstance(bookmark, unicode):
            bookmark = safe_str(bookmark)
        self._remote.bookmark(bookmark, revision=revision)
        self._remote.invalidate_vcs_cache()

    def get_path_permissions(self, username):
        hgacl_file = os.path.join(self.path, '.hg/hgacl')

        def read_patterns(suffix):
            svalue = None
            try:
                svalue = hgacl.get('narrowhgacl', username + suffix)
            except configparser.NoOptionError:
                try:
                    svalue = hgacl.get('narrowhgacl', 'default' + suffix)
                except configparser.NoOptionError:
                    pass
            if not svalue:
                return None
            result = ['/']
            for pattern in svalue.split():
                result.append(pattern)
                if '*' not in pattern and '?' not in pattern:
                    result.append(pattern + '/*')
            return result

        if os.path.exists(hgacl_file):
            try:
                hgacl = configparser.RawConfigParser()
                hgacl.read(hgacl_file)

                includes = read_patterns('.includes')
                excludes = read_patterns('.excludes')
                return BasePathPermissionChecker.create_from_patterns(
                    includes, excludes)
            except BaseException as e:
                msg = 'Cannot read ACL settings from {} on {}: {}'.format(
                    hgacl_file, self.name, e)
                raise exceptions.RepositoryRequirementError(msg)
        else:
            return None
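
    # Illustrative .hg/hgacl file that read_patterns() above would consume
    # (hypothetical user and paths):
    #
    #   [narrowhgacl]
    #   john.includes = docs src/module
    #   john.excludes = src/module/secret
    #   default.includes = docs
    #
    # For username 'john' the include patterns expand to
    # ['/', 'docs', 'docs/*', 'src/module', 'src/module/*'].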


class MercurialIndexBasedCollectionGenerator(CollectionGenerator):

    def _commit_factory(self, commit_id):
        return self.repo.get_commit(
            commit_idx=commit_id, pre_load=self.pre_load)
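
# Note: the class name and the use of commit_idx= suggest that the filtered
# path above (rev_range() with a revset) yields numeric revision indexes
# rather than full commit ids, so each entry is resolved by index here.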