##// END OF EJS Templates
vcs: Move vcsserver cache invalidation to mercurial backend....
Martin Bornhold -
r407:243d422c default
parent child Browse files
Show More
@@ -1,785 +1,796 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2016 RhodeCode GmbH
3 # Copyright (C) 2014-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG repository module
22 HG repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import binascii
26 import binascii
27 import os
27 import os
28 import shutil
28 import shutil
29 import urllib
29 import urllib
30
30
31 from zope.cachedescriptors.property import Lazy as LazyProperty
31 from zope.cachedescriptors.property import Lazy as LazyProperty
32
32
33 from rhodecode.lib.compat import OrderedDict
33 from rhodecode.lib.compat import OrderedDict
34 from rhodecode.lib.datelib import (
34 from rhodecode.lib.datelib import (
35 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate,
35 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate,
36 date_astimestamp)
36 date_astimestamp)
37 from rhodecode.lib.utils import safe_unicode, safe_str
37 from rhodecode.lib.utils import safe_unicode, safe_str
38 from rhodecode.lib.vcs import connection
38 from rhodecode.lib.vcs import connection
39 from rhodecode.lib.vcs.backends.base import (
39 from rhodecode.lib.vcs.backends.base import (
40 BaseRepository, CollectionGenerator, Config, MergeResponse,
40 BaseRepository, CollectionGenerator, Config, MergeResponse,
41 MergeFailureReason)
41 MergeFailureReason)
42 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
42 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
43 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
43 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
44 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
44 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
45 from rhodecode.lib.vcs.exceptions import (
45 from rhodecode.lib.vcs.exceptions import (
46 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
46 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
47 TagDoesNotExistError, CommitDoesNotExistError)
47 TagDoesNotExistError, CommitDoesNotExistError)
48
48
49 hexlify = binascii.hexlify
49 hexlify = binascii.hexlify
50 nullid = "\0" * 20
50 nullid = "\0" * 20
51
51
52 log = logging.getLogger(__name__)
52 log = logging.getLogger(__name__)
53
53
54
54
55 class MercurialRepository(BaseRepository):
55 class MercurialRepository(BaseRepository):
56 """
56 """
57 Mercurial repository backend
57 Mercurial repository backend
58 """
58 """
59 DEFAULT_BRANCH_NAME = 'default'
59 DEFAULT_BRANCH_NAME = 'default'
60
60
61 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 update_after_clone=False, with_wire=None):
62 update_after_clone=False, with_wire=None):
63 """
63 """
64 Raises RepositoryError if repository could not be find at the given
64 Raises RepositoryError if repository could not be find at the given
65 ``repo_path``.
65 ``repo_path``.
66
66
67 :param repo_path: local path of the repository
67 :param repo_path: local path of the repository
68 :param config: config object containing the repo configuration
68 :param config: config object containing the repo configuration
69 :param create=False: if set to True, would try to create repository if
69 :param create=False: if set to True, would try to create repository if
70 it does not exist rather than raising exception
70 it does not exist rather than raising exception
71 :param src_url=None: would try to clone repository from given location
71 :param src_url=None: would try to clone repository from given location
72 :param update_after_clone=False: sets update of working copy after
72 :param update_after_clone=False: sets update of working copy after
73 making a clone
73 making a clone
74 """
74 """
75 self.path = safe_str(os.path.abspath(repo_path))
75 self.path = safe_str(os.path.abspath(repo_path))
76 self.config = config if config else Config()
76 self.config = config if config else Config()
77 self._remote = connection.Hg(
77 self._remote = connection.Hg(
78 self.path, self.config, with_wire=with_wire)
78 self.path, self.config, with_wire=with_wire)
79
79
80 self._init_repo(create, src_url, update_after_clone)
80 self._init_repo(create, src_url, update_after_clone)
81
81
82 # caches
82 # caches
83 self._commit_ids = {}
83 self._commit_ids = {}
84
84
85 @LazyProperty
85 @LazyProperty
86 def commit_ids(self):
86 def commit_ids(self):
87 """
87 """
88 Returns list of commit ids, in ascending order. Being lazy
88 Returns list of commit ids, in ascending order. Being lazy
89 attribute allows external tools to inject shas from cache.
89 attribute allows external tools to inject shas from cache.
90 """
90 """
91 commit_ids = self._get_all_commit_ids()
91 commit_ids = self._get_all_commit_ids()
92 self._rebuild_cache(commit_ids)
92 self._rebuild_cache(commit_ids)
93 return commit_ids
93 return commit_ids
94
94
95 def _rebuild_cache(self, commit_ids):
95 def _rebuild_cache(self, commit_ids):
96 self._commit_ids = dict((commit_id, index)
96 self._commit_ids = dict((commit_id, index)
97 for index, commit_id in enumerate(commit_ids))
97 for index, commit_id in enumerate(commit_ids))
98
98
99 @LazyProperty
99 @LazyProperty
100 def branches(self):
100 def branches(self):
101 return self._get_branches()
101 return self._get_branches()
102
102
103 @LazyProperty
103 @LazyProperty
104 def branches_closed(self):
104 def branches_closed(self):
105 return self._get_branches(active=False, closed=True)
105 return self._get_branches(active=False, closed=True)
106
106
107 @LazyProperty
107 @LazyProperty
108 def branches_all(self):
108 def branches_all(self):
109 all_branches = {}
109 all_branches = {}
110 all_branches.update(self.branches)
110 all_branches.update(self.branches)
111 all_branches.update(self.branches_closed)
111 all_branches.update(self.branches_closed)
112 return all_branches
112 return all_branches
113
113
114 def _get_branches(self, active=True, closed=False):
114 def _get_branches(self, active=True, closed=False):
115 """
115 """
116 Gets branches for this repository
116 Gets branches for this repository
117 Returns only not closed active branches by default
117 Returns only not closed active branches by default
118
118
119 :param active: return also active branches
119 :param active: return also active branches
120 :param closed: return also closed branches
120 :param closed: return also closed branches
121
121
122 """
122 """
123 if self.is_empty():
123 if self.is_empty():
124 return {}
124 return {}
125
125
126 def get_name(ctx):
126 def get_name(ctx):
127 return ctx[0]
127 return ctx[0]
128
128
129 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
129 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
130 self._remote.branches(active, closed).items()]
130 self._remote.branches(active, closed).items()]
131
131
132 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
132 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
133
133
134 @LazyProperty
134 @LazyProperty
135 def tags(self):
135 def tags(self):
136 """
136 """
137 Gets tags for this repository
137 Gets tags for this repository
138 """
138 """
139 return self._get_tags()
139 return self._get_tags()
140
140
141 def _get_tags(self):
141 def _get_tags(self):
142 if self.is_empty():
142 if self.is_empty():
143 return {}
143 return {}
144
144
145 def get_name(ctx):
145 def get_name(ctx):
146 return ctx[0]
146 return ctx[0]
147
147
148 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
148 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
149 self._remote.tags().items()]
149 self._remote.tags().items()]
150
150
151 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
151 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
152
152
153 def tag(self, name, user, commit_id=None, message=None, date=None,
153 def tag(self, name, user, commit_id=None, message=None, date=None,
154 **kwargs):
154 **kwargs):
155 """
155 """
156 Creates and returns a tag for the given ``commit_id``.
156 Creates and returns a tag for the given ``commit_id``.
157
157
158 :param name: name for new tag
158 :param name: name for new tag
159 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
159 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
160 :param commit_id: commit id for which new tag would be created
160 :param commit_id: commit id for which new tag would be created
161 :param message: message of the tag's commit
161 :param message: message of the tag's commit
162 :param date: date of tag's commit
162 :param date: date of tag's commit
163
163
164 :raises TagAlreadyExistError: if tag with same name already exists
164 :raises TagAlreadyExistError: if tag with same name already exists
165 """
165 """
166 if name in self.tags:
166 if name in self.tags:
167 raise TagAlreadyExistError("Tag %s already exists" % name)
167 raise TagAlreadyExistError("Tag %s already exists" % name)
168 commit = self.get_commit(commit_id=commit_id)
168 commit = self.get_commit(commit_id=commit_id)
169 local = kwargs.setdefault('local', False)
169 local = kwargs.setdefault('local', False)
170
170
171 if message is None:
171 if message is None:
172 message = "Added tag %s for commit %s" % (name, commit.short_id)
172 message = "Added tag %s for commit %s" % (name, commit.short_id)
173
173
174 date, tz = date_to_timestamp_plus_offset(date)
174 date, tz = date_to_timestamp_plus_offset(date)
175
175
176 self._remote.tag(
176 self._remote.tag(
177 name, commit.raw_id, message, local, user, date, tz)
177 name, commit.raw_id, message, local, user, date, tz)
178 self._remote.invalidate_vcs_cache()
178
179
179 # Reinitialize tags
180 # Reinitialize tags
180 self.tags = self._get_tags()
181 self.tags = self._get_tags()
181 tag_id = self.tags[name]
182 tag_id = self.tags[name]
182
183
183 return self.get_commit(commit_id=tag_id)
184 return self.get_commit(commit_id=tag_id)
184
185
185 def remove_tag(self, name, user, message=None, date=None):
186 def remove_tag(self, name, user, message=None, date=None):
186 """
187 """
187 Removes tag with the given `name`.
188 Removes tag with the given `name`.
188
189
189 :param name: name of the tag to be removed
190 :param name: name of the tag to be removed
190 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
191 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
191 :param message: message of the tag's removal commit
192 :param message: message of the tag's removal commit
192 :param date: date of tag's removal commit
193 :param date: date of tag's removal commit
193
194
194 :raises TagDoesNotExistError: if tag with given name does not exists
195 :raises TagDoesNotExistError: if tag with given name does not exists
195 """
196 """
196 if name not in self.tags:
197 if name not in self.tags:
197 raise TagDoesNotExistError("Tag %s does not exist" % name)
198 raise TagDoesNotExistError("Tag %s does not exist" % name)
198 if message is None:
199 if message is None:
199 message = "Removed tag %s" % name
200 message = "Removed tag %s" % name
200 local = False
201 local = False
201
202
202 date, tz = date_to_timestamp_plus_offset(date)
203 date, tz = date_to_timestamp_plus_offset(date)
203
204
204 self._remote.tag(name, nullid, message, local, user, date, tz)
205 self._remote.tag(name, nullid, message, local, user, date, tz)
206 self._remote.invalidate_vcs_cache()
205 self.tags = self._get_tags()
207 self.tags = self._get_tags()
206
208
207 @LazyProperty
209 @LazyProperty
208 def bookmarks(self):
210 def bookmarks(self):
209 """
211 """
210 Gets bookmarks for this repository
212 Gets bookmarks for this repository
211 """
213 """
212 return self._get_bookmarks()
214 return self._get_bookmarks()
213
215
214 def _get_bookmarks(self):
216 def _get_bookmarks(self):
215 if self.is_empty():
217 if self.is_empty():
216 return {}
218 return {}
217
219
218 def get_name(ctx):
220 def get_name(ctx):
219 return ctx[0]
221 return ctx[0]
220
222
221 _bookmarks = [
223 _bookmarks = [
222 (safe_unicode(n), hexlify(h)) for n, h in
224 (safe_unicode(n), hexlify(h)) for n, h in
223 self._remote.bookmarks().items()]
225 self._remote.bookmarks().items()]
224
226
225 return OrderedDict(sorted(_bookmarks, key=get_name))
227 return OrderedDict(sorted(_bookmarks, key=get_name))
226
228
227 def _get_all_commit_ids(self):
229 def _get_all_commit_ids(self):
228 return self._remote.get_all_commit_ids('visible')
230 return self._remote.get_all_commit_ids('visible')
229
231
230 def get_diff(
232 def get_diff(
231 self, commit1, commit2, path='', ignore_whitespace=False,
233 self, commit1, commit2, path='', ignore_whitespace=False,
232 context=3, path1=None):
234 context=3, path1=None):
233 """
235 """
234 Returns (git like) *diff*, as plain text. Shows changes introduced by
236 Returns (git like) *diff*, as plain text. Shows changes introduced by
235 `commit2` since `commit1`.
237 `commit2` since `commit1`.
236
238
237 :param commit1: Entry point from which diff is shown. Can be
239 :param commit1: Entry point from which diff is shown. Can be
238 ``self.EMPTY_COMMIT`` - in this case, patch showing all
240 ``self.EMPTY_COMMIT`` - in this case, patch showing all
239 the changes since empty state of the repository until `commit2`
241 the changes since empty state of the repository until `commit2`
240 :param commit2: Until which commit changes should be shown.
242 :param commit2: Until which commit changes should be shown.
241 :param ignore_whitespace: If set to ``True``, would not show whitespace
243 :param ignore_whitespace: If set to ``True``, would not show whitespace
242 changes. Defaults to ``False``.
244 changes. Defaults to ``False``.
243 :param context: How many lines before/after changed lines should be
245 :param context: How many lines before/after changed lines should be
244 shown. Defaults to ``3``.
246 shown. Defaults to ``3``.
245 """
247 """
246 self._validate_diff_commits(commit1, commit2)
248 self._validate_diff_commits(commit1, commit2)
247 if path1 is not None and path1 != path:
249 if path1 is not None and path1 != path:
248 raise ValueError("Diff of two different paths not supported.")
250 raise ValueError("Diff of two different paths not supported.")
249
251
250 if path:
252 if path:
251 file_filter = [self.path, path]
253 file_filter = [self.path, path]
252 else:
254 else:
253 file_filter = None
255 file_filter = None
254
256
255 diff = self._remote.diff(
257 diff = self._remote.diff(
256 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
258 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
257 opt_git=True, opt_ignorews=ignore_whitespace,
259 opt_git=True, opt_ignorews=ignore_whitespace,
258 context=context)
260 context=context)
259 return MercurialDiff(diff)
261 return MercurialDiff(diff)
260
262
261 def strip(self, commit_id, branch=None):
263 def strip(self, commit_id, branch=None):
262 self._remote.strip(commit_id, update=False, backup="none")
264 self._remote.strip(commit_id, update=False, backup="none")
263
265
266 self._remote.invalidate_vcs_cache()
264 self.commit_ids = self._get_all_commit_ids()
267 self.commit_ids = self._get_all_commit_ids()
265 self._rebuild_cache(self.commit_ids)
268 self._rebuild_cache(self.commit_ids)
266
269
267 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
270 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
268 if commit_id1 == commit_id2:
271 if commit_id1 == commit_id2:
269 return commit_id1
272 return commit_id1
270
273
271 ancestors = self._remote.revs_from_revspec(
274 ancestors = self._remote.revs_from_revspec(
272 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
275 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
273 other_path=repo2.path)
276 other_path=repo2.path)
274 return repo2[ancestors[0]].raw_id if ancestors else None
277 return repo2[ancestors[0]].raw_id if ancestors else None
275
278
276 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
279 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
277 if commit_id1 == commit_id2:
280 if commit_id1 == commit_id2:
278 commits = []
281 commits = []
279 else:
282 else:
280 if merge:
283 if merge:
281 indexes = self._remote.revs_from_revspec(
284 indexes = self._remote.revs_from_revspec(
282 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
285 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
283 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
286 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
284 else:
287 else:
285 indexes = self._remote.revs_from_revspec(
288 indexes = self._remote.revs_from_revspec(
286 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
289 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
287 commit_id1, other_path=repo2.path)
290 commit_id1, other_path=repo2.path)
288
291
289 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
292 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
290 for idx in indexes]
293 for idx in indexes]
291
294
292 return commits
295 return commits
293
296
294 @staticmethod
297 @staticmethod
295 def check_url(url, config):
298 def check_url(url, config):
296 """
299 """
297 Function will check given url and try to verify if it's a valid
300 Function will check given url and try to verify if it's a valid
298 link. Sometimes it may happened that mercurial will issue basic
301 link. Sometimes it may happened that mercurial will issue basic
299 auth request that can cause whole API to hang when used from python
302 auth request that can cause whole API to hang when used from python
300 or other external calls.
303 or other external calls.
301
304
302 On failures it'll raise urllib2.HTTPError, exception is also thrown
305 On failures it'll raise urllib2.HTTPError, exception is also thrown
303 when the return code is non 200
306 when the return code is non 200
304 """
307 """
305 # check first if it's not an local url
308 # check first if it's not an local url
306 if os.path.isdir(url) or url.startswith('file:'):
309 if os.path.isdir(url) or url.startswith('file:'):
307 return True
310 return True
308
311
309 # Request the _remote to verify the url
312 # Request the _remote to verify the url
310 return connection.Hg.check_url(url, config.serialize())
313 return connection.Hg.check_url(url, config.serialize())
311
314
312 @staticmethod
315 @staticmethod
313 def is_valid_repository(path):
316 def is_valid_repository(path):
314 return os.path.isdir(os.path.join(path, '.hg'))
317 return os.path.isdir(os.path.join(path, '.hg'))
315
318
316 def _init_repo(self, create, src_url=None, update_after_clone=False):
319 def _init_repo(self, create, src_url=None, update_after_clone=False):
317 """
320 """
318 Function will check for mercurial repository in given path. If there
321 Function will check for mercurial repository in given path. If there
319 is no repository in that path it will raise an exception unless
322 is no repository in that path it will raise an exception unless
320 `create` parameter is set to True - in that case repository would
323 `create` parameter is set to True - in that case repository would
321 be created.
324 be created.
322
325
323 If `src_url` is given, would try to clone repository from the
326 If `src_url` is given, would try to clone repository from the
324 location at given clone_point. Additionally it'll make update to
327 location at given clone_point. Additionally it'll make update to
325 working copy accordingly to `update_after_clone` flag.
328 working copy accordingly to `update_after_clone` flag.
326 """
329 """
327 if create and os.path.exists(self.path):
330 if create and os.path.exists(self.path):
328 raise RepositoryError(
331 raise RepositoryError(
329 "Cannot create repository at %s, location already exist"
332 "Cannot create repository at %s, location already exist"
330 % self.path)
333 % self.path)
331
334
332 if src_url:
335 if src_url:
333 url = str(self._get_url(src_url))
336 url = str(self._get_url(src_url))
334 MercurialRepository.check_url(url, self.config)
337 MercurialRepository.check_url(url, self.config)
335
338
336 self._remote.clone(url, self.path, update_after_clone)
339 self._remote.clone(url, self.path, update_after_clone)
337
340
338 # Don't try to create if we've already cloned repo
341 # Don't try to create if we've already cloned repo
339 create = False
342 create = False
340
343
341 if create:
344 if create:
342 os.makedirs(self.path, mode=0755)
345 os.makedirs(self.path, mode=0755)
343
346
344 self._remote.localrepository(create)
347 self._remote.localrepository(create)
345
348
346 @LazyProperty
349 @LazyProperty
347 def in_memory_commit(self):
350 def in_memory_commit(self):
348 return MercurialInMemoryCommit(self)
351 return MercurialInMemoryCommit(self)
349
352
350 @LazyProperty
353 @LazyProperty
351 def description(self):
354 def description(self):
352 description = self._remote.get_config_value(
355 description = self._remote.get_config_value(
353 'web', 'description', untrusted=True)
356 'web', 'description', untrusted=True)
354 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
357 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
355
358
356 @LazyProperty
359 @LazyProperty
357 def contact(self):
360 def contact(self):
358 contact = (
361 contact = (
359 self._remote.get_config_value("web", "contact") or
362 self._remote.get_config_value("web", "contact") or
360 self._remote.get_config_value("ui", "username"))
363 self._remote.get_config_value("ui", "username"))
361 return safe_unicode(contact or self.DEFAULT_CONTACT)
364 return safe_unicode(contact or self.DEFAULT_CONTACT)
362
365
363 @LazyProperty
366 @LazyProperty
364 def last_change(self):
367 def last_change(self):
365 """
368 """
366 Returns last change made on this repository as
369 Returns last change made on this repository as
367 `datetime.datetime` object
370 `datetime.datetime` object
368 """
371 """
369 return utcdate_fromtimestamp(self._get_mtime(), makedate()[1])
372 return utcdate_fromtimestamp(self._get_mtime(), makedate()[1])
370
373
371 def _get_mtime(self):
374 def _get_mtime(self):
372 try:
375 try:
373 return date_astimestamp(self.get_commit().date)
376 return date_astimestamp(self.get_commit().date)
374 except RepositoryError:
377 except RepositoryError:
375 # fallback to filesystem
378 # fallback to filesystem
376 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
379 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
377 st_path = os.path.join(self.path, '.hg', "store")
380 st_path = os.path.join(self.path, '.hg', "store")
378 if os.path.exists(cl_path):
381 if os.path.exists(cl_path):
379 return os.stat(cl_path).st_mtime
382 return os.stat(cl_path).st_mtime
380 else:
383 else:
381 return os.stat(st_path).st_mtime
384 return os.stat(st_path).st_mtime
382
385
383 def _sanitize_commit_idx(self, idx):
386 def _sanitize_commit_idx(self, idx):
384 # Note: Mercurial has ``int(-1)`` reserved as not existing id_or_idx
387 # Note: Mercurial has ``int(-1)`` reserved as not existing id_or_idx
385 # number. A `long` is treated in the correct way though. So we convert
388 # number. A `long` is treated in the correct way though. So we convert
386 # `int` to `long` here to make sure it is handled correctly.
389 # `int` to `long` here to make sure it is handled correctly.
387 if isinstance(idx, int):
390 if isinstance(idx, int):
388 return long(idx)
391 return long(idx)
389 return idx
392 return idx
390
393
391 def _get_url(self, url):
394 def _get_url(self, url):
392 """
395 """
393 Returns normalized url. If schema is not given, would fall
396 Returns normalized url. If schema is not given, would fall
394 to filesystem
397 to filesystem
395 (``file:///``) schema.
398 (``file:///``) schema.
396 """
399 """
397 url = url.encode('utf8')
400 url = url.encode('utf8')
398 if url != 'default' and '://' not in url:
401 if url != 'default' and '://' not in url:
399 url = "file:" + urllib.pathname2url(url)
402 url = "file:" + urllib.pathname2url(url)
400 return url
403 return url
401
404
402 def get_hook_location(self):
405 def get_hook_location(self):
403 """
406 """
404 returns absolute path to location where hooks are stored
407 returns absolute path to location where hooks are stored
405 """
408 """
406 return os.path.join(self.path, '.hg', '.hgrc')
409 return os.path.join(self.path, '.hg', '.hgrc')
407
410
408 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
411 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
409 """
412 """
410 Returns ``MercurialCommit`` object representing repository's
413 Returns ``MercurialCommit`` object representing repository's
411 commit at the given `commit_id` or `commit_idx`.
414 commit at the given `commit_id` or `commit_idx`.
412 """
415 """
413 if self.is_empty():
416 if self.is_empty():
414 raise EmptyRepositoryError("There are no commits yet")
417 raise EmptyRepositoryError("There are no commits yet")
415
418
416 if commit_id is not None:
419 if commit_id is not None:
417 self._validate_commit_id(commit_id)
420 self._validate_commit_id(commit_id)
418 try:
421 try:
419 idx = self._commit_ids[commit_id]
422 idx = self._commit_ids[commit_id]
420 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
423 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
421 except KeyError:
424 except KeyError:
422 pass
425 pass
423 elif commit_idx is not None:
426 elif commit_idx is not None:
424 self._validate_commit_idx(commit_idx)
427 self._validate_commit_idx(commit_idx)
425 commit_idx = self._sanitize_commit_idx(commit_idx)
428 commit_idx = self._sanitize_commit_idx(commit_idx)
426 try:
429 try:
427 id_ = self.commit_ids[commit_idx]
430 id_ = self.commit_ids[commit_idx]
428 if commit_idx < 0:
431 if commit_idx < 0:
429 commit_idx += len(self.commit_ids)
432 commit_idx += len(self.commit_ids)
430 return MercurialCommit(
433 return MercurialCommit(
431 self, id_, commit_idx, pre_load=pre_load)
434 self, id_, commit_idx, pre_load=pre_load)
432 except IndexError:
435 except IndexError:
433 commit_id = commit_idx
436 commit_id = commit_idx
434 else:
437 else:
435 commit_id = "tip"
438 commit_id = "tip"
436
439
437 # TODO Paris: Ugly hack to "serialize" long for msgpack
440 # TODO Paris: Ugly hack to "serialize" long for msgpack
438 if isinstance(commit_id, long):
441 if isinstance(commit_id, long):
439 commit_id = float(commit_id)
442 commit_id = float(commit_id)
440
443
441 if isinstance(commit_id, unicode):
444 if isinstance(commit_id, unicode):
442 commit_id = safe_str(commit_id)
445 commit_id = safe_str(commit_id)
443
446
444 raw_id, idx = self._remote.lookup(commit_id, both=True)
447 raw_id, idx = self._remote.lookup(commit_id, both=True)
445
448
446 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
449 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
447
450
448 def get_commits(
451 def get_commits(
449 self, start_id=None, end_id=None, start_date=None, end_date=None,
452 self, start_id=None, end_id=None, start_date=None, end_date=None,
450 branch_name=None, pre_load=None):
453 branch_name=None, pre_load=None):
451 """
454 """
452 Returns generator of ``MercurialCommit`` objects from start to end
455 Returns generator of ``MercurialCommit`` objects from start to end
453 (both are inclusive)
456 (both are inclusive)
454
457
455 :param start_id: None, str(commit_id)
458 :param start_id: None, str(commit_id)
456 :param end_id: None, str(commit_id)
459 :param end_id: None, str(commit_id)
457 :param start_date: if specified, commits with commit date less than
460 :param start_date: if specified, commits with commit date less than
458 ``start_date`` would be filtered out from returned set
461 ``start_date`` would be filtered out from returned set
459 :param end_date: if specified, commits with commit date greater than
462 :param end_date: if specified, commits with commit date greater than
460 ``end_date`` would be filtered out from returned set
463 ``end_date`` would be filtered out from returned set
461 :param branch_name: if specified, commits not reachable from given
464 :param branch_name: if specified, commits not reachable from given
462 branch would be filtered out from returned set
465 branch would be filtered out from returned set
463
466
464 :raise BranchDoesNotExistError: If given ``branch_name`` does not
467 :raise BranchDoesNotExistError: If given ``branch_name`` does not
465 exist.
468 exist.
466 :raise CommitDoesNotExistError: If commit for given ``start`` or
469 :raise CommitDoesNotExistError: If commit for given ``start`` or
467 ``end`` could not be found.
470 ``end`` could not be found.
468 """
471 """
469 # actually we should check now if it's not an empty repo
472 # actually we should check now if it's not an empty repo
470 branch_ancestors = False
473 branch_ancestors = False
471 if self.is_empty():
474 if self.is_empty():
472 raise EmptyRepositoryError("There are no commits yet")
475 raise EmptyRepositoryError("There are no commits yet")
473 self._validate_branch_name(branch_name)
476 self._validate_branch_name(branch_name)
474
477
475 if start_id is not None:
478 if start_id is not None:
476 self._validate_commit_id(start_id)
479 self._validate_commit_id(start_id)
477 c_start = self.get_commit(commit_id=start_id)
480 c_start = self.get_commit(commit_id=start_id)
478 start_pos = self._commit_ids[c_start.raw_id]
481 start_pos = self._commit_ids[c_start.raw_id]
479 else:
482 else:
480 start_pos = None
483 start_pos = None
481
484
482 if end_id is not None:
485 if end_id is not None:
483 self._validate_commit_id(end_id)
486 self._validate_commit_id(end_id)
484 c_end = self.get_commit(commit_id=end_id)
487 c_end = self.get_commit(commit_id=end_id)
485 end_pos = max(0, self._commit_ids[c_end.raw_id])
488 end_pos = max(0, self._commit_ids[c_end.raw_id])
486 else:
489 else:
487 end_pos = None
490 end_pos = None
488
491
489 if None not in [start_id, end_id] and start_pos > end_pos:
492 if None not in [start_id, end_id] and start_pos > end_pos:
490 raise RepositoryError(
493 raise RepositoryError(
491 "Start commit '%s' cannot be after end commit '%s'" %
494 "Start commit '%s' cannot be after end commit '%s'" %
492 (start_id, end_id))
495 (start_id, end_id))
493
496
494 if end_pos is not None:
497 if end_pos is not None:
495 end_pos += 1
498 end_pos += 1
496
499
497 commit_filter = []
500 commit_filter = []
498 if branch_name and not branch_ancestors:
501 if branch_name and not branch_ancestors:
499 commit_filter.append('branch("%s")' % branch_name)
502 commit_filter.append('branch("%s")' % branch_name)
500 elif branch_name and branch_ancestors:
503 elif branch_name and branch_ancestors:
501 commit_filter.append('ancestors(branch("%s"))' % branch_name)
504 commit_filter.append('ancestors(branch("%s"))' % branch_name)
502 if start_date and not end_date:
505 if start_date and not end_date:
503 commit_filter.append('date(">%s")' % start_date)
506 commit_filter.append('date(">%s")' % start_date)
504 if end_date and not start_date:
507 if end_date and not start_date:
505 commit_filter.append('date("<%s")' % end_date)
508 commit_filter.append('date("<%s")' % end_date)
506 if start_date and end_date:
509 if start_date and end_date:
507 commit_filter.append(
510 commit_filter.append(
508 'date(">%s") and date("<%s")' % (start_date, end_date))
511 'date(">%s") and date("<%s")' % (start_date, end_date))
509
512
510 # TODO: johbo: Figure out a simpler way for this solution
513 # TODO: johbo: Figure out a simpler way for this solution
511 collection_generator = CollectionGenerator
514 collection_generator = CollectionGenerator
512 if commit_filter:
515 if commit_filter:
513 commit_filter = map(safe_str, commit_filter)
516 commit_filter = map(safe_str, commit_filter)
514 revisions = self._remote.rev_range(commit_filter)
517 revisions = self._remote.rev_range(commit_filter)
515 collection_generator = MercurialIndexBasedCollectionGenerator
518 collection_generator = MercurialIndexBasedCollectionGenerator
516 else:
519 else:
517 revisions = self.commit_ids
520 revisions = self.commit_ids
518
521
519 if start_pos or end_pos:
522 if start_pos or end_pos:
520 revisions = revisions[start_pos:end_pos]
523 revisions = revisions[start_pos:end_pos]
521
524
522 return collection_generator(self, revisions, pre_load=pre_load)
525 return collection_generator(self, revisions, pre_load=pre_load)
523
526
524 def pull(self, url, commit_ids=None):
527 def pull(self, url, commit_ids=None):
525 """
528 """
526 Tries to pull changes from external location.
529 Tries to pull changes from external location.
527
530
528 :param commit_ids: Optional. Can be set to a list of commit ids
531 :param commit_ids: Optional. Can be set to a list of commit ids
529 which shall be pulled from the other repository.
532 which shall be pulled from the other repository.
530 """
533 """
531 url = self._get_url(url)
534 url = self._get_url(url)
532 self._remote.pull(url, commit_ids=commit_ids)
535 self._remote.pull(url, commit_ids=commit_ids)
536 self._remote.invalidate_vcs_cache()
533
537
534 def _local_clone(self, clone_path):
538 def _local_clone(self, clone_path):
535 """
539 """
536 Create a local clone of the current repo.
540 Create a local clone of the current repo.
537 """
541 """
538 self._remote.clone(self.path, clone_path, update_after_clone=True,
542 self._remote.clone(self.path, clone_path, update_after_clone=True,
539 hooks=False)
543 hooks=False)
540
544
541 def _update(self, revision, clean=False):
545 def _update(self, revision, clean=False):
542 """
546 """
543 Update the working copty to the specified revision.
547 Update the working copty to the specified revision.
544 """
548 """
545 self._remote.update(revision, clean=clean)
549 self._remote.update(revision, clean=clean)
546
550
547 def _identify(self):
551 def _identify(self):
548 """
552 """
549 Return the current state of the working directory.
553 Return the current state of the working directory.
550 """
554 """
551 return self._remote.identify().strip().rstrip('+')
555 return self._remote.identify().strip().rstrip('+')
552
556
553 def _heads(self, branch=None):
557 def _heads(self, branch=None):
554 """
558 """
555 Return the commit ids of the repository heads.
559 Return the commit ids of the repository heads.
556 """
560 """
557 return self._remote.heads(branch=branch).strip().split(' ')
561 return self._remote.heads(branch=branch).strip().split(' ')
558
562
559 def _ancestor(self, revision1, revision2):
563 def _ancestor(self, revision1, revision2):
560 """
564 """
561 Return the common ancestor of the two revisions.
565 Return the common ancestor of the two revisions.
562 """
566 """
563 return self._remote.ancestor(
567 return self._remote.ancestor(
564 revision1, revision2).strip().split(':')[-1]
568 revision1, revision2).strip().split(':')[-1]
565
569
566 def _local_push(
570 def _local_push(
567 self, revision, repository_path, push_branches=False,
571 self, revision, repository_path, push_branches=False,
568 enable_hooks=False):
572 enable_hooks=False):
569 """
573 """
570 Push the given revision to the specified repository.
574 Push the given revision to the specified repository.
571
575
572 :param push_branches: allow to create branches in the target repo.
576 :param push_branches: allow to create branches in the target repo.
573 """
577 """
574 self._remote.push(
578 self._remote.push(
575 [revision], repository_path, hooks=enable_hooks,
579 [revision], repository_path, hooks=enable_hooks,
576 push_branches=push_branches)
580 push_branches=push_branches)
577
581
578 def _local_merge(self, target_ref, merge_message, user_name, user_email,
582 def _local_merge(self, target_ref, merge_message, user_name, user_email,
579 source_ref, use_rebase=False):
583 source_ref, use_rebase=False):
580 """
584 """
581 Merge the given source_revision into the checked out revision.
585 Merge the given source_revision into the checked out revision.
582
586
583 Returns the commit id of the merge and a boolean indicating if the
587 Returns the commit id of the merge and a boolean indicating if the
584 commit needs to be pushed.
588 commit needs to be pushed.
585 """
589 """
586 self._update(target_ref.commit_id)
590 self._update(target_ref.commit_id)
587
591
588 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
592 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
589 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
593 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
590
594
591 if ancestor == source_ref.commit_id:
595 if ancestor == source_ref.commit_id:
592 # Nothing to do, the changes were already integrated
596 # Nothing to do, the changes were already integrated
593 return target_ref.commit_id, False
597 return target_ref.commit_id, False
594
598
595 elif ancestor == target_ref.commit_id and is_the_same_branch:
599 elif ancestor == target_ref.commit_id and is_the_same_branch:
596 # In this case we should force a commit message
600 # In this case we should force a commit message
597 return source_ref.commit_id, True
601 return source_ref.commit_id, True
598
602
599 if use_rebase:
603 if use_rebase:
600 try:
604 try:
601 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
605 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
602 target_ref.commit_id)
606 target_ref.commit_id)
603 self.bookmark(bookmark_name, revision=source_ref.commit_id)
607 self.bookmark(bookmark_name, revision=source_ref.commit_id)
604 self._remote.rebase(
608 self._remote.rebase(
605 source=source_ref.commit_id, dest=target_ref.commit_id)
609 source=source_ref.commit_id, dest=target_ref.commit_id)
610 self._remote.invalidate_vcs_cache()
606 self._update(bookmark_name)
611 self._update(bookmark_name)
607 return self._identify(), True
612 return self._identify(), True
608 except RepositoryError:
613 except RepositoryError:
609 # The rebase-abort may raise another exception which 'hides'
614 # The rebase-abort may raise another exception which 'hides'
610 # the original one, therefore we log it here.
615 # the original one, therefore we log it here.
611 log.exception('Error while rebasing shadow repo during merge.')
616 log.exception('Error while rebasing shadow repo during merge.')
612
617
613 # Cleanup any rebase leftovers
618 # Cleanup any rebase leftovers
619 self._remote.invalidate_vcs_cache()
614 self._remote.rebase(abort=True)
620 self._remote.rebase(abort=True)
621 self._remote.invalidate_vcs_cache()
615 self._remote.update(clean=True)
622 self._remote.update(clean=True)
616 raise
623 raise
617 else:
624 else:
618 try:
625 try:
619 self._remote.merge(source_ref.commit_id)
626 self._remote.merge(source_ref.commit_id)
627 self._remote.invalidate_vcs_cache()
620 self._remote.commit(
628 self._remote.commit(
621 message=safe_str(merge_message),
629 message=safe_str(merge_message),
622 username=safe_str('%s <%s>' % (user_name, user_email)))
630 username=safe_str('%s <%s>' % (user_name, user_email)))
631 self._remote.invalidate_vcs_cache()
623 return self._identify(), True
632 return self._identify(), True
624 except RepositoryError:
633 except RepositoryError:
625 # Cleanup any merge leftovers
634 # Cleanup any merge leftovers
626 self._remote.update(clean=True)
635 self._remote.update(clean=True)
627 raise
636 raise
628
637
629 def _is_the_same_branch(self, target_ref, source_ref):
638 def _is_the_same_branch(self, target_ref, source_ref):
630 return (
639 return (
631 self._get_branch_name(target_ref) ==
640 self._get_branch_name(target_ref) ==
632 self._get_branch_name(source_ref))
641 self._get_branch_name(source_ref))
633
642
634 def _get_branch_name(self, ref):
643 def _get_branch_name(self, ref):
635 if ref.type == 'branch':
644 if ref.type == 'branch':
636 return ref.name
645 return ref.name
637 return self._remote.ctx_branch(ref.commit_id)
646 return self._remote.ctx_branch(ref.commit_id)
638
647
639 def _get_shadow_repository_path(self, workspace_id):
648 def _get_shadow_repository_path(self, workspace_id):
640 # The name of the shadow repository must start with '.', so it is
649 # The name of the shadow repository must start with '.', so it is
641 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
650 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
642 return os.path.join(
651 return os.path.join(
643 os.path.dirname(self.path),
652 os.path.dirname(self.path),
644 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
653 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
645
654
646 def _maybe_prepare_merge_workspace(self, workspace_id, unused_target_ref):
655 def _maybe_prepare_merge_workspace(self, workspace_id, unused_target_ref):
647 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
656 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
648 if not os.path.exists(shadow_repository_path):
657 if not os.path.exists(shadow_repository_path):
649 self._local_clone(shadow_repository_path)
658 self._local_clone(shadow_repository_path)
650 log.debug(
659 log.debug(
651 'Prepared shadow repository in %s', shadow_repository_path)
660 'Prepared shadow repository in %s', shadow_repository_path)
652
661
653 return shadow_repository_path
662 return shadow_repository_path
654
663
655 def cleanup_merge_workspace(self, workspace_id):
664 def cleanup_merge_workspace(self, workspace_id):
656 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
665 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
657 shutil.rmtree(shadow_repository_path, ignore_errors=True)
666 shutil.rmtree(shadow_repository_path, ignore_errors=True)
658
667
659 def _merge_repo(self, shadow_repository_path, target_ref,
668 def _merge_repo(self, shadow_repository_path, target_ref,
660 source_repo, source_ref, merge_message,
669 source_repo, source_ref, merge_message,
661 merger_name, merger_email, dry_run=False,
670 merger_name, merger_email, dry_run=False,
662 use_rebase=False):
671 use_rebase=False):
663 if target_ref.commit_id not in self._heads():
672 if target_ref.commit_id not in self._heads():
664 return MergeResponse(
673 return MergeResponse(
665 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
674 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
666
675
667 if (target_ref.type == 'branch' and
676 if (target_ref.type == 'branch' and
668 len(self._heads(target_ref.name)) != 1):
677 len(self._heads(target_ref.name)) != 1):
669 return MergeResponse(
678 return MergeResponse(
670 False, False, None,
679 False, False, None,
671 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
680 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
672
681
673 shadow_repo = self._get_shadow_instance(shadow_repository_path)
682 shadow_repo = self._get_shadow_instance(shadow_repository_path)
674
683
675 log.debug('Pulling in target reference %s', target_ref)
684 log.debug('Pulling in target reference %s', target_ref)
676 self._validate_pull_reference(target_ref)
685 self._validate_pull_reference(target_ref)
677 shadow_repo._local_pull(self.path, target_ref)
686 shadow_repo._local_pull(self.path, target_ref)
678 try:
687 try:
679 log.debug('Pulling in source reference %s', source_ref)
688 log.debug('Pulling in source reference %s', source_ref)
680 source_repo._validate_pull_reference(source_ref)
689 source_repo._validate_pull_reference(source_ref)
681 shadow_repo._local_pull(source_repo.path, source_ref)
690 shadow_repo._local_pull(source_repo.path, source_ref)
682 except CommitDoesNotExistError as e:
691 except CommitDoesNotExistError as e:
683 log.exception('Failure when doing local pull on hg shadow repo')
692 log.exception('Failure when doing local pull on hg shadow repo')
684 return MergeResponse(
693 return MergeResponse(
685 False, False, None, MergeFailureReason.MISSING_COMMIT)
694 False, False, None, MergeFailureReason.MISSING_COMMIT)
686
695
687 merge_commit_id = None
696 merge_commit_id = None
688 merge_failure_reason = MergeFailureReason.NONE
697 merge_failure_reason = MergeFailureReason.NONE
689
698
690 try:
699 try:
691 merge_commit_id, needs_push = shadow_repo._local_merge(
700 merge_commit_id, needs_push = shadow_repo._local_merge(
692 target_ref, merge_message, merger_name, merger_email,
701 target_ref, merge_message, merger_name, merger_email,
693 source_ref, use_rebase=use_rebase)
702 source_ref, use_rebase=use_rebase)
694 merge_possible = True
703 merge_possible = True
695 except RepositoryError as e:
704 except RepositoryError as e:
696 log.exception('Failure when doing local merge on hg shadow repo')
705 log.exception('Failure when doing local merge on hg shadow repo')
697 merge_possible = False
706 merge_possible = False
698 merge_failure_reason = MergeFailureReason.MERGE_FAILED
707 merge_failure_reason = MergeFailureReason.MERGE_FAILED
699
708
700 if merge_possible and not dry_run:
709 if merge_possible and not dry_run:
701 if needs_push:
710 if needs_push:
702 # In case the target is a bookmark, update it, so after pushing
711 # In case the target is a bookmark, update it, so after pushing
703 # the bookmarks is also updated in the target.
712 # the bookmarks is also updated in the target.
704 if target_ref.type == 'book':
713 if target_ref.type == 'book':
705 shadow_repo.bookmark(
714 shadow_repo.bookmark(
706 target_ref.name, revision=merge_commit_id)
715 target_ref.name, revision=merge_commit_id)
707
716
708 try:
717 try:
709 shadow_repo_with_hooks = self._get_shadow_instance(
718 shadow_repo_with_hooks = self._get_shadow_instance(
710 shadow_repository_path,
719 shadow_repository_path,
711 enable_hooks=True)
720 enable_hooks=True)
712 # Note: the push_branches option will push any new branch
721 # Note: the push_branches option will push any new branch
713 # defined in the source repository to the target. This may
722 # defined in the source repository to the target. This may
714 # be dangerous as branches are permanent in Mercurial.
723 # be dangerous as branches are permanent in Mercurial.
715 # This feature was requested in issue #441.
724 # This feature was requested in issue #441.
716 shadow_repo_with_hooks._local_push(
725 shadow_repo_with_hooks._local_push(
717 merge_commit_id, self.path, push_branches=True,
726 merge_commit_id, self.path, push_branches=True,
718 enable_hooks=True)
727 enable_hooks=True)
719 merge_succeeded = True
728 merge_succeeded = True
720 except RepositoryError:
729 except RepositoryError:
721 log.exception(
730 log.exception(
722 'Failure when doing local push from the shadow '
731 'Failure when doing local push from the shadow '
723 'repository to the target repository.')
732 'repository to the target repository.')
724 merge_succeeded = False
733 merge_succeeded = False
725 merge_failure_reason = MergeFailureReason.PUSH_FAILED
734 merge_failure_reason = MergeFailureReason.PUSH_FAILED
726 else:
735 else:
727 merge_succeeded = True
736 merge_succeeded = True
728 else:
737 else:
729 merge_succeeded = False
738 merge_succeeded = False
730
739
731 if dry_run:
740 if dry_run:
732 merge_commit_id = None
741 merge_commit_id = None
733
742
734 return MergeResponse(
743 return MergeResponse(
735 merge_possible, merge_succeeded, merge_commit_id,
744 merge_possible, merge_succeeded, merge_commit_id,
736 merge_failure_reason)
745 merge_failure_reason)
737
746
738 def _get_shadow_instance(
747 def _get_shadow_instance(
739 self, shadow_repository_path, enable_hooks=False):
748 self, shadow_repository_path, enable_hooks=False):
740 config = self.config.copy()
749 config = self.config.copy()
741 if not enable_hooks:
750 if not enable_hooks:
742 config.clear_section('hooks')
751 config.clear_section('hooks')
743 return MercurialRepository(shadow_repository_path, config)
752 return MercurialRepository(shadow_repository_path, config)
744
753
745 def _validate_pull_reference(self, reference):
754 def _validate_pull_reference(self, reference):
746 if not (reference.name in self.bookmarks or
755 if not (reference.name in self.bookmarks or
747 reference.name in self.branches or
756 reference.name in self.branches or
748 self.get_commit(reference.commit_id)):
757 self.get_commit(reference.commit_id)):
749 raise CommitDoesNotExistError(
758 raise CommitDoesNotExistError(
750 'Unknown branch, bookmark or commit id')
759 'Unknown branch, bookmark or commit id')
751
760
752 def _local_pull(self, repository_path, reference):
761 def _local_pull(self, repository_path, reference):
753 """
762 """
754 Fetch a branch, bookmark or commit from a local repository.
763 Fetch a branch, bookmark or commit from a local repository.
755 """
764 """
756 repository_path = os.path.abspath(repository_path)
765 repository_path = os.path.abspath(repository_path)
757 if repository_path == self.path:
766 if repository_path == self.path:
758 raise ValueError('Cannot pull from the same repository')
767 raise ValueError('Cannot pull from the same repository')
759
768
760 reference_type_to_option_name = {
769 reference_type_to_option_name = {
761 'book': 'bookmark',
770 'book': 'bookmark',
762 'branch': 'branch',
771 'branch': 'branch',
763 }
772 }
764 option_name = reference_type_to_option_name.get(
773 option_name = reference_type_to_option_name.get(
765 reference.type, 'revision')
774 reference.type, 'revision')
766
775
767 if option_name == 'revision':
776 if option_name == 'revision':
768 ref = reference.commit_id
777 ref = reference.commit_id
769 else:
778 else:
770 ref = reference.name
779 ref = reference.name
771
780
772 options = {option_name: [ref]}
781 options = {option_name: [ref]}
773 self._remote.pull_cmd(repository_path, hooks=False, **options)
782 self._remote.pull_cmd(repository_path, hooks=False, **options)
783 self._remote.invalidate_vcs_cache()
774
784
775 def bookmark(self, bookmark, revision=None):
785 def bookmark(self, bookmark, revision=None):
776 if isinstance(bookmark, unicode):
786 if isinstance(bookmark, unicode):
777 bookmark = safe_str(bookmark)
787 bookmark = safe_str(bookmark)
778 self._remote.bookmark(bookmark, revision=revision)
788 self._remote.bookmark(bookmark, revision=revision)
789 self._remote.invalidate_vcs_cache()
779
790
780
791
781 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
792 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
782
793
783 def _commit_factory(self, commit_id):
794 def _commit_factory(self, commit_id):
784 return self.repo.get_commit(
795 return self.repo.get_commit(
785 commit_idx=commit_id, pre_load=self.pre_load)
796 commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,270 +1,250 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2016 RhodeCode GmbH
3 # Copyright (C) 2016-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Client for the VCSServer implemented based on HTTP.
22 Client for the VCSServer implemented based on HTTP.
23
23
24
24
25 Status
25 Status
26 ------
26 ------
27
27
28 This client implementation shall eventually replace the Pyro4 based
28 This client implementation shall eventually replace the Pyro4 based
29 implementation.
29 implementation.
30 """
30 """
31
31
32 import copy
32 import copy
33 import logging
33 import logging
34 import threading
34 import threading
35 import urllib2
35 import urllib2
36 import urlparse
36 import urlparse
37 import uuid
37 import uuid
38
38
39 import msgpack
39 import msgpack
40 import requests
40 import requests
41
41
42 from . import exceptions, CurlSession
42 from . import exceptions, CurlSession
43
43
44
44
45 log = logging.getLogger(__name__)
45 log = logging.getLogger(__name__)
46
46
47
47
48 # TODO: mikhail: Keep it in sync with vcsserver's
48 # TODO: mikhail: Keep it in sync with vcsserver's
49 # HTTPApplication.ALLOWED_EXCEPTIONS
49 # HTTPApplication.ALLOWED_EXCEPTIONS
50 EXCEPTIONS_MAP = {
50 EXCEPTIONS_MAP = {
51 'KeyError': KeyError,
51 'KeyError': KeyError,
52 'URLError': urllib2.URLError,
52 'URLError': urllib2.URLError,
53 }
53 }
54
54
55
55
56 class RepoMaker(object):
56 class RepoMaker(object):
57
57
58 def __init__(self, server_and_port, backend_endpoint, session_factory):
58 def __init__(self, server_and_port, backend_endpoint, session_factory):
59 self.url = urlparse.urljoin(
59 self.url = urlparse.urljoin(
60 'http://%s' % server_and_port, backend_endpoint)
60 'http://%s' % server_and_port, backend_endpoint)
61 self._session_factory = session_factory
61 self._session_factory = session_factory
62
62
63 def __call__(self, path, config, with_wire=None):
63 def __call__(self, path, config, with_wire=None):
64 log.debug('RepoMaker call on %s', path)
64 log.debug('RepoMaker call on %s', path)
65 return RemoteRepo(
65 return RemoteRepo(
66 path, config, self.url, self._session_factory(),
66 path, config, self.url, self._session_factory(),
67 with_wire=with_wire)
67 with_wire=with_wire)
68
68
69 def __getattr__(self, name):
69 def __getattr__(self, name):
70 def f(*args, **kwargs):
70 def f(*args, **kwargs):
71 return self._call(name, *args, **kwargs)
71 return self._call(name, *args, **kwargs)
72 return f
72 return f
73
73
74 @exceptions.map_vcs_exceptions
74 @exceptions.map_vcs_exceptions
75 def _call(self, name, *args, **kwargs):
75 def _call(self, name, *args, **kwargs):
76 payload = {
76 payload = {
77 'id': str(uuid.uuid4()),
77 'id': str(uuid.uuid4()),
78 'method': name,
78 'method': name,
79 'params': {'args': args, 'kwargs': kwargs}
79 'params': {'args': args, 'kwargs': kwargs}
80 }
80 }
81 return _remote_call(
81 return _remote_call(
82 self.url, payload, EXCEPTIONS_MAP, self._session_factory())
82 self.url, payload, EXCEPTIONS_MAP, self._session_factory())
83
83
84
84
85 class RemoteRepo(object):
85 class RemoteRepo(object):
86
86
87 # List of method names that act like a write to the repository. After these
88 # methods we have to invalidate the VCSServer cache.
89 _writing_methods = [
90 'bookmark',
91 'commit',
92 'pull',
93 'pull_cmd',
94 'push',
95 'rebase',
96 'strip',
97 ]
98
99 def __init__(self, path, config, url, session, with_wire=None):
87 def __init__(self, path, config, url, session, with_wire=None):
100 self.url = url
88 self.url = url
101 self._session = session
89 self._session = session
102 self._wire = {
90 self._wire = {
103 "path": path,
91 "path": path,
104 "config": config,
92 "config": config,
105 "context": self._create_vcs_cache_context(),
93 "context": self._create_vcs_cache_context(),
106 }
94 }
107 if with_wire:
95 if with_wire:
108 self._wire.update(with_wire)
96 self._wire.update(with_wire)
109
97
110 # johbo: Trading complexity for performance. Avoiding the call to
98 # johbo: Trading complexity for performance. Avoiding the call to
111 # log.debug brings a few percent gain even if is is not active.
99 # log.debug brings a few percent gain even if is is not active.
112 if log.isEnabledFor(logging.DEBUG):
100 if log.isEnabledFor(logging.DEBUG):
113 self._call = self._call_with_logging
101 self._call = self._call_with_logging
114
102
115 def __getattr__(self, name):
103 def __getattr__(self, name):
116 def f(*args, **kwargs):
104 def f(*args, **kwargs):
117 return self._call(name, *args, **kwargs)
105 return self._call(name, *args, **kwargs)
118 return f
106 return f
119
107
120 @exceptions.map_vcs_exceptions
108 @exceptions.map_vcs_exceptions
121 def _call(self, name, *args, **kwargs):
109 def _call(self, name, *args, **kwargs):
122 # TODO: oliver: This is currently necessary pre-call since the
110 # TODO: oliver: This is currently necessary pre-call since the
123 # config object is being changed for hooking scenarios
111 # config object is being changed for hooking scenarios
124 wire = copy.deepcopy(self._wire)
112 wire = copy.deepcopy(self._wire)
125 wire["config"] = wire["config"].serialize()
113 wire["config"] = wire["config"].serialize()
126 payload = {
114 payload = {
127 'id': str(uuid.uuid4()),
115 'id': str(uuid.uuid4()),
128 'method': name,
116 'method': name,
129 'params': {'wire': wire, 'args': args, 'kwargs': kwargs}
117 'params': {'wire': wire, 'args': args, 'kwargs': kwargs}
130 }
118 }
131
119 return _remote_call(self.url, payload, EXCEPTIONS_MAP, self._session)
132 try:
133 response = _remote_call(
134 self.url, payload, EXCEPTIONS_MAP, self._session)
135 finally:
136 if name in self._writing_methods:
137 self.invalidate_vcs_cache()
138
139 return response
140
120
141 def _call_with_logging(self, name, *args, **kwargs):
121 def _call_with_logging(self, name, *args, **kwargs):
142 log.debug('Calling %s@%s', self.url, name)
122 log.debug('Calling %s@%s', self.url, name)
143 return RemoteRepo._call(self, name, *args, **kwargs)
123 return RemoteRepo._call(self, name, *args, **kwargs)
144
124
145 def __getitem__(self, key):
125 def __getitem__(self, key):
146 return self.revision(key)
126 return self.revision(key)
147
127
148 def _create_vcs_cache_context(self):
128 def _create_vcs_cache_context(self):
149 """
129 """
150 Creates a unique string which is passed to the VCSServer on every
130 Creates a unique string which is passed to the VCSServer on every
151 remote call. It is used as cache key in the VCSServer.
131 remote call. It is used as cache key in the VCSServer.
152 """
132 """
153 return str(uuid.uuid4())
133 return str(uuid.uuid4())
154
134
155 def invalidate_vcs_cache(self):
135 def invalidate_vcs_cache(self):
156 """
136 """
157 This invalidates the context which is sent to the VCSServer on every
137 This invalidates the context which is sent to the VCSServer on every
158 call to a remote method. It forces the VCSServer to create a fresh
138 call to a remote method. It forces the VCSServer to create a fresh
159 repository instance on the next call to a remote method.
139 repository instance on the next call to a remote method.
160 """
140 """
161 self._wire['context'] = self._create_vcs_cache_context()
141 self._wire['context'] = self._create_vcs_cache_context()
162
142
163
143
164 class RemoteObject(object):
144 class RemoteObject(object):
165
145
166 def __init__(self, url, session):
146 def __init__(self, url, session):
167 self._url = url
147 self._url = url
168 self._session = session
148 self._session = session
169
149
170 # johbo: Trading complexity for performance. Avoiding the call to
150 # johbo: Trading complexity for performance. Avoiding the call to
171 # log.debug brings a few percent gain even if is is not active.
151 # log.debug brings a few percent gain even if is is not active.
172 if log.isEnabledFor(logging.DEBUG):
152 if log.isEnabledFor(logging.DEBUG):
173 self._call = self._call_with_logging
153 self._call = self._call_with_logging
174
154
175 def __getattr__(self, name):
155 def __getattr__(self, name):
176 def f(*args, **kwargs):
156 def f(*args, **kwargs):
177 return self._call(name, *args, **kwargs)
157 return self._call(name, *args, **kwargs)
178 return f
158 return f
179
159
180 @exceptions.map_vcs_exceptions
160 @exceptions.map_vcs_exceptions
181 def _call(self, name, *args, **kwargs):
161 def _call(self, name, *args, **kwargs):
182 payload = {
162 payload = {
183 'id': str(uuid.uuid4()),
163 'id': str(uuid.uuid4()),
184 'method': name,
164 'method': name,
185 'params': {'args': args, 'kwargs': kwargs}
165 'params': {'args': args, 'kwargs': kwargs}
186 }
166 }
187 return _remote_call(self._url, payload, EXCEPTIONS_MAP, self._session)
167 return _remote_call(self._url, payload, EXCEPTIONS_MAP, self._session)
188
168
189 def _call_with_logging(self, name, *args, **kwargs):
169 def _call_with_logging(self, name, *args, **kwargs):
190 log.debug('Calling %s@%s', self._url, name)
170 log.debug('Calling %s@%s', self._url, name)
191 return RemoteObject._call(self, name, *args, **kwargs)
171 return RemoteObject._call(self, name, *args, **kwargs)
192
172
193
173
def _remote_call(url, payload, exceptions_map, session):
    """POST a msgpack-encoded ``payload`` to ``url`` and decode the reply.

    Returns the ``result`` entry of the response. If the response carries
    an ``error`` entry, an exception of the type named in the error (looked
    up in ``exceptions_map``, falling back to ``Exception``) is raised; a
    ``_vcs_kind`` marker, when present, is copied onto the exception so the
    caller can map it back to a VCS-specific error.
    """
    raw_response = session.post(url, data=msgpack.packb(payload))
    response = msgpack.unpackb(raw_response.content)

    error = response.get('error')
    if not error:
        return response.get('result')

    exc_type = exceptions_map.get(error.get('type', 'Exception'), Exception)
    exc = exc_type(error.get('message'))
    if '_vcs_kind' in error:
        exc._vcs_kind = error['_vcs_kind']
    raise exc
208
188
209
189
class VcsHttpProxy(object):
    """Forward WSGI-style requests to a remote VCS backend over HTTP.

    Requests and responses are serialized with msgpack; the response body
    is streamed and decoded incrementally in ``CHUNK_SIZE`` byte chunks.
    """

    CHUNK_SIZE = 16384

    def __init__(self, server_and_port, backend_endpoint):
        # Retry transient connection failures up to 5 times before giving up.
        retry_adapter = requests.adapters.HTTPAdapter(max_retries=5)
        self.base_url = urlparse.urljoin(
            'http://%s' % server_and_port, backend_endpoint)
        self.session = requests.Session()
        self.session.mount('http://', retry_adapter)

    def handle(self, environment, input_data, *args, **kwargs):
        """Send one request to the remote side and return the streamed reply.

        Returns the ``(body_iterator, status, headers)`` triple produced by
        ``_get_result``.
        """
        payload = {
            'environment': environment,
            'input_data': input_data,
            'args': args,
            'kwargs': kwargs
        }
        response = self.session.post(
            self.base_url, msgpack.packb(payload), stream=True)
        return self._get_result(response)

    def _deserialize_and_raise(self, error):
        # Rebuild the remote error locally, preserving the optional
        # ``_vcs_kind`` marker used to classify VCS errors downstream.
        exception = Exception(error['message'])
        if '_vcs_kind' in error:
            exception._vcs_kind = error['_vcs_kind']
        raise exception

    def _iterate(self, result):
        """Yield msgpack-decoded objects from the streamed response body."""
        unpacker = msgpack.Unpacker()
        for raw_chunk in result.iter_content(chunk_size=self.CHUNK_SIZE):
            unpacker.feed(raw_chunk)
            for decoded in unpacker:
                yield decoded

    def _get_result(self, result):
        """Split the response stream into ``(iterator, status, headers)``.

        The remote side sends an error marker first, then the status, then
        the headers; the rest of the stream is the response body.
        """
        stream = self._iterate(result)

        error = stream.next()
        if error:
            self._deserialize_and_raise(error)

        status = stream.next()
        headers = stream.next()
        return stream, status, headers
257
237
258
238
class ThreadlocalSessionFactory(object):
    """
    Callable factory which lazily creates one CurlSession per thread.

    The session is created on the first call from a given thread and
    cached in thread-local storage for all subsequent calls.
    """

    def __init__(self):
        self._thread_local = threading.local()

    def __call__(self):
        local = self._thread_local
        if not hasattr(local, 'curl_session'):
            local.curl_session = CurlSession()
        return local.curl_session
General Comments 0
You need to be logged in to leave comments. Login now