##// END OF EJS Templates
scm: added push options for Mercurial and Git to allow remote repository sync.
marcink -
r2492:d48fe67d default
parent child Browse files
Show More
@@ -1,977 +1,981 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT repository module
22 GIT repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import re
27 import re
28 import shutil
28 import shutil
29
29
30 from zope.cachedescriptors.property import Lazy as LazyProperty
30 from zope.cachedescriptors.property import Lazy as LazyProperty
31
31
32 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.compat import OrderedDict
33 from rhodecode.lib.datelib import (
33 from rhodecode.lib.datelib import (
34 utcdate_fromtimestamp, makedate, date_astimestamp)
34 utcdate_fromtimestamp, makedate, date_astimestamp)
35 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.utils import safe_unicode, safe_str
36 from rhodecode.lib.vcs import connection, path as vcspath
36 from rhodecode.lib.vcs import connection, path as vcspath
37 from rhodecode.lib.vcs.backends.base import (
37 from rhodecode.lib.vcs.backends.base import (
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 MergeFailureReason, Reference)
39 MergeFailureReason, Reference)
40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
43 from rhodecode.lib.vcs.exceptions import (
43 from rhodecode.lib.vcs.exceptions import (
44 CommitDoesNotExistError, EmptyRepositoryError,
44 CommitDoesNotExistError, EmptyRepositoryError,
45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
46
46
47
47
# Matches a short (12 hex chars) or full (40 hex chars) commit sha.
# NOTE: the previous pattern r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$' used
# stray square brackets, which made '[' a valid sha character, left the
# 12-char alternative unanchored at the end, and required a literal ']'
# after the 40-char alternative. Grouping parentheses express the
# intended alternation.
SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')

log = logging.getLogger(__name__)
51
51
52
52
53 class GitRepository(BaseRepository):
53 class GitRepository(BaseRepository):
54 """
54 """
55 Git repository backend.
55 Git repository backend.
56 """
56 """
57 DEFAULT_BRANCH_NAME = 'master'
57 DEFAULT_BRANCH_NAME = 'master'
58
58
59 contact = BaseRepository.DEFAULT_CONTACT
59 contact = BaseRepository.DEFAULT_CONTACT
60
60
61 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 update_after_clone=False, with_wire=None, bare=False):
62 update_after_clone=False, with_wire=None, bare=False):
63
63
64 self.path = safe_str(os.path.abspath(repo_path))
64 self.path = safe_str(os.path.abspath(repo_path))
65 self.config = config if config else Config()
65 self.config = config if config else Config()
66 self._remote = connection.Git(
66 self._remote = connection.Git(
67 self.path, self.config, with_wire=with_wire)
67 self.path, self.config, with_wire=with_wire)
68
68
69 self._init_repo(create, src_url, update_after_clone, bare)
69 self._init_repo(create, src_url, update_after_clone, bare)
70
70
71 # caches
71 # caches
72 self._commit_ids = {}
72 self._commit_ids = {}
73
73
74 self.bookmarks = {}
74 self.bookmarks = {}
75
75
76 @LazyProperty
76 @LazyProperty
77 def bare(self):
77 def bare(self):
78 return self._remote.bare()
78 return self._remote.bare()
79
79
80 @LazyProperty
80 @LazyProperty
81 def head(self):
81 def head(self):
82 return self._remote.head()
82 return self._remote.head()
83
83
84 @LazyProperty
84 @LazyProperty
85 def commit_ids(self):
85 def commit_ids(self):
86 """
86 """
87 Returns list of commit ids, in ascending order. Being lazy
87 Returns list of commit ids, in ascending order. Being lazy
88 attribute allows external tools to inject commit ids from cache.
88 attribute allows external tools to inject commit ids from cache.
89 """
89 """
90 commit_ids = self._get_all_commit_ids()
90 commit_ids = self._get_all_commit_ids()
91 self._rebuild_cache(commit_ids)
91 self._rebuild_cache(commit_ids)
92 return commit_ids
92 return commit_ids
93
93
94 def _rebuild_cache(self, commit_ids):
94 def _rebuild_cache(self, commit_ids):
95 self._commit_ids = dict((commit_id, index)
95 self._commit_ids = dict((commit_id, index)
96 for index, commit_id in enumerate(commit_ids))
96 for index, commit_id in enumerate(commit_ids))
97
97
98 def run_git_command(self, cmd, **opts):
98 def run_git_command(self, cmd, **opts):
99 """
99 """
100 Runs given ``cmd`` as git command and returns tuple
100 Runs given ``cmd`` as git command and returns tuple
101 (stdout, stderr).
101 (stdout, stderr).
102
102
103 :param cmd: git command to be executed
103 :param cmd: git command to be executed
104 :param opts: env options to pass into Subprocess command
104 :param opts: env options to pass into Subprocess command
105 """
105 """
106 if not isinstance(cmd, list):
106 if not isinstance(cmd, list):
107 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
107 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
108
108
109 out, err = self._remote.run_git_command(cmd, **opts)
109 out, err = self._remote.run_git_command(cmd, **opts)
110 if err:
110 if err:
111 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
111 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
112 return out, err
112 return out, err
113
113
114 @staticmethod
114 @staticmethod
115 def check_url(url, config):
115 def check_url(url, config):
116 """
116 """
117 Function will check given url and try to verify if it's a valid
117 Function will check given url and try to verify if it's a valid
118 link. Sometimes it may happened that git will issue basic
118 link. Sometimes it may happened that git will issue basic
119 auth request that can cause whole API to hang when used from python
119 auth request that can cause whole API to hang when used from python
120 or other external calls.
120 or other external calls.
121
121
122 On failures it'll raise urllib2.HTTPError, exception is also thrown
122 On failures it'll raise urllib2.HTTPError, exception is also thrown
123 when the return code is non 200
123 when the return code is non 200
124 """
124 """
125 # check first if it's not an url
125 # check first if it's not an url
126 if os.path.isdir(url) or url.startswith('file:'):
126 if os.path.isdir(url) or url.startswith('file:'):
127 return True
127 return True
128
128
129 if '+' in url.split('://', 1)[0]:
129 if '+' in url.split('://', 1)[0]:
130 url = url.split('+', 1)[1]
130 url = url.split('+', 1)[1]
131
131
132 # Request the _remote to verify the url
132 # Request the _remote to verify the url
133 return connection.Git.check_url(url, config.serialize())
133 return connection.Git.check_url(url, config.serialize())
134
134
135 @staticmethod
135 @staticmethod
136 def is_valid_repository(path):
136 def is_valid_repository(path):
137 if os.path.isdir(os.path.join(path, '.git')):
137 if os.path.isdir(os.path.join(path, '.git')):
138 return True
138 return True
139 # check case of bare repository
139 # check case of bare repository
140 try:
140 try:
141 GitRepository(path)
141 GitRepository(path)
142 return True
142 return True
143 except VCSError:
143 except VCSError:
144 pass
144 pass
145 return False
145 return False
146
146
147 def _init_repo(self, create, src_url=None, update_after_clone=False,
147 def _init_repo(self, create, src_url=None, update_after_clone=False,
148 bare=False):
148 bare=False):
149 if create and os.path.exists(self.path):
149 if create and os.path.exists(self.path):
150 raise RepositoryError(
150 raise RepositoryError(
151 "Cannot create repository at %s, location already exist"
151 "Cannot create repository at %s, location already exist"
152 % self.path)
152 % self.path)
153
153
154 try:
154 try:
155 if create and src_url:
155 if create and src_url:
156 GitRepository.check_url(src_url, self.config)
156 GitRepository.check_url(src_url, self.config)
157 self.clone(src_url, update_after_clone, bare)
157 self.clone(src_url, update_after_clone, bare)
158 elif create:
158 elif create:
159 os.makedirs(self.path, mode=0755)
159 os.makedirs(self.path, mode=0755)
160
160
161 if bare:
161 if bare:
162 self._remote.init_bare()
162 self._remote.init_bare()
163 else:
163 else:
164 self._remote.init()
164 self._remote.init()
165 else:
165 else:
166 if not self._remote.assert_correct_path():
166 if not self._remote.assert_correct_path():
167 raise RepositoryError(
167 raise RepositoryError(
168 'Path "%s" does not contain a Git repository' %
168 'Path "%s" does not contain a Git repository' %
169 (self.path,))
169 (self.path,))
170
170
171 # TODO: johbo: check if we have to translate the OSError here
171 # TODO: johbo: check if we have to translate the OSError here
172 except OSError as err:
172 except OSError as err:
173 raise RepositoryError(err)
173 raise RepositoryError(err)
174
174
175 def _get_all_commit_ids(self, filters=None):
175 def _get_all_commit_ids(self, filters=None):
176 # we must check if this repo is not empty, since later command
176 # we must check if this repo is not empty, since later command
177 # fails if it is. And it's cheaper to ask than throw the subprocess
177 # fails if it is. And it's cheaper to ask than throw the subprocess
178 # errors
178 # errors
179 try:
179 try:
180 self._remote.head()
180 self._remote.head()
181 except KeyError:
181 except KeyError:
182 return []
182 return []
183
183
184 rev_filter = ['--branches', '--tags']
184 rev_filter = ['--branches', '--tags']
185 extra_filter = []
185 extra_filter = []
186
186
187 if filters:
187 if filters:
188 if filters.get('since'):
188 if filters.get('since'):
189 extra_filter.append('--since=%s' % (filters['since']))
189 extra_filter.append('--since=%s' % (filters['since']))
190 if filters.get('until'):
190 if filters.get('until'):
191 extra_filter.append('--until=%s' % (filters['until']))
191 extra_filter.append('--until=%s' % (filters['until']))
192 if filters.get('branch_name'):
192 if filters.get('branch_name'):
193 rev_filter = ['--tags']
193 rev_filter = ['--tags']
194 extra_filter.append(filters['branch_name'])
194 extra_filter.append(filters['branch_name'])
195 rev_filter.extend(extra_filter)
195 rev_filter.extend(extra_filter)
196
196
197 # if filters.get('start') or filters.get('end'):
197 # if filters.get('start') or filters.get('end'):
198 # # skip is offset, max-count is limit
198 # # skip is offset, max-count is limit
199 # if filters.get('start'):
199 # if filters.get('start'):
200 # extra_filter += ' --skip=%s' % filters['start']
200 # extra_filter += ' --skip=%s' % filters['start']
201 # if filters.get('end'):
201 # if filters.get('end'):
202 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
202 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
203
203
204 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
204 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
205 try:
205 try:
206 output, __ = self.run_git_command(cmd)
206 output, __ = self.run_git_command(cmd)
207 except RepositoryError:
207 except RepositoryError:
208 # Can be raised for empty repositories
208 # Can be raised for empty repositories
209 return []
209 return []
210 return output.splitlines()
210 return output.splitlines()
211
211
    def _get_commit_id(self, commit_id_or_idx):
        """
        Resolve ``commit_id_or_idx`` - a sha, a numeric index, a branch or
        tag name, a full ref path, or a "latest" alias - into a full
        commit sha.

        :raises EmptyRepositoryError: when the repository has no commits
        :raises CommitDoesNotExistError: when nothing matches
        """
        def is_null(value):
            # True when `value` consists entirely of '0' characters (the
            # null-sha placeholder); relies on `value` being the same
            # string as `commit_id_or_idx` from the enclosing scope
            return len(value) == commit_id_or_idx.count('0')

        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        # aliases that all mean "most recent commit"
        if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
            return self.commit_ids[-1]

        is_bstr = isinstance(commit_id_or_idx, (str, unicode))
        # numeric input (an int, a short all-digit string, or the null
        # sha) is treated as an index into commit_ids
        if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
            or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
            try:
                commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
            except Exception:
                msg = "Commit %s does not exist for %s" % (
                    commit_id_or_idx, self)
                raise CommitDoesNotExistError(msg)

        elif is_bstr:
            # check full path ref, eg. refs/heads/master
            ref_id = self._refs.get(commit_id_or_idx)
            if ref_id:
                return ref_id

            # check branch name
            # NOTE(review): branch_ids is never used below - dead code?
            branch_ids = self.branches.values()
            ref_id = self._refs.get('refs/heads/%s' % commit_id_or_idx)
            if ref_id:
                return ref_id

            # check tag name
            ref_id = self._refs.get('refs/tags/%s' % commit_id_or_idx)
            if ref_id:
                return ref_id

            # not a ref: must look like a sha AND be a known commit
            if (not SHA_PATTERN.match(commit_id_or_idx) or
                commit_id_or_idx not in self.commit_ids):
                msg = "Commit %s does not exist for %s" % (
                    commit_id_or_idx, self)
                raise CommitDoesNotExistError(msg)

        # Ensure we return full id
        if not SHA_PATTERN.match(str(commit_id_or_idx)):
            raise CommitDoesNotExistError(
                "Given commit id %s not recognized" % commit_id_or_idx)
        return commit_id_or_idx
260
260
261 def get_hook_location(self):
261 def get_hook_location(self):
262 """
262 """
263 returns absolute path to location where hooks are stored
263 returns absolute path to location where hooks are stored
264 """
264 """
265 loc = os.path.join(self.path, 'hooks')
265 loc = os.path.join(self.path, 'hooks')
266 if not self.bare:
266 if not self.bare:
267 loc = os.path.join(self.path, '.git', 'hooks')
267 loc = os.path.join(self.path, '.git', 'hooks')
268 return loc
268 return loc
269
269
270 @LazyProperty
270 @LazyProperty
271 def last_change(self):
271 def last_change(self):
272 """
272 """
273 Returns last change made on this repository as
273 Returns last change made on this repository as
274 `datetime.datetime` object.
274 `datetime.datetime` object.
275 """
275 """
276 try:
276 try:
277 return self.get_commit().date
277 return self.get_commit().date
278 except RepositoryError:
278 except RepositoryError:
279 tzoffset = makedate()[1]
279 tzoffset = makedate()[1]
280 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
280 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
281
281
282 def _get_fs_mtime(self):
282 def _get_fs_mtime(self):
283 idx_loc = '' if self.bare else '.git'
283 idx_loc = '' if self.bare else '.git'
284 # fallback to filesystem
284 # fallback to filesystem
285 in_path = os.path.join(self.path, idx_loc, "index")
285 in_path = os.path.join(self.path, idx_loc, "index")
286 he_path = os.path.join(self.path, idx_loc, "HEAD")
286 he_path = os.path.join(self.path, idx_loc, "HEAD")
287 if os.path.exists(in_path):
287 if os.path.exists(in_path):
288 return os.stat(in_path).st_mtime
288 return os.stat(in_path).st_mtime
289 else:
289 else:
290 return os.stat(he_path).st_mtime
290 return os.stat(he_path).st_mtime
291
291
292 @LazyProperty
292 @LazyProperty
293 def description(self):
293 def description(self):
294 description = self._remote.get_description()
294 description = self._remote.get_description()
295 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
295 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
296
296
297 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
297 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
298 if self.is_empty():
298 if self.is_empty():
299 return OrderedDict()
299 return OrderedDict()
300
300
301 result = []
301 result = []
302 for ref, sha in self._refs.iteritems():
302 for ref, sha in self._refs.iteritems():
303 if ref.startswith(prefix):
303 if ref.startswith(prefix):
304 ref_name = ref
304 ref_name = ref
305 if strip_prefix:
305 if strip_prefix:
306 ref_name = ref[len(prefix):]
306 ref_name = ref[len(prefix):]
307 result.append((safe_unicode(ref_name), sha))
307 result.append((safe_unicode(ref_name), sha))
308
308
309 def get_name(entry):
309 def get_name(entry):
310 return entry[0]
310 return entry[0]
311
311
312 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
312 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
313
313
314 def _get_branches(self):
314 def _get_branches(self):
315 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
315 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
316
316
317 @LazyProperty
317 @LazyProperty
318 def branches(self):
318 def branches(self):
319 return self._get_branches()
319 return self._get_branches()
320
320
321 @LazyProperty
321 @LazyProperty
322 def branches_closed(self):
322 def branches_closed(self):
323 return {}
323 return {}
324
324
325 @LazyProperty
325 @LazyProperty
326 def branches_all(self):
326 def branches_all(self):
327 all_branches = {}
327 all_branches = {}
328 all_branches.update(self.branches)
328 all_branches.update(self.branches)
329 all_branches.update(self.branches_closed)
329 all_branches.update(self.branches_closed)
330 return all_branches
330 return all_branches
331
331
332 @LazyProperty
332 @LazyProperty
333 def tags(self):
333 def tags(self):
334 return self._get_tags()
334 return self._get_tags()
335
335
336 def _get_tags(self):
336 def _get_tags(self):
337 return self._get_refs_entries(
337 return self._get_refs_entries(
338 prefix='refs/tags/', strip_prefix=True, reverse=True)
338 prefix='refs/tags/', strip_prefix=True, reverse=True)
339
339
    def tag(self, name, user, commit_id=None, message=None, date=None,
            **kwargs):
        # TODO: fix this method to apply annotated tags correct with message
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)
        commit = self.get_commit(commit_id=commit_id)
        message = message or "Added tag %s for commit %s" % (
            name, commit.raw_id)
        # only a plain ref is written; `message`, `user` and `date` are
        # not stored on the tag itself (see TODO above)
        self._remote.set_refs('refs/tags/%s' % name, commit._commit['id'])

        # refresh caches made stale by the new ref; note this replaces the
        # LazyProperty-computed values with plain instance attributes
        self._refs = self._get_refs()
        self.tags = self._get_tags()
        return commit
364
364
365 def remove_tag(self, name, user, message=None, date=None):
365 def remove_tag(self, name, user, message=None, date=None):
366 """
366 """
367 Removes tag with the given ``name``.
367 Removes tag with the given ``name``.
368
368
369 :param name: name of the tag to be removed
369 :param name: name of the tag to be removed
370 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
370 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
371 :param message: message of the tag's removal commit
371 :param message: message of the tag's removal commit
372 :param date: date of tag's removal commit
372 :param date: date of tag's removal commit
373
373
374 :raises TagDoesNotExistError: if tag with given name does not exists
374 :raises TagDoesNotExistError: if tag with given name does not exists
375 """
375 """
376 if name not in self.tags:
376 if name not in self.tags:
377 raise TagDoesNotExistError("Tag %s does not exist" % name)
377 raise TagDoesNotExistError("Tag %s does not exist" % name)
378 tagpath = vcspath.join(
378 tagpath = vcspath.join(
379 self._remote.get_refs_path(), 'refs', 'tags', name)
379 self._remote.get_refs_path(), 'refs', 'tags', name)
380 try:
380 try:
381 os.remove(tagpath)
381 os.remove(tagpath)
382 self._refs = self._get_refs()
382 self._refs = self._get_refs()
383 self.tags = self._get_tags()
383 self.tags = self._get_tags()
384 except OSError as e:
384 except OSError as e:
385 raise RepositoryError(e.strerror)
385 raise RepositoryError(e.strerror)
386
386
387 def _get_refs(self):
387 def _get_refs(self):
388 return self._remote.get_refs()
388 return self._remote.get_refs()
389
389
390 @LazyProperty
390 @LazyProperty
391 def _refs(self):
391 def _refs(self):
392 return self._get_refs()
392 return self._get_refs()
393
393
394 @property
394 @property
395 def _ref_tree(self):
395 def _ref_tree(self):
396 node = tree = {}
396 node = tree = {}
397 for ref, sha in self._refs.iteritems():
397 for ref, sha in self._refs.iteritems():
398 path = ref.split('/')
398 path = ref.split('/')
399 for bit in path[:-1]:
399 for bit in path[:-1]:
400 node = node.setdefault(bit, {})
400 node = node.setdefault(bit, {})
401 node[path[-1]] = sha
401 node[path[-1]] = sha
402 node = tree
402 node = tree
403 return tree
403 return tree
404
404
405 def get_remote_ref(self, ref_name):
405 def get_remote_ref(self, ref_name):
406 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
406 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
407 try:
407 try:
408 return self._refs[ref_key]
408 return self._refs[ref_key]
409 except Exception:
409 except Exception:
410 return
410 return
411
411
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
        """
        Returns `GitCommit` object representing commit from git repository
        at the given `commit_id` or head (most recent commit) if None given.

        :param commit_id: commit id (sha, branch/tag name, ref path or
            "latest" alias), resolved via ``_get_commit_id``
        :param commit_idx: numeric index into ``commit_ids``; only used
            when ``commit_id`` is not given
        :param pre_load: optional list of commit attributes to pre-load
        :raises RepositoryError: when the id cannot be resolved to a
            known object
        """
        if commit_id is not None:
            self._validate_commit_id(commit_id)
        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            # indexes take the same resolution path as ids below
            commit_id = commit_idx
        commit_id = self._get_commit_id(commit_id)
        try:
            # Need to call remote to translate id for tagging scenario
            commit_id = self._remote.get_object(commit_id)["commit_id"]
            idx = self._commit_ids[commit_id]
        except KeyError:
            raise RepositoryError("Cannot get object with id %s" % commit_id)

        return GitCommit(self, commit_id, idx, pre_load=pre_load)
431
431
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None):
        """
        Returns generator of `GitCommit` objects from start to end (both
        are inclusive), in ascending date order.

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
          branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
          Mercurial evolve
        :raise BranchDoesNotExistError: If given `branch_name` does not
            exist.
        :raise CommitDoesNotExistError: If commits for given `start` or
            `end` could not be found.

        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        if start_id is not None:
            self._validate_commit_id(start_id)
        if end_id is not None:
            self._validate_commit_id(end_id)

        # translate ids into positions within the full commit_ids list so
        # the result can be sliced later
        start_raw_id = self._get_commit_id(start_id)
        start_pos = self._commit_ids[start_raw_id] if start_id else None
        end_raw_id = self._get_commit_id(end_id)
        end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            # make the slice end inclusive
            end_pos += 1

        filter_ = []
        if branch_name:
            filter_.append({'branch_name': branch_name})
        if start_date and not end_date:
            filter_.append({'since': start_date})
        if end_date and not start_date:
            filter_.append({'until': end_date})
        if start_date and end_date:
            filter_.append({'since': start_date})
            filter_.append({'until': end_date})

        # if start_pos or end_pos:
        #    filter_.append({'start': start_pos})
        #    filter_.append({'end': end_pos})

        if filter_:
            # delegate the date/branch filtering to `git rev-list`
            revfilters = {
                'branch_name': branch_name,
                'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
                'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
                'start': start_pos,
                'end': end_pos,
            }
            commit_ids = self._get_all_commit_ids(filters=revfilters)

            # pure python stuff, it's slow due to walker walking whole repo
            # def get_revs(walker):
            #     for walker_entry in walker:
            #         yield walker_entry.commit.id
            # revfilters = {}
            # commit_ids = list(reversed(list(get_revs(self._repo.get_walker(**revfilters)))))
        else:
            commit_ids = self.commit_ids

        if start_pos or end_pos:
            # NOTE(review): start_pos == 0 is falsy and skips the slice,
            # which is harmless since [None:end] equals [0:end]
            commit_ids = commit_ids[start_pos: end_pos]

        return CollectionGenerator(self, commit_ids, pre_load=pre_load)
515
515
516 def get_diff(
516 def get_diff(
517 self, commit1, commit2, path='', ignore_whitespace=False,
517 self, commit1, commit2, path='', ignore_whitespace=False,
518 context=3, path1=None):
518 context=3, path1=None):
519 """
519 """
520 Returns (git like) *diff*, as plain text. Shows changes introduced by
520 Returns (git like) *diff*, as plain text. Shows changes introduced by
521 ``commit2`` since ``commit1``.
521 ``commit2`` since ``commit1``.
522
522
523 :param commit1: Entry point from which diff is shown. Can be
523 :param commit1: Entry point from which diff is shown. Can be
524 ``self.EMPTY_COMMIT`` - in this case, patch showing all
524 ``self.EMPTY_COMMIT`` - in this case, patch showing all
525 the changes since empty state of the repository until ``commit2``
525 the changes since empty state of the repository until ``commit2``
526 :param commit2: Until which commits changes should be shown.
526 :param commit2: Until which commits changes should be shown.
527 :param ignore_whitespace: If set to ``True``, would not show whitespace
527 :param ignore_whitespace: If set to ``True``, would not show whitespace
528 changes. Defaults to ``False``.
528 changes. Defaults to ``False``.
529 :param context: How many lines before/after changed lines should be
529 :param context: How many lines before/after changed lines should be
530 shown. Defaults to ``3``.
530 shown. Defaults to ``3``.
531 """
531 """
532 self._validate_diff_commits(commit1, commit2)
532 self._validate_diff_commits(commit1, commit2)
533 if path1 is not None and path1 != path:
533 if path1 is not None and path1 != path:
534 raise ValueError("Diff of two different paths not supported.")
534 raise ValueError("Diff of two different paths not supported.")
535
535
536 flags = [
536 flags = [
537 '-U%s' % context, '--full-index', '--binary', '-p',
537 '-U%s' % context, '--full-index', '--binary', '-p',
538 '-M', '--abbrev=40']
538 '-M', '--abbrev=40']
539 if ignore_whitespace:
539 if ignore_whitespace:
540 flags.append('-w')
540 flags.append('-w')
541
541
542 if commit1 == self.EMPTY_COMMIT:
542 if commit1 == self.EMPTY_COMMIT:
543 cmd = ['show'] + flags + [commit2.raw_id]
543 cmd = ['show'] + flags + [commit2.raw_id]
544 else:
544 else:
545 cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]
545 cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]
546
546
547 if path:
547 if path:
548 cmd.extend(['--', path])
548 cmd.extend(['--', path])
549
549
550 stdout, __ = self.run_git_command(cmd)
550 stdout, __ = self.run_git_command(cmd)
551 # If we used 'show' command, strip first few lines (until actual diff
551 # If we used 'show' command, strip first few lines (until actual diff
552 # starts)
552 # starts)
553 if commit1 == self.EMPTY_COMMIT:
553 if commit1 == self.EMPTY_COMMIT:
554 lines = stdout.splitlines()
554 lines = stdout.splitlines()
555 x = 0
555 x = 0
556 for line in lines:
556 for line in lines:
557 if line.startswith('diff'):
557 if line.startswith('diff'):
558 break
558 break
559 x += 1
559 x += 1
560 # Append new line just like 'diff' command do
560 # Append new line just like 'diff' command do
561 stdout = '\n'.join(lines[x:]) + '\n'
561 stdout = '\n'.join(lines[x:]) + '\n'
562 return GitDiff(stdout)
562 return GitDiff(stdout)
563
563
564 def strip(self, commit_id, branch_name):
564 def strip(self, commit_id, branch_name):
565 commit = self.get_commit(commit_id=commit_id)
565 commit = self.get_commit(commit_id=commit_id)
566 if commit.merge:
566 if commit.merge:
567 raise Exception('Cannot reset to merge commit')
567 raise Exception('Cannot reset to merge commit')
568
568
569 # parent is going to be the new head now
569 # parent is going to be the new head now
570 commit = commit.parents[0]
570 commit = commit.parents[0]
571 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
571 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
572
572
573 self.commit_ids = self._get_all_commit_ids()
573 self.commit_ids = self._get_all_commit_ids()
574 self._rebuild_cache(self.commit_ids)
574 self._rebuild_cache(self.commit_ids)
575
575
576 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
576 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
577 if commit_id1 == commit_id2:
577 if commit_id1 == commit_id2:
578 return commit_id1
578 return commit_id1
579
579
580 if self != repo2:
580 if self != repo2:
581 commits = self._remote.get_missing_revs(
581 commits = self._remote.get_missing_revs(
582 commit_id1, commit_id2, repo2.path)
582 commit_id1, commit_id2, repo2.path)
583 if commits:
583 if commits:
584 commit = repo2.get_commit(commits[-1])
584 commit = repo2.get_commit(commits[-1])
585 if commit.parents:
585 if commit.parents:
586 ancestor_id = commit.parents[0].raw_id
586 ancestor_id = commit.parents[0].raw_id
587 else:
587 else:
588 ancestor_id = None
588 ancestor_id = None
589 else:
589 else:
590 # no commits from other repo, ancestor_id is the commit_id2
590 # no commits from other repo, ancestor_id is the commit_id2
591 ancestor_id = commit_id2
591 ancestor_id = commit_id2
592 else:
592 else:
593 output, __ = self.run_git_command(
593 output, __ = self.run_git_command(
594 ['merge-base', commit_id1, commit_id2])
594 ['merge-base', commit_id1, commit_id2])
595 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
595 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
596
596
597 return ancestor_id
597 return ancestor_id
598
598
599 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
599 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
600 repo1 = self
600 repo1 = self
601 ancestor_id = None
601 ancestor_id = None
602
602
603 if commit_id1 == commit_id2:
603 if commit_id1 == commit_id2:
604 commits = []
604 commits = []
605 elif repo1 != repo2:
605 elif repo1 != repo2:
606 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
606 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
607 repo2.path)
607 repo2.path)
608 commits = [
608 commits = [
609 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
609 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
610 for commit_id in reversed(missing_ids)]
610 for commit_id in reversed(missing_ids)]
611 else:
611 else:
612 output, __ = repo1.run_git_command(
612 output, __ = repo1.run_git_command(
613 ['log', '--reverse', '--pretty=format: %H', '-s',
613 ['log', '--reverse', '--pretty=format: %H', '-s',
614 '%s..%s' % (commit_id1, commit_id2)])
614 '%s..%s' % (commit_id1, commit_id2)])
615 commits = [
615 commits = [
616 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
616 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
617 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
617 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
618
618
619 return commits
619 return commits
620
620
    @LazyProperty
    def in_memory_commit(self):
        """
        Returns ``GitInMemoryCommit`` object for this repository.
        """
        # created once per instance; LazyProperty caches the result
        return GitInMemoryCommit(self)
627
627
628 def clone(self, url, update_after_clone=True, bare=False):
628 def clone(self, url, update_after_clone=True, bare=False):
629 """
629 """
630 Tries to clone commits from external location.
630 Tries to clone commits from external location.
631
631
632 :param update_after_clone: If set to ``False``, git won't checkout
632 :param update_after_clone: If set to ``False``, git won't checkout
633 working directory
633 working directory
634 :param bare: If set to ``True``, repository would be cloned into
634 :param bare: If set to ``True``, repository would be cloned into
635 *bare* git repository (no working directory at all).
635 *bare* git repository (no working directory at all).
636 """
636 """
637 # init_bare and init expect empty dir created to proceed
637 # init_bare and init expect empty dir created to proceed
638 if not os.path.exists(self.path):
638 if not os.path.exists(self.path):
639 os.mkdir(self.path)
639 os.mkdir(self.path)
640
640
641 if bare:
641 if bare:
642 self._remote.init_bare()
642 self._remote.init_bare()
643 else:
643 else:
644 self._remote.init()
644 self._remote.init()
645
645
646 deferred = '^{}'
646 deferred = '^{}'
647 valid_refs = ('refs/heads', 'refs/tags', 'HEAD')
647 valid_refs = ('refs/heads', 'refs/tags', 'HEAD')
648
648
649 return self._remote.clone(
649 return self._remote.clone(
650 url, deferred, valid_refs, update_after_clone)
650 url, deferred, valid_refs, update_after_clone)
651
651
    def pull(self, url, commit_ids=None):
        """
        Tries to pull changes from external location. We use fetch here since
        pull in get does merges and we want to be compatible with hg backend so
        pull == fetch in this case

        :param url: location to pull from.
        :param commit_ids: optional list of commit ids; when given, only
            remote refs pointing at one of them are fetched.
        """
        self.fetch(url, commit_ids=commit_ids)
659
659
660 def fetch(self, url, commit_ids=None):
660 def fetch(self, url, commit_ids=None):
661 """
661 """
662 Tries to fetch changes from external location.
662 Tries to fetch changes from external location.
663 """
663 """
664 refs = None
664 refs = None
665
665
666 if commit_ids is not None:
666 if commit_ids is not None:
667 remote_refs = self._remote.get_remote_refs(url)
667 remote_refs = self._remote.get_remote_refs(url)
668 refs = [
668 refs = [
669 ref for ref in remote_refs if remote_refs[ref] in commit_ids]
669 ref for ref in remote_refs if remote_refs[ref] in commit_ids]
670 self._remote.fetch(url, refs=refs)
670 self._remote.fetch(url, refs=refs)
671
671
672 def push(self, url):
673 refs = None
674 self._remote.sync_push(url, refs=refs)
675
    def set_refs(self, ref_name, commit_id):
        """Point ``ref_name`` (e.g. ``refs/heads/x``) at ``commit_id``."""
        self._remote.set_refs(ref_name, commit_id)
674
678
    def remove_ref(self, ref_name):
        """Delete the ref ``ref_name`` from the repository."""
        self._remote.remove_ref(ref_name)
677
681
    def _update_server_info(self):
        """
        runs gits update-server-info command in this repo instance
        """
        # refreshes the auxiliary info files (refs/packs listings) needed by
        # git's dumb transports; see git-update-server-info(1)
        self._remote.update_server_info()
683
687
684 def _current_branch(self):
688 def _current_branch(self):
685 """
689 """
686 Return the name of the current branch.
690 Return the name of the current branch.
687
691
688 It only works for non bare repositories (i.e. repositories with a
692 It only works for non bare repositories (i.e. repositories with a
689 working copy)
693 working copy)
690 """
694 """
691 if self.bare:
695 if self.bare:
692 raise RepositoryError('Bare git repos do not have active branches')
696 raise RepositoryError('Bare git repos do not have active branches')
693
697
694 if self.is_empty():
698 if self.is_empty():
695 return None
699 return None
696
700
697 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
701 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
698 return stdout.strip()
702 return stdout.strip()
699
703
700 def _checkout(self, branch_name, create=False):
704 def _checkout(self, branch_name, create=False):
701 """
705 """
702 Checkout a branch in the working directory.
706 Checkout a branch in the working directory.
703
707
704 It tries to create the branch if create is True, failing if the branch
708 It tries to create the branch if create is True, failing if the branch
705 already exists.
709 already exists.
706
710
707 It only works for non bare repositories (i.e. repositories with a
711 It only works for non bare repositories (i.e. repositories with a
708 working copy)
712 working copy)
709 """
713 """
710 if self.bare:
714 if self.bare:
711 raise RepositoryError('Cannot checkout branches in a bare git repo')
715 raise RepositoryError('Cannot checkout branches in a bare git repo')
712
716
713 cmd = ['checkout']
717 cmd = ['checkout']
714 if create:
718 if create:
715 cmd.append('-b')
719 cmd.append('-b')
716 cmd.append(branch_name)
720 cmd.append(branch_name)
717 self.run_git_command(cmd, fail_on_stderr=False)
721 self.run_git_command(cmd, fail_on_stderr=False)
718
722
719 def _identify(self):
723 def _identify(self):
720 """
724 """
721 Return the current state of the working directory.
725 Return the current state of the working directory.
722 """
726 """
723 if self.bare:
727 if self.bare:
724 raise RepositoryError('Bare git repos do not have active branches')
728 raise RepositoryError('Bare git repos do not have active branches')
725
729
726 if self.is_empty():
730 if self.is_empty():
727 return None
731 return None
728
732
729 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
733 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
730 return stdout.strip()
734 return stdout.strip()
731
735
732 def _local_clone(self, clone_path, branch_name, source_branch=None):
736 def _local_clone(self, clone_path, branch_name, source_branch=None):
733 """
737 """
734 Create a local clone of the current repo.
738 Create a local clone of the current repo.
735 """
739 """
736 # N.B.(skreft): the --branch option is required as otherwise the shallow
740 # N.B.(skreft): the --branch option is required as otherwise the shallow
737 # clone will only fetch the active branch.
741 # clone will only fetch the active branch.
738 cmd = ['clone', '--branch', branch_name,
742 cmd = ['clone', '--branch', branch_name,
739 self.path, os.path.abspath(clone_path)]
743 self.path, os.path.abspath(clone_path)]
740
744
741 self.run_git_command(cmd, fail_on_stderr=False)
745 self.run_git_command(cmd, fail_on_stderr=False)
742
746
743 # if we get the different source branch, make sure we also fetch it for
747 # if we get the different source branch, make sure we also fetch it for
744 # merge conditions
748 # merge conditions
745 if source_branch and source_branch != branch_name:
749 if source_branch and source_branch != branch_name:
746 # check if the ref exists.
750 # check if the ref exists.
747 shadow_repo = GitRepository(os.path.abspath(clone_path))
751 shadow_repo = GitRepository(os.path.abspath(clone_path))
748 if shadow_repo.get_remote_ref(source_branch):
752 if shadow_repo.get_remote_ref(source_branch):
749 cmd = ['fetch', self.path, source_branch]
753 cmd = ['fetch', self.path, source_branch]
750 self.run_git_command(cmd, fail_on_stderr=False)
754 self.run_git_command(cmd, fail_on_stderr=False)
751
755
752 def _local_fetch(self, repository_path, branch_name):
756 def _local_fetch(self, repository_path, branch_name):
753 """
757 """
754 Fetch a branch from a local repository.
758 Fetch a branch from a local repository.
755 """
759 """
756 repository_path = os.path.abspath(repository_path)
760 repository_path = os.path.abspath(repository_path)
757 if repository_path == self.path:
761 if repository_path == self.path:
758 raise ValueError('Cannot fetch from the same repository')
762 raise ValueError('Cannot fetch from the same repository')
759
763
760 cmd = ['fetch', '--no-tags', repository_path, branch_name]
764 cmd = ['fetch', '--no-tags', repository_path, branch_name]
761 self.run_git_command(cmd, fail_on_stderr=False)
765 self.run_git_command(cmd, fail_on_stderr=False)
762
766
763 def _last_fetch_heads(self):
767 def _last_fetch_heads(self):
764 """
768 """
765 Return the last fetched heads that need merging.
769 Return the last fetched heads that need merging.
766
770
767 The algorithm is defined at
771 The algorithm is defined at
768 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
772 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
769 """
773 """
770 if not self.bare:
774 if not self.bare:
771 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
775 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
772 else:
776 else:
773 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
777 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
774
778
775 heads = []
779 heads = []
776 with open(fetch_heads_path) as f:
780 with open(fetch_heads_path) as f:
777 for line in f:
781 for line in f:
778 if ' not-for-merge ' in line:
782 if ' not-for-merge ' in line:
779 continue
783 continue
780 line = re.sub('\t.*', '', line, flags=re.DOTALL)
784 line = re.sub('\t.*', '', line, flags=re.DOTALL)
781 heads.append(line)
785 heads.append(line)
782
786
783 return heads
787 return heads
784
788
    def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
        """Return a ``GitRepository`` opened at ``shadow_repository_path``."""
        # ``enable_hooks`` is accepted for interface compatibility with other
        # backends but not used by the git implementation
        return GitRepository(shadow_repository_path)
787
791
788 def _local_pull(self, repository_path, branch_name):
792 def _local_pull(self, repository_path, branch_name):
789 """
793 """
790 Pull a branch from a local repository.
794 Pull a branch from a local repository.
791 """
795 """
792 if self.bare:
796 if self.bare:
793 raise RepositoryError('Cannot pull into a bare git repository')
797 raise RepositoryError('Cannot pull into a bare git repository')
794 # N.B.(skreft): The --ff-only option is to make sure this is a
798 # N.B.(skreft): The --ff-only option is to make sure this is a
795 # fast-forward (i.e., we are only pulling new changes and there are no
799 # fast-forward (i.e., we are only pulling new changes and there are no
796 # conflicts with our current branch)
800 # conflicts with our current branch)
797 # Additionally, that option needs to go before --no-tags, otherwise git
801 # Additionally, that option needs to go before --no-tags, otherwise git
798 # pull complains about it being an unknown flag.
802 # pull complains about it being an unknown flag.
799 cmd = ['pull', '--ff-only', '--no-tags', repository_path, branch_name]
803 cmd = ['pull', '--ff-only', '--no-tags', repository_path, branch_name]
800 self.run_git_command(cmd, fail_on_stderr=False)
804 self.run_git_command(cmd, fail_on_stderr=False)
801
805
802 def _local_merge(self, merge_message, user_name, user_email, heads):
806 def _local_merge(self, merge_message, user_name, user_email, heads):
803 """
807 """
804 Merge the given head into the checked out branch.
808 Merge the given head into the checked out branch.
805
809
806 It will force a merge commit.
810 It will force a merge commit.
807
811
808 Currently it raises an error if the repo is empty, as it is not possible
812 Currently it raises an error if the repo is empty, as it is not possible
809 to create a merge commit in an empty repo.
813 to create a merge commit in an empty repo.
810
814
811 :param merge_message: The message to use for the merge commit.
815 :param merge_message: The message to use for the merge commit.
812 :param heads: the heads to merge.
816 :param heads: the heads to merge.
813 """
817 """
814 if self.bare:
818 if self.bare:
815 raise RepositoryError('Cannot merge into a bare git repository')
819 raise RepositoryError('Cannot merge into a bare git repository')
816
820
817 if not heads:
821 if not heads:
818 return
822 return
819
823
820 if self.is_empty():
824 if self.is_empty():
821 # TODO(skreft): do somehting more robust in this case.
825 # TODO(skreft): do somehting more robust in this case.
822 raise RepositoryError(
826 raise RepositoryError(
823 'Do not know how to merge into empty repositories yet')
827 'Do not know how to merge into empty repositories yet')
824
828
825 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
829 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
826 # commit message. We also specify the user who is doing the merge.
830 # commit message. We also specify the user who is doing the merge.
827 cmd = ['-c', 'user.name=%s' % safe_str(user_name),
831 cmd = ['-c', 'user.name=%s' % safe_str(user_name),
828 '-c', 'user.email=%s' % safe_str(user_email),
832 '-c', 'user.email=%s' % safe_str(user_email),
829 'merge', '--no-ff', '-m', safe_str(merge_message)]
833 'merge', '--no-ff', '-m', safe_str(merge_message)]
830 cmd.extend(heads)
834 cmd.extend(heads)
831 try:
835 try:
832 self.run_git_command(cmd, fail_on_stderr=False)
836 self.run_git_command(cmd, fail_on_stderr=False)
833 except RepositoryError:
837 except RepositoryError:
834 # Cleanup any merge leftovers
838 # Cleanup any merge leftovers
835 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
839 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
836 raise
840 raise
837
841
838 def _local_push(
842 def _local_push(
839 self, source_branch, repository_path, target_branch,
843 self, source_branch, repository_path, target_branch,
840 enable_hooks=False, rc_scm_data=None):
844 enable_hooks=False, rc_scm_data=None):
841 """
845 """
842 Push the source_branch to the given repository and target_branch.
846 Push the source_branch to the given repository and target_branch.
843
847
844 Currently it if the target_branch is not master and the target repo is
848 Currently it if the target_branch is not master and the target repo is
845 empty, the push will work, but then GitRepository won't be able to find
849 empty, the push will work, but then GitRepository won't be able to find
846 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
850 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
847 pointing to master, which does not exist).
851 pointing to master, which does not exist).
848
852
849 It does not run the hooks in the target repo.
853 It does not run the hooks in the target repo.
850 """
854 """
851 # TODO(skreft): deal with the case in which the target repo is empty,
855 # TODO(skreft): deal with the case in which the target repo is empty,
852 # and the target_branch is not master.
856 # and the target_branch is not master.
853 target_repo = GitRepository(repository_path)
857 target_repo = GitRepository(repository_path)
854 if (not target_repo.bare and
858 if (not target_repo.bare and
855 target_repo._current_branch() == target_branch):
859 target_repo._current_branch() == target_branch):
856 # Git prevents pushing to the checked out branch, so simulate it by
860 # Git prevents pushing to the checked out branch, so simulate it by
857 # pulling into the target repository.
861 # pulling into the target repository.
858 target_repo._local_pull(self.path, source_branch)
862 target_repo._local_pull(self.path, source_branch)
859 else:
863 else:
860 cmd = ['push', os.path.abspath(repository_path),
864 cmd = ['push', os.path.abspath(repository_path),
861 '%s:%s' % (source_branch, target_branch)]
865 '%s:%s' % (source_branch, target_branch)]
862 gitenv = {}
866 gitenv = {}
863 if rc_scm_data:
867 if rc_scm_data:
864 gitenv.update({'RC_SCM_DATA': rc_scm_data})
868 gitenv.update({'RC_SCM_DATA': rc_scm_data})
865
869
866 if not enable_hooks:
870 if not enable_hooks:
867 gitenv['RC_SKIP_HOOKS'] = '1'
871 gitenv['RC_SKIP_HOOKS'] = '1'
868 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
872 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
869
873
870 def _get_new_pr_branch(self, source_branch, target_branch):
874 def _get_new_pr_branch(self, source_branch, target_branch):
871 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
875 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
872 pr_branches = []
876 pr_branches = []
873 for branch in self.branches:
877 for branch in self.branches:
874 if branch.startswith(prefix):
878 if branch.startswith(prefix):
875 pr_branches.append(int(branch[len(prefix):]))
879 pr_branches.append(int(branch[len(prefix):]))
876
880
877 if not pr_branches:
881 if not pr_branches:
878 branch_id = 0
882 branch_id = 0
879 else:
883 else:
880 branch_id = max(pr_branches) + 1
884 branch_id = max(pr_branches) + 1
881
885
882 return '%s%d' % (prefix, branch_id)
886 return '%s%d' % (prefix, branch_id)
883
887
    def _merge_repo(self, shadow_repository_path, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Merge ``source_ref`` into ``target_ref`` inside the shadow
        repository and, unless ``dry_run`` is set, push the result back to
        this repository. Returns a ``MergeResponse`` with the merge
        possibility, success flag, merge reference and failure reason.

        NOTE(review): ``use_rebase`` and ``close_branch`` are accepted but
        not referenced in this body — presumably for interface compatibility
        with other backends; confirm against callers.
        """
        # bail out early if the target branch moved since the merge request
        if target_ref.commit_id != self.branches[target_ref.name]:
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)

        shadow_repo = GitRepository(shadow_repository_path)
        # checkout source, if it's different. Otherwise we could not
        # fetch proper commits for merge testing
        if source_ref.name != target_ref.name:
            if shadow_repo.get_remote_ref(source_ref.name):
                shadow_repo._checkout(source_ref.name)

        # checkout target
        shadow_repo._checkout(target_ref.name)
        shadow_repo._local_pull(self.path, target_ref.name)

        # Need to reload repo to invalidate the cache, or otherwise we cannot
        # retrieve the last target commit.
        shadow_repo = GitRepository(shadow_repository_path)
        # re-check after pulling: the shadow target head must still match
        if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)

        pr_branch = shadow_repo._get_new_pr_branch(
            source_ref.name, target_ref.name)
        shadow_repo._checkout(pr_branch, create=True)
        try:
            shadow_repo._local_fetch(source_repo.path, source_ref.name)
        except RepositoryError:
            log.exception('Failure when doing local fetch on git shadow repo')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF)

        merge_ref = None
        merge_failure_reason = MergeFailureReason.NONE
        try:
            shadow_repo._local_merge(merge_message, merger_name, merger_email,
                                     [source_ref.commit_id])
            merge_possible = True

            # Need to reload repo to invalidate the cache, or otherwise we
            # cannot retrieve the merge commit.
            shadow_repo = GitRepository(shadow_repository_path)
            merge_commit_id = shadow_repo.branches[pr_branch]

            # Set a reference pointing to the merge commit. This reference may
            # be used to easily identify the last successful merge commit in
            # the shadow repository.
            shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
            merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
        except RepositoryError:
            log.exception('Failure when doing local merge on git shadow repo')
            merge_possible = False
            merge_failure_reason = MergeFailureReason.MERGE_FAILED

        if merge_possible and not dry_run:
            try:
                shadow_repo._local_push(
                    pr_branch, self.path, target_ref.name, enable_hooks=True,
                    rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
                merge_succeeded = True
            except RepositoryError:
                log.exception(
                    'Failure when doing local push on git shadow repo')
                merge_succeeded = False
                merge_failure_reason = MergeFailureReason.PUSH_FAILED
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref,
            merge_failure_reason)
959
963
960 def _get_shadow_repository_path(self, workspace_id):
964 def _get_shadow_repository_path(self, workspace_id):
961 # The name of the shadow repository must start with '.', so it is
965 # The name of the shadow repository must start with '.', so it is
962 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
966 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
963 return os.path.join(
967 return os.path.join(
964 os.path.dirname(self.path),
968 os.path.dirname(self.path),
965 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
969 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
966
970
967 def _maybe_prepare_merge_workspace(self, workspace_id, target_ref, source_ref):
971 def _maybe_prepare_merge_workspace(self, workspace_id, target_ref, source_ref):
968 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
972 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
969 if not os.path.exists(shadow_repository_path):
973 if not os.path.exists(shadow_repository_path):
970 self._local_clone(
974 self._local_clone(
971 shadow_repository_path, target_ref.name, source_ref.name)
975 shadow_repository_path, target_ref.name, source_ref.name)
972
976
973 return shadow_repository_path
977 return shadow_repository_path
974
978
975 def cleanup_merge_workspace(self, workspace_id):
979 def cleanup_merge_workspace(self, workspace_id):
976 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
980 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
977 shutil.rmtree(shadow_repository_path, ignore_errors=True)
981 shutil.rmtree(shadow_repository_path, ignore_errors=True)
@@ -1,889 +1,893 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG repository module
22 HG repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import binascii
26 import binascii
27 import os
27 import os
28 import shutil
28 import shutil
29 import urllib
29 import urllib
30
30
31 from zope.cachedescriptors.property import Lazy as LazyProperty
31 from zope.cachedescriptors.property import Lazy as LazyProperty
32
32
33 from rhodecode.lib.compat import OrderedDict
33 from rhodecode.lib.compat import OrderedDict
34 from rhodecode.lib.datelib import (
34 from rhodecode.lib.datelib import (
35 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate,
35 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate,
36 date_astimestamp)
36 date_astimestamp)
37 from rhodecode.lib.utils import safe_unicode, safe_str
37 from rhodecode.lib.utils import safe_unicode, safe_str
38 from rhodecode.lib.vcs import connection
38 from rhodecode.lib.vcs import connection
39 from rhodecode.lib.vcs.backends.base import (
39 from rhodecode.lib.vcs.backends.base import (
40 BaseRepository, CollectionGenerator, Config, MergeResponse,
40 BaseRepository, CollectionGenerator, Config, MergeResponse,
41 MergeFailureReason, Reference)
41 MergeFailureReason, Reference)
42 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
42 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
43 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
43 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
44 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
44 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
45 from rhodecode.lib.vcs.exceptions import (
45 from rhodecode.lib.vcs.exceptions import (
46 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
46 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
47 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
47 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
48
48
49 hexlify = binascii.hexlify
49 hexlify = binascii.hexlify
50 nullid = "\0" * 20
50 nullid = "\0" * 20
51
51
52 log = logging.getLogger(__name__)
52 log = logging.getLogger(__name__)
53
53
54
54
55 class MercurialRepository(BaseRepository):
55 class MercurialRepository(BaseRepository):
56 """
56 """
57 Mercurial repository backend
57 Mercurial repository backend
58 """
58 """
59 DEFAULT_BRANCH_NAME = 'default'
59 DEFAULT_BRANCH_NAME = 'default'
60
60
def __init__(self, repo_path, config=None, create=False, src_url=None,
             update_after_clone=False, with_wire=None):
    """
    Open (or create) the Mercurial repository at ``repo_path``.

    :param repo_path: local path of the repository
    :param config: config object with the repo configuration; a fresh
        ``Config`` is used when omitted
    :param create: when ``True``, try to create the repository instead
        of raising if it does not exist
    :param src_url: optional location to clone the repository from
    :param update_after_clone: update the working copy after cloning
    :param with_wire: extra options for the vcsserver connection

    :raises RepositoryError: if no repository can be found at
        ``repo_path`` and ``create`` is not set
    """
    self.path = safe_str(os.path.abspath(repo_path))
    self.config = config if config else Config()
    self._remote = connection.Hg(
        self.path, self.config, with_wire=with_wire)

    self._init_repo(create, src_url, update_after_clone)

    # commit_id -> index cache; rebuilt lazily via ``commit_ids``
    self._commit_ids = {}
84
84
@LazyProperty
def commit_ids(self):
    """
    List of all commit ids, ascending.

    Implemented as a lazy attribute so external tools may inject shas
    from a cache before first access.
    """
    all_ids = self._get_all_commit_ids()
    self._rebuild_cache(all_ids)
    return all_ids
94
94
95 def _rebuild_cache(self, commit_ids):
95 def _rebuild_cache(self, commit_ids):
96 self._commit_ids = dict((commit_id, index)
96 self._commit_ids = dict((commit_id, index)
97 for index, commit_id in enumerate(commit_ids))
97 for index, commit_id in enumerate(commit_ids))
98
98
@LazyProperty
def branches(self):
    """Active (not closed) branches of this repository."""
    return self._get_branches()

@LazyProperty
def branches_closed(self):
    """Closed branches of this repository."""
    return self._get_branches(active=False, closed=True)

@LazyProperty
def branches_all(self):
    """All branches, active and closed, merged into one mapping."""
    combined = {}
    combined.update(self.branches)
    combined.update(self.branches_closed)
    return combined

def _get_branches(self, active=True, closed=False):
    """
    Return an ordered mapping of branch name -> commit id.

    By default only not-closed, active branches are returned.

    :param active: include active branches
    :param closed: include closed branches
    """
    if self.is_empty():
        return {}

    entries = [(safe_unicode(name), hexlify(sha),) for name, sha in
               self._remote.branches(active, closed).items()]

    # sorted by branch name, ascending
    return OrderedDict(sorted(entries, key=lambda item: item[0]))
133
133
@LazyProperty
def tags(self):
    """Ordered mapping of tag name -> commit id for this repository."""
    return self._get_tags()

def _get_tags(self):
    if self.is_empty():
        return {}

    entries = [(safe_unicode(name), hexlify(sha),) for name, sha in
               self._remote.tags().items()]

    # sorted by tag name, descending
    return OrderedDict(
        sorted(entries, key=lambda item: item[0], reverse=True))
152
152
def tag(self, name, user, commit_id=None, message=None, date=None,
        **kwargs):
    """
    Create a tag ``name`` pointing at ``commit_id`` and return the
    tagged commit.

    :param name: name for new tag
    :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
    :param commit_id: commit id for which new tag would be created
    :param message: message of the tag's commit
    :param date: date of tag's commit

    :raises TagAlreadyExistError: if tag with same name already exists
    """
    if name in self.tags:
        raise TagAlreadyExistError("Tag %s already exists" % name)
    commit = self.get_commit(commit_id=commit_id)
    local = kwargs.setdefault('local', False)

    if message is None:
        message = "Added tag %s for commit %s" % (name, commit.short_id)

    date, tz = date_to_timestamp_plus_offset(date)

    self._remote.tag(
        name, commit.raw_id, message, local, user, date, tz)
    self._remote.invalidate_vcs_cache()

    # refresh cached tags so the new one is visible
    self.tags = self._get_tags()
    tag_id = self.tags[name]

    return self.get_commit(commit_id=tag_id)
185
185
def remove_tag(self, name, user, message=None, date=None):
    """
    Remove the tag ``name`` from the repository.

    :param name: name of the tag to be removed
    :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
    :param message: message of the tag's removal commit
    :param date: date of tag's removal commit

    :raises TagDoesNotExistError: if tag with given name does not exists
    """
    if name not in self.tags:
        raise TagDoesNotExistError("Tag %s does not exist" % name)
    if message is None:
        message = "Removed tag %s" % name

    date, tz = date_to_timestamp_plus_offset(date)

    # tagging the null revision removes the tag; never local-only here
    self._remote.tag(name, nullid, message, False, user, date, tz)
    self._remote.invalidate_vcs_cache()
    self.tags = self._get_tags()
208
208
@LazyProperty
def bookmarks(self):
    """Ordered mapping of bookmark name -> commit id."""
    return self._get_bookmarks()

def _get_bookmarks(self):
    if self.is_empty():
        return {}

    entries = [
        (safe_unicode(name), hexlify(sha)) for name, sha in
        self._remote.bookmarks().items()]

    # sorted by bookmark name, ascending
    return OrderedDict(sorted(entries, key=lambda item: item[0]))
228
228
def _get_all_commit_ids(self):
    # only commits visible to the user (filters hidden/obsolete ones)
    return self._remote.get_all_commit_ids('visible')
231
231
def get_diff(
        self, commit1, commit2, path='', ignore_whitespace=False,
        context=3, path1=None):
    """
    Return a (git-like) plain-text diff of the changes introduced by
    ``commit2`` since ``commit1``.

    :param commit1: Entry point from which diff is shown. Can be
        ``self.EMPTY_COMMIT`` - in this case, patch showing all
        the changes since empty state of the repository until ``commit2``
    :param commit2: Until which commit changes should be shown.
    :param path: limit the diff to this path
    :param ignore_whitespace: If set to ``True``, would not show whitespace
        changes. Defaults to ``False``.
    :param context: How many lines before/after changed lines should be
        shown. Defaults to ``3``.
    :param path1: must equal ``path`` when given; diffing two
        different paths is not supported

    :raises ValueError: when ``path1`` differs from ``path``
    """
    self._validate_diff_commits(commit1, commit2)
    if path1 is not None and path1 != path:
        raise ValueError("Diff of two different paths not supported.")

    file_filter = [self.path, path] if path else None

    raw_diff = self._remote.diff(
        commit1.raw_id, commit2.raw_id, file_filter=file_filter,
        opt_git=True, opt_ignorews=ignore_whitespace,
        context=context)
    return MercurialDiff(raw_diff)
262
262
def strip(self, commit_id, branch=None):
    """Strip ``commit_id`` and its descendants from the repository."""
    self._remote.strip(commit_id, update=False, backup="none")

    self._remote.invalidate_vcs_cache()
    # stripping invalidates both the id list and the id -> idx map
    self.commit_ids = self._get_all_commit_ids()
    self._rebuild_cache(self.commit_ids)
269
269
def verify(self):
    """Run repository integrity verification and return its result."""
    result = self._remote.verify()

    self._remote.invalidate_vcs_cache()
    return result
275
275
def get_common_ancestor(self, commit_id1, commit_id2, repo2):
    """
    Return the raw id of the common ancestor of the two commits,
    resolving ``commit_id2`` against ``repo2``; ``None`` when the
    commits share no ancestor.
    """
    if commit_id1 == commit_id2:
        return commit_id1

    ancestors = self._remote.revs_from_revspec(
        "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
        other_path=repo2.path)
    if not ancestors:
        return None
    return repo2[ancestors[0]].raw_id
284
284
def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
    """
    Return the commits of ``repo2`` reachable from ``commit_id2`` but
    not from ``commit_id1``.

    :param merge: when True, select all ancestors of ``commit_id2``
        not reachable from ``commit_id1`` (merge preview); otherwise
        use a plain revision range
    """
    if commit_id1 == commit_id2:
        return []

    if merge:
        indexes = self._remote.revs_from_revspec(
            "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
            commit_id2, commit_id1, commit_id1, other_path=repo2.path)
    else:
        indexes = self._remote.revs_from_revspec(
            "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
            commit_id1, other_path=repo2.path)

    return [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
            for idx in indexes]
302
302
303 @staticmethod
303 @staticmethod
304 def check_url(url, config):
304 def check_url(url, config):
305 """
305 """
306 Function will check given url and try to verify if it's a valid
306 Function will check given url and try to verify if it's a valid
307 link. Sometimes it may happened that mercurial will issue basic
307 link. Sometimes it may happened that mercurial will issue basic
308 auth request that can cause whole API to hang when used from python
308 auth request that can cause whole API to hang when used from python
309 or other external calls.
309 or other external calls.
310
310
311 On failures it'll raise urllib2.HTTPError, exception is also thrown
311 On failures it'll raise urllib2.HTTPError, exception is also thrown
312 when the return code is non 200
312 when the return code is non 200
313 """
313 """
314 # check first if it's not an local url
314 # check first if it's not an local url
315 if os.path.isdir(url) or url.startswith('file:'):
315 if os.path.isdir(url) or url.startswith('file:'):
316 return True
316 return True
317
317
318 # Request the _remote to verify the url
318 # Request the _remote to verify the url
319 return connection.Hg.check_url(url, config.serialize())
319 return connection.Hg.check_url(url, config.serialize())
320
320
321 @staticmethod
321 @staticmethod
322 def is_valid_repository(path):
322 def is_valid_repository(path):
323 return os.path.isdir(os.path.join(path, '.hg'))
323 return os.path.isdir(os.path.join(path, '.hg'))
324
324
325 def _init_repo(self, create, src_url=None, update_after_clone=False):
325 def _init_repo(self, create, src_url=None, update_after_clone=False):
326 """
326 """
327 Function will check for mercurial repository in given path. If there
327 Function will check for mercurial repository in given path. If there
328 is no repository in that path it will raise an exception unless
328 is no repository in that path it will raise an exception unless
329 `create` parameter is set to True - in that case repository would
329 `create` parameter is set to True - in that case repository would
330 be created.
330 be created.
331
331
332 If `src_url` is given, would try to clone repository from the
332 If `src_url` is given, would try to clone repository from the
333 location at given clone_point. Additionally it'll make update to
333 location at given clone_point. Additionally it'll make update to
334 working copy accordingly to `update_after_clone` flag.
334 working copy accordingly to `update_after_clone` flag.
335 """
335 """
336 if create and os.path.exists(self.path):
336 if create and os.path.exists(self.path):
337 raise RepositoryError(
337 raise RepositoryError(
338 "Cannot create repository at %s, location already exist"
338 "Cannot create repository at %s, location already exist"
339 % self.path)
339 % self.path)
340
340
341 if src_url:
341 if src_url:
342 url = str(self._get_url(src_url))
342 url = str(self._get_url(src_url))
343 MercurialRepository.check_url(url, self.config)
343 MercurialRepository.check_url(url, self.config)
344
344
345 self._remote.clone(url, self.path, update_after_clone)
345 self._remote.clone(url, self.path, update_after_clone)
346
346
347 # Don't try to create if we've already cloned repo
347 # Don't try to create if we've already cloned repo
348 create = False
348 create = False
349
349
350 if create:
350 if create:
351 os.makedirs(self.path, mode=0755)
351 os.makedirs(self.path, mode=0755)
352
352
353 self._remote.localrepository(create)
353 self._remote.localrepository(create)
354
354
@LazyProperty
def in_memory_commit(self):
    """In-memory commit object bound to this repository."""
    return MercurialInMemoryCommit(self)

@LazyProperty
def description(self):
    """Repository description from hgrc, or the backend default."""
    desc = self._remote.get_config_value(
        'web', 'description', untrusted=True)
    return safe_unicode(desc or self.DEFAULT_DESCRIPTION)

@LazyProperty
def contact(self):
    """Repository contact from hgrc, or the backend default."""
    value = (
        self._remote.get_config_value("web", "contact") or
        self._remote.get_config_value("ui", "username"))
    return safe_unicode(value or self.DEFAULT_CONTACT)
371
371
@LazyProperty
def last_change(self):
    """
    Date of the last change made on this repository as a
    ``datetime.datetime`` object.
    """
    try:
        return self.get_commit().date
    except RepositoryError:
        # empty repository: fall back to filesystem timestamps
        tzoffset = makedate()[1]
        return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)

def _get_fs_mtime(self):
    # mtime of the changelog, or of the store when no changelog exists
    changelog_path = os.path.join(self.path, '.hg', "00changelog.i")
    store_path = os.path.join(self.path, '.hg', "store")
    if os.path.exists(changelog_path):
        return os.stat(changelog_path).st_mtime
    return os.stat(store_path).st_mtime
392
392
def _sanitize_commit_idx(self, idx):
    """
    Widen a plain ``int`` index to ``long``.

    Mercurial has ``int(-1)`` reserved as the not-existing id_or_idx
    number, while a ``long`` is treated in the correct way, so plain
    ints are converted here.
    """
    if isinstance(idx, int):
        return long(idx)
    return idx
400
400
def _get_url(self, url):
    """
    Return a normalized url: a bare filesystem path becomes a
    ``file:`` url, everything else passes through unchanged.
    """
    url = url.encode('utf8')
    # 'default' is hg's symbolic remote name, not a path
    if url != 'default' and '://' not in url:
        url = "file:" + urllib.pathname2url(url)
    return url
411
411
def get_hook_location(self):
    """Absolute path of the file where repository hooks are configured."""
    return os.path.join(self.path, '.hg', '.hgrc')
417
417
def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
    """
    Return the ``MercurialCommit`` at the given ``commit_id`` or
    ``commit_idx``; the repository tip when neither is given.

    :raises EmptyRepositoryError: when the repository has no commits
    :raises CommitDoesNotExistError: when no such commit exists
    """
    if self.is_empty():
        raise EmptyRepositoryError("There are no commits yet")

    if commit_id is not None:
        self._validate_commit_id(commit_id)
        try:
            # fast path: resolve through the id -> index cache
            idx = self._commit_ids[commit_id]
            return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
        except KeyError:
            pass
    elif commit_idx is not None:
        self._validate_commit_idx(commit_idx)
        commit_idx = self._sanitize_commit_idx(commit_idx)
        try:
            id_ = self.commit_ids[commit_idx]
            if commit_idx < 0:
                # normalize negative indexes to absolute positions
                commit_idx += len(self.commit_ids)
            return MercurialCommit(
                self, id_, commit_idx, pre_load=pre_load)
        except IndexError:
            # fall through and let the remote try to resolve it
            commit_id = commit_idx
    else:
        commit_id = "tip"

    # TODO Paris: Ugly hack to "serialize" long for msgpack
    if isinstance(commit_id, long):
        commit_id = float(commit_id)

    if isinstance(commit_id, unicode):
        commit_id = safe_str(commit_id)

    try:
        raw_id, idx = self._remote.lookup(commit_id, both=True)
    except CommitDoesNotExistError:
        raise CommitDoesNotExistError(
            "Commit %s does not exist for %s" % (commit_id, self))

    return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
462
462
463 def get_commits(
463 def get_commits(
464 self, start_id=None, end_id=None, start_date=None, end_date=None,
464 self, start_id=None, end_id=None, start_date=None, end_date=None,
465 branch_name=None, show_hidden=False, pre_load=None):
465 branch_name=None, show_hidden=False, pre_load=None):
466 """
466 """
467 Returns generator of ``MercurialCommit`` objects from start to end
467 Returns generator of ``MercurialCommit`` objects from start to end
468 (both are inclusive)
468 (both are inclusive)
469
469
470 :param start_id: None, str(commit_id)
470 :param start_id: None, str(commit_id)
471 :param end_id: None, str(commit_id)
471 :param end_id: None, str(commit_id)
472 :param start_date: if specified, commits with commit date less than
472 :param start_date: if specified, commits with commit date less than
473 ``start_date`` would be filtered out from returned set
473 ``start_date`` would be filtered out from returned set
474 :param end_date: if specified, commits with commit date greater than
474 :param end_date: if specified, commits with commit date greater than
475 ``end_date`` would be filtered out from returned set
475 ``end_date`` would be filtered out from returned set
476 :param branch_name: if specified, commits not reachable from given
476 :param branch_name: if specified, commits not reachable from given
477 branch would be filtered out from returned set
477 branch would be filtered out from returned set
478 :param show_hidden: Show hidden commits such as obsolete or hidden from
478 :param show_hidden: Show hidden commits such as obsolete or hidden from
479 Mercurial evolve
479 Mercurial evolve
480 :raise BranchDoesNotExistError: If given ``branch_name`` does not
480 :raise BranchDoesNotExistError: If given ``branch_name`` does not
481 exist.
481 exist.
482 :raise CommitDoesNotExistError: If commit for given ``start`` or
482 :raise CommitDoesNotExistError: If commit for given ``start`` or
483 ``end`` could not be found.
483 ``end`` could not be found.
484 """
484 """
485 # actually we should check now if it's not an empty repo
485 # actually we should check now if it's not an empty repo
486 branch_ancestors = False
486 branch_ancestors = False
487 if self.is_empty():
487 if self.is_empty():
488 raise EmptyRepositoryError("There are no commits yet")
488 raise EmptyRepositoryError("There are no commits yet")
489 self._validate_branch_name(branch_name)
489 self._validate_branch_name(branch_name)
490
490
491 if start_id is not None:
491 if start_id is not None:
492 self._validate_commit_id(start_id)
492 self._validate_commit_id(start_id)
493 c_start = self.get_commit(commit_id=start_id)
493 c_start = self.get_commit(commit_id=start_id)
494 start_pos = self._commit_ids[c_start.raw_id]
494 start_pos = self._commit_ids[c_start.raw_id]
495 else:
495 else:
496 start_pos = None
496 start_pos = None
497
497
498 if end_id is not None:
498 if end_id is not None:
499 self._validate_commit_id(end_id)
499 self._validate_commit_id(end_id)
500 c_end = self.get_commit(commit_id=end_id)
500 c_end = self.get_commit(commit_id=end_id)
501 end_pos = max(0, self._commit_ids[c_end.raw_id])
501 end_pos = max(0, self._commit_ids[c_end.raw_id])
502 else:
502 else:
503 end_pos = None
503 end_pos = None
504
504
505 if None not in [start_id, end_id] and start_pos > end_pos:
505 if None not in [start_id, end_id] and start_pos > end_pos:
506 raise RepositoryError(
506 raise RepositoryError(
507 "Start commit '%s' cannot be after end commit '%s'" %
507 "Start commit '%s' cannot be after end commit '%s'" %
508 (start_id, end_id))
508 (start_id, end_id))
509
509
510 if end_pos is not None:
510 if end_pos is not None:
511 end_pos += 1
511 end_pos += 1
512
512
513 commit_filter = []
513 commit_filter = []
514
514
515 if branch_name and not branch_ancestors:
515 if branch_name and not branch_ancestors:
516 commit_filter.append('branch("%s")' % (branch_name,))
516 commit_filter.append('branch("%s")' % (branch_name,))
517 elif branch_name and branch_ancestors:
517 elif branch_name and branch_ancestors:
518 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
518 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
519
519
520 if start_date and not end_date:
520 if start_date and not end_date:
521 commit_filter.append('date(">%s")' % (start_date,))
521 commit_filter.append('date(">%s")' % (start_date,))
522 if end_date and not start_date:
522 if end_date and not start_date:
523 commit_filter.append('date("<%s")' % (end_date,))
523 commit_filter.append('date("<%s")' % (end_date,))
524 if start_date and end_date:
524 if start_date and end_date:
525 commit_filter.append(
525 commit_filter.append(
526 'date(">%s") and date("<%s")' % (start_date, end_date))
526 'date(">%s") and date("<%s")' % (start_date, end_date))
527
527
528 if not show_hidden:
528 if not show_hidden:
529 commit_filter.append('not obsolete()')
529 commit_filter.append('not obsolete()')
530 commit_filter.append('not hidden()')
530 commit_filter.append('not hidden()')
531
531
532 # TODO: johbo: Figure out a simpler way for this solution
532 # TODO: johbo: Figure out a simpler way for this solution
533 collection_generator = CollectionGenerator
533 collection_generator = CollectionGenerator
534 if commit_filter:
534 if commit_filter:
535 commit_filter = ' and '.join(map(safe_str, commit_filter))
535 commit_filter = ' and '.join(map(safe_str, commit_filter))
536 revisions = self._remote.rev_range([commit_filter])
536 revisions = self._remote.rev_range([commit_filter])
537 collection_generator = MercurialIndexBasedCollectionGenerator
537 collection_generator = MercurialIndexBasedCollectionGenerator
538 else:
538 else:
539 revisions = self.commit_ids
539 revisions = self.commit_ids
540
540
541 if start_pos or end_pos:
541 if start_pos or end_pos:
542 revisions = revisions[start_pos:end_pos]
542 revisions = revisions[start_pos:end_pos]
543
543
544 return collection_generator(self, revisions, pre_load=pre_load)
544 return collection_generator(self, revisions, pre_load=pre_load)
545
545
546 def pull(self, url, commit_ids=None):
546 def pull(self, url, commit_ids=None):
547 """
547 """
548 Tries to pull changes from external location.
548 Tries to pull changes from external location.
549
549
550 :param commit_ids: Optional. Can be set to a list of commit ids
550 :param commit_ids: Optional. Can be set to a list of commit ids
551 which shall be pulled from the other repository.
551 which shall be pulled from the other repository.
552 """
552 """
553 url = self._get_url(url)
553 url = self._get_url(url)
554 self._remote.pull(url, commit_ids=commit_ids)
554 self._remote.pull(url, commit_ids=commit_ids)
555 self._remote.invalidate_vcs_cache()
555 self._remote.invalidate_vcs_cache()
556
556
    def push(self, url):
        """
        Push all local changes to the external repository at ``url``
        (full sync push, no per-commit selection).
        """
        url = self._get_url(url)
        self._remote.sync_push(url)
560
557 def _local_clone(self, clone_path):
561 def _local_clone(self, clone_path):
558 """
562 """
559 Create a local clone of the current repo.
563 Create a local clone of the current repo.
560 """
564 """
561 self._remote.clone(self.path, clone_path, update_after_clone=True,
565 self._remote.clone(self.path, clone_path, update_after_clone=True,
562 hooks=False)
566 hooks=False)
563
567
    def _update(self, revision, clean=False):
        """
        Update the working copy to the specified revision.

        :param clean: passed through to the backend; if True local changes
            are discarded (Mercurial ``update --clean`` semantics).
        """
        log.debug('Doing checkout to commit: `%s` for %s', revision, self)
        self._remote.update(revision, clean=clean)
570
574
571 def _identify(self):
575 def _identify(self):
572 """
576 """
573 Return the current state of the working directory.
577 Return the current state of the working directory.
574 """
578 """
575 return self._remote.identify().strip().rstrip('+')
579 return self._remote.identify().strip().rstrip('+')
576
580
577 def _heads(self, branch=None):
581 def _heads(self, branch=None):
578 """
582 """
579 Return the commit ids of the repository heads.
583 Return the commit ids of the repository heads.
580 """
584 """
581 return self._remote.heads(branch=branch).strip().split(' ')
585 return self._remote.heads(branch=branch).strip().split(' ')
582
586
    def _ancestor(self, revision1, revision2):
        """
        Return the common ancestor of the two revisions, as reported by
        the backend.
        """
        return self._remote.ancestor(revision1, revision2)
588
592
589 def _local_push(
593 def _local_push(
590 self, revision, repository_path, push_branches=False,
594 self, revision, repository_path, push_branches=False,
591 enable_hooks=False):
595 enable_hooks=False):
592 """
596 """
593 Push the given revision to the specified repository.
597 Push the given revision to the specified repository.
594
598
595 :param push_branches: allow to create branches in the target repo.
599 :param push_branches: allow to create branches in the target repo.
596 """
600 """
597 self._remote.push(
601 self._remote.push(
598 [revision], repository_path, hooks=enable_hooks,
602 [revision], repository_path, hooks=enable_hooks,
599 push_branches=push_branches)
603 push_branches=push_branches)
600
604
    def _local_merge(self, target_ref, merge_message, user_name, user_email,
                     source_ref, use_rebase=False, dry_run=False):
        """
        Merge the given source_revision into the checked out revision.

        Returns the commit id of the merge and a boolean indicating if the
        commit needs to be pushed.

        :param use_rebase: rebase ``source_ref`` onto the target head
            instead of creating a merge commit.
        """
        # start from the target commit; merge/rebase is applied on top of it
        self._update(target_ref.commit_id)

        ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
        is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)

        if ancestor == source_ref.commit_id:
            # Nothing to do, the changes were already integrated
            return target_ref.commit_id, False

        elif ancestor == target_ref.commit_id and is_the_same_branch:
            # In this case we should force a commit message
            # (fast-forward: source already contains the target)
            return source_ref.commit_id, True

        if use_rebase:
            try:
                # temporary bookmark so we can locate the rebased head after
                # the rebase rewrote the commit ids
                bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
                                                target_ref.commit_id)
                self.bookmark(bookmark_name, revision=source_ref.commit_id)
                self._remote.rebase(
                    source=source_ref.commit_id, dest=target_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._update(bookmark_name)
                return self._identify(), True
            except RepositoryError:
                # The rebase-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while rebasing shadow repo during merge.')

                # Cleanup any rebase leftovers
                self._remote.invalidate_vcs_cache()
                self._remote.rebase(abort=True)
                self._remote.invalidate_vcs_cache()
                self._remote.update(clean=True)
                raise
        else:
            try:
                self._remote.merge(source_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._remote.commit(
                    message=safe_str(merge_message),
                    username=safe_str('%s <%s>' % (user_name, user_email)))
                self._remote.invalidate_vcs_cache()
                return self._identify(), True
            except RepositoryError:
                # Cleanup any merge leftovers
                self._remote.update(clean=True)
                raise
656
660
    def _local_close(self, target_ref, user_name, user_email,
                     source_ref, close_message=''):
        """
        Close the branch of the given source_revision.

        Returns the commit id of the close and a boolean indicating if the
        commit needs to be pushed.

        :param close_message: optional commit message; a default one naming
            the closed branch is used when empty.
        """
        # the close commit must be created on the source branch head
        self._update(source_ref.commit_id)
        message = close_message or "Closing branch: `{}`".format(source_ref.name)
        try:
            self._remote.commit(
                message=safe_str(message),
                username=safe_str('%s <%s>' % (user_name, user_email)),
                close_branch=True)
            self._remote.invalidate_vcs_cache()
            return self._identify(), True
        except RepositoryError:
            # Cleanup any commit leftovers
            self._remote.update(clean=True)
            raise
678
682
679 def _is_the_same_branch(self, target_ref, source_ref):
683 def _is_the_same_branch(self, target_ref, source_ref):
680 return (
684 return (
681 self._get_branch_name(target_ref) ==
685 self._get_branch_name(target_ref) ==
682 self._get_branch_name(source_ref))
686 self._get_branch_name(source_ref))
683
687
684 def _get_branch_name(self, ref):
688 def _get_branch_name(self, ref):
685 if ref.type == 'branch':
689 if ref.type == 'branch':
686 return ref.name
690 return ref.name
687 return self._remote.ctx_branch(ref.commit_id)
691 return self._remote.ctx_branch(ref.commit_id)
688
692
689 def _get_shadow_repository_path(self, workspace_id):
693 def _get_shadow_repository_path(self, workspace_id):
690 # The name of the shadow repository must start with '.', so it is
694 # The name of the shadow repository must start with '.', so it is
691 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
695 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
692 return os.path.join(
696 return os.path.join(
693 os.path.dirname(self.path),
697 os.path.dirname(self.path),
694 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
698 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
695
699
    def _maybe_prepare_merge_workspace(self, workspace_id, unused_target_ref, unused_source_ref):
        """
        Create the shadow repository for ``workspace_id`` if it does not
        exist yet, and return its path.

        The ref arguments are unused for Mercurial (kept for interface
        compatibility with other backends).
        """
        shadow_repository_path = self._get_shadow_repository_path(workspace_id)
        if not os.path.exists(shadow_repository_path):
            self._local_clone(shadow_repository_path)
            log.debug(
                'Prepared shadow repository in %s', shadow_repository_path)

        return shadow_repository_path
704
708
705 def cleanup_merge_workspace(self, workspace_id):
709 def cleanup_merge_workspace(self, workspace_id):
706 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
710 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
707 shutil.rmtree(shadow_repository_path, ignore_errors=True)
711 shutil.rmtree(shadow_repository_path, ignore_errors=True)
708
712
    def _merge_repo(self, shadow_repository_path, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Merge ``source_ref`` of ``source_repo`` into ``target_ref`` of this
        repository, using the shadow repository at ``shadow_repository_path``
        as a scratch area.

        Returns a MergeResponse describing whether the merge was possible,
        whether it succeeded, the resulting merge reference (a 'pr-merge'
        bookmark in the shadow repo) and a failure reason on error.
        """

        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        # refuse to merge into anything that is not a current head
        if target_ref.commit_id not in self._heads():
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)

        try:
            if (target_ref.type == 'branch' and
                    len(self._heads(target_ref.name)) != 1):
                # multiple heads on the target branch make the merge ambiguous
                return MergeResponse(
                    False, False, None,
                    MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
        except CommitDoesNotExistError:
            log.exception('Failure when looking up branch heads on hg target')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF)

        shadow_repo = self._get_shadow_instance(shadow_repository_path)

        log.debug('Pulling in target reference %s', target_ref)
        self._validate_pull_reference(target_ref)
        shadow_repo._local_pull(self.path, target_ref)
        try:
            log.debug('Pulling in source reference %s', source_ref)
            source_repo._validate_pull_reference(source_ref)
            shadow_repo._local_pull(source_repo.path, source_ref)
        except CommitDoesNotExistError:
            log.exception('Failure when doing local pull on hg shadow repo')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF)

        merge_ref = None
        merge_commit_id = None
        close_commit_id = None
        merge_failure_reason = MergeFailureReason.NONE

        # enforce that close branch should be used only in case we source from
        # an actual Branch
        close_branch = close_branch and source_ref.type == 'branch'

        # don't allow to close branch if source and target are the same
        close_branch = close_branch and source_ref.name != target_ref.name

        needs_push_on_close = False
        if close_branch and not use_rebase and not dry_run:
            try:
                close_commit_id, needs_push_on_close = shadow_repo._local_close(
                    target_ref, merger_name, merger_email, source_ref)
                merge_possible = True
            except RepositoryError:
                log.exception(
                    'Failure when doing close branch on hg shadow repo')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
        else:
            merge_possible = True

        if merge_possible:
            try:
                merge_commit_id, needs_push = shadow_repo._local_merge(
                    target_ref, merge_message, merger_name, merger_email,
                    source_ref, use_rebase=use_rebase, dry_run=dry_run)
                merge_possible = True

                # read the state of the close action, if it
                # maybe required a push
                needs_push = needs_push or needs_push_on_close

                # Set a bookmark pointing to the merge commit. This bookmark
                # may be used to easily identify the last successful merge
                # commit in the shadow repository.
                shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
                merge_ref = Reference('book', 'pr-merge', merge_commit_id)
            except SubrepoMergeError:
                log.exception(
                    'Subrepo merge error during local merge on hg shadow repo.')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
                needs_push = False
            except RepositoryError:
                log.exception('Failure when doing local merge on hg shadow repo')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
                needs_push = False

        if merge_possible and not dry_run:
            if needs_push:
                # In case the target is a bookmark, update it, so after pushing
                # the bookmarks is also updated in the target.
                if target_ref.type == 'book':
                    shadow_repo.bookmark(
                        target_ref.name, revision=merge_commit_id)
                try:
                    shadow_repo_with_hooks = self._get_shadow_instance(
                        shadow_repository_path,
                        enable_hooks=True)
                    # This is the actual merge action, we push from shadow
                    # into origin.
                    # Note: the push_branches option will push any new branch
                    # defined in the source repository to the target. This may
                    # be dangerous as branches are permanent in Mercurial.
                    # This feature was requested in issue #441.
                    shadow_repo_with_hooks._local_push(
                        merge_commit_id, self.path, push_branches=True,
                        enable_hooks=True)

                    # maybe we also need to push the close_commit_id
                    if close_commit_id:
                        shadow_repo_with_hooks._local_push(
                            close_commit_id, self.path, push_branches=True,
                            enable_hooks=True)
                    merge_succeeded = True
                except RepositoryError:
                    log.exception(
                        'Failure when doing local push from the shadow '
                        'repository to the target repository.')
                    merge_succeeded = False
                    merge_failure_reason = MergeFailureReason.PUSH_FAILED
            else:
                merge_succeeded = True
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason)
839
843
840 def _get_shadow_instance(
844 def _get_shadow_instance(
841 self, shadow_repository_path, enable_hooks=False):
845 self, shadow_repository_path, enable_hooks=False):
842 config = self.config.copy()
846 config = self.config.copy()
843 if not enable_hooks:
847 if not enable_hooks:
844 config.clear_section('hooks')
848 config.clear_section('hooks')
845 return MercurialRepository(shadow_repository_path, config)
849 return MercurialRepository(shadow_repository_path, config)
846
850
847 def _validate_pull_reference(self, reference):
851 def _validate_pull_reference(self, reference):
848 if not (reference.name in self.bookmarks or
852 if not (reference.name in self.bookmarks or
849 reference.name in self.branches or
853 reference.name in self.branches or
850 self.get_commit(reference.commit_id)):
854 self.get_commit(reference.commit_id)):
851 raise CommitDoesNotExistError(
855 raise CommitDoesNotExistError(
852 'Unknown branch, bookmark or commit id')
856 'Unknown branch, bookmark or commit id')
853
857
854 def _local_pull(self, repository_path, reference):
858 def _local_pull(self, repository_path, reference):
855 """
859 """
856 Fetch a branch, bookmark or commit from a local repository.
860 Fetch a branch, bookmark or commit from a local repository.
857 """
861 """
858 repository_path = os.path.abspath(repository_path)
862 repository_path = os.path.abspath(repository_path)
859 if repository_path == self.path:
863 if repository_path == self.path:
860 raise ValueError('Cannot pull from the same repository')
864 raise ValueError('Cannot pull from the same repository')
861
865
862 reference_type_to_option_name = {
866 reference_type_to_option_name = {
863 'book': 'bookmark',
867 'book': 'bookmark',
864 'branch': 'branch',
868 'branch': 'branch',
865 }
869 }
866 option_name = reference_type_to_option_name.get(
870 option_name = reference_type_to_option_name.get(
867 reference.type, 'revision')
871 reference.type, 'revision')
868
872
869 if option_name == 'revision':
873 if option_name == 'revision':
870 ref = reference.commit_id
874 ref = reference.commit_id
871 else:
875 else:
872 ref = reference.name
876 ref = reference.name
873
877
874 options = {option_name: [ref]}
878 options = {option_name: [ref]}
875 self._remote.pull_cmd(repository_path, hooks=False, **options)
879 self._remote.pull_cmd(repository_path, hooks=False, **options)
876 self._remote.invalidate_vcs_cache()
880 self._remote.invalidate_vcs_cache()
877
881
    def bookmark(self, bookmark, revision=None):
        """
        Set the given bookmark on ``revision`` (or on the current commit
        when ``revision`` is None).
        """
        # the backend expects byte strings (Python 2 codebase)
        if isinstance(bookmark, unicode):
            bookmark = safe_str(bookmark)
        self._remote.bookmark(bookmark, revision=revision)
        self._remote.invalidate_vcs_cache()
883
887
884
888
class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
    """
    Collection generator whose ids are commit *indices*, not raw ids.

    Used when the commit set comes from a revset query (``rev_range``),
    which yields numeric revision indices.
    """

    def _commit_factory(self, commit_id):
        # `commit_id` is actually a revision index here, hence commit_idx=
        return self.repo.get_commit(
            commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,907 +1,922 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Scm model for RhodeCode
22 Scm model for RhodeCode
23 """
23 """
24
24
25 import os.path
25 import os.path
26 import re
26 import re
27 import sys
27 import sys
28 import traceback
28 import traceback
29 import logging
29 import logging
30 import cStringIO
30 import cStringIO
31 import pkg_resources
31 import pkg_resources
32
32
33 from sqlalchemy import func
33 from sqlalchemy import func
34 from zope.cachedescriptors.property import Lazy as LazyProperty
34 from zope.cachedescriptors.property import Lazy as LazyProperty
35
35
36 import rhodecode
36 import rhodecode
37 from rhodecode.lib.vcs import get_backend
37 from rhodecode.lib.vcs import get_backend
38 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
38 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
39 from rhodecode.lib.vcs.nodes import FileNode
39 from rhodecode.lib.vcs.nodes import FileNode
40 from rhodecode.lib.vcs.backends.base import EmptyCommit
40 from rhodecode.lib.vcs.backends.base import EmptyCommit
41 from rhodecode.lib import helpers as h
41 from rhodecode.lib import helpers as h
42 from rhodecode.lib.auth import (
42 from rhodecode.lib.auth import (
43 HasRepoPermissionAny, HasRepoGroupPermissionAny,
43 HasRepoPermissionAny, HasRepoGroupPermissionAny,
44 HasUserGroupPermissionAny)
44 HasUserGroupPermissionAny)
45 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
45 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
46 from rhodecode.lib import hooks_utils, caches
46 from rhodecode.lib import hooks_utils, caches
47 from rhodecode.lib.utils import (
47 from rhodecode.lib.utils import (
48 get_filesystem_repos, make_db_config)
48 get_filesystem_repos, make_db_config)
49 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
49 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
50 from rhodecode.lib.system_info import get_system_info
50 from rhodecode.lib.system_info import get_system_info
51 from rhodecode.model import BaseModel
51 from rhodecode.model import BaseModel
52 from rhodecode.model.db import (
52 from rhodecode.model.db import (
53 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
53 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
54 PullRequest)
54 PullRequest)
55 from rhodecode.model.settings import VcsSettingsModel
55 from rhodecode.model.settings import VcsSettingsModel
56
56
57 log = logging.getLogger(__name__)
57 log = logging.getLogger(__name__)
58
58
59
59
60 class UserTemp(object):
60 class UserTemp(object):
61 def __init__(self, user_id):
61 def __init__(self, user_id):
62 self.user_id = user_id
62 self.user_id = user_id
63
63
64 def __repr__(self):
64 def __repr__(self):
65 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
65 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
66
66
67
67
68 class RepoTemp(object):
68 class RepoTemp(object):
69 def __init__(self, repo_id):
69 def __init__(self, repo_id):
70 self.repo_id = repo_id
70 self.repo_id = repo_id
71
71
72 def __repr__(self):
72 def __repr__(self):
73 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
73 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
74
74
75
75
76 class SimpleCachedRepoList(object):
76 class SimpleCachedRepoList(object):
77 """
77 """
78 Lighter version of of iteration of repos without the scm initialisation,
78 Lighter version of of iteration of repos without the scm initialisation,
79 and with cache usage
79 and with cache usage
80 """
80 """
81 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
81 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
82 self.db_repo_list = db_repo_list
82 self.db_repo_list = db_repo_list
83 self.repos_path = repos_path
83 self.repos_path = repos_path
84 self.order_by = order_by
84 self.order_by = order_by
85 self.reversed = (order_by or '').startswith('-')
85 self.reversed = (order_by or '').startswith('-')
86 if not perm_set:
86 if not perm_set:
87 perm_set = ['repository.read', 'repository.write',
87 perm_set = ['repository.read', 'repository.write',
88 'repository.admin']
88 'repository.admin']
89 self.perm_set = perm_set
89 self.perm_set = perm_set
90
90
91 def __len__(self):
91 def __len__(self):
92 return len(self.db_repo_list)
92 return len(self.db_repo_list)
93
93
94 def __repr__(self):
94 def __repr__(self):
95 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
95 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
96
96
97 def __iter__(self):
97 def __iter__(self):
98 for dbr in self.db_repo_list:
98 for dbr in self.db_repo_list:
99 # check permission at this level
99 # check permission at this level
100 has_perm = HasRepoPermissionAny(*self.perm_set)(
100 has_perm = HasRepoPermissionAny(*self.perm_set)(
101 dbr.repo_name, 'SimpleCachedRepoList check')
101 dbr.repo_name, 'SimpleCachedRepoList check')
102 if not has_perm:
102 if not has_perm:
103 continue
103 continue
104
104
105 tmp_d = {
105 tmp_d = {
106 'name': dbr.repo_name,
106 'name': dbr.repo_name,
107 'dbrepo': dbr.get_dict(),
107 'dbrepo': dbr.get_dict(),
108 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
108 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
109 }
109 }
110 yield tmp_d
110 yield tmp_d
111
111
112
112
113 class _PermCheckIterator(object):
113 class _PermCheckIterator(object):
114
114
115 def __init__(
115 def __init__(
116 self, obj_list, obj_attr, perm_set, perm_checker,
116 self, obj_list, obj_attr, perm_set, perm_checker,
117 extra_kwargs=None):
117 extra_kwargs=None):
118 """
118 """
119 Creates iterator from given list of objects, additionally
119 Creates iterator from given list of objects, additionally
120 checking permission for them from perm_set var
120 checking permission for them from perm_set var
121
121
122 :param obj_list: list of db objects
122 :param obj_list: list of db objects
123 :param obj_attr: attribute of object to pass into perm_checker
123 :param obj_attr: attribute of object to pass into perm_checker
124 :param perm_set: list of permissions to check
124 :param perm_set: list of permissions to check
125 :param perm_checker: callable to check permissions against
125 :param perm_checker: callable to check permissions against
126 """
126 """
127 self.obj_list = obj_list
127 self.obj_list = obj_list
128 self.obj_attr = obj_attr
128 self.obj_attr = obj_attr
129 self.perm_set = perm_set
129 self.perm_set = perm_set
130 self.perm_checker = perm_checker
130 self.perm_checker = perm_checker
131 self.extra_kwargs = extra_kwargs or {}
131 self.extra_kwargs = extra_kwargs or {}
132
132
133 def __len__(self):
133 def __len__(self):
134 return len(self.obj_list)
134 return len(self.obj_list)
135
135
136 def __repr__(self):
136 def __repr__(self):
137 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
137 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
138
138
139 def __iter__(self):
139 def __iter__(self):
140 checker = self.perm_checker(*self.perm_set)
140 checker = self.perm_checker(*self.perm_set)
141 for db_obj in self.obj_list:
141 for db_obj in self.obj_list:
142 # check permission at this level
142 # check permission at this level
143 name = getattr(db_obj, self.obj_attr, None)
143 name = getattr(db_obj, self.obj_attr, None)
144 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
144 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
145 continue
145 continue
146
146
147 yield db_obj
147 yield db_obj
148
148
149
149
150 class RepoList(_PermCheckIterator):
150 class RepoList(_PermCheckIterator):
151
151
152 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
152 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
153 if not perm_set:
153 if not perm_set:
154 perm_set = [
154 perm_set = [
155 'repository.read', 'repository.write', 'repository.admin']
155 'repository.read', 'repository.write', 'repository.admin']
156
156
157 super(RepoList, self).__init__(
157 super(RepoList, self).__init__(
158 obj_list=db_repo_list,
158 obj_list=db_repo_list,
159 obj_attr='repo_name', perm_set=perm_set,
159 obj_attr='repo_name', perm_set=perm_set,
160 perm_checker=HasRepoPermissionAny,
160 perm_checker=HasRepoPermissionAny,
161 extra_kwargs=extra_kwargs)
161 extra_kwargs=extra_kwargs)
162
162
163
163
164 class RepoGroupList(_PermCheckIterator):
164 class RepoGroupList(_PermCheckIterator):
165
165
166 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
166 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
167 if not perm_set:
167 if not perm_set:
168 perm_set = ['group.read', 'group.write', 'group.admin']
168 perm_set = ['group.read', 'group.write', 'group.admin']
169
169
170 super(RepoGroupList, self).__init__(
170 super(RepoGroupList, self).__init__(
171 obj_list=db_repo_group_list,
171 obj_list=db_repo_group_list,
172 obj_attr='group_name', perm_set=perm_set,
172 obj_attr='group_name', perm_set=perm_set,
173 perm_checker=HasRepoGroupPermissionAny,
173 perm_checker=HasRepoGroupPermissionAny,
174 extra_kwargs=extra_kwargs)
174 extra_kwargs=extra_kwargs)
175
175
176
176
177 class UserGroupList(_PermCheckIterator):
177 class UserGroupList(_PermCheckIterator):
178
178
179 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
179 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
180 if not perm_set:
180 if not perm_set:
181 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
181 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
182
182
183 super(UserGroupList, self).__init__(
183 super(UserGroupList, self).__init__(
184 obj_list=db_user_group_list,
184 obj_list=db_user_group_list,
185 obj_attr='users_group_name', perm_set=perm_set,
185 obj_attr='users_group_name', perm_set=perm_set,
186 perm_checker=HasUserGroupPermissionAny,
186 perm_checker=HasUserGroupPermissionAny,
187 extra_kwargs=extra_kwargs)
187 extra_kwargs=extra_kwargs)
188
188
189
189
190 class ScmModel(BaseModel):
190 class ScmModel(BaseModel):
191 """
191 """
192 Generic Scm Model
192 Generic Scm Model
193 """
193 """
194
194
195 @LazyProperty
195 @LazyProperty
196 def repos_path(self):
196 def repos_path(self):
197 """
197 """
198 Gets the repositories root path from database
198 Gets the repositories root path from database
199 """
199 """
200
200
201 settings_model = VcsSettingsModel(sa=self.sa)
201 settings_model = VcsSettingsModel(sa=self.sa)
202 return settings_model.get_repos_location()
202 return settings_model.get_repos_location()
203
203
204 def repo_scan(self, repos_path=None):
204 def repo_scan(self, repos_path=None):
205 """
205 """
206 Listing of repositories in given path. This path should not be a
206 Listing of repositories in given path. This path should not be a
207 repository itself. Return a dictionary of repository objects
207 repository itself. Return a dictionary of repository objects
208
208
209 :param repos_path: path to directory containing repositories
209 :param repos_path: path to directory containing repositories
210 """
210 """
211
211
212 if repos_path is None:
212 if repos_path is None:
213 repos_path = self.repos_path
213 repos_path = self.repos_path
214
214
215 log.info('scanning for repositories in %s', repos_path)
215 log.info('scanning for repositories in %s', repos_path)
216
216
217 config = make_db_config()
217 config = make_db_config()
218 config.set('extensions', 'largefiles', '')
218 config.set('extensions', 'largefiles', '')
219 repos = {}
219 repos = {}
220
220
221 for name, path in get_filesystem_repos(repos_path, recursive=True):
221 for name, path in get_filesystem_repos(repos_path, recursive=True):
222 # name need to be decomposed and put back together using the /
222 # name need to be decomposed and put back together using the /
223 # since this is internal storage separator for rhodecode
223 # since this is internal storage separator for rhodecode
224 name = Repository.normalize_repo_name(name)
224 name = Repository.normalize_repo_name(name)
225
225
226 try:
226 try:
227 if name in repos:
227 if name in repos:
228 raise RepositoryError('Duplicate repository name %s '
228 raise RepositoryError('Duplicate repository name %s '
229 'found in %s' % (name, path))
229 'found in %s' % (name, path))
230 elif path[0] in rhodecode.BACKENDS:
230 elif path[0] in rhodecode.BACKENDS:
231 klass = get_backend(path[0])
231 klass = get_backend(path[0])
232 repos[name] = klass(path[1], config=config)
232 repos[name] = klass(path[1], config=config)
233 except OSError:
233 except OSError:
234 continue
234 continue
235 log.debug('found %s paths with repositories', len(repos))
235 log.debug('found %s paths with repositories', len(repos))
236 return repos
236 return repos
237
237
238 def get_repos(self, all_repos=None, sort_key=None):
238 def get_repos(self, all_repos=None, sort_key=None):
239 """
239 """
240 Get all repositories from db and for each repo create it's
240 Get all repositories from db and for each repo create it's
241 backend instance and fill that backed with information from database
241 backend instance and fill that backed with information from database
242
242
243 :param all_repos: list of repository names as strings
243 :param all_repos: list of repository names as strings
244 give specific repositories list, good for filtering
244 give specific repositories list, good for filtering
245
245
246 :param sort_key: initial sorting of repositories
246 :param sort_key: initial sorting of repositories
247 """
247 """
248 if all_repos is None:
248 if all_repos is None:
249 all_repos = self.sa.query(Repository)\
249 all_repos = self.sa.query(Repository)\
250 .filter(Repository.group_id == None)\
250 .filter(Repository.group_id == None)\
251 .order_by(func.lower(Repository.repo_name)).all()
251 .order_by(func.lower(Repository.repo_name)).all()
252 repo_iter = SimpleCachedRepoList(
252 repo_iter = SimpleCachedRepoList(
253 all_repos, repos_path=self.repos_path, order_by=sort_key)
253 all_repos, repos_path=self.repos_path, order_by=sort_key)
254 return repo_iter
254 return repo_iter
255
255
256 def get_repo_groups(self, all_groups=None):
256 def get_repo_groups(self, all_groups=None):
257 if all_groups is None:
257 if all_groups is None:
258 all_groups = RepoGroup.query()\
258 all_groups = RepoGroup.query()\
259 .filter(RepoGroup.group_parent_id == None).all()
259 .filter(RepoGroup.group_parent_id == None).all()
260 return [x for x in RepoGroupList(all_groups)]
260 return [x for x in RepoGroupList(all_groups)]
261
261
262 def mark_for_invalidation(self, repo_name, delete=False):
262 def mark_for_invalidation(self, repo_name, delete=False):
263 """
263 """
264 Mark caches of this repo invalid in the database. `delete` flag
264 Mark caches of this repo invalid in the database. `delete` flag
265 removes the cache entries
265 removes the cache entries
266
266
267 :param repo_name: the repo_name for which caches should be marked
267 :param repo_name: the repo_name for which caches should be marked
268 invalid, or deleted
268 invalid, or deleted
269 :param delete: delete the entry keys instead of setting bool
269 :param delete: delete the entry keys instead of setting bool
270 flag on them
270 flag on them
271 """
271 """
272 CacheKey.set_invalidate(repo_name, delete=delete)
272 CacheKey.set_invalidate(repo_name, delete=delete)
273 repo = Repository.get_by_repo_name(repo_name)
273 repo = Repository.get_by_repo_name(repo_name)
274
274
275 if repo:
275 if repo:
276 config = repo._config
276 config = repo._config
277 config.set('extensions', 'largefiles', '')
277 config.set('extensions', 'largefiles', '')
278 repo.update_commit_cache(config=config, cs_cache=None)
278 repo.update_commit_cache(config=config, cs_cache=None)
279 caches.clear_repo_caches(repo_name)
279 caches.clear_repo_caches(repo_name)
280
280
281 def toggle_following_repo(self, follow_repo_id, user_id):
281 def toggle_following_repo(self, follow_repo_id, user_id):
282
282
283 f = self.sa.query(UserFollowing)\
283 f = self.sa.query(UserFollowing)\
284 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
284 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
285 .filter(UserFollowing.user_id == user_id).scalar()
285 .filter(UserFollowing.user_id == user_id).scalar()
286
286
287 if f is not None:
287 if f is not None:
288 try:
288 try:
289 self.sa.delete(f)
289 self.sa.delete(f)
290 return
290 return
291 except Exception:
291 except Exception:
292 log.error(traceback.format_exc())
292 log.error(traceback.format_exc())
293 raise
293 raise
294
294
295 try:
295 try:
296 f = UserFollowing()
296 f = UserFollowing()
297 f.user_id = user_id
297 f.user_id = user_id
298 f.follows_repo_id = follow_repo_id
298 f.follows_repo_id = follow_repo_id
299 self.sa.add(f)
299 self.sa.add(f)
300 except Exception:
300 except Exception:
301 log.error(traceback.format_exc())
301 log.error(traceback.format_exc())
302 raise
302 raise
303
303
304 def toggle_following_user(self, follow_user_id, user_id):
304 def toggle_following_user(self, follow_user_id, user_id):
305 f = self.sa.query(UserFollowing)\
305 f = self.sa.query(UserFollowing)\
306 .filter(UserFollowing.follows_user_id == follow_user_id)\
306 .filter(UserFollowing.follows_user_id == follow_user_id)\
307 .filter(UserFollowing.user_id == user_id).scalar()
307 .filter(UserFollowing.user_id == user_id).scalar()
308
308
309 if f is not None:
309 if f is not None:
310 try:
310 try:
311 self.sa.delete(f)
311 self.sa.delete(f)
312 return
312 return
313 except Exception:
313 except Exception:
314 log.error(traceback.format_exc())
314 log.error(traceback.format_exc())
315 raise
315 raise
316
316
317 try:
317 try:
318 f = UserFollowing()
318 f = UserFollowing()
319 f.user_id = user_id
319 f.user_id = user_id
320 f.follows_user_id = follow_user_id
320 f.follows_user_id = follow_user_id
321 self.sa.add(f)
321 self.sa.add(f)
322 except Exception:
322 except Exception:
323 log.error(traceback.format_exc())
323 log.error(traceback.format_exc())
324 raise
324 raise
325
325
326 def is_following_repo(self, repo_name, user_id, cache=False):
326 def is_following_repo(self, repo_name, user_id, cache=False):
327 r = self.sa.query(Repository)\
327 r = self.sa.query(Repository)\
328 .filter(Repository.repo_name == repo_name).scalar()
328 .filter(Repository.repo_name == repo_name).scalar()
329
329
330 f = self.sa.query(UserFollowing)\
330 f = self.sa.query(UserFollowing)\
331 .filter(UserFollowing.follows_repository == r)\
331 .filter(UserFollowing.follows_repository == r)\
332 .filter(UserFollowing.user_id == user_id).scalar()
332 .filter(UserFollowing.user_id == user_id).scalar()
333
333
334 return f is not None
334 return f is not None
335
335
336 def is_following_user(self, username, user_id, cache=False):
336 def is_following_user(self, username, user_id, cache=False):
337 u = User.get_by_username(username)
337 u = User.get_by_username(username)
338
338
339 f = self.sa.query(UserFollowing)\
339 f = self.sa.query(UserFollowing)\
340 .filter(UserFollowing.follows_user == u)\
340 .filter(UserFollowing.follows_user == u)\
341 .filter(UserFollowing.user_id == user_id).scalar()
341 .filter(UserFollowing.user_id == user_id).scalar()
342
342
343 return f is not None
343 return f is not None
344
344
345 def get_followers(self, repo):
345 def get_followers(self, repo):
346 repo = self._get_repo(repo)
346 repo = self._get_repo(repo)
347
347
348 return self.sa.query(UserFollowing)\
348 return self.sa.query(UserFollowing)\
349 .filter(UserFollowing.follows_repository == repo).count()
349 .filter(UserFollowing.follows_repository == repo).count()
350
350
351 def get_forks(self, repo):
351 def get_forks(self, repo):
352 repo = self._get_repo(repo)
352 repo = self._get_repo(repo)
353 return self.sa.query(Repository)\
353 return self.sa.query(Repository)\
354 .filter(Repository.fork == repo).count()
354 .filter(Repository.fork == repo).count()
355
355
356 def get_pull_requests(self, repo):
356 def get_pull_requests(self, repo):
357 repo = self._get_repo(repo)
357 repo = self._get_repo(repo)
358 return self.sa.query(PullRequest)\
358 return self.sa.query(PullRequest)\
359 .filter(PullRequest.target_repo == repo)\
359 .filter(PullRequest.target_repo == repo)\
360 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
360 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
361
361
362 def mark_as_fork(self, repo, fork, user):
362 def mark_as_fork(self, repo, fork, user):
363 repo = self._get_repo(repo)
363 repo = self._get_repo(repo)
364 fork = self._get_repo(fork)
364 fork = self._get_repo(fork)
365 if fork and repo.repo_id == fork.repo_id:
365 if fork and repo.repo_id == fork.repo_id:
366 raise Exception("Cannot set repository as fork of itself")
366 raise Exception("Cannot set repository as fork of itself")
367
367
368 if fork and repo.repo_type != fork.repo_type:
368 if fork and repo.repo_type != fork.repo_type:
369 raise RepositoryError(
369 raise RepositoryError(
370 "Cannot set repository as fork of repository with other type")
370 "Cannot set repository as fork of repository with other type")
371
371
372 repo.fork = fork
372 repo.fork = fork
373 self.sa.add(repo)
373 self.sa.add(repo)
374 return repo
374 return repo
375
375
376 def pull_changes(self, repo, username):
376 def pull_changes(self, repo, username, remote_uri=None):
377 dbrepo = self._get_repo(repo)
377 dbrepo = self._get_repo(repo)
378 clone_uri = dbrepo.clone_uri
378 remote_uri = remote_uri or dbrepo.clone_uri
379 if not clone_uri:
379 if not remote_uri:
380 raise Exception("This repository doesn't have a clone uri")
380 raise Exception("This repository doesn't have a clone uri")
381
381
382 repo = dbrepo.scm_instance(cache=False)
382 repo = dbrepo.scm_instance(cache=False)
383 # TODO: marcink fix this an re-enable since we need common logic
383 # TODO: marcink fix this an re-enable since we need common logic
384 # for hg/git remove hooks so we don't trigger them on fetching
384 # for hg/git remove hooks so we don't trigger them on fetching
385 # commits from remote
385 # commits from remote
386 repo.config.clear_section('hooks')
386 repo.config.clear_section('hooks')
387
387
388 repo_name = dbrepo.repo_name
388 repo_name = dbrepo.repo_name
389 try:
389 try:
390 # TODO: we need to make sure those operations call proper hooks !
390 # TODO: we need to make sure those operations call proper hooks !
391 repo.pull(clone_uri)
391 repo.pull(remote_uri)
392
392
393 self.mark_for_invalidation(repo_name)
393 self.mark_for_invalidation(repo_name)
394 except Exception:
394 except Exception:
395 log.error(traceback.format_exc())
395 log.error(traceback.format_exc())
396 raise
396 raise
397
397
398 def push_changes(self, repo, username, remote_uri=None):
399 dbrepo = self._get_repo(repo)
400 remote_uri = remote_uri or dbrepo.clone_uri
401 if not remote_uri:
402 raise Exception("This repository doesn't have a clone uri")
403
404 repo = dbrepo.scm_instance(cache=False)
405 repo.config.clear_section('hooks')
406
407 try:
408 repo.push(remote_uri)
409 except Exception:
410 log.error(traceback.format_exc())
411 raise
412
398 def commit_change(self, repo, repo_name, commit, user, author, message,
413 def commit_change(self, repo, repo_name, commit, user, author, message,
399 content, f_path):
414 content, f_path):
400 """
415 """
401 Commits changes
416 Commits changes
402
417
403 :param repo: SCM instance
418 :param repo: SCM instance
404
419
405 """
420 """
406 user = self._get_user(user)
421 user = self._get_user(user)
407
422
408 # decoding here will force that we have proper encoded values
423 # decoding here will force that we have proper encoded values
409 # in any other case this will throw exceptions and deny commit
424 # in any other case this will throw exceptions and deny commit
410 content = safe_str(content)
425 content = safe_str(content)
411 path = safe_str(f_path)
426 path = safe_str(f_path)
412 # message and author needs to be unicode
427 # message and author needs to be unicode
413 # proper backend should then translate that into required type
428 # proper backend should then translate that into required type
414 message = safe_unicode(message)
429 message = safe_unicode(message)
415 author = safe_unicode(author)
430 author = safe_unicode(author)
416 imc = repo.in_memory_commit
431 imc = repo.in_memory_commit
417 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
432 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
418 try:
433 try:
419 # TODO: handle pre-push action !
434 # TODO: handle pre-push action !
420 tip = imc.commit(
435 tip = imc.commit(
421 message=message, author=author, parents=[commit],
436 message=message, author=author, parents=[commit],
422 branch=commit.branch)
437 branch=commit.branch)
423 except Exception as e:
438 except Exception as e:
424 log.error(traceback.format_exc())
439 log.error(traceback.format_exc())
425 raise IMCCommitError(str(e))
440 raise IMCCommitError(str(e))
426 finally:
441 finally:
427 # always clear caches, if commit fails we want fresh object also
442 # always clear caches, if commit fails we want fresh object also
428 self.mark_for_invalidation(repo_name)
443 self.mark_for_invalidation(repo_name)
429
444
430 # We trigger the post-push action
445 # We trigger the post-push action
431 hooks_utils.trigger_post_push_hook(
446 hooks_utils.trigger_post_push_hook(
432 username=user.username, action='push_local', repo_name=repo_name,
447 username=user.username, action='push_local', repo_name=repo_name,
433 repo_alias=repo.alias, commit_ids=[tip.raw_id])
448 repo_alias=repo.alias, commit_ids=[tip.raw_id])
434 return tip
449 return tip
435
450
436 def _sanitize_path(self, f_path):
451 def _sanitize_path(self, f_path):
437 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
452 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
438 raise NonRelativePathError('%s is not an relative path' % f_path)
453 raise NonRelativePathError('%s is not an relative path' % f_path)
439 if f_path:
454 if f_path:
440 f_path = os.path.normpath(f_path)
455 f_path = os.path.normpath(f_path)
441 return f_path
456 return f_path
442
457
443 def get_dirnode_metadata(self, request, commit, dir_node):
458 def get_dirnode_metadata(self, request, commit, dir_node):
444 if not dir_node.is_dir():
459 if not dir_node.is_dir():
445 return []
460 return []
446
461
447 data = []
462 data = []
448 for node in dir_node:
463 for node in dir_node:
449 if not node.is_file():
464 if not node.is_file():
450 # we skip file-nodes
465 # we skip file-nodes
451 continue
466 continue
452
467
453 last_commit = node.last_commit
468 last_commit = node.last_commit
454 last_commit_date = last_commit.date
469 last_commit_date = last_commit.date
455 data.append({
470 data.append({
456 'name': node.name,
471 'name': node.name,
457 'size': h.format_byte_size_binary(node.size),
472 'size': h.format_byte_size_binary(node.size),
458 'modified_at': h.format_date(last_commit_date),
473 'modified_at': h.format_date(last_commit_date),
459 'modified_ts': last_commit_date.isoformat(),
474 'modified_ts': last_commit_date.isoformat(),
460 'revision': last_commit.revision,
475 'revision': last_commit.revision,
461 'short_id': last_commit.short_id,
476 'short_id': last_commit.short_id,
462 'message': h.escape(last_commit.message),
477 'message': h.escape(last_commit.message),
463 'author': h.escape(last_commit.author),
478 'author': h.escape(last_commit.author),
464 'user_profile': h.gravatar_with_user(
479 'user_profile': h.gravatar_with_user(
465 request, last_commit.author),
480 request, last_commit.author),
466 })
481 })
467
482
468 return data
483 return data
469
484
470 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
485 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
471 extended_info=False, content=False, max_file_bytes=None):
486 extended_info=False, content=False, max_file_bytes=None):
472 """
487 """
473 recursive walk in root dir and return a set of all path in that dir
488 recursive walk in root dir and return a set of all path in that dir
474 based on repository walk function
489 based on repository walk function
475
490
476 :param repo_name: name of repository
491 :param repo_name: name of repository
477 :param commit_id: commit id for which to list nodes
492 :param commit_id: commit id for which to list nodes
478 :param root_path: root path to list
493 :param root_path: root path to list
479 :param flat: return as a list, if False returns a dict with description
494 :param flat: return as a list, if False returns a dict with description
480 :param max_file_bytes: will not return file contents over this limit
495 :param max_file_bytes: will not return file contents over this limit
481
496
482 """
497 """
483 _files = list()
498 _files = list()
484 _dirs = list()
499 _dirs = list()
485 try:
500 try:
486 _repo = self._get_repo(repo_name)
501 _repo = self._get_repo(repo_name)
487 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
502 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
488 root_path = root_path.lstrip('/')
503 root_path = root_path.lstrip('/')
489 for __, dirs, files in commit.walk(root_path):
504 for __, dirs, files in commit.walk(root_path):
490 for f in files:
505 for f in files:
491 _content = None
506 _content = None
492 _data = f.unicode_path
507 _data = f.unicode_path
493 over_size_limit = (max_file_bytes is not None
508 over_size_limit = (max_file_bytes is not None
494 and f.size > max_file_bytes)
509 and f.size > max_file_bytes)
495
510
496 if not flat:
511 if not flat:
497 _data = {
512 _data = {
498 "name": h.escape(f.unicode_path),
513 "name": h.escape(f.unicode_path),
499 "type": "file",
514 "type": "file",
500 }
515 }
501 if extended_info:
516 if extended_info:
502 _data.update({
517 _data.update({
503 "md5": f.md5,
518 "md5": f.md5,
504 "binary": f.is_binary,
519 "binary": f.is_binary,
505 "size": f.size,
520 "size": f.size,
506 "extension": f.extension,
521 "extension": f.extension,
507 "mimetype": f.mimetype,
522 "mimetype": f.mimetype,
508 "lines": f.lines()[0]
523 "lines": f.lines()[0]
509 })
524 })
510
525
511 if content:
526 if content:
512 full_content = None
527 full_content = None
513 if not f.is_binary and not over_size_limit:
528 if not f.is_binary and not over_size_limit:
514 full_content = safe_str(f.content)
529 full_content = safe_str(f.content)
515
530
516 _data.update({
531 _data.update({
517 "content": full_content,
532 "content": full_content,
518 })
533 })
519 _files.append(_data)
534 _files.append(_data)
520 for d in dirs:
535 for d in dirs:
521 _data = d.unicode_path
536 _data = d.unicode_path
522 if not flat:
537 if not flat:
523 _data = {
538 _data = {
524 "name": h.escape(d.unicode_path),
539 "name": h.escape(d.unicode_path),
525 "type": "dir",
540 "type": "dir",
526 }
541 }
527 if extended_info:
542 if extended_info:
528 _data.update({
543 _data.update({
529 "md5": None,
544 "md5": None,
530 "binary": None,
545 "binary": None,
531 "size": None,
546 "size": None,
532 "extension": None,
547 "extension": None,
533 })
548 })
534 if content:
549 if content:
535 _data.update({
550 _data.update({
536 "content": None
551 "content": None
537 })
552 })
538 _dirs.append(_data)
553 _dirs.append(_data)
539 except RepositoryError:
554 except RepositoryError:
540 log.debug("Exception in get_nodes", exc_info=True)
555 log.debug("Exception in get_nodes", exc_info=True)
541 raise
556 raise
542
557
543 return _dirs, _files
558 return _dirs, _files
544
559
    def create_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Commits given multiple nodes into repo as one new commit.

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit; when empty the new commit is
            created as an initial commit
        :param author: author of commit, can be different than the committer
            (only for git)
        :param trigger_push_hook: trigger post-push hooks after committing

        :returns: newly committed commit
        """

        user = self._get_user(user)
        # cache=False gives a fresh scm instance so the in-memory commit
        # operates on current repository state
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            # strip relative-path tricks (../.. etc.) before using the path
            f_path = self._sanitize_path(f_path)
            content = nodes[f_path]['content']
            f_path = safe_str(f_path)
            # decoding here will force that we have proper encoded values
            # in any other case this will throw exceptions and deny commit
            if isinstance(content, (basestring,)):
                content = safe_str(content)
            elif isinstance(content, (file, cStringIO.OutputType,)):
                content = content.read()
            else:
                raise Exception('Content is of unrecognized type %s' % (
                    type(content)
                ))
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        # author defaults to the committer when not explicitly given
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]
        # add multiple nodes
        for path, content in processed_nodes:
            imc.add(FileNode(path, content=content))
        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        # always drop caches so the new commit is visible to readers
        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip
613
628
614 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
629 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
615 author=None, trigger_push_hook=True):
630 author=None, trigger_push_hook=True):
616 user = self._get_user(user)
631 user = self._get_user(user)
617 scm_instance = repo.scm_instance(cache=False)
632 scm_instance = repo.scm_instance(cache=False)
618
633
619 message = safe_unicode(message)
634 message = safe_unicode(message)
620 commiter = user.full_contact
635 commiter = user.full_contact
621 author = safe_unicode(author) if author else commiter
636 author = safe_unicode(author) if author else commiter
622
637
623 imc = scm_instance.in_memory_commit
638 imc = scm_instance.in_memory_commit
624
639
625 if not parent_commit:
640 if not parent_commit:
626 parent_commit = EmptyCommit(alias=scm_instance.alias)
641 parent_commit = EmptyCommit(alias=scm_instance.alias)
627
642
628 if isinstance(parent_commit, EmptyCommit):
643 if isinstance(parent_commit, EmptyCommit):
629 # EmptyCommit means we we're editing empty repository
644 # EmptyCommit means we we're editing empty repository
630 parents = None
645 parents = None
631 else:
646 else:
632 parents = [parent_commit]
647 parents = [parent_commit]
633
648
634 # add multiple nodes
649 # add multiple nodes
635 for _filename, data in nodes.items():
650 for _filename, data in nodes.items():
636 # new filename, can be renamed from the old one, also sanitaze
651 # new filename, can be renamed from the old one, also sanitaze
637 # the path for any hack around relative paths like ../../ etc.
652 # the path for any hack around relative paths like ../../ etc.
638 filename = self._sanitize_path(data['filename'])
653 filename = self._sanitize_path(data['filename'])
639 old_filename = self._sanitize_path(_filename)
654 old_filename = self._sanitize_path(_filename)
640 content = data['content']
655 content = data['content']
641
656
642 filenode = FileNode(old_filename, content=content)
657 filenode = FileNode(old_filename, content=content)
643 op = data['op']
658 op = data['op']
644 if op == 'add':
659 if op == 'add':
645 imc.add(filenode)
660 imc.add(filenode)
646 elif op == 'del':
661 elif op == 'del':
647 imc.remove(filenode)
662 imc.remove(filenode)
648 elif op == 'mod':
663 elif op == 'mod':
649 if filename != old_filename:
664 if filename != old_filename:
650 # TODO: handle renames more efficient, needs vcs lib
665 # TODO: handle renames more efficient, needs vcs lib
651 # changes
666 # changes
652 imc.remove(filenode)
667 imc.remove(filenode)
653 imc.add(FileNode(filename, content=content))
668 imc.add(FileNode(filename, content=content))
654 else:
669 else:
655 imc.change(filenode)
670 imc.change(filenode)
656
671
657 try:
672 try:
658 # TODO: handle pre push scenario
673 # TODO: handle pre push scenario
659 # commit changes
674 # commit changes
660 tip = imc.commit(message=message,
675 tip = imc.commit(message=message,
661 author=author,
676 author=author,
662 parents=parents,
677 parents=parents,
663 branch=parent_commit.branch)
678 branch=parent_commit.branch)
664 except NodeNotChangedError:
679 except NodeNotChangedError:
665 raise
680 raise
666 except Exception as e:
681 except Exception as e:
667 log.exception("Unexpected exception during call to imc.commit")
682 log.exception("Unexpected exception during call to imc.commit")
668 raise IMCCommitError(str(e))
683 raise IMCCommitError(str(e))
669 finally:
684 finally:
670 # always clear caches, if commit fails we want fresh object also
685 # always clear caches, if commit fails we want fresh object also
671 self.mark_for_invalidation(repo.repo_name)
686 self.mark_for_invalidation(repo.repo_name)
672
687
673 if trigger_push_hook:
688 if trigger_push_hook:
674 hooks_utils.trigger_post_push_hook(
689 hooks_utils.trigger_post_push_hook(
675 username=user.username, action='push_local',
690 username=user.username, action='push_local',
676 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
691 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
677 commit_ids=[tip.raw_id])
692 commit_ids=[tip.raw_id])
678
693
679 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
694 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
680 author=None, trigger_push_hook=True):
695 author=None, trigger_push_hook=True):
681 """
696 """
682 Deletes given multiple nodes into `repo`
697 Deletes given multiple nodes into `repo`
683
698
684 :param user: RhodeCode User object or user_id, the committer
699 :param user: RhodeCode User object or user_id, the committer
685 :param repo: RhodeCode Repository object
700 :param repo: RhodeCode Repository object
686 :param message: commit message
701 :param message: commit message
687 :param nodes: mapping {filename:{'content':content},...}
702 :param nodes: mapping {filename:{'content':content},...}
688 :param parent_commit: parent commit, can be empty than it's initial
703 :param parent_commit: parent commit, can be empty than it's initial
689 commit
704 commit
690 :param author: author of commit, cna be different that commiter only
705 :param author: author of commit, cna be different that commiter only
691 for git
706 for git
692 :param trigger_push_hook: trigger push hooks
707 :param trigger_push_hook: trigger push hooks
693
708
694 :returns: new commit after deletion
709 :returns: new commit after deletion
695 """
710 """
696
711
697 user = self._get_user(user)
712 user = self._get_user(user)
698 scm_instance = repo.scm_instance(cache=False)
713 scm_instance = repo.scm_instance(cache=False)
699
714
700 processed_nodes = []
715 processed_nodes = []
701 for f_path in nodes:
716 for f_path in nodes:
702 f_path = self._sanitize_path(f_path)
717 f_path = self._sanitize_path(f_path)
703 # content can be empty but for compatabilty it allows same dicts
718 # content can be empty but for compatabilty it allows same dicts
704 # structure as add_nodes
719 # structure as add_nodes
705 content = nodes[f_path].get('content')
720 content = nodes[f_path].get('content')
706 processed_nodes.append((f_path, content))
721 processed_nodes.append((f_path, content))
707
722
708 message = safe_unicode(message)
723 message = safe_unicode(message)
709 commiter = user.full_contact
724 commiter = user.full_contact
710 author = safe_unicode(author) if author else commiter
725 author = safe_unicode(author) if author else commiter
711
726
712 imc = scm_instance.in_memory_commit
727 imc = scm_instance.in_memory_commit
713
728
714 if not parent_commit:
729 if not parent_commit:
715 parent_commit = EmptyCommit(alias=scm_instance.alias)
730 parent_commit = EmptyCommit(alias=scm_instance.alias)
716
731
717 if isinstance(parent_commit, EmptyCommit):
732 if isinstance(parent_commit, EmptyCommit):
718 # EmptyCommit means we we're editing empty repository
733 # EmptyCommit means we we're editing empty repository
719 parents = None
734 parents = None
720 else:
735 else:
721 parents = [parent_commit]
736 parents = [parent_commit]
722 # add multiple nodes
737 # add multiple nodes
723 for path, content in processed_nodes:
738 for path, content in processed_nodes:
724 imc.remove(FileNode(path, content=content))
739 imc.remove(FileNode(path, content=content))
725
740
726 # TODO: handle pre push scenario
741 # TODO: handle pre push scenario
727 tip = imc.commit(message=message,
742 tip = imc.commit(message=message,
728 author=author,
743 author=author,
729 parents=parents,
744 parents=parents,
730 branch=parent_commit.branch)
745 branch=parent_commit.branch)
731
746
732 self.mark_for_invalidation(repo.repo_name)
747 self.mark_for_invalidation(repo.repo_name)
733 if trigger_push_hook:
748 if trigger_push_hook:
734 hooks_utils.trigger_post_push_hook(
749 hooks_utils.trigger_post_push_hook(
735 username=user.username, action='push_local',
750 username=user.username, action='push_local',
736 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
751 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
737 commit_ids=[tip.raw_id])
752 commit_ids=[tip.raw_id])
738 return tip
753 return tip
739
754
740 def strip(self, repo, commit_id, branch):
755 def strip(self, repo, commit_id, branch):
741 scm_instance = repo.scm_instance(cache=False)
756 scm_instance = repo.scm_instance(cache=False)
742 scm_instance.config.clear_section('hooks')
757 scm_instance.config.clear_section('hooks')
743 scm_instance.strip(commit_id, branch)
758 scm_instance.strip(commit_id, branch)
744 self.mark_for_invalidation(repo.repo_name)
759 self.mark_for_invalidation(repo.repo_name)
745
760
746 def get_unread_journal(self):
761 def get_unread_journal(self):
747 return self.sa.query(UserLog).count()
762 return self.sa.query(UserLog).count()
748
763
749 def get_repo_landing_revs(self, translator, repo=None):
764 def get_repo_landing_revs(self, translator, repo=None):
750 """
765 """
751 Generates select option with tags branches and bookmarks (for hg only)
766 Generates select option with tags branches and bookmarks (for hg only)
752 grouped by type
767 grouped by type
753
768
754 :param repo:
769 :param repo:
755 """
770 """
756 _ = translator
771 _ = translator
757 repo = self._get_repo(repo)
772 repo = self._get_repo(repo)
758
773
759 hist_l = [
774 hist_l = [
760 ['rev:tip', _('latest tip')]
775 ['rev:tip', _('latest tip')]
761 ]
776 ]
762 choices = [
777 choices = [
763 'rev:tip'
778 'rev:tip'
764 ]
779 ]
765
780
766 if not repo:
781 if not repo:
767 return choices, hist_l
782 return choices, hist_l
768
783
769 repo = repo.scm_instance()
784 repo = repo.scm_instance()
770
785
771 branches_group = (
786 branches_group = (
772 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
787 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
773 for b in repo.branches],
788 for b in repo.branches],
774 _("Branches"))
789 _("Branches"))
775 hist_l.append(branches_group)
790 hist_l.append(branches_group)
776 choices.extend([x[0] for x in branches_group[0]])
791 choices.extend([x[0] for x in branches_group[0]])
777
792
778 if repo.alias == 'hg':
793 if repo.alias == 'hg':
779 bookmarks_group = (
794 bookmarks_group = (
780 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
795 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
781 for b in repo.bookmarks],
796 for b in repo.bookmarks],
782 _("Bookmarks"))
797 _("Bookmarks"))
783 hist_l.append(bookmarks_group)
798 hist_l.append(bookmarks_group)
784 choices.extend([x[0] for x in bookmarks_group[0]])
799 choices.extend([x[0] for x in bookmarks_group[0]])
785
800
786 tags_group = (
801 tags_group = (
787 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
802 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
788 for t in repo.tags],
803 for t in repo.tags],
789 _("Tags"))
804 _("Tags"))
790 hist_l.append(tags_group)
805 hist_l.append(tags_group)
791 choices.extend([x[0] for x in tags_group[0]])
806 choices.extend([x[0] for x in tags_group[0]])
792
807
793 return choices, hist_l
808 return choices, hist_l
794
809
795 def install_git_hook(self, repo, force_create=False):
810 def install_git_hook(self, repo, force_create=False):
796 """
811 """
797 Creates a rhodecode hook inside a git repository
812 Creates a rhodecode hook inside a git repository
798
813
799 :param repo: Instance of VCS repo
814 :param repo: Instance of VCS repo
800 :param force_create: Create even if same name hook exists
815 :param force_create: Create even if same name hook exists
801 """
816 """
802
817
803 loc = os.path.join(repo.path, 'hooks')
818 loc = os.path.join(repo.path, 'hooks')
804 if not repo.bare:
819 if not repo.bare:
805 loc = os.path.join(repo.path, '.git', 'hooks')
820 loc = os.path.join(repo.path, '.git', 'hooks')
806 if not os.path.isdir(loc):
821 if not os.path.isdir(loc):
807 os.makedirs(loc, mode=0777)
822 os.makedirs(loc, mode=0777)
808
823
809 tmpl_post = pkg_resources.resource_string(
824 tmpl_post = pkg_resources.resource_string(
810 'rhodecode', '/'.join(
825 'rhodecode', '/'.join(
811 ('config', 'hook_templates', 'git_post_receive.py.tmpl')))
826 ('config', 'hook_templates', 'git_post_receive.py.tmpl')))
812 tmpl_pre = pkg_resources.resource_string(
827 tmpl_pre = pkg_resources.resource_string(
813 'rhodecode', '/'.join(
828 'rhodecode', '/'.join(
814 ('config', 'hook_templates', 'git_pre_receive.py.tmpl')))
829 ('config', 'hook_templates', 'git_pre_receive.py.tmpl')))
815
830
816 for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
831 for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
817 _hook_file = os.path.join(loc, '%s-receive' % h_type)
832 _hook_file = os.path.join(loc, '%s-receive' % h_type)
818 log.debug('Installing git hook in repo %s', repo)
833 log.debug('Installing git hook in repo %s', repo)
819 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
834 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
820
835
821 if _rhodecode_hook or force_create:
836 if _rhodecode_hook or force_create:
822 log.debug('writing %s hook file !', h_type)
837 log.debug('writing %s hook file !', h_type)
823 try:
838 try:
824 with open(_hook_file, 'wb') as f:
839 with open(_hook_file, 'wb') as f:
825 tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
840 tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
826 tmpl = tmpl.replace('_ENV_', sys.executable)
841 tmpl = tmpl.replace('_ENV_', sys.executable)
827 f.write(tmpl)
842 f.write(tmpl)
828 os.chmod(_hook_file, 0755)
843 os.chmod(_hook_file, 0755)
829 except IOError:
844 except IOError:
830 log.exception('error writing hook file %s', _hook_file)
845 log.exception('error writing hook file %s', _hook_file)
831 else:
846 else:
832 log.debug('skipping writing hook file')
847 log.debug('skipping writing hook file')
833
848
834 def install_svn_hooks(self, repo, force_create=False):
849 def install_svn_hooks(self, repo, force_create=False):
835 """
850 """
836 Creates rhodecode hooks inside a svn repository
851 Creates rhodecode hooks inside a svn repository
837
852
838 :param repo: Instance of VCS repo
853 :param repo: Instance of VCS repo
839 :param force_create: Create even if same name hook exists
854 :param force_create: Create even if same name hook exists
840 """
855 """
841 hooks_path = os.path.join(repo.path, 'hooks')
856 hooks_path = os.path.join(repo.path, 'hooks')
842 if not os.path.isdir(hooks_path):
857 if not os.path.isdir(hooks_path):
843 os.makedirs(hooks_path)
858 os.makedirs(hooks_path)
844 post_commit_tmpl = pkg_resources.resource_string(
859 post_commit_tmpl = pkg_resources.resource_string(
845 'rhodecode', '/'.join(
860 'rhodecode', '/'.join(
846 ('config', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
861 ('config', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
847 pre_commit_template = pkg_resources.resource_string(
862 pre_commit_template = pkg_resources.resource_string(
848 'rhodecode', '/'.join(
863 'rhodecode', '/'.join(
849 ('config', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
864 ('config', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
850 templates = {
865 templates = {
851 'post-commit': post_commit_tmpl,
866 'post-commit': post_commit_tmpl,
852 'pre-commit': pre_commit_template
867 'pre-commit': pre_commit_template
853 }
868 }
854 for filename in templates:
869 for filename in templates:
855 _hook_file = os.path.join(hooks_path, filename)
870 _hook_file = os.path.join(hooks_path, filename)
856 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
871 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
857 if _rhodecode_hook or force_create:
872 if _rhodecode_hook or force_create:
858 log.debug('writing %s hook file !', filename)
873 log.debug('writing %s hook file !', filename)
859 template = templates[filename]
874 template = templates[filename]
860 try:
875 try:
861 with open(_hook_file, 'wb') as f:
876 with open(_hook_file, 'wb') as f:
862 template = template.replace(
877 template = template.replace(
863 '_TMPL_', rhodecode.__version__)
878 '_TMPL_', rhodecode.__version__)
864 template = template.replace('_ENV_', sys.executable)
879 template = template.replace('_ENV_', sys.executable)
865 f.write(template)
880 f.write(template)
866 os.chmod(_hook_file, 0755)
881 os.chmod(_hook_file, 0755)
867 except IOError:
882 except IOError:
868 log.exception('error writing hook file %s', filename)
883 log.exception('error writing hook file %s', filename)
869 else:
884 else:
870 log.debug('skipping writing hook file')
885 log.debug('skipping writing hook file')
871
886
872 def install_hooks(self, repo, repo_type):
887 def install_hooks(self, repo, repo_type):
873 if repo_type == 'git':
888 if repo_type == 'git':
874 self.install_git_hook(repo)
889 self.install_git_hook(repo)
875 elif repo_type == 'svn':
890 elif repo_type == 'svn':
876 self.install_svn_hooks(repo)
891 self.install_svn_hooks(repo)
877
892
878 def get_server_info(self, environ=None):
893 def get_server_info(self, environ=None):
879 server_info = get_system_info(environ)
894 server_info = get_system_info(environ)
880 return server_info
895 return server_info
881
896
882
897
883 def _check_rhodecode_hook(hook_path):
898 def _check_rhodecode_hook(hook_path):
884 """
899 """
885 Check if the hook was created by RhodeCode
900 Check if the hook was created by RhodeCode
886 """
901 """
887 if not os.path.exists(hook_path):
902 if not os.path.exists(hook_path):
888 return True
903 return True
889
904
890 log.debug('hook exists, checking if it is from rhodecode')
905 log.debug('hook exists, checking if it is from rhodecode')
891 hook_content = _read_hook(hook_path)
906 hook_content = _read_hook(hook_path)
892 matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
907 matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
893 if matches:
908 if matches:
894 try:
909 try:
895 version = matches.groups()[0]
910 version = matches.groups()[0]
896 log.debug('got %s, it is rhodecode', version)
911 log.debug('got %s, it is rhodecode', version)
897 return True
912 return True
898 except Exception:
913 except Exception:
899 log.exception("Exception while reading the hook version.")
914 log.exception("Exception while reading the hook version.")
900
915
901 return False
916 return False
902
917
903
918
904 def _read_hook(hook_path):
919 def _read_hook(hook_path):
905 with open(hook_path, 'rb') as f:
920 with open(hook_path, 'rb') as f:
906 content = f.read()
921 content = f.read()
907 return content
922 return content
General Comments 0
You need to be logged in to leave comments. Login now