feat(git/svn): remove filesystem modifications from git/svn calls. Once sharding is in place we can't do this....
super-admin -
r5216:07778025 default
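What this commit drops from GitRepository._init_repo() (visible in the hunk below) is the local os.makedirs(self.path, mode=0o755) call, so the backend no longer writes to its own filesystem when a repository is created. A minimal before/after sketch of that code path, under the assumption (not confirmed by this diff alone) that the remote init() / init_bare() calls on the VCSServer side now take care of creating the missing directory:

    # before: _init_repo() created the directory itself (a filesystem write
    # on the application node) before asking the remote to init the repo
    if create:
        os.makedirs(self.path, mode=0o755)

        if bare:
            self._remote.init_bare()
        else:
            self._remote.init()

    # after: the os.makedirs() call is gone; only the remote init calls remain
    if create:
        if bare:
            self._remote.init_bare()
        else:
            self._remote.init()

The sharding note in the commit message appears to be the rationale: once repositories can live on different storage shards, a directory created locally by this process could end up on the wrong node.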
@@ -1,1054 +1,1050 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 GIT repository module
20 GIT repository module
21 """
21 """
22
22
23 import logging
23 import logging
24 import os
24 import os
25 import re
25 import re
26
26
27 from zope.cachedescriptors.property import Lazy as LazyProperty
27 from zope.cachedescriptors.property import Lazy as LazyProperty
28
28
29 from collections import OrderedDict
29 from collections import OrderedDict
30 from rhodecode.lib.datelib import (
30 from rhodecode.lib.datelib import (
31 utcdate_fromtimestamp, makedate, date_astimestamp)
31 utcdate_fromtimestamp, makedate, date_astimestamp)
32 from rhodecode.lib.hash_utils import safe_str
32 from rhodecode.lib.hash_utils import safe_str
33 from rhodecode.lib.utils2 import CachedProperty
33 from rhodecode.lib.utils2 import CachedProperty
34 from rhodecode.lib.vcs import connection, path as vcspath
34 from rhodecode.lib.vcs import connection, path as vcspath
35 from rhodecode.lib.vcs.backends.base import (
35 from rhodecode.lib.vcs.backends.base import (
36 BaseRepository, CollectionGenerator, Config, MergeResponse,
36 BaseRepository, CollectionGenerator, Config, MergeResponse,
37 MergeFailureReason, Reference)
37 MergeFailureReason, Reference)
38 from rhodecode.lib.vcs.backends.git.commit import GitCommit
38 from rhodecode.lib.vcs.backends.git.commit import GitCommit
39 from rhodecode.lib.vcs.backends.git.diff import GitDiff
39 from rhodecode.lib.vcs.backends.git.diff import GitDiff
40 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
40 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
41 from rhodecode.lib.vcs.exceptions import (
41 from rhodecode.lib.vcs.exceptions import (
42 CommitDoesNotExistError, EmptyRepositoryError,
42 CommitDoesNotExistError, EmptyRepositoryError,
43 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
43 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
44
44
45
45
46 SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
46 SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
47
47
48 log = logging.getLogger(__name__)
48 log = logging.getLogger(__name__)
49
49
50
50
51 class GitRepository(BaseRepository):
51 class GitRepository(BaseRepository):
52 """
52 """
53 Git repository backend.
53 Git repository backend.
54 """
54 """
55 DEFAULT_BRANCH_NAME = os.environ.get('GIT_DEFAULT_BRANCH_NAME') or 'master'
55 DEFAULT_BRANCH_NAME = os.environ.get('GIT_DEFAULT_BRANCH_NAME') or 'master'
56 DEFAULT_REF = f'branch:{DEFAULT_BRANCH_NAME}'
56 DEFAULT_REF = f'branch:{DEFAULT_BRANCH_NAME}'
57
57
58 contact = BaseRepository.DEFAULT_CONTACT
58 contact = BaseRepository.DEFAULT_CONTACT
59
59
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 do_workspace_checkout=False, with_wire=None, bare=False):
61 do_workspace_checkout=False, with_wire=None, bare=False):
62
62
63 self.path = safe_str(os.path.abspath(repo_path))
63 self.path = safe_str(os.path.abspath(repo_path))
64 self.config = config if config else self.get_default_config()
64 self.config = config if config else self.get_default_config()
65 self.with_wire = with_wire or {"cache": False} # default should not use cache
65 self.with_wire = with_wire or {"cache": False} # default should not use cache
66
66
67 self._init_repo(create, src_url, do_workspace_checkout, bare)
67 self._init_repo(create, src_url, do_workspace_checkout, bare)
68
68
69 # caches
69 # caches
70 self._commit_ids = {}
70 self._commit_ids = {}
71
71
72 @LazyProperty
72 @LazyProperty
73 def _remote(self):
73 def _remote(self):
74 repo_id = self.path
74 repo_id = self.path
75 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
75 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
76
76
77 @LazyProperty
77 @LazyProperty
78 def bare(self):
78 def bare(self):
79 return self._remote.bare()
79 return self._remote.bare()
80
80
81 @LazyProperty
81 @LazyProperty
82 def head(self):
82 def head(self):
83 return self._remote.head()
83 return self._remote.head()
84
84
85 @CachedProperty
85 @CachedProperty
86 def commit_ids(self):
86 def commit_ids(self):
87 """
87 """
88 Returns list of commit ids, in ascending order. Being a lazy
88 Returns list of commit ids, in ascending order. Being a lazy
89 attribute allows external tools to inject commit ids from cache.
89 attribute allows external tools to inject commit ids from cache.
90 """
90 """
91 commit_ids = self._get_all_commit_ids()
91 commit_ids = self._get_all_commit_ids()
92 self._rebuild_cache(commit_ids)
92 self._rebuild_cache(commit_ids)
93 return commit_ids
93 return commit_ids
94
94
95 def _rebuild_cache(self, commit_ids):
95 def _rebuild_cache(self, commit_ids):
96 self._commit_ids = {commit_id: index
96 self._commit_ids = {commit_id: index
97 for index, commit_id in enumerate(commit_ids)}
97 for index, commit_id in enumerate(commit_ids)}
98
98
99 def run_git_command(self, cmd, **opts):
99 def run_git_command(self, cmd, **opts):
100 """
100 """
101 Runs given ``cmd`` as a git command and returns a tuple
101 Runs given ``cmd`` as a git command and returns a tuple
102 (stdout, stderr).
102 (stdout, stderr).
103
103
104 :param cmd: git command to be executed
104 :param cmd: git command to be executed
105 :param opts: env options to pass into Subprocess command
105 :param opts: env options to pass into Subprocess command
106 """
106 """
107 if not isinstance(cmd, list):
107 if not isinstance(cmd, list):
108 raise ValueError(f'cmd must be a list, got {type(cmd)} instead')
108 raise ValueError(f'cmd must be a list, got {type(cmd)} instead')
109
109
110 skip_stderr_log = opts.pop('skip_stderr_log', False)
110 skip_stderr_log = opts.pop('skip_stderr_log', False)
111 out, err = self._remote.run_git_command(cmd, **opts)
111 out, err = self._remote.run_git_command(cmd, **opts)
112 if err and not skip_stderr_log:
112 if err and not skip_stderr_log:
113 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
113 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
114 return out, err
114 return out, err
115
115
116 @staticmethod
116 @staticmethod
117 def check_url(url, config):
117 def check_url(url, config):
118 """
118 """
119 Function will check given url and try to verify if it's a valid
119 Function will check given url and try to verify if it's a valid
120 link. Sometimes it may happen that git will issue a basic
120 link. Sometimes it may happen that git will issue a basic
121 auth request that can cause the whole API to hang when used from python
121 auth request that can cause the whole API to hang when used from python
122 or other external calls.
122 or other external calls.
123
123
124 On failure it'll raise urllib2.HTTPError; the exception is also thrown
124 On failure it'll raise urllib2.HTTPError; the exception is also thrown
125 when the return code is not 200
125 when the return code is not 200
126 """
126 """
127 # check first if it's not an url
127 # check first if it's not an url
128 if os.path.isdir(url) or url.startswith('file:'):
128 if os.path.isdir(url) or url.startswith('file:'):
129 return True
129 return True
130
130
131 if '+' in url.split('://', 1)[0]:
131 if '+' in url.split('://', 1)[0]:
132 url = url.split('+', 1)[1]
132 url = url.split('+', 1)[1]
133
133
134 # Request the _remote to verify the url
134 # Request the _remote to verify the url
135 return connection.Git.check_url(url, config.serialize())
135 return connection.Git.check_url(url, config.serialize())
136
136
137 @staticmethod
137 @staticmethod
138 def is_valid_repository(path):
138 def is_valid_repository(path):
139 if os.path.isdir(os.path.join(path, '.git')):
139 if os.path.isdir(os.path.join(path, '.git')):
140 return True
140 return True
141 # check case of bare repository
141 # check case of bare repository
142 try:
142 try:
143 GitRepository(path)
143 GitRepository(path)
144 return True
144 return True
145 except VCSError:
145 except VCSError:
146 pass
146 pass
147 return False
147 return False
148
148
149 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
149 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
150 bare=False):
150 bare=False):
151 if create and os.path.exists(self.path):
151 if create and os.path.exists(self.path):
152 raise RepositoryError(
152 raise RepositoryError(
153 "Cannot create repository at %s, location already exist"
153 f"Cannot create repository at {self.path}, location already exist")
154 % self.path)
155
154
156 if bare and do_workspace_checkout:
155 if bare and do_workspace_checkout:
157 raise RepositoryError("Cannot update a bare repository")
156 raise RepositoryError("Cannot update a bare repository")
158 try:
157 try:
159
158
160 if src_url:
159 if src_url:
161 # check URL before any actions
160 # check URL before any actions
162 GitRepository.check_url(src_url, self.config)
161 GitRepository.check_url(src_url, self.config)
163
162
164 if create:
163 if create:
165 os.makedirs(self.path, mode=0o755)
166
167 if bare:
164 if bare:
168 self._remote.init_bare()
165 self._remote.init_bare()
169 else:
166 else:
170 self._remote.init()
167 self._remote.init()
171
168
172 if src_url and bare:
169 if src_url and bare:
173 # bare repository only allows a fetch and checkout is not allowed
170 # bare repository only allows a fetch and checkout is not allowed
174 self.fetch(src_url, commit_ids=None)
171 self.fetch(src_url, commit_ids=None)
175 elif src_url:
172 elif src_url:
176 self.pull(src_url, commit_ids=None,
173 self.pull(src_url, commit_ids=None,
177 update_after=do_workspace_checkout)
174 update_after=do_workspace_checkout)
178
175
179 else:
176 else:
180 if not self._remote.assert_correct_path():
177 if not self._remote.assert_correct_path():
181 raise RepositoryError(
178 raise RepositoryError(
182 'Path "%s" does not contain a Git repository' %
179 f'Path "{self.path}" does not contain a Git repository')
183 (self.path,))
184
180
185 # TODO: johbo: check if we have to translate the OSError here
181 # TODO: johbo: check if we have to translate the OSError here
186 except OSError as err:
182 except OSError as err:
187 raise RepositoryError(err)
183 raise RepositoryError(err)
188
184
189 def _get_all_commit_ids(self):
185 def _get_all_commit_ids(self):
190 return self._remote.get_all_commit_ids()
186 return self._remote.get_all_commit_ids()
191
187
192 def _get_commit_ids(self, filters=None):
188 def _get_commit_ids(self, filters=None):
193 # we must check if this repo is not empty, since later command
189 # we must check if this repo is not empty, since later command
194 # fails if it is. And it's cheaper to ask than throw the subprocess
190 # fails if it is. And it's cheaper to ask than throw the subprocess
195 # errors
191 # errors
196
192
197 head = self._remote.head(show_exc=False)
193 head = self._remote.head(show_exc=False)
198
194
199 if not head:
195 if not head:
200 return []
196 return []
201
197
202 rev_filter = ['--branches', '--tags']
198 rev_filter = ['--branches', '--tags']
203 extra_filter = []
199 extra_filter = []
204
200
205 if filters:
201 if filters:
206 if filters.get('since'):
202 if filters.get('since'):
207 extra_filter.append('--since=%s' % (filters['since']))
203 extra_filter.append('--since=%s' % (filters['since']))
208 if filters.get('until'):
204 if filters.get('until'):
209 extra_filter.append('--until=%s' % (filters['until']))
205 extra_filter.append('--until=%s' % (filters['until']))
210 if filters.get('branch_name'):
206 if filters.get('branch_name'):
211 rev_filter = []
207 rev_filter = []
212 extra_filter.append(filters['branch_name'])
208 extra_filter.append(filters['branch_name'])
213 rev_filter.extend(extra_filter)
209 rev_filter.extend(extra_filter)
214
210
215 # if filters.get('start') or filters.get('end'):
211 # if filters.get('start') or filters.get('end'):
216 # # skip is offset, max-count is limit
212 # # skip is offset, max-count is limit
217 # if filters.get('start'):
213 # if filters.get('start'):
218 # extra_filter += ' --skip=%s' % filters['start']
214 # extra_filter += ' --skip=%s' % filters['start']
219 # if filters.get('end'):
215 # if filters.get('end'):
220 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
216 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
221
217
222 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
218 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
223 try:
219 try:
224 output, __ = self.run_git_command(cmd)
220 output, __ = self.run_git_command(cmd)
225 except RepositoryError:
221 except RepositoryError:
226 # Can be raised for empty repositories
222 # Can be raised for empty repositories
227 return []
223 return []
228 return output.splitlines()
224 return output.splitlines()
229
225
230 def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False, reference_obj=None):
226 def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False, reference_obj=None):
231
227
232 def is_null(value):
228 def is_null(value):
233 return len(value) == commit_id_or_idx.count('0')
229 return len(value) == commit_id_or_idx.count('0')
234
230
235 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
231 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
236 return self.commit_ids[-1]
232 return self.commit_ids[-1]
237
233
238 commit_missing_err = "Commit {} does not exist for `{}`".format(
234 commit_missing_err = "Commit {} does not exist for `{}`".format(
239 *map(safe_str, [commit_id_or_idx, self.name]))
235 *map(safe_str, [commit_id_or_idx, self.name]))
240
236
241 is_bstr = isinstance(commit_id_or_idx, str)
237 is_bstr = isinstance(commit_id_or_idx, str)
242 is_branch = reference_obj and reference_obj.branch
238 is_branch = reference_obj and reference_obj.branch
243
239
244 lookup_ok = False
240 lookup_ok = False
245 if is_bstr:
241 if is_bstr:
246 # Need to call remote to translate id for tagging scenarios,
242 # Need to call remote to translate id for tagging scenarios,
247 # or branches that are numeric
243 # or branches that are numeric
248 try:
244 try:
249 remote_data = self._remote.get_object(commit_id_or_idx,
245 remote_data = self._remote.get_object(commit_id_or_idx,
250 maybe_unreachable=maybe_unreachable)
246 maybe_unreachable=maybe_unreachable)
251 commit_id_or_idx = remote_data["commit_id"]
247 commit_id_or_idx = remote_data["commit_id"]
252 lookup_ok = True
248 lookup_ok = True
253 except (CommitDoesNotExistError,):
249 except (CommitDoesNotExistError,):
254 lookup_ok = False
250 lookup_ok = False
255
251
256 if lookup_ok is False:
252 if lookup_ok is False:
257 is_numeric_idx = \
253 is_numeric_idx = \
258 (is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12) \
254 (is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12) \
259 or isinstance(commit_id_or_idx, int)
255 or isinstance(commit_id_or_idx, int)
260 if not is_branch and (is_numeric_idx or is_null(commit_id_or_idx)):
256 if not is_branch and (is_numeric_idx or is_null(commit_id_or_idx)):
261 try:
257 try:
262 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
258 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
263 lookup_ok = True
259 lookup_ok = True
264 except Exception:
260 except Exception:
265 raise CommitDoesNotExistError(commit_missing_err)
261 raise CommitDoesNotExistError(commit_missing_err)
266
262
267 # we failed regular lookup, and by integer number lookup
263 # we failed regular lookup, and by integer number lookup
268 if lookup_ok is False:
264 if lookup_ok is False:
269 raise CommitDoesNotExistError(commit_missing_err)
265 raise CommitDoesNotExistError(commit_missing_err)
270
266
271 # Ensure we return full id
267 # Ensure we return full id
272 if not SHA_PATTERN.match(str(commit_id_or_idx)):
268 if not SHA_PATTERN.match(str(commit_id_or_idx)):
273 raise CommitDoesNotExistError(
269 raise CommitDoesNotExistError(
274 "Given commit id %s not recognized" % commit_id_or_idx)
270 "Given commit id %s not recognized" % commit_id_or_idx)
275 return commit_id_or_idx
271 return commit_id_or_idx
276
272
277 def get_hook_location(self):
273 def get_hook_location(self):
278 """
274 """
279 returns absolute path to location where hooks are stored
275 returns absolute path to location where hooks are stored
280 """
276 """
281 loc = os.path.join(self.path, 'hooks')
277 loc = os.path.join(self.path, 'hooks')
282 if not self.bare:
278 if not self.bare:
283 loc = os.path.join(self.path, '.git', 'hooks')
279 loc = os.path.join(self.path, '.git', 'hooks')
284 return loc
280 return loc
285
281
286 @LazyProperty
282 @LazyProperty
287 def last_change(self):
283 def last_change(self):
288 """
284 """
289 Returns last change made on this repository as
285 Returns last change made on this repository as
290 `datetime.datetime` object.
286 `datetime.datetime` object.
291 """
287 """
292 try:
288 try:
293 return self.get_commit().date
289 return self.get_commit().date
294 except RepositoryError:
290 except RepositoryError:
295 tzoffset = makedate()[1]
291 tzoffset = makedate()[1]
296 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
292 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
297
293
298 def _get_fs_mtime(self):
294 def _get_fs_mtime(self):
299 idx_loc = '' if self.bare else '.git'
295 idx_loc = '' if self.bare else '.git'
300 # fallback to filesystem
296 # fallback to filesystem
301 in_path = os.path.join(self.path, idx_loc, "index")
297 in_path = os.path.join(self.path, idx_loc, "index")
302 he_path = os.path.join(self.path, idx_loc, "HEAD")
298 he_path = os.path.join(self.path, idx_loc, "HEAD")
303 if os.path.exists(in_path):
299 if os.path.exists(in_path):
304 return os.stat(in_path).st_mtime
300 return os.stat(in_path).st_mtime
305 else:
301 else:
306 return os.stat(he_path).st_mtime
302 return os.stat(he_path).st_mtime
307
303
308 @LazyProperty
304 @LazyProperty
309 def description(self):
305 def description(self):
310 description = self._remote.get_description()
306 description = self._remote.get_description()
311 return safe_str(description or self.DEFAULT_DESCRIPTION)
307 return safe_str(description or self.DEFAULT_DESCRIPTION)
312
308
313 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
309 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
314 if self.is_empty():
310 if self.is_empty():
315 return OrderedDict()
311 return OrderedDict()
316
312
317 result = []
313 result = []
318 for ref, sha in self._refs.items():
314 for ref, sha in self._refs.items():
319 if ref.startswith(prefix):
315 if ref.startswith(prefix):
320 ref_name = ref
316 ref_name = ref
321 if strip_prefix:
317 if strip_prefix:
322 ref_name = ref[len(prefix):]
318 ref_name = ref[len(prefix):]
323 result.append((safe_str(ref_name), sha))
319 result.append((safe_str(ref_name), sha))
324
320
325 def get_name(entry):
321 def get_name(entry):
326 return entry[0]
322 return entry[0]
327
323
328 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
324 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
329
325
330 def _get_branches(self):
326 def _get_branches(self):
331 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
327 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
332
328
333 @CachedProperty
329 @CachedProperty
334 def branches(self):
330 def branches(self):
335 return self._get_branches()
331 return self._get_branches()
336
332
337 @CachedProperty
333 @CachedProperty
338 def branches_closed(self):
334 def branches_closed(self):
339 return {}
335 return {}
340
336
341 @CachedProperty
337 @CachedProperty
342 def bookmarks(self):
338 def bookmarks(self):
343 return {}
339 return {}
344
340
345 @CachedProperty
341 @CachedProperty
346 def branches_all(self):
342 def branches_all(self):
347 all_branches = {}
343 all_branches = {}
348 all_branches.update(self.branches)
344 all_branches.update(self.branches)
349 all_branches.update(self.branches_closed)
345 all_branches.update(self.branches_closed)
350 return all_branches
346 return all_branches
351
347
352 @CachedProperty
348 @CachedProperty
353 def tags(self):
349 def tags(self):
354 return self._get_tags()
350 return self._get_tags()
355
351
356 def _get_tags(self):
352 def _get_tags(self):
357 return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
353 return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
358
354
359 def tag(self, name, user, commit_id=None, message=None, date=None,
355 def tag(self, name, user, commit_id=None, message=None, date=None,
360 **kwargs):
356 **kwargs):
361 # TODO: fix this method to apply annotated tags correctly with message
357 # TODO: fix this method to apply annotated tags correctly with message
362 """
358 """
363 Creates and returns a tag for the given ``commit_id``.
359 Creates and returns a tag for the given ``commit_id``.
364
360
365 :param name: name for new tag
361 :param name: name for new tag
366 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
362 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
367 :param commit_id: commit id for which new tag would be created
363 :param commit_id: commit id for which new tag would be created
368 :param message: message of the tag's commit
364 :param message: message of the tag's commit
369 :param date: date of tag's commit
365 :param date: date of tag's commit
370
366
371 :raises TagAlreadyExistError: if tag with same name already exists
367 :raises TagAlreadyExistError: if tag with same name already exists
372 """
368 """
373 if name in self.tags:
369 if name in self.tags:
374 raise TagAlreadyExistError("Tag %s already exists" % name)
370 raise TagAlreadyExistError("Tag %s already exists" % name)
375 commit = self.get_commit(commit_id=commit_id)
371 commit = self.get_commit(commit_id=commit_id)
376 message = message or f"Added tag {name} for commit {commit.raw_id}"
372 message = message or f"Added tag {name} for commit {commit.raw_id}"
377
373
378 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
374 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
379
375
380 self._invalidate_prop_cache('tags')
376 self._invalidate_prop_cache('tags')
381 self._invalidate_prop_cache('_refs')
377 self._invalidate_prop_cache('_refs')
382
378
383 return commit
379 return commit
384
380
385 def remove_tag(self, name, user, message=None, date=None):
381 def remove_tag(self, name, user, message=None, date=None):
386 """
382 """
387 Removes tag with the given ``name``.
383 Removes tag with the given ``name``.
388
384
389 :param name: name of the tag to be removed
385 :param name: name of the tag to be removed
390 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
386 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
391 :param message: message of the tag's removal commit
387 :param message: message of the tag's removal commit
392 :param date: date of tag's removal commit
388 :param date: date of tag's removal commit
393
389
394 :raises TagDoesNotExistError: if tag with given name does not exist
390 :raises TagDoesNotExistError: if tag with given name does not exist
395 """
391 """
396 if name not in self.tags:
392 if name not in self.tags:
397 raise TagDoesNotExistError("Tag %s does not exist" % name)
393 raise TagDoesNotExistError("Tag %s does not exist" % name)
398
394
399 self._remote.tag_remove(name)
395 self._remote.tag_remove(name)
400 self._invalidate_prop_cache('tags')
396 self._invalidate_prop_cache('tags')
401 self._invalidate_prop_cache('_refs')
397 self._invalidate_prop_cache('_refs')
402
398
403 def _get_refs(self):
399 def _get_refs(self):
404 return self._remote.get_refs()
400 return self._remote.get_refs()
405
401
406 @CachedProperty
402 @CachedProperty
407 def _refs(self):
403 def _refs(self):
408 return self._get_refs()
404 return self._get_refs()
409
405
410 @property
406 @property
411 def _ref_tree(self):
407 def _ref_tree(self):
412 node = tree = {}
408 node = tree = {}
413 for ref, sha in self._refs.items():
409 for ref, sha in self._refs.items():
414 path = ref.split('/')
410 path = ref.split('/')
415 for bit in path[:-1]:
411 for bit in path[:-1]:
416 node = node.setdefault(bit, {})
412 node = node.setdefault(bit, {})
417 node[path[-1]] = sha
413 node[path[-1]] = sha
418 node = tree
414 node = tree
419 return tree
415 return tree
420
416
421 def get_remote_ref(self, ref_name):
417 def get_remote_ref(self, ref_name):
422 ref_key = f'refs/remotes/origin/{safe_str(ref_name)}'
418 ref_key = f'refs/remotes/origin/{safe_str(ref_name)}'
423 try:
419 try:
424 return self._refs[ref_key]
420 return self._refs[ref_key]
425 except Exception:
421 except Exception:
426 return
422 return
427
423
428 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
424 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
429 translate_tag=True, maybe_unreachable=False, reference_obj=None):
425 translate_tag=True, maybe_unreachable=False, reference_obj=None):
430 """
426 """
431 Returns `GitCommit` object representing commit from git repository
427 Returns `GitCommit` object representing commit from git repository
432 at the given `commit_id` or head (most recent commit) if None given.
428 at the given `commit_id` or head (most recent commit) if None given.
433 """
429 """
434
430
435 if self.is_empty():
431 if self.is_empty():
436 raise EmptyRepositoryError("There are no commits yet")
432 raise EmptyRepositoryError("There are no commits yet")
437
433
438 if commit_id is not None:
434 if commit_id is not None:
439 self._validate_commit_id(commit_id)
435 self._validate_commit_id(commit_id)
440 try:
436 try:
441 # we have cached idx, use it without contacting the remote
437 # we have cached idx, use it without contacting the remote
442 idx = self._commit_ids[commit_id]
438 idx = self._commit_ids[commit_id]
443 return GitCommit(self, commit_id, idx, pre_load=pre_load)
439 return GitCommit(self, commit_id, idx, pre_load=pre_load)
444 except KeyError:
440 except KeyError:
445 pass
441 pass
446
442
447 elif commit_idx is not None:
443 elif commit_idx is not None:
448 self._validate_commit_idx(commit_idx)
444 self._validate_commit_idx(commit_idx)
449 try:
445 try:
450 _commit_id = self.commit_ids[commit_idx]
446 _commit_id = self.commit_ids[commit_idx]
451 if commit_idx < 0:
447 if commit_idx < 0:
452 commit_idx = self.commit_ids.index(_commit_id)
448 commit_idx = self.commit_ids.index(_commit_id)
453 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
449 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
454 except IndexError:
450 except IndexError:
455 commit_id = commit_idx
451 commit_id = commit_idx
456 else:
452 else:
457 commit_id = "tip"
453 commit_id = "tip"
458
454
459 if translate_tag:
455 if translate_tag:
460 commit_id = self._lookup_commit(
456 commit_id = self._lookup_commit(
461 commit_id, maybe_unreachable=maybe_unreachable,
457 commit_id, maybe_unreachable=maybe_unreachable,
462 reference_obj=reference_obj)
458 reference_obj=reference_obj)
463
459
464 try:
460 try:
465 idx = self._commit_ids[commit_id]
461 idx = self._commit_ids[commit_id]
466 except KeyError:
462 except KeyError:
467 idx = -1
463 idx = -1
468
464
469 return GitCommit(self, commit_id, idx, pre_load=pre_load)
465 return GitCommit(self, commit_id, idx, pre_load=pre_load)
470
466
471 def get_commits(
467 def get_commits(
472 self, start_id=None, end_id=None, start_date=None, end_date=None,
468 self, start_id=None, end_id=None, start_date=None, end_date=None,
473 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
469 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
474 """
470 """
475 Returns generator of `GitCommit` objects from start to end (both
471 Returns generator of `GitCommit` objects from start to end (both
476 are inclusive), in ascending date order.
472 are inclusive), in ascending date order.
477
473
478 :param start_id: None, str(commit_id)
474 :param start_id: None, str(commit_id)
479 :param end_id: None, str(commit_id)
475 :param end_id: None, str(commit_id)
480 :param start_date: if specified, commits with commit date less than
476 :param start_date: if specified, commits with commit date less than
481 ``start_date`` would be filtered out from returned set
477 ``start_date`` would be filtered out from returned set
482 :param end_date: if specified, commits with commit date greater than
478 :param end_date: if specified, commits with commit date greater than
483 ``end_date`` would be filtered out from returned set
479 ``end_date`` would be filtered out from returned set
484 :param branch_name: if specified, commits not reachable from given
480 :param branch_name: if specified, commits not reachable from given
485 branch would be filtered out from returned set
481 branch would be filtered out from returned set
486 :param show_hidden: Show hidden commits such as obsolete or hidden from
482 :param show_hidden: Show hidden commits such as obsolete or hidden from
487 Mercurial evolve
483 Mercurial evolve
488 :raise BranchDoesNotExistError: If given `branch_name` does not
484 :raise BranchDoesNotExistError: If given `branch_name` does not
489 exist.
485 exist.
490 :raise CommitDoesNotExistError: If commits for given `start` or
486 :raise CommitDoesNotExistError: If commits for given `start` or
491 `end` could not be found.
487 `end` could not be found.
492
488
493 """
489 """
494 if self.is_empty():
490 if self.is_empty():
495 raise EmptyRepositoryError("There are no commits yet")
491 raise EmptyRepositoryError("There are no commits yet")
496
492
497 self._validate_branch_name(branch_name)
493 self._validate_branch_name(branch_name)
498
494
499 if start_id is not None:
495 if start_id is not None:
500 self._validate_commit_id(start_id)
496 self._validate_commit_id(start_id)
501 if end_id is not None:
497 if end_id is not None:
502 self._validate_commit_id(end_id)
498 self._validate_commit_id(end_id)
503
499
504 start_raw_id = self._lookup_commit(start_id)
500 start_raw_id = self._lookup_commit(start_id)
505 start_pos = self._commit_ids[start_raw_id] if start_id else None
501 start_pos = self._commit_ids[start_raw_id] if start_id else None
506 end_raw_id = self._lookup_commit(end_id)
502 end_raw_id = self._lookup_commit(end_id)
507 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
503 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
508
504
509 if None not in [start_id, end_id] and start_pos > end_pos:
505 if None not in [start_id, end_id] and start_pos > end_pos:
510 raise RepositoryError(
506 raise RepositoryError(
511 "Start commit '%s' cannot be after end commit '%s'" %
507 "Start commit '%s' cannot be after end commit '%s'" %
512 (start_id, end_id))
508 (start_id, end_id))
513
509
514 if end_pos is not None:
510 if end_pos is not None:
515 end_pos += 1
511 end_pos += 1
516
512
517 filter_ = []
513 filter_ = []
518 if branch_name:
514 if branch_name:
519 filter_.append({'branch_name': branch_name})
515 filter_.append({'branch_name': branch_name})
520 if start_date and not end_date:
516 if start_date and not end_date:
521 filter_.append({'since': start_date})
517 filter_.append({'since': start_date})
522 if end_date and not start_date:
518 if end_date and not start_date:
523 filter_.append({'until': end_date})
519 filter_.append({'until': end_date})
524 if start_date and end_date:
520 if start_date and end_date:
525 filter_.append({'since': start_date})
521 filter_.append({'since': start_date})
526 filter_.append({'until': end_date})
522 filter_.append({'until': end_date})
527
523
528 # if start_pos or end_pos:
524 # if start_pos or end_pos:
529 # filter_.append({'start': start_pos})
525 # filter_.append({'start': start_pos})
530 # filter_.append({'end': end_pos})
526 # filter_.append({'end': end_pos})
531
527
532 if filter_:
528 if filter_:
533 revfilters = {
529 revfilters = {
534 'branch_name': branch_name,
530 'branch_name': branch_name,
535 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
531 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
536 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
532 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
537 'start': start_pos,
533 'start': start_pos,
538 'end': end_pos,
534 'end': end_pos,
539 }
535 }
540 commit_ids = self._get_commit_ids(filters=revfilters)
536 commit_ids = self._get_commit_ids(filters=revfilters)
541
537
542 else:
538 else:
543 commit_ids = self.commit_ids
539 commit_ids = self.commit_ids
544
540
545 if start_pos or end_pos:
541 if start_pos or end_pos:
546 commit_ids = commit_ids[start_pos: end_pos]
542 commit_ids = commit_ids[start_pos: end_pos]
547
543
548 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
544 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
549 translate_tag=translate_tags)
545 translate_tag=translate_tags)
550
546
551 def get_diff(
547 def get_diff(
552 self, commit1, commit2, path='', ignore_whitespace=False,
548 self, commit1, commit2, path='', ignore_whitespace=False,
553 context=3, path1=None):
549 context=3, path1=None):
554 """
550 """
555 Returns (git like) *diff*, as plain text. Shows changes introduced by
551 Returns (git like) *diff*, as plain text. Shows changes introduced by
556 ``commit2`` since ``commit1``.
552 ``commit2`` since ``commit1``.
557
553
558 :param commit1: Entry point from which diff is shown. Can be
554 :param commit1: Entry point from which diff is shown. Can be
559 ``self.EMPTY_COMMIT`` - in this case, patch showing all
555 ``self.EMPTY_COMMIT`` - in this case, patch showing all
560 the changes since empty state of the repository until ``commit2``
556 the changes since empty state of the repository until ``commit2``
561 :param commit2: Until which commit's changes should be shown.
557 :param commit2: Until which commit's changes should be shown.
562 :param path:
558 :param path:
563 :param ignore_whitespace: If set to ``True``, would not show whitespace
559 :param ignore_whitespace: If set to ``True``, would not show whitespace
564 changes. Defaults to ``False``.
560 changes. Defaults to ``False``.
565 :param context: How many lines before/after changed lines should be
561 :param context: How many lines before/after changed lines should be
566 shown. Defaults to ``3``.
562 shown. Defaults to ``3``.
567 :param path1:
563 :param path1:
568 """
564 """
569 self._validate_diff_commits(commit1, commit2)
565 self._validate_diff_commits(commit1, commit2)
570 if path1 is not None and path1 != path:
566 if path1 is not None and path1 != path:
571 raise ValueError("Diff of two different paths not supported.")
567 raise ValueError("Diff of two different paths not supported.")
572
568
573 if path:
569 if path:
574 file_filter = path
570 file_filter = path
575 else:
571 else:
576 file_filter = None
572 file_filter = None
577
573
578 diff = self._remote.diff(
574 diff = self._remote.diff(
579 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
575 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
580 opt_ignorews=ignore_whitespace,
576 opt_ignorews=ignore_whitespace,
581 context=context)
577 context=context)
582
578
583 return GitDiff(diff)
579 return GitDiff(diff)
584
580
585 def strip(self, commit_id, branch_name):
581 def strip(self, commit_id, branch_name):
586 commit = self.get_commit(commit_id=commit_id)
582 commit = self.get_commit(commit_id=commit_id)
587 if commit.merge:
583 if commit.merge:
588 raise Exception('Cannot reset to merge commit')
584 raise Exception('Cannot reset to merge commit')
589
585
590 # parent is going to be the new head now
586 # parent is going to be the new head now
591 commit = commit.parents[0]
587 commit = commit.parents[0]
592 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
588 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
593
589
594 # clear cached properties
590 # clear cached properties
595 self._invalidate_prop_cache('commit_ids')
591 self._invalidate_prop_cache('commit_ids')
596 self._invalidate_prop_cache('_refs')
592 self._invalidate_prop_cache('_refs')
597 self._invalidate_prop_cache('branches')
593 self._invalidate_prop_cache('branches')
598
594
599 return len(self.commit_ids)
595 return len(self.commit_ids)
600
596
601 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
597 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
602 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
598 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
603 self, commit_id1, repo2, commit_id2)
599 self, commit_id1, repo2, commit_id2)
604
600
605 if commit_id1 == commit_id2:
601 if commit_id1 == commit_id2:
606 return commit_id1
602 return commit_id1
607
603
608 if self != repo2:
604 if self != repo2:
609 commits = self._remote.get_missing_revs(
605 commits = self._remote.get_missing_revs(
610 commit_id1, commit_id2, repo2.path)
606 commit_id1, commit_id2, repo2.path)
611 if commits:
607 if commits:
612 commit = repo2.get_commit(commits[-1])
608 commit = repo2.get_commit(commits[-1])
613 if commit.parents:
609 if commit.parents:
614 ancestor_id = commit.parents[0].raw_id
610 ancestor_id = commit.parents[0].raw_id
615 else:
611 else:
616 ancestor_id = None
612 ancestor_id = None
617 else:
613 else:
618 # no commits from other repo, ancestor_id is the commit_id2
614 # no commits from other repo, ancestor_id is the commit_id2
619 ancestor_id = commit_id2
615 ancestor_id = commit_id2
620 else:
616 else:
621 output, __ = self.run_git_command(
617 output, __ = self.run_git_command(
622 ['merge-base', commit_id1, commit_id2])
618 ['merge-base', commit_id1, commit_id2])
623 ancestor_id = self.COMMIT_ID_PAT.findall(output)[0]
619 ancestor_id = self.COMMIT_ID_PAT.findall(output)[0]
624
620
625 log.debug('Found common ancestor with sha: %s', ancestor_id)
621 log.debug('Found common ancestor with sha: %s', ancestor_id)
626
622
627 return ancestor_id
623 return ancestor_id
628
624
629 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
625 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
630 repo1 = self
626 repo1 = self
631 ancestor_id = None
627 ancestor_id = None
632
628
633 if commit_id1 == commit_id2:
629 if commit_id1 == commit_id2:
634 commits = []
630 commits = []
635 elif repo1 != repo2:
631 elif repo1 != repo2:
636 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
632 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
637 repo2.path)
633 repo2.path)
638 commits = [
634 commits = [
639 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
635 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
640 for commit_id in reversed(missing_ids)]
636 for commit_id in reversed(missing_ids)]
641 else:
637 else:
642 output, __ = repo1.run_git_command(
638 output, __ = repo1.run_git_command(
643 ['log', '--reverse', '--pretty=format: %H', '-s',
639 ['log', '--reverse', '--pretty=format: %H', '-s',
644 f'{commit_id1}..{commit_id2}'])
640 f'{commit_id1}..{commit_id2}'])
645 commits = [
641 commits = [
646 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
642 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
647 for commit_id in self.COMMIT_ID_PAT.findall(output)]
643 for commit_id in self.COMMIT_ID_PAT.findall(output)]
648
644
649 return commits
645 return commits
650
646
651 @LazyProperty
647 @LazyProperty
652 def in_memory_commit(self):
648 def in_memory_commit(self):
653 """
649 """
654 Returns ``GitInMemoryCommit`` object for this repository.
650 Returns ``GitInMemoryCommit`` object for this repository.
655 """
651 """
656 return GitInMemoryCommit(self)
652 return GitInMemoryCommit(self)
657
653
658 def pull(self, url, commit_ids=None, update_after=False):
654 def pull(self, url, commit_ids=None, update_after=False):
659 """
655 """
660 Pull changes from external location. Pull is different in GIT
656 Pull changes from external location. Pull is different in GIT
661 than fetch since it does a checkout
657 than fetch since it does a checkout
662
658
663 :param commit_ids: Optional. Can be set to a list of commit ids
659 :param commit_ids: Optional. Can be set to a list of commit ids
664 which shall be pulled from the other repository.
660 which shall be pulled from the other repository.
665 """
661 """
666 refs = None
662 refs = None
667 if commit_ids is not None:
663 if commit_ids is not None:
668 remote_refs = self._remote.get_remote_refs(url)
664 remote_refs = self._remote.get_remote_refs(url)
669 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
665 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
670 self._remote.pull(url, refs=refs, update_after=update_after)
666 self._remote.pull(url, refs=refs, update_after=update_after)
671 self._remote.invalidate_vcs_cache()
667 self._remote.invalidate_vcs_cache()
672
668
673 def fetch(self, url, commit_ids=None):
669 def fetch(self, url, commit_ids=None):
674 """
670 """
675 Fetch all git objects from external location.
671 Fetch all git objects from external location.
676 """
672 """
677 self._remote.sync_fetch(url, refs=commit_ids)
673 self._remote.sync_fetch(url, refs=commit_ids)
678 self._remote.invalidate_vcs_cache()
674 self._remote.invalidate_vcs_cache()
679
675
680 def push(self, url):
676 def push(self, url):
681 refs = None
677 refs = None
682 self._remote.sync_push(url, refs=refs)
678 self._remote.sync_push(url, refs=refs)
683
679
684 def set_refs(self, ref_name, commit_id):
680 def set_refs(self, ref_name, commit_id):
685 self._remote.set_refs(ref_name, commit_id)
681 self._remote.set_refs(ref_name, commit_id)
686 self._invalidate_prop_cache('_refs')
682 self._invalidate_prop_cache('_refs')
687
683
688 def remove_ref(self, ref_name):
684 def remove_ref(self, ref_name):
689 self._remote.remove_ref(ref_name)
685 self._remote.remove_ref(ref_name)
690 self._invalidate_prop_cache('_refs')
686 self._invalidate_prop_cache('_refs')
691
687
692 def run_gc(self, prune=True):
688 def run_gc(self, prune=True):
693 cmd = ['gc', '--aggressive']
689 cmd = ['gc', '--aggressive']
694 if prune:
690 if prune:
695 cmd += ['--prune=now']
691 cmd += ['--prune=now']
696 _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False)
692 _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False)
697 return stderr
693 return stderr
698
694
699 def _update_server_info(self):
695 def _update_server_info(self):
700 """
696 """
701 runs git's update-server-info command in this repo instance
697 runs git's update-server-info command in this repo instance
702 """
698 """
703 self._remote.update_server_info()
699 self._remote.update_server_info()
704
700
705 def _current_branch(self):
701 def _current_branch(self):
706 """
702 """
707 Return the name of the current branch.
703 Return the name of the current branch.
708
704
709 It only works for non bare repositories (i.e. repositories with a
705 It only works for non bare repositories (i.e. repositories with a
710 working copy)
706 working copy)
711 """
707 """
712 if self.bare:
708 if self.bare:
713 raise RepositoryError('Bare git repos do not have active branches')
709 raise RepositoryError('Bare git repos do not have active branches')
714
710
715 if self.is_empty():
711 if self.is_empty():
716 return None
712 return None
717
713
718 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
714 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
719 return stdout.strip()
715 return stdout.strip()
720
716
721 def _checkout(self, branch_name, create=False, force=False):
717 def _checkout(self, branch_name, create=False, force=False):
722 """
718 """
723 Checkout a branch in the working directory.
719 Checkout a branch in the working directory.
724
720
725 It tries to create the branch if create is True, failing if the branch
721 It tries to create the branch if create is True, failing if the branch
726 already exists.
722 already exists.
727
723
728 It only works for non bare repositories (i.e. repositories with a
724 It only works for non bare repositories (i.e. repositories with a
729 working copy)
725 working copy)
730 """
726 """
731 if self.bare:
727 if self.bare:
732 raise RepositoryError('Cannot checkout branches in a bare git repo')
728 raise RepositoryError('Cannot checkout branches in a bare git repo')
733
729
734 cmd = ['checkout']
730 cmd = ['checkout']
735 if force:
731 if force:
736 cmd.append('-f')
732 cmd.append('-f')
737 if create:
733 if create:
738 cmd.append('-b')
734 cmd.append('-b')
739 cmd.append(branch_name)
735 cmd.append(branch_name)
740 self.run_git_command(cmd, fail_on_stderr=False)
736 self.run_git_command(cmd, fail_on_stderr=False)
741
737
742 def _create_branch(self, branch_name, commit_id):
738 def _create_branch(self, branch_name, commit_id):
743 """
739 """
744 creates a branch in a GIT repo
740 creates a branch in a GIT repo
745 """
741 """
746 self._remote.create_branch(branch_name, commit_id)
742 self._remote.create_branch(branch_name, commit_id)
747
743
748 def _identify(self):
744 def _identify(self):
749 """
745 """
750 Return the current state of the working directory.
746 Return the current state of the working directory.
751 """
747 """
752 if self.bare:
748 if self.bare:
753 raise RepositoryError('Bare git repos do not have active branches')
749 raise RepositoryError('Bare git repos do not have active branches')
754
750
755 if self.is_empty():
751 if self.is_empty():
756 return None
752 return None
757
753
758 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
754 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
759 return stdout.strip()
755 return stdout.strip()
760
756
761 def _local_clone(self, clone_path, branch_name, source_branch=None):
757 def _local_clone(self, clone_path, branch_name, source_branch=None):
762 """
758 """
763 Create a local clone of the current repo.
759 Create a local clone of the current repo.
764 """
760 """
765 # N.B.(skreft): the --branch option is required as otherwise the shallow
761 # N.B.(skreft): the --branch option is required as otherwise the shallow
766 # clone will only fetch the active branch.
762 # clone will only fetch the active branch.
767 cmd = ['clone', '--branch', branch_name,
763 cmd = ['clone', '--branch', branch_name,
768 self.path, os.path.abspath(clone_path)]
764 self.path, os.path.abspath(clone_path)]
769
765
770 self.run_git_command(cmd, fail_on_stderr=False)
766 self.run_git_command(cmd, fail_on_stderr=False)
771
767
772 # if we get a different source branch, make sure we also fetch it for
768 # if we get a different source branch, make sure we also fetch it for
773 # merge conditions
769 # merge conditions
774 if source_branch and source_branch != branch_name:
770 if source_branch and source_branch != branch_name:
775 # check if the ref exists.
771 # check if the ref exists.
776 shadow_repo = GitRepository(os.path.abspath(clone_path))
772 shadow_repo = GitRepository(os.path.abspath(clone_path))
777 if shadow_repo.get_remote_ref(source_branch):
773 if shadow_repo.get_remote_ref(source_branch):
778 cmd = ['fetch', self.path, source_branch]
774 cmd = ['fetch', self.path, source_branch]
779 self.run_git_command(cmd, fail_on_stderr=False)
775 self.run_git_command(cmd, fail_on_stderr=False)
780
776
781 def _local_fetch(self, repository_path, branch_name, use_origin=False):
777 def _local_fetch(self, repository_path, branch_name, use_origin=False):
782 """
778 """
783 Fetch a branch from a local repository.
779 Fetch a branch from a local repository.
784 """
780 """
785 repository_path = os.path.abspath(repository_path)
781 repository_path = os.path.abspath(repository_path)
786 if repository_path == self.path:
782 if repository_path == self.path:
787 raise ValueError('Cannot fetch from the same repository')
783 raise ValueError('Cannot fetch from the same repository')
788
784
789 if use_origin:
785 if use_origin:
790 branch_name = '+{branch}:refs/heads/{branch}'.format(
786 branch_name = '+{branch}:refs/heads/{branch}'.format(
791 branch=branch_name)
787 branch=branch_name)
792
788
793 cmd = ['fetch', '--no-tags', '--update-head-ok',
789 cmd = ['fetch', '--no-tags', '--update-head-ok',
794 repository_path, branch_name]
790 repository_path, branch_name]
795 self.run_git_command(cmd, fail_on_stderr=False)
791 self.run_git_command(cmd, fail_on_stderr=False)
796
792
797 def _local_reset(self, branch_name):
793 def _local_reset(self, branch_name):
798 branch_name = f'{branch_name}'
794 branch_name = f'{branch_name}'
799 cmd = ['reset', '--hard', branch_name, '--']
795 cmd = ['reset', '--hard', branch_name, '--']
800 self.run_git_command(cmd, fail_on_stderr=False)
796 self.run_git_command(cmd, fail_on_stderr=False)
801
797
802 def _last_fetch_heads(self):
798 def _last_fetch_heads(self):
803 """
799 """
804 Return the last fetched heads that need merging.
800 Return the last fetched heads that need merging.
805
801
806 The algorithm is defined at
802 The algorithm is defined at
807 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
803 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
808 """
804 """
809 if not self.bare:
805 if not self.bare:
810 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
806 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
811 else:
807 else:
812 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
808 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
813
809
814 heads = []
810 heads = []
815 with open(fetch_heads_path) as f:
811 with open(fetch_heads_path) as f:
816 for line in f:
812 for line in f:
817 if ' not-for-merge ' in line:
813 if ' not-for-merge ' in line:
818 continue
814 continue
819 line = re.sub('\t.*', '', line, flags=re.DOTALL)
815 line = re.sub('\t.*', '', line, flags=re.DOTALL)
820 heads.append(line)
816 heads.append(line)
821
817
822 return heads
818 return heads
823
819
824 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
820 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
825 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
821 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
826
822
827 def _local_pull(self, repository_path, branch_name, ff_only=True):
823 def _local_pull(self, repository_path, branch_name, ff_only=True):
828 """
824 """
829 Pull a branch from a local repository.
825 Pull a branch from a local repository.
830 """
826 """
831 if self.bare:
827 if self.bare:
832 raise RepositoryError('Cannot pull into a bare git repository')
828 raise RepositoryError('Cannot pull into a bare git repository')
833 # N.B.(skreft): The --ff-only option is to make sure this is a
829 # N.B.(skreft): The --ff-only option is to make sure this is a
834 # fast-forward (i.e., we are only pulling new changes and there are no
830 # fast-forward (i.e., we are only pulling new changes and there are no
835 # conflicts with our current branch)
831 # conflicts with our current branch)
836 # Additionally, that option needs to go before --no-tags, otherwise git
832 # Additionally, that option needs to go before --no-tags, otherwise git
837 # pull complains about it being an unknown flag.
833 # pull complains about it being an unknown flag.
838 cmd = ['pull']
834 cmd = ['pull']
839 if ff_only:
835 if ff_only:
840 cmd.append('--ff-only')
836 cmd.append('--ff-only')
841 cmd.extend(['--no-tags', repository_path, branch_name])
837 cmd.extend(['--no-tags', repository_path, branch_name])
842 self.run_git_command(cmd, fail_on_stderr=False)
838 self.run_git_command(cmd, fail_on_stderr=False)
843
839
844 def _local_merge(self, merge_message, user_name, user_email, heads):
840 def _local_merge(self, merge_message, user_name, user_email, heads):
845 """
841 """
846 Merge the given head into the checked out branch.
842 Merge the given head into the checked out branch.
847
843
848 It will force a merge commit.
844 It will force a merge commit.
849
845
850 Currently it raises an error if the repo is empty, as it is not possible
846 Currently it raises an error if the repo is empty, as it is not possible
851 to create a merge commit in an empty repo.
847 to create a merge commit in an empty repo.
852
848
853 :param merge_message: The message to use for the merge commit.
849 :param merge_message: The message to use for the merge commit.
854 :param heads: the heads to merge.
850 :param heads: the heads to merge.
855 """
851 """
856 if self.bare:
852 if self.bare:
857 raise RepositoryError('Cannot merge into a bare git repository')
853 raise RepositoryError('Cannot merge into a bare git repository')
858
854
859 if not heads:
855 if not heads:
860 return
856 return
861
857
862 if self.is_empty():
858 if self.is_empty():
863 # TODO(skreft): do something more robust in this case.
859 # TODO(skreft): do something more robust in this case.
864 raise RepositoryError('Do not know how to merge into empty repositories yet')
860 raise RepositoryError('Do not know how to merge into empty repositories yet')
865 unresolved = None
861 unresolved = None
866
862
867 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
863 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
868 # commit message. We also specify the user who is doing the merge.
864 # commit message. We also specify the user who is doing the merge.
869 cmd = ['-c', f'user.name="{user_name}"',
865 cmd = ['-c', f'user.name="{user_name}"',
870 '-c', f'user.email={user_email}',
866 '-c', f'user.email={user_email}',
871 'merge', '--no-ff', '-m', safe_str(merge_message)]
867 'merge', '--no-ff', '-m', safe_str(merge_message)]
872
868
873 merge_cmd = cmd + heads
869 merge_cmd = cmd + heads
874
870
875 try:
871 try:
876 self.run_git_command(merge_cmd, fail_on_stderr=False)
872 self.run_git_command(merge_cmd, fail_on_stderr=False)
877 except RepositoryError:
873 except RepositoryError:
878 files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
874 files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
879 fail_on_stderr=False)[0].splitlines()
875 fail_on_stderr=False)[0].splitlines()
880 # NOTE(marcink): we add U notation to be consistent with HG backend output
876 # NOTE(marcink): we add U notation to be consistent with HG backend output
881 unresolved = [f'U {f}' for f in files]
877 unresolved = [f'U {f}' for f in files]
882
878
883 # Cleanup any merge leftovers
879 # Cleanup any merge leftovers
884 self._remote.invalidate_vcs_cache()
880 self._remote.invalidate_vcs_cache()
885 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
881 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
886
882
887 if unresolved:
883 if unresolved:
888 raise UnresolvedFilesInRepo(unresolved)
884 raise UnresolvedFilesInRepo(unresolved)
889 else:
885 else:
890 raise
886 raise
891
887
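For clarity, a minimal sketch of how the conflicted paths end up as the 'U <path>' entries raised through UnresolvedFilesInRepo; the run callable standing in for run_git_command is an assumption of this example:

def unresolved_files(run):
    # `git diff --name-only --diff-filter U` lists paths that are still unmerged
    stdout, _err = run(['diff', '--name-only', '--diff-filter', 'U'])
    return [f'U {path}' for path in stdout.splitlines() if path]

# stdout of 'docs/index.rst\nsetup.py\n' -> ['U docs/index.rst', 'U setup.py']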
892 def _local_push(
888 def _local_push(
893 self, source_branch, repository_path, target_branch,
889 self, source_branch, repository_path, target_branch,
894 enable_hooks=False, rc_scm_data=None):
890 enable_hooks=False, rc_scm_data=None):
895 """
891 """
896 Push the source_branch to the given repository and target_branch.
892 Push the source_branch to the given repository and target_branch.
897
893
898 Currently, if the target_branch is not master and the target repo is
894 Currently, if the target_branch is not master and the target repo is
899 empty, the push will work, but GitRepository will then not be able to find
895 empty, the push will work, but GitRepository will then not be able to find
900 the pushed branch or the commits, as the HEAD will be corrupted (i.e.,
896 the pushed branch or the commits, as the HEAD will be corrupted (i.e.,
901 pointing to master, which does not exist).
897 pointing to master, which does not exist).
902
898
903 It does not run the hooks in the target repo.
899 It does not run the hooks in the target repo.
904 """
900 """
905 # TODO(skreft): deal with the case in which the target repo is empty,
901 # TODO(skreft): deal with the case in which the target repo is empty,
906 # and the target_branch is not master.
902 # and the target_branch is not master.
907 target_repo = GitRepository(repository_path)
903 target_repo = GitRepository(repository_path)
908 if (not target_repo.bare and
904 if (not target_repo.bare and
909 target_repo._current_branch() == target_branch):
905 target_repo._current_branch() == target_branch):
910 # Git prevents pushing to the checked out branch, so simulate it by
906 # Git prevents pushing to the checked out branch, so simulate it by
911 # pulling into the target repository.
907 # pulling into the target repository.
912 target_repo._local_pull(self.path, source_branch)
908 target_repo._local_pull(self.path, source_branch)
913 else:
909 else:
914 cmd = ['push', os.path.abspath(repository_path),
910 cmd = ['push', os.path.abspath(repository_path),
915 f'{source_branch}:{target_branch}']
911 f'{source_branch}:{target_branch}']
916 gitenv = {}
912 gitenv = {}
917 if rc_scm_data:
913 if rc_scm_data:
918 gitenv.update({'RC_SCM_DATA': rc_scm_data})
914 gitenv.update({'RC_SCM_DATA': rc_scm_data})
919
915
920 if not enable_hooks:
916 if not enable_hooks:
921 gitenv['RC_SKIP_HOOKS'] = '1'
917 gitenv['RC_SKIP_HOOKS'] = '1'
922 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
918 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
923
919
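The environment handling above can be condensed into the following sketch (a hypothetical helper, not part of the commit): RC_SCM_DATA is forwarded when supplied, and hooks are skipped unless explicitly enabled:

def build_push_env(rc_scm_data=None, enable_hooks=False):
    gitenv = {}
    if rc_scm_data:
        gitenv['RC_SCM_DATA'] = rc_scm_data
    if not enable_hooks:
        # signals the hook machinery to skip hook execution for this push
        gitenv['RC_SKIP_HOOKS'] = '1'
    return gitenv

# build_push_env(rc_scm_data='{"repo": "x"}')
# -> {'RC_SCM_DATA': '{"repo": "x"}', 'RC_SKIP_HOOKS': '1'}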
924 def _get_new_pr_branch(self, source_branch, target_branch):
920 def _get_new_pr_branch(self, source_branch, target_branch):
925 prefix = f'pr_{source_branch}-{target_branch}_'
921 prefix = f'pr_{source_branch}-{target_branch}_'
926 pr_branches = []
922 pr_branches = []
927 for branch in self.branches:
923 for branch in self.branches:
928 if branch.startswith(prefix):
924 if branch.startswith(prefix):
929 pr_branches.append(int(branch[len(prefix):]))
925 pr_branches.append(int(branch[len(prefix):]))
930
926
931 if not pr_branches:
927 if not pr_branches:
932 branch_id = 0
928 branch_id = 0
933 else:
929 else:
934 branch_id = max(pr_branches) + 1
930 branch_id = max(pr_branches) + 1
935
931
936 return '%s%d' % (prefix, branch_id)
932 return '%s%d' % (prefix, branch_id)
937
933
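The naming scheme for temporary pull-request merge branches amounts to taking the highest existing numeric suffix and adding one. A self-contained sketch with the same logic (the function name is hypothetical):

def next_pr_branch(branches, source_branch, target_branch):
    prefix = f'pr_{source_branch}-{target_branch}_'
    ids = [int(b[len(prefix):]) for b in branches if b.startswith(prefix)]
    return '%s%d' % (prefix, max(ids) + 1 if ids else 0)

# next_pr_branch(['pr_feature-master_0', 'pr_feature-master_3'], 'feature', 'master')
# -> 'pr_feature-master_4'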
938 def _maybe_prepare_merge_workspace(
934 def _maybe_prepare_merge_workspace(
939 self, repo_id, workspace_id, target_ref, source_ref):
935 self, repo_id, workspace_id, target_ref, source_ref):
940 shadow_repository_path = self._get_shadow_repository_path(
936 shadow_repository_path = self._get_shadow_repository_path(
941 self.path, repo_id, workspace_id)
937 self.path, repo_id, workspace_id)
942 if not os.path.exists(shadow_repository_path):
938 if not os.path.exists(shadow_repository_path):
943 self._local_clone(
939 self._local_clone(
944 shadow_repository_path, target_ref.name, source_ref.name)
940 shadow_repository_path, target_ref.name, source_ref.name)
945 log.debug('Prepared %s shadow repository in %s',
941 log.debug('Prepared %s shadow repository in %s',
946 self.alias, shadow_repository_path)
942 self.alias, shadow_repository_path)
947
943
948 return shadow_repository_path
944 return shadow_repository_path
949
945
950 def _merge_repo(self, repo_id, workspace_id, target_ref,
946 def _merge_repo(self, repo_id, workspace_id, target_ref,
951 source_repo, source_ref, merge_message,
947 source_repo, source_ref, merge_message,
952 merger_name, merger_email, dry_run=False,
948 merger_name, merger_email, dry_run=False,
953 use_rebase=False, close_branch=False):
949 use_rebase=False, close_branch=False):
954
950
955 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
951 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
956 'rebase' if use_rebase else 'merge', dry_run)
952 'rebase' if use_rebase else 'merge', dry_run)
957
953
958 if target_ref.commit_id != self.branches[target_ref.name]:
954 if target_ref.commit_id != self.branches[target_ref.name]:
959 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
955 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
960 target_ref.commit_id, self.branches[target_ref.name])
956 target_ref.commit_id, self.branches[target_ref.name])
961 return MergeResponse(
957 return MergeResponse(
962 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
958 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
963 metadata={'target_ref': target_ref})
959 metadata={'target_ref': target_ref})
964
960
965 shadow_repository_path = self._maybe_prepare_merge_workspace(
961 shadow_repository_path = self._maybe_prepare_merge_workspace(
966 repo_id, workspace_id, target_ref, source_ref)
962 repo_id, workspace_id, target_ref, source_ref)
967 shadow_repo = self.get_shadow_instance(shadow_repository_path)
963 shadow_repo = self.get_shadow_instance(shadow_repository_path)
968
964
969 # checkout source, if it's different. Otherwise we could not
965 # checkout source, if it's different. Otherwise we could not
970 # fetch proper commits for merge testing
966 # fetch proper commits for merge testing
971 if source_ref.name != target_ref.name:
967 if source_ref.name != target_ref.name:
972 if shadow_repo.get_remote_ref(source_ref.name):
968 if shadow_repo.get_remote_ref(source_ref.name):
973 shadow_repo._checkout(source_ref.name, force=True)
969 shadow_repo._checkout(source_ref.name, force=True)
974
970
975 # checkout target, and fetch changes
971 # checkout target, and fetch changes
976 shadow_repo._checkout(target_ref.name, force=True)
972 shadow_repo._checkout(target_ref.name, force=True)
977
973
978 # fetch and reset the target, in case it has changed;
974 # fetch and reset the target, in case it has changed;
979 # this handles even force-pushed changes
975 # this handles even force-pushed changes
980 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
976 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
981 shadow_repo._local_reset(target_ref.name)
977 shadow_repo._local_reset(target_ref.name)
982
978
983 # Need to reload repo to invalidate the cache, or otherwise we cannot
979 # Need to reload repo to invalidate the cache, or otherwise we cannot
984 # retrieve the last target commit.
980 # retrieve the last target commit.
985 shadow_repo = self.get_shadow_instance(shadow_repository_path)
981 shadow_repo = self.get_shadow_instance(shadow_repository_path)
986 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
982 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
987 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
983 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
988 target_ref, target_ref.commit_id,
984 target_ref, target_ref.commit_id,
989 shadow_repo.branches[target_ref.name])
985 shadow_repo.branches[target_ref.name])
990 return MergeResponse(
986 return MergeResponse(
991 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
987 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
992 metadata={'target_ref': target_ref})
988 metadata={'target_ref': target_ref})
993
989
994 # calculate new branch
990 # calculate new branch
995 pr_branch = shadow_repo._get_new_pr_branch(
991 pr_branch = shadow_repo._get_new_pr_branch(
996 source_ref.name, target_ref.name)
992 source_ref.name, target_ref.name)
997 log.debug('using pull-request merge branch: `%s`', pr_branch)
993 log.debug('using pull-request merge branch: `%s`', pr_branch)
998 # checkout to temp branch, and fetch changes
994 # checkout to temp branch, and fetch changes
999 shadow_repo._checkout(pr_branch, create=True)
995 shadow_repo._checkout(pr_branch, create=True)
1000 try:
996 try:
1001 shadow_repo._local_fetch(source_repo.path, source_ref.name)
997 shadow_repo._local_fetch(source_repo.path, source_ref.name)
1002 except RepositoryError:
998 except RepositoryError:
1003 log.exception('Failure when doing local fetch on '
999 log.exception('Failure when doing local fetch on '
1004 'shadow repo: %s', shadow_repo)
1000 'shadow repo: %s', shadow_repo)
1005 return MergeResponse(
1001 return MergeResponse(
1006 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
1002 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
1007 metadata={'source_ref': source_ref})
1003 metadata={'source_ref': source_ref})
1008
1004
1009 merge_ref = None
1005 merge_ref = None
1010 merge_failure_reason = MergeFailureReason.NONE
1006 merge_failure_reason = MergeFailureReason.NONE
1011 metadata = {}
1007 metadata = {}
1012 try:
1008 try:
1013 shadow_repo._local_merge(merge_message, merger_name, merger_email,
1009 shadow_repo._local_merge(merge_message, merger_name, merger_email,
1014 [source_ref.commit_id])
1010 [source_ref.commit_id])
1015 merge_possible = True
1011 merge_possible = True
1016
1012
1017 # Need to invalidate the cache, or otherwise we
1013 # Need to invalidate the cache, or otherwise we
1018 # cannot retrieve the merge commit.
1014 # cannot retrieve the merge commit.
1019 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
1015 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
1020 merge_commit_id = shadow_repo.branches[pr_branch]
1016 merge_commit_id = shadow_repo.branches[pr_branch]
1021
1017
1022 # Set a reference pointing to the merge commit. This reference may
1018 # Set a reference pointing to the merge commit. This reference may
1023 # be used to easily identify the last successful merge commit in
1019 # be used to easily identify the last successful merge commit in
1024 # the shadow repository.
1020 # the shadow repository.
1025 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
1021 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
1026 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
1022 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
1027 except RepositoryError as e:
1023 except RepositoryError as e:
1028 log.exception('Failure when doing local merge on git shadow repo')
1024 log.exception('Failure when doing local merge on git shadow repo')
1029 if isinstance(e, UnresolvedFilesInRepo):
1025 if isinstance(e, UnresolvedFilesInRepo):
1030 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
1026 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
1031
1027
1032 merge_possible = False
1028 merge_possible = False
1033 merge_failure_reason = MergeFailureReason.MERGE_FAILED
1029 merge_failure_reason = MergeFailureReason.MERGE_FAILED
1034
1030
1035 if merge_possible and not dry_run:
1031 if merge_possible and not dry_run:
1036 try:
1032 try:
1037 shadow_repo._local_push(
1033 shadow_repo._local_push(
1038 pr_branch, self.path, target_ref.name, enable_hooks=True,
1034 pr_branch, self.path, target_ref.name, enable_hooks=True,
1039 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1035 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1040 merge_succeeded = True
1036 merge_succeeded = True
1041 except RepositoryError:
1037 except RepositoryError:
1042 log.exception(
1038 log.exception(
1043 'Failure when doing local push from the shadow '
1039 'Failure when doing local push from the shadow '
1044 'repository to the target repository at %s.', self.path)
1040 'repository to the target repository at %s.', self.path)
1045 merge_succeeded = False
1041 merge_succeeded = False
1046 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1042 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1047 metadata['target'] = 'git shadow repo'
1043 metadata['target'] = 'git shadow repo'
1048 metadata['merge_commit'] = pr_branch
1044 metadata['merge_commit'] = pr_branch
1049 else:
1045 else:
1050 merge_succeeded = False
1046 merge_succeeded = False
1051
1047
1052 return MergeResponse(
1048 return MergeResponse(
1053 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1049 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1054 metadata=metadata)
1050 metadata=metadata)
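A condensed restatement of when the merge above is reported as succeeded; this hypothetical helper only summarises the control flow of _merge_repo and is not part of the change:

def merge_succeeded(merge_possible, dry_run, push_ok):
    # a merge counts as succeeded only when it was possible, this is not a
    # dry run, and the push from the shadow repo back to the target worked
    return bool(merge_possible and not dry_run and push_ok)

# merge_succeeded(True, dry_run=True, push_ok=True)   -> False (dry run)
# merge_succeeded(True, dry_run=False, push_ok=True)  -> True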
@@ -1,367 +1,361 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 SVN repository module
20 SVN repository module
21 """
21 """
22
22
23 import logging
23 import logging
24 import os
24 import os
25 import urllib.request
25 import urllib.request
26 import urllib.parse
26 import urllib.parse
27 import urllib.error
27 import urllib.error
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from collections import OrderedDict
31 from collections import OrderedDict
32 from rhodecode.lib.datelib import date_astimestamp
32 from rhodecode.lib.datelib import date_astimestamp
33 from rhodecode.lib.str_utils import safe_str
33 from rhodecode.lib.str_utils import safe_str
34 from rhodecode.lib.utils2 import CachedProperty
34 from rhodecode.lib.utils2 import CachedProperty
35 from rhodecode.lib.vcs import connection, path as vcspath
35 from rhodecode.lib.vcs import connection, path as vcspath
36 from rhodecode.lib.vcs.backends import base
36 from rhodecode.lib.vcs.backends import base
37 from rhodecode.lib.vcs.backends.svn.commit import (
37 from rhodecode.lib.vcs.backends.svn.commit import (
38 SubversionCommit, _date_from_svn_properties)
38 SubversionCommit, _date_from_svn_properties)
39 from rhodecode.lib.vcs.backends.svn.diff import SubversionDiff
39 from rhodecode.lib.vcs.backends.svn.diff import SubversionDiff
40 from rhodecode.lib.vcs.backends.svn.inmemory import SubversionInMemoryCommit
40 from rhodecode.lib.vcs.backends.svn.inmemory import SubversionInMemoryCommit
41 from rhodecode.lib.vcs.conf import settings
41 from rhodecode.lib.vcs.conf import settings
42 from rhodecode.lib.vcs.exceptions import (
42 from rhodecode.lib.vcs.exceptions import (
43 CommitDoesNotExistError, EmptyRepositoryError, RepositoryError,
43 CommitDoesNotExistError, EmptyRepositoryError, RepositoryError,
44 VCSError, NodeDoesNotExistError)
44 VCSError, NodeDoesNotExistError)
45
45
46
46
47 log = logging.getLogger(__name__)
47 log = logging.getLogger(__name__)
48
48
49
49
50 class SubversionRepository(base.BaseRepository):
50 class SubversionRepository(base.BaseRepository):
51 """
51 """
52 Subversion backend implementation
52 Subversion backend implementation
53
53
54 .. important::
54 .. important::
55
55
56 It is very important to distinguish the commit index and the commit id
56 It is very important to distinguish the commit index and the commit id
57 which is assigned by Subversion. The first one is always handled as an
57 which is assigned by Subversion. The first one is always handled as an
58 `int` by this implementation. The commit id assigned by Subversion on
58 `int` by this implementation. The commit id assigned by Subversion on
59 the other side will always be a `str`.
59 the other side will always be a `str`.
60
60
61 There is a specific trap since the first commit will have the index
61 There is a specific trap since the first commit will have the index
62 ``0`` but the svn id will be ``"1"``.
62 ``0`` but the svn id will be ``"1"``.
63
63
64 """
64 """
65
65
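A small worked example of the index/id trap described above, assuming a repository whose newest revision is 3:

commit_ids = [str(r) for r in range(1, 3 + 1)]  # ['1', '2', '3']
assert commit_ids[0] == '1'         # commit index 0 maps to svn id "1"
assert commit_ids.index('3') == 2   # svn id "3" maps to commit index 2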
66 # Note: Subversion does not really have a default branch name.
66 # Note: Subversion does not really have a default branch name.
67 DEFAULT_BRANCH_NAME = None
67 DEFAULT_BRANCH_NAME = None
68
68
69 contact = base.BaseRepository.DEFAULT_CONTACT
69 contact = base.BaseRepository.DEFAULT_CONTACT
70 description = base.BaseRepository.DEFAULT_DESCRIPTION
70 description = base.BaseRepository.DEFAULT_DESCRIPTION
71
71
72 def __init__(self, repo_path, config=None, create=False, src_url=None, with_wire=None,
72 def __init__(self, repo_path, config=None, create=False, src_url=None, with_wire=None,
73 bare=False, **kwargs):
73 bare=False, **kwargs):
74 self.path = safe_str(os.path.abspath(repo_path))
74 self.path = safe_str(os.path.abspath(repo_path))
75 self.config = config if config else self.get_default_config()
75 self.config = config if config else self.get_default_config()
76 self.with_wire = with_wire or {"cache": False} # default should not use cache
76 self.with_wire = with_wire or {"cache": False} # default should not use cache
77
77
78 self._init_repo(create, src_url)
78 self._init_repo(create, src_url)
79
79
80 # caches
80 # caches
81 self._commit_ids = {}
81 self._commit_ids = {}
82
82
83 @LazyProperty
83 @LazyProperty
84 def _remote(self):
84 def _remote(self):
85 repo_id = self.path
85 repo_id = self.path
86 return connection.Svn(self.path, repo_id, self.config, with_wire=self.with_wire)
86 return connection.Svn(self.path, repo_id, self.config, with_wire=self.with_wire)
87
87
88 def _init_repo(self, create, src_url):
88 def _init_repo(self, create, src_url):
89 if create and os.path.exists(self.path):
89 if create and os.path.exists(self.path):
90 raise RepositoryError(
90 raise RepositoryError(
91 f"Cannot create repository at {self.path}, location already exist"
91 f"Cannot create repository at {self.path}, location already exist")
92 )
93
92
94 if create:
93 if create:
95 self._remote.create_repository(settings.SVN_COMPATIBLE_VERSION)
94 self._remote.create_repository(settings.SVN_COMPATIBLE_VERSION)
96 if src_url:
95 if src_url:
97 src_url = _sanitize_url(src_url)
96 src_url = _sanitize_url(src_url)
98 self._remote.import_remote_repository(src_url)
97 self._remote.import_remote_repository(src_url)
99 else:
98 else:
100 self._check_path()
99 if not self._remote.is_path_valid_repository(self.path):
100 raise VCSError(
101 f'Path "{self.path}" does not contain a Subversion repository')
101
102
102 @CachedProperty
103 @CachedProperty
103 def commit_ids(self):
104 def commit_ids(self):
104 head = self._remote.lookup(None)
105 head = self._remote.lookup(None)
105 return [str(r) for r in range(1, head + 1)]
106 return [str(r) for r in range(1, head + 1)]
106
107
107 def _rebuild_cache(self, commit_ids):
108 def _rebuild_cache(self, commit_ids):
108 pass
109 pass
109
110
110 def run_svn_command(self, cmd, **opts):
111 def run_svn_command(self, cmd, **opts):
111 """
112 """
112 Runs given ``cmd`` as svn command and returns tuple
113 Runs given ``cmd`` as svn command and returns tuple
113 (stdout, stderr).
114 (stdout, stderr).
114
115
115 :param cmd: full svn command to be executed
116 :param cmd: full svn command to be executed
116 :param opts: env options to pass into Subprocess command
117 :param opts: env options to pass into Subprocess command
117 """
118 """
118 if not isinstance(cmd, list):
119 if not isinstance(cmd, list):
119 raise ValueError(f'cmd must be a list, got {type(cmd)} instead')
120 raise ValueError(f'cmd must be a list, got {type(cmd)} instead')
120
121
121 skip_stderr_log = opts.pop('skip_stderr_log', False)
122 skip_stderr_log = opts.pop('skip_stderr_log', False)
122 out, err = self._remote.run_svn_command(cmd, **opts)
123 out, err = self._remote.run_svn_command(cmd, **opts)
123 if err and not skip_stderr_log:
124 if err and not skip_stderr_log:
124 log.debug('Stderr output of svn command "%s":\n%s', cmd, err)
125 log.debug('Stderr output of svn command "%s":\n%s', cmd, err)
125 return out, err
126 return out, err
126
127
127 @LazyProperty
128 @LazyProperty
128 def branches(self):
129 def branches(self):
129 return self._tags_or_branches('vcs_svn_branch')
130 return self._tags_or_branches('vcs_svn_branch')
130
131
131 @LazyProperty
132 @LazyProperty
132 def branches_closed(self):
133 def branches_closed(self):
133 return {}
134 return {}
134
135
135 @LazyProperty
136 @LazyProperty
136 def bookmarks(self):
137 def bookmarks(self):
137 return {}
138 return {}
138
139
139 @LazyProperty
140 @LazyProperty
140 def branches_all(self):
141 def branches_all(self):
141 # TODO: johbo: Implement proper branch support
142 # TODO: johbo: Implement proper branch support
142 all_branches = {}
143 all_branches = {}
143 all_branches.update(self.branches)
144 all_branches.update(self.branches)
144 all_branches.update(self.branches_closed)
145 all_branches.update(self.branches_closed)
145 return all_branches
146 return all_branches
146
147
147 @LazyProperty
148 @LazyProperty
148 def tags(self):
149 def tags(self):
149 return self._tags_or_branches('vcs_svn_tag')
150 return self._tags_or_branches('vcs_svn_tag')
150
151
151 def _tags_or_branches(self, config_section):
152 def _tags_or_branches(self, config_section):
152 found_items = {}
153 found_items = {}
153
154
154 if self.is_empty():
155 if self.is_empty():
155 return {}
156 return {}
156
157
157 for pattern in self._patterns_from_section(config_section):
158 for pattern in self._patterns_from_section(config_section):
158 pattern = vcspath.sanitize(pattern)
159 pattern = vcspath.sanitize(pattern)
159 tip = self.get_commit()
160 tip = self.get_commit()
160 try:
161 try:
161 if pattern.endswith('*'):
162 if pattern.endswith('*'):
162 basedir = tip.get_node(vcspath.dirname(pattern))
163 basedir = tip.get_node(vcspath.dirname(pattern))
163 directories = basedir.dirs
164 directories = basedir.dirs
164 else:
165 else:
165 directories = (tip.get_node(pattern), )
166 directories = (tip.get_node(pattern), )
166 except NodeDoesNotExistError:
167 except NodeDoesNotExistError:
167 continue
168 continue
168 found_items.update((safe_str(n.path), self.commit_ids[-1]) for n in directories)
169 found_items.update((safe_str(n.path), self.commit_ids[-1]) for n in directories)
169
170
170 def get_name(item):
171 def get_name(item):
171 return item[0]
172 return item[0]
172
173
173 return OrderedDict(sorted(found_items.items(), key=get_name))
174 return OrderedDict(sorted(found_items.items(), key=get_name))
174
175
175 def _patterns_from_section(self, section):
176 def _patterns_from_section(self, section):
176 return (pattern for key, pattern in self.config.items(section))
177 return (pattern for key, pattern in self.config.items(section))
177
178
178 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
179 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
179 if self != repo2:
180 if self != repo2:
180 raise ValueError(
181 raise ValueError(
181 "Subversion does not support getting common ancestor of"
182 "Subversion does not support getting common ancestor of"
182 " different repositories.")
183 " different repositories.")
183
184
184 if int(commit_id1) < int(commit_id2):
185 if int(commit_id1) < int(commit_id2):
185 return commit_id1
186 return commit_id1
186 return commit_id2
187 return commit_id2
187
188
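Since Subversion history is linear here, the common ancestor of two revisions of the same repository is simply the lower revision number, for example:

# common ancestor of revisions '10' and '25' within one repository
assert min('10', '25', key=int) == '10'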
188 def verify(self):
189 def verify(self):
189 verify = self._remote.verify()
190 verify = self._remote.verify()
190
191
191 self._remote.invalidate_vcs_cache()
192 self._remote.invalidate_vcs_cache()
192 return verify
193 return verify
193
194
194 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
195 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
195 # TODO: johbo: Implement better comparison, this is a very naive
196 # TODO: johbo: Implement better comparison, this is a very naive
196 # version which does not allow to compare branches, tags or folders
197 # version which does not allow to compare branches, tags or folders
197 # at all.
198 # at all.
198 if repo2 != self:
199 if repo2 != self:
199 raise ValueError(
200 raise ValueError(
200 "Subversion does not support comparison of of different "
201 "Subversion does not support comparison of of different "
201 "repositories.")
202 "repositories.")
202
203
203 if commit_id1 == commit_id2:
204 if commit_id1 == commit_id2:
204 return []
205 return []
205
206
206 commit_idx1 = self._get_commit_idx(commit_id1)
207 commit_idx1 = self._get_commit_idx(commit_id1)
207 commit_idx2 = self._get_commit_idx(commit_id2)
208 commit_idx2 = self._get_commit_idx(commit_id2)
208
209
209 commits = [
210 commits = [
210 self.get_commit(commit_idx=idx)
211 self.get_commit(commit_idx=idx)
211 for idx in range(commit_idx1 + 1, commit_idx2 + 1)]
212 for idx in range(commit_idx1 + 1, commit_idx2 + 1)]
212
213
213 return commits
214 return commits
214
215
215 def _get_commit_idx(self, commit_id):
216 def _get_commit_idx(self, commit_id):
216 try:
217 try:
217 svn_rev = int(commit_id)
218 svn_rev = int(commit_id)
218 except:
219 except:
219 # TODO: johbo: this might be only one case, HEAD, check this
220 # TODO: johbo: this might be only one case, HEAD, check this
220 svn_rev = self._remote.lookup(commit_id)
221 svn_rev = self._remote.lookup(commit_id)
221 commit_idx = svn_rev - 1
222 commit_idx = svn_rev - 1
222 if commit_idx >= len(self.commit_ids):
223 if commit_idx >= len(self.commit_ids):
223 raise CommitDoesNotExistError(
224 raise CommitDoesNotExistError(
224 f"Commit at index {commit_idx} does not exist.")
225 f"Commit at index {commit_idx} does not exist.")
225 return commit_idx
226 return commit_idx
226
227
227 @staticmethod
228 @staticmethod
228 def check_url(url, config):
229 def check_url(url, config):
229 """
230 """
230 Check if `url` is a valid source to import a Subversion repository.
231 Check if `url` is a valid source to import a Subversion repository.
231 """
232 """
232 # convert to URL if it's a local directory
233 # convert to URL if it's a local directory
233 if os.path.isdir(url):
234 if os.path.isdir(url):
234 url = 'file://' + urllib.request.pathname2url(url)
235 url = 'file://' + urllib.request.pathname2url(url)
235 return connection.Svn.check_url(url, config.serialize())
236 return connection.Svn.check_url(url, config.serialize())
236
237
237 @staticmethod
238 @staticmethod
238 def is_valid_repository(path):
239 def is_valid_repository(path):
239 try:
240 try:
240 SubversionRepository(path)
241 SubversionRepository(path)
241 return True
242 return True
242 except VCSError:
243 except VCSError:
243 pass
244 pass
244 return False
245 return False
245
246
246 def _check_path(self):
247 if not os.path.exists(self.path):
248 raise VCSError(f'Path "{self.path}" does not exist!')
249 if not self._remote.is_path_valid_repository(self.path):
250 raise VCSError(
251 'Path "%s" does not contain a Subversion repository' %
252 (self.path, ))
253
247
254 @LazyProperty
248 @LazyProperty
255 def last_change(self):
249 def last_change(self):
256 """
250 """
257 Returns last change made on this repository as
251 Returns last change made on this repository as
258 `datetime.datetime` object.
252 `datetime.datetime` object.
259 """
253 """
260 # Subversion always has a first commit which has id "0" and contains
254 # Subversion always has a first commit which has id "0" and contains
261 # what we are looking for.
255 # what we are looking for.
262 last_id = len(self.commit_ids)
256 last_id = len(self.commit_ids)
263 properties = self._remote.revision_properties(last_id)
257 properties = self._remote.revision_properties(last_id)
264 return _date_from_svn_properties(properties)
258 return _date_from_svn_properties(properties)
265
259
266 @LazyProperty
260 @LazyProperty
267 def in_memory_commit(self):
261 def in_memory_commit(self):
268 return SubversionInMemoryCommit(self)
262 return SubversionInMemoryCommit(self)
269
263
270 def get_hook_location(self):
264 def get_hook_location(self):
271 """
265 """
272 returns absolute path to location where hooks are stored
266 returns absolute path to location where hooks are stored
273 """
267 """
274 return os.path.join(self.path, 'hooks')
268 return os.path.join(self.path, 'hooks')
275
269
276 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
270 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
277 translate_tag=None, maybe_unreachable=False, reference_obj=None):
271 translate_tag=None, maybe_unreachable=False, reference_obj=None):
278 if self.is_empty():
272 if self.is_empty():
279 raise EmptyRepositoryError("There are no commits yet")
273 raise EmptyRepositoryError("There are no commits yet")
280 if commit_id is not None:
274 if commit_id is not None:
281 self._validate_commit_id(commit_id)
275 self._validate_commit_id(commit_id)
282 elif commit_idx is not None:
276 elif commit_idx is not None:
283 self._validate_commit_idx(commit_idx)
277 self._validate_commit_idx(commit_idx)
284 try:
278 try:
285 commit_id = self.commit_ids[commit_idx]
279 commit_id = self.commit_ids[commit_idx]
286 except IndexError:
280 except IndexError:
287 raise CommitDoesNotExistError(f'No commit with idx: {commit_idx}')
281 raise CommitDoesNotExistError(f'No commit with idx: {commit_idx}')
288
282
289 commit_id = self._sanitize_commit_id(commit_id)
283 commit_id = self._sanitize_commit_id(commit_id)
290 commit = SubversionCommit(repository=self, commit_id=commit_id)
284 commit = SubversionCommit(repository=self, commit_id=commit_id)
291 return commit
285 return commit
292
286
293 def get_commits(
287 def get_commits(
294 self, start_id=None, end_id=None, start_date=None, end_date=None,
288 self, start_id=None, end_id=None, start_date=None, end_date=None,
295 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
289 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
296 if self.is_empty():
290 if self.is_empty():
297 raise EmptyRepositoryError("There are no commit_ids yet")
291 raise EmptyRepositoryError("There are no commit_ids yet")
298 self._validate_branch_name(branch_name)
292 self._validate_branch_name(branch_name)
299
293
300 if start_id is not None:
294 if start_id is not None:
301 self._validate_commit_id(start_id)
295 self._validate_commit_id(start_id)
302 if end_id is not None:
296 if end_id is not None:
303 self._validate_commit_id(end_id)
297 self._validate_commit_id(end_id)
304
298
305 start_raw_id = self._sanitize_commit_id(start_id)
299 start_raw_id = self._sanitize_commit_id(start_id)
306 start_pos = self.commit_ids.index(start_raw_id) if start_id else None
300 start_pos = self.commit_ids.index(start_raw_id) if start_id else None
307 end_raw_id = self._sanitize_commit_id(end_id)
301 end_raw_id = self._sanitize_commit_id(end_id)
308 end_pos = max(0, self.commit_ids.index(end_raw_id)) if end_id else None
302 end_pos = max(0, self.commit_ids.index(end_raw_id)) if end_id else None
309
303
310 if None not in [start_id, end_id] and start_pos > end_pos:
304 if None not in [start_id, end_id] and start_pos > end_pos:
311 raise RepositoryError(
305 raise RepositoryError(
312 "Start commit '%s' cannot be after end commit '%s'" %
306 "Start commit '%s' cannot be after end commit '%s'" %
313 (start_id, end_id))
307 (start_id, end_id))
314 if end_pos is not None:
308 if end_pos is not None:
315 end_pos += 1
309 end_pos += 1
316
310
317 # Date based filtering
311 # Date based filtering
318 if start_date or end_date:
312 if start_date or end_date:
319 start_raw_id, end_raw_id = self._remote.lookup_interval(
313 start_raw_id, end_raw_id = self._remote.lookup_interval(
320 date_astimestamp(start_date) if start_date else None,
314 date_astimestamp(start_date) if start_date else None,
321 date_astimestamp(end_date) if end_date else None)
315 date_astimestamp(end_date) if end_date else None)
322 start_pos = start_raw_id - 1
316 start_pos = start_raw_id - 1
323 end_pos = end_raw_id
317 end_pos = end_raw_id
324
318
325 commit_ids = self.commit_ids
319 commit_ids = self.commit_ids
326
320
327 # TODO: johbo: Reconsider impact of DEFAULT_BRANCH_NAME here
321 # TODO: johbo: Reconsider impact of DEFAULT_BRANCH_NAME here
328 if branch_name not in [None, self.DEFAULT_BRANCH_NAME]:
322 if branch_name not in [None, self.DEFAULT_BRANCH_NAME]:
329 svn_rev = int(self.commit_ids[-1])
323 svn_rev = int(self.commit_ids[-1])
330 commit_ids = self._remote.node_history(
324 commit_ids = self._remote.node_history(
331 path=branch_name, revision=svn_rev, limit=None)
325 path=branch_name, revision=svn_rev, limit=None)
332 commit_ids = [str(i) for i in reversed(commit_ids)]
326 commit_ids = [str(i) for i in reversed(commit_ids)]
333
327
334 if start_pos or end_pos:
328 if start_pos or end_pos:
335 commit_ids = commit_ids[start_pos:end_pos]
329 commit_ids = commit_ids[start_pos:end_pos]
336 return base.CollectionGenerator(self, commit_ids, pre_load=pre_load)
330 return base.CollectionGenerator(self, commit_ids, pre_load=pre_load)
337
331
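A worked example of the inclusive slicing performed above (illustrative values only): with start_id='2' and end_id='4' on five commits, end_pos is bumped by one so that the end commit itself is included:

commit_ids = ['1', '2', '3', '4', '5']
start_pos = commit_ids.index('2')             # 1
end_pos = max(0, commit_ids.index('4')) + 1   # 3, then made inclusive -> 4
assert commit_ids[start_pos:end_pos] == ['2', '3', '4']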
338 def _sanitize_commit_id(self, commit_id):
332 def _sanitize_commit_id(self, commit_id):
339 if commit_id and commit_id.isdigit():
333 if commit_id and commit_id.isdigit():
340 if int(commit_id) <= len(self.commit_ids):
334 if int(commit_id) <= len(self.commit_ids):
341 return commit_id
335 return commit_id
342 else:
336 else:
343 raise CommitDoesNotExistError(
337 raise CommitDoesNotExistError(
344 f"Commit {commit_id} does not exist.")
338 f"Commit {commit_id} does not exist.")
345 if commit_id not in [
339 if commit_id not in [
346 None, 'HEAD', 'tip', self.DEFAULT_BRANCH_NAME]:
340 None, 'HEAD', 'tip', self.DEFAULT_BRANCH_NAME]:
347 raise CommitDoesNotExistError(
341 raise CommitDoesNotExistError(
348 f"Commit id {commit_id} not understood.")
342 f"Commit id {commit_id} not understood.")
349 svn_rev = self._remote.lookup('HEAD')
343 svn_rev = self._remote.lookup('HEAD')
350 return str(svn_rev)
344 return str(svn_rev)
351
345
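A hedged sketch of the sanitising rules above, with the remote HEAD lookup and repository state replaced by plain values and the VCS-specific exceptions simplified to ValueError:

def sanitize(commit_id, head_rev, total_commits):
    if commit_id and commit_id.isdigit():
        if int(commit_id) <= total_commits:
            return commit_id
        raise ValueError(f'Commit {commit_id} does not exist.')
    # None, 'HEAD' and 'tip' (and the default branch, which is None for svn)
    # all resolve to the current head revision
    if commit_id not in (None, 'HEAD', 'tip'):
        raise ValueError(f'Commit id {commit_id} not understood.')
    return str(head_rev)

# sanitize('2', head_rev=5, total_commits=5)     -> '2'
# sanitize('HEAD', head_rev=5, total_commits=5)  -> '5'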
352 def get_diff(
346 def get_diff(
353 self, commit1, commit2, path=None, ignore_whitespace=False,
347 self, commit1, commit2, path=None, ignore_whitespace=False,
354 context=3, path1=None):
348 context=3, path1=None):
355 self._validate_diff_commits(commit1, commit2)
349 self._validate_diff_commits(commit1, commit2)
356 svn_rev1 = int(commit1.raw_id)
350 svn_rev1 = int(commit1.raw_id)
357 svn_rev2 = int(commit2.raw_id)
351 svn_rev2 = int(commit2.raw_id)
358 diff = self._remote.diff(
352 diff = self._remote.diff(
359 svn_rev1, svn_rev2, path1=path1, path2=path,
353 svn_rev1, svn_rev2, path1=path1, path2=path,
360 ignore_whitespace=ignore_whitespace, context=context)
354 ignore_whitespace=ignore_whitespace, context=context)
361 return SubversionDiff(diff)
355 return SubversionDiff(diff)
362
356
363
357
364 def _sanitize_url(url):
358 def _sanitize_url(url):
365 if '://' not in url:
359 if '://' not in url:
366 url = 'file://' + urllib.request.pathname2url(url)
360 url = 'file://' + urllib.request.pathname2url(url)
367 return url
361 return url