fix(LFS): add changes related to the git lfs push --all <GIT-URL> option.
ilin.s - r5258:5a32a6f1 default
@@ -1,1053 +1,1053 @@
# Copyright (C) 2014-2023 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
GIT repository module
"""

import logging
import os
import re

from zope.cachedescriptors.property import Lazy as LazyProperty

from collections import OrderedDict
from rhodecode.lib.datelib import (
    utcdate_fromtimestamp, makedate, date_astimestamp)
from rhodecode.lib.hash_utils import safe_str
from rhodecode.lib.utils2 import CachedProperty
from rhodecode.lib.vcs import connection, path as vcspath
from rhodecode.lib.vcs.backends.base import (
    BaseRepository, CollectionGenerator, Config, MergeResponse,
    MergeFailureReason, Reference)
from rhodecode.lib.vcs.backends.git.commit import GitCommit
from rhodecode.lib.vcs.backends.git.diff import GitDiff
from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
from rhodecode.lib.vcs.exceptions import (
    CommitDoesNotExistError, EmptyRepositoryError,
    RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)


SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')

log = logging.getLogger(__name__)


class GitRepository(BaseRepository):
    """
    Git repository backend.
    """
    DEFAULT_BRANCH_NAME = os.environ.get('GIT_DEFAULT_BRANCH_NAME') or 'master'
    DEFAULT_REF = f'branch:{DEFAULT_BRANCH_NAME}'

    contact = BaseRepository.DEFAULT_CONTACT

    def __init__(self, repo_path, config=None, create=False, src_url=None,
                 do_workspace_checkout=False, with_wire=None, bare=False):

        self.path = safe_str(os.path.abspath(repo_path))
        self.config = config if config else self.get_default_config()
        self.with_wire = with_wire or {"cache": False}  # default should not use cache

        self._init_repo(create, src_url, do_workspace_checkout, bare)

        # caches
        self._commit_ids = {}

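    # Usage sketch (illustrative; the paths are assumptions, not part of this
    # diff): open an existing repository, or create a new bare one from a
    # source URL.
    #
    #   repo = GitRepository('/srv/repos/example.git')
    #   mirror = GitRepository('/srv/repos/mirror.git', create=True, bare=True,
    #                          src_url='file:///srv/repos/example.git')
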
    @LazyProperty
    def _remote(self):
        repo_id = self.path
        return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)

    @LazyProperty
    def bare(self):
        return self._remote.bare()

    @LazyProperty
    def head(self):
        return self._remote.head()

    @CachedProperty
    def commit_ids(self):
        """
        Returns list of commit ids, in ascending order. Being a lazy
        attribute allows external tools to inject commit ids from cache.
        """
        commit_ids = self._get_all_commit_ids()
        self._rebuild_cache(commit_ids)
        return commit_ids

    def _rebuild_cache(self, commit_ids):
        self._commit_ids = {commit_id: index
                            for index, commit_id in enumerate(commit_ids)}

    def run_git_command(self, cmd, **opts):
        """
        Runs the given ``cmd`` as a git command and returns a tuple of
        (stdout, stderr).

        :param cmd: git command to be executed
        :param opts: env options to pass into Subprocess command
        """
        if not isinstance(cmd, list):
            raise ValueError(f'cmd must be a list, got {type(cmd)} instead')

        skip_stderr_log = opts.pop('skip_stderr_log', False)
        out, err = self._remote.run_git_command(cmd, **opts)
        if err and not skip_stderr_log:
            log.debug('Stderr output of git command "%s":\n%s', cmd, err)
        return out, err

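    # Usage sketch (illustrative): ``cmd`` must be a list, never a string;
    # extra keyword arguments are forwarded as subprocess/env options.
    #
    #   out, err = repo.run_git_command(
    #       ['rev-parse', 'HEAD'], skip_stderr_log=True)
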
    @staticmethod
    def check_url(url, config):
        """
        Checks the given url and tries to verify that it is a valid link.
        It may sometimes happen that git issues a basic auth request, which
        can cause the whole API to hang when used from Python or other
        external callers.

        On failure it raises urllib2.HTTPError; the exception is also raised
        when the return code is not 200.
        """
        # first check if it's a local path and not a url
        if os.path.isdir(url) or url.startswith('file:'):
            return True

        if '+' in url.split('://', 1)[0]:
            url = url.split('+', 1)[1]

        # Request the _remote to verify the url
        return connection.Git.check_url(url, config.serialize())

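    # Usage sketch (illustrative; the URL is an assumption): local paths pass
    # immediately, while schemes such as `git+https://` have their scheme
    # prefix stripped before the remote verification.
    #
    #   GitRepository.check_url('https://code.example.com/repo.git', repo.config)
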
    @staticmethod
    def is_valid_repository(path):
        if os.path.isdir(os.path.join(path, '.git')):
            return True
        # check case of bare repository
        try:
            GitRepository(path)
            return True
        except VCSError:
            pass
        return False

    def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
                   bare=False):
        if create and os.path.exists(self.path):
            raise RepositoryError(
                f"Cannot create repository at {self.path}, location already exists")

        if bare and do_workspace_checkout:
            raise RepositoryError("Cannot update a bare repository")
        try:

            if src_url:
                # check URL before any actions
                GitRepository.check_url(src_url, self.config)

            if create:
                if bare:
                    self._remote.init_bare()
                else:
                    self._remote.init()

                if src_url and bare:
                    # a bare repository only allows a fetch; checkout is not allowed
                    self.fetch(src_url, commit_ids=None)
                elif src_url:
                    self.pull(src_url, commit_ids=None,
                              update_after=do_workspace_checkout)

            else:
                if not self._remote.assert_correct_path():
                    raise RepositoryError(
                        f'Path "{self.path}" does not contain a Git repository')

        # TODO: johbo: check if we have to translate the OSError here
        except OSError as err:
            raise RepositoryError(err)

    def _get_all_commit_ids(self):
        return self._remote.get_all_commit_ids()

    def _get_commit_ids(self, filters=None):
        # we must check if this repo is not empty, since the later command
        # fails if it is. And it's cheaper to ask than to swallow the
        # subprocess errors

        head = self._remote.head(show_exc=False)

        if not head:
            return []

        rev_filter = ['--branches', '--tags']
        extra_filter = []

        if filters:
            if filters.get('since'):
                extra_filter.append('--since=%s' % (filters['since']))
            if filters.get('until'):
                extra_filter.append('--until=%s' % (filters['until']))
            if filters.get('branch_name'):
                rev_filter = []
                extra_filter.append(filters['branch_name'])
            rev_filter.extend(extra_filter)

        # if filters.get('start') or filters.get('end'):
        #     # skip is offset, max-count is limit
        #     if filters.get('start'):
        #         extra_filter += ' --skip=%s' % filters['start']
        #     if filters.get('end'):
        #         extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))

        cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
        try:
            output, __ = self.run_git_command(cmd)
        except RepositoryError:
            # Can be raised for empty repositories
            return []
        return output.splitlines()

    def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False, reference_obj=None):

        def is_null(value):
            return len(value) == commit_id_or_idx.count('0')

        if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
            return self.commit_ids[-1]

        commit_missing_err = "Commit {} does not exist for `{}`".format(
            *map(safe_str, [commit_id_or_idx, self.name]))

        is_bstr = isinstance(commit_id_or_idx, str)
        is_branch = reference_obj and reference_obj.branch

        lookup_ok = False
        if is_bstr:
            # Need to call remote to translate id for tagging scenarios,
            # or branches that are numeric
            try:
                remote_data = self._remote.get_object(commit_id_or_idx,
                                                      maybe_unreachable=maybe_unreachable)
                commit_id_or_idx = remote_data["commit_id"]
                lookup_ok = True
            except (CommitDoesNotExistError,):
                lookup_ok = False

        if lookup_ok is False:
            is_numeric_idx = \
                (is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12) \
                or isinstance(commit_id_or_idx, int)
            if not is_branch and (is_numeric_idx or is_null(commit_id_or_idx)):
                try:
                    commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
                    lookup_ok = True
                except Exception:
                    raise CommitDoesNotExistError(commit_missing_err)

        # we failed both the regular lookup and the lookup by integer number
        if lookup_ok is False:
            raise CommitDoesNotExistError(commit_missing_err)

        # Ensure we return a full id
        if not SHA_PATTERN.match(str(commit_id_or_idx)):
            raise CommitDoesNotExistError(
                "Given commit id %s not recognized" % commit_id_or_idx)
        return commit_id_or_idx

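    # Usage sketch (illustrative): the lookup accepts a numeric index, a full
    # or abbreviated sha, or symbolic names such as 'tip'/'HEAD', and always
    # resolves to a full commit id.
    #
    #   first_sha = repo._lookup_commit(0)
    #   tip_sha = repo._lookup_commit('HEAD')
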
    def get_hook_location(self):
        """
        returns absolute path to location where hooks are stored
        """
        loc = os.path.join(self.path, 'hooks')
        if not self.bare:
            loc = os.path.join(self.path, '.git', 'hooks')
        return loc

    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        try:
            return self.get_commit().date
        except RepositoryError:
            tzoffset = makedate()[1]
            return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)

    def _get_fs_mtime(self):
        idx_loc = '' if self.bare else '.git'
        # fallback to filesystem
        in_path = os.path.join(self.path, idx_loc, "index")
        he_path = os.path.join(self.path, idx_loc, "HEAD")
        if os.path.exists(in_path):
            return os.stat(in_path).st_mtime
        else:
            return os.stat(he_path).st_mtime

    @LazyProperty
    def description(self):
        description = self._remote.get_description()
        return safe_str(description or self.DEFAULT_DESCRIPTION)

    def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
        if self.is_empty():
            return OrderedDict()

        result = []
        for ref, sha in self._refs.items():
            if ref.startswith(prefix):
                ref_name = ref
                if strip_prefix:
                    ref_name = ref[len(prefix):]
                result.append((safe_str(ref_name), sha))

        def get_name(entry):
            return entry[0]

        return OrderedDict(sorted(result, key=get_name, reverse=reverse))

    def _get_branches(self):
        return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)

    @CachedProperty
    def branches(self):
        return self._get_branches()

    @CachedProperty
    def branches_closed(self):
        return {}

    @CachedProperty
    def bookmarks(self):
        return {}

    @CachedProperty
    def branches_all(self):
        all_branches = {}
        all_branches.update(self.branches)
        all_branches.update(self.branches_closed)
        return all_branches

    @CachedProperty
    def tags(self):
        return self._get_tags()

    def _get_tags(self):
        return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)

    def tag(self, name, user, commit_id=None, message=None, date=None,
            **kwargs):
        # TODO: fix this method to apply annotated tags correctly with a message
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)
        commit = self.get_commit(commit_id=commit_id)
        message = message or f"Added tag {name} for commit {commit.raw_id}"

        self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)

        self._invalidate_prop_cache('tags')
        self._invalidate_prop_cache('_refs')

        return commit

    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given ``name``.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exist
        """
        if name not in self.tags:
            raise TagDoesNotExistError("Tag %s does not exist" % name)

        self._remote.tag_remove(name)
        self._invalidate_prop_cache('tags')
        self._invalidate_prop_cache('_refs')

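    # Usage sketch (illustrative; the tag name and user are assumptions): a
    # tag is just a ref under refs/tags/, and the cached 'tags' and '_refs'
    # properties are invalidated on both create and remove.
    #
    #   commit = repo.tag('v1.0.0', 'Joe Doe <joe.doe@example.com>')
    #   repo.remove_tag('v1.0.0', 'Joe Doe <joe.doe@example.com>')
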
    def _get_refs(self):
        return self._remote.get_refs()

    @CachedProperty
    def _refs(self):
        return self._get_refs()

    @property
    def _ref_tree(self):
        node = tree = {}
        for ref, sha in self._refs.items():
            path = ref.split('/')
            for bit in path[:-1]:
                node = node.setdefault(bit, {})
            node[path[-1]] = sha
            node = tree
        return tree

    def get_remote_ref(self, ref_name):
        ref_key = f'refs/remotes/origin/{safe_str(ref_name)}'
        try:
            return self._refs[ref_key]
        except Exception:
            return

    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
                   translate_tag=True, maybe_unreachable=False, reference_obj=None):
        """
        Returns a `GitCommit` object representing the commit from the git
        repository at the given `commit_id`, or the head (most recent commit)
        if None is given.
        """

        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # we have a cached idx, use it without contacting the remote
                idx = self._commit_ids[commit_id]
                return GitCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                pass

        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                _commit_id = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    commit_idx = self.commit_ids.index(_commit_id)
                return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
            except IndexError:
                commit_id = commit_idx
        else:
            commit_id = "tip"

        if translate_tag:
            commit_id = self._lookup_commit(
                commit_id, maybe_unreachable=maybe_unreachable,
                reference_obj=reference_obj)

        try:
            idx = self._commit_ids[commit_id]
        except KeyError:
            idx = -1

        return GitCommit(self, commit_id, idx, pre_load=pre_load)

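    # Usage sketch (illustrative): fetch a commit by id or by index; with no
    # arguments the most recent commit ("tip") is returned.
    #
    #   tip = repo.get_commit()
    #   first = repo.get_commit(commit_idx=0)
    #   same_tip = repo.get_commit(commit_id=tip.raw_id)
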
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
        """
        Returns a generator of `GitCommit` objects from start to end (both
        inclusive), in ascending date order.

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
            ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
            ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
            branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
            Mercurial evolve
        :raise BranchDoesNotExistError: If given `branch_name` does not
            exist.
        :raise CommitDoesNotExistError: If commits for given `start` or
            `end` could not be found.

        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        self._validate_branch_name(branch_name)

        if start_id is not None:
            self._validate_commit_id(start_id)
        if end_id is not None:
            self._validate_commit_id(end_id)

        start_raw_id = self._lookup_commit(start_id)
        start_pos = self._commit_ids[start_raw_id] if start_id else None
        end_raw_id = self._lookup_commit(end_id)
        end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            end_pos += 1

        filter_ = []
        if branch_name:
            filter_.append({'branch_name': branch_name})
        if start_date and not end_date:
            filter_.append({'since': start_date})
        if end_date and not start_date:
            filter_.append({'until': end_date})
        if start_date and end_date:
            filter_.append({'since': start_date})
            filter_.append({'until': end_date})

        # if start_pos or end_pos:
        #     filter_.append({'start': start_pos})
        #     filter_.append({'end': end_pos})

        if filter_:
            revfilters = {
                'branch_name': branch_name,
                'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
                'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
                'start': start_pos,
                'end': end_pos,
            }
            commit_ids = self._get_commit_ids(filters=revfilters)

        else:
            commit_ids = self.commit_ids

        if start_pos or end_pos:
            commit_ids = commit_ids[start_pos: end_pos]

        return CollectionGenerator(self, commit_ids, pre_load=pre_load,
                                   translate_tag=translate_tags)

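    # Usage sketch (illustrative; the branch name is an assumption): iterate
    # over the commits reachable from a single branch, oldest first.
    #
    #   for commit in repo.get_commits(branch_name='master'):
    #       log.debug('commit %s by %s', commit.raw_id, commit.author)
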
    def get_diff(
            self, commit1, commit2, path='', ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns a (git-like) *diff*, as plain text. Shows changes introduced
        by ``commit2`` since ``commit1``.

        :param commit1: Entry point from which diff is shown. Can be
            ``self.EMPTY_COMMIT`` - in this case, patch shows all
            the changes since the empty state of the repository until ``commit2``
        :param commit2: Commit until which changes should be shown.
        :param path:
        :param ignore_whitespace: If set to ``True``, would not show whitespace
            changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
            shown. Defaults to ``3``.
        :param path1:
        """
        self._validate_diff_commits(commit1, commit2)
        if path1 is not None and path1 != path:
            raise ValueError("Diff of two different paths not supported.")

        if path:
            file_filter = path
        else:
            file_filter = None

        diff = self._remote.diff(
            commit1.raw_id, commit2.raw_id, file_filter=file_filter,
            opt_ignorews=ignore_whitespace,
            context=context)

        return GitDiff(diff)

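    # Usage sketch (illustrative; the path is an assumption): diff two
    # commits, restricted to one file and ignoring whitespace changes.
    #
    #   c1 = repo.get_commit(commit_idx=0)
    #   c2 = repo.get_commit()
    #   diff = repo.get_diff(c1, c2, path='README.rst',
    #                        ignore_whitespace=True, context=5)
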
    def strip(self, commit_id, branch_name):
        commit = self.get_commit(commit_id=commit_id)
        if commit.merge:
            raise Exception('Cannot reset to merge commit')

        if not branch_name:
            raise ValueError(f'git strip requires a valid branch name, got {branch_name} instead')

        # parent is going to be the new head now
        commit = commit.parents[0]
        self._remote.update_refs(f'refs/heads/{branch_name}', commit.raw_id)

        # clear cached properties
        self._invalidate_prop_cache('commit_ids')
        self._invalidate_prop_cache('_refs')
        self._invalidate_prop_cache('branches')

        return len(self.commit_ids)

    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
                  self, commit_id1, repo2, commit_id2)

        if commit_id1 == commit_id2:
            return commit_id1

        if self != repo2:
            commits = self._remote.get_missing_revs(
                commit_id1, commit_id2, repo2.path)
            if commits:
                commit = repo2.get_commit(commits[-1])
                if commit.parents:
                    ancestor_id = commit.parents[0].raw_id
                else:
                    ancestor_id = None
            else:
                # no commits from the other repo, ancestor_id is commit_id2
                ancestor_id = commit_id2
        else:
            output, __ = self.run_git_command(
                ['merge-base', commit_id1, commit_id2])
            ancestor_id = self.COMMIT_ID_PAT.findall(output)[0]

        log.debug('Found common ancestor with sha: %s', ancestor_id)

        return ancestor_id

    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        repo1 = self
        ancestor_id = None

        if commit_id1 == commit_id2:
            commits = []
        elif repo1 != repo2:
            missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
                                                        repo2.path)
            commits = [
                repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
                for commit_id in reversed(missing_ids)]
        else:
            output, __ = repo1.run_git_command(
                ['log', '--reverse', '--pretty=format: %H', '-s',
                 f'{commit_id1}..{commit_id2}'])
            commits = [
                repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
                for commit_id in self.COMMIT_ID_PAT.findall(output)]

        return commits

    @LazyProperty
    def in_memory_commit(self):
        """
        Returns ``GitInMemoryCommit`` object for this repository.
        """
        return GitInMemoryCommit(self)

    def pull(self, url, commit_ids=None, update_after=False):
        """
        Pull changes from an external location. Pull differs from fetch in
        git in that it also does a checkout.

        :param commit_ids: Optional. Can be set to a list of commit ids
            which shall be pulled from the other repository.
        """
        refs = None
        if commit_ids is not None:
            remote_refs = self._remote.get_remote_refs(url)
            refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
        self._remote.pull(url, refs=refs, update_after=update_after)
        self._remote.invalidate_vcs_cache()

    def fetch(self, url, commit_ids=None, **kwargs):
        """
        Fetch all git objects from an external location.
        """
        self._remote.sync_fetch(url, refs=commit_ids, **kwargs)
        self._remote.invalidate_vcs_cache()

-    def push(self, url):
+    def push(self, url, **kwargs):
         refs = None
-        self._remote.sync_push(url, refs=refs)
+        self._remote.sync_push(url, refs=refs, **kwargs)

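    # Usage sketch (illustrative; the URL is an assumption): the new **kwargs
    # pass-through above lets callers forward extra options to the remote's
    # sync_push, which the `git lfs push --all <GIT-URL>` support in this
    # commit relies on.
    #
    #   repo.push('https://code.example.com/target.git')
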
683 def set_refs(self, ref_name, commit_id):
683 def set_refs(self, ref_name, commit_id):
684 self._remote.set_refs(ref_name, commit_id)
684 self._remote.set_refs(ref_name, commit_id)
685 self._invalidate_prop_cache('_refs')
685 self._invalidate_prop_cache('_refs')
686
686
687 def remove_ref(self, ref_name):
687 def remove_ref(self, ref_name):
688 self._remote.remove_ref(ref_name)
688 self._remote.remove_ref(ref_name)
689 self._invalidate_prop_cache('_refs')
689 self._invalidate_prop_cache('_refs')
690
690
691 def run_gc(self, prune=True):
691 def run_gc(self, prune=True):
692 cmd = ['gc', '--aggressive']
692 cmd = ['gc', '--aggressive']
693 if prune:
693 if prune:
694 cmd += ['--prune=now']
694 cmd += ['--prune=now']
695 _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False)
695 _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False)
696 return stderr
696 return stderr
697
697
698 def _update_server_info(self):
698 def _update_server_info(self):
699 """
699 """
700 runs gits update-server-info command in this repo instance
700 runs gits update-server-info command in this repo instance
701 """
701 """
702 self._remote.update_server_info()
702 self._remote.update_server_info()
703
703
704 def _current_branch(self):
704 def _current_branch(self):
705 """
705 """
706 Return the name of the current branch.
706 Return the name of the current branch.
707
707
708 It only works for non bare repositories (i.e. repositories with a
708 It only works for non bare repositories (i.e. repositories with a
709 working copy)
709 working copy)
710 """
710 """
711 if self.bare:
711 if self.bare:
712 raise RepositoryError('Bare git repos do not have active branches')
712 raise RepositoryError('Bare git repos do not have active branches')
713
713
714 if self.is_empty():
714 if self.is_empty():
715 return None
715 return None
716
716
717 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
717 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
718 return stdout.strip()
718 return stdout.strip()
719
719
720 def _checkout(self, branch_name, create=False, force=False):
720 def _checkout(self, branch_name, create=False, force=False):
721 """
721 """
722 Checkout a branch in the working directory.
722 Checkout a branch in the working directory.
723
723
724 It tries to create the branch if create is True, failing if the branch
724 It tries to create the branch if create is True, failing if the branch
725 already exists.
725 already exists.
726
726
727 It only works for non bare repositories (i.e. repositories with a
727 It only works for non bare repositories (i.e. repositories with a
728 working copy)
728 working copy)
729 """
729 """
730 if self.bare:
730 if self.bare:
731 raise RepositoryError('Cannot checkout branches in a bare git repo')
731 raise RepositoryError('Cannot checkout branches in a bare git repo')
732
732
733 cmd = ['checkout']
733 cmd = ['checkout']
734 if force:
734 if force:
735 cmd.append('-f')
735 cmd.append('-f')
736 if create:
736 if create:
737 cmd.append('-b')
737 cmd.append('-b')
738 cmd.append(branch_name)
738 cmd.append(branch_name)
739 self.run_git_command(cmd, fail_on_stderr=False)
739 self.run_git_command(cmd, fail_on_stderr=False)
740
740
741 def _create_branch(self, branch_name, commit_id):
741 def _create_branch(self, branch_name, commit_id):
742 """
742 """
743 creates a branch in a GIT repo
743 creates a branch in a GIT repo
744 """
744 """
745 self._remote.create_branch(branch_name, commit_id)
745 self._remote.create_branch(branch_name, commit_id)
746
746
747 def _identify(self):
747 def _identify(self):
748 """
748 """
749 Return the current state of the working directory.
749 Return the current state of the working directory.
750 """
750 """
751 if self.bare:
751 if self.bare:
752 raise RepositoryError('Bare git repos do not have active branches')
752 raise RepositoryError('Bare git repos do not have active branches')
753
753
754 if self.is_empty():
754 if self.is_empty():
755 return None
755 return None
756
756
757 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
757 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
758 return stdout.strip()
758 return stdout.strip()
759
759
760 def _local_clone(self, clone_path, branch_name, source_branch=None):
760 def _local_clone(self, clone_path, branch_name, source_branch=None):
761 """
761 """
762 Create a local clone of the current repo.
762 Create a local clone of the current repo.
763 """
763 """
764 # N.B.(skreft): the --branch option is required as otherwise the shallow
764 # N.B.(skreft): the --branch option is required as otherwise the shallow
765 # clone will only fetch the active branch.
765 # clone will only fetch the active branch.
766 cmd = ['clone', '--branch', branch_name,
766 cmd = ['clone', '--branch', branch_name,
767 self.path, os.path.abspath(clone_path)]
767 self.path, os.path.abspath(clone_path)]
768
768
769 self.run_git_command(cmd, fail_on_stderr=False)
769 self.run_git_command(cmd, fail_on_stderr=False)
770
770
771 # if we get the different source branch, make sure we also fetch it for
771 # if we get the different source branch, make sure we also fetch it for
772 # merge conditions
772 # merge conditions
773 if source_branch and source_branch != branch_name:
773 if source_branch and source_branch != branch_name:
774 # check if the ref exists.
774 # check if the ref exists.
775 shadow_repo = GitRepository(os.path.abspath(clone_path))
775 shadow_repo = GitRepository(os.path.abspath(clone_path))
776 if shadow_repo.get_remote_ref(source_branch):
776 if shadow_repo.get_remote_ref(source_branch):
777 cmd = ['fetch', self.path, source_branch]
777 cmd = ['fetch', self.path, source_branch]
778 self.run_git_command(cmd, fail_on_stderr=False)
778 self.run_git_command(cmd, fail_on_stderr=False)
779
779
780 def _local_fetch(self, repository_path, branch_name, use_origin=False):
780 def _local_fetch(self, repository_path, branch_name, use_origin=False):
781 """
781 """
782 Fetch a branch from a local repository.
782 Fetch a branch from a local repository.
783 """
783 """
784 repository_path = os.path.abspath(repository_path)
784 repository_path = os.path.abspath(repository_path)
785 if repository_path == self.path:
785 if repository_path == self.path:
786 raise ValueError('Cannot fetch from the same repository')
786 raise ValueError('Cannot fetch from the same repository')
787
787
788 if use_origin:
788 if use_origin:
789 branch_name = '+{branch}:refs/heads/{branch}'.format(
789 branch_name = '+{branch}:refs/heads/{branch}'.format(
790 branch=branch_name)
790 branch=branch_name)
791
791
792 cmd = ['fetch', '--no-tags', '--update-head-ok',
792 cmd = ['fetch', '--no-tags', '--update-head-ok',
793 repository_path, branch_name]
793 repository_path, branch_name]
794 self.run_git_command(cmd, fail_on_stderr=False)
794 self.run_git_command(cmd, fail_on_stderr=False)
795
795
796 def _local_reset(self, branch_name):
796 def _local_reset(self, branch_name):
797 branch_name = f'{branch_name}'
797 branch_name = f'{branch_name}'
798 cmd = ['reset', '--hard', branch_name, '--']
798 cmd = ['reset', '--hard', branch_name, '--']
799 self.run_git_command(cmd, fail_on_stderr=False)
799 self.run_git_command(cmd, fail_on_stderr=False)
800
800
801 def _last_fetch_heads(self):
801 def _last_fetch_heads(self):
802 """
802 """
803 Return the last fetched heads that need merging.
803 Return the last fetched heads that need merging.
804
804
805 The algorithm is defined at
805 The algorithm is defined at
806 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
806 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
807 """
807 """
808 if not self.bare:
808 if not self.bare:
809 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
809 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
810 else:
810 else:
811 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
811 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
812
812
813 heads = []
813 heads = []
814 with open(fetch_heads_path) as f:
814 with open(fetch_heads_path) as f:
815 for line in f:
815 for line in f:
816 if ' not-for-merge ' in line:
816 if ' not-for-merge ' in line:
817 continue
817 continue
818 line = re.sub('\t.*', '', line, flags=re.DOTALL)
818 line = re.sub('\t.*', '', line, flags=re.DOTALL)
819 heads.append(line)
819 heads.append(line)
820
820
821 return heads
821 return heads
822
822
823 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
823 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
824 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
824 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
825
825
826 def _local_pull(self, repository_path, branch_name, ff_only=True):
826 def _local_pull(self, repository_path, branch_name, ff_only=True):
827 """
827 """
828 Pull a branch from a local repository.
828 Pull a branch from a local repository.
829 """
829 """
830 if self.bare:
830 if self.bare:
831 raise RepositoryError('Cannot pull into a bare git repository')
831 raise RepositoryError('Cannot pull into a bare git repository')
832 # N.B.(skreft): The --ff-only option is to make sure this is a
832 # N.B.(skreft): The --ff-only option is to make sure this is a
833 # fast-forward (i.e., we are only pulling new changes and there are no
833 # fast-forward (i.e., we are only pulling new changes and there are no
834 # conflicts with our current branch)
834 # conflicts with our current branch)
835 # Additionally, that option needs to go before --no-tags, otherwise git
835 # Additionally, that option needs to go before --no-tags, otherwise git
836 # pull complains about it being an unknown flag.
836 # pull complains about it being an unknown flag.
837 cmd = ['pull']
837 cmd = ['pull']
838 if ff_only:
838 if ff_only:
839 cmd.append('--ff-only')
839 cmd.append('--ff-only')
840 cmd.extend(['--no-tags', repository_path, branch_name])
840 cmd.extend(['--no-tags', repository_path, branch_name])
841 self.run_git_command(cmd, fail_on_stderr=False)
841 self.run_git_command(cmd, fail_on_stderr=False)
842
842
    def _local_merge(self, merge_message, user_name, user_email, heads):
        """
        Merge the given head into the checked out branch.

        It will force a merge commit.

        Currently it raises an error if the repo is empty, as it is not
        possible to create a merge commit in an empty repo.

        :param merge_message: The message to use for the merge commit.
        :param heads: the heads to merge.
        """
        if self.bare:
            raise RepositoryError('Cannot merge into a bare git repository')

        if not heads:
            return

        if self.is_empty():
            # TODO(skreft): do something more robust in this case.
            raise RepositoryError('Do not know how to merge into empty repositories yet')
        unresolved = None

        # N.B.(skreft): the --no-ff option is used to enforce the creation of a
        # merge commit. We also specify the user who is doing the merge.
        cmd = ['-c', f'user.name="{user_name}"',
               '-c', f'user.email={user_email}',
               'merge', '--no-ff', '-m', safe_str(merge_message)]

        merge_cmd = cmd + heads

        try:
            self.run_git_command(merge_cmd, fail_on_stderr=False)
        except RepositoryError:
            files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
                                         fail_on_stderr=False)[0].splitlines()
            # NOTE(marcink): we add the U notation for consistency with the HG backend output
            unresolved = [f'U {f}' for f in files]

            # Cleanup any merge leftovers
            self._remote.invalidate_vcs_cache()
            self.run_git_command(['merge', '--abort'], fail_on_stderr=False)

            if unresolved:
                raise UnresolvedFilesInRepo(unresolved)
            else:
                raise

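    # Illustrative sketch only (hypothetical message and head sha): the
    # conflict handling above corresponds to this git command sequence:
    #
    #   git merge --no-ff -m "merge message" <head-sha>   # fails on conflicts
    #   git diff --name-only --diff-filter=U              # list conflicted files
    #   git merge --abort                                 # drop merge leftovers
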
    def _local_push(
            self, source_branch, repository_path, target_branch,
            enable_hooks=False, rc_scm_data=None):
        """
        Push the source_branch to the given repository and target_branch.

        Currently, if the target_branch is not master and the target repo is
        empty, the push will work, but then GitRepository won't be able to
        find the pushed branch or the commits, as HEAD will be corrupted
        (i.e., pointing to master, which does not exist).

        It does not run the hooks in the target repo.
        """
        # TODO(skreft): deal with the case in which the target repo is empty,
        # and the target_branch is not master.
        target_repo = GitRepository(repository_path)
        if (not target_repo.bare and
                target_repo._current_branch() == target_branch):
            # Git prevents pushing to the checked out branch, so simulate it by
            # pulling into the target repository.
            target_repo._local_pull(self.path, source_branch)
        else:
            cmd = ['push', os.path.abspath(repository_path),
                   f'{source_branch}:{target_branch}']
            gitenv = {}
            if rc_scm_data:
                gitenv.update({'RC_SCM_DATA': rc_scm_data})

            if not enable_hooks:
                gitenv['RC_SKIP_HOOKS'] = '1'
            self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)

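    # Illustrative sketch only (hypothetical paths/branches): the else branch
    # above is equivalent to a plain push with hooks skipped via environment:
    #
    #   RC_SKIP_HOOKS=1 git push /abs/path/to/target feature-branch:master
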
    def _get_new_pr_branch(self, source_branch, target_branch):
        prefix = f'pr_{source_branch}-{target_branch}_'
        pr_branches = []
        for branch in self.branches:
            if branch.startswith(prefix):
                pr_branches.append(int(branch[len(prefix):]))

        if not pr_branches:
            branch_id = 0
        else:
            branch_id = max(pr_branches) + 1

        return '%s%d' % (prefix, branch_id)

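    # Illustrative sketch only: given existing branches 'pr_feat-master_0'
    # and 'pr_feat-master_1', a hypothetical call
    #
    #   repo._get_new_pr_branch('feat', 'master')
    #
    # returns 'pr_feat-master_2' (highest existing numeric suffix plus one).
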
    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, target_ref, source_ref):
        shadow_repository_path = self._get_shadow_repository_path(
            self.path, repo_id, workspace_id)
        if not os.path.exists(shadow_repository_path):
            self._local_clone(
                shadow_repository_path, target_ref.name, source_ref.name)
            log.debug('Prepared %s shadow repository in %s',
                      self.alias, shadow_repository_path)

        return shadow_repository_path

    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):

        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)

        if target_ref.commit_id != self.branches[target_ref.name]:
            log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
                        target_ref.commit_id, self.branches[target_ref.name])
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self.get_shadow_instance(shadow_repository_path)

        # checkout source, if it's different. Otherwise we could not
        # fetch proper commits for merge testing
        if source_ref.name != target_ref.name:
            if shadow_repo.get_remote_ref(source_ref.name):
                shadow_repo._checkout(source_ref.name, force=True)

        # checkout target, and fetch changes
        shadow_repo._checkout(target_ref.name, force=True)

        # fetch/reset pull the target, in case it is changed
        # this handles even force changes
        shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
        shadow_repo._local_reset(target_ref.name)

        # Need to reload repo to invalidate the cache, or otherwise we cannot
        # retrieve the last target commit.
        shadow_repo = self.get_shadow_instance(shadow_repository_path)
        if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
            log.warning('Shadow Target ref %s commit mismatch %s vs %s',
                        target_ref, target_ref.commit_id,
                        shadow_repo.branches[target_ref.name])
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        # calculate new branch
        pr_branch = shadow_repo._get_new_pr_branch(
            source_ref.name, target_ref.name)
        log.debug('using pull-request merge branch: `%s`', pr_branch)
        # checkout to temp branch, and fetch changes
        shadow_repo._checkout(pr_branch, create=True)
        try:
            shadow_repo._local_fetch(source_repo.path, source_ref.name)
        except RepositoryError:
            log.exception('Failure when doing local fetch on '
                          'shadow repo: %s', shadow_repo)
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}
        try:
            shadow_repo._local_merge(merge_message, merger_name, merger_email,
                                     [source_ref.commit_id])
            merge_possible = True

            # Need to invalidate the cache, or otherwise we
            # cannot retrieve the merge commit.
            shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
            merge_commit_id = shadow_repo.branches[pr_branch]

            # Set a reference pointing to the merge commit. This reference may
            # be used to easily identify the last successful merge commit in
            # the shadow repository.
            shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
            merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
        except RepositoryError as e:
            log.exception('Failure when doing local merge on git shadow repo')
            if isinstance(e, UnresolvedFilesInRepo):
                metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))

            merge_possible = False
            merge_failure_reason = MergeFailureReason.MERGE_FAILED

        if merge_possible and not dry_run:
            try:
                shadow_repo._local_push(
                    pr_branch, self.path, target_ref.name, enable_hooks=True,
                    rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
                merge_succeeded = True
            except RepositoryError:
                log.exception(
                    'Failure when doing local push from the shadow '
                    'repository to the target repository at %s.', self.path)
                merge_succeeded = False
                merge_failure_reason = MergeFailureReason.PUSH_FAILED
                metadata['target'] = 'git shadow repo'
                metadata['merge_commit'] = pr_branch
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)
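
    # Illustrative sketch only (hypothetical refs and ids, simplified): a
    # typical merge-check call runs against the shadow workspace and reports
    # the outcome via MergeResponse without touching the target repo:
    #
    #   resp = repo._merge_repo(
    #       repo_id=1, workspace_id='pr-42',
    #       target_ref=target_ref, source_repo=source_repo,
    #       source_ref=source_ref, merge_message='merged pr-42',
    #       merger_name='Joe Doe', merger_email='joe.doe@example.com',
    #       dry_run=True)
    #   # with dry_run=True nothing is pushed back; the response records
    #   # whether a clean merge is possible and any conflict metadata.
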
@@ -1,1024 +1,1024 b''
# Copyright (C) 2014-2023 RhodeCode GmbH
# (license header identical to the previous file: GNU AGPL v3, dual-licensed)

"""
HG repository module
"""
import os
import logging
import binascii
import configparser
import urllib.request
import urllib.parse
import urllib.error

from zope.cachedescriptors.property import Lazy as LazyProperty

from collections import OrderedDict
from rhodecode.lib.datelib import (
    date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
from rhodecode.lib.str_utils import safe_str
from rhodecode.lib.utils2 import CachedProperty
from rhodecode.lib.vcs import connection, exceptions
from rhodecode.lib.vcs.backends.base import (
    BaseRepository, CollectionGenerator, Config, MergeResponse,
    MergeFailureReason, Reference, BasePathPermissionChecker)
from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
from rhodecode.lib.vcs.exceptions import (
    EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
    TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)

hexlify = binascii.hexlify
nullid = "\0" * 20

log = logging.getLogger(__name__)


class MercurialRepository(BaseRepository):
    """
    Mercurial repository backend
    """
    DEFAULT_BRANCH_NAME = 'default'

    def __init__(self, repo_path, config=None, create=False, src_url=None,
                 do_workspace_checkout=False, with_wire=None, bare=False):
        """
        Raises RepositoryError if a repository could not be found at the
        given ``repo_path``.

        :param repo_path: local path of the repository
        :param config: config object containing the repo configuration
        :param create=False: if set to True, would try to create repository if
           it does not exist rather than raising exception
        :param src_url=None: would try to clone repository from given location
        :param do_workspace_checkout=False: sets update of working copy after
           making a clone
        :param bare: not used, compatible with other VCS
        """

        self.path = safe_str(os.path.abspath(repo_path))
        # mercurial since 4.4.X requires certain configuration to be present;
        # because sometimes we init the repos with config, we need to meet
        # special requirements
        self.config = config if config else self.get_default_config(
            default=[('extensions', 'largefiles', '')])

        # NOTE(marcink): since python3 hgsubversion is deprecated.
        # From old installations we might still have this set enabled;
        # we explicitly remove it here to make sure it won't propagate further
        if config and config.get('extensions', 'hgsubversion') is not None:
            config.drop_option('extensions', 'hgsubversion')

        self.with_wire = with_wire or {"cache": False}  # default should not use cache

        self._init_repo(create, src_url, do_workspace_checkout)

        # caches
        self._commit_ids = {}

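    # Illustrative sketch only (hypothetical path): plain instantiation picks
    # up the default config, with the largefiles extension enabled:
    #
    #   repo = MercurialRepository('/srv/repos/myproject')
    #   repo.branches  # OrderedDict mapping branch name -> head commit id
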
    @LazyProperty
    def _remote(self):
        repo_id = self.path
        return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)

    @CachedProperty
    def commit_ids(self):
        """
        Returns a list of commit ids, in ascending order. Being a lazy
        attribute allows external tools to inject SHAs from the cache.
        """
        commit_ids = self._get_all_commit_ids()
        self._rebuild_cache(commit_ids)
        return commit_ids

    def _rebuild_cache(self, commit_ids):
        self._commit_ids = {commit_id: index
                            for index, commit_id in enumerate(commit_ids)}

    @CachedProperty
    def branches(self):
        return self._get_branches()

    @CachedProperty
    def branches_closed(self):
        return self._get_branches(active=False, closed=True)

    @CachedProperty
    def branches_all(self):
        all_branches = {}
        all_branches.update(self.branches)
        all_branches.update(self.branches_closed)
        return all_branches

    def _get_branches(self, active=True, closed=False):
        """
        Gets branches for this repository.
        By default returns only active branches that are not closed.

        :param active: return active branches
        :param closed: return also closed branches

        """
        if self.is_empty():
            return {}

        def get_name(ctx):
            return ctx[0]

        _branches = [(n, h,) for n, h in
                     self._remote.branches(active, closed).items()]

        return OrderedDict(sorted(_branches, key=get_name, reverse=False))

    @CachedProperty
    def tags(self):
        """
        Gets tags for this repository
        """
        return self._get_tags()

    def _get_tags(self):
        if self.is_empty():
            return {}

        def get_name(ctx):
            return ctx[0]

        _tags = [(n, h,) for n, h in
                 self._remote.tags().items()]

        return OrderedDict(sorted(_tags, key=get_name, reverse=True))

    def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)

        commit = self.get_commit(commit_id=commit_id)
        local = kwargs.setdefault('local', False)

        if message is None:
            message = f"Added tag {name} for commit {commit.short_id}"

        date, tz = date_to_timestamp_plus_offset(date)

        self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
        self._remote.invalidate_vcs_cache()

        # Reinitialize tags
        self._invalidate_prop_cache('tags')
        tag_id = self.tags[name]

        return self.get_commit(commit_id=tag_id)

    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given `name`.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exist
        """
        if name not in self.tags:
            raise TagDoesNotExistError("Tag %s does not exist" % name)

        if message is None:
            message = "Removed tag %s" % name
        local = False

        date, tz = date_to_timestamp_plus_offset(date)

        self._remote.tag(name, nullid, message, local, user, date, tz)
        self._remote.invalidate_vcs_cache()
        self._invalidate_prop_cache('tags')

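    # Illustrative sketch only (hypothetical user and commit id): creating
    # and removing a tag via this API:
    #
    #   repo.tag('v1.0', 'Joe Doe <joe.doe@example.com>', commit_id='f00ba4')
    #   repo.remove_tag('v1.0', 'Joe Doe <joe.doe@example.com>')
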
    @LazyProperty
    def bookmarks(self):
        """
        Gets bookmarks for this repository
        """
        return self._get_bookmarks()

    def _get_bookmarks(self):
        if self.is_empty():
            return {}

        def get_name(ctx):
            return ctx[0]

        _bookmarks = [
            (n, h) for n, h in
            self._remote.bookmarks().items()]

        return OrderedDict(sorted(_bookmarks, key=get_name))

    def _get_all_commit_ids(self):
        return self._remote.get_all_commit_ids('visible')

    def get_diff(
            self, commit1, commit2, path='', ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which diff is shown. Can be
           ``self.EMPTY_COMMIT`` - in this case, patch showing all
           the changes since empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param ignore_whitespace: If set to ``True``, would not show whitespace
           changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
           shown. Defaults to ``3``.
        """
        self._validate_diff_commits(commit1, commit2)
        if path1 is not None and path1 != path:
            raise ValueError("Diff of two different paths not supported.")

        if path:
            file_filter = [self.path, path]
        else:
            file_filter = None

        diff = self._remote.diff(
            commit1.raw_id, commit2.raw_id, file_filter=file_filter,
            opt_git=True, opt_ignorews=ignore_whitespace,
            context=context)
        return MercurialDiff(diff)

    def strip(self, commit_id, branch=None):
        self._remote.strip(commit_id, update=False, backup=False)

        self._remote.invalidate_vcs_cache()
        # clear cache
        self._invalidate_prop_cache('commit_ids')

        return len(self.commit_ids)

    def verify(self):
        verify = self._remote.verify()

        self._remote.invalidate_vcs_cache()
        return verify

    def hg_update_cache(self):
        update_cache = self._remote.hg_update_cache()

        self._remote.invalidate_vcs_cache()
        return update_cache

    def hg_rebuild_fn_cache(self):
        update_cache = self._remote.hg_rebuild_fn_cache()

        self._remote.invalidate_vcs_cache()
        return update_cache

    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
                  self, commit_id1, repo2, commit_id2)

        if commit_id1 == commit_id2:
            return commit_id1

        ancestors = self._remote.revs_from_revspec(
            "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
            other_path=repo2.path)

        ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None

        log.debug('Found common ancestor with sha: %s', ancestor_id)
        return ancestor_id

    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        if commit_id1 == commit_id2:
            commits = []
        else:
            if merge:
                indexes = self._remote.revs_from_revspec(
                    "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
                    commit_id2, commit_id1, commit_id1, other_path=repo2.path)
            else:
                indexes = self._remote.revs_from_revspec(
                    "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
                    commit_id1, other_path=repo2.path)

            commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
                       for idx in indexes]

        return commits

    @staticmethod
    def check_url(url, config):
        """
        Function will check the given url and try to verify if it's a valid
        link. Sometimes it may happen that Mercurial will issue a basic
        auth request that can cause the whole API to hang when used from
        Python or other external calls.

        On failures it'll raise urllib.error.HTTPError; the exception is also
        raised when the return code is not 200.
        """
        # check first if it's not a local url
        if os.path.isdir(url) or url.startswith('file:'):
            return True

        # Request the _remote to verify the url
        return connection.Hg.check_url(url, config.serialize())

    @staticmethod
    def is_valid_repository(path):
        return os.path.isdir(os.path.join(path, '.hg'))

    def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
        """
        Function will check for a mercurial repository in the given path. If
        there is no repository in that path it will raise an exception unless
        the `create` parameter is set to True - in that case the repository
        would be created.

        If `src_url` is given, it will try to clone the repository from that
        location. Additionally, it will update the working copy according to
        the `do_workspace_checkout` flag.
        """
        if create and os.path.exists(self.path):
            raise RepositoryError(
                f"Cannot create repository at {self.path}, location already exists")

        if src_url:
            url = str(self._get_url(src_url))
            MercurialRepository.check_url(url, self.config)

            self._remote.clone(url, self.path, do_workspace_checkout)

            # Don't try to create if we've already cloned repo
            create = False

        if create:
            os.makedirs(self.path, mode=0o755)

        self._remote.localrepository(create)

    @LazyProperty
    def in_memory_commit(self):
        return MercurialInMemoryCommit(self)

    @LazyProperty
    def description(self):
        description = self._remote.get_config_value(
            'web', 'description', untrusted=True)
        return safe_str(description or self.DEFAULT_DESCRIPTION)

    @LazyProperty
    def contact(self):
        contact = (
            self._remote.get_config_value("web", "contact") or
            self._remote.get_config_value("ui", "username"))
        return safe_str(contact or self.DEFAULT_CONTACT)

    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        try:
            return self.get_commit().date
        except RepositoryError:
            tzoffset = makedate()[1]
            return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)

    def _get_fs_mtime(self):
        # fallback to filesystem
        cl_path = os.path.join(self.path, '.hg', "00changelog.i")
        st_path = os.path.join(self.path, '.hg', "store")
        if os.path.exists(cl_path):
            return os.stat(cl_path).st_mtime
        else:
            return os.stat(st_path).st_mtime

    def _get_url(self, url):
        """
        Returns the normalized url. If no scheme is given, it falls back to
        the filesystem (``file:///``) scheme.
        """
        if url != 'default' and '://' not in url:
            url = "file:" + urllib.request.pathname2url(url)
        return url

    def get_hook_location(self):
        """
        returns absolute path to location where hooks are stored
        """
        return os.path.join(self.path, '.hg', '.hgrc')

    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
                   translate_tag=None, maybe_unreachable=False, reference_obj=None):
        """
        Returns ``MercurialCommit`` object representing repository's
        commit at the given `commit_id` or `commit_idx`.
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # we have cached idx, use it without contacting the remote
                idx = self._commit_ids[commit_id]
                return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                pass

        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                _commit_id = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    commit_idx = self.commit_ids.index(_commit_id)

                return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
            except IndexError:
                commit_id = commit_idx
        else:
            commit_id = "tip"

        # case here is no cached version, do an actual lookup instead
        try:
            raw_id, idx = self._remote.lookup(commit_id, both=True)
        except CommitDoesNotExistError:
            msg = "Commit {} does not exist for `{}`".format(
                *map(safe_str, [commit_id, self.name]))
            raise CommitDoesNotExistError(msg)

        return MercurialCommit(self, raw_id, idx, pre_load=pre_load)

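    # Illustrative sketch only (hypothetical ids): a commit can be looked up
    # by id, by index, or defaulting to the repository tip:
    #
    #   repo.get_commit(commit_id='a1b2c3d4e5f6...')  # by commit hash
    #   repo.get_commit(commit_idx=0)                 # first commit
    #   repo.get_commit()                             # "tip"
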
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns generator of ``MercurialCommit`` objects from start to end
        (both are inclusive)

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
           ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
           ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
           branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
            Mercurial evolve
        :raise BranchDoesNotExistError: If given ``branch_name`` does not
            exist.
        :raise CommitDoesNotExistError: If commit for given ``start`` or
            ``end`` could not be found.
        """
        # actually we should check now if it's not an empty repo
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        branch_ancestors = False
        if start_id is not None:
            self._validate_commit_id(start_id)
            c_start = self.get_commit(commit_id=start_id)
            start_pos = self._commit_ids[c_start.raw_id]
        else:
            start_pos = None

        if end_id is not None:
            self._validate_commit_id(end_id)
            c_end = self.get_commit(commit_id=end_id)
            end_pos = max(0, self._commit_ids[c_end.raw_id])
        else:
            end_pos = None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            end_pos += 1

        commit_filter = []

        if branch_name and not branch_ancestors:
            commit_filter.append(f'branch("{branch_name}")')
        elif branch_name and branch_ancestors:
            commit_filter.append(f'ancestors(branch("{branch_name}"))')

        if start_date and not end_date:
            commit_filter.append(f'date(">{start_date}")')
        if end_date and not start_date:
            commit_filter.append(f'date("<{end_date}")')
        if start_date and end_date:
            commit_filter.append(
                f'date(">{start_date}") and date("<{end_date}")')

        if not show_hidden:
            commit_filter.append('not obsolete()')
            commit_filter.append('not hidden()')

        # TODO: johbo: Figure out a simpler way for this solution
        collection_generator = CollectionGenerator
        if commit_filter:
            commit_filter = ' and '.join(map(safe_str, commit_filter))
            revisions = self._remote.rev_range([commit_filter])
            collection_generator = MercurialIndexBasedCollectionGenerator
        else:
            revisions = self.commit_ids

        if start_pos or end_pos:
            revisions = revisions[start_pos:end_pos]

        return collection_generator(self, revisions, pre_load=pre_load)

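    # Illustrative sketch only: with branch_name='default' and both dates
    # set, the commit_filter above joins into a single Mercurial revset such
    # as
    #
    #   branch("default") and date(">2023-01-01") and date("<2023-06-01")
    #   and not obsolete() and not hidden()
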
    def pull(self, url, commit_ids=None):
        """
        Pull changes from external location.

        :param commit_ids: Optional. Can be set to a list of commit ids
           which shall be pulled from the other repository.
        """
        url = self._get_url(url)
        self._remote.pull(url, commit_ids=commit_ids)
        self._remote.invalidate_vcs_cache()

    def fetch(self, url, commit_ids=None, **kwargs):
        """
        Backward compatibility with GIT fetch==pull
        """
        return self.pull(url, commit_ids=commit_ids)

-   def push(self, url):
+   def push(self, url, **kwargs):
        url = self._get_url(url)
        self._remote.sync_push(url)

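    # NOTE: the **kwargs added to push() above mirrors fetch() and keeps the
    # Mercurial backend call-compatible with code paths that pass extra,
    # Git-specific push options; the extra arguments are ignored here.
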
    def _local_clone(self, clone_path):
        """
        Create a local clone of the current repo.
        """
        self._remote.clone(self.path, clone_path, update_after_clone=True,
                           hooks=False)

    def _update(self, revision, clean=False):
        """
        Update the working copy to the specified revision.
        """
        log.debug('Doing checkout to commit: `%s` for %s', revision, self)
        self._remote.update(revision, clean=clean)

    def _identify(self):
        """
        Return the current state of the working directory.
        """
        return self._remote.identify().strip().rstrip('+')

    def _heads(self, branch=None):
        """
        Return the commit ids of the repository heads.
        """
        return self._remote.heads(branch=branch).strip().split(' ')

    def _ancestor(self, revision1, revision2):
        """
        Return the common ancestor of the two revisions.
        """
        return self._remote.ancestor(revision1, revision2)

    def _local_push(
            self, revision, repository_path, push_branches=False,
            enable_hooks=False):
        """
        Push the given revision to the specified repository.

        :param push_branches: allow to create branches in the target repo.
        """
        self._remote.push(
            [revision], repository_path, hooks=enable_hooks,
            push_branches=push_branches)

634 def _local_merge(self, target_ref, merge_message, user_name, user_email,
634 def _local_merge(self, target_ref, merge_message, user_name, user_email,
635 source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
635 source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
636 """
636 """
637 Merge the given source_revision into the checked out revision.
637 Merge the given source_revision into the checked out revision.
638
638
639 Returns the commit id of the merge and a boolean indicating if the
639 Returns the commit id of the merge and a boolean indicating if the
640 commit needs to be pushed.
640 commit needs to be pushed.
641 """
641 """
642
642
643 source_ref_commit_id = source_ref.commit_id
643 source_ref_commit_id = source_ref.commit_id
644 target_ref_commit_id = target_ref.commit_id
644 target_ref_commit_id = target_ref.commit_id
645
645
646 # update our workdir to target ref, for proper merge
646 # update our workdir to target ref, for proper merge
647 self._update(target_ref_commit_id, clean=True)
647 self._update(target_ref_commit_id, clean=True)
648
648
649 ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
649 ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
650 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
650 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
651
651
        if close_commit_id:
            # NOTE(marcink): if we get the close commit, this is our new source
            # which will include the close commit itself.
            source_ref_commit_id = close_commit_id

        if ancestor == source_ref_commit_id:
            # Nothing to do, the changes were already integrated
            return target_ref_commit_id, False

        elif ancestor == target_ref_commit_id and is_the_same_branch:
            # In this case we should force a commit message
            return source_ref_commit_id, True

        unresolved = None
        if use_rebase:
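            # Rebase strategy: a temporary bookmark marks the rebased head so
            # that, after `rebase` and `update`, _identify() returns the new
            # tip of the rebased series rather than the pre-rebase commit.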
            try:
                bookmark_name = f'rcbook{source_ref_commit_id}{target_ref_commit_id}'
                self.bookmark(bookmark_name, revision=source_ref.commit_id)
                self._remote.rebase(
                    source=source_ref_commit_id, dest=target_ref_commit_id)
                self._remote.invalidate_vcs_cache()
                self._update(bookmark_name, clean=True)
                return self._identify(), True
            except RepositoryError as e:
                # The rebase-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while rebasing shadow repo during merge.')
                if 'unresolved conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any rebase leftovers
                self._remote.invalidate_vcs_cache()
                self._remote.rebase(abort=True)
                self._remote.invalidate_vcs_cache()
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise
        else:
            try:
                self._remote.merge(source_ref_commit_id)
                self._remote.invalidate_vcs_cache()
                self._remote.commit(
                    message=safe_str(merge_message),
                    username=safe_str(f'{user_name} <{user_email}>'))
                self._remote.invalidate_vcs_cache()
                return self._identify(), True
            except RepositoryError as e:
                # The merge-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while merging shadow repo during merge.')
                if 'unresolved merge conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any merge leftovers
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise

    def _local_close(self, target_ref, user_name, user_email,
                     source_ref, close_message=''):
        """
        Close the branch of the given source_ref.

        Returns the commit id of the close commit and a boolean indicating if
        the commit needs to be pushed.
        """
        self._update(source_ref.commit_id)
        message = close_message or f"Closing branch: `{source_ref.name}`"
        try:
            self._remote.commit(
                message=safe_str(message),
                username=safe_str(f'{user_name} <{user_email}>'),
                close_branch=True)
            self._remote.invalidate_vcs_cache()
            return self._identify(), True
        except RepositoryError:
            # Cleanup any commit leftovers
            self._remote.update(clean=True)
            raise

    def _is_the_same_branch(self, target_ref, source_ref):
        return (
            self._get_branch_name(target_ref) ==
            self._get_branch_name(source_ref))

    def _get_branch_name(self, ref):
        if ref.type == 'branch':
            return ref.name
        return self._remote.ctx_branch(ref.commit_id)

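    # The merge machinery below never touches the target repository directly:
    # all pulls, merges and (possibly) rebases happen inside a per-workspace
    # "shadow" clone, and only a successful result is pushed back to origin.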
    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
        shadow_repository_path = self._get_shadow_repository_path(
            self.path, repo_id, workspace_id)
        if not os.path.exists(shadow_repository_path):
            self._local_clone(shadow_repository_path)
            log.debug(
                'Prepared shadow repository in %s', shadow_repository_path)

        return shadow_repository_path

    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):

        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)

        if target_ref.commit_id not in self._heads():
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

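        # A named branch with more than one head makes the merge target
        # ambiguous, so such merges are refused up front with a list of the
        # competing heads.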
        try:
            if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
                heads_all = self._heads(target_ref.name)
                max_heads = 10
                if len(heads_all) > max_heads:
                    heads = ',\n'.join(
                        heads_all[:max_heads] +
                        [f'and {len(heads_all)-max_heads} more.'])
                else:
                    heads = ',\n'.join(heads_all)
                metadata = {
                    'target_ref': target_ref,
                    'source_ref': source_ref,
                    'heads': heads
                }
                return MergeResponse(
                    False, False, None,
                    MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
                    metadata=metadata)
        except CommitDoesNotExistError:
            log.exception('Failure when looking up branch heads on hg target')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': target_ref})

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self.get_shadow_instance(shadow_repository_path)

        log.debug('Pulling in target reference %s', target_ref)
        self._validate_pull_reference(target_ref)
        shadow_repo._local_pull(self.path, target_ref)

        try:
            log.debug('Pulling in source reference %s', source_ref)
            source_repo._validate_pull_reference(source_ref)
            shadow_repo._local_pull(source_repo.path, source_ref)
        except CommitDoesNotExistError:
            log.exception('Failure when doing local pull on hg shadow repo')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_commit_id = None
        close_commit_id = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}

        # enforce that close branch is used only when we source from an
        # actual branch
        close_branch = close_branch and source_ref.type == 'branch'

        # don't allow closing the branch if source and target are the same
        close_branch = close_branch and source_ref.name != target_ref.name

        needs_push_on_close = False
        if close_branch and not use_rebase and not dry_run:
            try:
                close_commit_id, needs_push_on_close = shadow_repo._local_close(
                    target_ref, merger_name, merger_email, source_ref)
                merge_possible = True
            except RepositoryError:
                log.exception('Failure when doing close branch on '
                              'shadow repo: %s', shadow_repo)
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
        else:
            merge_possible = True

        needs_push = False
        if merge_possible:

            try:
                merge_commit_id, needs_push = shadow_repo._local_merge(
                    target_ref, merge_message, merger_name, merger_email,
                    source_ref, use_rebase=use_rebase,
                    close_commit_id=close_commit_id, dry_run=dry_run)
                merge_possible = True

                # read the state of the close action, as it may have
                # required a push
                needs_push = needs_push or needs_push_on_close

                # Set a bookmark pointing to the merge commit. This bookmark
                # may be used to easily identify the last successful merge
                # commit in the shadow repository.
                shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
                merge_ref = Reference('book', 'pr-merge', merge_commit_id)
            except SubrepoMergeError:
                log.exception(
                    'Subrepo merge error during local merge on hg shadow repo.')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
                needs_push = False
            except RepositoryError as e:
                log.exception('Failure when doing local merge on hg shadow repo')
                metadata['unresolved_files'] = 'no unresolved files found'

                if isinstance(e, UnresolvedFilesInRepo):
                    all_conflicts = list(e.args[0])
                    max_conflicts = 20
                    if len(all_conflicts) > max_conflicts:
                        conflicts = all_conflicts[:max_conflicts] \
                            + [f'and {len(all_conflicts)-max_conflicts} more.']
                    else:
                        conflicts = all_conflicts
                    metadata['unresolved_files'] = \
                        '\n* conflict: ' + \
                        ('\n * conflict: '.join(conflicts))

                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
                needs_push = False

        if merge_possible and not dry_run:
            if needs_push:
                # In case the target is a bookmark, update it, so after pushing
                # the bookmark is also updated in the target.
                if target_ref.type == 'book':
                    shadow_repo.bookmark(
                        target_ref.name, revision=merge_commit_id)
                try:
                    shadow_repo_with_hooks = self.get_shadow_instance(
                        shadow_repository_path,
                        enable_hooks=True)
                    # This is the actual merge action, we push from shadow
                    # into origin.
                    # Note: the push_branches option will push any new branch
                    # defined in the source repository to the target. This may
                    # be dangerous as branches are permanent in Mercurial.
                    # This feature was requested in issue #441.
                    shadow_repo_with_hooks._local_push(
                        merge_commit_id, self.path, push_branches=True,
                        enable_hooks=True)

                    # maybe we also need to push the close_commit_id
                    if close_commit_id:
                        shadow_repo_with_hooks._local_push(
                            close_commit_id, self.path, push_branches=True,
                            enable_hooks=True)
                    merge_succeeded = True
                except RepositoryError:
                    log.exception(
                        'Failure when doing local push from the shadow '
                        'repository to the target repository at %s.', self.path)
                    merge_succeeded = False
                    merge_failure_reason = MergeFailureReason.PUSH_FAILED
                    metadata['target'] = 'hg shadow repo'
                    metadata['merge_commit'] = merge_commit_id
            else:
                merge_succeeded = True
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)

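    # Hooks are cleared from the config by default so that internal shadow
    # repo operations (pulls, merge commits) do not fire RhodeCode hooks;
    # only the final push back to origin runs with hooks enabled.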
    def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
        config = self.config.copy()
        if not enable_hooks:
            config.clear_section('hooks')
        return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})

    def _validate_pull_reference(self, reference):
        if not (reference.name in self.bookmarks or
                reference.name in self.branches or
                self.get_commit(reference.commit_id)):
            raise CommitDoesNotExistError(
                'Unknown branch, bookmark or commit id')

    def _local_pull(self, repository_path, reference):
        """
        Fetch a branch, bookmark or commit from a local repository.
        """
        repository_path = os.path.abspath(repository_path)
        if repository_path == self.path:
            raise ValueError('Cannot pull from the same repository')

        reference_type_to_option_name = {
            'book': 'bookmark',
            'branch': 'branch',
        }
        option_name = reference_type_to_option_name.get(
            reference.type, 'revision')
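        # For example, a Reference('book', 'pr-merge', '<hash>') turns into
        # pull_cmd(..., bookmark=['pr-merge']), while any other ref type falls
        # back to pulling by revision hash.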

        if option_name == 'revision':
            ref = reference.commit_id
        else:
            ref = reference.name

        options = {option_name: [ref]}
        self._remote.pull_cmd(repository_path, hooks=False, **options)
        self._remote.invalidate_vcs_cache()

    def bookmark(self, bookmark, revision=None):
        if isinstance(bookmark, str):
            bookmark = safe_str(bookmark)
        self._remote.bookmark(bookmark, revision=revision)
        self._remote.invalidate_vcs_cache()

    def get_path_permissions(self, username):
        hgacl_file = os.path.join(self.path, '.hg/hgacl')

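        # Lookup order: [narrowacl] user, [narrowacl] default,
        # [narrowhgacl] user, [narrowhgacl] default; the first value found
        # wins. Literal (non-glob) patterns also match their subtree, since
        # 'pattern/*' is appended for them below.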
        def read_patterns(suffix):
            svalue = None
            for section, option in [
                    ('narrowacl', username + suffix),
                    ('narrowacl', 'default' + suffix),
                    ('narrowhgacl', username + suffix),
                    ('narrowhgacl', 'default' + suffix)
                    ]:
                try:
                    svalue = hgacl.get(section, option)
                    break  # stop at the first value we find
                except configparser.NoOptionError:
                    pass
            if not svalue:
                return None
            result = ['/']
            for pattern in svalue.split():
                result.append(pattern)
                if '*' not in pattern and '?' not in pattern:
                    result.append(pattern + '/*')
            return result

        if os.path.exists(hgacl_file):
            try:
                hgacl = configparser.RawConfigParser()
                hgacl.read(hgacl_file)

                includes = read_patterns('.includes')
                excludes = read_patterns('.excludes')
                return BasePathPermissionChecker.create_from_patterns(
                    includes, excludes)
            except BaseException as e:
                msg = 'Cannot read ACL settings from {} on {}: {}'.format(
                    hgacl_file, self.name, e)
                raise exceptions.RepositoryRequirementError(msg)
        else:
            return None


class MercurialIndexBasedCollectionGenerator(CollectionGenerator):

    def _commit_factory(self, commit_id):
        if isinstance(commit_id, int):
            return self.repo.get_commit(
                commit_idx=commit_id, pre_load=self.pre_load)
        else:
            return self.repo.get_commit(
                commit_id=commit_id, pre_load=self.pre_load)
@@ -1,1044 +1,1044 b''
# Copyright (C) 2010-2023 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
Scm model for RhodeCode
"""

import os.path
import traceback
import logging
import io

from sqlalchemy import func
from zope.cachedescriptors.property import Lazy as LazyProperty

import rhodecode
from rhodecode.lib.str_utils import safe_bytes
from rhodecode.lib.vcs import get_backend
from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
from rhodecode.lib.vcs.nodes import FileNode
from rhodecode.lib.vcs.backends.base import EmptyCommit
from rhodecode.lib import helpers as h, rc_cache
from rhodecode.lib.auth import (
    HasRepoPermissionAny, HasRepoGroupPermissionAny,
    HasUserGroupPermissionAny)
from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
from rhodecode.lib import hooks_utils
from rhodecode.lib.utils import (
    get_filesystem_repos, make_db_config)
from rhodecode.lib.str_utils import safe_str
from rhodecode.lib.system_info import get_system_info
from rhodecode.model import BaseModel
from rhodecode.model.db import (
    or_, false, null,
    Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
    PullRequest, FileStore)
from rhodecode.model.settings import VcsSettingsModel
from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl

log = logging.getLogger(__name__)


class UserTemp(object):
    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        return "<{}('id:{}')>".format(self.__class__.__name__, self.user_id)


class RepoTemp(object):
    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        return "<{}('id:{}')>".format(self.__class__.__name__, self.repo_id)


class SimpleCachedRepoList(object):
    """
    Lighter version of repo iteration, without the scm initialisation
    and with cache usage
    """
    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        self.reversed = (order_by or '').startswith('-')
        if not perm_set:
            perm_set = ['repository.read', 'repository.write',
                        'repository.admin']
        self.perm_set = perm_set

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<{} ({})>'.format(self.__class__.__name__, self.__len__())

    def __iter__(self):
        for dbr in self.db_repo_list:
            # check permission at this level
            has_perm = HasRepoPermissionAny(*self.perm_set)(
                dbr.repo_name, 'SimpleCachedRepoList check')
            if not has_perm:
                continue

            tmp_d = {
                'name': dbr.repo_name,
                'dbrepo': dbr.get_dict(),
                'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
            }
            yield tmp_d


class _PermCheckIterator(object):

    def __init__(
            self, obj_list, obj_attr, perm_set, perm_checker,
            extra_kwargs=None):
        """
        Creates an iterator from the given list of objects, additionally
        checking permissions for each of them against perm_set

        :param obj_list: list of db objects
        :param obj_attr: attribute of object to pass into perm_checker
        :param perm_set: list of permissions to check
        :param perm_checker: callable to check permissions against
        """
        self.obj_list = obj_list
        self.obj_attr = obj_attr
        self.perm_set = perm_set
        self.perm_checker = perm_checker(*self.perm_set)
        self.extra_kwargs = extra_kwargs or {}

    def __len__(self):
        return len(self.obj_list)

    def __repr__(self):
        return '<{} ({})>'.format(self.__class__.__name__, self.__len__())

    def __iter__(self):
        for db_obj in self.obj_list:
            # check permission at this level
            # NOTE(marcink): the __dict__.get() is ~4x faster than getattr()
            name = db_obj.__dict__.get(self.obj_attr, None)
            if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
                continue

            yield db_obj


class RepoList(_PermCheckIterator):

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        if not perm_set:
            perm_set = ['repository.read', 'repository.write', 'repository.admin']

        super().__init__(
            obj_list=db_repo_list,
            obj_attr='_repo_name', perm_set=perm_set,
            perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)


class RepoGroupList(_PermCheckIterator):

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        if not perm_set:
            perm_set = ['group.read', 'group.write', 'group.admin']

        super().__init__(
            obj_list=db_repo_group_list,
            obj_attr='_group_name', perm_set=perm_set,
            perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)


class UserGroupList(_PermCheckIterator):

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        if not perm_set:
            perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']

        super().__init__(
            obj_list=db_user_group_list,
            obj_attr='users_group_name', perm_set=perm_set,
            perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)


class ScmModel(BaseModel):
    """
    Generic Scm Model
    """

    @LazyProperty
    def repos_path(self):
        """
        Gets the repositories root path from the database
        """

        settings_model = VcsSettingsModel(sa=self.sa)
        return settings_model.get_repos_location()

    def repo_scan(self, repos_path=None):
        """
        Listing of repositories in the given path. This path should not be a
        repository itself. Returns a dictionary of repository objects.

        :param repos_path: path to directory containing repositories
        """

        if repos_path is None:
            repos_path = self.repos_path

        log.info('scanning for repositories in %s', repos_path)

        config = make_db_config()
        config.set('extensions', 'largefiles', '')
        repos = {}

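        # each `path` yielded below is a (backend_alias, repo_path) tuple, so
        # path[0] selects the backend and path[1] is the location on disk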
        for name, path in get_filesystem_repos(repos_path, recursive=True):
            # names need to be decomposed and put back together using the /
            # since this is the internal storage separator for rhodecode
            name = Repository.normalize_repo_name(name)

            try:
                if name in repos:
                    raise RepositoryError('Duplicate repository name %s '
                                          'found in %s' % (name, path))
                elif path[0] in rhodecode.BACKENDS:
                    backend = get_backend(path[0])
                    repos[name] = backend(path[1], config=config,
                                          with_wire={"cache": False})
            except OSError:
                continue
            except RepositoryError:
                log.exception('Failed to create a repo')
                continue

        log.debug('found %s paths with repositories', len(repos))
        return repos

    def get_repos(self, all_repos=None, sort_key=None):
        """
        Get all repositories from the db and for each repo create its
        backend instance and fill that backend with information from database

        :param all_repos: list of repository names as strings
            give specific repositories list, good for filtering

        :param sort_key: initial sorting of repositories
        """
        if all_repos is None:
            all_repos = self.sa.query(Repository)\
                .filter(Repository.group_id == null())\
                .order_by(func.lower(Repository.repo_name)).all()
        repo_iter = SimpleCachedRepoList(
            all_repos, repos_path=self.repos_path, order_by=sort_key)
        return repo_iter

    def get_repo_groups(self, all_groups=None):
        if all_groups is None:
            all_groups = RepoGroup.query()\
                .filter(RepoGroup.group_parent_id == null()).all()
        return [x for x in RepoGroupList(all_groups)]

    def mark_for_invalidation(self, repo_name, delete=False):
        """
        Mark caches of this repo invalid in the database. The `delete` flag
        removes the cache entries instead.

        :param repo_name: the repo_name for which caches should be marked
            invalid, or deleted
        :param delete: delete the entry keys instead of setting bool
            flag on them, and also purge caches used by the dogpile
        """
        repo = Repository.get_by_repo_name(repo_name)

        if repo:
            invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
                repo_id=repo.repo_id)
            CacheKey.set_invalidate(invalidation_namespace, delete=delete)

            repo_id = repo.repo_id
            config = repo._config
            config.set('extensions', 'largefiles', '')
            repo.update_commit_cache(config=config, cs_cache=None)
            if delete:
                cache_namespace_uid = f'cache_repo.{repo_id}'
                rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid, method=rc_cache.CLEAR_INVALIDATE)

    def toggle_following_repo(self, follow_repo_id, user_id):

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repo_id == follow_repo_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                self.sa.delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_repo_id = follow_repo_id
            self.sa.add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def toggle_following_user(self, follow_user_id, user_id):
        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user_id == follow_user_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                self.sa.delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_user_id = follow_user_id
            self.sa.add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def is_following_repo(self, repo_name, user_id, cache=False):
        r = self.sa.query(Repository)\
            .filter(Repository.repo_name == repo_name).scalar()

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == r)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None

    def is_following_user(self, username, user_id, cache=False):
        u = User.get_by_username(username)

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user == u)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None

    def get_followers(self, repo):
        repo = self._get_repo(repo)

        return self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == repo).count()

    def get_forks(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(Repository)\
            .filter(Repository.fork == repo).count()

    def get_pull_requests(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(PullRequest)\
            .filter(PullRequest.target_repo == repo)\
            .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()

    def get_artifacts(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(FileStore)\
            .filter(FileStore.repo == repo)\
            .filter(or_(FileStore.hidden == null(), FileStore.hidden == false())).count()

    def mark_as_fork(self, repo, fork, user):
        repo = self._get_repo(repo)
        fork = self._get_repo(fork)
        if fork and repo.repo_id == fork.repo_id:
            raise Exception("Cannot set repository as fork of itself")

        if fork and repo.repo_type != fork.repo_type:
            raise RepositoryError(
                "Cannot set repository as fork of repository with other type")

        repo.fork = fork
        self.sa.add(repo)
        return repo

    def pull_changes(self, repo, username, remote_uri=None, validate_uri=True, **kwargs):
        dbrepo = self._get_repo(repo)
        remote_uri = remote_uri or dbrepo.clone_uri
        if not remote_uri:
            raise Exception("This repository doesn't have a clone uri")

        repo = dbrepo.scm_instance(cache=False)
        repo.config.clear_section('hooks')

        try:
            # NOTE(marcink): add extra validation so we skip invalid urls;
            # this is because these tasks can be executed via scheduler
            # without proper validation of remote_uri
            if validate_uri:
                config = make_db_config(clear_session=False)
                url_validator(remote_uri, dbrepo.repo_type, config)
        except InvalidCloneUrl:
            raise

        repo_name = dbrepo.repo_name
        try:
            # TODO: we need to make sure those operations call proper hooks!
            repo.fetch(remote_uri, **kwargs)

            self.mark_for_invalidation(repo_name)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def push_changes(self, repo, username, remote_uri=None, validate_uri=True, **kwargs):
        dbrepo = self._get_repo(repo)
        remote_uri = remote_uri or dbrepo.push_uri
        if not remote_uri:
            raise Exception("This repository doesn't have a push uri")

        repo = dbrepo.scm_instance(cache=False)
        repo.config.clear_section('hooks')

        try:
            # NOTE(marcink): add extra validation so we skip invalid urls;
            # this is because these tasks can be executed via scheduler
            # without proper validation of remote_uri
            if validate_uri:
                config = make_db_config(clear_session=False)
                url_validator(remote_uri, dbrepo.repo_type, config)
        except InvalidCloneUrl:
            raise

        try:
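            # any extra keyword arguments are forwarded to the backend's push
            # (per this changeset, to support `git lfs push --all <GIT-URL>`
            # style pushes)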
            repo.push(remote_uri, **kwargs)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def commit_change(self, repo, repo_name, commit, user, author, message,
                      content: bytes, f_path: bytes, branch: str = None):
        """
        Commits changes
        """
        user = self._get_user(user)

        # message and author need to be unicode;
        # the proper backend should then translate that into the required type
        message = safe_str(message)
        author = safe_str(author)
        imc = repo.in_memory_commit
        imc.change(FileNode(f_path, content, mode=commit.get_file_mode(f_path)))
        try:
            # TODO: handle pre-push action!
            tip = imc.commit(
                message=message, author=author, parents=[commit],
                branch=branch or commit.branch)
        except Exception as e:
            log.error(traceback.format_exc())
            raise IMCCommitError(str(e))
        finally:
            # always clear caches; if the commit fails we still want a fresh object
            self.mark_for_invalidation(repo_name)

        # We trigger the post-push action
        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local', hook_type='post_push',
            repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
        return tip

    def _sanitize_path(self, f_path: bytes):
        if f_path.startswith(b'/') or f_path.startswith(b'./') or b'../' in f_path:
            raise NonRelativePathError(b'%b is not a relative path' % f_path)
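        # e.g. b'/etc/passwd', b'./x' and b'a/../b' are all rejected, while
        # b'docs/readme.md' passes through (normalized below)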
478 if f_path:
478 if f_path:
479 f_path = os.path.normpath(f_path)
479 f_path = os.path.normpath(f_path)
480 return f_path
480 return f_path
481
481
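    # Behaviour sketch for _sanitize_path (illustrative paths, not a test suite):
    #
    #   self._sanitize_path(b'docs/readme.rst')     # -> b'docs/readme.rst'
    #   self._sanitize_path(b'docs//./readme.rst')  # -> b'docs/readme.rst' (normpath)
    #   self._sanitize_path(b'/etc/passwd')         # raises NonRelativePathError
    #   self._sanitize_path(b'a/../../etc')         # raises NonRelativePathError
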
    def get_dirnode_metadata(self, request, commit, dir_node):
        if not dir_node.is_dir():
            return []

        data = []
        for node in dir_node:
            if not node.is_file():
                # we skip non-file nodes
                continue

            last_commit = node.last_commit
            last_commit_date = last_commit.date
            data.append({
                'name': node.name,
                'size': h.format_byte_size_binary(node.size),
                'modified_at': h.format_date(last_commit_date),
                'modified_ts': last_commit_date.isoformat(),
                'revision': last_commit.revision,
                'short_id': last_commit.short_id,
                'message': h.escape(last_commit.message),
                'author': h.escape(last_commit.author),
                'user_profile': h.gravatar_with_user(
                    request, last_commit.author),
            })

        return data

    def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
                  extended_info=False, content=False, max_file_bytes=None):
        """
        Recursively walk the root dir and return all paths found there,
        based on the repository walk function.

        :param repo_name: name of repository
        :param commit_id: commit id for which to list nodes
        :param root_path: root path to list
        :param flat: return as a list; if False returns a dict with description
        :param extended_info: show additional info such as md5, binary, size etc
        :param content: add nodes content to the returned data
        :param max_file_bytes: will not return file contents over this limit

        """
        _files = list()
        _dirs = list()

        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')

            # get RootNode, inject pre-load options before walking
            top_node = commit.get_node(root_path)
            extended_info_pre_load = []
            if extended_info:
                extended_info_pre_load += ['md5']
            top_node.default_pre_load = ['is_binary', 'size'] + extended_info_pre_load

            for __, dirs, files in commit.walk(top_node):

                for f in files:
                    _content = None
                    _data = f_name = f.str_path

                    if not flat:
                        _data = {
                            "name": h.escape(f_name),
                            "type": "file",
                        }
                        if extended_info:
                            _data.update({
                                "md5": f.md5,
                                "binary": f.is_binary,
                                "size": f.size,
                                "extension": f.extension,
                                "mimetype": f.mimetype,
                                "lines": f.lines()[0]
                            })

                        if content:
                            over_size_limit = (max_file_bytes is not None
                                               and f.size > max_file_bytes)
                            full_content = None
                            if not f.is_binary and not over_size_limit:
                                full_content = f.str_content

                            _data.update({
                                "content": full_content,
                            })
                    _files.append(_data)

                for d in dirs:
                    _data = d_name = d.str_path
                    if not flat:
                        _data = {
                            "name": h.escape(d_name),
                            "type": "dir",
                        }
                        if extended_info:
                            _data.update({
                                "md5": "",
                                "binary": False,
                                "size": 0,
                                "extension": "",
                            })
                        if content:
                            _data.update({
                                "content": None
                            })
                    _dirs.append(_data)
        except RepositoryError:
            log.exception("Exception in get_nodes")
            raise

        return _dirs, _files

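    # Illustrative call with a hypothetical repo name and commit id; returns
    # the two lists described above (dirs first, then files):
    #
    #   dirs, files = scm_model.get_nodes(
    #       'some-repo', commit_id='tip', root_path='/', flat=False,
    #       extended_info=True, content=False, max_file_bytes=1024 * 1024)
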
    def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
        """
        Generate files for quick filter in files view
        """

        _files = list()
        _dirs = list()
        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')

            top_node = commit.get_node(root_path)
            top_node.default_pre_load = []

            for __, dirs, files in commit.walk(top_node):
                for f in files:
                    _data = {
                        "name": h.escape(f.str_path),
                        "type": "file",
                    }
                    _files.append(_data)

                for d in dirs:
                    _data = {
                        "name": h.escape(d.str_path),
                        "type": "dir",
                    }
                    _dirs.append(_data)
        except RepositoryError:
            log.exception("Exception in get_quick_filter_nodes")
            raise

        return _dirs, _files

    def get_node(self, repo_name, commit_id, file_path,
                 extended_info=False, content=False, max_file_bytes=None, cache=True):
        """
        Retrieve a single node from a commit
        """

        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)

            file_node = commit.get_node(file_path)
            if file_node.is_dir():
                raise RepositoryError('The given path is a directory')

            _content = None
            f_name = file_node.str_path

            file_data = {
                "name": h.escape(f_name),
                "type": "file",
            }

            if extended_info:
                file_data.update({
                    "extension": file_node.extension,
                    "mimetype": file_node.mimetype,
                })

                if cache:
                    md5 = file_node.md5
                    is_binary = file_node.is_binary
                    size = file_node.size
                else:
                    is_binary, md5, size, _content = file_node.metadata_uncached()

                file_data.update({
                    "md5": md5,
                    "binary": is_binary,
                    "size": size,
                })

            if content and cache:
                # get content + cache
                size = file_node.size
                over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
                full_content = None
                all_lines = 0
                if not file_node.is_binary and not over_size_limit:
                    full_content = safe_str(file_node.content)
                    all_lines, empty_lines = file_node.count_lines(full_content)

                file_data.update({
                    "content": full_content,
                    "lines": all_lines
                })
            elif content:
                # get content *without* cache
                if _content is None:
                    is_binary, md5, size, _content = file_node.metadata_uncached()

                over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
                full_content = None
                all_lines = 0
                if not is_binary and not over_size_limit:
                    full_content = safe_str(_content)
                    all_lines, empty_lines = file_node.count_lines(full_content)

                file_data.update({
                    "content": full_content,
                    "lines": all_lines
                })

        except RepositoryError:
            log.exception("Exception in get_node")
            raise

        return file_data

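    # Illustrative call with hypothetical values; cache=False forces a
    # metadata_uncached() read instead of using the cached node properties:
    #
    #   file_data = scm_model.get_node(
    #       'some-repo', commit_id='tip', file_path='README.rst',
    #       extended_info=True, content=True, max_file_bytes=None, cache=False)
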
    def get_fts_data(self, repo_name, commit_id, root_path='/'):
        """
        Fetch node tree for usage in full text search
        """

        tree_info = list()

        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')
            top_node = commit.get_node(root_path)
            top_node.default_pre_load = []

            for __, dirs, files in commit.walk(top_node):

                for f in files:
                    is_binary, md5, size, _content = f.metadata_uncached()
                    _data = {
                        "name": f.str_path,
                        "md5": md5,
                        "extension": f.extension,
                        "binary": is_binary,
                        "size": size
                    }

                    tree_info.append(_data)

        except RepositoryError:
            log.exception("Exception in get_fts_data")
            raise

        return tree_info

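    # Sketch of the structure returned for the full text search indexer
    # (hypothetical values):
    #
    #   [{'name': 'README.rst', 'md5': '1b2cf535f27731c974343645a3985328',
    #     'extension': 'rst', 'binary': False, 'size': 1024}, ...]
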
    def create_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Commits multiple given nodes into `repo`

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty; then it's
            an initial commit
        :param author: author of the commit, can be different than the
            committer, git only
        :param trigger_push_hook: trigger push hooks

        :returns: new committed commit
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        message = safe_str(message)
        committer = user.full_contact
        author = safe_str(author) if author else committer

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]

        upload_file_types = (io.BytesIO, io.BufferedRandom)
        processed_nodes = []
        for filename, content_dict in nodes.items():
            if not isinstance(filename, bytes):
                raise ValueError('filename key in nodes needs to be bytes')
            content = content_dict['content']
            if not isinstance(content, upload_file_types + (bytes,)):
                raise ValueError(f'content key value in nodes needs to be bytes or one of {upload_file_types}')

        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            content = nodes[f_path]['content']

            # decoding here ensures we have properly encoded values;
            # in any other case this will raise an exception and deny the commit

            if isinstance(content, bytes):
                pass
            elif isinstance(content, upload_file_types):
                content = content.read()
            else:
                raise Exception(f'Content is of unrecognized type {type(content)}, expected bytes or one of {upload_file_types}')
            processed_nodes.append((f_path, content))

        # add multiple nodes
        for path, content in processed_nodes:
            imc.add(FileNode(path, content=content))

        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_type=scm_instance.alias,
                hook_type='post_push',
                commit_ids=[tip.raw_id])
        return tip

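    # Hedged usage sketch with hypothetical objects; filename keys must be
    # bytes, and content must be bytes or a supported file-like object:
    #
    #   tip = scm_model.create_nodes(
    #       user=request_user, repo=db_repo, message='add docs',
    #       nodes={b'docs/index.rst': {'content': b'index\n'}},
    #       trigger_push_hook=False)
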
    def update_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        message = safe_str(message)
        committer = user.full_contact
        author = safe_str(author) if author else committer

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]

        # add multiple nodes
        for _filename, data in nodes.items():
            # new filename, can be renamed from the old one; also sanitize
            # the path against any tricks with relative paths like ../../ etc.
            filename = self._sanitize_path(data['filename'])
            old_filename = self._sanitize_path(_filename)
            content = data['content']
            file_mode = data.get('mode')
            filenode = FileNode(old_filename, content=content, mode=file_mode)
            op = data['op']
            if op == 'add':
                imc.add(filenode)
            elif op == 'del':
                imc.remove(filenode)
            elif op == 'mod':
                if filename != old_filename:
                    # TODO: handle renames more efficiently, needs vcs lib changes
                    imc.remove(filenode)
                    imc.add(FileNode(filename, content=content, mode=file_mode))
                else:
                    imc.change(filenode)

        try:
            # TODO: handle pre push scenario commit changes
            tip = imc.commit(message=message,
                             author=author,
                             parents=parents,
                             branch=parent_commit.branch)
        except NodeNotChangedError:
            raise
        except Exception as e:
            log.exception("Unexpected exception during call to imc.commit")
            raise IMCCommitError(str(e))
        finally:
            # always clear caches; if the commit fails we want a fresh object too
            self.mark_for_invalidation(repo.repo_name)

        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local', hook_type='post_push',
                repo_name=repo.repo_name, repo_type=scm_instance.alias,
                commit_ids=[tip.raw_id])

        return tip

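    # Hedged sketch of the nodes mapping consumed by update_nodes (values are
    # hypothetical); 'op' is one of 'add', 'del' or 'mod', and a changed
    # 'filename' triggers the remove-and-add rename path above:
    #
    #   nodes = {
    #       b'old_name.rst': {'filename': b'new_name.rst', 'op': 'mod',
    #                         'content': b'updated\n', 'mode': None},
    #   }
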
    def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Deletes multiple given nodes from `repo`

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty; then it's an initial
            commit
        :param author: author of the commit, can be different than the
            committer, git only
        :param trigger_push_hook: trigger push hooks

        :returns: new commit after deletion
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            # content can be empty, but for compatibility this accepts the
            # same dict structure as add_nodes
            content = nodes[f_path].get('content')
            processed_nodes.append((safe_bytes(f_path), content))

        message = safe_str(message)
        committer = user.full_contact
        author = safe_str(author) if author else committer

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]
        # remove multiple nodes
        for path, content in processed_nodes:
            imc.remove(FileNode(path, content=content))

        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local', hook_type='post_push',
                repo_name=repo.repo_name, repo_type=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip

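    # Hedged usage sketch with hypothetical objects; the per-file dict may be
    # empty, since only the optional 'content' key is read here:
    #
    #   tip = scm_model.delete_nodes(
    #       user=request_user, repo=db_repo, message='remove obsolete file',
    #       nodes={b'docs/old.rst': {}})
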
    def strip(self, repo, commit_id, branch):
        scm_instance = repo.scm_instance(cache=False)
        scm_instance.config.clear_section('hooks')
        scm_instance.strip(commit_id, branch)
        self.mark_for_invalidation(repo.repo_name)

    def get_unread_journal(self):
        return self.sa.query(UserLog).count()

    @classmethod
    def backend_landing_ref(cls, repo_type):
        """
        Return a default landing ref based on a repository type.
        """

        landing_ref = {
            'hg': ('branch:default', 'default'),
            'git': ('branch:master', 'master'),
            'svn': ('rev:tip', 'latest tip'),
            'default': ('rev:tip', 'latest tip'),
        }

        return landing_ref.get(repo_type) or landing_ref['default']

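    # For example (the enclosing model class is assumed to be named ScmModel):
    #
    #   ScmModel.backend_landing_ref('git')      # -> ('branch:master', 'master')
    #   ScmModel.backend_landing_ref('unknown')  # -> ('rev:tip', 'latest tip')
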
    def get_repo_landing_revs(self, translator, repo=None):
        """
        Generates select options with tags, branches, and bookmarks (hg only)
        grouped by type

        :param repo:
        """
        from rhodecode.lib.vcs.backends.git import GitRepository

        _ = translator
        repo = self._get_repo(repo)

        if repo:
            repo_type = repo.repo_type
        else:
            repo_type = 'default'

        default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)

        default_ref_options = [
            [default_landing_ref, landing_ref_lbl]
        ]
        default_choices = [
            default_landing_ref
        ]

        if not repo:
            # presented at NEW repo creation
            return default_choices, default_ref_options

        repo = repo.scm_instance()

        ref_options = [(default_landing_ref, landing_ref_lbl)]
        choices = [default_landing_ref]

        # branches
        branch_group = [(f'branch:{safe_str(b)}', safe_str(b)) for b in repo.branches]
        if not branch_group:
            # new repo, or maybe a repo without any branches?
            branch_group = default_ref_options

        branches_group = (branch_group, _("Branches"))
        ref_options.append(branches_group)
        choices.extend([x[0] for x in branches_group[0]])

        # bookmarks for HG
        if repo.alias == 'hg':
            bookmarks_group = (
                [(f'book:{safe_str(b)}', safe_str(b))
                 for b in repo.bookmarks],
                _("Bookmarks"))
            ref_options.append(bookmarks_group)
            choices.extend([x[0] for x in bookmarks_group[0]])

        # tags
        tags_group = (
            [(f'tag:{safe_str(t)}', safe_str(t))
             for t in repo.tags],
            _("Tags"))
        ref_options.append(tags_group)
        choices.extend([x[0] for x in tags_group[0]])

        return choices, ref_options

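    # Illustrative return shape for a git repo with a 'develop' branch and a
    # 'v1.0' tag (hypothetical values; the default landing ref comes first):
    #
    #   choices = ['branch:master', 'branch:develop', 'tag:v1.0']
    #   ref_options = [('branch:master', 'master'),
    #                  ([('branch:develop', 'develop')], 'Branches'),
    #                  ([('tag:v1.0', 'v1.0')], 'Tags')]
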
    def get_server_info(self, environ=None):
        server_info = get_system_info(environ)
        return server_info