git/hg: skip double caching with bulk request calls.
Author: super-admin
Revision: r1075:8fc1778b (branch: python3)
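The functional change in this diff is a single added line inside _bulk_request (new line 327): the wire dict gets 'cache': False before each per-attribute call. The aggregated bulk result is already cached once by the region decorator, so the inner methods (date, author, branch, message, parents, _commit) should not cache the same data a second time. Below is a self-contained toy sketch of that idea; the conditional_cache decorator, the wire layout, and ToyRemote are simplified assumptions for illustration and do not reproduce the real vcsserver/dogpile cache machinery.

    # Toy illustration only: conditional_cache and ToyRemote are assumptions,
    # not the actual vcsserver RemoteBase / dogpile cache implementation.
    CACHE = {}

    def conditional_cache(condition):
        """Cache the wrapped call in CACHE only when `condition` is true."""
        def decorator(func):
            def wrapper(*args):
                if not condition:
                    return func(*args)
                key = (func.__name__, args)
                if key not in CACHE:
                    CACHE[key] = func(*args)
                return CACHE[key]
            return wrapper
        return decorator

    class ToyRemote:
        def __init__(self):
            self._bulk_methods = {'date': self.date, 'author': self.author}

        def _cache_on(self, wire):
            # assumption: caching is enabled unless the wire carries cache=False
            return wire.get('cache', True)

        def date(self, wire, rev):
            @conditional_cache(self._cache_on(wire))
            def _date(_rev):
                return f'date-of-{_rev}'
            return _date(rev)

        def author(self, wire, rev):
            @conditional_cache(self._cache_on(wire))
            def _author(_rev):
                return f'author-of-{_rev}'
            return _author(rev)

        def bulk_request(self, wire, rev, pre_load):
            @conditional_cache(self._cache_on(wire))
            def _bulk_request(_rev, _pre_load):
                result = {}
                for attr in _pre_load:
                    method = self._bulk_methods[attr]
                    # the decorator above caches the whole result dict once,
                    # so switch off caching for the inner per-attribute calls
                    wire.update({'cache': False})
                    result[attr] = method(wire, _rev)
                return result
            return _bulk_request(rev, tuple(sorted(pre_load)))

    remote = ToyRemote()
    remote.bulk_request({'cache': True}, 'abc123', ['date', 'author'])
    print(len(CACHE))  # 1 -- only the aggregated bulk entry is stored

Delete the wire.update(...) line from the toy and the same run leaves three entries in CACHE: the bulk result plus duplicate date and author entries, which is exactly the double caching the commit message refers to.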
@@ -1,1343 +1,1344 @@
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import collections
18 import collections
19 import logging
19 import logging
20 import os
20 import os
21 import posixpath as vcspath
21 import posixpath as vcspath
22 import re
22 import re
23 import stat
23 import stat
24 import traceback
24 import traceback
25 import urllib.request, urllib.parse, urllib.error
25 import urllib.request, urllib.parse, urllib.error
26 import urllib.request, urllib.error, urllib.parse
26 import urllib.request, urllib.error, urllib.parse
27 from functools import wraps
27 from functools import wraps
28
28
29 import more_itertools
29 import more_itertools
30 import pygit2
30 import pygit2
31 from pygit2 import Repository as LibGit2Repo
31 from pygit2 import Repository as LibGit2Repo
32 from pygit2 import index as LibGit2Index
32 from pygit2 import index as LibGit2Index
33 from dulwich import index, objects
33 from dulwich import index, objects
34 from dulwich.client import HttpGitClient, LocalGitClient
34 from dulwich.client import HttpGitClient, LocalGitClient
35 from dulwich.errors import (
35 from dulwich.errors import (
36 NotGitRepository, ChecksumMismatch, WrongObjectException,
36 NotGitRepository, ChecksumMismatch, WrongObjectException,
37 MissingCommitError, ObjectMissing, HangupException,
37 MissingCommitError, ObjectMissing, HangupException,
38 UnexpectedCommandError)
38 UnexpectedCommandError)
39 from dulwich.repo import Repo as DulwichRepo
39 from dulwich.repo import Repo as DulwichRepo
40 from dulwich.server import update_server_info
40 from dulwich.server import update_server_info
41
41
42 from vcsserver import exceptions, settings, subprocessio
42 from vcsserver import exceptions, settings, subprocessio
43 from vcsserver.str_utils import safe_str, safe_int, safe_bytes
43 from vcsserver.str_utils import safe_str, safe_int, safe_bytes
44 from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, archive_repo
44 from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, archive_repo
45 from vcsserver.hgcompat import (
45 from vcsserver.hgcompat import (
46 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
46 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
47 from vcsserver.git_lfs.lib import LFSOidStore
47 from vcsserver.git_lfs.lib import LFSOidStore
48 from vcsserver.vcs_base import RemoteBase
48 from vcsserver.vcs_base import RemoteBase
49
49
50 DIR_STAT = stat.S_IFDIR
50 DIR_STAT = stat.S_IFDIR
51 FILE_MODE = stat.S_IFMT
51 FILE_MODE = stat.S_IFMT
52 GIT_LINK = objects.S_IFGITLINK
52 GIT_LINK = objects.S_IFGITLINK
53 PEELED_REF_MARKER = b'^{}'
53 PEELED_REF_MARKER = b'^{}'
54
54
55
55
56 log = logging.getLogger(__name__)
56 log = logging.getLogger(__name__)
57
57
58
58
59 def reraise_safe_exceptions(func):
59 def reraise_safe_exceptions(func):
60 """Converts Dulwich exceptions to something neutral."""
60 """Converts Dulwich exceptions to something neutral."""
61
61
62 @wraps(func)
62 @wraps(func)
63 def wrapper(*args, **kwargs):
63 def wrapper(*args, **kwargs):
64 try:
64 try:
65 return func(*args, **kwargs)
65 return func(*args, **kwargs)
66 except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
66 except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
67 exc = exceptions.LookupException(org_exc=e)
67 exc = exceptions.LookupException(org_exc=e)
68 raise exc(safe_str(e))
68 raise exc(safe_str(e))
69 except (HangupException, UnexpectedCommandError) as e:
69 except (HangupException, UnexpectedCommandError) as e:
70 exc = exceptions.VcsException(org_exc=e)
70 exc = exceptions.VcsException(org_exc=e)
71 raise exc(safe_str(e))
71 raise exc(safe_str(e))
72 except Exception as e:
72 except Exception as e:
73 # NOTE(marcink): because of how dulwich handles some exceptions
73 # NOTE(marcink): because of how dulwich handles some exceptions
74 # (KeyError on empty repos), we cannot track this and catch all
74 # (KeyError on empty repos), we cannot track this and catch all
75 # exceptions; these are exceptions from other handlers
75 # exceptions; these are exceptions from other handlers
76 #if not hasattr(e, '_vcs_kind'):
76 #if not hasattr(e, '_vcs_kind'):
77 #log.exception("Unhandled exception in git remote call")
77 #log.exception("Unhandled exception in git remote call")
78 #raise_from_original(exceptions.UnhandledException)
78 #raise_from_original(exceptions.UnhandledException)
79 raise
79 raise
80 return wrapper
80 return wrapper
81
81
82
82
83 class Repo(DulwichRepo):
83 class Repo(DulwichRepo):
84 """
84 """
85 A wrapper for dulwich Repo class.
85 A wrapper for dulwich Repo class.
86
86
87 Since dulwich sometimes keeps .idx file descriptors open, this can lead to a
87 Since dulwich sometimes keeps .idx file descriptors open, this can lead to a
88 "Too many open files" error. We need to close all opened file descriptors
88 "Too many open files" error. We need to close all opened file descriptors
89 once the repo object is destroyed.
89 once the repo object is destroyed.
90 """
90 """
91 def __del__(self):
91 def __del__(self):
92 if hasattr(self, 'object_store'):
92 if hasattr(self, 'object_store'):
93 self.close()
93 self.close()
94
94
95
95
96 class Repository(LibGit2Repo):
96 class Repository(LibGit2Repo):
97
97
98 def __enter__(self):
98 def __enter__(self):
99 return self
99 return self
100
100
101 def __exit__(self, exc_type, exc_val, exc_tb):
101 def __exit__(self, exc_type, exc_val, exc_tb):
102 self.free()
102 self.free()
103
103
104
104
105 class GitFactory(RepoFactory):
105 class GitFactory(RepoFactory):
106 repo_type = 'git'
106 repo_type = 'git'
107
107
108 def _create_repo(self, wire, create, use_libgit2=False):
108 def _create_repo(self, wire, create, use_libgit2=False):
109 if use_libgit2:
109 if use_libgit2:
110 return Repository(wire['path'])
110 return Repository(wire['path'])
111 else:
111 else:
112 repo_path = safe_str(wire['path'], to_encoding=settings.WIRE_ENCODING)
112 repo_path = safe_str(wire['path'], to_encoding=settings.WIRE_ENCODING)
113 return Repo(repo_path)
113 return Repo(repo_path)
114
114
115 def repo(self, wire, create=False, use_libgit2=False):
115 def repo(self, wire, create=False, use_libgit2=False):
116 """
116 """
117 Get a repository instance for the given path.
117 Get a repository instance for the given path.
118 """
118 """
119 return self._create_repo(wire, create, use_libgit2)
119 return self._create_repo(wire, create, use_libgit2)
120
120
121 def repo_libgit2(self, wire):
121 def repo_libgit2(self, wire):
122 return self.repo(wire, use_libgit2=True)
122 return self.repo(wire, use_libgit2=True)
123
123
124
124
125 class GitRemote(RemoteBase):
125 class GitRemote(RemoteBase):
126
126
127 def __init__(self, factory):
127 def __init__(self, factory):
128 self._factory = factory
128 self._factory = factory
129 self._bulk_methods = {
129 self._bulk_methods = {
130 "date": self.date,
130 "date": self.date,
131 "author": self.author,
131 "author": self.author,
132 "branch": self.branch,
132 "branch": self.branch,
133 "message": self.message,
133 "message": self.message,
134 "parents": self.parents,
134 "parents": self.parents,
135 "_commit": self.revision,
135 "_commit": self.revision,
136 }
136 }
137
137
138 def _wire_to_config(self, wire):
138 def _wire_to_config(self, wire):
139 if 'config' in wire:
139 if 'config' in wire:
140 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
140 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
141 return {}
141 return {}
142
142
143 def _remote_conf(self, config):
143 def _remote_conf(self, config):
144 params = [
144 params = [
145 '-c', 'core.askpass=""',
145 '-c', 'core.askpass=""',
146 ]
146 ]
147 ssl_cert_dir = config.get('vcs_ssl_dir')
147 ssl_cert_dir = config.get('vcs_ssl_dir')
148 if ssl_cert_dir:
148 if ssl_cert_dir:
149 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
149 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
150 return params
150 return params
151
151
152 @reraise_safe_exceptions
152 @reraise_safe_exceptions
153 def discover_git_version(self):
153 def discover_git_version(self):
154 stdout, _ = self.run_git_command(
154 stdout, _ = self.run_git_command(
155 {}, ['--version'], _bare=True, _safe=True)
155 {}, ['--version'], _bare=True, _safe=True)
156 prefix = b'git version'
156 prefix = b'git version'
157 if stdout.startswith(prefix):
157 if stdout.startswith(prefix):
158 stdout = stdout[len(prefix):]
158 stdout = stdout[len(prefix):]
159 return safe_str(stdout.strip())
159 return safe_str(stdout.strip())
160
160
161 @reraise_safe_exceptions
161 @reraise_safe_exceptions
162 def is_empty(self, wire):
162 def is_empty(self, wire):
163 repo_init = self._factory.repo_libgit2(wire)
163 repo_init = self._factory.repo_libgit2(wire)
164 with repo_init as repo:
164 with repo_init as repo:
165
165
166 try:
166 try:
167 has_head = repo.head.name
167 has_head = repo.head.name
168 if has_head:
168 if has_head:
169 return False
169 return False
170
170
171 # NOTE(marcink): check again using more expensive method
171 # NOTE(marcink): check again using more expensive method
172 return repo.is_empty
172 return repo.is_empty
173 except Exception:
173 except Exception:
174 pass
174 pass
175
175
176 return True
176 return True
177
177
178 @reraise_safe_exceptions
178 @reraise_safe_exceptions
179 def assert_correct_path(self, wire):
179 def assert_correct_path(self, wire):
180 cache_on, context_uid, repo_id = self._cache_on(wire)
180 cache_on, context_uid, repo_id = self._cache_on(wire)
181 region = self._region(wire)
181 region = self._region(wire)
182
182
183 @region.conditional_cache_on_arguments(condition=cache_on)
183 @region.conditional_cache_on_arguments(condition=cache_on)
184 def _assert_correct_path(_context_uid, _repo_id):
184 def _assert_correct_path(_context_uid, _repo_id):
185 try:
185 try:
186 repo_init = self._factory.repo_libgit2(wire)
186 repo_init = self._factory.repo_libgit2(wire)
187 with repo_init as repo:
187 with repo_init as repo:
188 pass
188 pass
189 except pygit2.GitError:
189 except pygit2.GitError:
190 path = wire.get('path')
190 path = wire.get('path')
191 tb = traceback.format_exc()
191 tb = traceback.format_exc()
192 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
192 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
193 return False
193 return False
194
194
195 return True
195 return True
196 return _assert_correct_path(context_uid, repo_id)
196 return _assert_correct_path(context_uid, repo_id)
197
197
198 @reraise_safe_exceptions
198 @reraise_safe_exceptions
199 def bare(self, wire):
199 def bare(self, wire):
200 repo_init = self._factory.repo_libgit2(wire)
200 repo_init = self._factory.repo_libgit2(wire)
201 with repo_init as repo:
201 with repo_init as repo:
202 return repo.is_bare
202 return repo.is_bare
203
203
204 @reraise_safe_exceptions
204 @reraise_safe_exceptions
205 def blob_as_pretty_string(self, wire, sha):
205 def blob_as_pretty_string(self, wire, sha):
206 repo_init = self._factory.repo_libgit2(wire)
206 repo_init = self._factory.repo_libgit2(wire)
207 with repo_init as repo:
207 with repo_init as repo:
208 blob_obj = repo[sha]
208 blob_obj = repo[sha]
209 blob = blob_obj.data
209 blob = blob_obj.data
210 return blob
210 return blob
211
211
212 @reraise_safe_exceptions
212 @reraise_safe_exceptions
213 def blob_raw_length(self, wire, sha):
213 def blob_raw_length(self, wire, sha):
214 cache_on, context_uid, repo_id = self._cache_on(wire)
214 cache_on, context_uid, repo_id = self._cache_on(wire)
215 region = self._region(wire)
215 region = self._region(wire)
216
216
217 @region.conditional_cache_on_arguments(condition=cache_on)
217 @region.conditional_cache_on_arguments(condition=cache_on)
218 def _blob_raw_length(_repo_id, _sha):
218 def _blob_raw_length(_repo_id, _sha):
219
219
220 repo_init = self._factory.repo_libgit2(wire)
220 repo_init = self._factory.repo_libgit2(wire)
221 with repo_init as repo:
221 with repo_init as repo:
222 blob = repo[sha]
222 blob = repo[sha]
223 return blob.size
223 return blob.size
224
224
225 return _blob_raw_length(repo_id, sha)
225 return _blob_raw_length(repo_id, sha)
226
226
227 def _parse_lfs_pointer(self, raw_content):
227 def _parse_lfs_pointer(self, raw_content):
228 spec_string = b'version https://git-lfs.github.com/spec'
228 spec_string = b'version https://git-lfs.github.com/spec'
229 if raw_content and raw_content.startswith(spec_string):
229 if raw_content and raw_content.startswith(spec_string):
230
230
231 pattern = re.compile(rb"""
231 pattern = re.compile(rb"""
232 (?:\n)?
232 (?:\n)?
233 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
233 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
234 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
234 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
235 ^size[ ](?P<oid_size>[0-9]+)\n
235 ^size[ ](?P<oid_size>[0-9]+)\n
236 (?:\n)?
236 (?:\n)?
237 """, re.VERBOSE | re.MULTILINE)
237 """, re.VERBOSE | re.MULTILINE)
238 match = pattern.match(raw_content)
238 match = pattern.match(raw_content)
239 if match:
239 if match:
240 return match.groupdict()
240 return match.groupdict()
241
241
242 return {}
242 return {}
243
243
244 @reraise_safe_exceptions
244 @reraise_safe_exceptions
245 def is_large_file(self, wire, commit_id):
245 def is_large_file(self, wire, commit_id):
246 cache_on, context_uid, repo_id = self._cache_on(wire)
246 cache_on, context_uid, repo_id = self._cache_on(wire)
247 region = self._region(wire)
247 region = self._region(wire)
248
248
249 @region.conditional_cache_on_arguments(condition=cache_on)
249 @region.conditional_cache_on_arguments(condition=cache_on)
250 def _is_large_file(_repo_id, _sha):
250 def _is_large_file(_repo_id, _sha):
251 repo_init = self._factory.repo_libgit2(wire)
251 repo_init = self._factory.repo_libgit2(wire)
252 with repo_init as repo:
252 with repo_init as repo:
253 blob = repo[commit_id]
253 blob = repo[commit_id]
254 if blob.is_binary:
254 if blob.is_binary:
255 return {}
255 return {}
256
256
257 return self._parse_lfs_pointer(blob.data)
257 return self._parse_lfs_pointer(blob.data)
258
258
259 return _is_large_file(repo_id, commit_id)
259 return _is_large_file(repo_id, commit_id)
260
260
261 @reraise_safe_exceptions
261 @reraise_safe_exceptions
262 def is_binary(self, wire, tree_id):
262 def is_binary(self, wire, tree_id):
263 cache_on, context_uid, repo_id = self._cache_on(wire)
263 cache_on, context_uid, repo_id = self._cache_on(wire)
264 region = self._region(wire)
264 region = self._region(wire)
265
265
266 @region.conditional_cache_on_arguments(condition=cache_on)
266 @region.conditional_cache_on_arguments(condition=cache_on)
267 def _is_binary(_repo_id, _tree_id):
267 def _is_binary(_repo_id, _tree_id):
268 repo_init = self._factory.repo_libgit2(wire)
268 repo_init = self._factory.repo_libgit2(wire)
269 with repo_init as repo:
269 with repo_init as repo:
270 blob_obj = repo[tree_id]
270 blob_obj = repo[tree_id]
271 return blob_obj.is_binary
271 return blob_obj.is_binary
272
272
273 return _is_binary(repo_id, tree_id)
273 return _is_binary(repo_id, tree_id)
274
274
275 @reraise_safe_exceptions
275 @reraise_safe_exceptions
276 def md5_hash(self, wire, tree_id):
276 def md5_hash(self, wire, tree_id):
277 cache_on, context_uid, repo_id = self._cache_on(wire)
277 cache_on, context_uid, repo_id = self._cache_on(wire)
278 region = self._region(wire)
278 region = self._region(wire)
279
279
280 @region.conditional_cache_on_arguments(condition=cache_on)
280 @region.conditional_cache_on_arguments(condition=cache_on)
281 def _md5_hash(_repo_id, _tree_id):
281 def _md5_hash(_repo_id, _tree_id):
282 return ''
282 return ''
283
283
284 return _md5_hash(repo_id, tree_id)
284 return _md5_hash(repo_id, tree_id)
285
285
286 @reraise_safe_exceptions
286 @reraise_safe_exceptions
287 def in_largefiles_store(self, wire, oid):
287 def in_largefiles_store(self, wire, oid):
288 conf = self._wire_to_config(wire)
288 conf = self._wire_to_config(wire)
289 repo_init = self._factory.repo_libgit2(wire)
289 repo_init = self._factory.repo_libgit2(wire)
290 with repo_init as repo:
290 with repo_init as repo:
291 repo_name = repo.path
291 repo_name = repo.path
292
292
293 store_location = conf.get('vcs_git_lfs_store_location')
293 store_location = conf.get('vcs_git_lfs_store_location')
294 if store_location:
294 if store_location:
295
295
296 store = LFSOidStore(
296 store = LFSOidStore(
297 oid=oid, repo=repo_name, store_location=store_location)
297 oid=oid, repo=repo_name, store_location=store_location)
298 return store.has_oid()
298 return store.has_oid()
299
299
300 return False
300 return False
301
301
302 @reraise_safe_exceptions
302 @reraise_safe_exceptions
303 def store_path(self, wire, oid):
303 def store_path(self, wire, oid):
304 conf = self._wire_to_config(wire)
304 conf = self._wire_to_config(wire)
305 repo_init = self._factory.repo_libgit2(wire)
305 repo_init = self._factory.repo_libgit2(wire)
306 with repo_init as repo:
306 with repo_init as repo:
307 repo_name = repo.path
307 repo_name = repo.path
308
308
309 store_location = conf.get('vcs_git_lfs_store_location')
309 store_location = conf.get('vcs_git_lfs_store_location')
310 if store_location:
310 if store_location:
311 store = LFSOidStore(
311 store = LFSOidStore(
312 oid=oid, repo=repo_name, store_location=store_location)
312 oid=oid, repo=repo_name, store_location=store_location)
313 return store.oid_path
313 return store.oid_path
314 raise ValueError('Unable to fetch oid with path {}'.format(oid))
314 raise ValueError('Unable to fetch oid with path {}'.format(oid))
315
315
316 @reraise_safe_exceptions
316 @reraise_safe_exceptions
317 def bulk_request(self, wire, rev, pre_load):
317 def bulk_request(self, wire, rev, pre_load):
318 cache_on, context_uid, repo_id = self._cache_on(wire)
318 cache_on, context_uid, repo_id = self._cache_on(wire)
319 region = self._region(wire)
319 region = self._region(wire)
320
320
321 @region.conditional_cache_on_arguments(condition=cache_on)
321 @region.conditional_cache_on_arguments(condition=cache_on)
322 def _bulk_request(_repo_id, _rev, _pre_load):
322 def _bulk_request(_repo_id, _rev, _pre_load):
323 result = {}
323 result = {}
324 for attr in pre_load:
324 for attr in pre_load:
325 try:
325 try:
326 method = self._bulk_methods[attr]
326 method = self._bulk_methods[attr]
327 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
327 args = [wire, rev]
328 args = [wire, rev]
328 result[attr] = method(*args)
329 result[attr] = method(*args)
329 except KeyError as e:
330 except KeyError as e:
330 raise exceptions.VcsException(e)(
331 raise exceptions.VcsException(e)(
331 "Unknown bulk attribute: %s" % attr)
332 "Unknown bulk attribute: %s" % attr)
332 return result
333 return result
333
334
334 return _bulk_request(repo_id, rev, sorted(pre_load))
335 return _bulk_request(repo_id, rev, sorted(pre_load))
335
336
336 def _build_opener(self, url):
337 def _build_opener(self, url):
337 handlers = []
338 handlers = []
338 url_obj = url_parser(url)
339 url_obj = url_parser(url)
339 _, authinfo = url_obj.authinfo()
340 _, authinfo = url_obj.authinfo()
340
341
341 if authinfo:
342 if authinfo:
342 # create a password manager
343 # create a password manager
343 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
344 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
344 passmgr.add_password(*authinfo)
345 passmgr.add_password(*authinfo)
345
346
346 handlers.extend((httpbasicauthhandler(passmgr),
347 handlers.extend((httpbasicauthhandler(passmgr),
347 httpdigestauthhandler(passmgr)))
348 httpdigestauthhandler(passmgr)))
348
349
349 return urllib.request.build_opener(*handlers)
350 return urllib.request.build_opener(*handlers)
350
351
351 def _type_id_to_name(self, type_id: int):
352 def _type_id_to_name(self, type_id: int):
352 return {
353 return {
353 1: 'commit',
354 1: 'commit',
354 2: 'tree',
355 2: 'tree',
355 3: 'blob',
356 3: 'blob',
356 4: 'tag'
357 4: 'tag'
357 }[type_id]
358 }[type_id]
358
359
359 @reraise_safe_exceptions
360 @reraise_safe_exceptions
360 def check_url(self, url, config):
361 def check_url(self, url, config):
361 url_obj = url_parser(url)
362 url_obj = url_parser(url)
362 test_uri, _ = url_obj.authinfo()
363 test_uri, _ = url_obj.authinfo()
363 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
364 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
364 url_obj.query = obfuscate_qs(url_obj.query)
365 url_obj.query = obfuscate_qs(url_obj.query)
365 cleaned_uri = str(url_obj)
366 cleaned_uri = str(url_obj)
366 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
367 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
367
368
368 if not test_uri.endswith('info/refs'):
369 if not test_uri.endswith('info/refs'):
369 test_uri = test_uri.rstrip('/') + '/info/refs'
370 test_uri = test_uri.rstrip('/') + '/info/refs'
370
371
371 o = self._build_opener(url)
372 o = self._build_opener(url)
372 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
373 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
373
374
374 q = {"service": 'git-upload-pack'}
375 q = {"service": 'git-upload-pack'}
375 qs = '?%s' % urllib.parse.urlencode(q)
376 qs = '?%s' % urllib.parse.urlencode(q)
376 cu = "%s%s" % (test_uri, qs)
377 cu = "%s%s" % (test_uri, qs)
377 req = urllib.request.Request(cu, None, {})
378 req = urllib.request.Request(cu, None, {})
378
379
379 try:
380 try:
380 log.debug("Trying to open URL %s", cleaned_uri)
381 log.debug("Trying to open URL %s", cleaned_uri)
381 resp = o.open(req)
382 resp = o.open(req)
382 if resp.code != 200:
383 if resp.code != 200:
383 raise exceptions.URLError()('Return Code is not 200')
384 raise exceptions.URLError()('Return Code is not 200')
384 except Exception as e:
385 except Exception as e:
385 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
386 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
386 # means it cannot be cloned
387 # means it cannot be cloned
387 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
388 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
388
389
389 # now detect if it's a proper git repo
390 # now detect if it's a proper git repo
390 gitdata = resp.read()
391 gitdata = resp.read()
391 if 'service=git-upload-pack' in gitdata:
392 if 'service=git-upload-pack' in gitdata:
392 pass
393 pass
393 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
394 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
394 # old-style git can return some other format!
395 # old-style git can return some other format!
395 pass
396 pass
396 else:
397 else:
397 raise exceptions.URLError()(
398 raise exceptions.URLError()(
398 "url [%s] does not look like an git" % (cleaned_uri,))
399 "url [%s] does not look like an git" % (cleaned_uri,))
399
400
400 return True
401 return True
401
402
402 @reraise_safe_exceptions
403 @reraise_safe_exceptions
403 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
404 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
404 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
405 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
405 remote_refs = self.pull(wire, url, apply_refs=False)
406 remote_refs = self.pull(wire, url, apply_refs=False)
406 repo = self._factory.repo(wire)
407 repo = self._factory.repo(wire)
407 if isinstance(valid_refs, list):
408 if isinstance(valid_refs, list):
408 valid_refs = tuple(valid_refs)
409 valid_refs = tuple(valid_refs)
409
410
410 for k in remote_refs:
411 for k in remote_refs:
411 # only parse heads/tags and skip so called deferred tags
412 # only parse heads/tags and skip so called deferred tags
412 if k.startswith(valid_refs) and not k.endswith(deferred):
413 if k.startswith(valid_refs) and not k.endswith(deferred):
413 repo[k] = remote_refs[k]
414 repo[k] = remote_refs[k]
414
415
415 if update_after_clone:
416 if update_after_clone:
416 # we want to checkout HEAD
417 # we want to checkout HEAD
417 repo["HEAD"] = remote_refs["HEAD"]
418 repo["HEAD"] = remote_refs["HEAD"]
418 index.build_index_from_tree(repo.path, repo.index_path(),
419 index.build_index_from_tree(repo.path, repo.index_path(),
419 repo.object_store, repo["HEAD"].tree)
420 repo.object_store, repo["HEAD"].tree)
420
421
421 @reraise_safe_exceptions
422 @reraise_safe_exceptions
422 def branch(self, wire, commit_id):
423 def branch(self, wire, commit_id):
423 cache_on, context_uid, repo_id = self._cache_on(wire)
424 cache_on, context_uid, repo_id = self._cache_on(wire)
424 region = self._region(wire)
425 region = self._region(wire)
425 @region.conditional_cache_on_arguments(condition=cache_on)
426 @region.conditional_cache_on_arguments(condition=cache_on)
426 def _branch(_context_uid, _repo_id, _commit_id):
427 def _branch(_context_uid, _repo_id, _commit_id):
427 regex = re.compile('^refs/heads')
428 regex = re.compile('^refs/heads')
428
429
429 def filter_with(ref):
430 def filter_with(ref):
430 return regex.match(ref[0]) and ref[1] == _commit_id
431 return regex.match(ref[0]) and ref[1] == _commit_id
431
432
432 branches = list(filter(filter_with, list(self.get_refs(wire).items())))
433 branches = list(filter(filter_with, list(self.get_refs(wire).items())))
433 return [x[0].split('refs/heads/')[-1] for x in branches]
434 return [x[0].split('refs/heads/')[-1] for x in branches]
434
435
435 return _branch(context_uid, repo_id, commit_id)
436 return _branch(context_uid, repo_id, commit_id)
436
437
437 @reraise_safe_exceptions
438 @reraise_safe_exceptions
438 def commit_branches(self, wire, commit_id):
439 def commit_branches(self, wire, commit_id):
439 cache_on, context_uid, repo_id = self._cache_on(wire)
440 cache_on, context_uid, repo_id = self._cache_on(wire)
440 region = self._region(wire)
441 region = self._region(wire)
441 @region.conditional_cache_on_arguments(condition=cache_on)
442 @region.conditional_cache_on_arguments(condition=cache_on)
442 def _commit_branches(_context_uid, _repo_id, _commit_id):
443 def _commit_branches(_context_uid, _repo_id, _commit_id):
443 repo_init = self._factory.repo_libgit2(wire)
444 repo_init = self._factory.repo_libgit2(wire)
444 with repo_init as repo:
445 with repo_init as repo:
445 branches = [x for x in repo.branches.with_commit(_commit_id)]
446 branches = [x for x in repo.branches.with_commit(_commit_id)]
446 return branches
447 return branches
447
448
448 return _commit_branches(context_uid, repo_id, commit_id)
449 return _commit_branches(context_uid, repo_id, commit_id)
449
450
450 @reraise_safe_exceptions
451 @reraise_safe_exceptions
451 def add_object(self, wire, content):
452 def add_object(self, wire, content):
452 repo_init = self._factory.repo_libgit2(wire)
453 repo_init = self._factory.repo_libgit2(wire)
453 with repo_init as repo:
454 with repo_init as repo:
454 blob = objects.Blob()
455 blob = objects.Blob()
455 blob.set_raw_string(content)
456 blob.set_raw_string(content)
456 repo.object_store.add_object(blob)
457 repo.object_store.add_object(blob)
457 return blob.id
458 return blob.id
458
459
459 # TODO: this is quite complex, check if that can be simplified
460 # TODO: this is quite complex, check if that can be simplified
460 @reraise_safe_exceptions
461 @reraise_safe_exceptions
461 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
462 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
462 # Defines the root tree
463 # Defines the root tree
463 class _Root(object):
464 class _Root(object):
464 def __repr__(self):
465 def __repr__(self):
465 return 'ROOT TREE'
466 return 'ROOT TREE'
466 ROOT = _Root()
467 ROOT = _Root()
467
468
468 repo = self._factory.repo(wire)
469 repo = self._factory.repo(wire)
469 object_store = repo.object_store
470 object_store = repo.object_store
470
471
471 # Create tree and populates it with blobs
472 # Create tree and populates it with blobs
472
473
473 if commit_tree and repo[commit_tree]:
474 if commit_tree and repo[commit_tree]:
474 git_commit = repo[commit_data['parents'][0]]
475 git_commit = repo[commit_data['parents'][0]]
475 commit_tree = repo[git_commit.tree] # root tree
476 commit_tree = repo[git_commit.tree] # root tree
476 else:
477 else:
477 commit_tree = objects.Tree()
478 commit_tree = objects.Tree()
478
479
479 for node in updated:
480 for node in updated:
480 # Compute subdirs if needed
481 # Compute subdirs if needed
481 dirpath, nodename = vcspath.split(node['path'])
482 dirpath, nodename = vcspath.split(node['path'])
482 dirnames = list(map(safe_str, dirpath and dirpath.split('/') or []))
483 dirnames = list(map(safe_str, dirpath and dirpath.split('/') or []))
483 parent = commit_tree
484 parent = commit_tree
484 ancestors = [('', parent)]
485 ancestors = [('', parent)]
485
486
486 # Tries to dig for the deepest existing tree
487 # Tries to dig for the deepest existing tree
487 while dirnames:
488 while dirnames:
488 curdir = dirnames.pop(0)
489 curdir = dirnames.pop(0)
489 try:
490 try:
490 dir_id = parent[curdir][1]
491 dir_id = parent[curdir][1]
491 except KeyError:
492 except KeyError:
492 # put curdir back into dirnames and stops
493 # put curdir back into dirnames and stops
493 dirnames.insert(0, curdir)
494 dirnames.insert(0, curdir)
494 break
495 break
495 else:
496 else:
496 # If found, updates parent
497 # If found, updates parent
497 parent = repo[dir_id]
498 parent = repo[dir_id]
498 ancestors.append((curdir, parent))
499 ancestors.append((curdir, parent))
499 # Now parent is deepest existing tree and we need to create
500 # Now parent is deepest existing tree and we need to create
500 # subtrees for dirnames (in reverse order)
501 # subtrees for dirnames (in reverse order)
501 # [this only applies for nodes from added]
502 # [this only applies for nodes from added]
502 new_trees = []
503 new_trees = []
503
504
504 blob = objects.Blob.from_string(node['content'])
505 blob = objects.Blob.from_string(node['content'])
505
506
506 if dirnames:
507 if dirnames:
507 # If there are trees which should be created we need to build
508 # If there are trees which should be created we need to build
508 # them now (in reverse order)
509 # them now (in reverse order)
509 reversed_dirnames = list(reversed(dirnames))
510 reversed_dirnames = list(reversed(dirnames))
510 curtree = objects.Tree()
511 curtree = objects.Tree()
511 curtree[node['node_path']] = node['mode'], blob.id
512 curtree[node['node_path']] = node['mode'], blob.id
512 new_trees.append(curtree)
513 new_trees.append(curtree)
513 for dirname in reversed_dirnames[:-1]:
514 for dirname in reversed_dirnames[:-1]:
514 newtree = objects.Tree()
515 newtree = objects.Tree()
515 newtree[dirname] = (DIR_STAT, curtree.id)
516 newtree[dirname] = (DIR_STAT, curtree.id)
516 new_trees.append(newtree)
517 new_trees.append(newtree)
517 curtree = newtree
518 curtree = newtree
518 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
519 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
519 else:
520 else:
520 parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)
521 parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)
521
522
522 new_trees.append(parent)
523 new_trees.append(parent)
523 # Update ancestors
524 # Update ancestors
524 reversed_ancestors = reversed(
525 reversed_ancestors = reversed(
525 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
526 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
526 for parent, tree, path in reversed_ancestors:
527 for parent, tree, path in reversed_ancestors:
527 parent[path] = (DIR_STAT, tree.id)
528 parent[path] = (DIR_STAT, tree.id)
528 object_store.add_object(tree)
529 object_store.add_object(tree)
529
530
530 object_store.add_object(blob)
531 object_store.add_object(blob)
531 for tree in new_trees:
532 for tree in new_trees:
532 object_store.add_object(tree)
533 object_store.add_object(tree)
533
534
534 for node_path in removed:
535 for node_path in removed:
535 paths = node_path.split('/')
536 paths = node_path.split('/')
536 tree = commit_tree # start with top-level
537 tree = commit_tree # start with top-level
537 trees = [{'tree': tree, 'path': ROOT}]
538 trees = [{'tree': tree, 'path': ROOT}]
538 # Traverse deep into the forest...
539 # Traverse deep into the forest...
539 # resolve final tree by iterating the path.
540 # resolve final tree by iterating the path.
540 # e.g a/b/c.txt will get
541 # e.g a/b/c.txt will get
541 # - root as tree then
542 # - root as tree then
542 # - 'a' as tree,
543 # - 'a' as tree,
543 # - 'b' as tree,
544 # - 'b' as tree,
544 # - stop at c as blob.
545 # - stop at c as blob.
545 for path in paths:
546 for path in paths:
546 try:
547 try:
547 obj = repo[tree[path][1]]
548 obj = repo[tree[path][1]]
548 if isinstance(obj, objects.Tree):
549 if isinstance(obj, objects.Tree):
549 trees.append({'tree': obj, 'path': path})
550 trees.append({'tree': obj, 'path': path})
550 tree = obj
551 tree = obj
551 except KeyError:
552 except KeyError:
552 break
553 break
553 #PROBLEM:
554 #PROBLEM:
554 """
555 """
555 We're not editing same reference tree object
556 We're not editing same reference tree object
556 """
557 """
557 # Cut down the blob and all rotten trees on the way back...
558 # Cut down the blob and all rotten trees on the way back...
558 for path, tree_data in reversed(list(zip(paths, trees))):
559 for path, tree_data in reversed(list(zip(paths, trees))):
559 tree = tree_data['tree']
560 tree = tree_data['tree']
560 tree.__delitem__(path)
561 tree.__delitem__(path)
561 # This operation edits the tree, we need to mark new commit back
562 # This operation edits the tree, we need to mark new commit back
562
563
563 if len(tree) > 0:
564 if len(tree) > 0:
564 # This tree still has elements - don't remove it or any
565 # This tree still has elements - don't remove it or any
565 # of its parents
566 # of its parents
566 break
567 break
567
568
568 object_store.add_object(commit_tree)
569 object_store.add_object(commit_tree)
569
570
570 # Create commit
571 # Create commit
571 commit = objects.Commit()
572 commit = objects.Commit()
572 commit.tree = commit_tree.id
573 commit.tree = commit_tree.id
573 bytes_keys = [
574 bytes_keys = [
574 'author',
575 'author',
575 'committer',
576 'committer',
576 'message',
577 'message',
577 'encoding'
578 'encoding'
578 ]
579 ]
579
580
580 for k, v in commit_data.items():
581 for k, v in commit_data.items():
581 if k in bytes_keys:
582 if k in bytes_keys:
582 v = safe_bytes(v)
583 v = safe_bytes(v)
583 setattr(commit, k, v)
584 setattr(commit, k, v)
584
585
585 object_store.add_object(commit)
586 object_store.add_object(commit)
586
587
587 self.create_branch(wire, branch, safe_str(commit.id))
588 self.create_branch(wire, branch, safe_str(commit.id))
588
589
589 # dulwich set-ref
590 # dulwich set-ref
590 repo.refs[safe_bytes(f'refs/heads/{branch}')] = commit.id
591 repo.refs[safe_bytes(f'refs/heads/{branch}')] = commit.id
591
592
592 return commit.id
593 return commit.id
593
594
594 @reraise_safe_exceptions
595 @reraise_safe_exceptions
595 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
596 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
596 if url != 'default' and '://' not in url:
597 if url != 'default' and '://' not in url:
597 client = LocalGitClient(url)
598 client = LocalGitClient(url)
598 else:
599 else:
599 url_obj = url_parser(url)
600 url_obj = url_parser(url)
600 o = self._build_opener(url)
601 o = self._build_opener(url)
601 url, _ = url_obj.authinfo()
602 url, _ = url_obj.authinfo()
602 client = HttpGitClient(base_url=url, opener=o)
603 client = HttpGitClient(base_url=url, opener=o)
603 repo = self._factory.repo(wire)
604 repo = self._factory.repo(wire)
604
605
605 determine_wants = repo.object_store.determine_wants_all
606 determine_wants = repo.object_store.determine_wants_all
606 if refs:
607 if refs:
607 def determine_wants_requested(references):
608 def determine_wants_requested(references):
608 return [references[r] for r in references if r in refs]
609 return [references[r] for r in references if r in refs]
609 determine_wants = determine_wants_requested
610 determine_wants = determine_wants_requested
610
611
611 try:
612 try:
612 remote_refs = client.fetch(
613 remote_refs = client.fetch(
613 path=url, target=repo, determine_wants=determine_wants)
614 path=url, target=repo, determine_wants=determine_wants)
614 except NotGitRepository as e:
615 except NotGitRepository as e:
615 log.warning(
616 log.warning(
616 'Trying to fetch from "%s" failed, not a Git repository.', url)
617 'Trying to fetch from "%s" failed, not a Git repository.', url)
617 # Exception can contain unicode which we convert
618 # Exception can contain unicode which we convert
618 raise exceptions.AbortException(e)(repr(e))
619 raise exceptions.AbortException(e)(repr(e))
619
620
620 # mikhail: client.fetch() returns all the remote refs, but fetches only
621 # mikhail: client.fetch() returns all the remote refs, but fetches only
621 # refs filtered by `determine_wants` function. We need to filter result
622 # refs filtered by `determine_wants` function. We need to filter result
622 # as well
623 # as well
623 if refs:
624 if refs:
624 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
625 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
625
626
626 if apply_refs:
627 if apply_refs:
627 # TODO: johbo: Needs proper test coverage with a git repository
628 # TODO: johbo: Needs proper test coverage with a git repository
628 # that contains a tag object, so that we would end up with
629 # that contains a tag object, so that we would end up with
629 # a peeled ref at this point.
630 # a peeled ref at this point.
630 for k in remote_refs:
631 for k in remote_refs:
631 if k.endswith(PEELED_REF_MARKER):
632 if k.endswith(PEELED_REF_MARKER):
632 log.debug("Skipping peeled reference %s", k)
633 log.debug("Skipping peeled reference %s", k)
633 continue
634 continue
634 repo[k] = remote_refs[k]
635 repo[k] = remote_refs[k]
635
636
636 if refs and not update_after:
637 if refs and not update_after:
637 # mikhail: explicitly set the head to the last ref.
638 # mikhail: explicitly set the head to the last ref.
638 repo["HEAD"] = remote_refs[refs[-1]]
639 repo["HEAD"] = remote_refs[refs[-1]]
639
640
640 if update_after:
641 if update_after:
641 # we want to checkout HEAD
642 # we want to checkout HEAD
642 repo["HEAD"] = remote_refs["HEAD"]
643 repo["HEAD"] = remote_refs["HEAD"]
643 index.build_index_from_tree(repo.path, repo.index_path(),
644 index.build_index_from_tree(repo.path, repo.index_path(),
644 repo.object_store, repo["HEAD"].tree)
645 repo.object_store, repo["HEAD"].tree)
645 return remote_refs
646 return remote_refs
646
647
647 @reraise_safe_exceptions
648 @reraise_safe_exceptions
648 def sync_fetch(self, wire, url, refs=None, all_refs=False):
649 def sync_fetch(self, wire, url, refs=None, all_refs=False):
649 repo = self._factory.repo(wire)
650 repo = self._factory.repo(wire)
650 if refs and not isinstance(refs, (list, tuple)):
651 if refs and not isinstance(refs, (list, tuple)):
651 refs = [refs]
652 refs = [refs]
652
653
653 config = self._wire_to_config(wire)
654 config = self._wire_to_config(wire)
654 # get all remote refs we'll use to fetch later
655 # get all remote refs we'll use to fetch later
655 cmd = ['ls-remote']
656 cmd = ['ls-remote']
656 if not all_refs:
657 if not all_refs:
657 cmd += ['--heads', '--tags']
658 cmd += ['--heads', '--tags']
658 cmd += [url]
659 cmd += [url]
659 output, __ = self.run_git_command(
660 output, __ = self.run_git_command(
660 wire, cmd, fail_on_stderr=False,
661 wire, cmd, fail_on_stderr=False,
661 _copts=self._remote_conf(config),
662 _copts=self._remote_conf(config),
662 extra_env={'GIT_TERMINAL_PROMPT': '0'})
663 extra_env={'GIT_TERMINAL_PROMPT': '0'})
663
664
664 remote_refs = collections.OrderedDict()
665 remote_refs = collections.OrderedDict()
665 fetch_refs = []
666 fetch_refs = []
666
667
667 for ref_line in output.splitlines():
668 for ref_line in output.splitlines():
668 sha, ref = ref_line.split(b'\t')
669 sha, ref = ref_line.split(b'\t')
669 sha = sha.strip()
670 sha = sha.strip()
670 if ref in remote_refs:
671 if ref in remote_refs:
671 # duplicate, skip
672 # duplicate, skip
672 continue
673 continue
673 if ref.endswith(PEELED_REF_MARKER):
674 if ref.endswith(PEELED_REF_MARKER):
674 log.debug("Skipping peeled reference %s", ref)
675 log.debug("Skipping peeled reference %s", ref)
675 continue
676 continue
676 # don't sync HEAD
677 # don't sync HEAD
677 if ref in [b'HEAD']:
678 if ref in [b'HEAD']:
678 continue
679 continue
679
680
680 remote_refs[ref] = sha
681 remote_refs[ref] = sha
681
682
682 if refs and sha in refs:
683 if refs and sha in refs:
683 # we filter fetch using our specified refs
684 # we filter fetch using our specified refs
684 fetch_refs.append(f'{safe_str(ref)}:{safe_str(ref)}')
685 fetch_refs.append(f'{safe_str(ref)}:{safe_str(ref)}')
685 elif not refs:
686 elif not refs:
686 fetch_refs.append(f'{safe_str(ref)}:{safe_str(ref)}')
687 fetch_refs.append(f'{safe_str(ref)}:{safe_str(ref)}')
687 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
688 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
688
689
689 if fetch_refs:
690 if fetch_refs:
690 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
691 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
691 fetch_refs_chunks = list(chunk)
692 fetch_refs_chunks = list(chunk)
692 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
693 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
693 self.run_git_command(
694 self.run_git_command(
694 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
695 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
695 fail_on_stderr=False,
696 fail_on_stderr=False,
696 _copts=self._remote_conf(config),
697 _copts=self._remote_conf(config),
697 extra_env={'GIT_TERMINAL_PROMPT': '0'})
698 extra_env={'GIT_TERMINAL_PROMPT': '0'})
698
699
699 return remote_refs
700 return remote_refs
700
701
701 @reraise_safe_exceptions
702 @reraise_safe_exceptions
702 def sync_push(self, wire, url, refs=None):
703 def sync_push(self, wire, url, refs=None):
703 if not self.check_url(url, wire):
704 if not self.check_url(url, wire):
704 return
705 return
705 config = self._wire_to_config(wire)
706 config = self._wire_to_config(wire)
706 self._factory.repo(wire)
707 self._factory.repo(wire)
707 self.run_git_command(
708 self.run_git_command(
708 wire, ['push', url, '--mirror'], fail_on_stderr=False,
709 wire, ['push', url, '--mirror'], fail_on_stderr=False,
709 _copts=self._remote_conf(config),
710 _copts=self._remote_conf(config),
710 extra_env={'GIT_TERMINAL_PROMPT': '0'})
711 extra_env={'GIT_TERMINAL_PROMPT': '0'})
711
712
712 @reraise_safe_exceptions
713 @reraise_safe_exceptions
713 def get_remote_refs(self, wire, url):
714 def get_remote_refs(self, wire, url):
714 repo = Repo(url)
715 repo = Repo(url)
715 return repo.get_refs()
716 return repo.get_refs()
716
717
717 @reraise_safe_exceptions
718 @reraise_safe_exceptions
718 def get_description(self, wire):
719 def get_description(self, wire):
719 repo = self._factory.repo(wire)
720 repo = self._factory.repo(wire)
720 return repo.get_description()
721 return repo.get_description()
721
722
722 @reraise_safe_exceptions
723 @reraise_safe_exceptions
723 def get_missing_revs(self, wire, rev1, rev2, path2):
724 def get_missing_revs(self, wire, rev1, rev2, path2):
724 repo = self._factory.repo(wire)
725 repo = self._factory.repo(wire)
725 LocalGitClient(thin_packs=False).fetch(path2, repo)
726 LocalGitClient(thin_packs=False).fetch(path2, repo)
726
727
727 wire_remote = wire.copy()
728 wire_remote = wire.copy()
728 wire_remote['path'] = path2
729 wire_remote['path'] = path2
729 repo_remote = self._factory.repo(wire_remote)
730 repo_remote = self._factory.repo(wire_remote)
730 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
731 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
731
732
732 revs = [
733 revs = [
733 x.commit.id
734 x.commit.id
734 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
735 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
735 return revs
736 return revs
736
737
737 @reraise_safe_exceptions
738 @reraise_safe_exceptions
738 def get_object(self, wire, sha, maybe_unreachable=False):
739 def get_object(self, wire, sha, maybe_unreachable=False):
739 cache_on, context_uid, repo_id = self._cache_on(wire)
740 cache_on, context_uid, repo_id = self._cache_on(wire)
740 region = self._region(wire)
741 region = self._region(wire)
741
742
742 @region.conditional_cache_on_arguments(condition=cache_on)
743 @region.conditional_cache_on_arguments(condition=cache_on)
743 def _get_object(_context_uid, _repo_id, _sha):
744 def _get_object(_context_uid, _repo_id, _sha):
744 repo_init = self._factory.repo_libgit2(wire)
745 repo_init = self._factory.repo_libgit2(wire)
745 with repo_init as repo:
746 with repo_init as repo:
746
747
747 missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
748 missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
748 try:
749 try:
749 commit = repo.revparse_single(sha)
750 commit = repo.revparse_single(sha)
750 except KeyError:
751 except KeyError:
751 # NOTE(marcink): KeyError doesn't give us any meaningful information
752 # NOTE(marcink): KeyError doesn't give us any meaningful information
752 # here, we instead give something more explicit
753 # here, we instead give something more explicit
753 e = exceptions.RefNotFoundException('SHA: %s not found', sha)
754 e = exceptions.RefNotFoundException('SHA: %s not found', sha)
754 raise exceptions.LookupException(e)(missing_commit_err)
755 raise exceptions.LookupException(e)(missing_commit_err)
755 except ValueError as e:
756 except ValueError as e:
756 raise exceptions.LookupException(e)(missing_commit_err)
757 raise exceptions.LookupException(e)(missing_commit_err)
757
758
758 is_tag = False
759 is_tag = False
759 if isinstance(commit, pygit2.Tag):
760 if isinstance(commit, pygit2.Tag):
760 commit = repo.get(commit.target)
761 commit = repo.get(commit.target)
761 is_tag = True
762 is_tag = True
762
763
763 check_dangling = True
764 check_dangling = True
764 if is_tag:
765 if is_tag:
765 check_dangling = False
766 check_dangling = False
766
767
767 if check_dangling and maybe_unreachable:
768 if check_dangling and maybe_unreachable:
768 check_dangling = False
769 check_dangling = False
769
770
770 # we used a reference and it parsed, which means we're not dealing with a dangling commit
771 # we used a reference and it parsed, which means we're not dealing with a dangling commit
771 if sha != commit.hex:
772 if sha != commit.hex:
772 check_dangling = False
773 check_dangling = False
773
774
774 if check_dangling:
775 if check_dangling:
775 # check for dangling commit
776 # check for dangling commit
776 for branch in repo.branches.with_commit(commit.hex):
777 for branch in repo.branches.with_commit(commit.hex):
777 if branch:
778 if branch:
778 break
779 break
779 else:
780 else:
780 # NOTE(marcink): Empty error doesn't give us any meaningful information
781 # NOTE(marcink): Empty error doesn't give us any meaningful information
781 # here, we instead give something more explicit
782 # here, we instead give something more explicit
782 e = exceptions.RefNotFoundException('SHA: %s not found in branches', sha)
783 e = exceptions.RefNotFoundException('SHA: %s not found in branches', sha)
783 raise exceptions.LookupException(e)(missing_commit_err)
784 raise exceptions.LookupException(e)(missing_commit_err)
784
785
785 commit_id = commit.hex
786 commit_id = commit.hex
786 type_id = commit.type
787 type_id = commit.type
787
788
788 return {
789 return {
789 'id': commit_id,
790 'id': commit_id,
790 'type': self._type_id_to_name(type_id),
791 'type': self._type_id_to_name(type_id),
791 'commit_id': commit_id,
792 'commit_id': commit_id,
792 'idx': 0
793 'idx': 0
793 }
794 }
794
795
795 return _get_object(context_uid, repo_id, sha)
796 return _get_object(context_uid, repo_id, sha)
796
797
797 @reraise_safe_exceptions
798 @reraise_safe_exceptions
798 def get_refs(self, wire):
799 def get_refs(self, wire):
799 cache_on, context_uid, repo_id = self._cache_on(wire)
800 cache_on, context_uid, repo_id = self._cache_on(wire)
800 region = self._region(wire)
801 region = self._region(wire)
801
802
802 @region.conditional_cache_on_arguments(condition=cache_on)
803 @region.conditional_cache_on_arguments(condition=cache_on)
803 def _get_refs(_context_uid, _repo_id):
804 def _get_refs(_context_uid, _repo_id):
804
805
805 repo_init = self._factory.repo_libgit2(wire)
806 repo_init = self._factory.repo_libgit2(wire)
806 with repo_init as repo:
807 with repo_init as repo:
807 regex = re.compile('^refs/(heads|tags)/')
808 regex = re.compile('^refs/(heads|tags)/')
808 return {x.name: x.target.hex for x in
809 return {x.name: x.target.hex for x in
809 [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]}
810 [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]}
810
811
811 return _get_refs(context_uid, repo_id)
812 return _get_refs(context_uid, repo_id)
812
813
813 @reraise_safe_exceptions
814 @reraise_safe_exceptions
814 def get_branch_pointers(self, wire):
815 def get_branch_pointers(self, wire):
815 cache_on, context_uid, repo_id = self._cache_on(wire)
816 cache_on, context_uid, repo_id = self._cache_on(wire)
816 region = self._region(wire)
817 region = self._region(wire)
817
818
818 @region.conditional_cache_on_arguments(condition=cache_on)
819 @region.conditional_cache_on_arguments(condition=cache_on)
819 def _get_branch_pointers(_context_uid, _repo_id):
820 def _get_branch_pointers(_context_uid, _repo_id):
820
821
821 repo_init = self._factory.repo_libgit2(wire)
822 repo_init = self._factory.repo_libgit2(wire)
822 regex = re.compile('^refs/heads')
823 regex = re.compile('^refs/heads')
823 with repo_init as repo:
824 with repo_init as repo:
824 branches = [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]
825 branches = [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]
825 return {x.target.hex: x.shorthand for x in branches}
826 return {x.target.hex: x.shorthand for x in branches}
826
827
827 return _get_branch_pointers(context_uid, repo_id)
828 return _get_branch_pointers(context_uid, repo_id)
828
829
829 @reraise_safe_exceptions
830 @reraise_safe_exceptions
830 def head(self, wire, show_exc=True):
831 def head(self, wire, show_exc=True):
831 cache_on, context_uid, repo_id = self._cache_on(wire)
832 cache_on, context_uid, repo_id = self._cache_on(wire)
832 region = self._region(wire)
833 region = self._region(wire)
833
834
834 @region.conditional_cache_on_arguments(condition=cache_on)
835 @region.conditional_cache_on_arguments(condition=cache_on)
835 def _head(_context_uid, _repo_id, _show_exc):
836 def _head(_context_uid, _repo_id, _show_exc):
836 repo_init = self._factory.repo_libgit2(wire)
837 repo_init = self._factory.repo_libgit2(wire)
837 with repo_init as repo:
838 with repo_init as repo:
838 try:
839 try:
839 return repo.head.peel().hex
840 return repo.head.peel().hex
840 except Exception:
841 except Exception:
841 if show_exc:
842 if show_exc:
842 raise
843 raise
843 return _head(context_uid, repo_id, show_exc)
844 return _head(context_uid, repo_id, show_exc)
844
845
845 @reraise_safe_exceptions
846 @reraise_safe_exceptions
846 def init(self, wire):
847 def init(self, wire):
847 repo_path = safe_str(wire['path'])
848 repo_path = safe_str(wire['path'])
848 self.repo = Repo.init(repo_path)
849 self.repo = Repo.init(repo_path)
849
850
850 @reraise_safe_exceptions
851 @reraise_safe_exceptions
851 def init_bare(self, wire):
852 def init_bare(self, wire):
852 repo_path = safe_str(wire['path'])
853 repo_path = safe_str(wire['path'])
853 self.repo = Repo.init_bare(repo_path)
854 self.repo = Repo.init_bare(repo_path)
854
855
855 @reraise_safe_exceptions
856 @reraise_safe_exceptions
856 def revision(self, wire, rev):
857 def revision(self, wire, rev):
857
858
858 cache_on, context_uid, repo_id = self._cache_on(wire)
859 cache_on, context_uid, repo_id = self._cache_on(wire)
859 region = self._region(wire)
860 region = self._region(wire)
860
861
861 @region.conditional_cache_on_arguments(condition=cache_on)
862 @region.conditional_cache_on_arguments(condition=cache_on)
862 def _revision(_context_uid, _repo_id, _rev):
863 def _revision(_context_uid, _repo_id, _rev):
863 repo_init = self._factory.repo_libgit2(wire)
864 repo_init = self._factory.repo_libgit2(wire)
864 with repo_init as repo:
865 with repo_init as repo:
865 commit = repo[rev]
866 commit = repo[rev]
866 obj_data = {
867 obj_data = {
867 'id': commit.id.hex,
868 'id': commit.id.hex,
868 }
869 }
869 # tree objects themselves don't have a tree_id attribute
870 # tree objects themselves don't have a tree_id attribute
870 if hasattr(commit, 'tree_id'):
871 if hasattr(commit, 'tree_id'):
871 obj_data['tree'] = commit.tree_id.hex
872 obj_data['tree'] = commit.tree_id.hex
872
873
873 return obj_data
874 return obj_data
874 return _revision(context_uid, repo_id, rev)
875 return _revision(context_uid, repo_id, rev)
875
876
876 @reraise_safe_exceptions
877 @reraise_safe_exceptions
877 def date(self, wire, commit_id):
878 def date(self, wire, commit_id):
878 cache_on, context_uid, repo_id = self._cache_on(wire)
879 cache_on, context_uid, repo_id = self._cache_on(wire)
879 region = self._region(wire)
880 region = self._region(wire)
880
881
881 @region.conditional_cache_on_arguments(condition=cache_on)
882 @region.conditional_cache_on_arguments(condition=cache_on)
882 def _date(_repo_id, _commit_id):
883 def _date(_repo_id, _commit_id):
883 repo_init = self._factory.repo_libgit2(wire)
884 repo_init = self._factory.repo_libgit2(wire)
884 with repo_init as repo:
885 with repo_init as repo:
885 commit = repo[commit_id]
886 commit = repo[commit_id]
886
887
887 if hasattr(commit, 'commit_time'):
888 if hasattr(commit, 'commit_time'):
888 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
889 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
889 else:
890 else:
890 commit = commit.get_object()
891 commit = commit.get_object()
891 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
892 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
892
893
893 # TODO(marcink): check dulwich difference of offset vs timezone
894 # TODO(marcink): check dulwich difference of offset vs timezone
894 return [commit_time, commit_time_offset]
895 return [commit_time, commit_time_offset]
895 return _date(repo_id, commit_id)
896 return _date(repo_id, commit_id)
896
897
897 @reraise_safe_exceptions
898 @reraise_safe_exceptions
898 def author(self, wire, commit_id):
899 def author(self, wire, commit_id):
899 cache_on, context_uid, repo_id = self._cache_on(wire)
900 cache_on, context_uid, repo_id = self._cache_on(wire)
900 region = self._region(wire)
901 region = self._region(wire)
901
902
902 @region.conditional_cache_on_arguments(condition=cache_on)
903 @region.conditional_cache_on_arguments(condition=cache_on)
903 def _author(_repo_id, _commit_id):
904 def _author(_repo_id, _commit_id):
904 repo_init = self._factory.repo_libgit2(wire)
905 repo_init = self._factory.repo_libgit2(wire)
905 with repo_init as repo:
906 with repo_init as repo:
906 commit = repo[commit_id]
907 commit = repo[commit_id]
907
908
908 if hasattr(commit, 'author'):
909 if hasattr(commit, 'author'):
909 author = commit.author
910 author = commit.author
910 else:
911 else:
911 author = commit.get_object().author
912 author = commit.get_object().author
912
913
913 if author.email:
914 if author.email:
914 return "{} <{}>".format(author.name, author.email)
915 return "{} <{}>".format(author.name, author.email)
915
916
916 try:
917 try:
917 return "{}".format(author.name)
918 return "{}".format(author.name)
918 except Exception:
919 except Exception:
919 return "{}".format(safe_str(author.raw_name))
920 return "{}".format(safe_str(author.raw_name))
920
921
921 return _author(repo_id, commit_id)
922 return _author(repo_id, commit_id)
922
923
923 @reraise_safe_exceptions
924 @reraise_safe_exceptions
924 def message(self, wire, commit_id):
925 def message(self, wire, commit_id):
925 cache_on, context_uid, repo_id = self._cache_on(wire)
926 cache_on, context_uid, repo_id = self._cache_on(wire)
926 region = self._region(wire)
927 region = self._region(wire)
927 @region.conditional_cache_on_arguments(condition=cache_on)
928 @region.conditional_cache_on_arguments(condition=cache_on)
928 def _message(_repo_id, _commit_id):
929 def _message(_repo_id, _commit_id):
929 repo_init = self._factory.repo_libgit2(wire)
930 repo_init = self._factory.repo_libgit2(wire)
930 with repo_init as repo:
931 with repo_init as repo:
931 commit = repo[commit_id]
932 commit = repo[commit_id]
932 return commit.message
933 return commit.message
933 return _message(repo_id, commit_id)
934 return _message(repo_id, commit_id)
934
935
935 @reraise_safe_exceptions
936 @reraise_safe_exceptions
936 def parents(self, wire, commit_id):
937 def parents(self, wire, commit_id):
937 cache_on, context_uid, repo_id = self._cache_on(wire)
938 cache_on, context_uid, repo_id = self._cache_on(wire)
938 region = self._region(wire)
939 region = self._region(wire)
939
940
940 @region.conditional_cache_on_arguments(condition=cache_on)
941 @region.conditional_cache_on_arguments(condition=cache_on)
941 def _parents(_repo_id, _commit_id):
942 def _parents(_repo_id, _commit_id):
942 repo_init = self._factory.repo_libgit2(wire)
943 repo_init = self._factory.repo_libgit2(wire)
943 with repo_init as repo:
944 with repo_init as repo:
944 commit = repo[commit_id]
945 commit = repo[commit_id]
945 if hasattr(commit, 'parent_ids'):
946 if hasattr(commit, 'parent_ids'):
946 parent_ids = commit.parent_ids
947 parent_ids = commit.parent_ids
947 else:
948 else:
948 parent_ids = commit.get_object().parent_ids
949 parent_ids = commit.get_object().parent_ids
949
950
950 return [x.hex for x in parent_ids]
951 return [x.hex for x in parent_ids]
951 return _parents(repo_id, commit_id)
952 return _parents(repo_id, commit_id)
952
953
953 @reraise_safe_exceptions
954 @reraise_safe_exceptions
954 def children(self, wire, commit_id):
955 def children(self, wire, commit_id):
955 cache_on, context_uid, repo_id = self._cache_on(wire)
956 cache_on, context_uid, repo_id = self._cache_on(wire)
956 region = self._region(wire)
957 region = self._region(wire)
957
958
958 head = self.head(wire)
959 head = self.head(wire)
959
960
960 @region.conditional_cache_on_arguments(condition=cache_on)
961 @region.conditional_cache_on_arguments(condition=cache_on)
961 def _children(_repo_id, _commit_id):
962 def _children(_repo_id, _commit_id):
962
963
963 output, __ = self.run_git_command(
964 output, __ = self.run_git_command(
964 wire, ['rev-list', '--all', '--children', f'{commit_id}^..{head}'])
965 wire, ['rev-list', '--all', '--children', f'{commit_id}^..{head}'])
965
966
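# each line of 'git rev-list --children' output has the form '<sha> <child-sha> ...';
# we only need the single line that starts with commit_id, hence the break below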
966 child_ids = []
967 child_ids = []
967 pat = re.compile(r'^{}'.format(commit_id))
968 pat = re.compile(r'^{}'.format(commit_id))
968 for line in output.splitlines():
969 for line in output.splitlines():
969 line = safe_str(line)
970 line = safe_str(line)
970 if pat.match(line):
971 if pat.match(line):
971 found_ids = line.split(' ')[1:]
972 found_ids = line.split(' ')[1:]
972 child_ids.extend(found_ids)
973 child_ids.extend(found_ids)
973 break
974 break
974
975
975 return child_ids
976 return child_ids
976 return _children(repo_id, commit_id)
977 return _children(repo_id, commit_id)
977
978
978 @reraise_safe_exceptions
979 @reraise_safe_exceptions
979 def set_refs(self, wire, key, value):
980 def set_refs(self, wire, key, value):
980 repo_init = self._factory.repo_libgit2(wire)
981 repo_init = self._factory.repo_libgit2(wire)
981 with repo_init as repo:
982 with repo_init as repo:
982 repo.references.create(key, value, force=True)
983 repo.references.create(key, value, force=True)
983
984
984 @reraise_safe_exceptions
985 @reraise_safe_exceptions
985 def create_branch(self, wire, branch_name, commit_id, force=False):
986 def create_branch(self, wire, branch_name, commit_id, force=False):
986 repo_init = self._factory.repo_libgit2(wire)
987 repo_init = self._factory.repo_libgit2(wire)
987 with repo_init as repo:
988 with repo_init as repo:
988 commit = repo[commit_id]
989 commit = repo[commit_id]
989
990
990 if force:
991 if force:
991 repo.branches.local.create(branch_name, commit, force=force)
992 repo.branches.local.create(branch_name, commit, force=force)
992 elif not repo.branches.get(branch_name):
993 elif not repo.branches.get(branch_name):
993 # create only if that branch doesn't already exist
994 # create only if that branch doesn't already exist
994 repo.branches.local.create(branch_name, commit, force=force)
995 repo.branches.local.create(branch_name, commit, force=force)
995
996
996 @reraise_safe_exceptions
997 @reraise_safe_exceptions
997 def remove_ref(self, wire, key):
998 def remove_ref(self, wire, key):
998 repo_init = self._factory.repo_libgit2(wire)
999 repo_init = self._factory.repo_libgit2(wire)
999 with repo_init as repo:
1000 with repo_init as repo:
1000 repo.references.delete(key)
1001 repo.references.delete(key)
1001
1002
1002 @reraise_safe_exceptions
1003 @reraise_safe_exceptions
1003 def tag_remove(self, wire, tag_name):
1004 def tag_remove(self, wire, tag_name):
1004 repo_init = self._factory.repo_libgit2(wire)
1005 repo_init = self._factory.repo_libgit2(wire)
1005 with repo_init as repo:
1006 with repo_init as repo:
1006 key = 'refs/tags/{}'.format(tag_name)
1007 key = 'refs/tags/{}'.format(tag_name)
1007 repo.references.delete(key)
1008 repo.references.delete(key)
1008
1009
1009 @reraise_safe_exceptions
1010 @reraise_safe_exceptions
1010 def tree_changes(self, wire, source_id, target_id):
1011 def tree_changes(self, wire, source_id, target_id):
1011 # TODO(marcink): remove this seems it's only used by tests
1012 # TODO(marcink): remove this seems it's only used by tests
1012 repo = self._factory.repo(wire)
1013 repo = self._factory.repo(wire)
1013 source = repo[source_id].tree if source_id else None
1014 source = repo[source_id].tree if source_id else None
1014 target = repo[target_id].tree
1015 target = repo[target_id].tree
1015 result = repo.object_store.tree_changes(source, target)
1016 result = repo.object_store.tree_changes(source, target)
1016 return list(result)
1017 return list(result)
1017
1018
1018 @reraise_safe_exceptions
1019 @reraise_safe_exceptions
1019 def tree_and_type_for_path(self, wire, commit_id, path):
1020 def tree_and_type_for_path(self, wire, commit_id, path):
1020
1021
1021 cache_on, context_uid, repo_id = self._cache_on(wire)
1022 cache_on, context_uid, repo_id = self._cache_on(wire)
1022 region = self._region(wire)
1023 region = self._region(wire)
1023
1024
1024 @region.conditional_cache_on_arguments(condition=cache_on)
1025 @region.conditional_cache_on_arguments(condition=cache_on)
1025 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
1026 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
1026 repo_init = self._factory.repo_libgit2(wire)
1027 repo_init = self._factory.repo_libgit2(wire)
1027
1028
1028 with repo_init as repo:
1029 with repo_init as repo:
1029 commit = repo[commit_id]
1030 commit = repo[commit_id]
1030 try:
1031 try:
1031 tree = commit.tree[path]
1032 tree = commit.tree[path]
1032 except KeyError:
1033 except KeyError:
1033 return None, None, None
1034 return None, None, None
1034
1035
1035 return tree.id.hex, tree.type_str, tree.filemode
1036 return tree.id.hex, tree.type_str, tree.filemode
1036 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
1037 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
1037
1038
1038 @reraise_safe_exceptions
1039 @reraise_safe_exceptions
1039 def tree_items(self, wire, tree_id):
1040 def tree_items(self, wire, tree_id):
1040 cache_on, context_uid, repo_id = self._cache_on(wire)
1041 cache_on, context_uid, repo_id = self._cache_on(wire)
1041 region = self._region(wire)
1042 region = self._region(wire)
1042
1043
1043 @region.conditional_cache_on_arguments(condition=cache_on)
1044 @region.conditional_cache_on_arguments(condition=cache_on)
1044 def _tree_items(_repo_id, _tree_id):
1045 def _tree_items(_repo_id, _tree_id):
1045
1046
1046 repo_init = self._factory.repo_libgit2(wire)
1047 repo_init = self._factory.repo_libgit2(wire)
1047 with repo_init as repo:
1048 with repo_init as repo:
1048 try:
1049 try:
1049 tree = repo[tree_id]
1050 tree = repo[tree_id]
1050 except KeyError:
1051 except KeyError:
1051 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1052 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1052
1053
1053 result = []
1054 result = []
1054 for item in tree:
1055 for item in tree:
1055 item_sha = item.hex
1056 item_sha = item.hex
1056 item_mode = item.filemode
1057 item_mode = item.filemode
1057 item_type = item.type_str
1058 item_type = item.type_str
1058
1059
1059 if item_type == 'commit':
1060 if item_type == 'commit':
1060 # NOTE(marcink): we translate submodules to 'link' for backward compat
1061 # NOTE(marcink): we translate submodules to 'link' for backward compat
1061 item_type = 'link'
1062 item_type = 'link'
1062
1063
1063 result.append((item.name, item_mode, item_sha, item_type))
1064 result.append((item.name, item_mode, item_sha, item_type))
1064 return result
1065 return result
1065 return _tree_items(repo_id, tree_id)
1066 return _tree_items(repo_id, tree_id)
1066
1067
1067 @reraise_safe_exceptions
1068 @reraise_safe_exceptions
1068 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1069 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1069 """
1070 """
1070 Old version that uses subprocess to call diff
1071 Old version that uses subprocess to call diff
1071 """
1072 """
1072
1073
1073 flags = [
1074 flags = [
1074 '-U%s' % context, '--patch',
1075 '-U%s' % context, '--patch',
1075 '--binary',
1076 '--binary',
1076 '--find-renames',
1077 '--find-renames',
1077 '--no-indent-heuristic',
1078 '--no-indent-heuristic',
1078 # '--indent-heuristic',
1079 # '--indent-heuristic',
1079 #'--full-index',
1080 #'--full-index',
1080 #'--abbrev=40'
1081 #'--abbrev=40'
1081 ]
1082 ]
1082
1083
1083 if opt_ignorews:
1084 if opt_ignorews:
1084 flags.append('--ignore-all-space')
1085 flags.append('--ignore-all-space')
1085
1086
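# when the base is the empty commit there is no parent to diff against,
# so 'git show' is used to emit the full patch of commit_id_2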
1086 if commit_id_1 == self.EMPTY_COMMIT:
1087 if commit_id_1 == self.EMPTY_COMMIT:
1087 cmd = ['show'] + flags + [commit_id_2]
1088 cmd = ['show'] + flags + [commit_id_2]
1088 else:
1089 else:
1089 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1090 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1090
1091
1091 if file_filter:
1092 if file_filter:
1092 cmd.extend(['--', file_filter])
1093 cmd.extend(['--', file_filter])
1093
1094
1094 diff, __ = self.run_git_command(wire, cmd)
1095 diff, __ = self.run_git_command(wire, cmd)
1095 # If we used the 'show' command, strip the first few lines (until the actual diff
1096 # If we used the 'show' command, strip the first few lines (until the actual diff
1096 # starts)
1097 # starts)
1097 if commit_id_1 == self.EMPTY_COMMIT:
1098 if commit_id_1 == self.EMPTY_COMMIT:
1098 lines = diff.splitlines()
1099 lines = diff.splitlines()
1099 x = 0
1100 x = 0
1100 for line in lines:
1101 for line in lines:
1101 if line.startswith(b'diff'):
1102 if line.startswith(b'diff'):
1102 break
1103 break
1103 x += 1
1104 x += 1
1104 # Append a trailing newline just like the 'diff' command does
1105 # Append a trailing newline just like the 'diff' command does
1105 diff = '\n'.join(lines[x:]) + '\n'
1106 diff = '\n'.join(lines[x:]) + '\n'
1106 return diff
1107 return diff
1107
1108
1108 @reraise_safe_exceptions
1109 @reraise_safe_exceptions
1109 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1110 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1110 repo_init = self._factory.repo_libgit2(wire)
1111 repo_init = self._factory.repo_libgit2(wire)
1111 with repo_init as repo:
1112 with repo_init as repo:
1112 swap = True
1113 swap = True
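# the trees are passed newest-first to diff_to_tree() below; swap=True flips the
# direction so the emitted patch still reads from commit_id_1 to commit_id_2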
1113 flags = 0
1114 flags = 0
1114 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1115 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1115
1116
1116 if opt_ignorews:
1117 if opt_ignorews:
1117 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1118 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1118
1119
1119 if commit_id_1 == self.EMPTY_COMMIT:
1120 if commit_id_1 == self.EMPTY_COMMIT:
1120 comm1 = repo[commit_id_2]
1121 comm1 = repo[commit_id_2]
1121 diff_obj = comm1.tree.diff_to_tree(
1122 diff_obj = comm1.tree.diff_to_tree(
1122 flags=flags, context_lines=context, swap=swap)
1123 flags=flags, context_lines=context, swap=swap)
1123
1124
1124 else:
1125 else:
1125 comm1 = repo[commit_id_2]
1126 comm1 = repo[commit_id_2]
1126 comm2 = repo[commit_id_1]
1127 comm2 = repo[commit_id_1]
1127 diff_obj = comm1.tree.diff_to_tree(
1128 diff_obj = comm1.tree.diff_to_tree(
1128 comm2.tree, flags=flags, context_lines=context, swap=swap)
1129 comm2.tree, flags=flags, context_lines=context, swap=swap)
1129 similar_flags = 0
1130 similar_flags = 0
1130 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1131 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1131 diff_obj.find_similar(flags=similar_flags)
1132 diff_obj.find_similar(flags=similar_flags)
1132
1133
1133 if file_filter:
1134 if file_filter:
1134 for p in diff_obj:
1135 for p in diff_obj:
1135 if p.delta.old_file.path == file_filter:
1136 if p.delta.old_file.path == file_filter:
1136 return p.patch or ''
1137 return p.patch or ''
1137 # no matching path == no diff
1138 # no matching path == no diff
1138 return ''
1139 return ''
1139 return diff_obj.patch or ''
1140 return diff_obj.patch or ''
1140
1141
1141 @reraise_safe_exceptions
1142 @reraise_safe_exceptions
1142 def node_history(self, wire, commit_id, path, limit):
1143 def node_history(self, wire, commit_id, path, limit):
1143 cache_on, context_uid, repo_id = self._cache_on(wire)
1144 cache_on, context_uid, repo_id = self._cache_on(wire)
1144 region = self._region(wire)
1145 region = self._region(wire)
1145
1146
1146 @region.conditional_cache_on_arguments(condition=cache_on)
1147 @region.conditional_cache_on_arguments(condition=cache_on)
1147 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1148 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1148 # optimize for n==1, rev-list is much faster for that use-case
1149 # optimize for n==1, rev-list is much faster for that use-case
1149 if limit == 1:
1150 if limit == 1:
1150 cmd = ['rev-list', '-1', commit_id, '--', path]
1151 cmd = ['rev-list', '-1', commit_id, '--', path]
1151 else:
1152 else:
1152 cmd = ['log']
1153 cmd = ['log']
1153 if limit:
1154 if limit:
1154 cmd.extend(['-n', str(safe_int(limit, 0))])
1155 cmd.extend(['-n', str(safe_int(limit, 0))])
1155 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1156 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1156
1157
1157 output, __ = self.run_git_command(wire, cmd)
1158 output, __ = self.run_git_command(wire, cmd)
1158 commit_ids = re.findall(rb'[0-9a-fA-F]{40}', output)
1159 commit_ids = re.findall(rb'[0-9a-fA-F]{40}', output)
1159
1160
1160 return [x for x in commit_ids]
1161 return [x for x in commit_ids]
1161 return _node_history(context_uid, repo_id, commit_id, path, limit)
1162 return _node_history(context_uid, repo_id, commit_id, path, limit)
1162
1163
1163 @reraise_safe_exceptions
1164 @reraise_safe_exceptions
1164 def node_annotate_legacy(self, wire, commit_id, path):
1165 def node_annotate_legacy(self, wire, commit_id, path):
1165 # note: replaced by the pygit2 implementation
1166 # note: replaced by the pygit2 implementation
1166 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1167 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1167 # -l ==> outputs long shas (and we need all 40 characters)
1168 # -l ==> outputs long shas (and we need all 40 characters)
1168 # --root ==> doesn't put '^' character for boundaries
1169 # --root ==> doesn't put '^' character for boundaries
1169 # -r commit_id ==> blames for the given commit
1170 # -r commit_id ==> blames for the given commit
1170 output, __ = self.run_git_command(wire, cmd)
1171 output, __ = self.run_git_command(wire, cmd)
1171
1172
1172 result = []
1173 result = []
1173 for i, blame_line in enumerate(output.splitlines()[:-1]):
1174 for i, blame_line in enumerate(output.splitlines()[:-1]):
1174 line_no = i + 1
1175 line_no = i + 1
1175 blame_commit_id, line = re.split(rb' ', blame_line, 1)
1176 blame_commit_id, line = re.split(rb' ', blame_line, 1)
1176 result.append((line_no, blame_commit_id, line))
1177 result.append((line_no, blame_commit_id, line))
1177
1178
1178 return result
1179 return result
1179
1180
1180 @reraise_safe_exceptions
1181 @reraise_safe_exceptions
1181 def node_annotate(self, wire, commit_id, path):
1182 def node_annotate(self, wire, commit_id, path):
1182
1183
1183 result_libgit = []
1184 result_libgit = []
1184 repo_init = self._factory.repo_libgit2(wire)
1185 repo_init = self._factory.repo_libgit2(wire)
1185 with repo_init as repo:
1186 with repo_init as repo:
1186 commit = repo[commit_id]
1187 commit = repo[commit_id]
1187 blame_obj = repo.blame(path, newest_commit=commit_id)
1188 blame_obj = repo.blame(path, newest_commit=commit_id)
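# Blame.for_line() expects a 1-based line number (hence line_no = i + 1 below) and
# returns the hunk whose final_commit_id is the commit that last touched that line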
1188 for i, line in enumerate(commit.tree[path].data.splitlines()):
1189 for i, line in enumerate(commit.tree[path].data.splitlines()):
1189 line_no = i + 1
1190 line_no = i + 1
1190 hunk = blame_obj.for_line(line_no)
1191 hunk = blame_obj.for_line(line_no)
1191 blame_commit_id = hunk.final_commit_id.hex
1192 blame_commit_id = hunk.final_commit_id.hex
1192
1193
1193 result_libgit.append((line_no, blame_commit_id, line))
1194 result_libgit.append((line_no, blame_commit_id, line))
1194
1195
1195 return result_libgit
1196 return result_libgit
1196
1197
1197 @reraise_safe_exceptions
1198 @reraise_safe_exceptions
1198 def update_server_info(self, wire):
1199 def update_server_info(self, wire):
1199 repo = self._factory.repo(wire)
1200 repo = self._factory.repo(wire)
1200 update_server_info(repo)
1201 update_server_info(repo)
1201
1202
1202 @reraise_safe_exceptions
1203 @reraise_safe_exceptions
1203 def get_all_commit_ids(self, wire):
1204 def get_all_commit_ids(self, wire):
1204
1205
1205 cache_on, context_uid, repo_id = self._cache_on(wire)
1206 cache_on, context_uid, repo_id = self._cache_on(wire)
1206 region = self._region(wire)
1207 region = self._region(wire)
1207
1208
1208 @region.conditional_cache_on_arguments(condition=cache_on)
1209 @region.conditional_cache_on_arguments(condition=cache_on)
1209 def _get_all_commit_ids(_context_uid, _repo_id):
1210 def _get_all_commit_ids(_context_uid, _repo_id):
1210
1211
1211 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1212 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
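# walk every commit reachable from any branch or tag, oldest first thanks to --reverse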
1212 try:
1213 try:
1213 output, __ = self.run_git_command(wire, cmd)
1214 output, __ = self.run_git_command(wire, cmd)
1214 return output.splitlines()
1215 return output.splitlines()
1215 except Exception:
1216 except Exception:
1216 # Can be raised for empty repositories
1217 # Can be raised for empty repositories
1217 return []
1218 return []
1218
1219
1219 @region.conditional_cache_on_arguments(condition=cache_on)
1220 @region.conditional_cache_on_arguments(condition=cache_on)
1220 def _get_all_commit_ids_pygit2(_context_uid, _repo_id):
1221 def _get_all_commit_ids_pygit2(_context_uid, _repo_id):
1221 repo_init = self._factory.repo_libgit2(wire)
1222 repo_init = self._factory.repo_libgit2(wire)
1222 from pygit2 import GIT_SORT_REVERSE, GIT_SORT_TIME, GIT_BRANCH_ALL
1223 from pygit2 import GIT_SORT_REVERSE, GIT_SORT_TIME, GIT_BRANCH_ALL
1223 results = []
1224 results = []
1224 with repo_init as repo:
1225 with repo_init as repo:
1225 for commit in repo.walk(repo.head.target, GIT_SORT_TIME | GIT_BRANCH_ALL | GIT_SORT_REVERSE):
1226 for commit in repo.walk(repo.head.target, GIT_SORT_TIME | GIT_BRANCH_ALL | GIT_SORT_REVERSE):
1226 results.append(commit.id.hex)
1227 results.append(commit.id.hex)
return results
1227 
1228 
1228 return _get_all_commit_ids(context_uid, repo_id)
1229 return _get_all_commit_ids(context_uid, repo_id)
1229
1230
1230 @reraise_safe_exceptions
1231 @reraise_safe_exceptions
1231 def run_git_command(self, wire, cmd, **opts):
1232 def run_git_command(self, wire, cmd, **opts):
1232 path = wire.get('path', None)
1233 path = wire.get('path', None)
1233
1234
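# opts prefixed with '_' are flags for this wrapper itself and are stripped before the
# remaining opts reach the subprocess: _bare skips the default '-c core.quotepath=false',
# _safe suppresses raising on failure, _copts appends extra '-c' git config options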
1234 if path and os.path.isdir(path):
1235 if path and os.path.isdir(path):
1235 opts['cwd'] = path
1236 opts['cwd'] = path
1236
1237
1237 if '_bare' in opts:
1238 if '_bare' in opts:
1238 _copts = []
1239 _copts = []
1239 del opts['_bare']
1240 del opts['_bare']
1240 else:
1241 else:
1241 _copts = ['-c', 'core.quotepath=false', ]
1242 _copts = ['-c', 'core.quotepath=false', ]
1242 safe_call = False
1243 safe_call = False
1243 if '_safe' in opts:
1244 if '_safe' in opts:
1244 # no exc on failure
1245 # no exc on failure
1245 del opts['_safe']
1246 del opts['_safe']
1246 safe_call = True
1247 safe_call = True
1247
1248
1248 if '_copts' in opts:
1249 if '_copts' in opts:
1249 _copts.extend(opts['_copts'] or [])
1250 _copts.extend(opts['_copts'] or [])
1250 del opts['_copts']
1251 del opts['_copts']
1251
1252
1252 gitenv = os.environ.copy()
1253 gitenv = os.environ.copy()
1253 gitenv.update(opts.pop('extra_env', {}))
1254 gitenv.update(opts.pop('extra_env', {}))
1254 # need to clean/unset GIT_DIR from the environment!
1255 # need to clean/unset GIT_DIR from the environment!
1255 if 'GIT_DIR' in gitenv:
1256 if 'GIT_DIR' in gitenv:
1256 del gitenv['GIT_DIR']
1257 del gitenv['GIT_DIR']
1257 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1258 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1258 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1259 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1259
1260
1260 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1261 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1261 _opts = {'env': gitenv, 'shell': False}
1262 _opts = {'env': gitenv, 'shell': False}
1262
1263
1263 proc = None
1264 proc = None
1264 try:
1265 try:
1265 _opts.update(opts)
1266 _opts.update(opts)
1266 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1267 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1267
1268
1268 return b''.join(proc), b''.join(proc.stderr)
1269 return b''.join(proc), b''.join(proc.stderr)
1269 except OSError as err:
1270 except OSError as err:
1270 cmd = ' '.join(map(safe_str, cmd)) # human friendly CMD
1271 cmd = ' '.join(map(safe_str, cmd)) # human friendly CMD
1271 tb_err = ("Couldn't run git command (%s).\n"
1272 tb_err = ("Couldn't run git command (%s).\n"
1272 "Original error was:%s\n"
1273 "Original error was:%s\n"
1273 "Call options:%s\n"
1274 "Call options:%s\n"
1274 % (cmd, err, _opts))
1275 % (cmd, err, _opts))
1275 log.exception(tb_err)
1276 log.exception(tb_err)
1276 if safe_call:
1277 if safe_call:
1277 return '', err
1278 return '', err
1278 else:
1279 else:
1279 raise exceptions.VcsException()(tb_err)
1280 raise exceptions.VcsException()(tb_err)
1280 finally:
1281 finally:
1281 if proc:
1282 if proc:
1282 proc.close()
1283 proc.close()
1283
1284
1284 @reraise_safe_exceptions
1285 @reraise_safe_exceptions
1285 def install_hooks(self, wire, force=False):
1286 def install_hooks(self, wire, force=False):
1286 from vcsserver.hook_utils import install_git_hooks
1287 from vcsserver.hook_utils import install_git_hooks
1287 bare = self.bare(wire)
1288 bare = self.bare(wire)
1288 path = wire['path']
1289 path = wire['path']
1289 return install_git_hooks(path, bare, force_create=force)
1290 return install_git_hooks(path, bare, force_create=force)
1290
1291
1291 @reraise_safe_exceptions
1292 @reraise_safe_exceptions
1292 def get_hooks_info(self, wire):
1293 def get_hooks_info(self, wire):
1293 from vcsserver.hook_utils import (
1294 from vcsserver.hook_utils import (
1294 get_git_pre_hook_version, get_git_post_hook_version)
1295 get_git_pre_hook_version, get_git_post_hook_version)
1295 bare = self.bare(wire)
1296 bare = self.bare(wire)
1296 path = wire['path']
1297 path = wire['path']
1297 return {
1298 return {
1298 'pre_version': get_git_pre_hook_version(path, bare),
1299 'pre_version': get_git_pre_hook_version(path, bare),
1299 'post_version': get_git_post_hook_version(path, bare),
1300 'post_version': get_git_post_hook_version(path, bare),
1300 }
1301 }
1301
1302
1302 @reraise_safe_exceptions
1303 @reraise_safe_exceptions
1303 def set_head_ref(self, wire, head_name):
1304 def set_head_ref(self, wire, head_name):
1304 log.debug('Setting refs/head to `%s`', head_name)
1305 log.debug('Setting refs/head to `%s`', head_name)
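# 'git symbolic-ref HEAD refs/heads/<name>' repoints the repository's default branch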
1305 cmd = ['symbolic-ref', '"HEAD"', '"refs/heads/%s"' % head_name]
1306 cmd = ['symbolic-ref', '"HEAD"', '"refs/heads/%s"' % head_name]
1306 output, __ = self.run_git_command(wire, cmd)
1307 output, __ = self.run_git_command(wire, cmd)
1307 return [head_name] + output.splitlines()
1308 return [head_name] + output.splitlines()
1308
1309
1309 @reraise_safe_exceptions
1310 @reraise_safe_exceptions
1310 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1311 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1311 archive_dir_name, commit_id):
1312 archive_dir_name, commit_id):
1312
1313
1313 def file_walker(_commit_id, path):
1314 def file_walker(_commit_id, path):
1314 repo_init = self._factory.repo_libgit2(wire)
1315 repo_init = self._factory.repo_libgit2(wire)
1315
1316
1316 with repo_init as repo:
1317 with repo_init as repo:
1317 commit = repo[commit_id]
1318 commit = repo[commit_id]
1318
1319
1319 if path in ['', '/']:
1320 if path in ['', '/']:
1320 tree = commit.tree
1321 tree = commit.tree
1321 else:
1322 else:
1322 tree = commit.tree[path.rstrip('/')]
1323 tree = commit.tree[path.rstrip('/')]
1323 tree_id = tree.id.hex
1324 tree_id = tree.id.hex
1324 try:
1325 try:
1325 tree = repo[tree_id]
1326 tree = repo[tree_id]
1326 except KeyError:
1327 except KeyError:
1327 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1328 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1328
1329
1329 index = LibGit2Index.Index()
1330 index = LibGit2Index.Index()
1330 index.read_tree(tree)
1331 index.read_tree(tree)
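# read_tree() loads the tree recursively into the in-memory index, so iterating it
# yields a flat list of file entries with their paths and modes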
1331 file_iter = index
1332 file_iter = index
1332
1333
1333 for fn in file_iter:
1334 for fn in file_iter:
1334 file_path = fn.path
1335 file_path = fn.path
1335 mode = fn.mode
1336 mode = fn.mode
1336 is_link = stat.S_ISLNK(mode)
1337 is_link = stat.S_ISLNK(mode)
1337 if mode == pygit2.GIT_FILEMODE_COMMIT:
1338 if mode == pygit2.GIT_FILEMODE_COMMIT:
1338 log.debug('Skipping path %s as a commit node', file_path)
1339 log.debug('Skipping path %s as a commit node', file_path)
1339 continue
1340 continue
1340 yield ArchiveNode(file_path, mode, is_link, repo[fn.hex].read_raw)
1341 yield ArchiveNode(file_path, mode, is_link, repo[fn.hex].read_raw)
1341
1342
1342 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1343 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1343 archive_dir_name, commit_id)
1344 archive_dir_name, commit_id)
@@ -1,1086 +1,1087 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import urllib.request, urllib.parse, urllib.error
21 import urllib.request, urllib.parse, urllib.error
22 import urllib.request, urllib.error, urllib.parse
22 import urllib.request, urllib.error, urllib.parse
23 import traceback
23 import traceback
24
24
25 from hgext import largefiles, rebase, purge
25 from hgext import largefiles, rebase, purge
26
26
27 from mercurial import commands
27 from mercurial import commands
28 from mercurial import unionrepo
28 from mercurial import unionrepo
29 from mercurial import verify
29 from mercurial import verify
30 from mercurial import repair
30 from mercurial import repair
31
31
32 import vcsserver
32 import vcsserver
33 from vcsserver import exceptions
33 from vcsserver import exceptions
34 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, archive_repo, ArchiveNode
34 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, archive_repo, ArchiveNode
35 from vcsserver.hgcompat import (
35 from vcsserver.hgcompat import (
36 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
36 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
37 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
37 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
38 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
38 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
39 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
39 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
40 RepoLookupError, InterventionRequired, RequirementError,
40 RepoLookupError, InterventionRequired, RequirementError,
41 alwaysmatcher, patternmatcher, hgutil, hgext_strip)
41 alwaysmatcher, patternmatcher, hgutil, hgext_strip)
42 from vcsserver.str_utils import ascii_bytes, ascii_str, safe_str, safe_bytes
42 from vcsserver.str_utils import ascii_bytes, ascii_str, safe_str, safe_bytes
43 from vcsserver.vcs_base import RemoteBase
43 from vcsserver.vcs_base import RemoteBase
44
44
45 log = logging.getLogger(__name__)
45 log = logging.getLogger(__name__)
46
46
47
47
48 def make_ui_from_config(repo_config):
48 def make_ui_from_config(repo_config):
49
49
50 class LoggingUI(ui.ui):
50 class LoggingUI(ui.ui):
51
51
52 def status(self, *msg, **opts):
52 def status(self, *msg, **opts):
53 str_msg = map(safe_str, msg)
53 str_msg = map(safe_str, msg)
54 log.info(' '.join(str_msg).rstrip('\n'))
54 log.info(' '.join(str_msg).rstrip('\n'))
55 #super(LoggingUI, self).status(*msg, **opts)
55 #super(LoggingUI, self).status(*msg, **opts)
56
56
57 def warn(self, *msg, **opts):
57 def warn(self, *msg, **opts):
58 str_msg = map(safe_str, msg)
58 str_msg = map(safe_str, msg)
59 log.warning('ui_logger:'+' '.join(str_msg).rstrip('\n'))
59 log.warning('ui_logger:'+' '.join(str_msg).rstrip('\n'))
60 #super(LoggingUI, self).warn(*msg, **opts)
60 #super(LoggingUI, self).warn(*msg, **opts)
61
61
62 def error(self, *msg, **opts):
62 def error(self, *msg, **opts):
63 str_msg = map(safe_str, msg)
63 str_msg = map(safe_str, msg)
64 log.error('ui_logger:'+' '.join(str_msg).rstrip('\n'))
64 log.error('ui_logger:'+' '.join(str_msg).rstrip('\n'))
65 #super(LoggingUI, self).error(*msg, **opts)
65 #super(LoggingUI, self).error(*msg, **opts)
66
66
67 def note(self, *msg, **opts):
67 def note(self, *msg, **opts):
68 str_msg = map(safe_str, msg)
68 str_msg = map(safe_str, msg)
69 log.info('ui_logger:'+' '.join(str_msg).rstrip('\n'))
69 log.info('ui_logger:'+' '.join(str_msg).rstrip('\n'))
70 #super(LoggingUI, self).note(*msg, **opts)
70 #super(LoggingUI, self).note(*msg, **opts)
71
71
72 def debug(self, *msg, **opts):
72 def debug(self, *msg, **opts):
73 str_msg = map(safe_str, msg)
73 str_msg = map(safe_str, msg)
74 log.debug('ui_logger:'+' '.join(str_msg).rstrip('\n'))
74 log.debug('ui_logger:'+' '.join(str_msg).rstrip('\n'))
75 #super(LoggingUI, self).debug(*msg, **opts)
75 #super(LoggingUI, self).debug(*msg, **opts)
76
76
77 baseui = LoggingUI()
77 baseui = LoggingUI()
78
78
79 # clean the baseui object
79 # clean the baseui object
80 baseui._ocfg = hgconfig.config()
80 baseui._ocfg = hgconfig.config()
81 baseui._ucfg = hgconfig.config()
81 baseui._ucfg = hgconfig.config()
82 baseui._tcfg = hgconfig.config()
82 baseui._tcfg = hgconfig.config()
83
83
84 for section, option, value in repo_config:
84 for section, option, value in repo_config:
85 baseui.setconfig(ascii_bytes(section), ascii_bytes(option), ascii_bytes(value))
85 baseui.setconfig(ascii_bytes(section), ascii_bytes(option), ascii_bytes(value))
86
86
87 # make our hgweb quiet so it doesn't print output
87 # make our hgweb quiet so it doesn't print output
88 baseui.setconfig(b'ui', b'quiet', b'true')
88 baseui.setconfig(b'ui', b'quiet', b'true')
89
89
90 baseui.setconfig(b'ui', b'paginate', b'never')
90 baseui.setconfig(b'ui', b'paginate', b'never')
91 # for better error reporting from Mercurial
91 # for better error reporting from Mercurial
92 baseui.setconfig(b'ui', b'message-output', b'stderr')
92 baseui.setconfig(b'ui', b'message-output', b'stderr')
93
93
94 # force mercurial to only use 1 thread, otherwise it may try to set a
94 # force mercurial to only use 1 thread, otherwise it may try to set a
95 # signal in a non-main thread, thus generating a ValueError.
95 # signal in a non-main thread, thus generating a ValueError.
96 baseui.setconfig(b'worker', b'numcpus', 1)
96 baseui.setconfig(b'worker', b'numcpus', 1)
97
97
98 # If there is no config for the largefiles extension, we explicitly disable
98 # If there is no config for the largefiles extension, we explicitly disable
99 # it here. This overrides settings from the repository's hgrc file. Recent
99 # it here. This overrides settings from the repository's hgrc file. Recent
100 # mercurial versions enable largefiles in hgrc on clone from a largefiles
100 # mercurial versions enable largefiles in hgrc on clone from a largefiles
101 # repo.
101 # repo.
102 if not baseui.hasconfig(b'extensions', b'largefiles'):
102 if not baseui.hasconfig(b'extensions', b'largefiles'):
103 log.debug('Explicitly disable largefiles extension for repo.')
103 log.debug('Explicitly disable largefiles extension for repo.')
104 baseui.setconfig(b'extensions', b'largefiles', b'!')
104 baseui.setconfig(b'extensions', b'largefiles', b'!')
105
105
106 return baseui
106 return baseui
107
107
108
108
109 def reraise_safe_exceptions(func):
109 def reraise_safe_exceptions(func):
110 """Decorator for converting mercurial exceptions to something neutral."""
110 """Decorator for converting mercurial exceptions to something neutral."""
111
111
112 def wrapper(*args, **kwargs):
112 def wrapper(*args, **kwargs):
113 try:
113 try:
114 return func(*args, **kwargs)
114 return func(*args, **kwargs)
115 except (Abort, InterventionRequired) as e:
115 except (Abort, InterventionRequired) as e:
116 raise_from_original(exceptions.AbortException(e), e)
116 raise_from_original(exceptions.AbortException(e), e)
117 except RepoLookupError as e:
117 except RepoLookupError as e:
118 raise_from_original(exceptions.LookupException(e), e)
118 raise_from_original(exceptions.LookupException(e), e)
119 except RequirementError as e:
119 except RequirementError as e:
120 raise_from_original(exceptions.RequirementException(e), e)
120 raise_from_original(exceptions.RequirementException(e), e)
121 except RepoError as e:
121 except RepoError as e:
122 raise_from_original(exceptions.VcsException(e), e)
122 raise_from_original(exceptions.VcsException(e), e)
123 except LookupError as e:
123 except LookupError as e:
124 raise_from_original(exceptions.LookupException(e), e)
124 raise_from_original(exceptions.LookupException(e), e)
125 except Exception as e:
125 except Exception as e:
126 if not hasattr(e, '_vcs_kind'):
126 if not hasattr(e, '_vcs_kind'):
127 log.exception("Unhandled exception in hg remote call")
127 log.exception("Unhandled exception in hg remote call")
128 raise_from_original(exceptions.UnhandledException(e), e)
128 raise_from_original(exceptions.UnhandledException(e), e)
129
129
130 raise
130 raise
131 return wrapper
131 return wrapper
132
132
133
133
134 class MercurialFactory(RepoFactory):
134 class MercurialFactory(RepoFactory):
135 repo_type = 'hg'
135 repo_type = 'hg'
136
136
137 def _create_config(self, config, hooks=True):
137 def _create_config(self, config, hooks=True):
138 if not hooks:
138 if not hooks:
139 hooks_to_clean = frozenset((
139 hooks_to_clean = frozenset((
140 'changegroup.repo_size', 'preoutgoing.pre_pull',
140 'changegroup.repo_size', 'preoutgoing.pre_pull',
141 'outgoing.pull_logger', 'prechangegroup.pre_push'))
141 'outgoing.pull_logger', 'prechangegroup.pre_push'))
142 new_config = []
142 new_config = []
143 for section, option, value in config:
143 for section, option, value in config:
144 if section == 'hooks' and option in hooks_to_clean:
144 if section == 'hooks' and option in hooks_to_clean:
145 continue
145 continue
146 new_config.append((section, option, value))
146 new_config.append((section, option, value))
147 config = new_config
147 config = new_config
148
148
149 baseui = make_ui_from_config(config)
149 baseui = make_ui_from_config(config)
150 return baseui
150 return baseui
151
151
152 def _create_repo(self, wire, create):
152 def _create_repo(self, wire, create):
153 baseui = self._create_config(wire["config"])
153 baseui = self._create_config(wire["config"])
154 return instance(baseui, ascii_bytes(wire["path"]), create)
154 return instance(baseui, ascii_bytes(wire["path"]), create)
155
155
156 def repo(self, wire, create=False):
156 def repo(self, wire, create=False):
157 """
157 """
158 Get a repository instance for the given path.
158 Get a repository instance for the given path.
159 """
159 """
160 return self._create_repo(wire, create)
160 return self._create_repo(wire, create)
161
161
162
162
163 def patch_ui_message_output(baseui):
163 def patch_ui_message_output(baseui):
164 baseui.setconfig(b'ui', b'quiet', b'false')
164 baseui.setconfig(b'ui', b'quiet', b'false')
165 output = io.BytesIO()
165 output = io.BytesIO()
166
166
167 def write(data, **unused_kwargs):
167 def write(data, **unused_kwargs):
168 output.write(data)
168 output.write(data)
169
169
170 baseui.status = write
170 baseui.status = write
171 baseui.write = write
171 baseui.write = write
172 baseui.warn = write
172 baseui.warn = write
173 baseui.debug = write
173 baseui.debug = write
174
174
175 return baseui, output
175 return baseui, output
176
176
177
177
178 class HgRemote(RemoteBase):
178 class HgRemote(RemoteBase):
179
179
180 def __init__(self, factory):
180 def __init__(self, factory):
181 self._factory = factory
181 self._factory = factory
182 self._bulk_methods = {
182 self._bulk_methods = {
183 "affected_files": self.ctx_files,
183 "affected_files": self.ctx_files,
184 "author": self.ctx_user,
184 "author": self.ctx_user,
185 "branch": self.ctx_branch,
185 "branch": self.ctx_branch,
186 "children": self.ctx_children,
186 "children": self.ctx_children,
187 "date": self.ctx_date,
187 "date": self.ctx_date,
188 "message": self.ctx_description,
188 "message": self.ctx_description,
189 "parents": self.ctx_parents,
189 "parents": self.ctx_parents,
190 "status": self.ctx_status,
190 "status": self.ctx_status,
191 "obsolete": self.ctx_obsolete,
191 "obsolete": self.ctx_obsolete,
192 "phase": self.ctx_phase,
192 "phase": self.ctx_phase,
193 "hidden": self.ctx_hidden,
193 "hidden": self.ctx_hidden,
194 "_file_paths": self.ctx_list,
194 "_file_paths": self.ctx_list,
195 }
195 }
196
196
197 def _get_ctx(self, repo, ref):
197 def _get_ctx(self, repo, ref):
198 return get_ctx(repo, ref)
198 return get_ctx(repo, ref)
199
199
200 @reraise_safe_exceptions
200 @reraise_safe_exceptions
201 def discover_hg_version(self):
201 def discover_hg_version(self):
202 from mercurial import util
202 from mercurial import util
203 return safe_str(util.version())
203 return safe_str(util.version())
204
204
205 @reraise_safe_exceptions
205 @reraise_safe_exceptions
206 def is_empty(self, wire):
206 def is_empty(self, wire):
207 repo = self._factory.repo(wire)
207 repo = self._factory.repo(wire)
208
208
209 try:
209 try:
210 return len(repo) == 0
210 return len(repo) == 0
211 except Exception:
211 except Exception:
212 log.exception("failed to read object_store")
212 log.exception("failed to read object_store")
213 return False
213 return False
214
214
215 @reraise_safe_exceptions
215 @reraise_safe_exceptions
216 def bookmarks(self, wire):
216 def bookmarks(self, wire):
217 cache_on, context_uid, repo_id = self._cache_on(wire)
217 cache_on, context_uid, repo_id = self._cache_on(wire)
218 region = self._region(wire)
218 region = self._region(wire)
219
219
220 @region.conditional_cache_on_arguments(condition=cache_on)
220 @region.conditional_cache_on_arguments(condition=cache_on)
221 def _bookmarks(_context_uid, _repo_id):
221 def _bookmarks(_context_uid, _repo_id):
222 repo = self._factory.repo(wire)
222 repo = self._factory.repo(wire)
223 return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo._bookmarks.items()}
223 return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo._bookmarks.items()}
224
224
225 return _bookmarks(context_uid, repo_id)
225 return _bookmarks(context_uid, repo_id)
226
226
227 @reraise_safe_exceptions
227 @reraise_safe_exceptions
228 def branches(self, wire, normal, closed):
228 def branches(self, wire, normal, closed):
229 cache_on, context_uid, repo_id = self._cache_on(wire)
229 cache_on, context_uid, repo_id = self._cache_on(wire)
230 region = self._region(wire)
230 region = self._region(wire)
231
231
232 @region.conditional_cache_on_arguments(condition=cache_on)
232 @region.conditional_cache_on_arguments(condition=cache_on)
233 def _branches(_context_uid, _repo_id, _normal, _closed):
233 def _branches(_context_uid, _repo_id, _normal, _closed):
234 repo = self._factory.repo(wire)
234 repo = self._factory.repo(wire)
235 iter_branches = repo.branchmap().iterbranches()
235 iter_branches = repo.branchmap().iterbranches()
236 bt = {}
236 bt = {}
237 for branch_name, _heads, tip_node, is_closed in iter_branches:
237 for branch_name, _heads, tip_node, is_closed in iter_branches:
238 if normal and not is_closed:
238 if normal and not is_closed:
239 bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
239 bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
240 if closed and is_closed:
240 if closed and is_closed:
241 bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
241 bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
242
242
243 return bt
243 return bt
244
244
245 return _branches(context_uid, repo_id, normal, closed)
245 return _branches(context_uid, repo_id, normal, closed)
246
246
247 @reraise_safe_exceptions
247 @reraise_safe_exceptions
248 def bulk_request(self, wire, commit_id, pre_load):
248 def bulk_request(self, wire, commit_id, pre_load):
249 cache_on, context_uid, repo_id = self._cache_on(wire)
249 cache_on, context_uid, repo_id = self._cache_on(wire)
250 region = self._region(wire)
250 region = self._region(wire)
251
251
252 @region.conditional_cache_on_arguments(condition=cache_on)
252 @region.conditional_cache_on_arguments(condition=cache_on)
253 def _bulk_request(_repo_id, _commit_id, _pre_load):
253 def _bulk_request(_repo_id, _commit_id, _pre_load):
254 result = {}
254 result = {}
255 for attr in pre_load:
255 for attr in pre_load:
256 try:
256 try:
257 method = self._bulk_methods[attr]
257 method = self._bulk_methods[attr]
258 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
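# the combined result of _bulk_request is cached above; letting each per-attribute
# method cache its own call as well would store the same data twice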
258 result[attr] = method(wire, commit_id)
259 result[attr] = method(wire, commit_id)
259 except KeyError as e:
260 except KeyError as e:
260 raise exceptions.VcsException(e)(
261 raise exceptions.VcsException(e)(
261 'Unknown bulk attribute: "%s"' % attr)
262 'Unknown bulk attribute: "%s"' % attr)
262 return result
263 return result
263
264
264 return _bulk_request(repo_id, commit_id, sorted(pre_load))
265 return _bulk_request(repo_id, commit_id, sorted(pre_load))
265
266
266 @reraise_safe_exceptions
267 @reraise_safe_exceptions
267 def ctx_branch(self, wire, commit_id):
268 def ctx_branch(self, wire, commit_id):
268 cache_on, context_uid, repo_id = self._cache_on(wire)
269 cache_on, context_uid, repo_id = self._cache_on(wire)
269 region = self._region(wire)
270 region = self._region(wire)
270
271
271 @region.conditional_cache_on_arguments(condition=cache_on)
272 @region.conditional_cache_on_arguments(condition=cache_on)
272 def _ctx_branch(_repo_id, _commit_id):
273 def _ctx_branch(_repo_id, _commit_id):
273 repo = self._factory.repo(wire)
274 repo = self._factory.repo(wire)
274 ctx = self._get_ctx(repo, commit_id)
275 ctx = self._get_ctx(repo, commit_id)
275 return ctx.branch()
276 return ctx.branch()
276 return _ctx_branch(repo_id, commit_id)
277 return _ctx_branch(repo_id, commit_id)
277
278
278 @reraise_safe_exceptions
279 @reraise_safe_exceptions
279 def ctx_date(self, wire, commit_id):
280 def ctx_date(self, wire, commit_id):
280 cache_on, context_uid, repo_id = self._cache_on(wire)
281 cache_on, context_uid, repo_id = self._cache_on(wire)
281 region = self._region(wire)
282 region = self._region(wire)
282
283
283 @region.conditional_cache_on_arguments(condition=cache_on)
284 @region.conditional_cache_on_arguments(condition=cache_on)
284 def _ctx_date(_repo_id, _commit_id):
285 def _ctx_date(_repo_id, _commit_id):
285 repo = self._factory.repo(wire)
286 repo = self._factory.repo(wire)
286 ctx = self._get_ctx(repo, commit_id)
287 ctx = self._get_ctx(repo, commit_id)
287 return ctx.date()
288 return ctx.date()
288 return _ctx_date(repo_id, commit_id)
289 return _ctx_date(repo_id, commit_id)
289
290
290 @reraise_safe_exceptions
291 @reraise_safe_exceptions
291 def ctx_description(self, wire, revision):
292 def ctx_description(self, wire, revision):
292 repo = self._factory.repo(wire)
293 repo = self._factory.repo(wire)
293 ctx = self._get_ctx(repo, revision)
294 ctx = self._get_ctx(repo, revision)
294 return ctx.description()
295 return ctx.description()
295
296
296 @reraise_safe_exceptions
297 @reraise_safe_exceptions
297 def ctx_files(self, wire, commit_id):
298 def ctx_files(self, wire, commit_id):
298 cache_on, context_uid, repo_id = self._cache_on(wire)
299 cache_on, context_uid, repo_id = self._cache_on(wire)
299 region = self._region(wire)
300 region = self._region(wire)
300
301
301 @region.conditional_cache_on_arguments(condition=cache_on)
302 @region.conditional_cache_on_arguments(condition=cache_on)
302 def _ctx_files(_repo_id, _commit_id):
303 def _ctx_files(_repo_id, _commit_id):
303 repo = self._factory.repo(wire)
304 repo = self._factory.repo(wire)
304 ctx = self._get_ctx(repo, commit_id)
305 ctx = self._get_ctx(repo, commit_id)
305 return ctx.files()
306 return ctx.files()
306
307
307 return _ctx_files(repo_id, commit_id)
308 return _ctx_files(repo_id, commit_id)
308
309
309 @reraise_safe_exceptions
310 @reraise_safe_exceptions
310 def ctx_list(self, path, revision):
311 def ctx_list(self, path, revision):
311 repo = self._factory.repo(path)
312 repo = self._factory.repo(path)
312 ctx = self._get_ctx(repo, revision)
313 ctx = self._get_ctx(repo, revision)
313 return list(ctx)
314 return list(ctx)
314
315
315 @reraise_safe_exceptions
316 @reraise_safe_exceptions
316 def ctx_parents(self, wire, commit_id):
317 def ctx_parents(self, wire, commit_id):
317 cache_on, context_uid, repo_id = self._cache_on(wire)
318 cache_on, context_uid, repo_id = self._cache_on(wire)
318 region = self._region(wire)
319 region = self._region(wire)
319
320
320 @region.conditional_cache_on_arguments(condition=cache_on)
321 @region.conditional_cache_on_arguments(condition=cache_on)
321 def _ctx_parents(_repo_id, _commit_id):
322 def _ctx_parents(_repo_id, _commit_id):
322 repo = self._factory.repo(wire)
323 repo = self._factory.repo(wire)
323 ctx = self._get_ctx(repo, commit_id)
324 ctx = self._get_ctx(repo, commit_id)
324 return [parent.hex() for parent in ctx.parents()
325 return [parent.hex() for parent in ctx.parents()
325 if not (parent.hidden() or parent.obsolete())]
326 if not (parent.hidden() or parent.obsolete())]
326
327
327 return _ctx_parents(repo_id, commit_id)
328 return _ctx_parents(repo_id, commit_id)
328
329
329 @reraise_safe_exceptions
330 @reraise_safe_exceptions
330 def ctx_children(self, wire, commit_id):
331 def ctx_children(self, wire, commit_id):
331 cache_on, context_uid, repo_id = self._cache_on(wire)
332 cache_on, context_uid, repo_id = self._cache_on(wire)
332 region = self._region(wire)
333 region = self._region(wire)
333
334
334 @region.conditional_cache_on_arguments(condition=cache_on)
335 @region.conditional_cache_on_arguments(condition=cache_on)
335 def _ctx_children(_repo_id, _commit_id):
336 def _ctx_children(_repo_id, _commit_id):
336 repo = self._factory.repo(wire)
337 repo = self._factory.repo(wire)
337 ctx = self._get_ctx(repo, commit_id)
338 ctx = self._get_ctx(repo, commit_id)
338 return [child.hex() for child in ctx.children()
339 return [child.hex() for child in ctx.children()
339 if not (child.hidden() or child.obsolete())]
340 if not (child.hidden() or child.obsolete())]
340
341
341 return _ctx_children(repo_id, commit_id)
342 return _ctx_children(repo_id, commit_id)
342
343
343 @reraise_safe_exceptions
344 @reraise_safe_exceptions
344 def ctx_phase(self, wire, commit_id):
345 def ctx_phase(self, wire, commit_id):
345 cache_on, context_uid, repo_id = self._cache_on(wire)
346 cache_on, context_uid, repo_id = self._cache_on(wire)
346 region = self._region(wire)
347 region = self._region(wire)
347
348
348 @region.conditional_cache_on_arguments(condition=cache_on)
349 @region.conditional_cache_on_arguments(condition=cache_on)
349 def _ctx_phase(_context_uid, _repo_id, _commit_id):
350 def _ctx_phase(_context_uid, _repo_id, _commit_id):
350 repo = self._factory.repo(wire)
351 repo = self._factory.repo(wire)
351 ctx = self._get_ctx(repo, commit_id)
352 ctx = self._get_ctx(repo, commit_id)
352 # public=0, draft=1, secret=2
353 # public=0, draft=1, secret=2
353 return ctx.phase()
354 return ctx.phase()
354 return _ctx_phase(context_uid, repo_id, commit_id)
355 return _ctx_phase(context_uid, repo_id, commit_id)
355
356
356 @reraise_safe_exceptions
357 @reraise_safe_exceptions
357 def ctx_obsolete(self, wire, commit_id):
358 def ctx_obsolete(self, wire, commit_id):
358 cache_on, context_uid, repo_id = self._cache_on(wire)
359 cache_on, context_uid, repo_id = self._cache_on(wire)
359 region = self._region(wire)
360 region = self._region(wire)
360
361
361 @region.conditional_cache_on_arguments(condition=cache_on)
362 @region.conditional_cache_on_arguments(condition=cache_on)
362 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
363 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
363 repo = self._factory.repo(wire)
364 repo = self._factory.repo(wire)
364 ctx = self._get_ctx(repo, commit_id)
365 ctx = self._get_ctx(repo, commit_id)
365 return ctx.obsolete()
366 return ctx.obsolete()
366 return _ctx_obsolete(context_uid, repo_id, commit_id)
367 return _ctx_obsolete(context_uid, repo_id, commit_id)
367
368
368 @reraise_safe_exceptions
369 @reraise_safe_exceptions
369 def ctx_hidden(self, wire, commit_id):
370 def ctx_hidden(self, wire, commit_id):
370 cache_on, context_uid, repo_id = self._cache_on(wire)
371 cache_on, context_uid, repo_id = self._cache_on(wire)
371 region = self._region(wire)
372 region = self._region(wire)
372
373
373 @region.conditional_cache_on_arguments(condition=cache_on)
374 @region.conditional_cache_on_arguments(condition=cache_on)
374 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
375 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
375 repo = self._factory.repo(wire)
376 repo = self._factory.repo(wire)
376 ctx = self._get_ctx(repo, commit_id)
377 ctx = self._get_ctx(repo, commit_id)
377 return ctx.hidden()
378 return ctx.hidden()
378 return _ctx_hidden(context_uid, repo_id, commit_id)
379 return _ctx_hidden(context_uid, repo_id, commit_id)
379
380
380 @reraise_safe_exceptions
381 @reraise_safe_exceptions
381 def ctx_substate(self, wire, revision):
382 def ctx_substate(self, wire, revision):
382 repo = self._factory.repo(wire)
383 repo = self._factory.repo(wire)
383 ctx = self._get_ctx(repo, revision)
384 ctx = self._get_ctx(repo, revision)
384 return ctx.substate
385 return ctx.substate
385
386
386 @reraise_safe_exceptions
387 @reraise_safe_exceptions
387 def ctx_status(self, wire, revision):
388 def ctx_status(self, wire, revision):
388 repo = self._factory.repo(wire)
389 repo = self._factory.repo(wire)
389 ctx = self._get_ctx(repo, revision)
390 ctx = self._get_ctx(repo, revision)
390 status = repo[ctx.p1().node()].status(other=ctx.node())
391 status = repo[ctx.p1().node()].status(other=ctx.node())
391 # the status object (an odd, custom named tuple in mercurial) is not
392 # the status object (an odd, custom named tuple in mercurial) is not
392 # correctly serializable; we make it a list, as the underlying
393 # correctly serializable; we make it a list, as the underlying
393 # API expects this to be a list
394 # API expects this to be a list
394 return list(status)
395 return list(status)
395
396
396 @reraise_safe_exceptions
397 @reraise_safe_exceptions
397 def ctx_user(self, wire, revision):
398 def ctx_user(self, wire, revision):
398 repo = self._factory.repo(wire)
399 repo = self._factory.repo(wire)
399 ctx = self._get_ctx(repo, revision)
400 ctx = self._get_ctx(repo, revision)
400 return ctx.user()
401 return ctx.user()
401
402
402 @reraise_safe_exceptions
403 @reraise_safe_exceptions
403 def check_url(self, url, config):
404 def check_url(self, url, config):
404 _proto = None
405 _proto = None
405 if '+' in url[:url.find('://')]:
406 if '+' in url[:url.find('://')]:
406 _proto = url[0:url.find('+')]
407 _proto = url[0:url.find('+')]
407 url = url[url.find('+') + 1:]
408 url = url[url.find('+') + 1:]
408 handlers = []
409 handlers = []
409 url_obj = url_parser(url)
410 url_obj = url_parser(url)
410 test_uri, authinfo = url_obj.authinfo()
411 test_uri, authinfo = url_obj.authinfo()
411 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
412 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
412 url_obj.query = obfuscate_qs(url_obj.query)
413 url_obj.query = obfuscate_qs(url_obj.query)
413
414
414 cleaned_uri = str(url_obj)
415 cleaned_uri = str(url_obj)
415 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
416 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
416
417
417 if authinfo:
418 if authinfo:
418 # create a password manager
419 # create a password manager
419 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
420 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
420 passmgr.add_password(*authinfo)
421 passmgr.add_password(*authinfo)
421
422
422 handlers.extend((httpbasicauthhandler(passmgr),
423 handlers.extend((httpbasicauthhandler(passmgr),
423 httpdigestauthhandler(passmgr)))
424 httpdigestauthhandler(passmgr)))
424
425
425 o = urllib.request.build_opener(*handlers)
426 o = urllib.request.build_opener(*handlers)
426 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
427 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
427 ('Accept', 'application/mercurial-0.1')]
428 ('Accept', 'application/mercurial-0.1')]
428
429
429 q = {"cmd": 'between'}
430 q = {"cmd": 'between'}
430 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
431 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
431 qs = '?%s' % urllib.parse.urlencode(q)
432 qs = '?%s' % urllib.parse.urlencode(q)
432 cu = "%s%s" % (test_uri, qs)
433 cu = "%s%s" % (test_uri, qs)
433 req = urllib.request.Request(cu, None, {})
434 req = urllib.request.Request(cu, None, {})
434
435
435 try:
436 try:
436 log.debug("Trying to open URL %s", cleaned_uri)
437 log.debug("Trying to open URL %s", cleaned_uri)
437 resp = o.open(req)
438 resp = o.open(req)
438 if resp.code != 200:
439 if resp.code != 200:
439 raise exceptions.URLError()('Return Code is not 200')
440 raise exceptions.URLError()('Return Code is not 200')
440 except Exception as e:
441 except Exception as e:
441 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
442 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
442 # means it cannot be cloned
443 # means it cannot be cloned
443 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
444 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
444
445
445 # now check if it's a proper hg repo, but don't do it for svn
446 # now check if it's a proper hg repo, but don't do it for svn
446 try:
447 try:
447 if _proto == 'svn':
448 if _proto == 'svn':
448 pass
449 pass
449 else:
450 else:
450 # check for pure hg repos
451 # check for pure hg repos
451 log.debug(
452 log.debug(
452 "Verifying if URL is a Mercurial repository: %s",
453 "Verifying if URL is a Mercurial repository: %s",
453 cleaned_uri)
454 cleaned_uri)
454 ui = make_ui_from_config(config)
455 ui = make_ui_from_config(config)
455 peer_checker = makepeer(ui, url)
456 peer_checker = makepeer(ui, url)
456 peer_checker.lookup('tip')
457 peer_checker.lookup('tip')
457 except Exception as e:
458 except Exception as e:
458 log.warning("URL is not a valid Mercurial repository: %s",
459 log.warning("URL is not a valid Mercurial repository: %s",
459 cleaned_uri)
460 cleaned_uri)
460 raise exceptions.URLError(e)(
461 raise exceptions.URLError(e)(
461 "url [%s] does not look like an hg repo org_exc: %s"
462 "url [%s] does not look like an hg repo org_exc: %s"
462 % (cleaned_uri, e))
463 % (cleaned_uri, e))
463
464
464 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
465 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
465 return True
466 return True
466
467
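For illustration, the capability probe that check_url issues is a plain GET against the repository URL with a 'between' command over the null-node pair; with a hypothetical remote the constructed URL looks like this:

    import urllib.parse

    test_uri = 'https://code.example.com/hg/myrepo'  # hypothetical remote
    q = {'cmd': 'between', 'pairs': '%s-%s' % ('0' * 40, '0' * 40)}
    probe_url = '%s?%s' % (test_uri, urllib.parse.urlencode(q))
    # the query carries cmd=between and two 40-character null hashes joined by '-'
    print(probe_url)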
467 @reraise_safe_exceptions
468 @reraise_safe_exceptions
468 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
469 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
469 repo = self._factory.repo(wire)
470 repo = self._factory.repo(wire)
470
471
471 if file_filter:
472 if file_filter:
472 match_filter = match(file_filter[0], '', [file_filter[1]])
473 match_filter = match(file_filter[0], '', [file_filter[1]])
473 else:
474 else:
474 match_filter = file_filter
475 match_filter = file_filter
475 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
476 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
476
477
477 try:
478 try:
478 diff_iter = patch.diff(
479 diff_iter = patch.diff(
479 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts)
480 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts)
480 return b"".join(diff_iter)
481 return b"".join(diff_iter)
481 except RepoLookupError as e:
482 except RepoLookupError as e:
482 raise exceptions.LookupException(e)()
483 raise exceptions.LookupException(e)()
483
484
484 @reraise_safe_exceptions
485 @reraise_safe_exceptions
485 def node_history(self, wire, revision, path, limit):
486 def node_history(self, wire, revision, path, limit):
486 cache_on, context_uid, repo_id = self._cache_on(wire)
487 cache_on, context_uid, repo_id = self._cache_on(wire)
487 region = self._region(wire)
488 region = self._region(wire)
488
489
489 @region.conditional_cache_on_arguments(condition=cache_on)
490 @region.conditional_cache_on_arguments(condition=cache_on)
490 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
491 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
491 repo = self._factory.repo(wire)
492 repo = self._factory.repo(wire)
492
493
493 ctx = self._get_ctx(repo, revision)
494 ctx = self._get_ctx(repo, revision)
494 fctx = ctx.filectx(safe_bytes(path))
495 fctx = ctx.filectx(safe_bytes(path))
495
496
496 def history_iter():
497 def history_iter():
497 limit_rev = fctx.rev()
498 limit_rev = fctx.rev()
498 for obj in reversed(list(fctx.filelog())):
499 for obj in reversed(list(fctx.filelog())):
499 obj = fctx.filectx(obj)
500 obj = fctx.filectx(obj)
500 ctx = obj.changectx()
501 ctx = obj.changectx()
501 if ctx.hidden() or ctx.obsolete():
502 if ctx.hidden() or ctx.obsolete():
502 continue
503 continue
503
504
504 if limit_rev >= obj.rev():
505 if limit_rev >= obj.rev():
505 yield obj
506 yield obj
506
507
507 history = []
508 history = []
508 for cnt, obj in enumerate(history_iter()):
509 for cnt, obj in enumerate(history_iter()):
509 if limit and cnt >= limit:
510 if limit and cnt >= limit:
510 break
511 break
511 history.append(hex(obj.node()))
512 history.append(hex(obj.node()))
512
513
513 return history
514 return history
514 return _node_history(context_uid, repo_id, revision, path, limit)
515 return _node_history(context_uid, repo_id, revision, path, limit)
515
516
516 @reraise_safe_exceptions
517 @reraise_safe_exceptions
517 def node_history_untill(self, wire, revision, path, limit):
518 def node_history_untill(self, wire, revision, path, limit):
518 cache_on, context_uid, repo_id = self._cache_on(wire)
519 cache_on, context_uid, repo_id = self._cache_on(wire)
519 region = self._region(wire)
520 region = self._region(wire)
520
521
521 @region.conditional_cache_on_arguments(condition=cache_on)
522 @region.conditional_cache_on_arguments(condition=cache_on)
522 def _node_history_until(_context_uid, _repo_id, _revision, _path, _limit):
523 def _node_history_until(_context_uid, _repo_id, _revision, _path, _limit):
523 repo = self._factory.repo(wire)
524 repo = self._factory.repo(wire)
524 ctx = self._get_ctx(repo, revision)
525 ctx = self._get_ctx(repo, revision)
525 fctx = ctx.filectx(safe_bytes(path))
526 fctx = ctx.filectx(safe_bytes(path))
526
527
527 file_log = list(fctx.filelog())
528 file_log = list(fctx.filelog())
528 if limit:
529 if limit:
529 # Limit to the last n items
530 # Limit to the last n items
530 file_log = file_log[-limit:]
531 file_log = file_log[-limit:]
531
532
532 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
533 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
533 return _node_history_until(context_uid, repo_id, revision, path, limit)
534 return _node_history_until(context_uid, repo_id, revision, path, limit)
534
535
535 @reraise_safe_exceptions
536 @reraise_safe_exceptions
536 def fctx_annotate(self, wire, revision, path):
537 def fctx_annotate(self, wire, revision, path):
537 repo = self._factory.repo(wire)
538 repo = self._factory.repo(wire)
538 ctx = self._get_ctx(repo, revision)
539 ctx = self._get_ctx(repo, revision)
539 fctx = ctx.filectx(safe_bytes(path))
540 fctx = ctx.filectx(safe_bytes(path))
540
541
541 result = []
542 result = []
542 for i, annotate_obj in enumerate(fctx.annotate(), 1):
543 for i, annotate_obj in enumerate(fctx.annotate(), 1):
543 ln_no = i
544 ln_no = i
544 sha = hex(annotate_obj.fctx.node())
545 sha = hex(annotate_obj.fctx.node())
545 content = annotate_obj.text
546 content = annotate_obj.text
546 result.append((ln_no, sha, content))
547 result.append((ln_no, sha, content))
547 return result
548 return result
548
549
549 @reraise_safe_exceptions
550 @reraise_safe_exceptions
550 def fctx_node_data(self, wire, revision, path):
551 def fctx_node_data(self, wire, revision, path):
551 repo = self._factory.repo(wire)
552 repo = self._factory.repo(wire)
552 ctx = self._get_ctx(repo, revision)
553 ctx = self._get_ctx(repo, revision)
553 fctx = ctx.filectx(safe_bytes(path))
554 fctx = ctx.filectx(safe_bytes(path))
554 return fctx.data()
555 return fctx.data()
555
556
556 @reraise_safe_exceptions
557 @reraise_safe_exceptions
557 def fctx_flags(self, wire, commit_id, path):
558 def fctx_flags(self, wire, commit_id, path):
558 cache_on, context_uid, repo_id = self._cache_on(wire)
559 cache_on, context_uid, repo_id = self._cache_on(wire)
559 region = self._region(wire)
560 region = self._region(wire)
560
561
561 @region.conditional_cache_on_arguments(condition=cache_on)
562 @region.conditional_cache_on_arguments(condition=cache_on)
562 def _fctx_flags(_repo_id, _commit_id, _path):
563 def _fctx_flags(_repo_id, _commit_id, _path):
563 repo = self._factory.repo(wire)
564 repo = self._factory.repo(wire)
564 ctx = self._get_ctx(repo, commit_id)
565 ctx = self._get_ctx(repo, commit_id)
565 fctx = ctx.filectx(safe_bytes(path))
566 fctx = ctx.filectx(safe_bytes(path))
566 return fctx.flags()
567 return fctx.flags()
567
568
568 return _fctx_flags(repo_id, commit_id, path)
569 return _fctx_flags(repo_id, commit_id, path)
569
570
570 @reraise_safe_exceptions
571 @reraise_safe_exceptions
571 def fctx_size(self, wire, commit_id, path):
572 def fctx_size(self, wire, commit_id, path):
572 cache_on, context_uid, repo_id = self._cache_on(wire)
573 cache_on, context_uid, repo_id = self._cache_on(wire)
573 region = self._region(wire)
574 region = self._region(wire)
574
575
575 @region.conditional_cache_on_arguments(condition=cache_on)
576 @region.conditional_cache_on_arguments(condition=cache_on)
576 def _fctx_size(_repo_id, _commit_id, _path):
577 def _fctx_size(_repo_id, _commit_id, _path):
577 repo = self._factory.repo(wire)
578 repo = self._factory.repo(wire)
578 ctx = self._get_ctx(repo, commit_id)
579 ctx = self._get_ctx(repo, commit_id)
579 fctx = ctx.filectx(safe_bytes(path))
580 fctx = ctx.filectx(safe_bytes(path))
580 return fctx.size()
581 return fctx.size()
581 return _fctx_size(repo_id, commit_id, path)
582 return _fctx_size(repo_id, commit_id, path)
582
583
583 @reraise_safe_exceptions
584 @reraise_safe_exceptions
584 def get_all_commit_ids(self, wire, name):
585 def get_all_commit_ids(self, wire, name):
585 cache_on, context_uid, repo_id = self._cache_on(wire)
586 cache_on, context_uid, repo_id = self._cache_on(wire)
586 region = self._region(wire)
587 region = self._region(wire)
587
588
588 @region.conditional_cache_on_arguments(condition=cache_on)
589 @region.conditional_cache_on_arguments(condition=cache_on)
589 def _get_all_commit_ids(_context_uid, _repo_id, _name):
590 def _get_all_commit_ids(_context_uid, _repo_id, _name):
590 repo = self._factory.repo(wire)
591 repo = self._factory.repo(wire)
591 revs = [ascii_str(repo[x].hex()) for x in repo.filtered(b'visible').changelog.revs()]
592 revs = [ascii_str(repo[x].hex()) for x in repo.filtered(b'visible').changelog.revs()]
592 return revs
593 return revs
593 return _get_all_commit_ids(context_uid, repo_id, name)
594 return _get_all_commit_ids(context_uid, repo_id, name)
594
595
595 @reraise_safe_exceptions
596 @reraise_safe_exceptions
596 def get_config_value(self, wire, section, name, untrusted=False):
597 def get_config_value(self, wire, section, name, untrusted=False):
597 repo = self._factory.repo(wire)
598 repo = self._factory.repo(wire)
598 return repo.ui.config(ascii_bytes(section), ascii_bytes(name), untrusted=untrusted)
599 return repo.ui.config(ascii_bytes(section), ascii_bytes(name), untrusted=untrusted)
599
600
600 @reraise_safe_exceptions
601 @reraise_safe_exceptions
601 def is_large_file(self, wire, commit_id, path):
602 def is_large_file(self, wire, commit_id, path):
602 cache_on, context_uid, repo_id = self._cache_on(wire)
603 cache_on, context_uid, repo_id = self._cache_on(wire)
603 region = self._region(wire)
604 region = self._region(wire)
604
605
605 @region.conditional_cache_on_arguments(condition=cache_on)
606 @region.conditional_cache_on_arguments(condition=cache_on)
606 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
607 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
607 return largefiles.lfutil.isstandin(safe_bytes(path))
608 return largefiles.lfutil.isstandin(safe_bytes(path))
608
609
609 return _is_large_file(context_uid, repo_id, commit_id, path)
610 return _is_large_file(context_uid, repo_id, commit_id, path)
610
611
611 @reraise_safe_exceptions
612 @reraise_safe_exceptions
612 def is_binary(self, wire, revision, path):
613 def is_binary(self, wire, revision, path):
613 cache_on, context_uid, repo_id = self._cache_on(wire)
614 cache_on, context_uid, repo_id = self._cache_on(wire)
614 region = self._region(wire)
615 region = self._region(wire)
615
616
616 @region.conditional_cache_on_arguments(condition=cache_on)
617 @region.conditional_cache_on_arguments(condition=cache_on)
617 def _is_binary(_repo_id, _sha, _path):
618 def _is_binary(_repo_id, _sha, _path):
618 repo = self._factory.repo(wire)
619 repo = self._factory.repo(wire)
619 ctx = self._get_ctx(repo, revision)
620 ctx = self._get_ctx(repo, revision)
620 fctx = ctx.filectx(safe_bytes(path))
621 fctx = ctx.filectx(safe_bytes(path))
621 return fctx.isbinary()
622 return fctx.isbinary()
622
623
623 return _is_binary(repo_id, revision, path)
624 return _is_binary(repo_id, revision, path)
624
625
625 @reraise_safe_exceptions
626 @reraise_safe_exceptions
626 def md5_hash(self, wire, revision, path):
627 def md5_hash(self, wire, revision, path):
627 cache_on, context_uid, repo_id = self._cache_on(wire)
628 cache_on, context_uid, repo_id = self._cache_on(wire)
628 region = self._region(wire)
629 region = self._region(wire)
629
630
630 @region.conditional_cache_on_arguments(condition=cache_on)
631 @region.conditional_cache_on_arguments(condition=cache_on)
631 def _md5_hash(_repo_id, _sha, _path):
632 def _md5_hash(_repo_id, _sha, _path):
632 repo = self._factory.repo(wire)
633 repo = self._factory.repo(wire)
633 ctx = self._get_ctx(repo, revision)
634 ctx = self._get_ctx(repo, revision)
634 fctx = ctx.filectx(safe_bytes(path))
635 fctx = ctx.filectx(safe_bytes(path))
635 return hashlib.md5(fctx.data()).hexdigest()
636 return hashlib.md5(fctx.data()).hexdigest()
636
637
637 return _md5_hash(repo_id, revision, path)
638 return _md5_hash(repo_id, revision, path)
638
639
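md5_hash above simply hashes the raw bytes returned by the file context; for example, with stand-in content:

    import hashlib

    content = b'print("hello")\n'  # stand-in for fctx.data()
    print(hashlib.md5(content).hexdigest())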
639 @reraise_safe_exceptions
640 @reraise_safe_exceptions
640 def in_largefiles_store(self, wire, sha):
641 def in_largefiles_store(self, wire, sha):
641 repo = self._factory.repo(wire)
642 repo = self._factory.repo(wire)
642 return largefiles.lfutil.instore(repo, sha)
643 return largefiles.lfutil.instore(repo, sha)
643
644
644 @reraise_safe_exceptions
645 @reraise_safe_exceptions
645 def in_user_cache(self, wire, sha):
646 def in_user_cache(self, wire, sha):
646 repo = self._factory.repo(wire)
647 repo = self._factory.repo(wire)
647 return largefiles.lfutil.inusercache(repo.ui, sha)
648 return largefiles.lfutil.inusercache(repo.ui, sha)
648
649
649 @reraise_safe_exceptions
650 @reraise_safe_exceptions
650 def store_path(self, wire, sha):
651 def store_path(self, wire, sha):
651 repo = self._factory.repo(wire)
652 repo = self._factory.repo(wire)
652 return largefiles.lfutil.storepath(repo, sha)
653 return largefiles.lfutil.storepath(repo, sha)
653
654
654 @reraise_safe_exceptions
655 @reraise_safe_exceptions
655 def link(self, wire, sha, path):
656 def link(self, wire, sha, path):
656 repo = self._factory.repo(wire)
657 repo = self._factory.repo(wire)
657 largefiles.lfutil.link(
658 largefiles.lfutil.link(
658 largefiles.lfutil.usercachepath(repo.ui, sha), path)
659 largefiles.lfutil.usercachepath(repo.ui, sha), path)
659
660
660 @reraise_safe_exceptions
661 @reraise_safe_exceptions
661 def localrepository(self, wire, create=False):
662 def localrepository(self, wire, create=False):
662 self._factory.repo(wire, create=create)
663 self._factory.repo(wire, create=create)
663
664
664 @reraise_safe_exceptions
665 @reraise_safe_exceptions
665 def lookup(self, wire, revision, both):
666 def lookup(self, wire, revision, both):
666 cache_on, context_uid, repo_id = self._cache_on(wire)
667 cache_on, context_uid, repo_id = self._cache_on(wire)
667 region = self._region(wire)
668 region = self._region(wire)
668
669
669 @region.conditional_cache_on_arguments(condition=cache_on)
670 @region.conditional_cache_on_arguments(condition=cache_on)
670 def _lookup(_context_uid, _repo_id, _revision, _both):
671 def _lookup(_context_uid, _repo_id, _revision, _both):
671
672
672 repo = self._factory.repo(wire)
673 repo = self._factory.repo(wire)
673 rev = _revision
674 rev = _revision
674 if isinstance(rev, int):
675 if isinstance(rev, int):
675 # NOTE(marcink):
676 # NOTE(marcink):
676 # since Mercurial doesn't support negative indexes properly
677 # since Mercurial doesn't support negative indexes properly
677 # we need to shift it by one to get the proper index, e.g.
678 # we need to shift it by one to get the proper index, e.g.
678 # repo[-1] => repo[-2]
679 # repo[-1] => repo[-2]
679 # repo[0] => repo[-1]
680 # repo[0] => repo[-1]
680 if rev <= 0:
681 if rev <= 0:
681 rev -= 1
682 rev -= 1
682 try:
683 try:
683 ctx = self._get_ctx(repo, rev)
684 ctx = self._get_ctx(repo, rev)
684 except (TypeError, RepoLookupError) as e:
685 except (TypeError, RepoLookupError) as e:
685 e._org_exc_tb = traceback.format_exc()
686 e._org_exc_tb = traceback.format_exc()
686 raise exceptions.LookupException(e)(rev)
687 raise exceptions.LookupException(e)(rev)
687 except LookupError as e:
688 except LookupError as e:
688 e._org_exc_tb = traceback.format_exc()
689 e._org_exc_tb = traceback.format_exc()
689 raise exceptions.LookupException(e)(e.name)
690 raise exceptions.LookupException(e)(e.name)
690
691
691 if not both:
692 if not both:
692 return ctx.hex()
693 return ctx.hex()
693
694
694 ctx = repo[ctx.hex()]
695 ctx = repo[ctx.hex()]
695 return ctx.hex(), ctx.rev()
696 return ctx.hex(), ctx.rev()
696
697
697 return _lookup(context_uid, repo_id, revision, both)
698 return _lookup(context_uid, repo_id, revision, both)
698
699
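A small sketch of the integer-revision shift performed in lookup above (values are illustrative and no repository access is involved):

    def shift_rev(rev):
        # mirrors the branch above: only integer revisions <= 0 are shifted
        if isinstance(rev, int) and rev <= 0:
            rev -= 1
        return rev

    print(shift_rev(0))    # -1, i.e. repo[-1] (tip)
    print(shift_rev(-1))   # -2
    print(shift_rev(5))    # 5, positive revisions pass through unchanged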
699 @reraise_safe_exceptions
700 @reraise_safe_exceptions
700 def sync_push(self, wire, url):
701 def sync_push(self, wire, url):
701 if not self.check_url(url, wire['config']):
702 if not self.check_url(url, wire['config']):
702 return
703 return
703
704
704 repo = self._factory.repo(wire)
705 repo = self._factory.repo(wire)
705
706
706 # Disable any prompts for this repo
707 # Disable any prompts for this repo
707 repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
708 repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
708
709
709 bookmarks = list(dict(repo._bookmarks).keys())
710 bookmarks = list(dict(repo._bookmarks).keys())
710 remote = peer(repo, {}, safe_bytes(url))
711 remote = peer(repo, {}, safe_bytes(url))
711 # Disable any prompts for this remote
712 # Disable any prompts for this remote
712 remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
713 remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
713
714
714 return exchange.push(
715 return exchange.push(
715 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
716 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
716
717
717 @reraise_safe_exceptions
718 @reraise_safe_exceptions
718 def revision(self, wire, rev):
719 def revision(self, wire, rev):
719 repo = self._factory.repo(wire)
720 repo = self._factory.repo(wire)
720 ctx = self._get_ctx(repo, rev)
721 ctx = self._get_ctx(repo, rev)
721 return ctx.rev()
722 return ctx.rev()
722
723
723 @reraise_safe_exceptions
724 @reraise_safe_exceptions
724 def rev_range(self, wire, commit_filter):
725 def rev_range(self, wire, commit_filter):
725 cache_on, context_uid, repo_id = self._cache_on(wire)
726 cache_on, context_uid, repo_id = self._cache_on(wire)
726 region = self._region(wire)
727 region = self._region(wire)
727
728
728 @region.conditional_cache_on_arguments(condition=cache_on)
729 @region.conditional_cache_on_arguments(condition=cache_on)
729 def _rev_range(_context_uid, _repo_id, _filter):
730 def _rev_range(_context_uid, _repo_id, _filter):
730 repo = self._factory.repo(wire)
731 repo = self._factory.repo(wire)
731 revisions = [
732 revisions = [
732 ascii_str(repo[rev].hex())
733 ascii_str(repo[rev].hex())
733 for rev in revrange(repo, list(map(ascii_bytes, commit_filter)))
734 for rev in revrange(repo, list(map(ascii_bytes, commit_filter)))
734 ]
735 ]
735 return revisions
736 return revisions
736
737
737 return _rev_range(context_uid, repo_id, sorted(commit_filter))
738 return _rev_range(context_uid, repo_id, sorted(commit_filter))
738
739
739 @reraise_safe_exceptions
740 @reraise_safe_exceptions
740 def rev_range_hash(self, wire, node):
741 def rev_range_hash(self, wire, node):
741 repo = self._factory.repo(wire)
742 repo = self._factory.repo(wire)
742
743
743 def get_revs(repo, rev_opt):
744 def get_revs(repo, rev_opt):
744 if rev_opt:
745 if rev_opt:
745 revs = revrange(repo, rev_opt)
746 revs = revrange(repo, rev_opt)
746 if len(revs) == 0:
747 if len(revs) == 0:
747 return (nullrev, nullrev)
748 return (nullrev, nullrev)
748 return max(revs), min(revs)
749 return max(revs), min(revs)
749 else:
750 else:
750 return len(repo) - 1, 0
751 return len(repo) - 1, 0
751
752
752 stop, start = get_revs(repo, [node + ':'])
753 stop, start = get_revs(repo, [node + ':'])
753 revs = [ascii_str(repo[r].hex()) for r in range(start, stop + 1)]
754 revs = [ascii_str(repo[r].hex()) for r in range(start, stop + 1)]
754 return revs
755 return revs
755
756
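Note that get_revs above returns (max, min), so the caller unpacks it as stop, start and then walks the revisions in ascending order; with hypothetical revision numbers:

    stop, start = 7, 3             # as returned by get_revs() for some 'node:' range
    revs = list(range(start, stop + 1))
    print(revs)                    # [3, 4, 5, 6, 7]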
756 @reraise_safe_exceptions
757 @reraise_safe_exceptions
757 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
758 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
758 other_path = kwargs.pop('other_path', None)
759 other_path = kwargs.pop('other_path', None)
759
760
760 # case when we want to compare two independent repositories
761 # case when we want to compare two independent repositories
761 if other_path and other_path != wire["path"]:
762 if other_path and other_path != wire["path"]:
762 baseui = self._factory._create_config(wire["config"])
763 baseui = self._factory._create_config(wire["config"])
763 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
764 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
764 else:
765 else:
765 repo = self._factory.repo(wire)
766 repo = self._factory.repo(wire)
766 return list(repo.revs(rev_spec, *args))
767 return list(repo.revs(rev_spec, *args))
767
768
768 @reraise_safe_exceptions
769 @reraise_safe_exceptions
769 def verify(self, wire):
770 def verify(self, wire):
770 repo = self._factory.repo(wire)
771 repo = self._factory.repo(wire)
771 baseui = self._factory._create_config(wire['config'])
772 baseui = self._factory._create_config(wire['config'])
772
773
773 baseui, output = patch_ui_message_output(baseui)
774 baseui, output = patch_ui_message_output(baseui)
774
775
775 repo.ui = baseui
776 repo.ui = baseui
776 verify.verify(repo)
777 verify.verify(repo)
777 return output.getvalue()
778 return output.getvalue()
778
779
779 @reraise_safe_exceptions
780 @reraise_safe_exceptions
780 def hg_update_cache(self, wire):
781 def hg_update_cache(self, wire):
781 repo = self._factory.repo(wire)
782 repo = self._factory.repo(wire)
782 baseui = self._factory._create_config(wire['config'])
783 baseui = self._factory._create_config(wire['config'])
783 baseui, output = patch_ui_message_output(baseui)
784 baseui, output = patch_ui_message_output(baseui)
784
785
785 repo.ui = baseui
786 repo.ui = baseui
786 with repo.wlock(), repo.lock():
787 with repo.wlock(), repo.lock():
787 repo.updatecaches(full=True)
788 repo.updatecaches(full=True)
788
789
789 return output.getvalue()
790 return output.getvalue()
790
791
791 @reraise_safe_exceptions
792 @reraise_safe_exceptions
792 def hg_rebuild_fn_cache(self, wire):
793 def hg_rebuild_fn_cache(self, wire):
793 repo = self._factory.repo(wire)
794 repo = self._factory.repo(wire)
794 baseui = self._factory._create_config(wire['config'])
795 baseui = self._factory._create_config(wire['config'])
795 baseui, output = patch_ui_message_output(baseui)
796 baseui, output = patch_ui_message_output(baseui)
796
797
797 repo.ui = baseui
798 repo.ui = baseui
798
799
799 repair.rebuildfncache(baseui, repo)
800 repair.rebuildfncache(baseui, repo)
800
801
801 return output.getvalue()
802 return output.getvalue()
802
803
803 @reraise_safe_exceptions
804 @reraise_safe_exceptions
804 def tags(self, wire):
805 def tags(self, wire):
805 cache_on, context_uid, repo_id = self._cache_on(wire)
806 cache_on, context_uid, repo_id = self._cache_on(wire)
806 region = self._region(wire)
807 region = self._region(wire)
807
808
808 @region.conditional_cache_on_arguments(condition=cache_on)
809 @region.conditional_cache_on_arguments(condition=cache_on)
809 def _tags(_context_uid, _repo_id):
810 def _tags(_context_uid, _repo_id):
810 repo = self._factory.repo(wire)
811 repo = self._factory.repo(wire)
811 return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo.tags().items()}
812 return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo.tags().items()}
812
813
813 return _tags(context_uid, repo_id)
814 return _tags(context_uid, repo_id)
814
815
815 @reraise_safe_exceptions
816 @reraise_safe_exceptions
816 def update(self, wire, node=None, clean=False):
817 def update(self, wire, node=None, clean=False):
817 repo = self._factory.repo(wire)
818 repo = self._factory.repo(wire)
818 baseui = self._factory._create_config(wire['config'])
819 baseui = self._factory._create_config(wire['config'])
819 commands.update(baseui, repo, node=node, clean=clean)
820 commands.update(baseui, repo, node=node, clean=clean)
820
821
821 @reraise_safe_exceptions
822 @reraise_safe_exceptions
822 def identify(self, wire):
823 def identify(self, wire):
823 repo = self._factory.repo(wire)
824 repo = self._factory.repo(wire)
824 baseui = self._factory._create_config(wire['config'])
825 baseui = self._factory._create_config(wire['config'])
825 output = io.BytesIO()
826 output = io.BytesIO()
826 baseui.write = output.write
827 baseui.write = output.write
827 # This is required to get a full node id
828 # This is required to get a full node id
828 baseui.debugflag = True
829 baseui.debugflag = True
829 commands.identify(baseui, repo, id=True)
830 commands.identify(baseui, repo, id=True)
830
831
831 return output.getvalue()
832 return output.getvalue()
832
833
833 @reraise_safe_exceptions
834 @reraise_safe_exceptions
834 def heads(self, wire, branch=None):
835 def heads(self, wire, branch=None):
835 repo = self._factory.repo(wire)
836 repo = self._factory.repo(wire)
836 baseui = self._factory._create_config(wire['config'])
837 baseui = self._factory._create_config(wire['config'])
837 output = io.BytesIO()
838 output = io.BytesIO()
838
839
839 def write(data, **unused_kwargs):
840 def write(data, **unused_kwargs):
840 output.write(data)
841 output.write(data)
841
842
842 baseui.write = write
843 baseui.write = write
843 if branch:
844 if branch:
844 args = [safe_bytes(branch)]
845 args = [safe_bytes(branch)]
845 else:
846 else:
846 args = []
847 args = []
847 commands.heads(baseui, repo, template=b'{node} ', *args)
848 commands.heads(baseui, repo, template=b'{node} ', *args)
848
849
849 return output.getvalue()
850 return output.getvalue()
850
851
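heads() returns the raw bytes produced by the b'{node} ' template, i.e. space-separated full node hashes; a caller can split them as below (hash values are hypothetical):

    raw = ('a' * 40 + ' ' + 'b' * 40 + ' ').encode('ascii')  # shape of the returned bytes
    node_ids = raw.split()
    print(len(node_ids), node_ids[0][:8])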
851 @reraise_safe_exceptions
852 @reraise_safe_exceptions
852 def ancestor(self, wire, revision1, revision2):
853 def ancestor(self, wire, revision1, revision2):
853 repo = self._factory.repo(wire)
854 repo = self._factory.repo(wire)
854 changelog = repo.changelog
855 changelog = repo.changelog
855 lookup = repo.lookup
856 lookup = repo.lookup
856 a = changelog.ancestor(lookup(revision1), lookup(revision2))
857 a = changelog.ancestor(lookup(revision1), lookup(revision2))
857 return hex(a)
858 return hex(a)
858
859
859 @reraise_safe_exceptions
860 @reraise_safe_exceptions
860 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
861 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
861 baseui = self._factory._create_config(wire["config"], hooks=hooks)
862 baseui = self._factory._create_config(wire["config"], hooks=hooks)
862 clone(baseui, safe_bytes(source), safe_bytes(dest), noupdate=not update_after_clone)
863 clone(baseui, safe_bytes(source), safe_bytes(dest), noupdate=not update_after_clone)
863
864
864 @reraise_safe_exceptions
865 @reraise_safe_exceptions
865 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
866 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
866
867
867 repo = self._factory.repo(wire)
868 repo = self._factory.repo(wire)
868 baseui = self._factory._create_config(wire['config'])
869 baseui = self._factory._create_config(wire['config'])
869 publishing = baseui.configbool(b'phases', b'publish')
870 publishing = baseui.configbool(b'phases', b'publish')
870
871
871 def _filectxfn(_repo, ctx, path: bytes):
872 def _filectxfn(_repo, ctx, path: bytes):
872 """
873 """
873 Marks the given path as added/changed/removed in the given _repo. This is
874 Marks the given path as added/changed/removed in the given _repo. This is
874 used by Mercurial's internal commit function.
875 used by Mercurial's internal commit function.
875 """
876 """
876
877
877 # check if this path is removed
878 # check if this path is removed
878 if safe_str(path) in removed:
879 if safe_str(path) in removed:
879 # returning None is a way to mark node for removal
880 # returning None is a way to mark node for removal
880 return None
881 return None
881
882
882 # check if this path is added
883 # check if this path is added
883 for node in updated:
884 for node in updated:
884 if safe_bytes(node['path']) == path:
885 if safe_bytes(node['path']) == path:
885 return memfilectx(
886 return memfilectx(
886 _repo,
887 _repo,
887 changectx=ctx,
888 changectx=ctx,
888 path=safe_bytes(node['path']),
889 path=safe_bytes(node['path']),
889 data=safe_bytes(node['content']),
890 data=safe_bytes(node['content']),
890 islink=False,
891 islink=False,
891 isexec=bool(node['mode'] & stat.S_IXUSR),
892 isexec=bool(node['mode'] & stat.S_IXUSR),
892 copysource=False)
893 copysource=False)
893 abort_exc = exceptions.AbortException()
894 abort_exc = exceptions.AbortException()
894 raise abort_exc(f"Given path hasn't been marked as added, changed or removed ({path})")
895 raise abort_exc(f"Given path hasn't been marked as added, changed or removed ({path})")
895
896
896 if publishing:
897 if publishing:
897 new_commit_phase = b'public'
898 new_commit_phase = b'public'
898 else:
899 else:
899 new_commit_phase = b'draft'
900 new_commit_phase = b'draft'
900 with repo.ui.configoverride({(b'phases', b'new-commit'): new_commit_phase}):
901 with repo.ui.configoverride({(b'phases', b'new-commit'): new_commit_phase}):
901 kwargs = {safe_bytes(k): safe_bytes(v) for k, v in extra.items()}
902 kwargs = {safe_bytes(k): safe_bytes(v) for k, v in extra.items()}
902 commit_ctx = memctx(
903 commit_ctx = memctx(
903 repo=repo,
904 repo=repo,
904 parents=parents,
905 parents=parents,
905 text=safe_bytes(message),
906 text=safe_bytes(message),
906 files=[safe_bytes(x) for x in files],
907 files=[safe_bytes(x) for x in files],
907 filectxfn=_filectxfn,
908 filectxfn=_filectxfn,
908 user=safe_bytes(user),
909 user=safe_bytes(user),
909 date=(commit_time, commit_timezone),
910 date=(commit_time, commit_timezone),
910 extra=kwargs)
911 extra=kwargs)
911
912
912 n = repo.commitctx(commit_ctx)
913 n = repo.commitctx(commit_ctx)
913 new_id = hex(n)
914 new_id = hex(n)
914
915
915 return new_id
916 return new_id
916
917
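The updated/removed payloads consumed by _filectxfn above are assumed to look roughly as follows; the field names are taken from how the code reads them, the values are purely illustrative:

    import stat

    updated = [
        {'path': 'docs/readme.rst', 'content': 'hello\n', 'mode': 0o100644},
        {'path': 'bin/run.sh', 'content': '#!/bin/sh\n', 'mode': 0o100755},
    ]
    removed = ['old/obsolete.txt']
    files = [n['path'] for n in updated] + removed

    for node in updated:
        is_exec = bool(node['mode'] & stat.S_IXUSR)   # same test as in _filectxfn
        print(node['path'], 'executable' if is_exec else 'regular')
    print(files)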
917 @reraise_safe_exceptions
918 @reraise_safe_exceptions
918 def pull(self, wire, url, commit_ids=None):
919 def pull(self, wire, url, commit_ids=None):
919 repo = self._factory.repo(wire)
920 repo = self._factory.repo(wire)
920 # Disable any prompts for this repo
921 # Disable any prompts for this repo
921 repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
922 repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
922
923
923 remote = peer(repo, {}, safe_bytes(url))
924 remote = peer(repo, {}, safe_bytes(url))
924 # Disable any prompts for this remote
925 # Disable any prompts for this remote
925 remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
926 remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
926
927
927 if commit_ids:
928 if commit_ids:
928 commit_ids = [bin(commit_id) for commit_id in commit_ids]
929 commit_ids = [bin(commit_id) for commit_id in commit_ids]
929
930
930 return exchange.pull(
931 return exchange.pull(
931 repo, remote, heads=commit_ids, force=None).cgresult
932 repo, remote, heads=commit_ids, force=None).cgresult
932
933
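pull() converts the incoming 40-character hex commit ids into 20-byte binary node ids before passing them as heads; Mercurial's bin() is, for this purpose, equivalent to bytes.fromhex:

    commit_id = 'a1' * 20            # hypothetical 40-character hex id
    node = bytes.fromhex(commit_id)
    print(len(node))                 # 20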
933 @reraise_safe_exceptions
934 @reraise_safe_exceptions
934 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
935 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
935 repo = self._factory.repo(wire)
936 repo = self._factory.repo(wire)
936 baseui = self._factory._create_config(wire['config'], hooks=hooks)
937 baseui = self._factory._create_config(wire['config'], hooks=hooks)
937
938
938 # Mercurial internally has a lot of logic that checks ONLY whether an
939 # Mercurial internally has a lot of logic that checks ONLY whether an
939 # option is defined, so we only pass the options that are actually set
940 # option is defined, so we only pass the options that are actually set
940 opts = {}
941 opts = {}
941 if bookmark:
942 if bookmark:
942 opts['bookmark'] = bookmark
943 opts['bookmark'] = bookmark
943 if branch:
944 if branch:
944 opts['branch'] = branch
945 opts['branch'] = branch
945 if revision:
946 if revision:
946 opts['rev'] = revision
947 opts['rev'] = revision
947
948
948 commands.pull(baseui, repo, source, **opts)
949 commands.pull(baseui, repo, source, **opts)
949
950
950 @reraise_safe_exceptions
951 @reraise_safe_exceptions
951 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
952 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
952 repo = self._factory.repo(wire)
953 repo = self._factory.repo(wire)
953 baseui = self._factory._create_config(wire['config'], hooks=hooks)
954 baseui = self._factory._create_config(wire['config'], hooks=hooks)
954 commands.push(baseui, repo, dest=dest_path, rev=revisions,
955 commands.push(baseui, repo, dest=dest_path, rev=revisions,
955 new_branch=push_branches)
956 new_branch=push_branches)
956
957
957 @reraise_safe_exceptions
958 @reraise_safe_exceptions
958 def strip(self, wire, revision, update, backup):
959 def strip(self, wire, revision, update, backup):
959 repo = self._factory.repo(wire)
960 repo = self._factory.repo(wire)
960 ctx = self._get_ctx(repo, revision)
961 ctx = self._get_ctx(repo, revision)
961 hgext_strip(
962 hgext_strip(
962 repo.baseui, repo, ctx.node(), update=update, backup=backup)
963 repo.baseui, repo, ctx.node(), update=update, backup=backup)
963
964
964 @reraise_safe_exceptions
965 @reraise_safe_exceptions
965 def get_unresolved_files(self, wire):
966 def get_unresolved_files(self, wire):
966 repo = self._factory.repo(wire)
967 repo = self._factory.repo(wire)
967
968
968 log.debug('Calculating unresolved files for repo: %s', repo)
969 log.debug('Calculating unresolved files for repo: %s', repo)
969 output = io.BytesIO()
970 output = io.BytesIO()
970
971
971 def write(data, **unused_kwargs):
972 def write(data, **unused_kwargs):
972 output.write(data)
973 output.write(data)
973
974
974 baseui = self._factory._create_config(wire['config'])
975 baseui = self._factory._create_config(wire['config'])
975 baseui.write = write
976 baseui.write = write
976
977
977 commands.resolve(baseui, repo, list=True)
978 commands.resolve(baseui, repo, list=True)
978 unresolved = output.getvalue().splitlines()
979 unresolved = output.getvalue().splitlines()
979 return unresolved
980 return unresolved
980
981
981 @reraise_safe_exceptions
982 @reraise_safe_exceptions
982 def merge(self, wire, revision):
983 def merge(self, wire, revision):
983 repo = self._factory.repo(wire)
984 repo = self._factory.repo(wire)
984 baseui = self._factory._create_config(wire['config'])
985 baseui = self._factory._create_config(wire['config'])
985 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
986 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
986
987
987 # In case sub repositories are used, mercurial prompts the user in
988 # In case sub repositories are used, mercurial prompts the user in
988 # case of merge conflicts or different sub repository sources. By
989 # case of merge conflicts or different sub repository sources. By
989 # setting the interactive flag to `False` mercurial doesn't prompt the
990 # setting the interactive flag to `False` mercurial doesn't prompt the
990 # user but instead uses a default value.
991 # user but instead uses a default value.
991 repo.ui.setconfig(b'ui', b'interactive', False)
992 repo.ui.setconfig(b'ui', b'interactive', False)
992 commands.merge(baseui, repo, rev=revision)
993 commands.merge(baseui, repo, rev=revision)
993
994
994 @reraise_safe_exceptions
995 @reraise_safe_exceptions
995 def merge_state(self, wire):
996 def merge_state(self, wire):
996 repo = self._factory.repo(wire)
997 repo = self._factory.repo(wire)
997 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
998 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
998
999
999 # In case sub repositories are used, mercurial prompts the user in
1000 # In case sub repositories are used, mercurial prompts the user in
1000 # case of merge conflicts or different sub repository sources. By
1001 # case of merge conflicts or different sub repository sources. By
1001 # setting the interactive flag to `False` mercurial doesn't prompt the
1002 # setting the interactive flag to `False` mercurial doesn't prompt the
1002 # user but instead uses a default value.
1003 # user but instead uses a default value.
1003 repo.ui.setconfig(b'ui', b'interactive', False)
1004 repo.ui.setconfig(b'ui', b'interactive', False)
1004 ms = hg_merge.mergestate(repo)
1005 ms = hg_merge.mergestate(repo)
1005 return [x for x in ms.unresolved()]
1006 return [x for x in ms.unresolved()]
1006
1007
1007 @reraise_safe_exceptions
1008 @reraise_safe_exceptions
1008 def commit(self, wire, message, username, close_branch=False):
1009 def commit(self, wire, message, username, close_branch=False):
1009 repo = self._factory.repo(wire)
1010 repo = self._factory.repo(wire)
1010 baseui = self._factory._create_config(wire['config'])
1011 baseui = self._factory._create_config(wire['config'])
1011 repo.ui.setconfig(b'ui', b'username', username)
1012 repo.ui.setconfig(b'ui', b'username', username)
1012 commands.commit(baseui, repo, message=message, close_branch=close_branch)
1013 commands.commit(baseui, repo, message=message, close_branch=close_branch)
1013
1014
1014 @reraise_safe_exceptions
1015 @reraise_safe_exceptions
1015 def rebase(self, wire, source=None, dest=None, abort=False):
1016 def rebase(self, wire, source=None, dest=None, abort=False):
1016 repo = self._factory.repo(wire)
1017 repo = self._factory.repo(wire)
1017 baseui = self._factory._create_config(wire['config'])
1018 baseui = self._factory._create_config(wire['config'])
1018 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
1019 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
1019 # In case sub repositories are used, mercurial prompts the user in
1020 # In case sub repositories are used, mercurial prompts the user in
1020 # case of merge conflicts or different sub repository sources. By
1021 # case of merge conflicts or different sub repository sources. By
1021 # setting the interactive flag to `False` mercurial doesn't prompt the
1022 # setting the interactive flag to `False` mercurial doesn't prompt the
1022 # user but instead uses a default value.
1023 # user but instead uses a default value.
1023 repo.ui.setconfig(b'ui', b'interactive', False)
1024 repo.ui.setconfig(b'ui', b'interactive', False)
1024 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
1025 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
1025
1026
1026 @reraise_safe_exceptions
1027 @reraise_safe_exceptions
1027 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
1028 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
1028 repo = self._factory.repo(wire)
1029 repo = self._factory.repo(wire)
1029 ctx = self._get_ctx(repo, revision)
1030 ctx = self._get_ctx(repo, revision)
1030 node = ctx.node()
1031 node = ctx.node()
1031
1032
1032 date = (tag_time, tag_timezone)
1033 date = (tag_time, tag_timezone)
1033 try:
1034 try:
1034 hg_tag.tag(repo, name, node, message, local, user, date)
1035 hg_tag.tag(repo, name, node, message, local, user, date)
1035 except Abort as e:
1036 except Abort as e:
1036 log.exception("Tag operation aborted")
1037 log.exception("Tag operation aborted")
1037 # The exception can contain unicode, which we convert to a safe repr
1038 # The exception can contain unicode, which we convert to a safe repr
1038 raise exceptions.AbortException(e)(repr(e))
1039 raise exceptions.AbortException(e)(repr(e))
1039
1040
1040 @reraise_safe_exceptions
1041 @reraise_safe_exceptions
1041 def bookmark(self, wire, bookmark, revision=None):
1042 def bookmark(self, wire, bookmark, revision=None):
1042 repo = self._factory.repo(wire)
1043 repo = self._factory.repo(wire)
1043 baseui = self._factory._create_config(wire['config'])
1044 baseui = self._factory._create_config(wire['config'])
1044 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
1045 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
1045
1046
1046 @reraise_safe_exceptions
1047 @reraise_safe_exceptions
1047 def install_hooks(self, wire, force=False):
1048 def install_hooks(self, wire, force=False):
1048 # we don't need any special hooks for Mercurial
1049 # we don't need any special hooks for Mercurial
1049 pass
1050 pass
1050
1051
1051 @reraise_safe_exceptions
1052 @reraise_safe_exceptions
1052 def get_hooks_info(self, wire):
1053 def get_hooks_info(self, wire):
1053 return {
1054 return {
1054 'pre_version': vcsserver.__version__,
1055 'pre_version': vcsserver.__version__,
1055 'post_version': vcsserver.__version__,
1056 'post_version': vcsserver.__version__,
1056 }
1057 }
1057
1058
1058 @reraise_safe_exceptions
1059 @reraise_safe_exceptions
1059 def set_head_ref(self, wire, head_name):
1060 def set_head_ref(self, wire, head_name):
1060 pass
1061 pass
1061
1062
1062 @reraise_safe_exceptions
1063 @reraise_safe_exceptions
1063 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1064 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1064 archive_dir_name, commit_id):
1065 archive_dir_name, commit_id):
1065
1066
1066 def file_walker(_commit_id, path):
1067 def file_walker(_commit_id, path):
1067 repo = self._factory.repo(wire)
1068 repo = self._factory.repo(wire)
1068 ctx = repo[_commit_id]
1069 ctx = repo[_commit_id]
1069 is_root = path in ['', '/']
1070 is_root = path in ['', '/']
1070 if is_root:
1071 if is_root:
1071 matcher = alwaysmatcher(badfn=None)
1072 matcher = alwaysmatcher(badfn=None)
1072 else:
1073 else:
1073 matcher = patternmatcher('', [(b'glob', path+'/**', b'')], badfn=None)
1074 matcher = patternmatcher('', [(b'glob', path+'/**', b'')], badfn=None)
1074 file_iter = ctx.manifest().walk(matcher)
1075 file_iter = ctx.manifest().walk(matcher)
1075
1076
1076 for fn in file_iter:
1077 for fn in file_iter:
1077 file_path = fn
1078 file_path = fn
1078 flags = ctx.flags(fn)
1079 flags = ctx.flags(fn)
1079 mode = 0o755 if b'x' in flags else 0o644
1080 mode = 0o755 if b'x' in flags else 0o644
1080 is_link = b'l' in flags
1081 is_link = b'l' in flags
1081
1082
1082 yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)
1083 yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)
1083
1084
1084 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1085 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1085 archive_dir_name, commit_id)
1086 archive_dir_name, commit_id)
1086
1087
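A short sketch of the flag handling in file_walker above; manifest flags use b'x' for the executable bit and b'l' for symlinks, which is exactly what the code checks:

    def mode_and_link(flags: bytes):
        mode = 0o755 if b'x' in flags else 0o644
        is_link = b'l' in flags
        return mode, is_link

    for flags in (b'', b'x', b'l'):
        mode, is_link = mode_and_link(flags)
        print(flags, oct(mode), is_link)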