##// END OF EJS Templates
logging: added a few useful log entries
super-admin -
r1106:8665da11 python3
parent child Browse files
Show More
@@ -1,1371 +1,1374 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import collections
18 import collections
19 import logging
19 import logging
20 import os
20 import os
21 import posixpath as vcspath
21 import posixpath as vcspath
22 import re
22 import re
23 import stat
23 import stat
24 import traceback
24 import traceback
25 import urllib.request, urllib.parse, urllib.error
25 import urllib.request, urllib.parse, urllib.error
26 import urllib.request, urllib.error, urllib.parse
26 import urllib.request, urllib.error, urllib.parse
27 from functools import wraps
27 from functools import wraps
28
28
29 import more_itertools
29 import more_itertools
30 import pygit2
30 import pygit2
31 from pygit2 import Repository as LibGit2Repo
31 from pygit2 import Repository as LibGit2Repo
32 from pygit2 import index as LibGit2Index
32 from pygit2 import index as LibGit2Index
33 from dulwich import index, objects
33 from dulwich import index, objects
34 from dulwich.client import HttpGitClient, LocalGitClient
34 from dulwich.client import HttpGitClient, LocalGitClient
35 from dulwich.errors import (
35 from dulwich.errors import (
36 NotGitRepository, ChecksumMismatch, WrongObjectException,
36 NotGitRepository, ChecksumMismatch, WrongObjectException,
37 MissingCommitError, ObjectMissing, HangupException,
37 MissingCommitError, ObjectMissing, HangupException,
38 UnexpectedCommandError)
38 UnexpectedCommandError)
39 from dulwich.repo import Repo as DulwichRepo
39 from dulwich.repo import Repo as DulwichRepo
40 from dulwich.server import update_server_info
40 from dulwich.server import update_server_info
41
41
42 from vcsserver import exceptions, settings, subprocessio
42 from vcsserver import exceptions, settings, subprocessio
43 from vcsserver.str_utils import safe_str, safe_int, safe_bytes, ascii_str, ascii_bytes
43 from vcsserver.str_utils import safe_str, safe_int, safe_bytes, ascii_str, ascii_bytes
44 from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, archive_repo, BinaryEnvelope
44 from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, archive_repo, BinaryEnvelope
45 from vcsserver.hgcompat import (
45 from vcsserver.hgcompat import (
46 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
46 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
47 from vcsserver.git_lfs.lib import LFSOidStore
47 from vcsserver.git_lfs.lib import LFSOidStore
48 from vcsserver.vcs_base import RemoteBase
48 from vcsserver.vcs_base import RemoteBase
49
49
DIR_STAT = stat.S_IFDIR  # mode bits marking a directory entry
FILE_MODE = stat.S_IFMT  # mask extracting the file-type bits from a mode
GIT_LINK = objects.S_IFGITLINK  # mode git uses for submodule (gitlink) entries
PEELED_REF_MARKER = b'^{}'  # suffix marking peeled (dereferenced) tag refs
HEAD_MARKER = b'HEAD'  # symbolic ref name for the current head

log = logging.getLogger(__name__)
57
57
58
58
def reraise_safe_exceptions(func):
    """Decorator translating Dulwich errors into neutral vcsserver exceptions."""

    @wraps(func)
    def _safe_call(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except (ChecksumMismatch, WrongObjectException,
                MissingCommitError, ObjectMissing,) as e:
            # lookup-style failures: an object/commit could not be found
            raise exceptions.LookupException(org_exc=e)(safe_str(e))
        except (HangupException, UnexpectedCommandError) as e:
            # protocol-level failures while talking to the git client
            raise exceptions.VcsException(org_exc=e)(safe_str(e))
        except Exception:
            # NOTE(marcink): because of how dulwich handles some exceptions
            # (KeyError on empty repos), we cannot track this and catch all
            # exceptions, it's an exceptions from other handlers
            #if not hasattr(e, '_vcs_kind'):
            #log.exception("Unhandled exception in git remote call")
            #raise_from_original(exceptions.UnhandledException)
            raise
    return _safe_call
81
81
82
82
class Repo(DulwichRepo):
    """
    Dulwich ``Repo`` subclass that releases file descriptors on destruction.

    Dulwich sometimes keeps ``.idx`` pack-index file descriptors open, which
    eventually triggers "Too many open files". Closing the repository when
    the object is garbage-collected releases those descriptors.
    """

    def __del__(self):
        # A failed __init__ can leave the instance without object_store;
        # only close when there is actually something to close.
        if hasattr(self, 'object_store'):
            self.close()
94
94
95
95
class Repository(LibGit2Repo):
    """pygit2 repository usable as a context manager; frees libgit2 state on exit."""

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Always release the underlying libgit2 handle, even on error.
        self.free()
103
103
104
104
class GitFactory(RepoFactory):
    """Factory producing git repository objects, dulwich- or libgit2-backed."""

    repo_type = 'git'

    def _create_repo(self, wire, create, use_libgit2=False):
        # libgit2 mode: thin pygit2 wrapper around the on-disk repo
        if use_libgit2:
            return Repository(safe_bytes(wire['path']))

        # dulwich mode
        repo_path = safe_str(wire['path'], to_encoding=settings.WIRE_ENCODING)
        repo = Repo(repo_path)
        log.debug('repository created: got GIT object: %s', repo)
        return repo

    def repo(self, wire, create=False, use_libgit2=False):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create, use_libgit2)

    def repo_libgit2(self, wire):
        # Convenience wrapper returning a libgit2-backed repository.
        return self.repo(wire, use_libgit2=True)
124
127
125
128
126 class GitRemote(RemoteBase):
129 class GitRemote(RemoteBase):
127
130
    def __init__(self, factory):
        # factory: GitFactory used to open repository objects per call
        self._factory = factory
        # Commit attributes that bulk_request() can resolve, mapped to the
        # bound method computing each one.
        self._bulk_methods = {
            "date": self.date,
            "author": self.author,
            "branch": self.branch,
            "message": self.message,
            "parents": self.parents,
            "_commit": self.revision,
        }
138
141
139 def _wire_to_config(self, wire):
142 def _wire_to_config(self, wire):
140 if 'config' in wire:
143 if 'config' in wire:
141 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
144 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
142 return {}
145 return {}
143
146
144 def _remote_conf(self, config):
147 def _remote_conf(self, config):
145 params = [
148 params = [
146 '-c', 'core.askpass=""',
149 '-c', 'core.askpass=""',
147 ]
150 ]
148 ssl_cert_dir = config.get('vcs_ssl_dir')
151 ssl_cert_dir = config.get('vcs_ssl_dir')
149 if ssl_cert_dir:
152 if ssl_cert_dir:
150 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
153 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
151 return params
154 return params
152
155
153 @reraise_safe_exceptions
156 @reraise_safe_exceptions
154 def discover_git_version(self):
157 def discover_git_version(self):
155 stdout, _ = self.run_git_command(
158 stdout, _ = self.run_git_command(
156 {}, ['--version'], _bare=True, _safe=True)
159 {}, ['--version'], _bare=True, _safe=True)
157 prefix = b'git version'
160 prefix = b'git version'
158 if stdout.startswith(prefix):
161 if stdout.startswith(prefix):
159 stdout = stdout[len(prefix):]
162 stdout = stdout[len(prefix):]
160 return safe_str(stdout.strip())
163 return safe_str(stdout.strip())
161
164
162 @reraise_safe_exceptions
165 @reraise_safe_exceptions
163 def is_empty(self, wire):
166 def is_empty(self, wire):
164 repo_init = self._factory.repo_libgit2(wire)
167 repo_init = self._factory.repo_libgit2(wire)
165 with repo_init as repo:
168 with repo_init as repo:
166
169
167 try:
170 try:
168 has_head = repo.head.name
171 has_head = repo.head.name
169 if has_head:
172 if has_head:
170 return False
173 return False
171
174
172 # NOTE(marcink): check again using more expensive method
175 # NOTE(marcink): check again using more expensive method
173 return repo.is_empty
176 return repo.is_empty
174 except Exception:
177 except Exception:
175 pass
178 pass
176
179
177 return True
180 return True
178
181
    @reraise_safe_exceptions
    def assert_correct_path(self, wire):
        """
        Check that ``wire['path']`` points at an openable libgit2 repository.

        :return: True when the path opens cleanly, False otherwise.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _assert_correct_path(_context_uid, _repo_id):
            try:
                repo_init = self._factory.repo_libgit2(wire)
                with repo_init as repo:
                    pass
            except pygit2.GitError:
                # opening failed: the path is not a valid git repository
                path = wire.get('path')
                tb = traceback.format_exc()
                log.debug("Invalid Git path `%s`, tb: %s", path, tb)
                return False

            return True
        return _assert_correct_path(context_uid, repo_id)
198
201
199 @reraise_safe_exceptions
202 @reraise_safe_exceptions
200 def bare(self, wire):
203 def bare(self, wire):
201 repo_init = self._factory.repo_libgit2(wire)
204 repo_init = self._factory.repo_libgit2(wire)
202 with repo_init as repo:
205 with repo_init as repo:
203 return repo.is_bare
206 return repo.is_bare
204
207
205 @reraise_safe_exceptions
208 @reraise_safe_exceptions
206 def blob_as_pretty_string(self, wire, sha):
209 def blob_as_pretty_string(self, wire, sha):
207 repo_init = self._factory.repo_libgit2(wire)
210 repo_init = self._factory.repo_libgit2(wire)
208 with repo_init as repo:
211 with repo_init as repo:
209 blob_obj = repo[sha]
212 blob_obj = repo[sha]
210 return BinaryEnvelope(blob_obj.data)
213 return BinaryEnvelope(blob_obj.data)
211
214
    @reraise_safe_exceptions
    def blob_raw_length(self, wire, sha):
        """Return the size in bytes of the blob identified by ``sha``."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _blob_raw_length(_repo_id, _sha):
            # _sha participates in the cache key; the closure reads the outer
            # ``sha`` (same value) to do the actual lookup.
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                blob = repo[sha]
                return blob.size

        return _blob_raw_length(repo_id, sha)
226
229
227 def _parse_lfs_pointer(self, raw_content):
230 def _parse_lfs_pointer(self, raw_content):
228 spec_string = b'version https://git-lfs.github.com/spec'
231 spec_string = b'version https://git-lfs.github.com/spec'
229 if raw_content and raw_content.startswith(spec_string):
232 if raw_content and raw_content.startswith(spec_string):
230
233
231 pattern = re.compile(rb"""
234 pattern = re.compile(rb"""
232 (?:\n)?
235 (?:\n)?
233 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
236 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
234 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
237 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
235 ^size[ ](?P<oid_size>[0-9]+)\n
238 ^size[ ](?P<oid_size>[0-9]+)\n
236 (?:\n)?
239 (?:\n)?
237 """, re.VERBOSE | re.MULTILINE)
240 """, re.VERBOSE | re.MULTILINE)
238 match = pattern.match(raw_content)
241 match = pattern.match(raw_content)
239 if match:
242 if match:
240 return match.groupdict()
243 return match.groupdict()
241
244
242 return {}
245 return {}
243
246
    @reraise_safe_exceptions
    def is_large_file(self, wire, commit_id):
        """
        Return the parsed git-lfs pointer data for the blob at ``commit_id``,
        or an empty dict when the blob is binary or not an LFS pointer.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _is_large_file(_repo_id, _sha):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                blob = repo[commit_id]
                if blob.is_binary:
                    # binary blobs cannot be LFS pointers (pointers are text)
                    return {}

                return self._parse_lfs_pointer(blob.data)

        return _is_large_file(repo_id, commit_id)
260
263
    @reraise_safe_exceptions
    def is_binary(self, wire, tree_id):
        """Return True when the object at ``tree_id`` holds binary content."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _is_binary(_repo_id, _tree_id):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                blob_obj = repo[tree_id]
                return blob_obj.is_binary

        return _is_binary(repo_id, tree_id)
274
277
    @reraise_safe_exceptions
    def md5_hash(self, wire, tree_id):
        """
        Return an md5 hash for ``tree_id``.

        NOTE(review): the cached implementation currently always returns an
        empty string — looks like a deliberate stub; confirm with callers
        before relying on the value.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _md5_hash(_repo_id, _tree_id):
            return ''

        return _md5_hash(repo_id, tree_id)
285
288
286 @reraise_safe_exceptions
289 @reraise_safe_exceptions
287 def in_largefiles_store(self, wire, oid):
290 def in_largefiles_store(self, wire, oid):
288 conf = self._wire_to_config(wire)
291 conf = self._wire_to_config(wire)
289 repo_init = self._factory.repo_libgit2(wire)
292 repo_init = self._factory.repo_libgit2(wire)
290 with repo_init as repo:
293 with repo_init as repo:
291 repo_name = repo.path
294 repo_name = repo.path
292
295
293 store_location = conf.get('vcs_git_lfs_store_location')
296 store_location = conf.get('vcs_git_lfs_store_location')
294 if store_location:
297 if store_location:
295
298
296 store = LFSOidStore(
299 store = LFSOidStore(
297 oid=oid, repo=repo_name, store_location=store_location)
300 oid=oid, repo=repo_name, store_location=store_location)
298 return store.has_oid()
301 return store.has_oid()
299
302
300 return False
303 return False
301
304
302 @reraise_safe_exceptions
305 @reraise_safe_exceptions
303 def store_path(self, wire, oid):
306 def store_path(self, wire, oid):
304 conf = self._wire_to_config(wire)
307 conf = self._wire_to_config(wire)
305 repo_init = self._factory.repo_libgit2(wire)
308 repo_init = self._factory.repo_libgit2(wire)
306 with repo_init as repo:
309 with repo_init as repo:
307 repo_name = repo.path
310 repo_name = repo.path
308
311
309 store_location = conf.get('vcs_git_lfs_store_location')
312 store_location = conf.get('vcs_git_lfs_store_location')
310 if store_location:
313 if store_location:
311 store = LFSOidStore(
314 store = LFSOidStore(
312 oid=oid, repo=repo_name, store_location=store_location)
315 oid=oid, repo=repo_name, store_location=store_location)
313 return store.oid_path
316 return store.oid_path
314 raise ValueError('Unable to fetch oid with path {}'.format(oid))
317 raise ValueError('Unable to fetch oid with path {}'.format(oid))
315
318
    @reraise_safe_exceptions
    def bulk_request(self, wire, rev, pre_load):
        """
        Resolve several commit attributes (``pre_load``) for ``rev`` in one call.

        :raises exceptions.VcsException: when an unknown attribute is requested.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _bulk_request(_repo_id, _rev, _pre_load):
            result = {}
            for attr in pre_load:
                try:
                    method = self._bulk_methods[attr]
                    wire.update({'cache': False})  # disable cache for bulk calls so we don't double cache
                    args = [wire, rev]
                    result[attr] = method(*args)
                except KeyError as e:
                    raise exceptions.VcsException(e)(f"Unknown bulk attribute: {attr}")
            return result

        # sorted() keeps the cache key stable regardless of pre_load order
        return _bulk_request(repo_id, rev, sorted(pre_load))
335
338
336 def _build_opener(self, url):
339 def _build_opener(self, url):
337 handlers = []
340 handlers = []
338 url_obj = url_parser(url)
341 url_obj = url_parser(url)
339 _, authinfo = url_obj.authinfo()
342 _, authinfo = url_obj.authinfo()
340
343
341 if authinfo:
344 if authinfo:
342 # create a password manager
345 # create a password manager
343 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
346 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
344 passmgr.add_password(*authinfo)
347 passmgr.add_password(*authinfo)
345
348
346 handlers.extend((httpbasicauthhandler(passmgr),
349 handlers.extend((httpbasicauthhandler(passmgr),
347 httpdigestauthhandler(passmgr)))
350 httpdigestauthhandler(passmgr)))
348
351
349 return urllib.request.build_opener(*handlers)
352 return urllib.request.build_opener(*handlers)
350
353
351 def _type_id_to_name(self, type_id: int):
354 def _type_id_to_name(self, type_id: int):
352 return {
355 return {
353 1: 'commit',
356 1: 'commit',
354 2: 'tree',
357 2: 'tree',
355 3: 'blob',
358 3: 'blob',
356 4: 'tag'
359 4: 'tag'
357 }[type_id]
360 }[type_id]
358
361
359 @reraise_safe_exceptions
362 @reraise_safe_exceptions
360 def check_url(self, url, config):
363 def check_url(self, url, config):
361 url_obj = url_parser(safe_bytes(url))
364 url_obj = url_parser(safe_bytes(url))
362 test_uri, _ = url_obj.authinfo()
365 test_uri, _ = url_obj.authinfo()
363 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
366 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
364 url_obj.query = obfuscate_qs(url_obj.query)
367 url_obj.query = obfuscate_qs(url_obj.query)
365 cleaned_uri = str(url_obj)
368 cleaned_uri = str(url_obj)
366 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
369 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
367
370
368 if not test_uri.endswith('info/refs'):
371 if not test_uri.endswith('info/refs'):
369 test_uri = test_uri.rstrip('/') + '/info/refs'
372 test_uri = test_uri.rstrip('/') + '/info/refs'
370
373
371 o = self._build_opener(url)
374 o = self._build_opener(url)
372 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
375 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
373
376
374 q = {"service": 'git-upload-pack'}
377 q = {"service": 'git-upload-pack'}
375 qs = '?%s' % urllib.parse.urlencode(q)
378 qs = '?%s' % urllib.parse.urlencode(q)
376 cu = "%s%s" % (test_uri, qs)
379 cu = "%s%s" % (test_uri, qs)
377 req = urllib.request.Request(cu, None, {})
380 req = urllib.request.Request(cu, None, {})
378
381
379 try:
382 try:
380 log.debug("Trying to open URL %s", cleaned_uri)
383 log.debug("Trying to open URL %s", cleaned_uri)
381 resp = o.open(req)
384 resp = o.open(req)
382 if resp.code != 200:
385 if resp.code != 200:
383 raise exceptions.URLError()('Return Code is not 200')
386 raise exceptions.URLError()('Return Code is not 200')
384 except Exception as e:
387 except Exception as e:
385 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
388 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
386 # means it cannot be cloned
389 # means it cannot be cloned
387 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
390 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
388
391
389 # now detect if it's proper git repo
392 # now detect if it's proper git repo
390 gitdata = resp.read()
393 gitdata = resp.read()
391 if 'service=git-upload-pack' in gitdata:
394 if 'service=git-upload-pack' in gitdata:
392 pass
395 pass
393 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
396 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
394 # old style git can return some other format !
397 # old style git can return some other format !
395 pass
398 pass
396 else:
399 else:
397 raise exceptions.URLError()(
400 raise exceptions.URLError()(
398 "url [%s] does not look like an git" % (cleaned_uri,))
401 "url [%s] does not look like an git" % (cleaned_uri,))
399
402
400 return True
403 return True
401
404
402 @reraise_safe_exceptions
405 @reraise_safe_exceptions
403 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
406 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
404 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
407 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
405 remote_refs = self.pull(wire, url, apply_refs=False)
408 remote_refs = self.pull(wire, url, apply_refs=False)
406 repo = self._factory.repo(wire)
409 repo = self._factory.repo(wire)
407 if isinstance(valid_refs, list):
410 if isinstance(valid_refs, list):
408 valid_refs = tuple(valid_refs)
411 valid_refs = tuple(valid_refs)
409
412
410 for k in remote_refs:
413 for k in remote_refs:
411 # only parse heads/tags and skip so called deferred tags
414 # only parse heads/tags and skip so called deferred tags
412 if k.startswith(valid_refs) and not k.endswith(deferred):
415 if k.startswith(valid_refs) and not k.endswith(deferred):
413 repo[k] = remote_refs[k]
416 repo[k] = remote_refs[k]
414
417
415 if update_after_clone:
418 if update_after_clone:
416 # we want to checkout HEAD
419 # we want to checkout HEAD
417 repo["HEAD"] = remote_refs["HEAD"]
420 repo["HEAD"] = remote_refs["HEAD"]
418 index.build_index_from_tree(repo.path, repo.index_path(),
421 index.build_index_from_tree(repo.path, repo.index_path(),
419 repo.object_store, repo["HEAD"].tree)
422 repo.object_store, repo["HEAD"].tree)
420
423
    @reraise_safe_exceptions
    def branch(self, wire, commit_id):
        """Return the names of all branches whose head is exactly ``commit_id``."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _branch(_context_uid, _repo_id, _commit_id):
            regex = re.compile('^refs/heads')

            def filter_with(ref):
                # keep only (name, sha) pairs under refs/heads pointing at the commit
                return regex.match(ref[0]) and ref[1] == _commit_id

            branches = list(filter(filter_with, list(self.get_refs(wire).items())))
            # strip the refs/heads/ prefix, leaving the bare branch name
            return [x[0].split('refs/heads/')[-1] for x in branches]

        return _branch(context_uid, repo_id, commit_id)
437
440
    @reraise_safe_exceptions
    def commit_branches(self, wire, commit_id):
        """Return branch names containing ``commit_id`` (via libgit2)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _commit_branches(_context_uid, _repo_id, _commit_id):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                branches = [x for x in repo.branches.with_commit(_commit_id)]
                return branches

        return _commit_branches(context_uid, repo_id, commit_id)
451
454
452 @reraise_safe_exceptions
455 @reraise_safe_exceptions
453 def add_object(self, wire, content):
456 def add_object(self, wire, content):
454 repo_init = self._factory.repo_libgit2(wire)
457 repo_init = self._factory.repo_libgit2(wire)
455 with repo_init as repo:
458 with repo_init as repo:
456 blob = objects.Blob()
459 blob = objects.Blob()
457 blob.set_raw_string(content)
460 blob.set_raw_string(content)
458 repo.object_store.add_object(blob)
461 repo.object_store.add_object(blob)
459 return blob.id
462 return blob.id
460
463
461 # TODO: this is quite complex, check if that can be simplified
464 # TODO: this is quite complex, check if that can be simplified
462 @reraise_safe_exceptions
465 @reraise_safe_exceptions
463 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
466 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
464 # Defines the root tree
467 # Defines the root tree
465 class _Root(object):
468 class _Root(object):
466 def __repr__(self):
469 def __repr__(self):
467 return 'ROOT TREE'
470 return 'ROOT TREE'
468 ROOT = _Root()
471 ROOT = _Root()
469
472
470 repo = self._factory.repo(wire)
473 repo = self._factory.repo(wire)
471 object_store = repo.object_store
474 object_store = repo.object_store
472
475
473 # Create tree and populates it with blobs
476 # Create tree and populates it with blobs
474 if commit_tree:
477 if commit_tree:
475 commit_tree = safe_bytes(commit_tree)
478 commit_tree = safe_bytes(commit_tree)
476
479
477 if commit_tree and repo[commit_tree]:
480 if commit_tree and repo[commit_tree]:
478 git_commit = repo[safe_bytes(commit_data['parents'][0])]
481 git_commit = repo[safe_bytes(commit_data['parents'][0])]
479 commit_tree = repo[git_commit.tree] # root tree
482 commit_tree = repo[git_commit.tree] # root tree
480 else:
483 else:
481 commit_tree = objects.Tree()
484 commit_tree = objects.Tree()
482
485
483 for node in updated:
486 for node in updated:
484 # Compute subdirs if needed
487 # Compute subdirs if needed
485 dirpath, nodename = vcspath.split(node['path'])
488 dirpath, nodename = vcspath.split(node['path'])
486 dirnames = list(map(safe_str, dirpath and dirpath.split('/') or []))
489 dirnames = list(map(safe_str, dirpath and dirpath.split('/') or []))
487 parent = commit_tree
490 parent = commit_tree
488 ancestors = [('', parent)]
491 ancestors = [('', parent)]
489
492
490 # Tries to dig for the deepest existing tree
493 # Tries to dig for the deepest existing tree
491 while dirnames:
494 while dirnames:
492 curdir = dirnames.pop(0)
495 curdir = dirnames.pop(0)
493 try:
496 try:
494 dir_id = parent[curdir][1]
497 dir_id = parent[curdir][1]
495 except KeyError:
498 except KeyError:
496 # put curdir back into dirnames and stops
499 # put curdir back into dirnames and stops
497 dirnames.insert(0, curdir)
500 dirnames.insert(0, curdir)
498 break
501 break
499 else:
502 else:
500 # If found, updates parent
503 # If found, updates parent
501 parent = repo[dir_id]
504 parent = repo[dir_id]
502 ancestors.append((curdir, parent))
505 ancestors.append((curdir, parent))
503 # Now parent is deepest existing tree and we need to create
506 # Now parent is deepest existing tree and we need to create
504 # subtrees for dirnames (in reverse order)
507 # subtrees for dirnames (in reverse order)
505 # [this only applies for nodes from added]
508 # [this only applies for nodes from added]
506 new_trees = []
509 new_trees = []
507
510
508 blob = objects.Blob.from_string(node['content'])
511 blob = objects.Blob.from_string(node['content'])
509
512
510 node_path = safe_bytes(node['node_path'])
513 node_path = safe_bytes(node['node_path'])
511
514
512 if dirnames:
515 if dirnames:
513 # If there are trees which should be created we need to build
516 # If there are trees which should be created we need to build
514 # them now (in reverse order)
517 # them now (in reverse order)
515 reversed_dirnames = list(reversed(dirnames))
518 reversed_dirnames = list(reversed(dirnames))
516 curtree = objects.Tree()
519 curtree = objects.Tree()
517 curtree[node_path] = node['mode'], blob.id
520 curtree[node_path] = node['mode'], blob.id
518 new_trees.append(curtree)
521 new_trees.append(curtree)
519 for dirname in reversed_dirnames[:-1]:
522 for dirname in reversed_dirnames[:-1]:
520 newtree = objects.Tree()
523 newtree = objects.Tree()
521 newtree[dirname] = (DIR_STAT, curtree.id)
524 newtree[dirname] = (DIR_STAT, curtree.id)
522 new_trees.append(newtree)
525 new_trees.append(newtree)
523 curtree = newtree
526 curtree = newtree
524 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
527 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
525 else:
528 else:
526 parent.add(name=node_path, mode=node['mode'], hexsha=blob.id)
529 parent.add(name=node_path, mode=node['mode'], hexsha=blob.id)
527
530
528 new_trees.append(parent)
531 new_trees.append(parent)
529 # Update ancestors
532 # Update ancestors
530 reversed_ancestors = reversed(
533 reversed_ancestors = reversed(
531 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
534 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
532 for parent, tree, path in reversed_ancestors:
535 for parent, tree, path in reversed_ancestors:
533 parent[path] = (DIR_STAT, tree.id)
536 parent[path] = (DIR_STAT, tree.id)
534 object_store.add_object(tree)
537 object_store.add_object(tree)
535
538
536 object_store.add_object(blob)
539 object_store.add_object(blob)
537 for tree in new_trees:
540 for tree in new_trees:
538 object_store.add_object(tree)
541 object_store.add_object(tree)
539
542
540 for node_path in removed:
543 for node_path in removed:
541 paths = node_path.split('/')
544 paths = node_path.split('/')
542 tree = commit_tree # start with top-level
545 tree = commit_tree # start with top-level
543 trees = [{'tree': tree, 'path': ROOT}]
546 trees = [{'tree': tree, 'path': ROOT}]
544 # Traverse deep into the forest...
547 # Traverse deep into the forest...
545 # resolve final tree by iterating the path.
548 # resolve final tree by iterating the path.
546 # e.g a/b/c.txt will get
549 # e.g a/b/c.txt will get
547 # - root as tree then
550 # - root as tree then
548 # - 'a' as tree,
551 # - 'a' as tree,
549 # - 'b' as tree,
552 # - 'b' as tree,
550 # - stop at c as blob.
553 # - stop at c as blob.
551 for path in paths:
554 for path in paths:
552 try:
555 try:
553 obj = repo[tree[path][1]]
556 obj = repo[tree[path][1]]
554 if isinstance(obj, objects.Tree):
557 if isinstance(obj, objects.Tree):
555 trees.append({'tree': obj, 'path': path})
558 trees.append({'tree': obj, 'path': path})
556 tree = obj
559 tree = obj
557 except KeyError:
560 except KeyError:
558 break
561 break
559 #PROBLEM:
562 #PROBLEM:
560 """
563 """
561 We're not editing same reference tree object
564 We're not editing same reference tree object
562 """
565 """
563 # Cut down the blob and all rotten trees on the way back...
566 # Cut down the blob and all rotten trees on the way back...
564 for path, tree_data in reversed(list(zip(paths, trees))):
567 for path, tree_data in reversed(list(zip(paths, trees))):
565 tree = tree_data['tree']
568 tree = tree_data['tree']
566 tree.__delitem__(path)
569 tree.__delitem__(path)
567 # This operation edits the tree, we need to mark new commit back
570 # This operation edits the tree, we need to mark new commit back
568
571
569 if len(tree) > 0:
572 if len(tree) > 0:
570 # This tree still has elements - don't remove it or any
573 # This tree still has elements - don't remove it or any
571 # of it's parents
574 # of it's parents
572 break
575 break
573
576
574 object_store.add_object(commit_tree)
577 object_store.add_object(commit_tree)
575
578
576 # Create commit
579 # Create commit
577 commit = objects.Commit()
580 commit = objects.Commit()
578 commit.tree = commit_tree.id
581 commit.tree = commit_tree.id
579 bytes_keys = [
582 bytes_keys = [
580 'author',
583 'author',
581 'committer',
584 'committer',
582 'message',
585 'message',
583 'encoding',
586 'encoding',
584 'parents'
587 'parents'
585 ]
588 ]
586
589
587 for k, v in commit_data.items():
590 for k, v in commit_data.items():
588 if k in bytes_keys:
591 if k in bytes_keys:
589 if k == 'parents':
592 if k == 'parents':
590 v = [safe_bytes(x) for x in v]
593 v = [safe_bytes(x) for x in v]
591 else:
594 else:
592 v = safe_bytes(v)
595 v = safe_bytes(v)
593 setattr(commit, k, v)
596 setattr(commit, k, v)
594
597
595 object_store.add_object(commit)
598 object_store.add_object(commit)
596
599
597 self.create_branch(wire, branch, safe_str(commit.id))
600 self.create_branch(wire, branch, safe_str(commit.id))
598
601
599 # dulwich set-ref
602 # dulwich set-ref
600 repo.refs[safe_bytes(f'refs/heads/{branch}')] = commit.id
603 repo.refs[safe_bytes(f'refs/heads/{branch}')] = commit.id
601
604
602 return commit.id
605 return commit.id
603
606
    @reraise_safe_exceptions
    def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
        """
        Fetch objects/refs from `url` into the wire repository (dulwich).

        :param wire: repo context dict; must contain 'path'.
        :param url: source url; 'default' or a plain path uses a LocalGitClient,
            anything with '://' goes through an authenticated HttpGitClient.
        :param apply_refs: when True, write the fetched refs into the local repo.
        :param refs: optional list of ref names to restrict the fetch to.
        :param update_after: when True, set HEAD to the remote HEAD and rebuild
            the working-tree index from it.
        :returns: mapping of remote ref name -> sha (filtered by `refs` if given).
        :raises exceptions.AbortException: when `url` is not a git repository.
        """
        if url != 'default' and '://' not in url:
            client = LocalGitClient(url)
        else:
            url_obj = url_parser(url)
            o = self._build_opener(url)
            # authinfo() strips credentials out of the url; auth is handled by the opener
            url, _ = url_obj.authinfo()
            client = HttpGitClient(base_url=url, opener=o)
        repo = self._factory.repo(wire)

        determine_wants = repo.object_store.determine_wants_all
        if refs:
            refs = [ascii_bytes(x) for x in refs]

            def determine_wants_requested(remote_refs):
                # keep only the sha hashes whose ref names were explicitly requested
                determined = []
                for ref_name, ref_hash in remote_refs.items():
                    bytes_ref_name = safe_bytes(ref_name)

                    if bytes_ref_name in refs:
                        bytes_ref_hash = safe_bytes(ref_hash)
                        determined.append(bytes_ref_hash)
                return determined

            # swap with our custom requested wants
            determine_wants = determine_wants_requested

        try:
            remote_refs = client.fetch(
                path=url, target=repo, determine_wants=determine_wants)

        except NotGitRepository as e:
            log.warning(
                'Trying to fetch from "%s" failed, not a Git repository.', url)
            # Exception can contain unicode which we convert
            raise exceptions.AbortException(e)(repr(e))

        # mikhail: client.fetch() returns all the remote refs, but fetches only
        # refs filtered by `determine_wants` function. We need to filter result
        # as well
        if refs:
            remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}

        if apply_refs:
            # TODO: johbo: Needs proper test coverage with a git repository
            # that contains a tag object, so that we would end up with
            # a peeled ref at this point.
            for k in remote_refs:
                if k.endswith(PEELED_REF_MARKER):
                    log.debug("Skipping peeled reference %s", k)
                    continue
                repo[k] = remote_refs[k]

            if refs and not update_after:
                # mikhail: explicitly set the head to the last ref.
                repo[HEAD_MARKER] = remote_refs[refs[-1]]

        if update_after:
            # we want to check out HEAD
            repo[HEAD_MARKER] = remote_refs[HEAD_MARKER]
            index.build_index_from_tree(repo.path, repo.index_path(),
                                        repo.object_store, repo[HEAD_MARKER].tree)
        return remote_refs
668
671
    @reraise_safe_exceptions
    def sync_fetch(self, wire, url, refs=None, all_refs=False):
        """
        Fetch refs from `url` into the wire repo using the git CLI.

        First runs `git ls-remote` to enumerate remote refs, then fetches the
        selected refspecs in chunks with `git fetch --force --prune`.

        :param refs: optional sha (or list of shas) to restrict the fetch to;
            a single value is normalized into a list.
        :param all_refs: when False, only heads and tags are listed.
        :returns: OrderedDict of remote ref name (bytes) -> sha (bytes).
        """
        repo = self._factory.repo(wire)
        # NOTE(review): `repo` appears unused below; presumably the call ensures
        # the repository exists/initializes before running git — confirm.
        if refs and not isinstance(refs, (list, tuple)):
            refs = [refs]

        config = self._wire_to_config(wire)
        # get all remote refs we'll use to fetch later
        cmd = ['ls-remote']
        if not all_refs:
            cmd += ['--heads', '--tags']
        cmd += [url]
        output, __ = self.run_git_command(
            wire, cmd, fail_on_stderr=False,
            _copts=self._remote_conf(config),
            extra_env={'GIT_TERMINAL_PROMPT': '0'})

        remote_refs = collections.OrderedDict()
        fetch_refs = []

        # ls-remote output is bytes: one "<sha>\t<ref>" entry per line
        for ref_line in output.splitlines():
            sha, ref = ref_line.split(b'\t')
            sha = sha.strip()
            if ref in remote_refs:
                # duplicate, skip
                continue
            if ref.endswith(PEELED_REF_MARKER):
                log.debug("Skipping peeled reference %s", ref)
                continue
            # don't sync HEAD
            if ref in [HEAD_MARKER]:
                continue

            remote_refs[ref] = sha

            if refs and sha in refs:
                # we filter fetch using our specified refs
                fetch_refs.append(f'{safe_str(ref)}:{safe_str(ref)}')
            elif not refs:
                fetch_refs.append(f'{safe_str(ref)}:{safe_str(ref)}')
        log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))

        if fetch_refs:
            # chunk the refspecs to keep the git command line within limits
            for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
                fetch_refs_chunks = list(chunk)
                log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
                self.run_git_command(
                    wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
                    fail_on_stderr=False,
                    _copts=self._remote_conf(config),
                    extra_env={'GIT_TERMINAL_PROMPT': '0'})

        return remote_refs
722
725
723 @reraise_safe_exceptions
726 @reraise_safe_exceptions
724 def sync_push(self, wire, url, refs=None):
727 def sync_push(self, wire, url, refs=None):
725 if not self.check_url(url, wire):
728 if not self.check_url(url, wire):
726 return
729 return
727 config = self._wire_to_config(wire)
730 config = self._wire_to_config(wire)
728 self._factory.repo(wire)
731 self._factory.repo(wire)
729 self.run_git_command(
732 self.run_git_command(
730 wire, ['push', url, '--mirror'], fail_on_stderr=False,
733 wire, ['push', url, '--mirror'], fail_on_stderr=False,
731 _copts=self._remote_conf(config),
734 _copts=self._remote_conf(config),
732 extra_env={'GIT_TERMINAL_PROMPT': '0'})
735 extra_env={'GIT_TERMINAL_PROMPT': '0'})
733
736
734 @reraise_safe_exceptions
737 @reraise_safe_exceptions
735 def get_remote_refs(self, wire, url):
738 def get_remote_refs(self, wire, url):
736 repo = Repo(url)
739 repo = Repo(url)
737 return repo.get_refs()
740 return repo.get_refs()
738
741
739 @reraise_safe_exceptions
742 @reraise_safe_exceptions
740 def get_description(self, wire):
743 def get_description(self, wire):
741 repo = self._factory.repo(wire)
744 repo = self._factory.repo(wire)
742 return repo.get_description()
745 return repo.get_description()
743
746
744 @reraise_safe_exceptions
747 @reraise_safe_exceptions
745 def get_missing_revs(self, wire, rev1, rev2, path2):
748 def get_missing_revs(self, wire, rev1, rev2, path2):
746 repo = self._factory.repo(wire)
749 repo = self._factory.repo(wire)
747 LocalGitClient(thin_packs=False).fetch(path2, repo)
750 LocalGitClient(thin_packs=False).fetch(path2, repo)
748
751
749 wire_remote = wire.copy()
752 wire_remote = wire.copy()
750 wire_remote['path'] = path2
753 wire_remote['path'] = path2
751 repo_remote = self._factory.repo(wire_remote)
754 repo_remote = self._factory.repo(wire_remote)
752 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
755 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
753
756
754 revs = [
757 revs = [
755 x.commit.id
758 x.commit.id
756 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
759 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
757 return revs
760 return revs
758
761
    @reraise_safe_exceptions
    def get_object(self, wire, sha, maybe_unreachable=False):
        """
        Resolve `sha` (any revparse-able revision) to a commit descriptor dict.

        Tags are peeled to their target object. Unless the sha resolved via a
        reference, was a tag, or `maybe_unreachable` is set, the commit is also
        verified to be reachable from at least one branch ("dangling" check).

        :returns: dict with 'id', 'type', 'commit_id', 'idx' keys.
        :raises exceptions.LookupException: when the sha does not resolve or
            the commit is dangling.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _get_object(_context_uid, _repo_id, _sha):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:

                missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
                try:
                    commit = repo.revparse_single(sha)
                except KeyError:
                    # NOTE(marcink): KeyError doesn't give us any meaningful information
                    # here, we instead give something more explicit
                    e = exceptions.RefNotFoundException('SHA: %s not found', sha)
                    raise exceptions.LookupException(e)(missing_commit_err)
                except ValueError as e:
                    raise exceptions.LookupException(e)(missing_commit_err)

                is_tag = False
                if isinstance(commit, pygit2.Tag):
                    # peel annotated tag to the object it points at
                    commit = repo.get(commit.target)
                    is_tag = True

                check_dangling = True
                if is_tag:
                    check_dangling = False

                if check_dangling and maybe_unreachable:
                    # caller explicitly allows unreachable commits
                    check_dangling = False

                # we used a reference and it parsed means we're not having a dangling commit
                if sha != commit.hex:
                    check_dangling = False

                if check_dangling:
                    # check for dangling commit
                    for branch in repo.branches.with_commit(commit.hex):
                        if branch:
                            break
                    else:
                        # for/else: no branch contains this commit -> dangling
                        # NOTE(marcink): Empty error doesn't give us any meaningful information
                        # here, we instead give something more explicit
                        e = exceptions.RefNotFoundException('SHA: %s not found in branches', sha)
                        raise exceptions.LookupException(e)(missing_commit_err)

                commit_id = commit.hex
                type_id = commit.type

                return {
                    'id': commit_id,
                    'type': self._type_id_to_name(type_id),
                    'commit_id': commit_id,
                    'idx': 0
                }

        return _get_object(context_uid, repo_id, sha)
818
821
819 @reraise_safe_exceptions
822 @reraise_safe_exceptions
820 def get_refs(self, wire):
823 def get_refs(self, wire):
821 cache_on, context_uid, repo_id = self._cache_on(wire)
824 cache_on, context_uid, repo_id = self._cache_on(wire)
822 region = self._region(wire)
825 region = self._region(wire)
823
826
824 @region.conditional_cache_on_arguments(condition=cache_on)
827 @region.conditional_cache_on_arguments(condition=cache_on)
825 def _get_refs(_context_uid, _repo_id):
828 def _get_refs(_context_uid, _repo_id):
826
829
827 repo_init = self._factory.repo_libgit2(wire)
830 repo_init = self._factory.repo_libgit2(wire)
828 with repo_init as repo:
831 with repo_init as repo:
829 regex = re.compile('^refs/(heads|tags)/')
832 regex = re.compile('^refs/(heads|tags)/')
830 return {x.name: x.target.hex for x in
833 return {x.name: x.target.hex for x in
831 [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]}
834 [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]}
832
835
833 return _get_refs(context_uid, repo_id)
836 return _get_refs(context_uid, repo_id)
834
837
835 @reraise_safe_exceptions
838 @reraise_safe_exceptions
836 def get_branch_pointers(self, wire):
839 def get_branch_pointers(self, wire):
837 cache_on, context_uid, repo_id = self._cache_on(wire)
840 cache_on, context_uid, repo_id = self._cache_on(wire)
838 region = self._region(wire)
841 region = self._region(wire)
839
842
840 @region.conditional_cache_on_arguments(condition=cache_on)
843 @region.conditional_cache_on_arguments(condition=cache_on)
841 def _get_branch_pointers(_context_uid, _repo_id):
844 def _get_branch_pointers(_context_uid, _repo_id):
842
845
843 repo_init = self._factory.repo_libgit2(wire)
846 repo_init = self._factory.repo_libgit2(wire)
844 regex = re.compile('^refs/heads')
847 regex = re.compile('^refs/heads')
845 with repo_init as repo:
848 with repo_init as repo:
846 branches = [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]
849 branches = [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]
847 return {x.target.hex: x.shorthand for x in branches}
850 return {x.target.hex: x.shorthand for x in branches}
848
851
849 return _get_branch_pointers(context_uid, repo_id)
852 return _get_branch_pointers(context_uid, repo_id)
850
853
851 @reraise_safe_exceptions
854 @reraise_safe_exceptions
852 def head(self, wire, show_exc=True):
855 def head(self, wire, show_exc=True):
853 cache_on, context_uid, repo_id = self._cache_on(wire)
856 cache_on, context_uid, repo_id = self._cache_on(wire)
854 region = self._region(wire)
857 region = self._region(wire)
855
858
856 @region.conditional_cache_on_arguments(condition=cache_on)
859 @region.conditional_cache_on_arguments(condition=cache_on)
857 def _head(_context_uid, _repo_id, _show_exc):
860 def _head(_context_uid, _repo_id, _show_exc):
858 repo_init = self._factory.repo_libgit2(wire)
861 repo_init = self._factory.repo_libgit2(wire)
859 with repo_init as repo:
862 with repo_init as repo:
860 try:
863 try:
861 return repo.head.peel().hex
864 return repo.head.peel().hex
862 except Exception:
865 except Exception:
863 if show_exc:
866 if show_exc:
864 raise
867 raise
865 return _head(context_uid, repo_id, show_exc)
868 return _head(context_uid, repo_id, show_exc)
866
869
867 @reraise_safe_exceptions
870 @reraise_safe_exceptions
868 def init(self, wire):
871 def init(self, wire):
869 repo_path = safe_str(wire['path'])
872 repo_path = safe_str(wire['path'])
870 self.repo = Repo.init(repo_path)
873 self.repo = Repo.init(repo_path)
871
874
872 @reraise_safe_exceptions
875 @reraise_safe_exceptions
873 def init_bare(self, wire):
876 def init_bare(self, wire):
874 repo_path = safe_str(wire['path'])
877 repo_path = safe_str(wire['path'])
875 self.repo = Repo.init_bare(repo_path)
878 self.repo = Repo.init_bare(repo_path)
876
879
877 @reraise_safe_exceptions
880 @reraise_safe_exceptions
878 def revision(self, wire, rev):
881 def revision(self, wire, rev):
879
882
880 cache_on, context_uid, repo_id = self._cache_on(wire)
883 cache_on, context_uid, repo_id = self._cache_on(wire)
881 region = self._region(wire)
884 region = self._region(wire)
882
885
883 @region.conditional_cache_on_arguments(condition=cache_on)
886 @region.conditional_cache_on_arguments(condition=cache_on)
884 def _revision(_context_uid, _repo_id, _rev):
887 def _revision(_context_uid, _repo_id, _rev):
885 repo_init = self._factory.repo_libgit2(wire)
888 repo_init = self._factory.repo_libgit2(wire)
886 with repo_init as repo:
889 with repo_init as repo:
887 commit = repo[rev]
890 commit = repo[rev]
888 obj_data = {
891 obj_data = {
889 'id': commit.id.hex,
892 'id': commit.id.hex,
890 }
893 }
891 # tree objects itself don't have tree_id attribute
894 # tree objects itself don't have tree_id attribute
892 if hasattr(commit, 'tree_id'):
895 if hasattr(commit, 'tree_id'):
893 obj_data['tree'] = commit.tree_id.hex
896 obj_data['tree'] = commit.tree_id.hex
894
897
895 return obj_data
898 return obj_data
896 return _revision(context_uid, repo_id, rev)
899 return _revision(context_uid, repo_id, rev)
897
900
898 @reraise_safe_exceptions
901 @reraise_safe_exceptions
899 def date(self, wire, commit_id):
902 def date(self, wire, commit_id):
900 cache_on, context_uid, repo_id = self._cache_on(wire)
903 cache_on, context_uid, repo_id = self._cache_on(wire)
901 region = self._region(wire)
904 region = self._region(wire)
902
905
903 @region.conditional_cache_on_arguments(condition=cache_on)
906 @region.conditional_cache_on_arguments(condition=cache_on)
904 def _date(_repo_id, _commit_id):
907 def _date(_repo_id, _commit_id):
905 repo_init = self._factory.repo_libgit2(wire)
908 repo_init = self._factory.repo_libgit2(wire)
906 with repo_init as repo:
909 with repo_init as repo:
907 commit = repo[commit_id]
910 commit = repo[commit_id]
908
911
909 if hasattr(commit, 'commit_time'):
912 if hasattr(commit, 'commit_time'):
910 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
913 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
911 else:
914 else:
912 commit = commit.get_object()
915 commit = commit.get_object()
913 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
916 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
914
917
915 # TODO(marcink): check dulwich difference of offset vs timezone
918 # TODO(marcink): check dulwich difference of offset vs timezone
916 return [commit_time, commit_time_offset]
919 return [commit_time, commit_time_offset]
917 return _date(repo_id, commit_id)
920 return _date(repo_id, commit_id)
918
921
919 @reraise_safe_exceptions
922 @reraise_safe_exceptions
920 def author(self, wire, commit_id):
923 def author(self, wire, commit_id):
921 cache_on, context_uid, repo_id = self._cache_on(wire)
924 cache_on, context_uid, repo_id = self._cache_on(wire)
922 region = self._region(wire)
925 region = self._region(wire)
923
926
924 @region.conditional_cache_on_arguments(condition=cache_on)
927 @region.conditional_cache_on_arguments(condition=cache_on)
925 def _author(_repo_id, _commit_id):
928 def _author(_repo_id, _commit_id):
926 repo_init = self._factory.repo_libgit2(wire)
929 repo_init = self._factory.repo_libgit2(wire)
927 with repo_init as repo:
930 with repo_init as repo:
928 commit = repo[commit_id]
931 commit = repo[commit_id]
929
932
930 if hasattr(commit, 'author'):
933 if hasattr(commit, 'author'):
931 author = commit.author
934 author = commit.author
932 else:
935 else:
933 author = commit.get_object().author
936 author = commit.get_object().author
934
937
935 if author.email:
938 if author.email:
936 return "{} <{}>".format(author.name, author.email)
939 return "{} <{}>".format(author.name, author.email)
937
940
938 try:
941 try:
939 return "{}".format(author.name)
942 return "{}".format(author.name)
940 except Exception:
943 except Exception:
941 return "{}".format(safe_str(author.raw_name))
944 return "{}".format(safe_str(author.raw_name))
942
945
943 return _author(repo_id, commit_id)
946 return _author(repo_id, commit_id)
944
947
945 @reraise_safe_exceptions
948 @reraise_safe_exceptions
946 def message(self, wire, commit_id):
949 def message(self, wire, commit_id):
947 cache_on, context_uid, repo_id = self._cache_on(wire)
950 cache_on, context_uid, repo_id = self._cache_on(wire)
948 region = self._region(wire)
951 region = self._region(wire)
949
952
950 @region.conditional_cache_on_arguments(condition=cache_on)
953 @region.conditional_cache_on_arguments(condition=cache_on)
951 def _message(_repo_id, _commit_id):
954 def _message(_repo_id, _commit_id):
952 repo_init = self._factory.repo_libgit2(wire)
955 repo_init = self._factory.repo_libgit2(wire)
953 with repo_init as repo:
956 with repo_init as repo:
954 commit = repo[commit_id]
957 commit = repo[commit_id]
955 return commit.message
958 return commit.message
956 return _message(repo_id, commit_id)
959 return _message(repo_id, commit_id)
957
960
958 @reraise_safe_exceptions
961 @reraise_safe_exceptions
959 def parents(self, wire, commit_id):
962 def parents(self, wire, commit_id):
960 cache_on, context_uid, repo_id = self._cache_on(wire)
963 cache_on, context_uid, repo_id = self._cache_on(wire)
961 region = self._region(wire)
964 region = self._region(wire)
962
965
963 @region.conditional_cache_on_arguments(condition=cache_on)
966 @region.conditional_cache_on_arguments(condition=cache_on)
964 def _parents(_repo_id, _commit_id):
967 def _parents(_repo_id, _commit_id):
965 repo_init = self._factory.repo_libgit2(wire)
968 repo_init = self._factory.repo_libgit2(wire)
966 with repo_init as repo:
969 with repo_init as repo:
967 commit = repo[commit_id]
970 commit = repo[commit_id]
968 if hasattr(commit, 'parent_ids'):
971 if hasattr(commit, 'parent_ids'):
969 parent_ids = commit.parent_ids
972 parent_ids = commit.parent_ids
970 else:
973 else:
971 parent_ids = commit.get_object().parent_ids
974 parent_ids = commit.get_object().parent_ids
972
975
973 return [x.hex for x in parent_ids]
976 return [x.hex for x in parent_ids]
974 return _parents(repo_id, commit_id)
977 return _parents(repo_id, commit_id)
975
978
    @reraise_safe_exceptions
    def children(self, wire, commit_id):
        """
        Return the child commit ids of `commit_id`.

        Runs `git rev-list --all --children` over the `commit_id^..HEAD` range
        and parses the single output line that starts with `commit_id`; the
        remaining space-separated shas on that line are the children.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        head = self.head(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _children(_repo_id, _commit_id):

            output, __ = self.run_git_command(
                wire, ['rev-list', '--all', '--children', f'{commit_id}^..{head}'])

            child_ids = []
            # commit_id is a hex sha, so no regex-escaping is needed here
            pat = re.compile(r'^{}'.format(commit_id))
            for line in output.splitlines():
                line = safe_str(line)
                if pat.match(line):
                    # line format: "<commit_id> <child1> <child2> ..."
                    found_ids = line.split(' ')[1:]
                    child_ids.extend(found_ids)
                    break  # only one line can start with this sha

            return child_ids
        return _children(repo_id, commit_id)
1000
1003
1001 @reraise_safe_exceptions
1004 @reraise_safe_exceptions
1002 def set_refs(self, wire, key, value):
1005 def set_refs(self, wire, key, value):
1003 repo_init = self._factory.repo_libgit2(wire)
1006 repo_init = self._factory.repo_libgit2(wire)
1004 with repo_init as repo:
1007 with repo_init as repo:
1005 repo.references.create(key, value, force=True)
1008 repo.references.create(key, value, force=True)
1006
1009
1007 @reraise_safe_exceptions
1010 @reraise_safe_exceptions
1008 def create_branch(self, wire, branch_name, commit_id, force=False):
1011 def create_branch(self, wire, branch_name, commit_id, force=False):
1009 repo_init = self._factory.repo_libgit2(wire)
1012 repo_init = self._factory.repo_libgit2(wire)
1010 with repo_init as repo:
1013 with repo_init as repo:
1011 commit = repo[commit_id]
1014 commit = repo[commit_id]
1012
1015
1013 if force:
1016 if force:
1014 repo.branches.local.create(branch_name, commit, force=force)
1017 repo.branches.local.create(branch_name, commit, force=force)
1015 elif not repo.branches.get(branch_name):
1018 elif not repo.branches.get(branch_name):
1016 # create only if that branch isn't existing
1019 # create only if that branch isn't existing
1017 repo.branches.local.create(branch_name, commit, force=force)
1020 repo.branches.local.create(branch_name, commit, force=force)
1018
1021
1019 @reraise_safe_exceptions
1022 @reraise_safe_exceptions
1020 def remove_ref(self, wire, key):
1023 def remove_ref(self, wire, key):
1021 repo_init = self._factory.repo_libgit2(wire)
1024 repo_init = self._factory.repo_libgit2(wire)
1022 with repo_init as repo:
1025 with repo_init as repo:
1023 repo.references.delete(key)
1026 repo.references.delete(key)
1024
1027
1025 @reraise_safe_exceptions
1028 @reraise_safe_exceptions
1026 def tag_remove(self, wire, tag_name):
1029 def tag_remove(self, wire, tag_name):
1027 repo_init = self._factory.repo_libgit2(wire)
1030 repo_init = self._factory.repo_libgit2(wire)
1028 with repo_init as repo:
1031 with repo_init as repo:
1029 key = 'refs/tags/{}'.format(tag_name)
1032 key = 'refs/tags/{}'.format(tag_name)
1030 repo.references.delete(key)
1033 repo.references.delete(key)
1031
1034
1032 @reraise_safe_exceptions
1035 @reraise_safe_exceptions
1033 def tree_changes(self, wire, source_id, target_id):
1036 def tree_changes(self, wire, source_id, target_id):
1034 # TODO(marcink): remove this seems it's only used by tests
1037 # TODO(marcink): remove this seems it's only used by tests
1035 repo = self._factory.repo(wire)
1038 repo = self._factory.repo(wire)
1036 source = repo[source_id].tree if source_id else None
1039 source = repo[source_id].tree if source_id else None
1037 target = repo[target_id].tree
1040 target = repo[target_id].tree
1038 result = repo.object_store.tree_changes(source, target)
1041 result = repo.object_store.tree_changes(source, target)
1039 return list(result)
1042 return list(result)
1040
1043
1041 @reraise_safe_exceptions
1044 @reraise_safe_exceptions
1042 def tree_and_type_for_path(self, wire, commit_id, path):
1045 def tree_and_type_for_path(self, wire, commit_id, path):
1043
1046
1044 cache_on, context_uid, repo_id = self._cache_on(wire)
1047 cache_on, context_uid, repo_id = self._cache_on(wire)
1045 region = self._region(wire)
1048 region = self._region(wire)
1046
1049
1047 @region.conditional_cache_on_arguments(condition=cache_on)
1050 @region.conditional_cache_on_arguments(condition=cache_on)
1048 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
1051 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
1049 repo_init = self._factory.repo_libgit2(wire)
1052 repo_init = self._factory.repo_libgit2(wire)
1050
1053
1051 with repo_init as repo:
1054 with repo_init as repo:
1052 commit = repo[commit_id]
1055 commit = repo[commit_id]
1053 try:
1056 try:
1054 tree = commit.tree[path]
1057 tree = commit.tree[path]
1055 except KeyError:
1058 except KeyError:
1056 return None, None, None
1059 return None, None, None
1057
1060
1058 return tree.id.hex, tree.type_str, tree.filemode
1061 return tree.id.hex, tree.type_str, tree.filemode
1059 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
1062 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
1060
1063
1061 @reraise_safe_exceptions
1064 @reraise_safe_exceptions
1062 def tree_items(self, wire, tree_id):
1065 def tree_items(self, wire, tree_id):
1063 cache_on, context_uid, repo_id = self._cache_on(wire)
1066 cache_on, context_uid, repo_id = self._cache_on(wire)
1064 region = self._region(wire)
1067 region = self._region(wire)
1065
1068
1066 @region.conditional_cache_on_arguments(condition=cache_on)
1069 @region.conditional_cache_on_arguments(condition=cache_on)
1067 def _tree_items(_repo_id, _tree_id):
1070 def _tree_items(_repo_id, _tree_id):
1068
1071
1069 repo_init = self._factory.repo_libgit2(wire)
1072 repo_init = self._factory.repo_libgit2(wire)
1070 with repo_init as repo:
1073 with repo_init as repo:
1071 try:
1074 try:
1072 tree = repo[tree_id]
1075 tree = repo[tree_id]
1073 except KeyError:
1076 except KeyError:
1074 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1077 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1075
1078
1076 result = []
1079 result = []
1077 for item in tree:
1080 for item in tree:
1078 item_sha = item.hex
1081 item_sha = item.hex
1079 item_mode = item.filemode
1082 item_mode = item.filemode
1080 item_type = item.type_str
1083 item_type = item.type_str
1081
1084
1082 if item_type == 'commit':
1085 if item_type == 'commit':
1083 # NOTE(marcink): submodules we translate to 'link' for backward compat
1086 # NOTE(marcink): submodules we translate to 'link' for backward compat
1084 item_type = 'link'
1087 item_type = 'link'
1085
1088
1086 result.append((item.name, item_mode, item_sha, item_type))
1089 result.append((item.name, item_mode, item_sha, item_type))
1087 return result
1090 return result
1088 return _tree_items(repo_id, tree_id)
1091 return _tree_items(repo_id, tree_id)
1089
1092
1090 @reraise_safe_exceptions
1093 @reraise_safe_exceptions
1091 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1094 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1092 """
1095 """
1093 Old version that uses subprocess to call diff
1096 Old version that uses subprocess to call diff
1094 """
1097 """
1095
1098
1096 flags = [
1099 flags = [
1097 '-U%s' % context, '--patch',
1100 '-U%s' % context, '--patch',
1098 '--binary',
1101 '--binary',
1099 '--find-renames',
1102 '--find-renames',
1100 '--no-indent-heuristic',
1103 '--no-indent-heuristic',
1101 # '--indent-heuristic',
1104 # '--indent-heuristic',
1102 #'--full-index',
1105 #'--full-index',
1103 #'--abbrev=40'
1106 #'--abbrev=40'
1104 ]
1107 ]
1105
1108
1106 if opt_ignorews:
1109 if opt_ignorews:
1107 flags.append('--ignore-all-space')
1110 flags.append('--ignore-all-space')
1108
1111
1109 if commit_id_1 == self.EMPTY_COMMIT:
1112 if commit_id_1 == self.EMPTY_COMMIT:
1110 cmd = ['show'] + flags + [commit_id_2]
1113 cmd = ['show'] + flags + [commit_id_2]
1111 else:
1114 else:
1112 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1115 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1113
1116
1114 if file_filter:
1117 if file_filter:
1115 cmd.extend(['--', file_filter])
1118 cmd.extend(['--', file_filter])
1116
1119
1117 diff, __ = self.run_git_command(wire, cmd)
1120 diff, __ = self.run_git_command(wire, cmd)
1118 # If we used 'show' command, strip first few lines (until actual diff
1121 # If we used 'show' command, strip first few lines (until actual diff
1119 # starts)
1122 # starts)
1120 if commit_id_1 == self.EMPTY_COMMIT:
1123 if commit_id_1 == self.EMPTY_COMMIT:
1121 lines = diff.splitlines()
1124 lines = diff.splitlines()
1122 x = 0
1125 x = 0
1123 for line in lines:
1126 for line in lines:
1124 if line.startswith(b'diff'):
1127 if line.startswith(b'diff'):
1125 break
1128 break
1126 x += 1
1129 x += 1
1127 # Append new line just like 'diff' command do
1130 # Append new line just like 'diff' command do
1128 diff = '\n'.join(lines[x:]) + '\n'
1131 diff = '\n'.join(lines[x:]) + '\n'
1129 return diff
1132 return diff
1130
1133
1131 @reraise_safe_exceptions
1134 @reraise_safe_exceptions
1132 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1135 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1133 repo_init = self._factory.repo_libgit2(wire)
1136 repo_init = self._factory.repo_libgit2(wire)
1134
1137
1135 with repo_init as repo:
1138 with repo_init as repo:
1136 swap = True
1139 swap = True
1137 flags = 0
1140 flags = 0
1138 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1141 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1139
1142
1140 if opt_ignorews:
1143 if opt_ignorews:
1141 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1144 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1142
1145
1143 if commit_id_1 == self.EMPTY_COMMIT:
1146 if commit_id_1 == self.EMPTY_COMMIT:
1144 comm1 = repo[commit_id_2]
1147 comm1 = repo[commit_id_2]
1145 diff_obj = comm1.tree.diff_to_tree(
1148 diff_obj = comm1.tree.diff_to_tree(
1146 flags=flags, context_lines=context, swap=swap)
1149 flags=flags, context_lines=context, swap=swap)
1147
1150
1148 else:
1151 else:
1149 comm1 = repo[commit_id_2]
1152 comm1 = repo[commit_id_2]
1150 comm2 = repo[commit_id_1]
1153 comm2 = repo[commit_id_1]
1151 diff_obj = comm1.tree.diff_to_tree(
1154 diff_obj = comm1.tree.diff_to_tree(
1152 comm2.tree, flags=flags, context_lines=context, swap=swap)
1155 comm2.tree, flags=flags, context_lines=context, swap=swap)
1153 similar_flags = 0
1156 similar_flags = 0
1154 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1157 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1155 diff_obj.find_similar(flags=similar_flags)
1158 diff_obj.find_similar(flags=similar_flags)
1156
1159
1157 if file_filter:
1160 if file_filter:
1158 for p in diff_obj:
1161 for p in diff_obj:
1159 if p.delta.old_file.path == file_filter:
1162 if p.delta.old_file.path == file_filter:
1160 return BinaryEnvelope(p.data) or BinaryEnvelope(b'')
1163 return BinaryEnvelope(p.data) or BinaryEnvelope(b'')
1161 # fo matching path == no diff
1164 # fo matching path == no diff
1162 return BinaryEnvelope(b'')
1165 return BinaryEnvelope(b'')
1163 return BinaryEnvelope(diff_obj.patch) or BinaryEnvelope(b'')
1166 return BinaryEnvelope(diff_obj.patch) or BinaryEnvelope(b'')
1164
1167
1165 @reraise_safe_exceptions
1168 @reraise_safe_exceptions
1166 def node_history(self, wire, commit_id, path, limit):
1169 def node_history(self, wire, commit_id, path, limit):
1167 cache_on, context_uid, repo_id = self._cache_on(wire)
1170 cache_on, context_uid, repo_id = self._cache_on(wire)
1168 region = self._region(wire)
1171 region = self._region(wire)
1169
1172
1170 @region.conditional_cache_on_arguments(condition=cache_on)
1173 @region.conditional_cache_on_arguments(condition=cache_on)
1171 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1174 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1172 # optimize for n==1, rev-list is much faster for that use-case
1175 # optimize for n==1, rev-list is much faster for that use-case
1173 if limit == 1:
1176 if limit == 1:
1174 cmd = ['rev-list', '-1', commit_id, '--', path]
1177 cmd = ['rev-list', '-1', commit_id, '--', path]
1175 else:
1178 else:
1176 cmd = ['log']
1179 cmd = ['log']
1177 if limit:
1180 if limit:
1178 cmd.extend(['-n', str(safe_int(limit, 0))])
1181 cmd.extend(['-n', str(safe_int(limit, 0))])
1179 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1182 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1180
1183
1181 output, __ = self.run_git_command(wire, cmd)
1184 output, __ = self.run_git_command(wire, cmd)
1182 commit_ids = re.findall(rb'[0-9a-fA-F]{40}', output)
1185 commit_ids = re.findall(rb'[0-9a-fA-F]{40}', output)
1183
1186
1184 return [x for x in commit_ids]
1187 return [x for x in commit_ids]
1185 return _node_history(context_uid, repo_id, commit_id, path, limit)
1188 return _node_history(context_uid, repo_id, commit_id, path, limit)
1186
1189
1187 @reraise_safe_exceptions
1190 @reraise_safe_exceptions
1188 def node_annotate_legacy(self, wire, commit_id, path):
1191 def node_annotate_legacy(self, wire, commit_id, path):
1189 # note: replaced by pygit2 implementation
1192 # note: replaced by pygit2 implementation
1190 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1193 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1191 # -l ==> outputs long shas (and we need all 40 characters)
1194 # -l ==> outputs long shas (and we need all 40 characters)
1192 # --root ==> doesn't put '^' character for boundaries
1195 # --root ==> doesn't put '^' character for boundaries
1193 # -r commit_id ==> blames for the given commit
1196 # -r commit_id ==> blames for the given commit
1194 output, __ = self.run_git_command(wire, cmd)
1197 output, __ = self.run_git_command(wire, cmd)
1195
1198
1196 result = []
1199 result = []
1197 for i, blame_line in enumerate(output.splitlines()[:-1]):
1200 for i, blame_line in enumerate(output.splitlines()[:-1]):
1198 line_no = i + 1
1201 line_no = i + 1
1199 blame_commit_id, line = re.split(rb' ', blame_line, 1)
1202 blame_commit_id, line = re.split(rb' ', blame_line, 1)
1200 result.append((line_no, blame_commit_id, line))
1203 result.append((line_no, blame_commit_id, line))
1201
1204
1202 return result
1205 return result
1203
1206
1204 @reraise_safe_exceptions
1207 @reraise_safe_exceptions
1205 def node_annotate(self, wire, commit_id, path):
1208 def node_annotate(self, wire, commit_id, path):
1206
1209
1207 result_libgit = []
1210 result_libgit = []
1208 repo_init = self._factory.repo_libgit2(wire)
1211 repo_init = self._factory.repo_libgit2(wire)
1209 with repo_init as repo:
1212 with repo_init as repo:
1210 commit = repo[commit_id]
1213 commit = repo[commit_id]
1211 blame_obj = repo.blame(path, newest_commit=commit_id)
1214 blame_obj = repo.blame(path, newest_commit=commit_id)
1212 for i, line in enumerate(commit.tree[path].data.splitlines()):
1215 for i, line in enumerate(commit.tree[path].data.splitlines()):
1213 line_no = i + 1
1216 line_no = i + 1
1214 hunk = blame_obj.for_line(line_no)
1217 hunk = blame_obj.for_line(line_no)
1215 blame_commit_id = hunk.final_commit_id.hex
1218 blame_commit_id = hunk.final_commit_id.hex
1216
1219
1217 result_libgit.append((line_no, blame_commit_id, line))
1220 result_libgit.append((line_no, blame_commit_id, line))
1218
1221
1219 return result_libgit
1222 return result_libgit
1220
1223
1221 @reraise_safe_exceptions
1224 @reraise_safe_exceptions
1222 def update_server_info(self, wire):
1225 def update_server_info(self, wire):
1223 repo = self._factory.repo(wire)
1226 repo = self._factory.repo(wire)
1224 update_server_info(repo)
1227 update_server_info(repo)
1225
1228
1226 @reraise_safe_exceptions
1229 @reraise_safe_exceptions
1227 def get_all_commit_ids(self, wire):
1230 def get_all_commit_ids(self, wire):
1228
1231
1229 cache_on, context_uid, repo_id = self._cache_on(wire)
1232 cache_on, context_uid, repo_id = self._cache_on(wire)
1230 region = self._region(wire)
1233 region = self._region(wire)
1231
1234
1232 @region.conditional_cache_on_arguments(condition=cache_on)
1235 @region.conditional_cache_on_arguments(condition=cache_on)
1233 def _get_all_commit_ids(_context_uid, _repo_id):
1236 def _get_all_commit_ids(_context_uid, _repo_id):
1234
1237
1235 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1238 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1236 try:
1239 try:
1237 output, __ = self.run_git_command(wire, cmd)
1240 output, __ = self.run_git_command(wire, cmd)
1238 return output.splitlines()
1241 return output.splitlines()
1239 except Exception:
1242 except Exception:
1240 # Can be raised for empty repositories
1243 # Can be raised for empty repositories
1241 return []
1244 return []
1242
1245
1243 @region.conditional_cache_on_arguments(condition=cache_on)
1246 @region.conditional_cache_on_arguments(condition=cache_on)
1244 def _get_all_commit_ids_pygit2(_context_uid, _repo_id):
1247 def _get_all_commit_ids_pygit2(_context_uid, _repo_id):
1245 repo_init = self._factory.repo_libgit2(wire)
1248 repo_init = self._factory.repo_libgit2(wire)
1246 from pygit2 import GIT_SORT_REVERSE, GIT_SORT_TIME, GIT_BRANCH_ALL
1249 from pygit2 import GIT_SORT_REVERSE, GIT_SORT_TIME, GIT_BRANCH_ALL
1247 results = []
1250 results = []
1248 with repo_init as repo:
1251 with repo_init as repo:
1249 for commit in repo.walk(repo.head.target, GIT_SORT_TIME | GIT_BRANCH_ALL | GIT_SORT_REVERSE):
1252 for commit in repo.walk(repo.head.target, GIT_SORT_TIME | GIT_BRANCH_ALL | GIT_SORT_REVERSE):
1250 results.append(commit.id.hex)
1253 results.append(commit.id.hex)
1251
1254
1252 return _get_all_commit_ids(context_uid, repo_id)
1255 return _get_all_commit_ids(context_uid, repo_id)
1253
1256
1254 @reraise_safe_exceptions
1257 @reraise_safe_exceptions
1255 def run_git_command(self, wire, cmd, **opts):
1258 def run_git_command(self, wire, cmd, **opts):
1256 path = wire.get('path', None)
1259 path = wire.get('path', None)
1257
1260
1258 if path and os.path.isdir(path):
1261 if path and os.path.isdir(path):
1259 opts['cwd'] = path
1262 opts['cwd'] = path
1260
1263
1261 if '_bare' in opts:
1264 if '_bare' in opts:
1262 _copts = []
1265 _copts = []
1263 del opts['_bare']
1266 del opts['_bare']
1264 else:
1267 else:
1265 _copts = ['-c', 'core.quotepath=false', ]
1268 _copts = ['-c', 'core.quotepath=false', ]
1266 safe_call = False
1269 safe_call = False
1267 if '_safe' in opts:
1270 if '_safe' in opts:
1268 # no exc on failure
1271 # no exc on failure
1269 del opts['_safe']
1272 del opts['_safe']
1270 safe_call = True
1273 safe_call = True
1271
1274
1272 if '_copts' in opts:
1275 if '_copts' in opts:
1273 _copts.extend(opts['_copts'] or [])
1276 _copts.extend(opts['_copts'] or [])
1274 del opts['_copts']
1277 del opts['_copts']
1275
1278
1276 gitenv = os.environ.copy()
1279 gitenv = os.environ.copy()
1277 gitenv.update(opts.pop('extra_env', {}))
1280 gitenv.update(opts.pop('extra_env', {}))
1278 # need to clean fix GIT_DIR !
1281 # need to clean fix GIT_DIR !
1279 if 'GIT_DIR' in gitenv:
1282 if 'GIT_DIR' in gitenv:
1280 del gitenv['GIT_DIR']
1283 del gitenv['GIT_DIR']
1281 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1284 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1282 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1285 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1283
1286
1284 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1287 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1285 _opts = {'env': gitenv, 'shell': False}
1288 _opts = {'env': gitenv, 'shell': False}
1286
1289
1287 proc = None
1290 proc = None
1288 try:
1291 try:
1289 _opts.update(opts)
1292 _opts.update(opts)
1290 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1293 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1291
1294
1292 return b''.join(proc), b''.join(proc.stderr)
1295 return b''.join(proc), b''.join(proc.stderr)
1293 except OSError as err:
1296 except OSError as err:
1294 cmd = ' '.join(map(safe_str, cmd)) # human friendly CMD
1297 cmd = ' '.join(map(safe_str, cmd)) # human friendly CMD
1295 tb_err = ("Couldn't run git command (%s).\n"
1298 tb_err = ("Couldn't run git command (%s).\n"
1296 "Original error was:%s\n"
1299 "Original error was:%s\n"
1297 "Call options:%s\n"
1300 "Call options:%s\n"
1298 % (cmd, err, _opts))
1301 % (cmd, err, _opts))
1299 log.exception(tb_err)
1302 log.exception(tb_err)
1300 if safe_call:
1303 if safe_call:
1301 return '', err
1304 return '', err
1302 else:
1305 else:
1303 raise exceptions.VcsException()(tb_err)
1306 raise exceptions.VcsException()(tb_err)
1304 finally:
1307 finally:
1305 if proc:
1308 if proc:
1306 proc.close()
1309 proc.close()
1307
1310
1308 @reraise_safe_exceptions
1311 @reraise_safe_exceptions
1309 def install_hooks(self, wire, force=False):
1312 def install_hooks(self, wire, force=False):
1310 from vcsserver.hook_utils import install_git_hooks
1313 from vcsserver.hook_utils import install_git_hooks
1311 bare = self.bare(wire)
1314 bare = self.bare(wire)
1312 path = wire['path']
1315 path = wire['path']
1313 binary_dir = settings.BINARY_DIR
1316 binary_dir = settings.BINARY_DIR
1314 executable = None
1317 executable = None
1315 if binary_dir:
1318 if binary_dir:
1316 executable = os.path.join(binary_dir, 'python3')
1319 executable = os.path.join(binary_dir, 'python3')
1317 return install_git_hooks(path, bare, force_create=force)
1320 return install_git_hooks(path, bare, force_create=force)
1318
1321
1319 @reraise_safe_exceptions
1322 @reraise_safe_exceptions
1320 def get_hooks_info(self, wire):
1323 def get_hooks_info(self, wire):
1321 from vcsserver.hook_utils import (
1324 from vcsserver.hook_utils import (
1322 get_git_pre_hook_version, get_git_post_hook_version)
1325 get_git_pre_hook_version, get_git_post_hook_version)
1323 bare = self.bare(wire)
1326 bare = self.bare(wire)
1324 path = wire['path']
1327 path = wire['path']
1325 return {
1328 return {
1326 'pre_version': get_git_pre_hook_version(path, bare),
1329 'pre_version': get_git_pre_hook_version(path, bare),
1327 'post_version': get_git_post_hook_version(path, bare),
1330 'post_version': get_git_post_hook_version(path, bare),
1328 }
1331 }
1329
1332
1330 @reraise_safe_exceptions
1333 @reraise_safe_exceptions
1331 def set_head_ref(self, wire, head_name):
1334 def set_head_ref(self, wire, head_name):
1332 log.debug('Setting refs/head to `%s`', head_name)
1335 log.debug('Setting refs/head to `%s`', head_name)
1333 cmd = ['symbolic-ref', '"HEAD"', '"refs/heads/%s"' % head_name]
1336 cmd = ['symbolic-ref', '"HEAD"', '"refs/heads/%s"' % head_name]
1334 output, __ = self.run_git_command(wire, cmd)
1337 output, __ = self.run_git_command(wire, cmd)
1335 return [head_name] + output.splitlines()
1338 return [head_name] + output.splitlines()
1336
1339
1337 @reraise_safe_exceptions
1340 @reraise_safe_exceptions
1338 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1341 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1339 archive_dir_name, commit_id):
1342 archive_dir_name, commit_id):
1340
1343
1341 def file_walker(_commit_id, path):
1344 def file_walker(_commit_id, path):
1342 repo_init = self._factory.repo_libgit2(wire)
1345 repo_init = self._factory.repo_libgit2(wire)
1343
1346
1344 with repo_init as repo:
1347 with repo_init as repo:
1345 commit = repo[commit_id]
1348 commit = repo[commit_id]
1346
1349
1347 if path in ['', '/']:
1350 if path in ['', '/']:
1348 tree = commit.tree
1351 tree = commit.tree
1349 else:
1352 else:
1350 tree = commit.tree[path.rstrip('/')]
1353 tree = commit.tree[path.rstrip('/')]
1351 tree_id = tree.id.hex
1354 tree_id = tree.id.hex
1352 try:
1355 try:
1353 tree = repo[tree_id]
1356 tree = repo[tree_id]
1354 except KeyError:
1357 except KeyError:
1355 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1358 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1356
1359
1357 index = LibGit2Index.Index()
1360 index = LibGit2Index.Index()
1358 index.read_tree(tree)
1361 index.read_tree(tree)
1359 file_iter = index
1362 file_iter = index
1360
1363
1361 for fn in file_iter:
1364 for fn in file_iter:
1362 file_path = fn.path
1365 file_path = fn.path
1363 mode = fn.mode
1366 mode = fn.mode
1364 is_link = stat.S_ISLNK(mode)
1367 is_link = stat.S_ISLNK(mode)
1365 if mode == pygit2.GIT_FILEMODE_COMMIT:
1368 if mode == pygit2.GIT_FILEMODE_COMMIT:
1366 log.debug('Skipping path %s as a commit node', file_path)
1369 log.debug('Skipping path %s as a commit node', file_path)
1367 continue
1370 continue
1368 yield ArchiveNode(file_path, mode, is_link, repo[fn.hex].read_raw)
1371 yield ArchiveNode(file_path, mode, is_link, repo[fn.hex].read_raw)
1369
1372
1370 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1373 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1371 archive_dir_name, commit_id)
1374 archive_dir_name, commit_id)
@@ -1,1103 +1,1105 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import binascii
17 import binascii
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import urllib.request
21 import urllib.request
22 import urllib.parse
22 import urllib.parse
23 import traceback
23 import traceback
24 import hashlib
24 import hashlib
25
25
26 from hgext import largefiles, rebase, purge
26 from hgext import largefiles, rebase, purge
27
27
28 from mercurial import commands
28 from mercurial import commands
29 from mercurial import unionrepo
29 from mercurial import unionrepo
30 from mercurial import verify
30 from mercurial import verify
31 from mercurial import repair
31 from mercurial import repair
32
32
33 import vcsserver
33 import vcsserver
34 from vcsserver import exceptions
34 from vcsserver import exceptions
35 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, archive_repo, ArchiveNode, BinaryEnvelope
35 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, archive_repo, ArchiveNode, BinaryEnvelope
36 from vcsserver.hgcompat import (
36 from vcsserver.hgcompat import (
37 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
37 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
38 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
38 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
39 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
39 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
40 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
40 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
41 RepoLookupError, InterventionRequired, RequirementError,
41 RepoLookupError, InterventionRequired, RequirementError,
42 alwaysmatcher, patternmatcher, hgutil, hgext_strip)
42 alwaysmatcher, patternmatcher, hgutil, hgext_strip)
43 from vcsserver.str_utils import ascii_bytes, ascii_str, safe_str, safe_bytes
43 from vcsserver.str_utils import ascii_bytes, ascii_str, safe_str, safe_bytes
44 from vcsserver.vcs_base import RemoteBase
44 from vcsserver.vcs_base import RemoteBase
45
45
46 log = logging.getLogger(__name__)
46 log = logging.getLogger(__name__)
47
47
48
48
def make_ui_from_config(repo_config):
    """
    Build a mercurial `ui` instance from `repo_config` (an iterable of
    (section, option, value) triplets), with all mercurial output rerouted
    into this module's logger and sane server-side defaults applied.
    """

    class LoggingUI(ui.ui):
        # every output channel forwards to the module logger instead of stdio

        def status(self, *msg, **opts):
            log.info(' '.join(map(safe_str, msg)).rstrip('\n'))
            #super(LoggingUI, self).status(*msg, **opts)

        def warn(self, *msg, **opts):
            log.warning('ui_logger:' + ' '.join(map(safe_str, msg)).rstrip('\n'))
            #super(LoggingUI, self).warn(*msg, **opts)

        def error(self, *msg, **opts):
            log.error('ui_logger:' + ' '.join(map(safe_str, msg)).rstrip('\n'))
            #super(LoggingUI, self).error(*msg, **opts)

        def note(self, *msg, **opts):
            log.info('ui_logger:' + ' '.join(map(safe_str, msg)).rstrip('\n'))
            #super(LoggingUI, self).note(*msg, **opts)

        def debug(self, *msg, **opts):
            log.debug('ui_logger:' + ' '.join(map(safe_str, msg)).rstrip('\n'))
            #super(LoggingUI, self).debug(*msg, **opts)

    baseui = LoggingUI()

    # clean the baseui object so nothing leaks in from the environment
    baseui._ocfg = hgconfig.config()
    baseui._ucfg = hgconfig.config()
    baseui._tcfg = hgconfig.config()

    for section, option, value in repo_config:
        baseui.setconfig(ascii_bytes(section), ascii_bytes(option), ascii_bytes(value))

    # make our hgweb quiet so it doesn't print output
    baseui.setconfig(b'ui', b'quiet', b'true')

    baseui.setconfig(b'ui', b'paginate', b'never')
    # for better Error reporting of Mercurial
    baseui.setconfig(b'ui', b'message-output', b'stderr')

    # force mercurial to only use 1 thread, otherwise it may try to set a
    # signal in a non-main thread, thus generating a ValueError.
    baseui.setconfig(b'worker', b'numcpus', 1)

    # If there is no config for the largefiles extension, we explicitly disable
    # it here. This overrides settings from repositories hgrc file. Recent
    # mercurial versions enable largefiles in hgrc on clone from largefile
    # repo.
    if not baseui.hasconfig(b'extensions', b'largefiles'):
        log.debug('Explicitly disable largefiles extension for repo.')
        baseui.setconfig(b'extensions', b'largefiles', b'!')

    return baseui
108
108
109
109
def reraise_safe_exceptions(func):
    """Decorator for converting mercurial exceptions to something neutral."""

    def wrapper(*args, **kwargs):
        # NOTE: handler order matters — RepoLookupError must be mapped before
        # the broader RepoError / LookupError clauses below.
        try:
            return func(*args, **kwargs)
        except (Abort, InterventionRequired) as e:
            raise_from_original(exceptions.AbortException(e), e)
        except RepoLookupError as e:
            raise_from_original(exceptions.LookupException(e), e)
        except RequirementError as e:
            raise_from_original(exceptions.RequirementException(e), e)
        except RepoError as e:
            raise_from_original(exceptions.VcsException(e), e)
        except LookupError as e:
            raise_from_original(exceptions.LookupException(e), e)
        except Exception as e:
            # anything already tagged with _vcs_kind is re-raised untouched;
            # everything else becomes an UnhandledException
            if not hasattr(e, '_vcs_kind'):
                log.exception("Unhandled exception in hg remote call")
                raise_from_original(exceptions.UnhandledException(e), e)

            raise

    return wrapper
133
133
134
134
class MercurialFactory(RepoFactory):
    """Factory building configured mercurial repository objects."""

    repo_type = 'hg'

    def _create_config(self, config, hooks=True):
        """Build a mercurial ``ui`` object from the (section, option, value)
        triplets in *config*; with ``hooks=False`` the rhodecode-internal
        hook entries are stripped first."""
        if not hooks:
            hooks_to_clean = frozenset((
                'changegroup.repo_size', 'preoutgoing.pre_pull',
                'outgoing.pull_logger', 'prechangegroup.pre_push'))
            # drop only the known internal hooks, keep everything else
            config = [
                (section, option, value)
                for section, option, value in config
                if not (section == 'hooks' and option in hooks_to_clean)]

        return make_ui_from_config(config)

    def _create_repo(self, wire, create):
        baseui = self._create_config(wire["config"])
        repo = instance(baseui, safe_bytes(wire["path"]), create)
        log.debug('repository created: got HG object: %s', repo)
        return repo

    def repo(self, wire, create=False):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create)
162
164
163
165
def patch_ui_message_output(baseui):
    """Un-quiet *baseui* and capture everything it would print.

    Returns a ``(baseui, output)`` pair where *output* is a ``BytesIO``
    collecting all status/write/warn/debug messages.
    """
    baseui.setconfig(b'ui', b'quiet', b'false')
    output = io.BytesIO()

    def _capture(data, **unused_kwargs):
        output.write(data)

    # route every ui output channel into the shared buffer
    for channel in ('status', 'write', 'warn', 'debug'):
        setattr(baseui, channel, _capture)

    return baseui, output
177
179
178
180
179 class HgRemote(RemoteBase):
181 class HgRemote(RemoteBase):
180
182
181 def __init__(self, factory):
183 def __init__(self, factory):
182 self._factory = factory
184 self._factory = factory
183 self._bulk_methods = {
185 self._bulk_methods = {
184 "affected_files": self.ctx_files,
186 "affected_files": self.ctx_files,
185 "author": self.ctx_user,
187 "author": self.ctx_user,
186 "branch": self.ctx_branch,
188 "branch": self.ctx_branch,
187 "children": self.ctx_children,
189 "children": self.ctx_children,
188 "date": self.ctx_date,
190 "date": self.ctx_date,
189 "message": self.ctx_description,
191 "message": self.ctx_description,
190 "parents": self.ctx_parents,
192 "parents": self.ctx_parents,
191 "status": self.ctx_status,
193 "status": self.ctx_status,
192 "obsolete": self.ctx_obsolete,
194 "obsolete": self.ctx_obsolete,
193 "phase": self.ctx_phase,
195 "phase": self.ctx_phase,
194 "hidden": self.ctx_hidden,
196 "hidden": self.ctx_hidden,
195 "_file_paths": self.ctx_list,
197 "_file_paths": self.ctx_list,
196 }
198 }
197
199
198 def _get_ctx(self, repo, ref):
200 def _get_ctx(self, repo, ref):
199 return get_ctx(repo, ref)
201 return get_ctx(repo, ref)
200
202
201 @reraise_safe_exceptions
203 @reraise_safe_exceptions
202 def discover_hg_version(self):
204 def discover_hg_version(self):
203 from mercurial import util
205 from mercurial import util
204 return safe_str(util.version())
206 return safe_str(util.version())
205
207
206 @reraise_safe_exceptions
208 @reraise_safe_exceptions
207 def is_empty(self, wire):
209 def is_empty(self, wire):
208 repo = self._factory.repo(wire)
210 repo = self._factory.repo(wire)
209
211
210 try:
212 try:
211 return len(repo) == 0
213 return len(repo) == 0
212 except Exception:
214 except Exception:
213 log.exception("failed to read object_store")
215 log.exception("failed to read object_store")
214 return False
216 return False
215
217
216 @reraise_safe_exceptions
218 @reraise_safe_exceptions
217 def bookmarks(self, wire):
219 def bookmarks(self, wire):
218 cache_on, context_uid, repo_id = self._cache_on(wire)
220 cache_on, context_uid, repo_id = self._cache_on(wire)
219 region = self._region(wire)
221 region = self._region(wire)
220
222
221 @region.conditional_cache_on_arguments(condition=cache_on)
223 @region.conditional_cache_on_arguments(condition=cache_on)
222 def _bookmarks(_context_uid, _repo_id):
224 def _bookmarks(_context_uid, _repo_id):
223 repo = self._factory.repo(wire)
225 repo = self._factory.repo(wire)
224 return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo._bookmarks.items()}
226 return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo._bookmarks.items()}
225
227
226 return _bookmarks(context_uid, repo_id)
228 return _bookmarks(context_uid, repo_id)
227
229
228 @reraise_safe_exceptions
230 @reraise_safe_exceptions
229 def branches(self, wire, normal, closed):
231 def branches(self, wire, normal, closed):
230 cache_on, context_uid, repo_id = self._cache_on(wire)
232 cache_on, context_uid, repo_id = self._cache_on(wire)
231 region = self._region(wire)
233 region = self._region(wire)
232
234
233 @region.conditional_cache_on_arguments(condition=cache_on)
235 @region.conditional_cache_on_arguments(condition=cache_on)
234 def _branches(_context_uid, _repo_id, _normal, _closed):
236 def _branches(_context_uid, _repo_id, _normal, _closed):
235 repo = self._factory.repo(wire)
237 repo = self._factory.repo(wire)
236 iter_branches = repo.branchmap().iterbranches()
238 iter_branches = repo.branchmap().iterbranches()
237 bt = {}
239 bt = {}
238 for branch_name, _heads, tip_node, is_closed in iter_branches:
240 for branch_name, _heads, tip_node, is_closed in iter_branches:
239 if normal and not is_closed:
241 if normal and not is_closed:
240 bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
242 bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
241 if closed and is_closed:
243 if closed and is_closed:
242 bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
244 bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
243
245
244 return bt
246 return bt
245
247
246 return _branches(context_uid, repo_id, normal, closed)
248 return _branches(context_uid, repo_id, normal, closed)
247
249
248 @reraise_safe_exceptions
250 @reraise_safe_exceptions
249 def bulk_request(self, wire, commit_id, pre_load):
251 def bulk_request(self, wire, commit_id, pre_load):
250 cache_on, context_uid, repo_id = self._cache_on(wire)
252 cache_on, context_uid, repo_id = self._cache_on(wire)
251 region = self._region(wire)
253 region = self._region(wire)
252
254
253 @region.conditional_cache_on_arguments(condition=cache_on)
255 @region.conditional_cache_on_arguments(condition=cache_on)
254 def _bulk_request(_repo_id, _commit_id, _pre_load):
256 def _bulk_request(_repo_id, _commit_id, _pre_load):
255 result = {}
257 result = {}
256 for attr in pre_load:
258 for attr in pre_load:
257 try:
259 try:
258 method = self._bulk_methods[attr]
260 method = self._bulk_methods[attr]
259 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
261 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
260 result[attr] = method(wire, commit_id)
262 result[attr] = method(wire, commit_id)
261 except KeyError as e:
263 except KeyError as e:
262 raise exceptions.VcsException(e)(
264 raise exceptions.VcsException(e)(
263 'Unknown bulk attribute: "%s"' % attr)
265 'Unknown bulk attribute: "%s"' % attr)
264 return result
266 return result
265
267
266 return _bulk_request(repo_id, commit_id, sorted(pre_load))
268 return _bulk_request(repo_id, commit_id, sorted(pre_load))
267
269
268 @reraise_safe_exceptions
270 @reraise_safe_exceptions
269 def ctx_branch(self, wire, commit_id):
271 def ctx_branch(self, wire, commit_id):
270 cache_on, context_uid, repo_id = self._cache_on(wire)
272 cache_on, context_uid, repo_id = self._cache_on(wire)
271 region = self._region(wire)
273 region = self._region(wire)
272
274
273 @region.conditional_cache_on_arguments(condition=cache_on)
275 @region.conditional_cache_on_arguments(condition=cache_on)
274 def _ctx_branch(_repo_id, _commit_id):
276 def _ctx_branch(_repo_id, _commit_id):
275 repo = self._factory.repo(wire)
277 repo = self._factory.repo(wire)
276 ctx = self._get_ctx(repo, commit_id)
278 ctx = self._get_ctx(repo, commit_id)
277 return ctx.branch()
279 return ctx.branch()
278 return _ctx_branch(repo_id, commit_id)
280 return _ctx_branch(repo_id, commit_id)
279
281
280 @reraise_safe_exceptions
282 @reraise_safe_exceptions
281 def ctx_date(self, wire, commit_id):
283 def ctx_date(self, wire, commit_id):
282 cache_on, context_uid, repo_id = self._cache_on(wire)
284 cache_on, context_uid, repo_id = self._cache_on(wire)
283 region = self._region(wire)
285 region = self._region(wire)
284
286
285 @region.conditional_cache_on_arguments(condition=cache_on)
287 @region.conditional_cache_on_arguments(condition=cache_on)
286 def _ctx_date(_repo_id, _commit_id):
288 def _ctx_date(_repo_id, _commit_id):
287 repo = self._factory.repo(wire)
289 repo = self._factory.repo(wire)
288 ctx = self._get_ctx(repo, commit_id)
290 ctx = self._get_ctx(repo, commit_id)
289 return ctx.date()
291 return ctx.date()
290 return _ctx_date(repo_id, commit_id)
292 return _ctx_date(repo_id, commit_id)
291
293
292 @reraise_safe_exceptions
294 @reraise_safe_exceptions
293 def ctx_description(self, wire, revision):
295 def ctx_description(self, wire, revision):
294 repo = self._factory.repo(wire)
296 repo = self._factory.repo(wire)
295 ctx = self._get_ctx(repo, revision)
297 ctx = self._get_ctx(repo, revision)
296 return ctx.description()
298 return ctx.description()
297
299
298 @reraise_safe_exceptions
300 @reraise_safe_exceptions
299 def ctx_files(self, wire, commit_id):
301 def ctx_files(self, wire, commit_id):
300 cache_on, context_uid, repo_id = self._cache_on(wire)
302 cache_on, context_uid, repo_id = self._cache_on(wire)
301 region = self._region(wire)
303 region = self._region(wire)
302
304
303 @region.conditional_cache_on_arguments(condition=cache_on)
305 @region.conditional_cache_on_arguments(condition=cache_on)
304 def _ctx_files(_repo_id, _commit_id):
306 def _ctx_files(_repo_id, _commit_id):
305 repo = self._factory.repo(wire)
307 repo = self._factory.repo(wire)
306 ctx = self._get_ctx(repo, commit_id)
308 ctx = self._get_ctx(repo, commit_id)
307 return ctx.files()
309 return ctx.files()
308
310
309 return _ctx_files(repo_id, commit_id)
311 return _ctx_files(repo_id, commit_id)
310
312
311 @reraise_safe_exceptions
313 @reraise_safe_exceptions
312 def ctx_list(self, path, revision):
314 def ctx_list(self, path, revision):
313 repo = self._factory.repo(path)
315 repo = self._factory.repo(path)
314 ctx = self._get_ctx(repo, revision)
316 ctx = self._get_ctx(repo, revision)
315 return list(ctx)
317 return list(ctx)
316
318
317 @reraise_safe_exceptions
319 @reraise_safe_exceptions
318 def ctx_parents(self, wire, commit_id):
320 def ctx_parents(self, wire, commit_id):
319 cache_on, context_uid, repo_id = self._cache_on(wire)
321 cache_on, context_uid, repo_id = self._cache_on(wire)
320 region = self._region(wire)
322 region = self._region(wire)
321
323
322 @region.conditional_cache_on_arguments(condition=cache_on)
324 @region.conditional_cache_on_arguments(condition=cache_on)
323 def _ctx_parents(_repo_id, _commit_id):
325 def _ctx_parents(_repo_id, _commit_id):
324 repo = self._factory.repo(wire)
326 repo = self._factory.repo(wire)
325 ctx = self._get_ctx(repo, commit_id)
327 ctx = self._get_ctx(repo, commit_id)
326 return [parent.hex() for parent in ctx.parents()
328 return [parent.hex() for parent in ctx.parents()
327 if not (parent.hidden() or parent.obsolete())]
329 if not (parent.hidden() or parent.obsolete())]
328
330
329 return _ctx_parents(repo_id, commit_id)
331 return _ctx_parents(repo_id, commit_id)
330
332
331 @reraise_safe_exceptions
333 @reraise_safe_exceptions
332 def ctx_children(self, wire, commit_id):
334 def ctx_children(self, wire, commit_id):
333 cache_on, context_uid, repo_id = self._cache_on(wire)
335 cache_on, context_uid, repo_id = self._cache_on(wire)
334 region = self._region(wire)
336 region = self._region(wire)
335
337
336 @region.conditional_cache_on_arguments(condition=cache_on)
338 @region.conditional_cache_on_arguments(condition=cache_on)
337 def _ctx_children(_repo_id, _commit_id):
339 def _ctx_children(_repo_id, _commit_id):
338 repo = self._factory.repo(wire)
340 repo = self._factory.repo(wire)
339 ctx = self._get_ctx(repo, commit_id)
341 ctx = self._get_ctx(repo, commit_id)
340 return [child.hex() for child in ctx.children()
342 return [child.hex() for child in ctx.children()
341 if not (child.hidden() or child.obsolete())]
343 if not (child.hidden() or child.obsolete())]
342
344
343 return _ctx_children(repo_id, commit_id)
345 return _ctx_children(repo_id, commit_id)
344
346
345 @reraise_safe_exceptions
347 @reraise_safe_exceptions
346 def ctx_phase(self, wire, commit_id):
348 def ctx_phase(self, wire, commit_id):
347 cache_on, context_uid, repo_id = self._cache_on(wire)
349 cache_on, context_uid, repo_id = self._cache_on(wire)
348 region = self._region(wire)
350 region = self._region(wire)
349
351
350 @region.conditional_cache_on_arguments(condition=cache_on)
352 @region.conditional_cache_on_arguments(condition=cache_on)
351 def _ctx_phase(_context_uid, _repo_id, _commit_id):
353 def _ctx_phase(_context_uid, _repo_id, _commit_id):
352 repo = self._factory.repo(wire)
354 repo = self._factory.repo(wire)
353 ctx = self._get_ctx(repo, commit_id)
355 ctx = self._get_ctx(repo, commit_id)
354 # public=0, draft=1, secret=3
356 # public=0, draft=1, secret=3
355 return ctx.phase()
357 return ctx.phase()
356 return _ctx_phase(context_uid, repo_id, commit_id)
358 return _ctx_phase(context_uid, repo_id, commit_id)
357
359
358 @reraise_safe_exceptions
360 @reraise_safe_exceptions
359 def ctx_obsolete(self, wire, commit_id):
361 def ctx_obsolete(self, wire, commit_id):
360 cache_on, context_uid, repo_id = self._cache_on(wire)
362 cache_on, context_uid, repo_id = self._cache_on(wire)
361 region = self._region(wire)
363 region = self._region(wire)
362
364
363 @region.conditional_cache_on_arguments(condition=cache_on)
365 @region.conditional_cache_on_arguments(condition=cache_on)
364 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
366 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
365 repo = self._factory.repo(wire)
367 repo = self._factory.repo(wire)
366 ctx = self._get_ctx(repo, commit_id)
368 ctx = self._get_ctx(repo, commit_id)
367 return ctx.obsolete()
369 return ctx.obsolete()
368 return _ctx_obsolete(context_uid, repo_id, commit_id)
370 return _ctx_obsolete(context_uid, repo_id, commit_id)
369
371
370 @reraise_safe_exceptions
372 @reraise_safe_exceptions
371 def ctx_hidden(self, wire, commit_id):
373 def ctx_hidden(self, wire, commit_id):
372 cache_on, context_uid, repo_id = self._cache_on(wire)
374 cache_on, context_uid, repo_id = self._cache_on(wire)
373 region = self._region(wire)
375 region = self._region(wire)
374
376
375 @region.conditional_cache_on_arguments(condition=cache_on)
377 @region.conditional_cache_on_arguments(condition=cache_on)
376 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
378 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
377 repo = self._factory.repo(wire)
379 repo = self._factory.repo(wire)
378 ctx = self._get_ctx(repo, commit_id)
380 ctx = self._get_ctx(repo, commit_id)
379 return ctx.hidden()
381 return ctx.hidden()
380 return _ctx_hidden(context_uid, repo_id, commit_id)
382 return _ctx_hidden(context_uid, repo_id, commit_id)
381
383
382 @reraise_safe_exceptions
384 @reraise_safe_exceptions
383 def ctx_substate(self, wire, revision):
385 def ctx_substate(self, wire, revision):
384 repo = self._factory.repo(wire)
386 repo = self._factory.repo(wire)
385 ctx = self._get_ctx(repo, revision)
387 ctx = self._get_ctx(repo, revision)
386 return ctx.substate
388 return ctx.substate
387
389
388 @reraise_safe_exceptions
390 @reraise_safe_exceptions
389 def ctx_status(self, wire, revision):
391 def ctx_status(self, wire, revision):
390 repo = self._factory.repo(wire)
392 repo = self._factory.repo(wire)
391 ctx = self._get_ctx(repo, revision)
393 ctx = self._get_ctx(repo, revision)
392 status = repo[ctx.p1().node()].status(other=ctx.node())
394 status = repo[ctx.p1().node()].status(other=ctx.node())
393 # object of status (odd, custom named tuple in mercurial) is not
395 # object of status (odd, custom named tuple in mercurial) is not
394 # correctly serializable, we make it a list, as the underling
396 # correctly serializable, we make it a list, as the underling
395 # API expects this to be a list
397 # API expects this to be a list
396 return list(status)
398 return list(status)
397
399
398 @reraise_safe_exceptions
400 @reraise_safe_exceptions
399 def ctx_user(self, wire, revision):
401 def ctx_user(self, wire, revision):
400 repo = self._factory.repo(wire)
402 repo = self._factory.repo(wire)
401 ctx = self._get_ctx(repo, revision)
403 ctx = self._get_ctx(repo, revision)
402 return ctx.user()
404 return ctx.user()
403
405
404 @reraise_safe_exceptions
406 @reraise_safe_exceptions
405 def check_url(self, url, config):
407 def check_url(self, url, config):
406 _proto = None
408 _proto = None
407 if '+' in url[:url.find('://')]:
409 if '+' in url[:url.find('://')]:
408 _proto = url[0:url.find('+')]
410 _proto = url[0:url.find('+')]
409 url = url[url.find('+') + 1:]
411 url = url[url.find('+') + 1:]
410 handlers = []
412 handlers = []
411 url_obj = url_parser(url)
413 url_obj = url_parser(url)
412 test_uri, authinfo = url_obj.authinfo()
414 test_uri, authinfo = url_obj.authinfo()
413 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
415 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
414 url_obj.query = obfuscate_qs(url_obj.query)
416 url_obj.query = obfuscate_qs(url_obj.query)
415
417
416 cleaned_uri = str(url_obj)
418 cleaned_uri = str(url_obj)
417 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
419 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
418
420
419 if authinfo:
421 if authinfo:
420 # create a password manager
422 # create a password manager
421 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
423 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
422 passmgr.add_password(*authinfo)
424 passmgr.add_password(*authinfo)
423
425
424 handlers.extend((httpbasicauthhandler(passmgr),
426 handlers.extend((httpbasicauthhandler(passmgr),
425 httpdigestauthhandler(passmgr)))
427 httpdigestauthhandler(passmgr)))
426
428
427 o = urllib.request.build_opener(*handlers)
429 o = urllib.request.build_opener(*handlers)
428 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
430 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
429 ('Accept', 'application/mercurial-0.1')]
431 ('Accept', 'application/mercurial-0.1')]
430
432
431 q = {"cmd": 'between'}
433 q = {"cmd": 'between'}
432 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
434 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
433 qs = '?%s' % urllib.parse.urlencode(q)
435 qs = '?%s' % urllib.parse.urlencode(q)
434 cu = "%s%s" % (test_uri, qs)
436 cu = "%s%s" % (test_uri, qs)
435 req = urllib.request.Request(cu, None, {})
437 req = urllib.request.Request(cu, None, {})
436
438
437 try:
439 try:
438 log.debug("Trying to open URL %s", cleaned_uri)
440 log.debug("Trying to open URL %s", cleaned_uri)
439 resp = o.open(req)
441 resp = o.open(req)
440 if resp.code != 200:
442 if resp.code != 200:
441 raise exceptions.URLError()('Return Code is not 200')
443 raise exceptions.URLError()('Return Code is not 200')
442 except Exception as e:
444 except Exception as e:
443 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
445 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
444 # means it cannot be cloned
446 # means it cannot be cloned
445 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
447 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
446
448
447 # now check if it's a proper hg repo, but don't do it for svn
449 # now check if it's a proper hg repo, but don't do it for svn
448 try:
450 try:
449 if _proto == 'svn':
451 if _proto == 'svn':
450 pass
452 pass
451 else:
453 else:
452 # check for pure hg repos
454 # check for pure hg repos
453 log.debug(
455 log.debug(
454 "Verifying if URL is a Mercurial repository: %s",
456 "Verifying if URL is a Mercurial repository: %s",
455 cleaned_uri)
457 cleaned_uri)
456 ui = make_ui_from_config(config)
458 ui = make_ui_from_config(config)
457 peer_checker = makepeer(ui, url)
459 peer_checker = makepeer(ui, url)
458 peer_checker.lookup('tip')
460 peer_checker.lookup('tip')
459 except Exception as e:
461 except Exception as e:
460 log.warning("URL is not a valid Mercurial repository: %s",
462 log.warning("URL is not a valid Mercurial repository: %s",
461 cleaned_uri)
463 cleaned_uri)
462 raise exceptions.URLError(e)(
464 raise exceptions.URLError(e)(
463 "url [%s] does not look like an hg repo org_exc: %s"
465 "url [%s] does not look like an hg repo org_exc: %s"
464 % (cleaned_uri, e))
466 % (cleaned_uri, e))
465
467
466 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
468 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
467 return True
469 return True
468
470
469 @reraise_safe_exceptions
471 @reraise_safe_exceptions
470 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
472 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
471 repo = self._factory.repo(wire)
473 repo = self._factory.repo(wire)
472
474
473 if file_filter:
475 if file_filter:
474 # unpack the file-filter
476 # unpack the file-filter
475 repo_path, node_path = file_filter
477 repo_path, node_path = file_filter
476 match_filter = match(safe_bytes(repo_path), b'', [safe_bytes(node_path)])
478 match_filter = match(safe_bytes(repo_path), b'', [safe_bytes(node_path)])
477 else:
479 else:
478 match_filter = file_filter
480 match_filter = file_filter
479 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
481 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
480
482
481 try:
483 try:
482 diff_iter = patch.diff(
484 diff_iter = patch.diff(
483 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts)
485 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts)
484 return BinaryEnvelope(b"".join(diff_iter))
486 return BinaryEnvelope(b"".join(diff_iter))
485 except RepoLookupError as e:
487 except RepoLookupError as e:
486 raise exceptions.LookupException(e)()
488 raise exceptions.LookupException(e)()
487
489
488 @reraise_safe_exceptions
490 @reraise_safe_exceptions
489 def node_history(self, wire, revision, path, limit):
491 def node_history(self, wire, revision, path, limit):
490 cache_on, context_uid, repo_id = self._cache_on(wire)
492 cache_on, context_uid, repo_id = self._cache_on(wire)
491 region = self._region(wire)
493 region = self._region(wire)
492
494
493 @region.conditional_cache_on_arguments(condition=cache_on)
495 @region.conditional_cache_on_arguments(condition=cache_on)
494 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
496 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
495 repo = self._factory.repo(wire)
497 repo = self._factory.repo(wire)
496
498
497 ctx = self._get_ctx(repo, revision)
499 ctx = self._get_ctx(repo, revision)
498 fctx = ctx.filectx(safe_bytes(path))
500 fctx = ctx.filectx(safe_bytes(path))
499
501
500 def history_iter():
502 def history_iter():
501 limit_rev = fctx.rev()
503 limit_rev = fctx.rev()
502 for obj in reversed(list(fctx.filelog())):
504 for obj in reversed(list(fctx.filelog())):
503 obj = fctx.filectx(obj)
505 obj = fctx.filectx(obj)
504 ctx = obj.changectx()
506 ctx = obj.changectx()
505 if ctx.hidden() or ctx.obsolete():
507 if ctx.hidden() or ctx.obsolete():
506 continue
508 continue
507
509
508 if limit_rev >= obj.rev():
510 if limit_rev >= obj.rev():
509 yield obj
511 yield obj
510
512
511 history = []
513 history = []
512 for cnt, obj in enumerate(history_iter()):
514 for cnt, obj in enumerate(history_iter()):
513 if limit and cnt >= limit:
515 if limit and cnt >= limit:
514 break
516 break
515 history.append(hex(obj.node()))
517 history.append(hex(obj.node()))
516
518
517 return [x for x in history]
519 return [x for x in history]
518 return _node_history(context_uid, repo_id, revision, path, limit)
520 return _node_history(context_uid, repo_id, revision, path, limit)
519
521
520 @reraise_safe_exceptions
522 @reraise_safe_exceptions
521 def node_history_untill(self, wire, revision, path, limit):
523 def node_history_untill(self, wire, revision, path, limit):
522 cache_on, context_uid, repo_id = self._cache_on(wire)
524 cache_on, context_uid, repo_id = self._cache_on(wire)
523 region = self._region(wire)
525 region = self._region(wire)
524
526
525 @region.conditional_cache_on_arguments(condition=cache_on)
527 @region.conditional_cache_on_arguments(condition=cache_on)
526 def _node_history_until(_context_uid, _repo_id):
528 def _node_history_until(_context_uid, _repo_id):
527 repo = self._factory.repo(wire)
529 repo = self._factory.repo(wire)
528 ctx = self._get_ctx(repo, revision)
530 ctx = self._get_ctx(repo, revision)
529 fctx = ctx.filectx(safe_bytes(path))
531 fctx = ctx.filectx(safe_bytes(path))
530
532
531 file_log = list(fctx.filelog())
533 file_log = list(fctx.filelog())
532 if limit:
534 if limit:
533 # Limit to the last n items
535 # Limit to the last n items
534 file_log = file_log[-limit:]
536 file_log = file_log[-limit:]
535
537
536 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
538 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
537 return _node_history_until(context_uid, repo_id, revision, path, limit)
539 return _node_history_until(context_uid, repo_id, revision, path, limit)
538
540
539 @reraise_safe_exceptions
541 @reraise_safe_exceptions
540 def fctx_annotate(self, wire, revision, path):
542 def fctx_annotate(self, wire, revision, path):
541 repo = self._factory.repo(wire)
543 repo = self._factory.repo(wire)
542 ctx = self._get_ctx(repo, revision)
544 ctx = self._get_ctx(repo, revision)
543 fctx = ctx.filectx(safe_bytes(path))
545 fctx = ctx.filectx(safe_bytes(path))
544
546
545 result = []
547 result = []
546 for i, annotate_obj in enumerate(fctx.annotate(), 1):
548 for i, annotate_obj in enumerate(fctx.annotate(), 1):
547 ln_no = i
549 ln_no = i
548 sha = hex(annotate_obj.fctx.node())
550 sha = hex(annotate_obj.fctx.node())
549 content = annotate_obj.text
551 content = annotate_obj.text
550 result.append((ln_no, sha, content))
552 result.append((ln_no, sha, content))
551 return result
553 return result
552
554
553 @reraise_safe_exceptions
555 @reraise_safe_exceptions
554 def fctx_node_data(self, wire, revision, path):
556 def fctx_node_data(self, wire, revision, path):
555 repo = self._factory.repo(wire)
557 repo = self._factory.repo(wire)
556 ctx = self._get_ctx(repo, revision)
558 ctx = self._get_ctx(repo, revision)
557 fctx = ctx.filectx(safe_bytes(path))
559 fctx = ctx.filectx(safe_bytes(path))
558 return BinaryEnvelope(fctx.data())
560 return BinaryEnvelope(fctx.data())
559
561
560 @reraise_safe_exceptions
562 @reraise_safe_exceptions
561 def fctx_flags(self, wire, commit_id, path):
563 def fctx_flags(self, wire, commit_id, path):
562 cache_on, context_uid, repo_id = self._cache_on(wire)
564 cache_on, context_uid, repo_id = self._cache_on(wire)
563 region = self._region(wire)
565 region = self._region(wire)
564
566
565 @region.conditional_cache_on_arguments(condition=cache_on)
567 @region.conditional_cache_on_arguments(condition=cache_on)
566 def _fctx_flags(_repo_id, _commit_id, _path):
568 def _fctx_flags(_repo_id, _commit_id, _path):
567 repo = self._factory.repo(wire)
569 repo = self._factory.repo(wire)
568 ctx = self._get_ctx(repo, commit_id)
570 ctx = self._get_ctx(repo, commit_id)
569 fctx = ctx.filectx(safe_bytes(path))
571 fctx = ctx.filectx(safe_bytes(path))
570 return fctx.flags()
572 return fctx.flags()
571
573
572 return _fctx_flags(repo_id, commit_id, path)
574 return _fctx_flags(repo_id, commit_id, path)
573
575
574 @reraise_safe_exceptions
576 @reraise_safe_exceptions
575 def fctx_size(self, wire, commit_id, path):
577 def fctx_size(self, wire, commit_id, path):
576 cache_on, context_uid, repo_id = self._cache_on(wire)
578 cache_on, context_uid, repo_id = self._cache_on(wire)
577 region = self._region(wire)
579 region = self._region(wire)
578
580
579 @region.conditional_cache_on_arguments(condition=cache_on)
581 @region.conditional_cache_on_arguments(condition=cache_on)
580 def _fctx_size(_repo_id, _revision, _path):
582 def _fctx_size(_repo_id, _revision, _path):
581 repo = self._factory.repo(wire)
583 repo = self._factory.repo(wire)
582 ctx = self._get_ctx(repo, commit_id)
584 ctx = self._get_ctx(repo, commit_id)
583 fctx = ctx.filectx(safe_bytes(path))
585 fctx = ctx.filectx(safe_bytes(path))
584 return fctx.size()
586 return fctx.size()
585 return _fctx_size(repo_id, commit_id, path)
587 return _fctx_size(repo_id, commit_id, path)
586
588
    @reraise_safe_exceptions
    def get_all_commit_ids(self, wire, name):
        """
        Return all visible changeset hashes (40-char hex strings) of the repo,
        in changelog (oldest-first) order. Cached per context/repo.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _get_all_commit_ids(_context_uid, _repo_id, _name):
            repo = self._factory.repo(wire)
            # 'visible' filter hides obsolete/hidden changesets
            revs = [ascii_str(repo[x].hex()) for x in repo.filtered(b'visible').changelog.revs()]
            return revs
        return _get_all_commit_ids(context_uid, repo_id, name)
598
600
599 @reraise_safe_exceptions
601 @reraise_safe_exceptions
600 def get_config_value(self, wire, section, name, untrusted=False):
602 def get_config_value(self, wire, section, name, untrusted=False):
601 repo = self._factory.repo(wire)
603 repo = self._factory.repo(wire)
602 return repo.ui.config(ascii_bytes(section), ascii_bytes(name), untrusted=untrusted)
604 return repo.ui.config(ascii_bytes(section), ascii_bytes(name), untrusted=untrusted)
603
605
    @reraise_safe_exceptions
    def is_large_file(self, wire, commit_id, path):
        """
        Return True if ``path`` is a largefiles standin.

        NOTE(review): the check is purely path-based (``lfutil.isstandin``);
        the repo/commit arguments only participate in the cache key.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
            return largefiles.lfutil.isstandin(safe_bytes(path))

        return _is_large_file(context_uid, repo_id, commit_id, path)
614
616
    @reraise_safe_exceptions
    def is_binary(self, wire, revision, path):
        """
        Return True if file ``path`` at ``revision`` is detected as binary
        by Mercurial's heuristics. Cached per (repo_id, revision, path).
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _is_binary(_repo_id, _sha, _path):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, revision)
            fctx = ctx.filectx(safe_bytes(path))
            return fctx.isbinary()

        return _is_binary(repo_id, revision, path)
628
630
    @reraise_safe_exceptions
    def md5_hash(self, wire, revision, path):
        """
        Return the hex MD5 digest of the content of ``path`` at ``revision``.
        Used as a cheap content fingerprint; cached per (repo, rev, path).
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _md5_hash(_repo_id, _sha, _path):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, revision)
            fctx = ctx.filectx(safe_bytes(path))
            return hashlib.md5(fctx.data()).hexdigest()

        return _md5_hash(repo_id, revision, path)
642
644
    @reraise_safe_exceptions
    def in_largefiles_store(self, wire, sha):
        """Return True if largefile ``sha`` exists in the repo-local store."""
        repo = self._factory.repo(wire)
        return largefiles.lfutil.instore(repo, sha)
647
649
    @reraise_safe_exceptions
    def in_user_cache(self, wire, sha):
        """Return True if largefile ``sha`` exists in the per-user cache."""
        repo = self._factory.repo(wire)
        return largefiles.lfutil.inusercache(repo.ui, sha)
652
654
    @reraise_safe_exceptions
    def store_path(self, wire, sha):
        """Return the filesystem path of largefile ``sha`` in the repo store."""
        repo = self._factory.repo(wire)
        return largefiles.lfutil.storepath(repo, sha)
657
659
658 @reraise_safe_exceptions
660 @reraise_safe_exceptions
659 def link(self, wire, sha, path):
661 def link(self, wire, sha, path):
660 repo = self._factory.repo(wire)
662 repo = self._factory.repo(wire)
661 largefiles.lfutil.link(
663 largefiles.lfutil.link(
662 largefiles.lfutil.usercachepath(repo.ui, sha), path)
664 largefiles.lfutil.usercachepath(repo.ui, sha), path)
663
665
    @reraise_safe_exceptions
    def localrepository(self, wire, create=False):
        """Open (or, with ``create=True``, initialize) the repo; discards the handle."""
        self._factory.repo(wire, create=create)
667
669
668 @reraise_safe_exceptions
670 @reraise_safe_exceptions
669 def lookup(self, wire, revision, both):
671 def lookup(self, wire, revision, both):
670 cache_on, context_uid, repo_id = self._cache_on(wire)
672 cache_on, context_uid, repo_id = self._cache_on(wire)
671 region = self._region(wire)
673 region = self._region(wire)
672
674
673 @region.conditional_cache_on_arguments(condition=cache_on)
675 @region.conditional_cache_on_arguments(condition=cache_on)
674 def _lookup(_context_uid, _repo_id, _revision, _both):
676 def _lookup(_context_uid, _repo_id, _revision, _both):
675
677
676 repo = self._factory.repo(wire)
678 repo = self._factory.repo(wire)
677 rev = _revision
679 rev = _revision
678 if isinstance(rev, int):
680 if isinstance(rev, int):
679 # NOTE(marcink):
681 # NOTE(marcink):
680 # since Mercurial doesn't support negative indexes properly
682 # since Mercurial doesn't support negative indexes properly
681 # we need to shift accordingly by one to get proper index, e.g
683 # we need to shift accordingly by one to get proper index, e.g
682 # repo[-1] => repo[-2]
684 # repo[-1] => repo[-2]
683 # repo[0] => repo[-1]
685 # repo[0] => repo[-1]
684 if rev <= 0:
686 if rev <= 0:
685 rev = rev + -1
687 rev = rev + -1
686 try:
688 try:
687 ctx = self._get_ctx(repo, rev)
689 ctx = self._get_ctx(repo, rev)
688 except (TypeError, RepoLookupError, binascii.Error) as e:
690 except (TypeError, RepoLookupError, binascii.Error) as e:
689 e._org_exc_tb = traceback.format_exc()
691 e._org_exc_tb = traceback.format_exc()
690 raise exceptions.LookupException(e)(rev)
692 raise exceptions.LookupException(e)(rev)
691 except LookupError as e:
693 except LookupError as e:
692 e._org_exc_tb = traceback.format_exc()
694 e._org_exc_tb = traceback.format_exc()
693 raise exceptions.LookupException(e)(e.name)
695 raise exceptions.LookupException(e)(e.name)
694
696
695 if not both:
697 if not both:
696 return ctx.hex()
698 return ctx.hex()
697
699
698 ctx = repo[ctx.hex()]
700 ctx = repo[ctx.hex()]
699 return ctx.hex(), ctx.rev()
701 return ctx.hex(), ctx.rev()
700
702
701 return _lookup(context_uid, repo_id, revision, both)
703 return _lookup(context_uid, repo_id, revision, both)
702
704
    @reraise_safe_exceptions
    def sync_push(self, wire, url):
        """
        Push all heads and bookmarks of the local repo to remote ``url``.
        Silently returns (no-op) when the URL fails validation.

        :returns: the changegroup push result, or None when URL check failed.
        """
        if not self.check_url(url, wire['config']):
            return

        repo = self._factory.repo(wire)

        # Disable any prompts for this repo
        repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')

        bookmarks = list(dict(repo._bookmarks).keys())
        remote = peer(repo, {}, safe_bytes(url))
        # Disable any prompts for this remote
        remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')

        return exchange.push(
            repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
720
722
    @reraise_safe_exceptions
    def revision(self, wire, rev):
        """Return the local (integer) revision number for changeset ``rev``."""
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, rev)
        return ctx.rev()
726
728
    @reraise_safe_exceptions
    def rev_range(self, wire, commit_filter):
        """
        Resolve a list of revset expressions to changeset hashes.

        :param commit_filter: list of revset strings passed to ``revrange``.
        :returns: list of 40-char hex commit ids.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _rev_range(_context_uid, _repo_id, _filter):
            # closes over commit_filter; _filter is only the cache key
            repo = self._factory.repo(wire)
            revisions = [
                ascii_str(repo[rev].hex())
                for rev in revrange(repo, list(map(ascii_bytes, commit_filter)))
            ]
            return revisions

        # sorted() gives an order-insensitive cache key for the same filter set
        return _rev_range(context_uid, repo_id, sorted(commit_filter))
742
744
743 @reraise_safe_exceptions
745 @reraise_safe_exceptions
744 def rev_range_hash(self, wire, node):
746 def rev_range_hash(self, wire, node):
745 repo = self._factory.repo(wire)
747 repo = self._factory.repo(wire)
746
748
747 def get_revs(repo, rev_opt):
749 def get_revs(repo, rev_opt):
748 if rev_opt:
750 if rev_opt:
749 revs = revrange(repo, rev_opt)
751 revs = revrange(repo, rev_opt)
750 if len(revs) == 0:
752 if len(revs) == 0:
751 return (nullrev, nullrev)
753 return (nullrev, nullrev)
752 return max(revs), min(revs)
754 return max(revs), min(revs)
753 else:
755 else:
754 return len(repo) - 1, 0
756 return len(repo) - 1, 0
755
757
756 stop, start = get_revs(repo, [node + ':'])
758 stop, start = get_revs(repo, [node + ':'])
757 revs = [ascii_str(repo[r].hex()) for r in range(start, stop + 1)]
759 revs = [ascii_str(repo[r].hex()) for r in range(start, stop + 1)]
758 return revs
760 return revs
759
761
760 @reraise_safe_exceptions
762 @reraise_safe_exceptions
761 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
763 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
762 org_path = safe_bytes(wire["path"])
764 org_path = safe_bytes(wire["path"])
763 other_path = safe_bytes(kwargs.pop('other_path', ''))
765 other_path = safe_bytes(kwargs.pop('other_path', ''))
764
766
765 # case when we want to compare two independent repositories
767 # case when we want to compare two independent repositories
766 if other_path and other_path != wire["path"]:
768 if other_path and other_path != wire["path"]:
767 baseui = self._factory._create_config(wire["config"])
769 baseui = self._factory._create_config(wire["config"])
768 repo = unionrepo.makeunionrepository(baseui, other_path, org_path)
770 repo = unionrepo.makeunionrepository(baseui, other_path, org_path)
769 else:
771 else:
770 repo = self._factory.repo(wire)
772 repo = self._factory.repo(wire)
771 return list(repo.revs(rev_spec, *args))
773 return list(repo.revs(rev_spec, *args))
772
774
    @reraise_safe_exceptions
    def verify(self, wire,):
        """Run ``hg verify`` on the repo and return its captured text output."""
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])

        # redirect ui messages into a buffer so they can be returned
        baseui, output = patch_ui_message_output(baseui)

        repo.ui = baseui
        verify.verify(repo)
        return output.getvalue()
783
785
    @reraise_safe_exceptions
    def hg_update_cache(self, wire,):
        """
        Rebuild all repository caches (``debugupdatecaches`` equivalent)
        under both working-dir and store locks; return captured ui output.
        """
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        baseui, output = patch_ui_message_output(baseui)

        repo.ui = baseui
        # both locks required: updatecaches touches the store and wdir state
        with repo.wlock(), repo.lock():
            repo.updatecaches(full=True)

        return output.getvalue()
795
797
    @reraise_safe_exceptions
    def hg_rebuild_fn_cache(self, wire,):
        """Rebuild the fncache file (``debugrebuildfncache``); return ui output."""
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        baseui, output = patch_ui_message_output(baseui)

        repo.ui = baseui

        repair.rebuildfncache(baseui, repo)

        return output.getvalue()
807
809
    @reraise_safe_exceptions
    def tags(self, wire):
        """
        Return a mapping ``{tag_name: hex_commit_id}`` for all repo tags.
        Cached per context/repo.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _tags(_context_uid, _repo_id):
            repo = self._factory.repo(wire)
            return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo.tags().items()}

        return _tags(context_uid, repo_id)
819
821
    @reraise_safe_exceptions
    def update(self, wire, node='', clean=False):
        """
        Update the working directory to ``node`` (``hg update``).

        :param clean: discard uncommitted changes (``hg update -C``).
        """
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        node = safe_bytes(node)

        commands.update(baseui, repo, node=node, clean=clean)
827
829
    @reraise_safe_exceptions
    def identify(self, wire):
        """Return the full node id of the working copy (``hg identify --id``) as bytes."""
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        output = io.BytesIO()
        baseui.write = output.write
        # This is required to get a full node id
        baseui.debugflag = True
        commands.identify(baseui, repo, id=True)

        return output.getvalue()
839
841
840 @reraise_safe_exceptions
842 @reraise_safe_exceptions
841 def heads(self, wire, branch=None):
843 def heads(self, wire, branch=None):
842 repo = self._factory.repo(wire)
844 repo = self._factory.repo(wire)
843 baseui = self._factory._create_config(wire['config'])
845 baseui = self._factory._create_config(wire['config'])
844 output = io.BytesIO()
846 output = io.BytesIO()
845
847
846 def write(data, **unused_kwargs):
848 def write(data, **unused_kwargs):
847 output.write(data)
849 output.write(data)
848
850
849 baseui.write = write
851 baseui.write = write
850 if branch:
852 if branch:
851 args = [safe_bytes(branch)]
853 args = [safe_bytes(branch)]
852 else:
854 else:
853 args = []
855 args = []
854 commands.heads(baseui, repo, template=b'{node} ', *args)
856 commands.heads(baseui, repo, template=b'{node} ', *args)
855
857
856 return output.getvalue()
858 return output.getvalue()
857
859
858 @reraise_safe_exceptions
860 @reraise_safe_exceptions
859 def ancestor(self, wire, revision1, revision2):
861 def ancestor(self, wire, revision1, revision2):
860 repo = self._factory.repo(wire)
862 repo = self._factory.repo(wire)
861 changelog = repo.changelog
863 changelog = repo.changelog
862 lookup = repo.lookup
864 lookup = repo.lookup
863 a = changelog.ancestor(lookup(safe_bytes(revision1)), lookup(safe_bytes(revision2)))
865 a = changelog.ancestor(lookup(safe_bytes(revision1)), lookup(safe_bytes(revision2)))
864 return hex(a)
866 return hex(a)
865
867
    @reraise_safe_exceptions
    def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
        """
        Clone ``source`` into ``dest``; skip working-dir checkout unless
        ``update_after_clone`` is True. Hooks are enabled by default.
        """
        baseui = self._factory._create_config(wire["config"], hooks=hooks)
        clone(baseui, safe_bytes(source), safe_bytes(dest), noupdate=not update_after_clone)
870
872
    @reraise_safe_exceptions
    def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
        """
        Create an in-memory commit (no working directory) and return its hex id.

        :param parents: parent changeset ids for the new commit.
        :param files: all touched paths (added/changed/removed).
        :param extra: dict of extra commit metadata (str -> str).
        :param removed: list of str paths to delete in this commit.
        :param updated: list of node dicts (``path``, ``content``, ``mode``)
            to add or modify.
        """

        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        # phases.publish decides whether the new commit is public or draft
        publishing = baseui.configbool(b'phases', b'publish')

        def _filectxfn(_repo, ctx, path: bytes):
            """
            Marks given path as added/changed/removed in a given _repo. This is
            for internal mercurial commit function.
            """

            # check if this path is removed
            if safe_str(path) in removed:
                # returning None is a way to mark node for removal
                return None

            # check if this path is added
            for node in updated:
                if safe_bytes(node['path']) == path:
                    return memfilectx(
                        _repo,
                        changectx=ctx,
                        path=safe_bytes(node['path']),
                        data=safe_bytes(node['content']),
                        islink=False,
                        isexec=bool(node['mode'] & stat.S_IXUSR),
                        copysource=False)
            # path was neither removed nor present in `updated` -> caller error
            abort_exc = exceptions.AbortException()
            raise abort_exc(f"Given path haven't been marked as added, changed or removed ({path})")

        if publishing:
            new_commit_phase = b'public'
        else:
            new_commit_phase = b'draft'
        with repo.ui.configoverride({(b'phases', b'new-commit'): new_commit_phase}):
            # extra metadata must be bytes for mercurial internals
            kwargs = {safe_bytes(k): safe_bytes(v) for k, v in extra.items()}
            commit_ctx = memctx(
                repo=repo,
                parents=parents,
                text=safe_bytes(message),
                files=[safe_bytes(x) for x in files],
                filectxfn=_filectxfn,
                user=safe_bytes(user),
                date=(commit_time, commit_timezone),
                extra=kwargs)

            n = repo.commitctx(commit_ctx)
            new_id = hex(n)

        return new_id
923
925
    @reraise_safe_exceptions
    def pull(self, wire, url, commit_ids=None):
        """
        Pull from remote ``url`` via the exchange API (non-command path).

        :param commit_ids: optional list of hex ids to restrict the pull to.
        :returns: the changegroup pull result code.
        """
        repo = self._factory.repo(wire)
        # Disable any prompts for this repo
        repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')

        remote = peer(repo, {}, safe_bytes(url))
        # Disable any prompts for this remote
        remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')

        if commit_ids:
            # exchange.pull expects binary node ids
            commit_ids = [bin(commit_id) for commit_id in commit_ids]

        return exchange.pull(
            repo, remote, heads=commit_ids, force=None).cgresult
939
941
940 @reraise_safe_exceptions
942 @reraise_safe_exceptions
941 def pull_cmd(self, wire, source, bookmark='', branch='', revision='', hooks=True):
943 def pull_cmd(self, wire, source, bookmark='', branch='', revision='', hooks=True):
942 repo = self._factory.repo(wire)
944 repo = self._factory.repo(wire)
943 baseui = self._factory._create_config(wire['config'], hooks=hooks)
945 baseui = self._factory._create_config(wire['config'], hooks=hooks)
944
946
945 source = safe_bytes(source)
947 source = safe_bytes(source)
946
948
947 # Mercurial internally has a lot of logic that checks ONLY if
949 # Mercurial internally has a lot of logic that checks ONLY if
948 # option is defined, we just pass those if they are defined then
950 # option is defined, we just pass those if they are defined then
949 opts = {}
951 opts = {}
950 if bookmark:
952 if bookmark:
951 if isinstance(branch, list):
953 if isinstance(branch, list):
952 bookmark = [safe_bytes(x) for x in bookmark]
954 bookmark = [safe_bytes(x) for x in bookmark]
953 else:
955 else:
954 bookmark = safe_bytes(bookmark)
956 bookmark = safe_bytes(bookmark)
955 opts['bookmark'] = bookmark
957 opts['bookmark'] = bookmark
956 if branch:
958 if branch:
957 if isinstance(branch, list):
959 if isinstance(branch, list):
958 branch = [safe_bytes(x) for x in branch]
960 branch = [safe_bytes(x) for x in branch]
959 else:
961 else:
960 branch = safe_bytes(branch)
962 branch = safe_bytes(branch)
961 opts['branch'] = branch
963 opts['branch'] = branch
962 if revision:
964 if revision:
963 opts['rev'] = safe_bytes(revision)
965 opts['rev'] = safe_bytes(revision)
964
966
965 commands.pull(baseui, repo, source, **opts)
967 commands.pull(baseui, repo, source, **opts)
966
968
    @reraise_safe_exceptions
    def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
        """
        Run ``hg push`` of ``revisions`` to ``dest_path``.

        :param push_branches: allow creating new branches on the remote
            (``--new-branch``).
        """
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'], hooks=hooks)
        commands.push(baseui, repo, dest=dest_path, rev=revisions,
                      new_branch=push_branches)
973
975
    @reraise_safe_exceptions
    def strip(self, wire, revision, update, backup):
        """
        Strip ``revision`` (and descendants) from the repo via the strip
        extension; optionally update the working dir and keep a backup bundle.
        """
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        hgext_strip(
            repo.baseui, repo, ctx.node(), update=update, backup=backup)
980
982
981 @reraise_safe_exceptions
983 @reraise_safe_exceptions
982 def get_unresolved_files(self, wire):
984 def get_unresolved_files(self, wire):
983 repo = self._factory.repo(wire)
985 repo = self._factory.repo(wire)
984
986
985 log.debug('Calculating unresolved files for repo: %s', repo)
987 log.debug('Calculating unresolved files for repo: %s', repo)
986 output = io.BytesIO()
988 output = io.BytesIO()
987
989
988 def write(data, **unused_kwargs):
990 def write(data, **unused_kwargs):
989 output.write(data)
991 output.write(data)
990
992
991 baseui = self._factory._create_config(wire['config'])
993 baseui = self._factory._create_config(wire['config'])
992 baseui.write = write
994 baseui.write = write
993
995
994 commands.resolve(baseui, repo, list=True)
996 commands.resolve(baseui, repo, list=True)
995 unresolved = output.getvalue().splitlines(0)
997 unresolved = output.getvalue().splitlines(0)
996 return unresolved
998 return unresolved
997
999
    @reraise_safe_exceptions
    def merge(self, wire, revision):
        """
        Merge ``revision`` into the working directory (``hg merge``),
        with conflicts dumped via the non-interactive internal:dump tool.
        """
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        repo.ui.setconfig(b'ui', b'merge', b'internal:dump')

        # In case of sub repositories are used mercurial prompts the user in
        # case of merge conflicts or different sub repository sources. By
        # setting the interactive flag to `False` mercurial doesn't prompt the
        # used but instead uses a default value.
        repo.ui.setconfig(b'ui', b'interactive', False)
        commands.merge(baseui, repo, rev=revision)
1010
1012
1011 @reraise_safe_exceptions
1013 @reraise_safe_exceptions
1012 def merge_state(self, wire):
1014 def merge_state(self, wire):
1013 repo = self._factory.repo(wire)
1015 repo = self._factory.repo(wire)
1014 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
1016 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
1015
1017
1016 # In case of sub repositories are used mercurial prompts the user in
1018 # In case of sub repositories are used mercurial prompts the user in
1017 # case of merge conflicts or different sub repository sources. By
1019 # case of merge conflicts or different sub repository sources. By
1018 # setting the interactive flag to `False` mercurial doesn't prompt the
1020 # setting the interactive flag to `False` mercurial doesn't prompt the
1019 # used but instead uses a default value.
1021 # used but instead uses a default value.
1020 repo.ui.setconfig(b'ui', b'interactive', False)
1022 repo.ui.setconfig(b'ui', b'interactive', False)
1021 ms = hg_merge.mergestate(repo)
1023 ms = hg_merge.mergestate(repo)
1022 return [x for x in ms.unresolved()]
1024 return [x for x in ms.unresolved()]
1023
1025
    @reraise_safe_exceptions
    def commit(self, wire, message, username, close_branch=False):
        """
        Commit the working directory (``hg commit``) as ``username``,
        optionally closing the current branch.
        """
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        repo.ui.setconfig(b'ui', b'username', username)
        commands.commit(baseui, repo, message=message, close_branch=close_branch)
1030
1032
    @reraise_safe_exceptions
    def rebase(self, wire, source=None, dest=None, abort=False):
        """
        Rebase ``source`` onto ``dest`` (or abort a rebase in progress).
        Original changesets are kept unless aborting.
        """
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
        # In case of sub repositories are used mercurial prompts the user in
        # case of merge conflicts or different sub repository sources. By
        # setting the interactive flag to `False` mercurial doesn't prompt the
        # used but instead uses a default value.
        repo.ui.setconfig(b'ui', b'interactive', False)
        rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
1042
1044
    @reraise_safe_exceptions
    def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
        """
        Create tag ``name`` pointing at ``revision``.

        :param local: create a local (non-versioned) tag.
        :raises exceptions.AbortException: when mercurial aborts the operation.
        """
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        node = ctx.node()

        date = (tag_time, tag_timezone)
        try:
            hg_tag.tag(repo, name, node, message, local, user, date)
        except Abort as e:
            log.exception("Tag operation aborted")
            # Exception can contain unicode which we convert
            raise exceptions.AbortException(e)(repr(e))
1056
1058
    @reraise_safe_exceptions
    def bookmark(self, wire, bookmark, revision=''):
        """Create or force-move bookmark ``bookmark`` to ``revision``."""
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        commands.bookmark(baseui, repo, safe_bytes(bookmark), rev=safe_bytes(revision), force=True)
1062
1064
    @reraise_safe_exceptions
    def install_hooks(self, wire, force=False):
        """No-op: hook installation is not required for Mercurial repos."""
        # we don't need any special hooks for Mercurial
        pass
1067
1069
    @reraise_safe_exceptions
    def get_hooks_info(self, wire):
        """Report hook versions; both mirror the vcsserver version for hg."""
        return {
            'pre_version': vcsserver.__version__,
            'post_version': vcsserver.__version__,
        }
1074
1076
    @reraise_safe_exceptions
    def set_head_ref(self, wire, head_name):
        """No-op for Mercurial (HEAD refs are a git concept)."""
        pass
1078
1080
1079 @reraise_safe_exceptions
1081 @reraise_safe_exceptions
1080 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1082 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1081 archive_dir_name, commit_id):
1083 archive_dir_name, commit_id):
1082
1084
1083 def file_walker(_commit_id, path):
1085 def file_walker(_commit_id, path):
1084 repo = self._factory.repo(wire)
1086 repo = self._factory.repo(wire)
1085 ctx = repo[_commit_id]
1087 ctx = repo[_commit_id]
1086 is_root = path in ['', '/']
1088 is_root = path in ['', '/']
1087 if is_root:
1089 if is_root:
1088 matcher = alwaysmatcher(badfn=None)
1090 matcher = alwaysmatcher(badfn=None)
1089 else:
1091 else:
1090 matcher = patternmatcher('', [(b'glob', path+'/**', b'')], badfn=None)
1092 matcher = patternmatcher('', [(b'glob', path+'/**', b'')], badfn=None)
1091 file_iter = ctx.manifest().walk(matcher)
1093 file_iter = ctx.manifest().walk(matcher)
1092
1094
1093 for fn in file_iter:
1095 for fn in file_iter:
1094 file_path = fn
1096 file_path = fn
1095 flags = ctx.flags(fn)
1097 flags = ctx.flags(fn)
1096 mode = b'x' in flags and 0o755 or 0o644
1098 mode = b'x' in flags and 0o755 or 0o644
1097 is_link = b'l' in flags
1099 is_link = b'l' in flags
1098
1100
1099 yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)
1101 yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)
1100
1102
1101 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1103 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1102 archive_dir_name, commit_id)
1104 archive_dir_name, commit_id)
1103
1105
@@ -1,891 +1,891 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19 import os
19 import os
20 import subprocess
20 import subprocess
21 from urllib.error import URLError
21 from urllib.error import URLError
22 import urllib.parse
22 import urllib.parse
23 import logging
23 import logging
24 import posixpath as vcspath
24 import posixpath as vcspath
25 import io
25 import io
26 import urllib.request
26 import urllib.request
27 import urllib.parse
27 import urllib.parse
28 import urllib.error
28 import urllib.error
29 import traceback
29 import traceback
30
30
31
31
32 import svn.client # noqa
32 import svn.client # noqa
33 import svn.core # noqa
33 import svn.core # noqa
34 import svn.delta # noqa
34 import svn.delta # noqa
35 import svn.diff # noqa
35 import svn.diff # noqa
36 import svn.fs # noqa
36 import svn.fs # noqa
37 import svn.repos # noqa
37 import svn.repos # noqa
38
38
39 from vcsserver import svn_diff, exceptions, subprocessio, settings
39 from vcsserver import svn_diff, exceptions, subprocessio, settings
40 from vcsserver.base import RepoFactory, raise_from_original, ArchiveNode, archive_repo, BinaryEnvelope
40 from vcsserver.base import RepoFactory, raise_from_original, ArchiveNode, archive_repo, BinaryEnvelope
41 from vcsserver.exceptions import NoContentException
41 from vcsserver.exceptions import NoContentException
42 from vcsserver.str_utils import safe_str, safe_bytes
42 from vcsserver.str_utils import safe_str, safe_bytes
43 from vcsserver.vcs_base import RemoteBase
43 from vcsserver.vcs_base import RemoteBase
44 from vcsserver.lib.svnremoterepo import svnremoterepo
44 from vcsserver.lib.svnremoterepo import svnremoterepo
45 log = logging.getLogger(__name__)
45 log = logging.getLogger(__name__)
46
46
47
47
48 svn_compatible_versions_map = {
48 svn_compatible_versions_map = {
49 'pre-1.4-compatible': '1.3',
49 'pre-1.4-compatible': '1.3',
50 'pre-1.5-compatible': '1.4',
50 'pre-1.5-compatible': '1.4',
51 'pre-1.6-compatible': '1.5',
51 'pre-1.6-compatible': '1.5',
52 'pre-1.8-compatible': '1.7',
52 'pre-1.8-compatible': '1.7',
53 'pre-1.9-compatible': '1.8',
53 'pre-1.9-compatible': '1.8',
54 }
54 }
55
55
56 current_compatible_version = '1.14'
56 current_compatible_version = '1.14'
57
57
58
58
59 def reraise_safe_exceptions(func):
59 def reraise_safe_exceptions(func):
60 """Decorator for converting svn exceptions to something neutral."""
60 """Decorator for converting svn exceptions to something neutral."""
61 def wrapper(*args, **kwargs):
61 def wrapper(*args, **kwargs):
62 try:
62 try:
63 return func(*args, **kwargs)
63 return func(*args, **kwargs)
64 except Exception as e:
64 except Exception as e:
65 if not hasattr(e, '_vcs_kind'):
65 if not hasattr(e, '_vcs_kind'):
66 log.exception("Unhandled exception in svn remote call")
66 log.exception("Unhandled exception in svn remote call")
67 raise_from_original(exceptions.UnhandledException(e), e)
67 raise_from_original(exceptions.UnhandledException(e), e)
68 raise
68 raise
69 return wrapper
69 return wrapper
70
70
71
71
72 class SubversionFactory(RepoFactory):
72 class SubversionFactory(RepoFactory):
73 repo_type = 'svn'
73 repo_type = 'svn'
74
74
75 def _create_repo(self, wire, create, compatible_version):
75 def _create_repo(self, wire, create, compatible_version):
76 path = svn.core.svn_path_canonicalize(wire['path'])
76 path = svn.core.svn_path_canonicalize(wire['path'])
77 if create:
77 if create:
78 fs_config = {'compatible-version': current_compatible_version}
78 fs_config = {'compatible-version': current_compatible_version}
79 if compatible_version:
79 if compatible_version:
80
80
81 compatible_version_string = \
81 compatible_version_string = \
82 svn_compatible_versions_map.get(compatible_version) \
82 svn_compatible_versions_map.get(compatible_version) \
83 or compatible_version
83 or compatible_version
84 fs_config['compatible-version'] = compatible_version_string
84 fs_config['compatible-version'] = compatible_version_string
85
85
86 log.debug('Create SVN repo with config "%s"', fs_config)
86 log.debug('Create SVN repo with config `%s`', fs_config)
87 repo = svn.repos.create(path, "", "", None, fs_config)
87 repo = svn.repos.create(path, "", "", None, fs_config)
88 else:
88 else:
89 repo = svn.repos.open(path)
89 repo = svn.repos.open(path)
90
90
91 log.debug('Got SVN object: %s', repo)
91 log.debug('repository created: got SVN object: %s', repo)
92 return repo
92 return repo
93
93
94 def repo(self, wire, create=False, compatible_version=None):
94 def repo(self, wire, create=False, compatible_version=None):
95 """
95 """
96 Get a repository instance for the given path.
96 Get a repository instance for the given path.
97 """
97 """
98 return self._create_repo(wire, create, compatible_version)
98 return self._create_repo(wire, create, compatible_version)
99
99
100
100
101 NODE_TYPE_MAPPING = {
101 NODE_TYPE_MAPPING = {
102 svn.core.svn_node_file: 'file',
102 svn.core.svn_node_file: 'file',
103 svn.core.svn_node_dir: 'dir',
103 svn.core.svn_node_dir: 'dir',
104 }
104 }
105
105
106
106
107 class SvnRemote(RemoteBase):
107 class SvnRemote(RemoteBase):
108
108
109 def __init__(self, factory, hg_factory=None):
109 def __init__(self, factory, hg_factory=None):
110 self._factory = factory
110 self._factory = factory
111
111
112 @reraise_safe_exceptions
112 @reraise_safe_exceptions
113 def discover_svn_version(self):
113 def discover_svn_version(self):
114 try:
114 try:
115 import svn.core
115 import svn.core
116 svn_ver = svn.core.SVN_VERSION
116 svn_ver = svn.core.SVN_VERSION
117 except ImportError:
117 except ImportError:
118 svn_ver = None
118 svn_ver = None
119 return safe_str(svn_ver)
119 return safe_str(svn_ver)
120
120
121 @reraise_safe_exceptions
121 @reraise_safe_exceptions
122 def is_empty(self, wire):
122 def is_empty(self, wire):
123
123
124 try:
124 try:
125 return self.lookup(wire, -1) == 0
125 return self.lookup(wire, -1) == 0
126 except Exception:
126 except Exception:
127 log.exception("failed to read object_store")
127 log.exception("failed to read object_store")
128 return False
128 return False
129
129
130 def check_url(self, url):
130 def check_url(self, url):
131
131
132 # uuid function get's only valid UUID from proper repo, else
132 # uuid function get's only valid UUID from proper repo, else
133 # throws exception
133 # throws exception
134 username, password, src_url = self.get_url_and_credentials(url)
134 username, password, src_url = self.get_url_and_credentials(url)
135 try:
135 try:
136 svnremoterepo(username, password, src_url).svn().uuid
136 svnremoterepo(username, password, src_url).svn().uuid
137 except Exception:
137 except Exception:
138 tb = traceback.format_exc()
138 tb = traceback.format_exc()
139 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
139 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
140 raise URLError(
140 raise URLError(
141 '"%s" is not a valid Subversion source url.' % (url, ))
141 '"%s" is not a valid Subversion source url.' % (url, ))
142 return True
142 return True
143
143
144 def is_path_valid_repository(self, wire, path):
144 def is_path_valid_repository(self, wire, path):
145
145
146 # NOTE(marcink): short circuit the check for SVN repo
146 # NOTE(marcink): short circuit the check for SVN repo
147 # the repos.open might be expensive to check, but we have one cheap
147 # the repos.open might be expensive to check, but we have one cheap
148 # pre condition that we can use, to check for 'format' file
148 # pre condition that we can use, to check for 'format' file
149
149
150 if not os.path.isfile(os.path.join(path, 'format')):
150 if not os.path.isfile(os.path.join(path, 'format')):
151 return False
151 return False
152
152
153 try:
153 try:
154 svn.repos.open(path)
154 svn.repos.open(path)
155 except svn.core.SubversionException:
155 except svn.core.SubversionException:
156 tb = traceback.format_exc()
156 tb = traceback.format_exc()
157 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
157 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
158 return False
158 return False
159 return True
159 return True
160
160
161 @reraise_safe_exceptions
161 @reraise_safe_exceptions
162 def verify(self, wire,):
162 def verify(self, wire,):
163 repo_path = wire['path']
163 repo_path = wire['path']
164 if not self.is_path_valid_repository(wire, repo_path):
164 if not self.is_path_valid_repository(wire, repo_path):
165 raise Exception(
165 raise Exception(
166 "Path %s is not a valid Subversion repository." % repo_path)
166 "Path %s is not a valid Subversion repository." % repo_path)
167
167
168 cmd = ['svnadmin', 'info', repo_path]
168 cmd = ['svnadmin', 'info', repo_path]
169 stdout, stderr = subprocessio.run_command(cmd)
169 stdout, stderr = subprocessio.run_command(cmd)
170 return stdout
170 return stdout
171
171
172 def lookup(self, wire, revision):
172 def lookup(self, wire, revision):
173 if revision not in [-1, None, 'HEAD']:
173 if revision not in [-1, None, 'HEAD']:
174 raise NotImplementedError
174 raise NotImplementedError
175 repo = self._factory.repo(wire)
175 repo = self._factory.repo(wire)
176 fs_ptr = svn.repos.fs(repo)
176 fs_ptr = svn.repos.fs(repo)
177 head = svn.fs.youngest_rev(fs_ptr)
177 head = svn.fs.youngest_rev(fs_ptr)
178 return head
178 return head
179
179
180 def lookup_interval(self, wire, start_ts, end_ts):
180 def lookup_interval(self, wire, start_ts, end_ts):
181 repo = self._factory.repo(wire)
181 repo = self._factory.repo(wire)
182 fsobj = svn.repos.fs(repo)
182 fsobj = svn.repos.fs(repo)
183 start_rev = None
183 start_rev = None
184 end_rev = None
184 end_rev = None
185 if start_ts:
185 if start_ts:
186 start_ts_svn = apr_time_t(start_ts)
186 start_ts_svn = apr_time_t(start_ts)
187 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
187 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
188 else:
188 else:
189 start_rev = 1
189 start_rev = 1
190 if end_ts:
190 if end_ts:
191 end_ts_svn = apr_time_t(end_ts)
191 end_ts_svn = apr_time_t(end_ts)
192 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
192 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
193 else:
193 else:
194 end_rev = svn.fs.youngest_rev(fsobj)
194 end_rev = svn.fs.youngest_rev(fsobj)
195 return start_rev, end_rev
195 return start_rev, end_rev
196
196
197 def revision_properties(self, wire, revision):
197 def revision_properties(self, wire, revision):
198
198
199 cache_on, context_uid, repo_id = self._cache_on(wire)
199 cache_on, context_uid, repo_id = self._cache_on(wire)
200 region = self._region(wire)
200 region = self._region(wire)
201 @region.conditional_cache_on_arguments(condition=cache_on)
201 @region.conditional_cache_on_arguments(condition=cache_on)
202 def _revision_properties(_repo_id, _revision):
202 def _revision_properties(_repo_id, _revision):
203 repo = self._factory.repo(wire)
203 repo = self._factory.repo(wire)
204 fs_ptr = svn.repos.fs(repo)
204 fs_ptr = svn.repos.fs(repo)
205 return svn.fs.revision_proplist(fs_ptr, revision)
205 return svn.fs.revision_proplist(fs_ptr, revision)
206 return _revision_properties(repo_id, revision)
206 return _revision_properties(repo_id, revision)
207
207
208 def revision_changes(self, wire, revision):
208 def revision_changes(self, wire, revision):
209
209
210 repo = self._factory.repo(wire)
210 repo = self._factory.repo(wire)
211 fsobj = svn.repos.fs(repo)
211 fsobj = svn.repos.fs(repo)
212 rev_root = svn.fs.revision_root(fsobj, revision)
212 rev_root = svn.fs.revision_root(fsobj, revision)
213
213
214 editor = svn.repos.ChangeCollector(fsobj, rev_root)
214 editor = svn.repos.ChangeCollector(fsobj, rev_root)
215 editor_ptr, editor_baton = svn.delta.make_editor(editor)
215 editor_ptr, editor_baton = svn.delta.make_editor(editor)
216 base_dir = ""
216 base_dir = ""
217 send_deltas = False
217 send_deltas = False
218 svn.repos.replay2(
218 svn.repos.replay2(
219 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
219 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
220 editor_ptr, editor_baton, None)
220 editor_ptr, editor_baton, None)
221
221
222 added = []
222 added = []
223 changed = []
223 changed = []
224 removed = []
224 removed = []
225
225
226 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
226 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
227 for path, change in editor.changes.items():
227 for path, change in editor.changes.items():
228 # TODO: Decide what to do with directory nodes. Subversion can add
228 # TODO: Decide what to do with directory nodes. Subversion can add
229 # empty directories.
229 # empty directories.
230
230
231 if change.item_kind == svn.core.svn_node_dir:
231 if change.item_kind == svn.core.svn_node_dir:
232 continue
232 continue
233 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
233 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
234 added.append(path)
234 added.append(path)
235 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
235 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
236 svn.repos.CHANGE_ACTION_REPLACE]:
236 svn.repos.CHANGE_ACTION_REPLACE]:
237 changed.append(path)
237 changed.append(path)
238 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
238 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
239 removed.append(path)
239 removed.append(path)
240 else:
240 else:
241 raise NotImplementedError(
241 raise NotImplementedError(
242 "Action %s not supported on path %s" % (
242 "Action %s not supported on path %s" % (
243 change.action, path))
243 change.action, path))
244
244
245 changes = {
245 changes = {
246 'added': added,
246 'added': added,
247 'changed': changed,
247 'changed': changed,
248 'removed': removed,
248 'removed': removed,
249 }
249 }
250 return changes
250 return changes
251
251
252 @reraise_safe_exceptions
252 @reraise_safe_exceptions
253 def node_history(self, wire, path, revision, limit):
253 def node_history(self, wire, path, revision, limit):
254 cache_on, context_uid, repo_id = self._cache_on(wire)
254 cache_on, context_uid, repo_id = self._cache_on(wire)
255 region = self._region(wire)
255 region = self._region(wire)
256 @region.conditional_cache_on_arguments(condition=cache_on)
256 @region.conditional_cache_on_arguments(condition=cache_on)
257 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
257 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
258 cross_copies = False
258 cross_copies = False
259 repo = self._factory.repo(wire)
259 repo = self._factory.repo(wire)
260 fsobj = svn.repos.fs(repo)
260 fsobj = svn.repos.fs(repo)
261 rev_root = svn.fs.revision_root(fsobj, revision)
261 rev_root = svn.fs.revision_root(fsobj, revision)
262
262
263 history_revisions = []
263 history_revisions = []
264 history = svn.fs.node_history(rev_root, path)
264 history = svn.fs.node_history(rev_root, path)
265 history = svn.fs.history_prev(history, cross_copies)
265 history = svn.fs.history_prev(history, cross_copies)
266 while history:
266 while history:
267 __, node_revision = svn.fs.history_location(history)
267 __, node_revision = svn.fs.history_location(history)
268 history_revisions.append(node_revision)
268 history_revisions.append(node_revision)
269 if limit and len(history_revisions) >= limit:
269 if limit and len(history_revisions) >= limit:
270 break
270 break
271 history = svn.fs.history_prev(history, cross_copies)
271 history = svn.fs.history_prev(history, cross_copies)
272 return history_revisions
272 return history_revisions
273 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
273 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
274
274
275 def node_properties(self, wire, path, revision):
275 def node_properties(self, wire, path, revision):
276 cache_on, context_uid, repo_id = self._cache_on(wire)
276 cache_on, context_uid, repo_id = self._cache_on(wire)
277 region = self._region(wire)
277 region = self._region(wire)
278
278
279 @region.conditional_cache_on_arguments(condition=cache_on)
279 @region.conditional_cache_on_arguments(condition=cache_on)
280 def _node_properties(_repo_id, _path, _revision):
280 def _node_properties(_repo_id, _path, _revision):
281 repo = self._factory.repo(wire)
281 repo = self._factory.repo(wire)
282 fsobj = svn.repos.fs(repo)
282 fsobj = svn.repos.fs(repo)
283 rev_root = svn.fs.revision_root(fsobj, revision)
283 rev_root = svn.fs.revision_root(fsobj, revision)
284 return svn.fs.node_proplist(rev_root, path)
284 return svn.fs.node_proplist(rev_root, path)
285 return _node_properties(repo_id, path, revision)
285 return _node_properties(repo_id, path, revision)
286
286
287 def file_annotate(self, wire, path, revision):
287 def file_annotate(self, wire, path, revision):
288 abs_path = 'file://' + urllib.request.pathname2url(
288 abs_path = 'file://' + urllib.request.pathname2url(
289 vcspath.join(wire['path'], path))
289 vcspath.join(wire['path'], path))
290 file_uri = svn.core.svn_path_canonicalize(abs_path)
290 file_uri = svn.core.svn_path_canonicalize(abs_path)
291
291
292 start_rev = svn_opt_revision_value_t(0)
292 start_rev = svn_opt_revision_value_t(0)
293 peg_rev = svn_opt_revision_value_t(revision)
293 peg_rev = svn_opt_revision_value_t(revision)
294 end_rev = peg_rev
294 end_rev = peg_rev
295
295
296 annotations = []
296 annotations = []
297
297
298 def receiver(line_no, revision, author, date, line, pool):
298 def receiver(line_no, revision, author, date, line, pool):
299 annotations.append((line_no, revision, line))
299 annotations.append((line_no, revision, line))
300
300
301 # TODO: Cannot use blame5, missing typemap function in the swig code
301 # TODO: Cannot use blame5, missing typemap function in the swig code
302 try:
302 try:
303 svn.client.blame2(
303 svn.client.blame2(
304 file_uri, peg_rev, start_rev, end_rev,
304 file_uri, peg_rev, start_rev, end_rev,
305 receiver, svn.client.create_context())
305 receiver, svn.client.create_context())
306 except svn.core.SubversionException as exc:
306 except svn.core.SubversionException as exc:
307 log.exception("Error during blame operation.")
307 log.exception("Error during blame operation.")
308 raise Exception(
308 raise Exception(
309 "Blame not supported or file does not exist at path %s. "
309 "Blame not supported or file does not exist at path %s. "
310 "Error %s." % (path, exc))
310 "Error %s." % (path, exc))
311
311
312 return annotations
312 return annotations
313
313
314 def get_node_type(self, wire, path, revision=None):
314 def get_node_type(self, wire, path, revision=None):
315
315
316 cache_on, context_uid, repo_id = self._cache_on(wire)
316 cache_on, context_uid, repo_id = self._cache_on(wire)
317 region = self._region(wire)
317 region = self._region(wire)
318
318
319 @region.conditional_cache_on_arguments(condition=cache_on)
319 @region.conditional_cache_on_arguments(condition=cache_on)
320 def _get_node_type(_repo_id, _path, _revision):
320 def _get_node_type(_repo_id, _path, _revision):
321 repo = self._factory.repo(wire)
321 repo = self._factory.repo(wire)
322 fs_ptr = svn.repos.fs(repo)
322 fs_ptr = svn.repos.fs(repo)
323 if _revision is None:
323 if _revision is None:
324 _revision = svn.fs.youngest_rev(fs_ptr)
324 _revision = svn.fs.youngest_rev(fs_ptr)
325 root = svn.fs.revision_root(fs_ptr, _revision)
325 root = svn.fs.revision_root(fs_ptr, _revision)
326 node = svn.fs.check_path(root, path)
326 node = svn.fs.check_path(root, path)
327 return NODE_TYPE_MAPPING.get(node, None)
327 return NODE_TYPE_MAPPING.get(node, None)
328 return _get_node_type(repo_id, path, revision)
328 return _get_node_type(repo_id, path, revision)
329
329
330 def get_nodes(self, wire, path, revision=None):
330 def get_nodes(self, wire, path, revision=None):
331
331
332 cache_on, context_uid, repo_id = self._cache_on(wire)
332 cache_on, context_uid, repo_id = self._cache_on(wire)
333 region = self._region(wire)
333 region = self._region(wire)
334
334
335 @region.conditional_cache_on_arguments(condition=cache_on)
335 @region.conditional_cache_on_arguments(condition=cache_on)
336 def _get_nodes(_repo_id, _path, _revision):
336 def _get_nodes(_repo_id, _path, _revision):
337 repo = self._factory.repo(wire)
337 repo = self._factory.repo(wire)
338 fsobj = svn.repos.fs(repo)
338 fsobj = svn.repos.fs(repo)
339 if _revision is None:
339 if _revision is None:
340 _revision = svn.fs.youngest_rev(fsobj)
340 _revision = svn.fs.youngest_rev(fsobj)
341 root = svn.fs.revision_root(fsobj, _revision)
341 root = svn.fs.revision_root(fsobj, _revision)
342 entries = svn.fs.dir_entries(root, path)
342 entries = svn.fs.dir_entries(root, path)
343 result = []
343 result = []
344 for entry_path, entry_info in entries.items():
344 for entry_path, entry_info in entries.items():
345 result.append(
345 result.append(
346 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
346 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
347 return result
347 return result
348 return _get_nodes(repo_id, path, revision)
348 return _get_nodes(repo_id, path, revision)
349
349
350 def get_file_content(self, wire, path, rev=None):
350 def get_file_content(self, wire, path, rev=None):
351 repo = self._factory.repo(wire)
351 repo = self._factory.repo(wire)
352 fsobj = svn.repos.fs(repo)
352 fsobj = svn.repos.fs(repo)
353 if rev is None:
353 if rev is None:
354 rev = svn.fs.youngest_revision(fsobj)
354 rev = svn.fs.youngest_revision(fsobj)
355 root = svn.fs.revision_root(fsobj, rev)
355 root = svn.fs.revision_root(fsobj, rev)
356 content = svn.core.Stream(svn.fs.file_contents(root, path))
356 content = svn.core.Stream(svn.fs.file_contents(root, path))
357 return BinaryEnvelope(content.read())
357 return BinaryEnvelope(content.read())
358
358
359 def get_file_size(self, wire, path, revision=None):
359 def get_file_size(self, wire, path, revision=None):
360
360
361 cache_on, context_uid, repo_id = self._cache_on(wire)
361 cache_on, context_uid, repo_id = self._cache_on(wire)
362 region = self._region(wire)
362 region = self._region(wire)
363
363
364 @region.conditional_cache_on_arguments(condition=cache_on)
364 @region.conditional_cache_on_arguments(condition=cache_on)
365 def _get_file_size(_repo_id, _path, _revision):
365 def _get_file_size(_repo_id, _path, _revision):
366 repo = self._factory.repo(wire)
366 repo = self._factory.repo(wire)
367 fsobj = svn.repos.fs(repo)
367 fsobj = svn.repos.fs(repo)
368 if _revision is None:
368 if _revision is None:
369 _revision = svn.fs.youngest_revision(fsobj)
369 _revision = svn.fs.youngest_revision(fsobj)
370 root = svn.fs.revision_root(fsobj, _revision)
370 root = svn.fs.revision_root(fsobj, _revision)
371 size = svn.fs.file_length(root, path)
371 size = svn.fs.file_length(root, path)
372 return size
372 return size
373 return _get_file_size(repo_id, path, revision)
373 return _get_file_size(repo_id, path, revision)
374
374
375 def create_repository(self, wire, compatible_version=None):
375 def create_repository(self, wire, compatible_version=None):
376 log.info('Creating Subversion repository in path "%s"', wire['path'])
376 log.info('Creating Subversion repository in path "%s"', wire['path'])
377 self._factory.repo(wire, create=True,
377 self._factory.repo(wire, create=True,
378 compatible_version=compatible_version)
378 compatible_version=compatible_version)
379
379
380 def get_url_and_credentials(self, src_url):
380 def get_url_and_credentials(self, src_url):
381 obj = urllib.parse.urlparse(src_url)
381 obj = urllib.parse.urlparse(src_url)
382 username = obj.username or None
382 username = obj.username or None
383 password = obj.password or None
383 password = obj.password or None
384 return username, password, src_url
384 return username, password, src_url
385
385
386 def import_remote_repository(self, wire, src_url):
386 def import_remote_repository(self, wire, src_url):
387 repo_path = wire['path']
387 repo_path = wire['path']
388 if not self.is_path_valid_repository(wire, repo_path):
388 if not self.is_path_valid_repository(wire, repo_path):
389 raise Exception(
389 raise Exception(
390 "Path %s is not a valid Subversion repository." % repo_path)
390 "Path %s is not a valid Subversion repository." % repo_path)
391
391
392 username, password, src_url = self.get_url_and_credentials(src_url)
392 username, password, src_url = self.get_url_and_credentials(src_url)
393 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
393 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
394 '--trust-server-cert-failures=unknown-ca']
394 '--trust-server-cert-failures=unknown-ca']
395 if username and password:
395 if username and password:
396 rdump_cmd += ['--username', username, '--password', password]
396 rdump_cmd += ['--username', username, '--password', password]
397 rdump_cmd += [src_url]
397 rdump_cmd += [src_url]
398
398
399 rdump = subprocess.Popen(
399 rdump = subprocess.Popen(
400 rdump_cmd,
400 rdump_cmd,
401 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
401 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
402 load = subprocess.Popen(
402 load = subprocess.Popen(
403 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
403 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
404
404
405 # TODO: johbo: This can be a very long operation, might be better
405 # TODO: johbo: This can be a very long operation, might be better
406 # to track some kind of status and provide an api to check if the
406 # to track some kind of status and provide an api to check if the
407 # import is done.
407 # import is done.
408 rdump.wait()
408 rdump.wait()
409 load.wait()
409 load.wait()
410
410
411 log.debug('Return process ended with code: %s', rdump.returncode)
411 log.debug('Return process ended with code: %s', rdump.returncode)
412 if rdump.returncode != 0:
412 if rdump.returncode != 0:
413 errors = rdump.stderr.read()
413 errors = rdump.stderr.read()
414 log.error('svnrdump dump failed: statuscode %s: message: %s', rdump.returncode, errors)
414 log.error('svnrdump dump failed: statuscode %s: message: %s', rdump.returncode, errors)
415
415
416 reason = 'UNKNOWN'
416 reason = 'UNKNOWN'
417 if b'svnrdump: E230001:' in errors:
417 if b'svnrdump: E230001:' in errors:
418 reason = 'INVALID_CERTIFICATE'
418 reason = 'INVALID_CERTIFICATE'
419
419
420 if reason == 'UNKNOWN':
420 if reason == 'UNKNOWN':
421 reason = 'UNKNOWN:{}'.format(safe_str(errors))
421 reason = 'UNKNOWN:{}'.format(safe_str(errors))
422
422
423 raise Exception(
423 raise Exception(
424 'Failed to dump the remote repository from %s. Reason:%s' % (
424 'Failed to dump the remote repository from %s. Reason:%s' % (
425 src_url, reason))
425 src_url, reason))
426 if load.returncode != 0:
426 if load.returncode != 0:
427 raise Exception(
427 raise Exception(
428 'Failed to load the dump of remote repository from %s.' %
428 'Failed to load the dump of remote repository from %s.' %
429 (src_url, ))
429 (src_url, ))
430
430
431 def commit(self, wire, message, author, timestamp, updated, removed):
431 def commit(self, wire, message, author, timestamp, updated, removed):
432
432
433 updated = [{k: safe_bytes(v) for k, v in x.items() if isinstance(v, str)} for x in updated]
433 updated = [{k: safe_bytes(v) for k, v in x.items() if isinstance(v, str)} for x in updated]
434
434
435 message = safe_bytes(message)
435 message = safe_bytes(message)
436 author = safe_bytes(author)
436 author = safe_bytes(author)
437
437
438 repo = self._factory.repo(wire)
438 repo = self._factory.repo(wire)
439 fsobj = svn.repos.fs(repo)
439 fsobj = svn.repos.fs(repo)
440
440
441 rev = svn.fs.youngest_rev(fsobj)
441 rev = svn.fs.youngest_rev(fsobj)
442 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
442 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
443 txn_root = svn.fs.txn_root(txn)
443 txn_root = svn.fs.txn_root(txn)
444
444
445 for node in updated:
445 for node in updated:
446 TxnNodeProcessor(node, txn_root).update()
446 TxnNodeProcessor(node, txn_root).update()
447 for node in removed:
447 for node in removed:
448 TxnNodeProcessor(node, txn_root).remove()
448 TxnNodeProcessor(node, txn_root).remove()
449
449
450 commit_id = svn.repos.fs_commit_txn(repo, txn)
450 commit_id = svn.repos.fs_commit_txn(repo, txn)
451
451
452 if timestamp:
452 if timestamp:
453 apr_time = int(apr_time_t(timestamp))
453 apr_time = int(apr_time_t(timestamp))
454 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
454 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
455 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
455 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
456
456
457 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
457 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
458 return commit_id
458 return commit_id
459
459
460 def diff(self, wire, rev1, rev2, path1=None, path2=None,
460 def diff(self, wire, rev1, rev2, path1=None, path2=None,
461 ignore_whitespace=False, context=3):
461 ignore_whitespace=False, context=3):
462
462
463 wire.update(cache=False)
463 wire.update(cache=False)
464 repo = self._factory.repo(wire)
464 repo = self._factory.repo(wire)
465 diff_creator = SvnDiffer(
465 diff_creator = SvnDiffer(
466 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
466 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
467 try:
467 try:
468 return BinaryEnvelope(diff_creator.generate_diff())
468 return BinaryEnvelope(diff_creator.generate_diff())
469 except svn.core.SubversionException as e:
469 except svn.core.SubversionException as e:
470 log.exception(
470 log.exception(
471 "Error during diff operation operation. "
471 "Error during diff operation operation. "
472 "Path might not exist %s, %s", path1, path2)
472 "Path might not exist %s, %s", path1, path2)
473 return BinaryEnvelope(b'')
473 return BinaryEnvelope(b'')
474
474
475 @reraise_safe_exceptions
475 @reraise_safe_exceptions
476 def is_large_file(self, wire, path):
476 def is_large_file(self, wire, path):
477 return False
477 return False
478
478
479 @reraise_safe_exceptions
479 @reraise_safe_exceptions
480 def is_binary(self, wire, rev, path):
480 def is_binary(self, wire, rev, path):
481 cache_on, context_uid, repo_id = self._cache_on(wire)
481 cache_on, context_uid, repo_id = self._cache_on(wire)
482 region = self._region(wire)
482 region = self._region(wire)
483
483
484 @region.conditional_cache_on_arguments(condition=cache_on)
484 @region.conditional_cache_on_arguments(condition=cache_on)
485 def _is_binary(_repo_id, _rev, _path):
485 def _is_binary(_repo_id, _rev, _path):
486 raw_bytes = self.get_file_content(wire, path, rev)
486 raw_bytes = self.get_file_content(wire, path, rev)
487 return raw_bytes and b'\0' in raw_bytes
487 return raw_bytes and b'\0' in raw_bytes
488
488
489 return _is_binary(repo_id, rev, path)
489 return _is_binary(repo_id, rev, path)
490
490
491 @reraise_safe_exceptions
491 @reraise_safe_exceptions
492 def md5_hash(self, wire, rev, path):
492 def md5_hash(self, wire, rev, path):
493 cache_on, context_uid, repo_id = self._cache_on(wire)
493 cache_on, context_uid, repo_id = self._cache_on(wire)
494 region = self._region(wire)
494 region = self._region(wire)
495
495
496 @region.conditional_cache_on_arguments(condition=cache_on)
496 @region.conditional_cache_on_arguments(condition=cache_on)
497 def _md5_hash(_repo_id, _rev, _path):
497 def _md5_hash(_repo_id, _rev, _path):
498 return ''
498 return ''
499
499
500 return _md5_hash(repo_id, rev, path)
500 return _md5_hash(repo_id, rev, path)
501
501
502 @reraise_safe_exceptions
502 @reraise_safe_exceptions
503 def run_svn_command(self, wire, cmd, **opts):
503 def run_svn_command(self, wire, cmd, **opts):
504 path = wire.get('path', None)
504 path = wire.get('path', None)
505
505
506 if path and os.path.isdir(path):
506 if path and os.path.isdir(path):
507 opts['cwd'] = path
507 opts['cwd'] = path
508
508
509 safe_call = opts.pop('_safe', False)
509 safe_call = opts.pop('_safe', False)
510
510
511 svnenv = os.environ.copy()
511 svnenv = os.environ.copy()
512 svnenv.update(opts.pop('extra_env', {}))
512 svnenv.update(opts.pop('extra_env', {}))
513
513
514 _opts = {'env': svnenv, 'shell': False}
514 _opts = {'env': svnenv, 'shell': False}
515
515
516 try:
516 try:
517 _opts.update(opts)
517 _opts.update(opts)
518 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
518 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
519
519
520 return b''.join(proc), b''.join(proc.stderr)
520 return b''.join(proc), b''.join(proc.stderr)
521 except OSError as err:
521 except OSError as err:
522 if safe_call:
522 if safe_call:
523 return '', safe_str(err).strip()
523 return '', safe_str(err).strip()
524 else:
524 else:
525 cmd = ' '.join(map(safe_str, cmd)) # human friendly CMD
525 cmd = ' '.join(map(safe_str, cmd)) # human friendly CMD
526 tb_err = ("Couldn't run svn command (%s).\n"
526 tb_err = ("Couldn't run svn command (%s).\n"
527 "Original error was:%s\n"
527 "Original error was:%s\n"
528 "Call options:%s\n"
528 "Call options:%s\n"
529 % (cmd, err, _opts))
529 % (cmd, err, _opts))
530 log.exception(tb_err)
530 log.exception(tb_err)
531 raise exceptions.VcsException()(tb_err)
531 raise exceptions.VcsException()(tb_err)
532
532
533 @reraise_safe_exceptions
533 @reraise_safe_exceptions
534 def install_hooks(self, wire, force=False):
534 def install_hooks(self, wire, force=False):
535 from vcsserver.hook_utils import install_svn_hooks
535 from vcsserver.hook_utils import install_svn_hooks
536 repo_path = wire['path']
536 repo_path = wire['path']
537 binary_dir = settings.BINARY_DIR
537 binary_dir = settings.BINARY_DIR
538 executable = None
538 executable = None
539 if binary_dir:
539 if binary_dir:
540 executable = os.path.join(binary_dir, 'python3')
540 executable = os.path.join(binary_dir, 'python3')
541 return install_svn_hooks(repo_path, force_create=force)
541 return install_svn_hooks(repo_path, force_create=force)
542
542
543 @reraise_safe_exceptions
543 @reraise_safe_exceptions
544 def get_hooks_info(self, wire):
544 def get_hooks_info(self, wire):
545 from vcsserver.hook_utils import (
545 from vcsserver.hook_utils import (
546 get_svn_pre_hook_version, get_svn_post_hook_version)
546 get_svn_pre_hook_version, get_svn_post_hook_version)
547 repo_path = wire['path']
547 repo_path = wire['path']
548 return {
548 return {
549 'pre_version': get_svn_pre_hook_version(repo_path),
549 'pre_version': get_svn_pre_hook_version(repo_path),
550 'post_version': get_svn_post_hook_version(repo_path),
550 'post_version': get_svn_post_hook_version(repo_path),
551 }
551 }
552
552
553 @reraise_safe_exceptions
553 @reraise_safe_exceptions
554 def set_head_ref(self, wire, head_name):
554 def set_head_ref(self, wire, head_name):
555 pass
555 pass
556
556
557 @reraise_safe_exceptions
557 @reraise_safe_exceptions
558 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
558 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
559 archive_dir_name, commit_id):
559 archive_dir_name, commit_id):
560
560
561 def walk_tree(root, root_dir, _commit_id):
561 def walk_tree(root, root_dir, _commit_id):
562 """
562 """
563 Special recursive svn repo walker
563 Special recursive svn repo walker
564 """
564 """
565 root_dir = safe_bytes(root_dir)
565 root_dir = safe_bytes(root_dir)
566
566
567 filemode_default = 0o100644
567 filemode_default = 0o100644
568 filemode_executable = 0o100755
568 filemode_executable = 0o100755
569
569
570 file_iter = svn.fs.dir_entries(root, root_dir)
570 file_iter = svn.fs.dir_entries(root, root_dir)
571 for f_name in file_iter:
571 for f_name in file_iter:
572 f_type = NODE_TYPE_MAPPING.get(file_iter[f_name].kind, None)
572 f_type = NODE_TYPE_MAPPING.get(file_iter[f_name].kind, None)
573
573
574 if f_type == 'dir':
574 if f_type == 'dir':
575 # return only DIR, and then all entries in that dir
575 # return only DIR, and then all entries in that dir
576 yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type
576 yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type
577 new_root = os.path.join(root_dir, f_name)
577 new_root = os.path.join(root_dir, f_name)
578 for _f_name, _f_data, _f_type in walk_tree(root, new_root, _commit_id):
578 for _f_name, _f_data, _f_type in walk_tree(root, new_root, _commit_id):
579 yield _f_name, _f_data, _f_type
579 yield _f_name, _f_data, _f_type
580 else:
580 else:
581
581
582 f_path = os.path.join(root_dir, f_name).rstrip(b'/')
582 f_path = os.path.join(root_dir, f_name).rstrip(b'/')
583 prop_list = svn.fs.node_proplist(root, f_path)
583 prop_list = svn.fs.node_proplist(root, f_path)
584
584
585 f_mode = filemode_default
585 f_mode = filemode_default
586 if prop_list.get('svn:executable'):
586 if prop_list.get('svn:executable'):
587 f_mode = filemode_executable
587 f_mode = filemode_executable
588
588
589 f_is_link = False
589 f_is_link = False
590 if prop_list.get('svn:special'):
590 if prop_list.get('svn:special'):
591 f_is_link = True
591 f_is_link = True
592
592
593 data = {
593 data = {
594 'is_link': f_is_link,
594 'is_link': f_is_link,
595 'mode': f_mode,
595 'mode': f_mode,
596 'content_stream': svn.core.Stream(svn.fs.file_contents(root, f_path)).read
596 'content_stream': svn.core.Stream(svn.fs.file_contents(root, f_path)).read
597 }
597 }
598
598
599 yield f_path, data, f_type
599 yield f_path, data, f_type
600
600
601 def file_walker(_commit_id, path):
601 def file_walker(_commit_id, path):
602 repo = self._factory.repo(wire)
602 repo = self._factory.repo(wire)
603 root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id))
603 root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id))
604
604
605 def no_content():
605 def no_content():
606 raise NoContentException()
606 raise NoContentException()
607
607
608 for f_name, f_data, f_type in walk_tree(root, path, _commit_id):
608 for f_name, f_data, f_type in walk_tree(root, path, _commit_id):
609 file_path = f_name
609 file_path = f_name
610
610
611 if f_type == 'dir':
611 if f_type == 'dir':
612 mode = f_data['mode']
612 mode = f_data['mode']
613 yield ArchiveNode(file_path, mode, False, no_content)
613 yield ArchiveNode(file_path, mode, False, no_content)
614 else:
614 else:
615 mode = f_data['mode']
615 mode = f_data['mode']
616 is_link = f_data['is_link']
616 is_link = f_data['is_link']
617 data_stream = f_data['content_stream']
617 data_stream = f_data['content_stream']
618 yield ArchiveNode(file_path, mode, is_link, data_stream)
618 yield ArchiveNode(file_path, mode, is_link, data_stream)
619
619
620 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
620 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
621 archive_dir_name, commit_id)
621 archive_dir_name, commit_id)
622
622
623
623
class SvnDiffer(object):
    """
    Utility to create diffs based on difflib and the Subversion api.

    All output is produced as ``bytes``: file content read from the SVN
    filesystem is bytes under py3, and callers wrap the result in a
    ``BinaryEnvelope`` (they fall back to ``BinaryEnvelope(b'')``).
    Previously a ``StringIO`` buffer was mixed with bytes lines from
    ``_svn_readlines``, which raised TypeError for any text diff.
    """

    binary_content = False

    def __init__(
            self, repo, src_rev, src_path, tgt_rev, tgt_path,
            ignore_whitespace, context):
        self.repo = repo
        self.ignore_whitespace = ignore_whitespace
        self.context = context

        fsobj = svn.repos.fs(repo)

        self.tgt_rev = tgt_rev
        self.tgt_path = tgt_path or ''
        self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
        self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)

        self.src_rev = src_rev
        self.src_path = src_path or self.tgt_path
        self.src_root = svn.fs.revision_root(fsobj, src_rev)
        self.src_kind = svn.fs.check_path(self.src_root, self.src_path)

        self._validate()

    def _validate(self):
        # Diffing a file against a directory (or vice versa) is meaningless.
        if (self.tgt_kind != svn.core.svn_node_none and
                self.src_kind != svn.core.svn_node_none and
                self.src_kind != self.tgt_kind):
            # TODO: johbo: proper error handling
            raise Exception(
                "Source and target are not compatible for diff generation. "
                "Source type: %s, target type: %s" %
                (self.src_kind, self.tgt_kind))

    def generate_diff(self) -> bytes:
        """Return the complete git-style unified diff as bytes."""
        buf = io.BytesIO()
        if self.tgt_kind == svn.core.svn_node_dir:
            self._generate_dir_diff(buf)
        else:
            self._generate_file_diff(buf)
        return buf.getvalue()

    def _generate_dir_diff(self, buf):
        # Collect the set of changed nodes with a no-op delta editor, then
        # emit a per-node diff for each change in path order.
        editor = DiffChangeEditor()
        editor_ptr, editor_baton = svn.delta.make_editor(editor)
        svn.repos.dir_delta2(
            self.src_root,
            self.src_path,
            '',  # src_entry
            self.tgt_root,
            self.tgt_path,
            editor_ptr, editor_baton,
            authorization_callback_allow_all,
            False,  # text_deltas
            svn.core.svn_depth_infinity,  # depth
            False,  # entry_props
            False,  # ignore_ancestry
        )

        for path, __, change in sorted(editor.changes):
            self._generate_node_diff(
                buf, change, path, self.tgt_path, path, self.src_path)

    def _generate_file_diff(self, buf):
        change = None
        if self.src_kind == svn.core.svn_node_none:
            change = "add"
        elif self.tgt_kind == svn.core.svn_node_none:
            change = "delete"
        tgt_base, tgt_path = vcspath.split(self.tgt_path)
        src_base, src_path = vcspath.split(self.src_path)
        self._generate_node_diff(
            buf, change, tgt_path, tgt_base, src_path, src_base)

    def _generate_node_diff(
            self, buf, change, tgt_path, tgt_base, src_path, src_base):
        """Write the diff headers + hunks for a single node into *buf*."""

        tgt_path_bytes = safe_bytes(tgt_path)
        tgt_path = safe_str(tgt_path)

        src_path_bytes = safe_bytes(src_path)
        src_path = safe_str(src_path)

        if self.src_rev == self.tgt_rev and tgt_base == src_base:
            # makes consistent behaviour with git/hg to return empty diff if
            # we compare same revisions
            return

        tgt_full_path = vcspath.join(tgt_base, tgt_path)
        src_full_path = vcspath.join(src_base, src_path)

        self.binary_content = False
        mime_type = self._get_mime_type(tgt_full_path)

        # normalize the property value; the bindings may hand back bytes
        if mime_type and not safe_bytes(mime_type).startswith(b'text'):
            self.binary_content = True
            buf.write(b"=" * 67 + b'\n')
            buf.write(b"Cannot display: file marked as a binary type.\n")
            buf.write(b"svn:mime-type = %s\n" % safe_bytes(mime_type))
            buf.write(b"Index: %b\n" % tgt_path_bytes)
            buf.write(b"=" * 67 + b'\n')

        buf.write(b"diff --git a/%b b/%b\n" % (tgt_path_bytes, tgt_path_bytes))

        if change == 'add':
            # TODO: johbo: SVN is missing a zero here compared to git
            buf.write(b"new file mode 10644\n")

            # TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write(b'GIT binary patch\n')

            buf.write(b"--- /dev/null\t(revision 0)\n")
            src_lines = []
        else:
            if change == 'delete':
                buf.write(b"deleted file mode 10644\n")

            # TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write(b'GIT binary patch\n')

            buf.write(b"--- a/%b\t(revision %d)\n" % (
                src_path_bytes, self.src_rev))
            src_lines = self._svn_readlines(self.src_root, src_full_path)

        if change == 'delete':
            buf.write(b"+++ /dev/null\t(revision %d)\n" % self.tgt_rev)
            tgt_lines = []
        else:
            buf.write(b"+++ b/%b\t(revision %d)\n" % (
                tgt_path_bytes, self.tgt_rev))
            tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)

        if not self.binary_content:
            udiff = svn_diff.unified_diff(
                src_lines, tgt_lines, context=self.context,
                ignore_blank_lines=self.ignore_whitespace,
                ignore_case=False,
                ignore_space_changes=self.ignore_whitespace)

            buf.writelines(udiff)

    def _get_mime_type(self, path):
        # Prefer the target side; fall back to the source side for nodes
        # that only exist there (e.g. deletions).
        try:
            mime_type = svn.fs.node_prop(
                self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
        except svn.core.SubversionException:
            mime_type = svn.fs.node_prop(
                self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
        return mime_type

    def _svn_readlines(self, fs_root, node_path):
        """Return the node's content as a list of bytes lines (keepends)."""
        if self.binary_content:
            return []
        node_kind = svn.fs.check_path(fs_root, node_path)
        if node_kind not in (
                svn.core.svn_node_file, svn.core.svn_node_symlink):
            return []
        content = svn.core.Stream(
            svn.fs.file_contents(fs_root, node_path)).read()

        return content.splitlines(True)
790
790
791
791
class DiffChangeEditor(svn.delta.Editor):
    """
    Delta editor that records changes between two revisions as
    ``(path, node_kind, action)`` tuples in ``self.changes``.
    """

    def __init__(self):
        self.changes = []

    def delete_entry(self, path, revision, parent_baton, pool=None):
        # Deletions carry no node-type information from the editor API.
        self.changes.append((path, None, 'delete'))

    def add_file(
            self, path, parent_baton, copyfrom_path, copyfrom_revision,
            file_pool=None):
        self.changes.append((path, 'file', 'add'))

    def open_file(self, path, parent_baton, base_revision, file_pool=None):
        self.changes.append((path, 'file', 'change'))
810
810
811
811
def authorization_callback_allow_all(root, path, pool):
    """Authz callback for ``svn.repos.dir_delta2`` that permits every path."""
    return True
814
814
815
815
class TxnNodeProcessor(object):
    """
    Utility to process the change of one node within a transaction root.

    It encapsulates the knowledge of how to add, update or remove
    a node for a given transaction root. The purpose is to support the method
    `SvnRemote.commit`.
    """

    def __init__(self, node, txn_root):
        assert isinstance(node['path'], bytes)

        self.node = node
        self.txn_root = txn_root

    def update(self):
        """Create or refresh the node: parents, file, content, properties."""
        self._ensure_parent_dirs()
        self._add_file_if_node_does_not_exist()
        self._update_file_content()
        self._update_file_properties()

    def remove(self):
        svn.fs.delete(self.txn_root, self.node['path'])
        # TODO: Clean up directory if empty

    def _ensure_parent_dirs(self):
        # Collect the missing ancestors bottom-up, then create top-down.
        missing = []
        parent = vcspath.dirname(self.node['path'])
        while not self._svn_path_exists(parent):
            missing.append(parent)
            parent = vcspath.dirname(parent)

        for new_dir in reversed(missing):
            log.debug('Creating missing directory "%s"', new_dir)
            svn.fs.make_dir(self.txn_root, new_dir)

    def _svn_path_exists(self, path):
        return svn.fs.check_path(self.txn_root, path) != svn.core.svn_node_none

    def _add_file_if_node_does_not_exist(self):
        node_path = self.node['path']
        if svn.fs.check_path(self.txn_root, node_path) == svn.core.svn_node_none:
            svn.fs.make_file(self.txn_root, node_path)

    def _update_file_content(self):
        assert isinstance(self.node['content'], bytes)

        handler, baton = svn.fs.apply_textdelta(
            self.txn_root, self.node['path'], None, None)
        svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)

    def _update_file_properties(self):
        for prop_name, prop_value in self.node.get('properties', {}).items():
            svn.fs.change_node_prop(
                self.txn_root, self.node['path'], prop_name, prop_value)
873
873
874
874
def apr_time_t(timestamp):
    """
    Convert a Python timestamp (seconds) into APR timestamp type apr_time_t,
    which counts microseconds.
    """
    # apr_time_t is a 64-bit integer; return an int rather than a float so
    # large timestamps keep full precision (callers int()-wrapped it anyway).
    return int(timestamp * 1_000_000)
880
880
881
881
def svn_opt_revision_value_t(num):
    """
    Put `num` into a `svn_opt_revision_value_t` structure.
    """
    rev_value = svn.core.svn_opt_revision_value_t()
    rev_value.number = num

    revision = svn.core.svn_opt_revision_t()
    revision.kind = svn.core.svn_opt_revision_number
    revision.value = rev_value
    return revision
General Comments 0
You need to be logged in to leave comments. Login now