##// END OF EJS Templates
blame: use BinaryEnvelope wrapper to handle raw non-ascii content of files
super-admin -
r1139:1b29ba78 default
parent child Browse files
Show More
@@ -1,194 +1,194 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import os
17 import os
18 import sys
18 import sys
19 import tempfile
19 import tempfile
20 import traceback
20 import traceback
21 import logging
21 import logging
22 import urllib.parse
22 import urllib.parse
23
23
24 from vcsserver.lib.rc_cache.archive_cache import get_archival_cache_store
24 from vcsserver.lib.rc_cache.archive_cache import get_archival_cache_store
25
25
26 from vcsserver import exceptions
26 from vcsserver import exceptions
27 from vcsserver.exceptions import NoContentException
27 from vcsserver.exceptions import NoContentException
28 from vcsserver.hgcompat import archival
28 from vcsserver.hgcompat import archival
29 from vcsserver.str_utils import safe_bytes
29 from vcsserver.str_utils import safe_bytes
30
30
31 log = logging.getLogger(__name__)
31 log = logging.getLogger(__name__)
32
32
33
33
class RepoFactory(object):
    """
    Utility to create instances of repository

    It provides internal caching of the `repo` object based on
    the :term:`call context`.
    """
    # concrete subclasses set this to a backend tag, e.g. 'git'
    repo_type = None

    def __init__(self):
        pass

    def _create_config(self, path, config):
        """Return a backend config dict; the base implementation is always empty."""
        return {}

    def _create_repo(self, wire, create):
        """Build the backend repository object — must be provided by subclasses."""
        raise NotImplementedError()

    def repo(self, wire, create=False):
        """Resolve a repository instance for ``wire`` — must be provided by subclasses."""
        raise NotImplementedError()
55
55
56
56
def obfuscate_qs(query_string):
    """
    Return ``query_string`` with sensitive values (auth_token, api_key)
    replaced by ``*****``; ``None`` passes through unchanged.
    """
    if query_string is None:
        return None

    sensitive_keys = ('auth_token', 'api_key')
    masked = [
        (key, "*****" if key in sensitive_keys else val)
        for key, val in urllib.parse.parse_qsl(query_string, keep_blank_values=True)
    ]
    # keys with an empty value are emitted without the '=' (matches parse_qsl round-trip style)
    return '&'.join(f'{key}={val}' if val else key for key, val in masked)
69
69
70
70
def raise_from_original(new_type, org_exc: Exception):
    """
    Raise a new exception type with original args and traceback.

    Must be called from inside an ``except`` block: it reads the currently
    handled exception via ``sys.exc_info()``.
    """
    _exc_type, exc_value, exc_tb = sys.exc_info()
    replacement = new_type(*exc_value.args)

    # keep a formatted copy of the original traceback on the new exception
    replacement._org_exc_tb = traceback.format_tb(exc_tb)

    try:
        raise replacement.with_traceback(exc_tb)
    finally:
        # break the traceback reference cycle
        del exc_tb
86
86
87
87
class ArchiveNode(object):
    """A single file entry yielded by a node walker and consumed by the archiver."""

    def __init__(self, path, mode, is_link, raw_bytes):
        # attribute names are part of the walker/archiver contract — do not rename
        self.path = path
        self.mode = mode
        self.is_link = is_link
        self.raw_bytes = raw_bytes
94
94
95
95
def store_archive_in_cache(node_walker, archive_key, kind, mtime, archive_at_path, archive_dir_name,
                           commit_id, write_metadata=True, extra_metadata=None, cache_config=None):
    """
    Generate an archive from the walked nodes and store it in the dedicated
    backend store (diskcache). Returns the filesystem path of the cached archive.

    :param node_walker: a generator returning nodes to add to archive
    :param archive_key: key used to store the path
    :param kind: archive kind: one of "tgz", "tbz2", "zip"
    :param mtime: time of creation
    :param archive_at_path: default '/' the path at archive was started.
        If this is not '/' it means it's a partial archive
    :param archive_dir_name: inside dir name when creating an archive
    :param commit_id: commit sha of revision archive was created at
    :param write_metadata: when True, append a `.archival.txt` file with metadata
    :param extra_metadata: extra key:value pairs merged into the metadata file
    :param cache_config: configuration for the archival cache store
    :raises exceptions.ArchiveException: for unsupported archive kinds

    walker should be a file walker, for example,
    def node_walker():
        for file_info in files:
            yield ArchiveNode(fn, mode, is_link, ctx[fn].data)
    """
    extra_metadata = extra_metadata or {}

    d_cache = get_archival_cache_store(config=cache_config)

    # fast path: archive already generated — return its on-disk location
    if archive_key in d_cache:
        with d_cache as d_cache_reader:
            reader, tag = d_cache_reader.get(archive_key, read=True, tag=True, retry=True)
            return reader.name

    archive_tmp_path = safe_bytes(tempfile.mkstemp()[1])
    log.debug('Creating new temp archive in %s', archive_tmp_path)

    if kind == "tgz":
        archiver = archival.tarit(archive_tmp_path, mtime, b"gz")
    elif kind == "tbz2":
        archiver = archival.tarit(archive_tmp_path, mtime, b"bz2")
    elif kind == 'zip':
        archiver = archival.zipit(archive_tmp_path, mtime)
    else:
        # NOTE: ArchiveException is a factory returning an exception class,
        # hence the double call
        raise exceptions.ArchiveException()(
            f'Remote does not support: "{kind}" archive type.')

    for f in node_walker(commit_id, archive_at_path):
        f_path = os.path.join(safe_bytes(archive_dir_name), safe_bytes(f.path).lstrip(b'/'))
        try:
            archiver.addfile(f_path, f.mode, f.is_link, f.raw_bytes())
        except NoContentException:
            # NOTE(marcink): this is a special case for SVN so we can create "empty"
            # directories which are not supported by archiver
            archiver.addfile(os.path.join(f_path, b'.dir'), f.mode, f.is_link, b'')

    if write_metadata:
        # dict literal instead of dict([...]) pair-list (idiom fix)
        metadata = {
            'commit_id': commit_id,
            'mtime': mtime,
        }
        metadata.update(extra_metadata)

        meta = [safe_bytes(f"{f_name}:{value}") for f_name, value in metadata.items()]
        f_path = os.path.join(safe_bytes(archive_dir_name), b'.archival.txt')
        archiver.addfile(f_path, 0o644, False, b'\n'.join(meta))

    archiver.done()

    # ensure set & get are atomic
    with d_cache.transact():

        with open(archive_tmp_path, 'rb') as archive_file:
            add_result = d_cache.set(archive_key, archive_file, read=True, tag='db-name', retry=True)
        if not add_result:
            log.error('Failed to store cache for key=%s', archive_key)

        os.remove(archive_tmp_path)

        reader, tag = d_cache.get(archive_key, read=True, tag=True, retry=True)
        if not reader:
            raise AssertionError(f'empty reader on key={archive_key} added={add_result}')

        return reader.name
178
178
179
179
class BinaryEnvelope(object):
    """Transport wrapper marking an arbitrary value as raw/binary content."""

    def __init__(self, val):
        self.val = val
183
183
184
184
class BytesEnvelope(bytes):
    """A ``bytes`` subclass that marks content as already-raw byte payload."""

    def __new__(cls, content):
        # guard clause: refuse anything that is not already bytes
        if not isinstance(content, bytes):
            raise TypeError('BytesEnvelope content= param must be bytes. Use BinaryEnvelope to wrap other types')
        return super().__new__(cls, content)
191
191
192
192
class BinaryBytesEnvelope(BytesEnvelope):
    """BytesEnvelope variant tagging the payload as binary (non-text) content."""
    pass
@@ -1,1463 +1,1463 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import collections
18 import collections
19 import logging
19 import logging
20 import os
20 import os
21 import re
21 import re
22 import stat
22 import stat
23 import traceback
23 import traceback
24 import urllib.request
24 import urllib.request
25 import urllib.parse
25 import urllib.parse
26 import urllib.error
26 import urllib.error
27 from functools import wraps
27 from functools import wraps
28
28
29 import more_itertools
29 import more_itertools
30 import pygit2
30 import pygit2
31 from pygit2 import Repository as LibGit2Repo
31 from pygit2 import Repository as LibGit2Repo
32 from pygit2 import index as LibGit2Index
32 from pygit2 import index as LibGit2Index
33 from dulwich import index, objects
33 from dulwich import index, objects
34 from dulwich.client import HttpGitClient, LocalGitClient, FetchPackResult
34 from dulwich.client import HttpGitClient, LocalGitClient, FetchPackResult
35 from dulwich.errors import (
35 from dulwich.errors import (
36 NotGitRepository, ChecksumMismatch, WrongObjectException,
36 NotGitRepository, ChecksumMismatch, WrongObjectException,
37 MissingCommitError, ObjectMissing, HangupException,
37 MissingCommitError, ObjectMissing, HangupException,
38 UnexpectedCommandError)
38 UnexpectedCommandError)
39 from dulwich.repo import Repo as DulwichRepo
39 from dulwich.repo import Repo as DulwichRepo
40 from dulwich.server import update_server_info
40 from dulwich.server import update_server_info
41
41
42 from vcsserver import exceptions, settings, subprocessio
42 from vcsserver import exceptions, settings, subprocessio
43 from vcsserver.str_utils import safe_str, safe_int, safe_bytes, ascii_bytes
43 from vcsserver.str_utils import safe_str, safe_int, safe_bytes, ascii_bytes
44 from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, store_archive_in_cache, BytesEnvelope, BinaryEnvelope
44 from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, store_archive_in_cache, BytesEnvelope, BinaryEnvelope
45 from vcsserver.hgcompat import (
45 from vcsserver.hgcompat import (
46 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
46 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
47 from vcsserver.git_lfs.lib import LFSOidStore
47 from vcsserver.git_lfs.lib import LFSOidStore
48 from vcsserver.vcs_base import RemoteBase
48 from vcsserver.vcs_base import RemoteBase
49
49
# file-mode helpers used when walking git trees
DIR_STAT = stat.S_IFDIR
FILE_MODE = stat.S_IFMT
GIT_LINK = objects.S_IFGITLINK  # gitlink (submodule) entry mode
# marker suffix git uses for peeled (dereferenced) tag refs
PEELED_REF_MARKER = b'^{}'
HEAD_MARKER = b'HEAD'

log = logging.getLogger(__name__)
57
57
58
58
def reraise_safe_exceptions(func):
    """Converts Dulwich exceptions to something neutral."""

    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
            lookup_exc = exceptions.LookupException(org_exc=e)
            raise lookup_exc(safe_str(e))
        except (HangupException, UnexpectedCommandError) as e:
            vcs_exc = exceptions.VcsException(org_exc=e)
            raise vcs_exc(safe_str(e))
        except Exception:
            # NOTE(marcink): because of how dulwich handles some exceptions
            # (KeyError on empty repos), we cannot track this and catch all
            # exceptions, it's an exceptions from other handlers
            raise

    return wrapper
81
81
82
82
class Repo(DulwichRepo):
    """
    A wrapper for dulwich Repo class.

    Since dulwich is sometimes keeping .idx file descriptors open, it leads to
    "Too many open files" error. We need to close all opened file descriptors
    once the repo object is destroyed.
    """

    def __del__(self):
        # __del__ may run on a partially constructed instance; only close
        # when the object store (and therefore its file handles) exists
        if hasattr(self, 'object_store'):
            self.close()
94
94
95
95
class Repository(LibGit2Repo):
    """Context-manager wrapper over pygit2 Repository; frees libgit2 handles on exit."""

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # release libgit2 resources deterministically
        self.free()
103
103
104
104
class GitFactory(RepoFactory):
    """Factory producing git repository objects, backed by dulwich or libgit2."""

    repo_type = 'git'

    def _create_repo(self, wire, create, use_libgit2=False):
        if use_libgit2:
            repo = Repository(safe_bytes(wire['path']))
        else:
            # dulwich mode
            repo_path = safe_str(wire['path'], to_encoding=settings.WIRE_ENCODING)
            repo = Repo(repo_path)

        log.debug('repository created: got GIT object: %s', repo)
        return repo

    def repo(self, wire, create=False, use_libgit2=False):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create, use_libgit2)

    def repo_libgit2(self, wire):
        """Shortcut for a libgit2-backed repository instance."""
        return self.repo(wire, use_libgit2=True)
127
127
128
128
def create_signature_from_string(author_str, **kwargs):
    """
    Creates a pygit2.Signature object from a string of the format 'Name <email>'.

    :param author_str: String of the format 'Name <email>'
    :return: pygit2.Signature object
    :raises ValueError: when the string does not match the expected format
    """
    parsed = re.match(r'^(.+) <(.+)>$', author_str)
    if parsed is None:
        raise ValueError(f"Invalid format: {author_str}")

    name, email = parsed.group(1), parsed.group(2)
    return pygit2.Signature(name, email, **kwargs)
142
142
143
143
def get_obfuscated_url(url_obj):
    """Return a string form of ``url_obj`` with password and sensitive query values masked."""
    if url_obj.passwd:
        url_obj.passwd = b'*****'
    url_obj.query = obfuscate_qs(url_obj.query)
    return str(url_obj)
149
149
150
150
151 class GitRemote(RemoteBase):
151 class GitRemote(RemoteBase):
152
152
153 def __init__(self, factory):
153 def __init__(self, factory):
154 self._factory = factory
154 self._factory = factory
155 self._bulk_methods = {
155 self._bulk_methods = {
156 "date": self.date,
156 "date": self.date,
157 "author": self.author,
157 "author": self.author,
158 "branch": self.branch,
158 "branch": self.branch,
159 "message": self.message,
159 "message": self.message,
160 "parents": self.parents,
160 "parents": self.parents,
161 "_commit": self.revision,
161 "_commit": self.revision,
162 }
162 }
163 self._bulk_file_methods = {
163 self._bulk_file_methods = {
164 "size": self.get_node_size,
164 "size": self.get_node_size,
165 "data": self.get_node_data,
165 "data": self.get_node_data,
166 "flags": self.get_node_flags,
166 "flags": self.get_node_flags,
167 "is_binary": self.get_node_is_binary,
167 "is_binary": self.get_node_is_binary,
168 "md5": self.md5_hash
168 "md5": self.md5_hash
169 }
169 }
170
170
171 def _wire_to_config(self, wire):
171 def _wire_to_config(self, wire):
172 if 'config' in wire:
172 if 'config' in wire:
173 return {x[0] + '_' + x[1]: x[2] for x in wire['config']}
173 return {x[0] + '_' + x[1]: x[2] for x in wire['config']}
174 return {}
174 return {}
175
175
176 def _remote_conf(self, config):
176 def _remote_conf(self, config):
177 params = [
177 params = [
178 '-c', 'core.askpass=""',
178 '-c', 'core.askpass=""',
179 ]
179 ]
180 ssl_cert_dir = config.get('vcs_ssl_dir')
180 ssl_cert_dir = config.get('vcs_ssl_dir')
181 if ssl_cert_dir:
181 if ssl_cert_dir:
182 params.extend(['-c', f'http.sslCAinfo={ssl_cert_dir}'])
182 params.extend(['-c', f'http.sslCAinfo={ssl_cert_dir}'])
183 return params
183 return params
184
184
185 @reraise_safe_exceptions
185 @reraise_safe_exceptions
186 def discover_git_version(self):
186 def discover_git_version(self):
187 stdout, _ = self.run_git_command(
187 stdout, _ = self.run_git_command(
188 {}, ['--version'], _bare=True, _safe=True)
188 {}, ['--version'], _bare=True, _safe=True)
189 prefix = b'git version'
189 prefix = b'git version'
190 if stdout.startswith(prefix):
190 if stdout.startswith(prefix):
191 stdout = stdout[len(prefix):]
191 stdout = stdout[len(prefix):]
192 return safe_str(stdout.strip())
192 return safe_str(stdout.strip())
193
193
194 @reraise_safe_exceptions
194 @reraise_safe_exceptions
195 def is_empty(self, wire):
195 def is_empty(self, wire):
196 repo_init = self._factory.repo_libgit2(wire)
196 repo_init = self._factory.repo_libgit2(wire)
197 with repo_init as repo:
197 with repo_init as repo:
198
198
199 try:
199 try:
200 has_head = repo.head.name
200 has_head = repo.head.name
201 if has_head:
201 if has_head:
202 return False
202 return False
203
203
204 # NOTE(marcink): check again using more expensive method
204 # NOTE(marcink): check again using more expensive method
205 return repo.is_empty
205 return repo.is_empty
206 except Exception:
206 except Exception:
207 pass
207 pass
208
208
209 return True
209 return True
210
210
211 @reraise_safe_exceptions
211 @reraise_safe_exceptions
212 def assert_correct_path(self, wire):
212 def assert_correct_path(self, wire):
213 cache_on, context_uid, repo_id = self._cache_on(wire)
213 cache_on, context_uid, repo_id = self._cache_on(wire)
214 region = self._region(wire)
214 region = self._region(wire)
215
215
216 @region.conditional_cache_on_arguments(condition=cache_on)
216 @region.conditional_cache_on_arguments(condition=cache_on)
217 def _assert_correct_path(_context_uid, _repo_id, fast_check):
217 def _assert_correct_path(_context_uid, _repo_id, fast_check):
218 if fast_check:
218 if fast_check:
219 path = safe_str(wire['path'])
219 path = safe_str(wire['path'])
220 if pygit2.discover_repository(path):
220 if pygit2.discover_repository(path):
221 return True
221 return True
222 return False
222 return False
223 else:
223 else:
224 try:
224 try:
225 repo_init = self._factory.repo_libgit2(wire)
225 repo_init = self._factory.repo_libgit2(wire)
226 with repo_init:
226 with repo_init:
227 pass
227 pass
228 except pygit2.GitError:
228 except pygit2.GitError:
229 path = wire.get('path')
229 path = wire.get('path')
230 tb = traceback.format_exc()
230 tb = traceback.format_exc()
231 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
231 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
232 return False
232 return False
233 return True
233 return True
234
234
235 return _assert_correct_path(context_uid, repo_id, True)
235 return _assert_correct_path(context_uid, repo_id, True)
236
236
237 @reraise_safe_exceptions
237 @reraise_safe_exceptions
238 def bare(self, wire):
238 def bare(self, wire):
239 repo_init = self._factory.repo_libgit2(wire)
239 repo_init = self._factory.repo_libgit2(wire)
240 with repo_init as repo:
240 with repo_init as repo:
241 return repo.is_bare
241 return repo.is_bare
242
242
243 @reraise_safe_exceptions
243 @reraise_safe_exceptions
244 def get_node_data(self, wire, commit_id, path):
244 def get_node_data(self, wire, commit_id, path):
245 repo_init = self._factory.repo_libgit2(wire)
245 repo_init = self._factory.repo_libgit2(wire)
246 with repo_init as repo:
246 with repo_init as repo:
247 commit = repo[commit_id]
247 commit = repo[commit_id]
248 blob_obj = commit.tree[path]
248 blob_obj = commit.tree[path]
249
249
250 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
250 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
251 raise exceptions.LookupException()(
251 raise exceptions.LookupException()(
252 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
252 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
253
253
254 return BytesEnvelope(blob_obj.data)
254 return BytesEnvelope(blob_obj.data)
255
255
256 @reraise_safe_exceptions
256 @reraise_safe_exceptions
257 def get_node_size(self, wire, commit_id, path):
257 def get_node_size(self, wire, commit_id, path):
258 repo_init = self._factory.repo_libgit2(wire)
258 repo_init = self._factory.repo_libgit2(wire)
259 with repo_init as repo:
259 with repo_init as repo:
260 commit = repo[commit_id]
260 commit = repo[commit_id]
261 blob_obj = commit.tree[path]
261 blob_obj = commit.tree[path]
262
262
263 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
263 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
264 raise exceptions.LookupException()(
264 raise exceptions.LookupException()(
265 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
265 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
266
266
267 return blob_obj.size
267 return blob_obj.size
268
268
269 @reraise_safe_exceptions
269 @reraise_safe_exceptions
270 def get_node_flags(self, wire, commit_id, path):
270 def get_node_flags(self, wire, commit_id, path):
271 repo_init = self._factory.repo_libgit2(wire)
271 repo_init = self._factory.repo_libgit2(wire)
272 with repo_init as repo:
272 with repo_init as repo:
273 commit = repo[commit_id]
273 commit = repo[commit_id]
274 blob_obj = commit.tree[path]
274 blob_obj = commit.tree[path]
275
275
276 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
276 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
277 raise exceptions.LookupException()(
277 raise exceptions.LookupException()(
278 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
278 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
279
279
280 return blob_obj.filemode
280 return blob_obj.filemode
281
281
282 @reraise_safe_exceptions
282 @reraise_safe_exceptions
283 def get_node_is_binary(self, wire, commit_id, path):
283 def get_node_is_binary(self, wire, commit_id, path):
284 repo_init = self._factory.repo_libgit2(wire)
284 repo_init = self._factory.repo_libgit2(wire)
285 with repo_init as repo:
285 with repo_init as repo:
286 commit = repo[commit_id]
286 commit = repo[commit_id]
287 blob_obj = commit.tree[path]
287 blob_obj = commit.tree[path]
288
288
289 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
289 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
290 raise exceptions.LookupException()(
290 raise exceptions.LookupException()(
291 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
291 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
292
292
293 return blob_obj.is_binary
293 return blob_obj.is_binary
294
294
295 @reraise_safe_exceptions
295 @reraise_safe_exceptions
296 def blob_as_pretty_string(self, wire, sha):
296 def blob_as_pretty_string(self, wire, sha):
297 repo_init = self._factory.repo_libgit2(wire)
297 repo_init = self._factory.repo_libgit2(wire)
298 with repo_init as repo:
298 with repo_init as repo:
299 blob_obj = repo[sha]
299 blob_obj = repo[sha]
300 return BytesEnvelope(blob_obj.data)
300 return BytesEnvelope(blob_obj.data)
301
301
302 @reraise_safe_exceptions
302 @reraise_safe_exceptions
303 def blob_raw_length(self, wire, sha):
303 def blob_raw_length(self, wire, sha):
304 cache_on, context_uid, repo_id = self._cache_on(wire)
304 cache_on, context_uid, repo_id = self._cache_on(wire)
305 region = self._region(wire)
305 region = self._region(wire)
306
306
307 @region.conditional_cache_on_arguments(condition=cache_on)
307 @region.conditional_cache_on_arguments(condition=cache_on)
308 def _blob_raw_length(_repo_id, _sha):
308 def _blob_raw_length(_repo_id, _sha):
309
309
310 repo_init = self._factory.repo_libgit2(wire)
310 repo_init = self._factory.repo_libgit2(wire)
311 with repo_init as repo:
311 with repo_init as repo:
312 blob = repo[sha]
312 blob = repo[sha]
313 return blob.size
313 return blob.size
314
314
315 return _blob_raw_length(repo_id, sha)
315 return _blob_raw_length(repo_id, sha)
316
316
317 def _parse_lfs_pointer(self, raw_content):
317 def _parse_lfs_pointer(self, raw_content):
318 spec_string = b'version https://git-lfs.github.com/spec'
318 spec_string = b'version https://git-lfs.github.com/spec'
319 if raw_content and raw_content.startswith(spec_string):
319 if raw_content and raw_content.startswith(spec_string):
320
320
321 pattern = re.compile(rb"""
321 pattern = re.compile(rb"""
322 (?:\n)?
322 (?:\n)?
323 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
323 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
324 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
324 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
325 ^size[ ](?P<oid_size>[0-9]+)\n
325 ^size[ ](?P<oid_size>[0-9]+)\n
326 (?:\n)?
326 (?:\n)?
327 """, re.VERBOSE | re.MULTILINE)
327 """, re.VERBOSE | re.MULTILINE)
328 match = pattern.match(raw_content)
328 match = pattern.match(raw_content)
329 if match:
329 if match:
330 return match.groupdict()
330 return match.groupdict()
331
331
332 return {}
332 return {}
333
333
334 @reraise_safe_exceptions
334 @reraise_safe_exceptions
335 def is_large_file(self, wire, commit_id):
335 def is_large_file(self, wire, commit_id):
336 cache_on, context_uid, repo_id = self._cache_on(wire)
336 cache_on, context_uid, repo_id = self._cache_on(wire)
337 region = self._region(wire)
337 region = self._region(wire)
338
338
339 @region.conditional_cache_on_arguments(condition=cache_on)
339 @region.conditional_cache_on_arguments(condition=cache_on)
340 def _is_large_file(_repo_id, _sha):
340 def _is_large_file(_repo_id, _sha):
341 repo_init = self._factory.repo_libgit2(wire)
341 repo_init = self._factory.repo_libgit2(wire)
342 with repo_init as repo:
342 with repo_init as repo:
343 blob = repo[commit_id]
343 blob = repo[commit_id]
344 if blob.is_binary:
344 if blob.is_binary:
345 return {}
345 return {}
346
346
347 return self._parse_lfs_pointer(blob.data)
347 return self._parse_lfs_pointer(blob.data)
348
348
349 return _is_large_file(repo_id, commit_id)
349 return _is_large_file(repo_id, commit_id)
350
350
351 @reraise_safe_exceptions
351 @reraise_safe_exceptions
352 def is_binary(self, wire, tree_id):
352 def is_binary(self, wire, tree_id):
353 cache_on, context_uid, repo_id = self._cache_on(wire)
353 cache_on, context_uid, repo_id = self._cache_on(wire)
354 region = self._region(wire)
354 region = self._region(wire)
355
355
356 @region.conditional_cache_on_arguments(condition=cache_on)
356 @region.conditional_cache_on_arguments(condition=cache_on)
357 def _is_binary(_repo_id, _tree_id):
357 def _is_binary(_repo_id, _tree_id):
358 repo_init = self._factory.repo_libgit2(wire)
358 repo_init = self._factory.repo_libgit2(wire)
359 with repo_init as repo:
359 with repo_init as repo:
360 blob_obj = repo[tree_id]
360 blob_obj = repo[tree_id]
361 return blob_obj.is_binary
361 return blob_obj.is_binary
362
362
363 return _is_binary(repo_id, tree_id)
363 return _is_binary(repo_id, tree_id)
364
364
    @reraise_safe_exceptions
    def md5_hash(self, wire, commit_id, path):
        """
        Validate that ``path`` at ``commit_id`` is a blob; cached.

        Raises LookupException when the tree entry is not a blob.
        NOTE(review): always returns '' for valid blobs — presumably the md5
        is computed elsewhere or intentionally disabled; confirm with callers.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _md5_hash(_repo_id, _commit_id, _path):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[_commit_id]
                blob_obj = commit.tree[_path]

                if blob_obj.type != pygit2.GIT_OBJ_BLOB:
                    raise exceptions.LookupException()(
                        f'Tree for commit_id:{_commit_id} is not a blob: {blob_obj.type_str}')

                return ''

        return _md5_hash(repo_id, commit_id, path)
384
384
385 @reraise_safe_exceptions
385 @reraise_safe_exceptions
386 def in_largefiles_store(self, wire, oid):
386 def in_largefiles_store(self, wire, oid):
387 conf = self._wire_to_config(wire)
387 conf = self._wire_to_config(wire)
388 repo_init = self._factory.repo_libgit2(wire)
388 repo_init = self._factory.repo_libgit2(wire)
389 with repo_init as repo:
389 with repo_init as repo:
390 repo_name = repo.path
390 repo_name = repo.path
391
391
392 store_location = conf.get('vcs_git_lfs_store_location')
392 store_location = conf.get('vcs_git_lfs_store_location')
393 if store_location:
393 if store_location:
394
394
395 store = LFSOidStore(
395 store = LFSOidStore(
396 oid=oid, repo=repo_name, store_location=store_location)
396 oid=oid, repo=repo_name, store_location=store_location)
397 return store.has_oid()
397 return store.has_oid()
398
398
399 return False
399 return False
400
400
401 @reraise_safe_exceptions
401 @reraise_safe_exceptions
402 def store_path(self, wire, oid):
402 def store_path(self, wire, oid):
403 conf = self._wire_to_config(wire)
403 conf = self._wire_to_config(wire)
404 repo_init = self._factory.repo_libgit2(wire)
404 repo_init = self._factory.repo_libgit2(wire)
405 with repo_init as repo:
405 with repo_init as repo:
406 repo_name = repo.path
406 repo_name = repo.path
407
407
408 store_location = conf.get('vcs_git_lfs_store_location')
408 store_location = conf.get('vcs_git_lfs_store_location')
409 if store_location:
409 if store_location:
410 store = LFSOidStore(
410 store = LFSOidStore(
411 oid=oid, repo=repo_name, store_location=store_location)
411 oid=oid, repo=repo_name, store_location=store_location)
412 return store.oid_path
412 return store.oid_path
413 raise ValueError(f'Unable to fetch oid with path {oid}')
413 raise ValueError(f'Unable to fetch oid with path {oid}')
414
414
    @reraise_safe_exceptions
    def bulk_request(self, wire, rev, pre_load):
        """
        Resolve several commit attributes for ``rev`` in a single cached call.

        ``pre_load`` names keys of ``self._bulk_methods``; each method is
        invoked as ``method(wire, rev)``. Unknown attribute names raise
        VcsException. Returns a dict of attribute -> value.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _bulk_request(_repo_id, _rev, _pre_load):
            result = {}
            for attr in pre_load:
                try:
                    method = self._bulk_methods[attr]
                    wire.update({'cache': False})  # disable cache for bulk calls so we don't double cache
                    args = [wire, rev]
                    result[attr] = method(*args)
                except KeyError as e:
                    raise exceptions.VcsException(e)(f"Unknown bulk attribute: {attr}")
            return result

        # sorted() keeps the cache key stable regardless of pre_load order
        return _bulk_request(repo_id, rev, sorted(pre_load))
434
434
    @reraise_safe_exceptions
    def bulk_file_request(self, wire, commit_id, path, pre_load):
        """
        Resolve several file attributes for ``path`` at ``commit_id`` in one
        cached call. The result dict is wrapped in a BinaryEnvelope because
        file content may be raw non-ascii bytes.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _bulk_file_request(_repo_id, _commit_id, _path, _pre_load):
            result = {}
            for attr in pre_load:
                try:
                    method = self._bulk_file_methods[attr]
                    wire.update({'cache': False})  # disable cache for bulk calls so we don't double cache
                    result[attr] = method(wire, _commit_id, _path)
                except KeyError as e:
                    raise exceptions.VcsException(e)(f'Unknown bulk attribute: "{attr}"')
            return result

        # sorted() keeps the cache key stable regardless of pre_load order
        return BinaryEnvelope(_bulk_file_request(repo_id, commit_id, path, sorted(pre_load)))
453
453
454 def _build_opener(self, url: str):
454 def _build_opener(self, url: str):
455 handlers = []
455 handlers = []
456 url_obj = url_parser(safe_bytes(url))
456 url_obj = url_parser(safe_bytes(url))
457 authinfo = url_obj.authinfo()[1]
457 authinfo = url_obj.authinfo()[1]
458
458
459 if authinfo:
459 if authinfo:
460 # create a password manager
460 # create a password manager
461 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
461 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
462 passmgr.add_password(*authinfo)
462 passmgr.add_password(*authinfo)
463
463
464 handlers.extend((httpbasicauthhandler(passmgr),
464 handlers.extend((httpbasicauthhandler(passmgr),
465 httpdigestauthhandler(passmgr)))
465 httpdigestauthhandler(passmgr)))
466
466
467 return urllib.request.build_opener(*handlers)
467 return urllib.request.build_opener(*handlers)
468
468
469 @reraise_safe_exceptions
469 @reraise_safe_exceptions
470 def check_url(self, url, config):
470 def check_url(self, url, config):
471 url_obj = url_parser(safe_bytes(url))
471 url_obj = url_parser(safe_bytes(url))
472
472
473 test_uri = safe_str(url_obj.authinfo()[0])
473 test_uri = safe_str(url_obj.authinfo()[0])
474 obfuscated_uri = get_obfuscated_url(url_obj)
474 obfuscated_uri = get_obfuscated_url(url_obj)
475
475
476 log.info("Checking URL for remote cloning/import: %s", obfuscated_uri)
476 log.info("Checking URL for remote cloning/import: %s", obfuscated_uri)
477
477
478 if not test_uri.endswith('info/refs'):
478 if not test_uri.endswith('info/refs'):
479 test_uri = test_uri.rstrip('/') + '/info/refs'
479 test_uri = test_uri.rstrip('/') + '/info/refs'
480
480
481 o = self._build_opener(test_uri)
481 o = self._build_opener(test_uri)
482 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
482 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
483
483
484 q = {"service": 'git-upload-pack'}
484 q = {"service": 'git-upload-pack'}
485 qs = '?%s' % urllib.parse.urlencode(q)
485 qs = '?%s' % urllib.parse.urlencode(q)
486 cu = f"{test_uri}{qs}"
486 cu = f"{test_uri}{qs}"
487 req = urllib.request.Request(cu, None, {})
487 req = urllib.request.Request(cu, None, {})
488
488
489 try:
489 try:
490 log.debug("Trying to open URL %s", obfuscated_uri)
490 log.debug("Trying to open URL %s", obfuscated_uri)
491 resp = o.open(req)
491 resp = o.open(req)
492 if resp.code != 200:
492 if resp.code != 200:
493 raise exceptions.URLError()('Return Code is not 200')
493 raise exceptions.URLError()('Return Code is not 200')
494 except Exception as e:
494 except Exception as e:
495 log.warning("URL cannot be opened: %s", obfuscated_uri, exc_info=True)
495 log.warning("URL cannot be opened: %s", obfuscated_uri, exc_info=True)
496 # means it cannot be cloned
496 # means it cannot be cloned
497 raise exceptions.URLError(e)(f"[{obfuscated_uri}] org_exc: {e}")
497 raise exceptions.URLError(e)(f"[{obfuscated_uri}] org_exc: {e}")
498
498
499 # now detect if it's proper git repo
499 # now detect if it's proper git repo
500 gitdata: bytes = resp.read()
500 gitdata: bytes = resp.read()
501
501
502 if b'service=git-upload-pack' in gitdata:
502 if b'service=git-upload-pack' in gitdata:
503 pass
503 pass
504 elif re.findall(br'[0-9a-fA-F]{40}\s+refs', gitdata):
504 elif re.findall(br'[0-9a-fA-F]{40}\s+refs', gitdata):
505 # old style git can return some other format !
505 # old style git can return some other format !
506 pass
506 pass
507 else:
507 else:
508 e = None
508 e = None
509 raise exceptions.URLError(e)(
509 raise exceptions.URLError(e)(
510 "url [%s] does not look like an hg repo org_exc: %s"
510 "url [%s] does not look like an hg repo org_exc: %s"
511 % (obfuscated_uri, e))
511 % (obfuscated_uri, e))
512
512
513 return True
513 return True
514
514
    @reraise_safe_exceptions
    def clone(self, wire, url, deferred, valid_refs, update_after_clone):
        """
        Clone by pulling all refs from ``url`` and copying those whose name
        starts with any of ``valid_refs`` (skipping refs that end with
        ``deferred``); optionally check out HEAD afterwards.
        """
        # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
        remote_refs = self.pull(wire, url, apply_refs=False)
        repo = self._factory.repo(wire)
        # str.startswith needs a tuple when matching several prefixes
        if isinstance(valid_refs, list):
            valid_refs = tuple(valid_refs)

        for k in remote_refs:
            # only parse heads/tags and skip so called deferred tags
            if k.startswith(valid_refs) and not k.endswith(deferred):
                repo[k] = remote_refs[k]

        if update_after_clone:
            # we want to checkout HEAD
            repo["HEAD"] = remote_refs["HEAD"]
            index.build_index_from_tree(repo.path, repo.index_path(),
                                        repo.object_store, repo["HEAD"].tree)
533
533
    @reraise_safe_exceptions
    def branch(self, wire, commit_id):
        """
        Return the names of local branches whose head sha equals
        ``commit_id``; result is region-cached.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _branch(_context_uid, _repo_id, _commit_id):
            regex = re.compile('^refs/heads')

            def filter_with(ref):
                # ref is a (name, sha) pair coming from get_refs().items()
                return regex.match(ref[0]) and ref[1] == _commit_id

            branches = list(filter(filter_with, list(self.get_refs(wire).items())))
            # strip the 'refs/heads/' prefix, leaving the bare branch name
            return [x[0].split('refs/heads/')[-1] for x in branches]

        return _branch(context_uid, repo_id, commit_id)
550
550
551 @reraise_safe_exceptions
551 @reraise_safe_exceptions
552 def commit_branches(self, wire, commit_id):
552 def commit_branches(self, wire, commit_id):
553 cache_on, context_uid, repo_id = self._cache_on(wire)
553 cache_on, context_uid, repo_id = self._cache_on(wire)
554 region = self._region(wire)
554 region = self._region(wire)
555
555
556 @region.conditional_cache_on_arguments(condition=cache_on)
556 @region.conditional_cache_on_arguments(condition=cache_on)
557 def _commit_branches(_context_uid, _repo_id, _commit_id):
557 def _commit_branches(_context_uid, _repo_id, _commit_id):
558 repo_init = self._factory.repo_libgit2(wire)
558 repo_init = self._factory.repo_libgit2(wire)
559 with repo_init as repo:
559 with repo_init as repo:
560 branches = [x for x in repo.branches.with_commit(_commit_id)]
560 branches = [x for x in repo.branches.with_commit(_commit_id)]
561 return branches
561 return branches
562
562
563 return _commit_branches(context_uid, repo_id, commit_id)
563 return _commit_branches(context_uid, repo_id, commit_id)
564
564
565 @reraise_safe_exceptions
565 @reraise_safe_exceptions
566 def add_object(self, wire, content):
566 def add_object(self, wire, content):
567 repo_init = self._factory.repo_libgit2(wire)
567 repo_init = self._factory.repo_libgit2(wire)
568 with repo_init as repo:
568 with repo_init as repo:
569 blob = objects.Blob()
569 blob = objects.Blob()
570 blob.set_raw_string(content)
570 blob.set_raw_string(content)
571 repo.object_store.add_object(blob)
571 repo.object_store.add_object(blob)
572 return blob.id
572 return blob.id
573
573
    @reraise_safe_exceptions
    def create_commit(self, wire, author, committer, message, branch, new_tree_id, date_args: list[int, int] = None):
        """
        Create a commit on ``branch`` pointing at tree ``new_tree_id``.

        ``author``/``committer`` are signature strings; ``date_args`` is an
        optional ``[commit_time, timezone_offset]`` pair applied to both
        signatures. Returns the new commit id as str.
        """
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:

            if date_args:
                current_time, offset = date_args

                kw = {
                    'time': current_time,
                    'offset': offset
                }
                author = create_signature_from_string(author, **kw)
                committer = create_signature_from_string(committer, **kw)

            tree = new_tree_id
            if isinstance(tree, (bytes, str)):
                # validate this tree is in the repo...
                tree = repo[safe_str(tree)].id

            parents = []
            # ensure we COMMIT on top of given branch head
            # check if this repo has ANY branches, otherwise it's a new branch case we need to make
            if branch in repo.branches.local:
                parents += [repo.branches[branch].target]
            elif [x for x in repo.branches.local]:
                parents += [repo.head.target]
            #else:
            # in case we want to commit on new branch we create it on top of HEAD
            #repo.branches.local.create(branch, repo.revparse_single('HEAD'))

            # # Create a new commit
            commit_oid = repo.create_commit(
                f'refs/heads/{branch}',  # the name of the reference to update
                author,  # the author of the commit
                committer,  # the committer of the commit
                message,  # the commit message
                tree,  # the tree produced by the index
                parents  # list of parents for the new commit, usually just one,
            )

            new_commit_id = safe_str(commit_oid)

            return new_commit_id
618
618
    @reraise_safe_exceptions
    def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
        """
        Apply ``updated``/``removed`` pathspecs through the repo index, write
        the resulting tree and commit it on ``branch`` via create_commit().

        ``commit_data`` carries author/committer/message plus commit
        time/timezone. Returns the new commit id.
        NOTE(review): the ``commit_tree`` argument is overwritten below with
        the index-built tree — confirm callers do not rely on it.
        """

        def mode2pygit(mode):
            """
            git only supports two filemode 644 and 755

            0o100755 -> 33261
            0o100644 -> 33188
            """
            return {
                0o100644: pygit2.GIT_FILEMODE_BLOB,
                0o100755: pygit2.GIT_FILEMODE_BLOB_EXECUTABLE,
                0o120000: pygit2.GIT_FILEMODE_LINK
            }.get(mode) or pygit2.GIT_FILEMODE_BLOB

        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            repo_index = repo.index

            for pathspec in updated:
                blob_id = repo.create_blob(pathspec['content'])
                ie = pygit2.IndexEntry(pathspec['path'], blob_id, mode2pygit(pathspec['mode']))
                repo_index.add(ie)

            for pathspec in removed:
                repo_index.remove(pathspec)

            # Write changes to the index
            repo_index.write()

            # Create a tree from the updated index
            commit_tree = repo_index.write_tree()

            new_tree_id = commit_tree

            author = commit_data['author']
            committer = commit_data['committer']
            message = commit_data['message']

            date_args = [int(commit_data['commit_time']), int(commit_data['commit_timezone'])]

            new_commit_id = self.create_commit(wire, author, committer, message, branch,
                                               new_tree_id, date_args=date_args)

            # libgit2, ensure the branch is there and exists
            self.create_branch(wire, branch, new_commit_id)

            # libgit2, set new ref to this created commit
            self.set_refs(wire, f'refs/heads/{branch}', new_commit_id)

            return new_commit_id
671
671
    @reraise_safe_exceptions
    def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
        """
        Fetch from ``url`` (local path or http(s)) into the wire repo.

        :param apply_refs: when True, write the fetched refs into the repo.
        :param refs: optional list of ref names to restrict the fetch to.
        :param update_after: when True, check out HEAD after applying refs.
        :return: mapping of remote ref name -> sha
        """
        if url != 'default' and '://' not in url:
            client = LocalGitClient(url)
        else:
            url_obj = url_parser(safe_bytes(url))
            o = self._build_opener(url)
            url = url_obj.authinfo()[0]
            client = HttpGitClient(base_url=url, opener=o)
        repo = self._factory.repo(wire)

        determine_wants = repo.object_store.determine_wants_all
        if refs:
            refs = [ascii_bytes(x) for x in refs]

            def determine_wants_requested(remote_refs):
                # keep only the hashes of the refs we were explicitly asked for
                determined = []
                for ref_name, ref_hash in remote_refs.items():
                    bytes_ref_name = safe_bytes(ref_name)

                    if bytes_ref_name in refs:
                        bytes_ref_hash = safe_bytes(ref_hash)
                        determined.append(bytes_ref_hash)
                return determined

            # swap with our custom requested wants
            determine_wants = determine_wants_requested

        try:
            remote_refs = client.fetch(
                path=url, target=repo, determine_wants=determine_wants)

        except NotGitRepository as e:
            log.warning(
                'Trying to fetch from "%s" failed, not a Git repository.', url)
            # Exception can contain unicode which we convert
            raise exceptions.AbortException(e)(repr(e))

        # mikhail: client.fetch() returns all the remote refs, but fetches only
        # refs filtered by `determine_wants` function. We need to filter result
        # as well
        if refs:
            remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}

        if apply_refs:
            # TODO: johbo: Needs proper test coverage with a git repository
            # that contains a tag object, so that we would end up with
            # a peeled ref at this point.
            for k in remote_refs:
                if k.endswith(PEELED_REF_MARKER):
                    log.debug("Skipping peeled reference %s", k)
                    continue
                repo[k] = remote_refs[k]

            if refs and not update_after:
                # mikhail: explicitly set the head to the last ref.
                repo[HEAD_MARKER] = remote_refs[refs[-1]]

        if update_after:
            # we want to check out HEAD
            repo[HEAD_MARKER] = remote_refs[HEAD_MARKER]
            index.build_index_from_tree(repo.path, repo.index_path(),
                                        repo.object_store, repo[HEAD_MARKER].tree)

        if isinstance(remote_refs, FetchPackResult):
            return remote_refs.refs
        return remote_refs
739
739
    @reraise_safe_exceptions
    def sync_fetch(self, wire, url, refs=None, all_refs=False):
        """
        Fetch refs from ``url`` using the git CLI.

        Lists remote refs with ``ls-remote`` (heads and tags unless
        ``all_refs``), skips peeled refs and HEAD, then fetches the matching
        refs in chunks. Returns an ordered mapping of ref name -> sha.
        """
        self._factory.repo(wire)
        if refs and not isinstance(refs, (list, tuple)):
            refs = [refs]

        config = self._wire_to_config(wire)
        # get all remote refs we'll use to fetch later
        cmd = ['ls-remote']
        if not all_refs:
            cmd += ['--heads', '--tags']
        cmd += [url]
        output, __ = self.run_git_command(
            wire, cmd, fail_on_stderr=False,
            _copts=self._remote_conf(config),
            extra_env={'GIT_TERMINAL_PROMPT': '0'})

        remote_refs = collections.OrderedDict()
        fetch_refs = []

        for ref_line in output.splitlines():
            sha, ref = ref_line.split(b'\t')
            sha = sha.strip()
            if ref in remote_refs:
                # duplicate, skip
                continue
            if ref.endswith(PEELED_REF_MARKER):
                log.debug("Skipping peeled reference %s", ref)
                continue
            # don't sync HEAD
            if ref in [HEAD_MARKER]:
                continue

            remote_refs[ref] = sha

            if refs and sha in refs:
                # we filter fetch using our specified refs
                fetch_refs.append(f'{safe_str(ref)}:{safe_str(ref)}')
            elif not refs:
                fetch_refs.append(f'{safe_str(ref)}:{safe_str(ref)}')
        log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))

        if fetch_refs:
            # fetch in chunks to keep the command line below OS argv limits
            for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
                fetch_refs_chunks = list(chunk)
                log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
                self.run_git_command(
                    wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
                    fail_on_stderr=False,
                    _copts=self._remote_conf(config),
                    extra_env={'GIT_TERMINAL_PROMPT': '0'})

        return remote_refs
793
793
    @reraise_safe_exceptions
    def sync_push(self, wire, url, refs=None):
        """
        Mirror-push all refs to ``url`` via the git CLI.

        ``refs`` is accepted but currently unused — ``--mirror`` pushes
        everything. Returns None early when the URL check fails.
        """
        # NOTE(review): check_url's second parameter is named ``config`` but
        # receives the wire dict here — confirm this is intentional
        if not self.check_url(url, wire):
            return
        config = self._wire_to_config(wire)
        self._factory.repo(wire)
        self.run_git_command(
            wire, ['push', url, '--mirror'], fail_on_stderr=False,
            _copts=self._remote_conf(config),
            extra_env={'GIT_TERMINAL_PROMPT': '0'})
804
804
805 @reraise_safe_exceptions
805 @reraise_safe_exceptions
806 def get_remote_refs(self, wire, url):
806 def get_remote_refs(self, wire, url):
807 repo = Repo(url)
807 repo = Repo(url)
808 return repo.get_refs()
808 return repo.get_refs()
809
809
810 @reraise_safe_exceptions
810 @reraise_safe_exceptions
811 def get_description(self, wire):
811 def get_description(self, wire):
812 repo = self._factory.repo(wire)
812 repo = self._factory.repo(wire)
813 return repo.get_description()
813 return repo.get_description()
814
814
815 @reraise_safe_exceptions
815 @reraise_safe_exceptions
816 def get_missing_revs(self, wire, rev1, rev2, path2):
816 def get_missing_revs(self, wire, rev1, rev2, path2):
817 repo = self._factory.repo(wire)
817 repo = self._factory.repo(wire)
818 LocalGitClient(thin_packs=False).fetch(path2, repo)
818 LocalGitClient(thin_packs=False).fetch(path2, repo)
819
819
820 wire_remote = wire.copy()
820 wire_remote = wire.copy()
821 wire_remote['path'] = path2
821 wire_remote['path'] = path2
822 repo_remote = self._factory.repo(wire_remote)
822 repo_remote = self._factory.repo(wire_remote)
823 LocalGitClient(thin_packs=False).fetch(path2, repo_remote)
823 LocalGitClient(thin_packs=False).fetch(path2, repo_remote)
824
824
825 revs = [
825 revs = [
826 x.commit.id
826 x.commit.id
827 for x in repo_remote.get_walker(include=[safe_bytes(rev2)], exclude=[safe_bytes(rev1)])]
827 for x in repo_remote.get_walker(include=[safe_bytes(rev2)], exclude=[safe_bytes(rev1)])]
828 return revs
828 return revs
829
829
830 @reraise_safe_exceptions
830 @reraise_safe_exceptions
831 def get_object(self, wire, sha, maybe_unreachable=False):
831 def get_object(self, wire, sha, maybe_unreachable=False):
832 cache_on, context_uid, repo_id = self._cache_on(wire)
832 cache_on, context_uid, repo_id = self._cache_on(wire)
833 region = self._region(wire)
833 region = self._region(wire)
834
834
835 @region.conditional_cache_on_arguments(condition=cache_on)
835 @region.conditional_cache_on_arguments(condition=cache_on)
836 def _get_object(_context_uid, _repo_id, _sha):
836 def _get_object(_context_uid, _repo_id, _sha):
837 repo_init = self._factory.repo_libgit2(wire)
837 repo_init = self._factory.repo_libgit2(wire)
838 with repo_init as repo:
838 with repo_init as repo:
839
839
840 missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
840 missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
841 try:
841 try:
842 commit = repo.revparse_single(sha)
842 commit = repo.revparse_single(sha)
843 except KeyError:
843 except KeyError:
844 # NOTE(marcink): KeyError doesn't give us any meaningful information
844 # NOTE(marcink): KeyError doesn't give us any meaningful information
845 # here, we instead give something more explicit
845 # here, we instead give something more explicit
846 e = exceptions.RefNotFoundException('SHA: %s not found', sha)
846 e = exceptions.RefNotFoundException('SHA: %s not found', sha)
847 raise exceptions.LookupException(e)(missing_commit_err)
847 raise exceptions.LookupException(e)(missing_commit_err)
848 except ValueError as e:
848 except ValueError as e:
849 raise exceptions.LookupException(e)(missing_commit_err)
849 raise exceptions.LookupException(e)(missing_commit_err)
850
850
851 is_tag = False
851 is_tag = False
852 if isinstance(commit, pygit2.Tag):
852 if isinstance(commit, pygit2.Tag):
853 commit = repo.get(commit.target)
853 commit = repo.get(commit.target)
854 is_tag = True
854 is_tag = True
855
855
856 check_dangling = True
856 check_dangling = True
857 if is_tag:
857 if is_tag:
858 check_dangling = False
858 check_dangling = False
859
859
860 if check_dangling and maybe_unreachable:
860 if check_dangling and maybe_unreachable:
861 check_dangling = False
861 check_dangling = False
862
862
863 # we used a reference and it parsed means we're not having a dangling commit
863 # we used a reference and it parsed means we're not having a dangling commit
864 if sha != commit.hex:
864 if sha != commit.hex:
865 check_dangling = False
865 check_dangling = False
866
866
867 if check_dangling:
867 if check_dangling:
868 # check for dangling commit
868 # check for dangling commit
869 for branch in repo.branches.with_commit(commit.hex):
869 for branch in repo.branches.with_commit(commit.hex):
870 if branch:
870 if branch:
871 break
871 break
872 else:
872 else:
873 # NOTE(marcink): Empty error doesn't give us any meaningful information
873 # NOTE(marcink): Empty error doesn't give us any meaningful information
874 # here, we instead give something more explicit
874 # here, we instead give something more explicit
875 e = exceptions.RefNotFoundException('SHA: %s not found in branches', sha)
875 e = exceptions.RefNotFoundException('SHA: %s not found in branches', sha)
876 raise exceptions.LookupException(e)(missing_commit_err)
876 raise exceptions.LookupException(e)(missing_commit_err)
877
877
878 commit_id = commit.hex
878 commit_id = commit.hex
879 type_str = commit.type_str
879 type_str = commit.type_str
880
880
881 return {
881 return {
882 'id': commit_id,
882 'id': commit_id,
883 'type': type_str,
883 'type': type_str,
884 'commit_id': commit_id,
884 'commit_id': commit_id,
885 'idx': 0
885 'idx': 0
886 }
886 }
887
887
888 return _get_object(context_uid, repo_id, sha)
888 return _get_object(context_uid, repo_id, sha)
889
889
890 @reraise_safe_exceptions
890 @reraise_safe_exceptions
891 def get_refs(self, wire):
891 def get_refs(self, wire):
892 cache_on, context_uid, repo_id = self._cache_on(wire)
892 cache_on, context_uid, repo_id = self._cache_on(wire)
893 region = self._region(wire)
893 region = self._region(wire)
894
894
895 @region.conditional_cache_on_arguments(condition=cache_on)
895 @region.conditional_cache_on_arguments(condition=cache_on)
896 def _get_refs(_context_uid, _repo_id):
896 def _get_refs(_context_uid, _repo_id):
897
897
898 repo_init = self._factory.repo_libgit2(wire)
898 repo_init = self._factory.repo_libgit2(wire)
899 with repo_init as repo:
899 with repo_init as repo:
900 regex = re.compile('^refs/(heads|tags)/')
900 regex = re.compile('^refs/(heads|tags)/')
901 return {x.name: x.target.hex for x in
901 return {x.name: x.target.hex for x in
902 [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]}
902 [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]}
903
903
904 return _get_refs(context_uid, repo_id)
904 return _get_refs(context_uid, repo_id)
905
905
906 @reraise_safe_exceptions
906 @reraise_safe_exceptions
907 def get_branch_pointers(self, wire):
907 def get_branch_pointers(self, wire):
908 cache_on, context_uid, repo_id = self._cache_on(wire)
908 cache_on, context_uid, repo_id = self._cache_on(wire)
909 region = self._region(wire)
909 region = self._region(wire)
910
910
911 @region.conditional_cache_on_arguments(condition=cache_on)
911 @region.conditional_cache_on_arguments(condition=cache_on)
912 def _get_branch_pointers(_context_uid, _repo_id):
912 def _get_branch_pointers(_context_uid, _repo_id):
913
913
914 repo_init = self._factory.repo_libgit2(wire)
914 repo_init = self._factory.repo_libgit2(wire)
915 regex = re.compile('^refs/heads')
915 regex = re.compile('^refs/heads')
916 with repo_init as repo:
916 with repo_init as repo:
917 branches = [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]
917 branches = [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]
918 return {x.target.hex: x.shorthand for x in branches}
918 return {x.target.hex: x.shorthand for x in branches}
919
919
920 return _get_branch_pointers(context_uid, repo_id)
920 return _get_branch_pointers(context_uid, repo_id)
921
921
922 @reraise_safe_exceptions
922 @reraise_safe_exceptions
923 def head(self, wire, show_exc=True):
923 def head(self, wire, show_exc=True):
924 cache_on, context_uid, repo_id = self._cache_on(wire)
924 cache_on, context_uid, repo_id = self._cache_on(wire)
925 region = self._region(wire)
925 region = self._region(wire)
926
926
927 @region.conditional_cache_on_arguments(condition=cache_on)
927 @region.conditional_cache_on_arguments(condition=cache_on)
928 def _head(_context_uid, _repo_id, _show_exc):
928 def _head(_context_uid, _repo_id, _show_exc):
929 repo_init = self._factory.repo_libgit2(wire)
929 repo_init = self._factory.repo_libgit2(wire)
930 with repo_init as repo:
930 with repo_init as repo:
931 try:
931 try:
932 return repo.head.peel().hex
932 return repo.head.peel().hex
933 except Exception:
933 except Exception:
934 if show_exc:
934 if show_exc:
935 raise
935 raise
936 return _head(context_uid, repo_id, show_exc)
936 return _head(context_uid, repo_id, show_exc)
937
937
938 @reraise_safe_exceptions
938 @reraise_safe_exceptions
939 def init(self, wire):
939 def init(self, wire):
940 repo_path = safe_str(wire['path'])
940 repo_path = safe_str(wire['path'])
941 self.repo = Repo.init(repo_path)
941 self.repo = Repo.init(repo_path)
942
942
943 @reraise_safe_exceptions
943 @reraise_safe_exceptions
944 def init_bare(self, wire):
944 def init_bare(self, wire):
945 repo_path = safe_str(wire['path'])
945 repo_path = safe_str(wire['path'])
946 self.repo = Repo.init_bare(repo_path)
946 self.repo = Repo.init_bare(repo_path)
947
947
948 @reraise_safe_exceptions
948 @reraise_safe_exceptions
949 def revision(self, wire, rev):
949 def revision(self, wire, rev):
950
950
951 cache_on, context_uid, repo_id = self._cache_on(wire)
951 cache_on, context_uid, repo_id = self._cache_on(wire)
952 region = self._region(wire)
952 region = self._region(wire)
953
953
954 @region.conditional_cache_on_arguments(condition=cache_on)
954 @region.conditional_cache_on_arguments(condition=cache_on)
955 def _revision(_context_uid, _repo_id, _rev):
955 def _revision(_context_uid, _repo_id, _rev):
956 repo_init = self._factory.repo_libgit2(wire)
956 repo_init = self._factory.repo_libgit2(wire)
957 with repo_init as repo:
957 with repo_init as repo:
958 commit = repo[rev]
958 commit = repo[rev]
959 obj_data = {
959 obj_data = {
960 'id': commit.id.hex,
960 'id': commit.id.hex,
961 }
961 }
962 # tree objects itself don't have tree_id attribute
962 # tree objects itself don't have tree_id attribute
963 if hasattr(commit, 'tree_id'):
963 if hasattr(commit, 'tree_id'):
964 obj_data['tree'] = commit.tree_id.hex
964 obj_data['tree'] = commit.tree_id.hex
965
965
966 return obj_data
966 return obj_data
967 return _revision(context_uid, repo_id, rev)
967 return _revision(context_uid, repo_id, rev)
968
968
969 @reraise_safe_exceptions
969 @reraise_safe_exceptions
970 def date(self, wire, commit_id):
970 def date(self, wire, commit_id):
971 cache_on, context_uid, repo_id = self._cache_on(wire)
971 cache_on, context_uid, repo_id = self._cache_on(wire)
972 region = self._region(wire)
972 region = self._region(wire)
973
973
974 @region.conditional_cache_on_arguments(condition=cache_on)
974 @region.conditional_cache_on_arguments(condition=cache_on)
975 def _date(_repo_id, _commit_id):
975 def _date(_repo_id, _commit_id):
976 repo_init = self._factory.repo_libgit2(wire)
976 repo_init = self._factory.repo_libgit2(wire)
977 with repo_init as repo:
977 with repo_init as repo:
978 commit = repo[commit_id]
978 commit = repo[commit_id]
979
979
980 if hasattr(commit, 'commit_time'):
980 if hasattr(commit, 'commit_time'):
981 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
981 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
982 else:
982 else:
983 commit = commit.get_object()
983 commit = commit.get_object()
984 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
984 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
985
985
986 # TODO(marcink): check dulwich difference of offset vs timezone
986 # TODO(marcink): check dulwich difference of offset vs timezone
987 return [commit_time, commit_time_offset]
987 return [commit_time, commit_time_offset]
988 return _date(repo_id, commit_id)
988 return _date(repo_id, commit_id)
989
989
990 @reraise_safe_exceptions
990 @reraise_safe_exceptions
991 def author(self, wire, commit_id):
991 def author(self, wire, commit_id):
992 cache_on, context_uid, repo_id = self._cache_on(wire)
992 cache_on, context_uid, repo_id = self._cache_on(wire)
993 region = self._region(wire)
993 region = self._region(wire)
994
994
995 @region.conditional_cache_on_arguments(condition=cache_on)
995 @region.conditional_cache_on_arguments(condition=cache_on)
996 def _author(_repo_id, _commit_id):
996 def _author(_repo_id, _commit_id):
997 repo_init = self._factory.repo_libgit2(wire)
997 repo_init = self._factory.repo_libgit2(wire)
998 with repo_init as repo:
998 with repo_init as repo:
999 commit = repo[commit_id]
999 commit = repo[commit_id]
1000
1000
1001 if hasattr(commit, 'author'):
1001 if hasattr(commit, 'author'):
1002 author = commit.author
1002 author = commit.author
1003 else:
1003 else:
1004 author = commit.get_object().author
1004 author = commit.get_object().author
1005
1005
1006 if author.email:
1006 if author.email:
1007 return f"{author.name} <{author.email}>"
1007 return f"{author.name} <{author.email}>"
1008
1008
1009 try:
1009 try:
1010 return f"{author.name}"
1010 return f"{author.name}"
1011 except Exception:
1011 except Exception:
1012 return f"{safe_str(author.raw_name)}"
1012 return f"{safe_str(author.raw_name)}"
1013
1013
1014 return _author(repo_id, commit_id)
1014 return _author(repo_id, commit_id)
1015
1015
1016 @reraise_safe_exceptions
1016 @reraise_safe_exceptions
1017 def message(self, wire, commit_id):
1017 def message(self, wire, commit_id):
1018 cache_on, context_uid, repo_id = self._cache_on(wire)
1018 cache_on, context_uid, repo_id = self._cache_on(wire)
1019 region = self._region(wire)
1019 region = self._region(wire)
1020
1020
1021 @region.conditional_cache_on_arguments(condition=cache_on)
1021 @region.conditional_cache_on_arguments(condition=cache_on)
1022 def _message(_repo_id, _commit_id):
1022 def _message(_repo_id, _commit_id):
1023 repo_init = self._factory.repo_libgit2(wire)
1023 repo_init = self._factory.repo_libgit2(wire)
1024 with repo_init as repo:
1024 with repo_init as repo:
1025 commit = repo[commit_id]
1025 commit = repo[commit_id]
1026 return commit.message
1026 return commit.message
1027 return _message(repo_id, commit_id)
1027 return _message(repo_id, commit_id)
1028
1028
1029 @reraise_safe_exceptions
1029 @reraise_safe_exceptions
1030 def parents(self, wire, commit_id):
1030 def parents(self, wire, commit_id):
1031 cache_on, context_uid, repo_id = self._cache_on(wire)
1031 cache_on, context_uid, repo_id = self._cache_on(wire)
1032 region = self._region(wire)
1032 region = self._region(wire)
1033
1033
1034 @region.conditional_cache_on_arguments(condition=cache_on)
1034 @region.conditional_cache_on_arguments(condition=cache_on)
1035 def _parents(_repo_id, _commit_id):
1035 def _parents(_repo_id, _commit_id):
1036 repo_init = self._factory.repo_libgit2(wire)
1036 repo_init = self._factory.repo_libgit2(wire)
1037 with repo_init as repo:
1037 with repo_init as repo:
1038 commit = repo[commit_id]
1038 commit = repo[commit_id]
1039 if hasattr(commit, 'parent_ids'):
1039 if hasattr(commit, 'parent_ids'):
1040 parent_ids = commit.parent_ids
1040 parent_ids = commit.parent_ids
1041 else:
1041 else:
1042 parent_ids = commit.get_object().parent_ids
1042 parent_ids = commit.get_object().parent_ids
1043
1043
1044 return [x.hex for x in parent_ids]
1044 return [x.hex for x in parent_ids]
1045 return _parents(repo_id, commit_id)
1045 return _parents(repo_id, commit_id)
1046
1046
1047 @reraise_safe_exceptions
1047 @reraise_safe_exceptions
1048 def children(self, wire, commit_id):
1048 def children(self, wire, commit_id):
1049 cache_on, context_uid, repo_id = self._cache_on(wire)
1049 cache_on, context_uid, repo_id = self._cache_on(wire)
1050 region = self._region(wire)
1050 region = self._region(wire)
1051
1051
1052 head = self.head(wire)
1052 head = self.head(wire)
1053
1053
1054 @region.conditional_cache_on_arguments(condition=cache_on)
1054 @region.conditional_cache_on_arguments(condition=cache_on)
1055 def _children(_repo_id, _commit_id):
1055 def _children(_repo_id, _commit_id):
1056
1056
1057 output, __ = self.run_git_command(
1057 output, __ = self.run_git_command(
1058 wire, ['rev-list', '--all', '--children', f'{commit_id}^..{head}'])
1058 wire, ['rev-list', '--all', '--children', f'{commit_id}^..{head}'])
1059
1059
1060 child_ids = []
1060 child_ids = []
1061 pat = re.compile(fr'^{commit_id}')
1061 pat = re.compile(fr'^{commit_id}')
1062 for line in output.splitlines():
1062 for line in output.splitlines():
1063 line = safe_str(line)
1063 line = safe_str(line)
1064 if pat.match(line):
1064 if pat.match(line):
1065 found_ids = line.split(' ')[1:]
1065 found_ids = line.split(' ')[1:]
1066 child_ids.extend(found_ids)
1066 child_ids.extend(found_ids)
1067 break
1067 break
1068
1068
1069 return child_ids
1069 return child_ids
1070 return _children(repo_id, commit_id)
1070 return _children(repo_id, commit_id)
1071
1071
1072 @reraise_safe_exceptions
1072 @reraise_safe_exceptions
1073 def set_refs(self, wire, key, value):
1073 def set_refs(self, wire, key, value):
1074 repo_init = self._factory.repo_libgit2(wire)
1074 repo_init = self._factory.repo_libgit2(wire)
1075 with repo_init as repo:
1075 with repo_init as repo:
1076 repo.references.create(key, value, force=True)
1076 repo.references.create(key, value, force=True)
1077
1077
1078 @reraise_safe_exceptions
1078 @reraise_safe_exceptions
1079 def create_branch(self, wire, branch_name, commit_id, force=False):
1079 def create_branch(self, wire, branch_name, commit_id, force=False):
1080 repo_init = self._factory.repo_libgit2(wire)
1080 repo_init = self._factory.repo_libgit2(wire)
1081 with repo_init as repo:
1081 with repo_init as repo:
1082 if commit_id:
1082 if commit_id:
1083 commit = repo[commit_id]
1083 commit = repo[commit_id]
1084 else:
1084 else:
1085 # if commit is not given just use the HEAD
1085 # if commit is not given just use the HEAD
1086 commit = repo.head()
1086 commit = repo.head()
1087
1087
1088 if force:
1088 if force:
1089 repo.branches.local.create(branch_name, commit, force=force)
1089 repo.branches.local.create(branch_name, commit, force=force)
1090 elif not repo.branches.get(branch_name):
1090 elif not repo.branches.get(branch_name):
1091 # create only if that branch isn't existing
1091 # create only if that branch isn't existing
1092 repo.branches.local.create(branch_name, commit, force=force)
1092 repo.branches.local.create(branch_name, commit, force=force)
1093
1093
1094 @reraise_safe_exceptions
1094 @reraise_safe_exceptions
1095 def remove_ref(self, wire, key):
1095 def remove_ref(self, wire, key):
1096 repo_init = self._factory.repo_libgit2(wire)
1096 repo_init = self._factory.repo_libgit2(wire)
1097 with repo_init as repo:
1097 with repo_init as repo:
1098 repo.references.delete(key)
1098 repo.references.delete(key)
1099
1099
1100 @reraise_safe_exceptions
1100 @reraise_safe_exceptions
1101 def tag_remove(self, wire, tag_name):
1101 def tag_remove(self, wire, tag_name):
1102 repo_init = self._factory.repo_libgit2(wire)
1102 repo_init = self._factory.repo_libgit2(wire)
1103 with repo_init as repo:
1103 with repo_init as repo:
1104 key = f'refs/tags/{tag_name}'
1104 key = f'refs/tags/{tag_name}'
1105 repo.references.delete(key)
1105 repo.references.delete(key)
1106
1106
1107 @reraise_safe_exceptions
1107 @reraise_safe_exceptions
1108 def tree_changes(self, wire, source_id, target_id):
1108 def tree_changes(self, wire, source_id, target_id):
1109 repo = self._factory.repo(wire)
1109 repo = self._factory.repo(wire)
1110 # source can be empty
1110 # source can be empty
1111 source_id = safe_bytes(source_id if source_id else b'')
1111 source_id = safe_bytes(source_id if source_id else b'')
1112 target_id = safe_bytes(target_id)
1112 target_id = safe_bytes(target_id)
1113
1113
1114 source = repo[source_id].tree if source_id else None
1114 source = repo[source_id].tree if source_id else None
1115 target = repo[target_id].tree
1115 target = repo[target_id].tree
1116 result = repo.object_store.tree_changes(source, target)
1116 result = repo.object_store.tree_changes(source, target)
1117
1117
1118 added = set()
1118 added = set()
1119 modified = set()
1119 modified = set()
1120 deleted = set()
1120 deleted = set()
1121 for (old_path, new_path), (_, _), (_, _) in list(result):
1121 for (old_path, new_path), (_, _), (_, _) in list(result):
1122 if new_path and old_path:
1122 if new_path and old_path:
1123 modified.add(new_path)
1123 modified.add(new_path)
1124 elif new_path and not old_path:
1124 elif new_path and not old_path:
1125 added.add(new_path)
1125 added.add(new_path)
1126 elif not new_path and old_path:
1126 elif not new_path and old_path:
1127 deleted.add(old_path)
1127 deleted.add(old_path)
1128
1128
1129 return list(added), list(modified), list(deleted)
1129 return list(added), list(modified), list(deleted)
1130
1130
1131 @reraise_safe_exceptions
1131 @reraise_safe_exceptions
1132 def tree_and_type_for_path(self, wire, commit_id, path):
1132 def tree_and_type_for_path(self, wire, commit_id, path):
1133
1133
1134 cache_on, context_uid, repo_id = self._cache_on(wire)
1134 cache_on, context_uid, repo_id = self._cache_on(wire)
1135 region = self._region(wire)
1135 region = self._region(wire)
1136
1136
1137 @region.conditional_cache_on_arguments(condition=cache_on)
1137 @region.conditional_cache_on_arguments(condition=cache_on)
1138 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
1138 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
1139 repo_init = self._factory.repo_libgit2(wire)
1139 repo_init = self._factory.repo_libgit2(wire)
1140
1140
1141 with repo_init as repo:
1141 with repo_init as repo:
1142 commit = repo[commit_id]
1142 commit = repo[commit_id]
1143 try:
1143 try:
1144 tree = commit.tree[path]
1144 tree = commit.tree[path]
1145 except KeyError:
1145 except KeyError:
1146 return None, None, None
1146 return None, None, None
1147
1147
1148 return tree.id.hex, tree.type_str, tree.filemode
1148 return tree.id.hex, tree.type_str, tree.filemode
1149 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
1149 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
1150
1150
1151 @reraise_safe_exceptions
1151 @reraise_safe_exceptions
1152 def tree_items(self, wire, tree_id):
1152 def tree_items(self, wire, tree_id):
1153 cache_on, context_uid, repo_id = self._cache_on(wire)
1153 cache_on, context_uid, repo_id = self._cache_on(wire)
1154 region = self._region(wire)
1154 region = self._region(wire)
1155
1155
1156 @region.conditional_cache_on_arguments(condition=cache_on)
1156 @region.conditional_cache_on_arguments(condition=cache_on)
1157 def _tree_items(_repo_id, _tree_id):
1157 def _tree_items(_repo_id, _tree_id):
1158
1158
1159 repo_init = self._factory.repo_libgit2(wire)
1159 repo_init = self._factory.repo_libgit2(wire)
1160 with repo_init as repo:
1160 with repo_init as repo:
1161 try:
1161 try:
1162 tree = repo[tree_id]
1162 tree = repo[tree_id]
1163 except KeyError:
1163 except KeyError:
1164 raise ObjectMissing(f'No tree with id: {tree_id}')
1164 raise ObjectMissing(f'No tree with id: {tree_id}')
1165
1165
1166 result = []
1166 result = []
1167 for item in tree:
1167 for item in tree:
1168 item_sha = item.hex
1168 item_sha = item.hex
1169 item_mode = item.filemode
1169 item_mode = item.filemode
1170 item_type = item.type_str
1170 item_type = item.type_str
1171
1171
1172 if item_type == 'commit':
1172 if item_type == 'commit':
1173 # NOTE(marcink): submodules we translate to 'link' for backward compat
1173 # NOTE(marcink): submodules we translate to 'link' for backward compat
1174 item_type = 'link'
1174 item_type = 'link'
1175
1175
1176 result.append((item.name, item_mode, item_sha, item_type))
1176 result.append((item.name, item_mode, item_sha, item_type))
1177 return result
1177 return result
1178 return _tree_items(repo_id, tree_id)
1178 return _tree_items(repo_id, tree_id)
1179
1179
1180 @reraise_safe_exceptions
1180 @reraise_safe_exceptions
1181 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1181 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1182 """
1182 """
1183 Old version that uses subprocess to call diff
1183 Old version that uses subprocess to call diff
1184 """
1184 """
1185
1185
1186 flags = [
1186 flags = [
1187 '-U%s' % context, '--patch',
1187 '-U%s' % context, '--patch',
1188 '--binary',
1188 '--binary',
1189 '--find-renames',
1189 '--find-renames',
1190 '--no-indent-heuristic',
1190 '--no-indent-heuristic',
1191 # '--indent-heuristic',
1191 # '--indent-heuristic',
1192 #'--full-index',
1192 #'--full-index',
1193 #'--abbrev=40'
1193 #'--abbrev=40'
1194 ]
1194 ]
1195
1195
1196 if opt_ignorews:
1196 if opt_ignorews:
1197 flags.append('--ignore-all-space')
1197 flags.append('--ignore-all-space')
1198
1198
1199 if commit_id_1 == self.EMPTY_COMMIT:
1199 if commit_id_1 == self.EMPTY_COMMIT:
1200 cmd = ['show'] + flags + [commit_id_2]
1200 cmd = ['show'] + flags + [commit_id_2]
1201 else:
1201 else:
1202 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1202 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1203
1203
1204 if file_filter:
1204 if file_filter:
1205 cmd.extend(['--', file_filter])
1205 cmd.extend(['--', file_filter])
1206
1206
1207 diff, __ = self.run_git_command(wire, cmd)
1207 diff, __ = self.run_git_command(wire, cmd)
1208 # If we used 'show' command, strip first few lines (until actual diff
1208 # If we used 'show' command, strip first few lines (until actual diff
1209 # starts)
1209 # starts)
1210 if commit_id_1 == self.EMPTY_COMMIT:
1210 if commit_id_1 == self.EMPTY_COMMIT:
1211 lines = diff.splitlines()
1211 lines = diff.splitlines()
1212 x = 0
1212 x = 0
1213 for line in lines:
1213 for line in lines:
1214 if line.startswith(b'diff'):
1214 if line.startswith(b'diff'):
1215 break
1215 break
1216 x += 1
1216 x += 1
1217 # Append new line just like 'diff' command do
1217 # Append new line just like 'diff' command do
1218 diff = '\n'.join(lines[x:]) + '\n'
1218 diff = '\n'.join(lines[x:]) + '\n'
1219 return diff
1219 return diff
1220
1220
1221 @reraise_safe_exceptions
1221 @reraise_safe_exceptions
1222 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1222 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1223 repo_init = self._factory.repo_libgit2(wire)
1223 repo_init = self._factory.repo_libgit2(wire)
1224
1224
1225 with repo_init as repo:
1225 with repo_init as repo:
1226 swap = True
1226 swap = True
1227 flags = 0
1227 flags = 0
1228 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1228 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1229
1229
1230 if opt_ignorews:
1230 if opt_ignorews:
1231 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1231 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1232
1232
1233 if commit_id_1 == self.EMPTY_COMMIT:
1233 if commit_id_1 == self.EMPTY_COMMIT:
1234 comm1 = repo[commit_id_2]
1234 comm1 = repo[commit_id_2]
1235 diff_obj = comm1.tree.diff_to_tree(
1235 diff_obj = comm1.tree.diff_to_tree(
1236 flags=flags, context_lines=context, swap=swap)
1236 flags=flags, context_lines=context, swap=swap)
1237
1237
1238 else:
1238 else:
1239 comm1 = repo[commit_id_2]
1239 comm1 = repo[commit_id_2]
1240 comm2 = repo[commit_id_1]
1240 comm2 = repo[commit_id_1]
1241 diff_obj = comm1.tree.diff_to_tree(
1241 diff_obj = comm1.tree.diff_to_tree(
1242 comm2.tree, flags=flags, context_lines=context, swap=swap)
1242 comm2.tree, flags=flags, context_lines=context, swap=swap)
1243 similar_flags = 0
1243 similar_flags = 0
1244 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1244 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1245 diff_obj.find_similar(flags=similar_flags)
1245 diff_obj.find_similar(flags=similar_flags)
1246
1246
1247 if file_filter:
1247 if file_filter:
1248 for p in diff_obj:
1248 for p in diff_obj:
1249 if p.delta.old_file.path == file_filter:
1249 if p.delta.old_file.path == file_filter:
1250 return BytesEnvelope(p.data) or BytesEnvelope(b'')
1250 return BytesEnvelope(p.data) or BytesEnvelope(b'')
1251 # fo matching path == no diff
1251 # fo matching path == no diff
1252 return BytesEnvelope(b'')
1252 return BytesEnvelope(b'')
1253
1253
1254 return BytesEnvelope(safe_bytes(diff_obj.patch)) or BytesEnvelope(b'')
1254 return BytesEnvelope(safe_bytes(diff_obj.patch)) or BytesEnvelope(b'')
1255
1255
1256 @reraise_safe_exceptions
1256 @reraise_safe_exceptions
1257 def node_history(self, wire, commit_id, path, limit):
1257 def node_history(self, wire, commit_id, path, limit):
1258 cache_on, context_uid, repo_id = self._cache_on(wire)
1258 cache_on, context_uid, repo_id = self._cache_on(wire)
1259 region = self._region(wire)
1259 region = self._region(wire)
1260
1260
1261 @region.conditional_cache_on_arguments(condition=cache_on)
1261 @region.conditional_cache_on_arguments(condition=cache_on)
1262 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1262 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1263 # optimize for n==1, rev-list is much faster for that use-case
1263 # optimize for n==1, rev-list is much faster for that use-case
1264 if limit == 1:
1264 if limit == 1:
1265 cmd = ['rev-list', '-1', commit_id, '--', path]
1265 cmd = ['rev-list', '-1', commit_id, '--', path]
1266 else:
1266 else:
1267 cmd = ['log']
1267 cmd = ['log']
1268 if limit:
1268 if limit:
1269 cmd.extend(['-n', str(safe_int(limit, 0))])
1269 cmd.extend(['-n', str(safe_int(limit, 0))])
1270 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1270 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1271
1271
1272 output, __ = self.run_git_command(wire, cmd)
1272 output, __ = self.run_git_command(wire, cmd)
1273 commit_ids = re.findall(rb'[0-9a-fA-F]{40}', output)
1273 commit_ids = re.findall(rb'[0-9a-fA-F]{40}', output)
1274
1274
1275 return [x for x in commit_ids]
1275 return [x for x in commit_ids]
1276 return _node_history(context_uid, repo_id, commit_id, path, limit)
1276 return _node_history(context_uid, repo_id, commit_id, path, limit)
1277
1277
1278 @reraise_safe_exceptions
1278 @reraise_safe_exceptions
1279 def node_annotate_legacy(self, wire, commit_id, path):
1279 def node_annotate_legacy(self, wire, commit_id, path):
1280 # note: replaced by pygit2 implementation
1280 # note: replaced by pygit2 implementation
1281 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1281 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1282 # -l ==> outputs long shas (and we need all 40 characters)
1282 # -l ==> outputs long shas (and we need all 40 characters)
1283 # --root ==> doesn't put '^' character for boundaries
1283 # --root ==> doesn't put '^' character for boundaries
1284 # -r commit_id ==> blames for the given commit
1284 # -r commit_id ==> blames for the given commit
1285 output, __ = self.run_git_command(wire, cmd)
1285 output, __ = self.run_git_command(wire, cmd)
1286
1286
1287 result = []
1287 result = []
1288 for i, blame_line in enumerate(output.splitlines()[:-1]):
1288 for i, blame_line in enumerate(output.splitlines()[:-1]):
1289 line_no = i + 1
1289 line_no = i + 1
1290 blame_commit_id, line = re.split(rb' ', blame_line, 1)
1290 blame_commit_id, line = re.split(rb' ', blame_line, 1)
1291 result.append((line_no, blame_commit_id, line))
1291 result.append((line_no, blame_commit_id, line))
1292
1292
1293 return result
1293 return result
1294
1294
1295 @reraise_safe_exceptions
1295 @reraise_safe_exceptions
1296 def node_annotate(self, wire, commit_id, path):
1296 def node_annotate(self, wire, commit_id, path):
1297
1297
1298 result_libgit = []
1298 result_libgit = []
1299 repo_init = self._factory.repo_libgit2(wire)
1299 repo_init = self._factory.repo_libgit2(wire)
1300 with repo_init as repo:
1300 with repo_init as repo:
1301 commit = repo[commit_id]
1301 commit = repo[commit_id]
1302 blame_obj = repo.blame(path, newest_commit=commit_id)
1302 blame_obj = repo.blame(path, newest_commit=commit_id)
1303 for i, line in enumerate(commit.tree[path].data.splitlines()):
1303 for i, line in enumerate(commit.tree[path].data.splitlines()):
1304 line_no = i + 1
1304 line_no = i + 1
1305 hunk = blame_obj.for_line(line_no)
1305 hunk = blame_obj.for_line(line_no)
1306 blame_commit_id = hunk.final_commit_id.hex
1306 blame_commit_id = hunk.final_commit_id.hex
1307
1307
1308 result_libgit.append((line_no, blame_commit_id, line))
1308 result_libgit.append((line_no, blame_commit_id, line))
1309
1309
1310 return result_libgit
1310 return BinaryEnvelope(result_libgit)
1311
1311
1312 @reraise_safe_exceptions
1312 @reraise_safe_exceptions
1313 def update_server_info(self, wire):
1313 def update_server_info(self, wire):
1314 repo = self._factory.repo(wire)
1314 repo = self._factory.repo(wire)
1315 update_server_info(repo)
1315 update_server_info(repo)
1316
1316
1317 @reraise_safe_exceptions
1317 @reraise_safe_exceptions
1318 def get_all_commit_ids(self, wire):
1318 def get_all_commit_ids(self, wire):
1319
1319
1320 cache_on, context_uid, repo_id = self._cache_on(wire)
1320 cache_on, context_uid, repo_id = self._cache_on(wire)
1321 region = self._region(wire)
1321 region = self._region(wire)
1322
1322
1323 @region.conditional_cache_on_arguments(condition=cache_on)
1323 @region.conditional_cache_on_arguments(condition=cache_on)
1324 def _get_all_commit_ids(_context_uid, _repo_id):
1324 def _get_all_commit_ids(_context_uid, _repo_id):
1325
1325
1326 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1326 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1327 try:
1327 try:
1328 output, __ = self.run_git_command(wire, cmd)
1328 output, __ = self.run_git_command(wire, cmd)
1329 return output.splitlines()
1329 return output.splitlines()
1330 except Exception:
1330 except Exception:
1331 # Can be raised for empty repositories
1331 # Can be raised for empty repositories
1332 return []
1332 return []
1333
1333
1334 @region.conditional_cache_on_arguments(condition=cache_on)
1334 @region.conditional_cache_on_arguments(condition=cache_on)
1335 def _get_all_commit_ids_pygit2(_context_uid, _repo_id):
1335 def _get_all_commit_ids_pygit2(_context_uid, _repo_id):
1336 repo_init = self._factory.repo_libgit2(wire)
1336 repo_init = self._factory.repo_libgit2(wire)
1337 from pygit2 import GIT_SORT_REVERSE, GIT_SORT_TIME, GIT_BRANCH_ALL
1337 from pygit2 import GIT_SORT_REVERSE, GIT_SORT_TIME, GIT_BRANCH_ALL
1338 results = []
1338 results = []
1339 with repo_init as repo:
1339 with repo_init as repo:
1340 for commit in repo.walk(repo.head.target, GIT_SORT_TIME | GIT_BRANCH_ALL | GIT_SORT_REVERSE):
1340 for commit in repo.walk(repo.head.target, GIT_SORT_TIME | GIT_BRANCH_ALL | GIT_SORT_REVERSE):
1341 results.append(commit.id.hex)
1341 results.append(commit.id.hex)
1342
1342
1343 return _get_all_commit_ids(context_uid, repo_id)
1343 return _get_all_commit_ids(context_uid, repo_id)
1344
1344
1345 @reraise_safe_exceptions
1345 @reraise_safe_exceptions
1346 def run_git_command(self, wire, cmd, **opts):
1346 def run_git_command(self, wire, cmd, **opts):
1347 path = wire.get('path', None)
1347 path = wire.get('path', None)
1348
1348
1349 if path and os.path.isdir(path):
1349 if path and os.path.isdir(path):
1350 opts['cwd'] = path
1350 opts['cwd'] = path
1351
1351
1352 if '_bare' in opts:
1352 if '_bare' in opts:
1353 _copts = []
1353 _copts = []
1354 del opts['_bare']
1354 del opts['_bare']
1355 else:
1355 else:
1356 _copts = ['-c', 'core.quotepath=false',]
1356 _copts = ['-c', 'core.quotepath=false',]
1357 safe_call = False
1357 safe_call = False
1358 if '_safe' in opts:
1358 if '_safe' in opts:
1359 # no exc on failure
1359 # no exc on failure
1360 del opts['_safe']
1360 del opts['_safe']
1361 safe_call = True
1361 safe_call = True
1362
1362
1363 if '_copts' in opts:
1363 if '_copts' in opts:
1364 _copts.extend(opts['_copts'] or [])
1364 _copts.extend(opts['_copts'] or [])
1365 del opts['_copts']
1365 del opts['_copts']
1366
1366
1367 gitenv = os.environ.copy()
1367 gitenv = os.environ.copy()
1368 gitenv.update(opts.pop('extra_env', {}))
1368 gitenv.update(opts.pop('extra_env', {}))
1369 # need to clean fix GIT_DIR !
1369 # need to clean fix GIT_DIR !
1370 if 'GIT_DIR' in gitenv:
1370 if 'GIT_DIR' in gitenv:
1371 del gitenv['GIT_DIR']
1371 del gitenv['GIT_DIR']
1372 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1372 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1373 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1373 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1374
1374
1375 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1375 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1376 _opts = {'env': gitenv, 'shell': False}
1376 _opts = {'env': gitenv, 'shell': False}
1377
1377
1378 proc = None
1378 proc = None
1379 try:
1379 try:
1380 _opts.update(opts)
1380 _opts.update(opts)
1381 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1381 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1382
1382
1383 return b''.join(proc), b''.join(proc.stderr)
1383 return b''.join(proc), b''.join(proc.stderr)
1384 except OSError as err:
1384 except OSError as err:
1385 cmd = ' '.join(map(safe_str, cmd)) # human friendly CMD
1385 cmd = ' '.join(map(safe_str, cmd)) # human friendly CMD
1386 tb_err = ("Couldn't run git command (%s).\n"
1386 tb_err = ("Couldn't run git command (%s).\n"
1387 "Original error was:%s\n"
1387 "Original error was:%s\n"
1388 "Call options:%s\n"
1388 "Call options:%s\n"
1389 % (cmd, err, _opts))
1389 % (cmd, err, _opts))
1390 log.exception(tb_err)
1390 log.exception(tb_err)
1391 if safe_call:
1391 if safe_call:
1392 return '', err
1392 return '', err
1393 else:
1393 else:
1394 raise exceptions.VcsException()(tb_err)
1394 raise exceptions.VcsException()(tb_err)
1395 finally:
1395 finally:
1396 if proc:
1396 if proc:
1397 proc.close()
1397 proc.close()
1398
1398
1399 @reraise_safe_exceptions
1399 @reraise_safe_exceptions
1400 def install_hooks(self, wire, force=False):
1400 def install_hooks(self, wire, force=False):
1401 from vcsserver.hook_utils import install_git_hooks
1401 from vcsserver.hook_utils import install_git_hooks
1402 bare = self.bare(wire)
1402 bare = self.bare(wire)
1403 path = wire['path']
1403 path = wire['path']
1404 binary_dir = settings.BINARY_DIR
1404 binary_dir = settings.BINARY_DIR
1405 if binary_dir:
1405 if binary_dir:
1406 os.path.join(binary_dir, 'python3')
1406 os.path.join(binary_dir, 'python3')
1407 return install_git_hooks(path, bare, force_create=force)
1407 return install_git_hooks(path, bare, force_create=force)
1408
1408
1409 @reraise_safe_exceptions
1409 @reraise_safe_exceptions
1410 def get_hooks_info(self, wire):
1410 def get_hooks_info(self, wire):
1411 from vcsserver.hook_utils import (
1411 from vcsserver.hook_utils import (
1412 get_git_pre_hook_version, get_git_post_hook_version)
1412 get_git_pre_hook_version, get_git_post_hook_version)
1413 bare = self.bare(wire)
1413 bare = self.bare(wire)
1414 path = wire['path']
1414 path = wire['path']
1415 return {
1415 return {
1416 'pre_version': get_git_pre_hook_version(path, bare),
1416 'pre_version': get_git_pre_hook_version(path, bare),
1417 'post_version': get_git_post_hook_version(path, bare),
1417 'post_version': get_git_post_hook_version(path, bare),
1418 }
1418 }
1419
1419
1420 @reraise_safe_exceptions
1420 @reraise_safe_exceptions
1421 def set_head_ref(self, wire, head_name):
1421 def set_head_ref(self, wire, head_name):
1422 log.debug('Setting refs/head to `%s`', head_name)
1422 log.debug('Setting refs/head to `%s`', head_name)
1423 repo_init = self._factory.repo_libgit2(wire)
1423 repo_init = self._factory.repo_libgit2(wire)
1424 with repo_init as repo:
1424 with repo_init as repo:
1425 repo.set_head(f'refs/heads/{head_name}')
1425 repo.set_head(f'refs/heads/{head_name}')
1426
1426
1427 return [head_name] + [f'set HEAD to refs/heads/{head_name}']
1427 return [head_name] + [f'set HEAD to refs/heads/{head_name}']
1428
1428
1429 @reraise_safe_exceptions
1429 @reraise_safe_exceptions
1430 def archive_repo(self, wire, archive_name_key, kind, mtime, archive_at_path,
1430 def archive_repo(self, wire, archive_name_key, kind, mtime, archive_at_path,
1431 archive_dir_name, commit_id, cache_config):
1431 archive_dir_name, commit_id, cache_config):
1432
1432
1433 def file_walker(_commit_id, path):
1433 def file_walker(_commit_id, path):
1434 repo_init = self._factory.repo_libgit2(wire)
1434 repo_init = self._factory.repo_libgit2(wire)
1435
1435
1436 with repo_init as repo:
1436 with repo_init as repo:
1437 commit = repo[commit_id]
1437 commit = repo[commit_id]
1438
1438
1439 if path in ['', '/']:
1439 if path in ['', '/']:
1440 tree = commit.tree
1440 tree = commit.tree
1441 else:
1441 else:
1442 tree = commit.tree[path.rstrip('/')]
1442 tree = commit.tree[path.rstrip('/')]
1443 tree_id = tree.id.hex
1443 tree_id = tree.id.hex
1444 try:
1444 try:
1445 tree = repo[tree_id]
1445 tree = repo[tree_id]
1446 except KeyError:
1446 except KeyError:
1447 raise ObjectMissing(f'No tree with id: {tree_id}')
1447 raise ObjectMissing(f'No tree with id: {tree_id}')
1448
1448
1449 index = LibGit2Index.Index()
1449 index = LibGit2Index.Index()
1450 index.read_tree(tree)
1450 index.read_tree(tree)
1451 file_iter = index
1451 file_iter = index
1452
1452
1453 for file_node in file_iter:
1453 for file_node in file_iter:
1454 file_path = file_node.path
1454 file_path = file_node.path
1455 mode = file_node.mode
1455 mode = file_node.mode
1456 is_link = stat.S_ISLNK(mode)
1456 is_link = stat.S_ISLNK(mode)
1457 if mode == pygit2.GIT_FILEMODE_COMMIT:
1457 if mode == pygit2.GIT_FILEMODE_COMMIT:
1458 log.debug('Skipping path %s as a commit node', file_path)
1458 log.debug('Skipping path %s as a commit node', file_path)
1459 continue
1459 continue
1460 yield ArchiveNode(file_path, mode, is_link, repo[file_node.hex].read_raw)
1460 yield ArchiveNode(file_path, mode, is_link, repo[file_node.hex].read_raw)
1461
1461
1462 return store_archive_in_cache(
1462 return store_archive_in_cache(
1463 file_walker, archive_name_key, kind, mtime, archive_at_path, archive_dir_name, commit_id, cache_config=cache_config)
1463 file_walker, archive_name_key, kind, mtime, archive_at_path, archive_dir_name, commit_id, cache_config=cache_config)
@@ -1,1159 +1,1159 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import binascii
17 import binascii
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import urllib.request
21 import urllib.request
22 import urllib.parse
22 import urllib.parse
23 import traceback
23 import traceback
24 import hashlib
24 import hashlib
25
25
26 from hgext import largefiles, rebase, purge
26 from hgext import largefiles, rebase, purge
27
27
28 from mercurial import commands
28 from mercurial import commands
29 from mercurial import unionrepo
29 from mercurial import unionrepo
30 from mercurial import verify
30 from mercurial import verify
31 from mercurial import repair
31 from mercurial import repair
32
32
33 import vcsserver
33 import vcsserver
34 from vcsserver import exceptions
34 from vcsserver import exceptions
35 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, store_archive_in_cache, ArchiveNode, BytesEnvelope, \
35 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, store_archive_in_cache, ArchiveNode, BytesEnvelope, \
36 BinaryEnvelope
36 BinaryEnvelope
37 from vcsserver.hgcompat import (
37 from vcsserver.hgcompat import (
38 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
38 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
39 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
39 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
40 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
40 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
41 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
41 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
42 RepoLookupError, InterventionRequired, RequirementError,
42 RepoLookupError, InterventionRequired, RequirementError,
43 alwaysmatcher, patternmatcher, hgutil, hgext_strip)
43 alwaysmatcher, patternmatcher, hgutil, hgext_strip)
44 from vcsserver.str_utils import ascii_bytes, ascii_str, safe_str, safe_bytes
44 from vcsserver.str_utils import ascii_bytes, ascii_str, safe_str, safe_bytes
45 from vcsserver.vcs_base import RemoteBase
45 from vcsserver.vcs_base import RemoteBase
46 from vcsserver.config import hooks as hooks_config
46 from vcsserver.config import hooks as hooks_config
47
47
48
48
49 log = logging.getLogger(__name__)
49 log = logging.getLogger(__name__)
50
50
51
51
52 def make_ui_from_config(repo_config):
52 def make_ui_from_config(repo_config):
53
53
54 class LoggingUI(ui.ui):
54 class LoggingUI(ui.ui):
55
55
56 def status(self, *msg, **opts):
56 def status(self, *msg, **opts):
57 str_msg = map(safe_str, msg)
57 str_msg = map(safe_str, msg)
58 log.info(' '.join(str_msg).rstrip('\n'))
58 log.info(' '.join(str_msg).rstrip('\n'))
59 #super(LoggingUI, self).status(*msg, **opts)
59 #super(LoggingUI, self).status(*msg, **opts)
60
60
61 def warn(self, *msg, **opts):
61 def warn(self, *msg, **opts):
62 str_msg = map(safe_str, msg)
62 str_msg = map(safe_str, msg)
63 log.warning('ui_logger:'+' '.join(str_msg).rstrip('\n'))
63 log.warning('ui_logger:'+' '.join(str_msg).rstrip('\n'))
64 #super(LoggingUI, self).warn(*msg, **opts)
64 #super(LoggingUI, self).warn(*msg, **opts)
65
65
66 def error(self, *msg, **opts):
66 def error(self, *msg, **opts):
67 str_msg = map(safe_str, msg)
67 str_msg = map(safe_str, msg)
68 log.error('ui_logger:'+' '.join(str_msg).rstrip('\n'))
68 log.error('ui_logger:'+' '.join(str_msg).rstrip('\n'))
69 #super(LoggingUI, self).error(*msg, **opts)
69 #super(LoggingUI, self).error(*msg, **opts)
70
70
71 def note(self, *msg, **opts):
71 def note(self, *msg, **opts):
72 str_msg = map(safe_str, msg)
72 str_msg = map(safe_str, msg)
73 log.info('ui_logger:'+' '.join(str_msg).rstrip('\n'))
73 log.info('ui_logger:'+' '.join(str_msg).rstrip('\n'))
74 #super(LoggingUI, self).note(*msg, **opts)
74 #super(LoggingUI, self).note(*msg, **opts)
75
75
76 def debug(self, *msg, **opts):
76 def debug(self, *msg, **opts):
77 str_msg = map(safe_str, msg)
77 str_msg = map(safe_str, msg)
78 log.debug('ui_logger:'+' '.join(str_msg).rstrip('\n'))
78 log.debug('ui_logger:'+' '.join(str_msg).rstrip('\n'))
79 #super(LoggingUI, self).debug(*msg, **opts)
79 #super(LoggingUI, self).debug(*msg, **opts)
80
80
81 baseui = LoggingUI()
81 baseui = LoggingUI()
82
82
83 # clean the baseui object
83 # clean the baseui object
84 baseui._ocfg = hgconfig.config()
84 baseui._ocfg = hgconfig.config()
85 baseui._ucfg = hgconfig.config()
85 baseui._ucfg = hgconfig.config()
86 baseui._tcfg = hgconfig.config()
86 baseui._tcfg = hgconfig.config()
87
87
88 for section, option, value in repo_config:
88 for section, option, value in repo_config:
89 baseui.setconfig(ascii_bytes(section), ascii_bytes(option), ascii_bytes(value))
89 baseui.setconfig(ascii_bytes(section), ascii_bytes(option), ascii_bytes(value))
90
90
91 # make our hgweb quiet so it doesn't print output
91 # make our hgweb quiet so it doesn't print output
92 baseui.setconfig(b'ui', b'quiet', b'true')
92 baseui.setconfig(b'ui', b'quiet', b'true')
93
93
94 baseui.setconfig(b'ui', b'paginate', b'never')
94 baseui.setconfig(b'ui', b'paginate', b'never')
95 # for better Error reporting of Mercurial
95 # for better Error reporting of Mercurial
96 baseui.setconfig(b'ui', b'message-output', b'stderr')
96 baseui.setconfig(b'ui', b'message-output', b'stderr')
97
97
98 # force mercurial to only use 1 thread, otherwise it may try to set a
98 # force mercurial to only use 1 thread, otherwise it may try to set a
99 # signal in a non-main thread, thus generating a ValueError.
99 # signal in a non-main thread, thus generating a ValueError.
100 baseui.setconfig(b'worker', b'numcpus', 1)
100 baseui.setconfig(b'worker', b'numcpus', 1)
101
101
102 # If there is no config for the largefiles extension, we explicitly disable
102 # If there is no config for the largefiles extension, we explicitly disable
103 # it here. This overrides settings from repositories hgrc file. Recent
103 # it here. This overrides settings from repositories hgrc file. Recent
104 # mercurial versions enable largefiles in hgrc on clone from largefile
104 # mercurial versions enable largefiles in hgrc on clone from largefile
105 # repo.
105 # repo.
106 if not baseui.hasconfig(b'extensions', b'largefiles'):
106 if not baseui.hasconfig(b'extensions', b'largefiles'):
107 log.debug('Explicitly disable largefiles extension for repo.')
107 log.debug('Explicitly disable largefiles extension for repo.')
108 baseui.setconfig(b'extensions', b'largefiles', b'!')
108 baseui.setconfig(b'extensions', b'largefiles', b'!')
109
109
110 return baseui
110 return baseui
111
111
112
112
113 def reraise_safe_exceptions(func):
113 def reraise_safe_exceptions(func):
114 """Decorator for converting mercurial exceptions to something neutral."""
114 """Decorator for converting mercurial exceptions to something neutral."""
115
115
116 def wrapper(*args, **kwargs):
116 def wrapper(*args, **kwargs):
117 try:
117 try:
118 return func(*args, **kwargs)
118 return func(*args, **kwargs)
119 except (Abort, InterventionRequired) as e:
119 except (Abort, InterventionRequired) as e:
120 raise_from_original(exceptions.AbortException(e), e)
120 raise_from_original(exceptions.AbortException(e), e)
121 except RepoLookupError as e:
121 except RepoLookupError as e:
122 raise_from_original(exceptions.LookupException(e), e)
122 raise_from_original(exceptions.LookupException(e), e)
123 except RequirementError as e:
123 except RequirementError as e:
124 raise_from_original(exceptions.RequirementException(e), e)
124 raise_from_original(exceptions.RequirementException(e), e)
125 except RepoError as e:
125 except RepoError as e:
126 raise_from_original(exceptions.VcsException(e), e)
126 raise_from_original(exceptions.VcsException(e), e)
127 except LookupError as e:
127 except LookupError as e:
128 raise_from_original(exceptions.LookupException(e), e)
128 raise_from_original(exceptions.LookupException(e), e)
129 except Exception as e:
129 except Exception as e:
130 if not hasattr(e, '_vcs_kind'):
130 if not hasattr(e, '_vcs_kind'):
131 log.exception("Unhandled exception in hg remote call")
131 log.exception("Unhandled exception in hg remote call")
132 raise_from_original(exceptions.UnhandledException(e), e)
132 raise_from_original(exceptions.UnhandledException(e), e)
133
133
134 raise
134 raise
135 return wrapper
135 return wrapper
136
136
137
137
138 class MercurialFactory(RepoFactory):
138 class MercurialFactory(RepoFactory):
139 repo_type = 'hg'
139 repo_type = 'hg'
140
140
141 def _create_config(self, config, hooks=True):
141 def _create_config(self, config, hooks=True):
142 if not hooks:
142 if not hooks:
143
143
144 hooks_to_clean = {
144 hooks_to_clean = {
145
145
146 hooks_config.HOOK_REPO_SIZE,
146 hooks_config.HOOK_REPO_SIZE,
147 hooks_config.HOOK_PRE_PULL,
147 hooks_config.HOOK_PRE_PULL,
148 hooks_config.HOOK_PULL,
148 hooks_config.HOOK_PULL,
149
149
150 hooks_config.HOOK_PRE_PUSH,
150 hooks_config.HOOK_PRE_PUSH,
151 # TODO: what about PRETXT, this was disabled in pre 5.0.0
151 # TODO: what about PRETXT, this was disabled in pre 5.0.0
152 hooks_config.HOOK_PRETX_PUSH,
152 hooks_config.HOOK_PRETX_PUSH,
153
153
154 }
154 }
155 new_config = []
155 new_config = []
156 for section, option, value in config:
156 for section, option, value in config:
157 if section == 'hooks' and option in hooks_to_clean:
157 if section == 'hooks' and option in hooks_to_clean:
158 continue
158 continue
159 new_config.append((section, option, value))
159 new_config.append((section, option, value))
160 config = new_config
160 config = new_config
161
161
162 baseui = make_ui_from_config(config)
162 baseui = make_ui_from_config(config)
163 return baseui
163 return baseui
164
164
165 def _create_repo(self, wire, create):
165 def _create_repo(self, wire, create):
166 baseui = self._create_config(wire["config"])
166 baseui = self._create_config(wire["config"])
167 repo = instance(baseui, safe_bytes(wire["path"]), create)
167 repo = instance(baseui, safe_bytes(wire["path"]), create)
168 log.debug('repository created: got HG object: %s', repo)
168 log.debug('repository created: got HG object: %s', repo)
169 return repo
169 return repo
170
170
171 def repo(self, wire, create=False):
171 def repo(self, wire, create=False):
172 """
172 """
173 Get a repository instance for the given path.
173 Get a repository instance for the given path.
174 """
174 """
175 return self._create_repo(wire, create)
175 return self._create_repo(wire, create)
176
176
177
177
178 def patch_ui_message_output(baseui):
178 def patch_ui_message_output(baseui):
179 baseui.setconfig(b'ui', b'quiet', b'false')
179 baseui.setconfig(b'ui', b'quiet', b'false')
180 output = io.BytesIO()
180 output = io.BytesIO()
181
181
182 def write(data, **unused_kwargs):
182 def write(data, **unused_kwargs):
183 output.write(data)
183 output.write(data)
184
184
185 baseui.status = write
185 baseui.status = write
186 baseui.write = write
186 baseui.write = write
187 baseui.warn = write
187 baseui.warn = write
188 baseui.debug = write
188 baseui.debug = write
189
189
190 return baseui, output
190 return baseui, output
191
191
192
192
193 def get_obfuscated_url(url_obj):
193 def get_obfuscated_url(url_obj):
194 url_obj.passwd = b'*****' if url_obj.passwd else url_obj.passwd
194 url_obj.passwd = b'*****' if url_obj.passwd else url_obj.passwd
195 url_obj.query = obfuscate_qs(url_obj.query)
195 url_obj.query = obfuscate_qs(url_obj.query)
196 obfuscated_uri = str(url_obj)
196 obfuscated_uri = str(url_obj)
197 return obfuscated_uri
197 return obfuscated_uri
198
198
199
199
200 def normalize_url_for_hg(url: str):
200 def normalize_url_for_hg(url: str):
201 _proto = None
201 _proto = None
202
202
203 if '+' in url[:url.find('://')]:
203 if '+' in url[:url.find('://')]:
204 _proto = url[0:url.find('+')]
204 _proto = url[0:url.find('+')]
205 url = url[url.find('+') + 1:]
205 url = url[url.find('+') + 1:]
206 return url, _proto
206 return url, _proto
207
207
208
208
209 class HgRemote(RemoteBase):
209 class HgRemote(RemoteBase):
210
210
211 def __init__(self, factory):
211 def __init__(self, factory):
212 self._factory = factory
212 self._factory = factory
213 self._bulk_methods = {
213 self._bulk_methods = {
214 "affected_files": self.ctx_files,
214 "affected_files": self.ctx_files,
215 "author": self.ctx_user,
215 "author": self.ctx_user,
216 "branch": self.ctx_branch,
216 "branch": self.ctx_branch,
217 "children": self.ctx_children,
217 "children": self.ctx_children,
218 "date": self.ctx_date,
218 "date": self.ctx_date,
219 "message": self.ctx_description,
219 "message": self.ctx_description,
220 "parents": self.ctx_parents,
220 "parents": self.ctx_parents,
221 "status": self.ctx_status,
221 "status": self.ctx_status,
222 "obsolete": self.ctx_obsolete,
222 "obsolete": self.ctx_obsolete,
223 "phase": self.ctx_phase,
223 "phase": self.ctx_phase,
224 "hidden": self.ctx_hidden,
224 "hidden": self.ctx_hidden,
225 "_file_paths": self.ctx_list,
225 "_file_paths": self.ctx_list,
226 }
226 }
227 self._bulk_file_methods = {
227 self._bulk_file_methods = {
228 "size": self.fctx_size,
228 "size": self.fctx_size,
229 "data": self.fctx_node_data,
229 "data": self.fctx_node_data,
230 "flags": self.fctx_flags,
230 "flags": self.fctx_flags,
231 "is_binary": self.is_binary,
231 "is_binary": self.is_binary,
232 "md5": self.md5_hash,
232 "md5": self.md5_hash,
233 }
233 }
234
234
235 def _get_ctx(self, repo, ref):
235 def _get_ctx(self, repo, ref):
236 return get_ctx(repo, ref)
236 return get_ctx(repo, ref)
237
237
238 @reraise_safe_exceptions
238 @reraise_safe_exceptions
239 def discover_hg_version(self):
239 def discover_hg_version(self):
240 from mercurial import util
240 from mercurial import util
241 return safe_str(util.version())
241 return safe_str(util.version())
242
242
243 @reraise_safe_exceptions
243 @reraise_safe_exceptions
244 def is_empty(self, wire):
244 def is_empty(self, wire):
245 repo = self._factory.repo(wire)
245 repo = self._factory.repo(wire)
246
246
247 try:
247 try:
248 return len(repo) == 0
248 return len(repo) == 0
249 except Exception:
249 except Exception:
250 log.exception("failed to read object_store")
250 log.exception("failed to read object_store")
251 return False
251 return False
252
252
253 @reraise_safe_exceptions
253 @reraise_safe_exceptions
254 def bookmarks(self, wire):
254 def bookmarks(self, wire):
255 cache_on, context_uid, repo_id = self._cache_on(wire)
255 cache_on, context_uid, repo_id = self._cache_on(wire)
256 region = self._region(wire)
256 region = self._region(wire)
257
257
258 @region.conditional_cache_on_arguments(condition=cache_on)
258 @region.conditional_cache_on_arguments(condition=cache_on)
259 def _bookmarks(_context_uid, _repo_id):
259 def _bookmarks(_context_uid, _repo_id):
260 repo = self._factory.repo(wire)
260 repo = self._factory.repo(wire)
261 return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo._bookmarks.items()}
261 return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo._bookmarks.items()}
262
262
263 return _bookmarks(context_uid, repo_id)
263 return _bookmarks(context_uid, repo_id)
264
264
265 @reraise_safe_exceptions
265 @reraise_safe_exceptions
266 def branches(self, wire, normal, closed):
266 def branches(self, wire, normal, closed):
267 cache_on, context_uid, repo_id = self._cache_on(wire)
267 cache_on, context_uid, repo_id = self._cache_on(wire)
268 region = self._region(wire)
268 region = self._region(wire)
269
269
270 @region.conditional_cache_on_arguments(condition=cache_on)
270 @region.conditional_cache_on_arguments(condition=cache_on)
271 def _branches(_context_uid, _repo_id, _normal, _closed):
271 def _branches(_context_uid, _repo_id, _normal, _closed):
272 repo = self._factory.repo(wire)
272 repo = self._factory.repo(wire)
273 iter_branches = repo.branchmap().iterbranches()
273 iter_branches = repo.branchmap().iterbranches()
274 bt = {}
274 bt = {}
275 for branch_name, _heads, tip_node, is_closed in iter_branches:
275 for branch_name, _heads, tip_node, is_closed in iter_branches:
276 if normal and not is_closed:
276 if normal and not is_closed:
277 bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
277 bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
278 if closed and is_closed:
278 if closed and is_closed:
279 bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
279 bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
280
280
281 return bt
281 return bt
282
282
283 return _branches(context_uid, repo_id, normal, closed)
283 return _branches(context_uid, repo_id, normal, closed)
284
284
285 @reraise_safe_exceptions
285 @reraise_safe_exceptions
286 def bulk_request(self, wire, commit_id, pre_load):
286 def bulk_request(self, wire, commit_id, pre_load):
287 cache_on, context_uid, repo_id = self._cache_on(wire)
287 cache_on, context_uid, repo_id = self._cache_on(wire)
288 region = self._region(wire)
288 region = self._region(wire)
289
289
290 @region.conditional_cache_on_arguments(condition=cache_on)
290 @region.conditional_cache_on_arguments(condition=cache_on)
291 def _bulk_request(_repo_id, _commit_id, _pre_load):
291 def _bulk_request(_repo_id, _commit_id, _pre_load):
292 result = {}
292 result = {}
293 for attr in pre_load:
293 for attr in pre_load:
294 try:
294 try:
295 method = self._bulk_methods[attr]
295 method = self._bulk_methods[attr]
296 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
296 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
297 result[attr] = method(wire, commit_id)
297 result[attr] = method(wire, commit_id)
298 except KeyError as e:
298 except KeyError as e:
299 raise exceptions.VcsException(e)(
299 raise exceptions.VcsException(e)(
300 'Unknown bulk attribute: "%s"' % attr)
300 'Unknown bulk attribute: "%s"' % attr)
301 return result
301 return result
302
302
303 return _bulk_request(repo_id, commit_id, sorted(pre_load))
303 return _bulk_request(repo_id, commit_id, sorted(pre_load))
304
304
305 @reraise_safe_exceptions
305 @reraise_safe_exceptions
306 def ctx_branch(self, wire, commit_id):
306 def ctx_branch(self, wire, commit_id):
307 cache_on, context_uid, repo_id = self._cache_on(wire)
307 cache_on, context_uid, repo_id = self._cache_on(wire)
308 region = self._region(wire)
308 region = self._region(wire)
309
309
310 @region.conditional_cache_on_arguments(condition=cache_on)
310 @region.conditional_cache_on_arguments(condition=cache_on)
311 def _ctx_branch(_repo_id, _commit_id):
311 def _ctx_branch(_repo_id, _commit_id):
312 repo = self._factory.repo(wire)
312 repo = self._factory.repo(wire)
313 ctx = self._get_ctx(repo, commit_id)
313 ctx = self._get_ctx(repo, commit_id)
314 return ctx.branch()
314 return ctx.branch()
315 return _ctx_branch(repo_id, commit_id)
315 return _ctx_branch(repo_id, commit_id)
316
316
317 @reraise_safe_exceptions
317 @reraise_safe_exceptions
318 def ctx_date(self, wire, commit_id):
318 def ctx_date(self, wire, commit_id):
319 cache_on, context_uid, repo_id = self._cache_on(wire)
319 cache_on, context_uid, repo_id = self._cache_on(wire)
320 region = self._region(wire)
320 region = self._region(wire)
321
321
322 @region.conditional_cache_on_arguments(condition=cache_on)
322 @region.conditional_cache_on_arguments(condition=cache_on)
323 def _ctx_date(_repo_id, _commit_id):
323 def _ctx_date(_repo_id, _commit_id):
324 repo = self._factory.repo(wire)
324 repo = self._factory.repo(wire)
325 ctx = self._get_ctx(repo, commit_id)
325 ctx = self._get_ctx(repo, commit_id)
326 return ctx.date()
326 return ctx.date()
327 return _ctx_date(repo_id, commit_id)
327 return _ctx_date(repo_id, commit_id)
328
328
329 @reraise_safe_exceptions
329 @reraise_safe_exceptions
330 def ctx_description(self, wire, revision):
330 def ctx_description(self, wire, revision):
331 repo = self._factory.repo(wire)
331 repo = self._factory.repo(wire)
332 ctx = self._get_ctx(repo, revision)
332 ctx = self._get_ctx(repo, revision)
333 return ctx.description()
333 return ctx.description()
334
334
335 @reraise_safe_exceptions
335 @reraise_safe_exceptions
336 def ctx_files(self, wire, commit_id):
336 def ctx_files(self, wire, commit_id):
337 cache_on, context_uid, repo_id = self._cache_on(wire)
337 cache_on, context_uid, repo_id = self._cache_on(wire)
338 region = self._region(wire)
338 region = self._region(wire)
339
339
340 @region.conditional_cache_on_arguments(condition=cache_on)
340 @region.conditional_cache_on_arguments(condition=cache_on)
341 def _ctx_files(_repo_id, _commit_id):
341 def _ctx_files(_repo_id, _commit_id):
342 repo = self._factory.repo(wire)
342 repo = self._factory.repo(wire)
343 ctx = self._get_ctx(repo, commit_id)
343 ctx = self._get_ctx(repo, commit_id)
344 return ctx.files()
344 return ctx.files()
345
345
346 return _ctx_files(repo_id, commit_id)
346 return _ctx_files(repo_id, commit_id)
347
347
348 @reraise_safe_exceptions
348 @reraise_safe_exceptions
349 def ctx_list(self, path, revision):
349 def ctx_list(self, path, revision):
350 repo = self._factory.repo(path)
350 repo = self._factory.repo(path)
351 ctx = self._get_ctx(repo, revision)
351 ctx = self._get_ctx(repo, revision)
352 return list(ctx)
352 return list(ctx)
353
353
354 @reraise_safe_exceptions
354 @reraise_safe_exceptions
355 def ctx_parents(self, wire, commit_id):
355 def ctx_parents(self, wire, commit_id):
356 cache_on, context_uid, repo_id = self._cache_on(wire)
356 cache_on, context_uid, repo_id = self._cache_on(wire)
357 region = self._region(wire)
357 region = self._region(wire)
358
358
359 @region.conditional_cache_on_arguments(condition=cache_on)
359 @region.conditional_cache_on_arguments(condition=cache_on)
360 def _ctx_parents(_repo_id, _commit_id):
360 def _ctx_parents(_repo_id, _commit_id):
361 repo = self._factory.repo(wire)
361 repo = self._factory.repo(wire)
362 ctx = self._get_ctx(repo, commit_id)
362 ctx = self._get_ctx(repo, commit_id)
363 return [parent.hex() for parent in ctx.parents()
363 return [parent.hex() for parent in ctx.parents()
364 if not (parent.hidden() or parent.obsolete())]
364 if not (parent.hidden() or parent.obsolete())]
365
365
366 return _ctx_parents(repo_id, commit_id)
366 return _ctx_parents(repo_id, commit_id)
367
367
368 @reraise_safe_exceptions
368 @reraise_safe_exceptions
369 def ctx_children(self, wire, commit_id):
369 def ctx_children(self, wire, commit_id):
370 cache_on, context_uid, repo_id = self._cache_on(wire)
370 cache_on, context_uid, repo_id = self._cache_on(wire)
371 region = self._region(wire)
371 region = self._region(wire)
372
372
373 @region.conditional_cache_on_arguments(condition=cache_on)
373 @region.conditional_cache_on_arguments(condition=cache_on)
374 def _ctx_children(_repo_id, _commit_id):
374 def _ctx_children(_repo_id, _commit_id):
375 repo = self._factory.repo(wire)
375 repo = self._factory.repo(wire)
376 ctx = self._get_ctx(repo, commit_id)
376 ctx = self._get_ctx(repo, commit_id)
377 return [child.hex() for child in ctx.children()
377 return [child.hex() for child in ctx.children()
378 if not (child.hidden() or child.obsolete())]
378 if not (child.hidden() or child.obsolete())]
379
379
380 return _ctx_children(repo_id, commit_id)
380 return _ctx_children(repo_id, commit_id)
381
381
382 @reraise_safe_exceptions
382 @reraise_safe_exceptions
383 def ctx_phase(self, wire, commit_id):
383 def ctx_phase(self, wire, commit_id):
384 cache_on, context_uid, repo_id = self._cache_on(wire)
384 cache_on, context_uid, repo_id = self._cache_on(wire)
385 region = self._region(wire)
385 region = self._region(wire)
386
386
387 @region.conditional_cache_on_arguments(condition=cache_on)
387 @region.conditional_cache_on_arguments(condition=cache_on)
388 def _ctx_phase(_context_uid, _repo_id, _commit_id):
388 def _ctx_phase(_context_uid, _repo_id, _commit_id):
389 repo = self._factory.repo(wire)
389 repo = self._factory.repo(wire)
390 ctx = self._get_ctx(repo, commit_id)
390 ctx = self._get_ctx(repo, commit_id)
391 # public=0, draft=1, secret=3
391 # public=0, draft=1, secret=3
392 return ctx.phase()
392 return ctx.phase()
393 return _ctx_phase(context_uid, repo_id, commit_id)
393 return _ctx_phase(context_uid, repo_id, commit_id)
394
394
395 @reraise_safe_exceptions
395 @reraise_safe_exceptions
396 def ctx_obsolete(self, wire, commit_id):
396 def ctx_obsolete(self, wire, commit_id):
397 cache_on, context_uid, repo_id = self._cache_on(wire)
397 cache_on, context_uid, repo_id = self._cache_on(wire)
398 region = self._region(wire)
398 region = self._region(wire)
399
399
400 @region.conditional_cache_on_arguments(condition=cache_on)
400 @region.conditional_cache_on_arguments(condition=cache_on)
401 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
401 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
402 repo = self._factory.repo(wire)
402 repo = self._factory.repo(wire)
403 ctx = self._get_ctx(repo, commit_id)
403 ctx = self._get_ctx(repo, commit_id)
404 return ctx.obsolete()
404 return ctx.obsolete()
405 return _ctx_obsolete(context_uid, repo_id, commit_id)
405 return _ctx_obsolete(context_uid, repo_id, commit_id)
406
406
407 @reraise_safe_exceptions
407 @reraise_safe_exceptions
408 def ctx_hidden(self, wire, commit_id):
408 def ctx_hidden(self, wire, commit_id):
409 cache_on, context_uid, repo_id = self._cache_on(wire)
409 cache_on, context_uid, repo_id = self._cache_on(wire)
410 region = self._region(wire)
410 region = self._region(wire)
411
411
412 @region.conditional_cache_on_arguments(condition=cache_on)
412 @region.conditional_cache_on_arguments(condition=cache_on)
413 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
413 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
414 repo = self._factory.repo(wire)
414 repo = self._factory.repo(wire)
415 ctx = self._get_ctx(repo, commit_id)
415 ctx = self._get_ctx(repo, commit_id)
416 return ctx.hidden()
416 return ctx.hidden()
417 return _ctx_hidden(context_uid, repo_id, commit_id)
417 return _ctx_hidden(context_uid, repo_id, commit_id)
418
418
419 @reraise_safe_exceptions
419 @reraise_safe_exceptions
420 def ctx_substate(self, wire, revision):
420 def ctx_substate(self, wire, revision):
421 repo = self._factory.repo(wire)
421 repo = self._factory.repo(wire)
422 ctx = self._get_ctx(repo, revision)
422 ctx = self._get_ctx(repo, revision)
423 return ctx.substate
423 return ctx.substate
424
424
425 @reraise_safe_exceptions
425 @reraise_safe_exceptions
426 def ctx_status(self, wire, revision):
426 def ctx_status(self, wire, revision):
427 repo = self._factory.repo(wire)
427 repo = self._factory.repo(wire)
428 ctx = self._get_ctx(repo, revision)
428 ctx = self._get_ctx(repo, revision)
429 status = repo[ctx.p1().node()].status(other=ctx.node())
429 status = repo[ctx.p1().node()].status(other=ctx.node())
430 # object of status (odd, custom named tuple in mercurial) is not
430 # object of status (odd, custom named tuple in mercurial) is not
431 # correctly serializable, we make it a list, as the underling
431 # correctly serializable, we make it a list, as the underling
432 # API expects this to be a list
432 # API expects this to be a list
433 return list(status)
433 return list(status)
434
434
435 @reraise_safe_exceptions
435 @reraise_safe_exceptions
436 def ctx_user(self, wire, revision):
436 def ctx_user(self, wire, revision):
437 repo = self._factory.repo(wire)
437 repo = self._factory.repo(wire)
438 ctx = self._get_ctx(repo, revision)
438 ctx = self._get_ctx(repo, revision)
439 return ctx.user()
439 return ctx.user()
440
440
441 @reraise_safe_exceptions
441 @reraise_safe_exceptions
442 def check_url(self, url, config):
442 def check_url(self, url, config):
443 url, _proto = normalize_url_for_hg(url)
443 url, _proto = normalize_url_for_hg(url)
444 url_obj = url_parser(safe_bytes(url))
444 url_obj = url_parser(safe_bytes(url))
445
445
446 test_uri = safe_str(url_obj.authinfo()[0])
446 test_uri = safe_str(url_obj.authinfo()[0])
447 authinfo = url_obj.authinfo()[1]
447 authinfo = url_obj.authinfo()[1]
448 obfuscated_uri = get_obfuscated_url(url_obj)
448 obfuscated_uri = get_obfuscated_url(url_obj)
449 log.info("Checking URL for remote cloning/import: %s", obfuscated_uri)
449 log.info("Checking URL for remote cloning/import: %s", obfuscated_uri)
450
450
451 handlers = []
451 handlers = []
452 if authinfo:
452 if authinfo:
453 # create a password manager
453 # create a password manager
454 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
454 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
455 passmgr.add_password(*authinfo)
455 passmgr.add_password(*authinfo)
456
456
457 handlers.extend((httpbasicauthhandler(passmgr),
457 handlers.extend((httpbasicauthhandler(passmgr),
458 httpdigestauthhandler(passmgr)))
458 httpdigestauthhandler(passmgr)))
459
459
460 o = urllib.request.build_opener(*handlers)
460 o = urllib.request.build_opener(*handlers)
461 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
461 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
462 ('Accept', 'application/mercurial-0.1')]
462 ('Accept', 'application/mercurial-0.1')]
463
463
464 q = {"cmd": 'between'}
464 q = {"cmd": 'between'}
465 q.update({'pairs': "{}-{}".format('0' * 40, '0' * 40)})
465 q.update({'pairs': "{}-{}".format('0' * 40, '0' * 40)})
466 qs = '?%s' % urllib.parse.urlencode(q)
466 qs = '?%s' % urllib.parse.urlencode(q)
467 cu = f"{test_uri}{qs}"
467 cu = f"{test_uri}{qs}"
468 req = urllib.request.Request(cu, None, {})
468 req = urllib.request.Request(cu, None, {})
469
469
470 try:
470 try:
471 log.debug("Trying to open URL %s", obfuscated_uri)
471 log.debug("Trying to open URL %s", obfuscated_uri)
472 resp = o.open(req)
472 resp = o.open(req)
473 if resp.code != 200:
473 if resp.code != 200:
474 raise exceptions.URLError()('Return Code is not 200')
474 raise exceptions.URLError()('Return Code is not 200')
475 except Exception as e:
475 except Exception as e:
476 log.warning("URL cannot be opened: %s", obfuscated_uri, exc_info=True)
476 log.warning("URL cannot be opened: %s", obfuscated_uri, exc_info=True)
477 # means it cannot be cloned
477 # means it cannot be cloned
478 raise exceptions.URLError(e)(f"[{obfuscated_uri}] org_exc: {e}")
478 raise exceptions.URLError(e)(f"[{obfuscated_uri}] org_exc: {e}")
479
479
480 # now check if it's a proper hg repo, but don't do it for svn
480 # now check if it's a proper hg repo, but don't do it for svn
481 try:
481 try:
482 if _proto == 'svn':
482 if _proto == 'svn':
483 pass
483 pass
484 else:
484 else:
485 # check for pure hg repos
485 # check for pure hg repos
486 log.debug(
486 log.debug(
487 "Verifying if URL is a Mercurial repository: %s", obfuscated_uri)
487 "Verifying if URL is a Mercurial repository: %s", obfuscated_uri)
488 ui = make_ui_from_config(config)
488 ui = make_ui_from_config(config)
489 peer_checker = makepeer(ui, safe_bytes(url))
489 peer_checker = makepeer(ui, safe_bytes(url))
490 peer_checker.lookup(b'tip')
490 peer_checker.lookup(b'tip')
491 except Exception as e:
491 except Exception as e:
492 log.warning("URL is not a valid Mercurial repository: %s",
492 log.warning("URL is not a valid Mercurial repository: %s",
493 obfuscated_uri)
493 obfuscated_uri)
494 raise exceptions.URLError(e)(
494 raise exceptions.URLError(e)(
495 "url [%s] does not look like an hg repo org_exc: %s"
495 "url [%s] does not look like an hg repo org_exc: %s"
496 % (obfuscated_uri, e))
496 % (obfuscated_uri, e))
497
497
498 log.info("URL is a valid Mercurial repository: %s", obfuscated_uri)
498 log.info("URL is a valid Mercurial repository: %s", obfuscated_uri)
499 return True
499 return True
500
500
501 @reraise_safe_exceptions
501 @reraise_safe_exceptions
502 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
502 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
503 repo = self._factory.repo(wire)
503 repo = self._factory.repo(wire)
504
504
505 if file_filter:
505 if file_filter:
506 # unpack the file-filter
506 # unpack the file-filter
507 repo_path, node_path = file_filter
507 repo_path, node_path = file_filter
508 match_filter = match(safe_bytes(repo_path), b'', [safe_bytes(node_path)])
508 match_filter = match(safe_bytes(repo_path), b'', [safe_bytes(node_path)])
509 else:
509 else:
510 match_filter = file_filter
510 match_filter = file_filter
511 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
511 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
512
512
513 try:
513 try:
514 diff_iter = patch.diff(
514 diff_iter = patch.diff(
515 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts)
515 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts)
516 return BytesEnvelope(b"".join(diff_iter))
516 return BytesEnvelope(b"".join(diff_iter))
517 except RepoLookupError as e:
517 except RepoLookupError as e:
518 raise exceptions.LookupException(e)()
518 raise exceptions.LookupException(e)()
519
519
520 @reraise_safe_exceptions
520 @reraise_safe_exceptions
521 def node_history(self, wire, revision, path, limit):
521 def node_history(self, wire, revision, path, limit):
522 cache_on, context_uid, repo_id = self._cache_on(wire)
522 cache_on, context_uid, repo_id = self._cache_on(wire)
523 region = self._region(wire)
523 region = self._region(wire)
524
524
525 @region.conditional_cache_on_arguments(condition=cache_on)
525 @region.conditional_cache_on_arguments(condition=cache_on)
526 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
526 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
527 repo = self._factory.repo(wire)
527 repo = self._factory.repo(wire)
528
528
529 ctx = self._get_ctx(repo, revision)
529 ctx = self._get_ctx(repo, revision)
530 fctx = ctx.filectx(safe_bytes(path))
530 fctx = ctx.filectx(safe_bytes(path))
531
531
532 def history_iter():
532 def history_iter():
533 limit_rev = fctx.rev()
533 limit_rev = fctx.rev()
534 for obj in reversed(list(fctx.filelog())):
534 for obj in reversed(list(fctx.filelog())):
535 obj = fctx.filectx(obj)
535 obj = fctx.filectx(obj)
536 ctx = obj.changectx()
536 ctx = obj.changectx()
537 if ctx.hidden() or ctx.obsolete():
537 if ctx.hidden() or ctx.obsolete():
538 continue
538 continue
539
539
540 if limit_rev >= obj.rev():
540 if limit_rev >= obj.rev():
541 yield obj
541 yield obj
542
542
543 history = []
543 history = []
544 for cnt, obj in enumerate(history_iter()):
544 for cnt, obj in enumerate(history_iter()):
545 if limit and cnt >= limit:
545 if limit and cnt >= limit:
546 break
546 break
547 history.append(hex(obj.node()))
547 history.append(hex(obj.node()))
548
548
549 return [x for x in history]
549 return [x for x in history]
550 return _node_history(context_uid, repo_id, revision, path, limit)
550 return _node_history(context_uid, repo_id, revision, path, limit)
551
551
552 @reraise_safe_exceptions
552 @reraise_safe_exceptions
553 def node_history_untill(self, wire, revision, path, limit):
553 def node_history_untill(self, wire, revision, path, limit):
554 cache_on, context_uid, repo_id = self._cache_on(wire)
554 cache_on, context_uid, repo_id = self._cache_on(wire)
555 region = self._region(wire)
555 region = self._region(wire)
556
556
557 @region.conditional_cache_on_arguments(condition=cache_on)
557 @region.conditional_cache_on_arguments(condition=cache_on)
558 def _node_history_until(_context_uid, _repo_id):
558 def _node_history_until(_context_uid, _repo_id):
559 repo = self._factory.repo(wire)
559 repo = self._factory.repo(wire)
560 ctx = self._get_ctx(repo, revision)
560 ctx = self._get_ctx(repo, revision)
561 fctx = ctx.filectx(safe_bytes(path))
561 fctx = ctx.filectx(safe_bytes(path))
562
562
563 file_log = list(fctx.filelog())
563 file_log = list(fctx.filelog())
564 if limit:
564 if limit:
565 # Limit to the last n items
565 # Limit to the last n items
566 file_log = file_log[-limit:]
566 file_log = file_log[-limit:]
567
567
568 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
568 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
569 return _node_history_until(context_uid, repo_id, revision, path, limit)
569 return _node_history_until(context_uid, repo_id, revision, path, limit)
570
570
571 @reraise_safe_exceptions
571 @reraise_safe_exceptions
572 def bulk_file_request(self, wire, commit_id, path, pre_load):
572 def bulk_file_request(self, wire, commit_id, path, pre_load):
573 cache_on, context_uid, repo_id = self._cache_on(wire)
573 cache_on, context_uid, repo_id = self._cache_on(wire)
574 region = self._region(wire)
574 region = self._region(wire)
575
575
576 @region.conditional_cache_on_arguments(condition=cache_on)
576 @region.conditional_cache_on_arguments(condition=cache_on)
577 def _bulk_file_request(_repo_id, _commit_id, _path, _pre_load):
577 def _bulk_file_request(_repo_id, _commit_id, _path, _pre_load):
578 result = {}
578 result = {}
579 for attr in pre_load:
579 for attr in pre_load:
580 try:
580 try:
581 method = self._bulk_file_methods[attr]
581 method = self._bulk_file_methods[attr]
582 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
582 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
583 result[attr] = method(wire, _commit_id, _path)
583 result[attr] = method(wire, _commit_id, _path)
584 except KeyError as e:
584 except KeyError as e:
585 raise exceptions.VcsException(e)(f'Unknown bulk attribute: "{attr}"')
585 raise exceptions.VcsException(e)(f'Unknown bulk attribute: "{attr}"')
586 return result
586 return result
587
587
588 return BinaryEnvelope(_bulk_file_request(repo_id, commit_id, path, sorted(pre_load)))
588 return BinaryEnvelope(_bulk_file_request(repo_id, commit_id, path, sorted(pre_load)))
589
589
590 @reraise_safe_exceptions
590 @reraise_safe_exceptions
591 def fctx_annotate(self, wire, revision, path):
591 def fctx_annotate(self, wire, revision, path):
592 repo = self._factory.repo(wire)
592 repo = self._factory.repo(wire)
593 ctx = self._get_ctx(repo, revision)
593 ctx = self._get_ctx(repo, revision)
594 fctx = ctx.filectx(safe_bytes(path))
594 fctx = ctx.filectx(safe_bytes(path))
595
595
596 result = []
596 result = []
597 for i, annotate_obj in enumerate(fctx.annotate(), 1):
597 for i, annotate_obj in enumerate(fctx.annotate(), 1):
598 ln_no = i
598 ln_no = i
599 sha = hex(annotate_obj.fctx.node())
599 sha = hex(annotate_obj.fctx.node())
600 content = annotate_obj.text
600 content = annotate_obj.text
601 result.append((ln_no, sha, content))
601 result.append((ln_no, ascii_str(sha), content))
602 return result
602 return BinaryEnvelope(result)
603
603
604 @reraise_safe_exceptions
604 @reraise_safe_exceptions
605 def fctx_node_data(self, wire, revision, path):
605 def fctx_node_data(self, wire, revision, path):
606 repo = self._factory.repo(wire)
606 repo = self._factory.repo(wire)
607 ctx = self._get_ctx(repo, revision)
607 ctx = self._get_ctx(repo, revision)
608 fctx = ctx.filectx(safe_bytes(path))
608 fctx = ctx.filectx(safe_bytes(path))
609 return BytesEnvelope(fctx.data())
609 return BytesEnvelope(fctx.data())
610
610
611 @reraise_safe_exceptions
611 @reraise_safe_exceptions
612 def fctx_flags(self, wire, commit_id, path):
612 def fctx_flags(self, wire, commit_id, path):
613 cache_on, context_uid, repo_id = self._cache_on(wire)
613 cache_on, context_uid, repo_id = self._cache_on(wire)
614 region = self._region(wire)
614 region = self._region(wire)
615
615
616 @region.conditional_cache_on_arguments(condition=cache_on)
616 @region.conditional_cache_on_arguments(condition=cache_on)
617 def _fctx_flags(_repo_id, _commit_id, _path):
617 def _fctx_flags(_repo_id, _commit_id, _path):
618 repo = self._factory.repo(wire)
618 repo = self._factory.repo(wire)
619 ctx = self._get_ctx(repo, commit_id)
619 ctx = self._get_ctx(repo, commit_id)
620 fctx = ctx.filectx(safe_bytes(path))
620 fctx = ctx.filectx(safe_bytes(path))
621 return fctx.flags()
621 return fctx.flags()
622
622
623 return _fctx_flags(repo_id, commit_id, path)
623 return _fctx_flags(repo_id, commit_id, path)
624
624
625 @reraise_safe_exceptions
625 @reraise_safe_exceptions
626 def fctx_size(self, wire, commit_id, path):
626 def fctx_size(self, wire, commit_id, path):
627 cache_on, context_uid, repo_id = self._cache_on(wire)
627 cache_on, context_uid, repo_id = self._cache_on(wire)
628 region = self._region(wire)
628 region = self._region(wire)
629
629
630 @region.conditional_cache_on_arguments(condition=cache_on)
630 @region.conditional_cache_on_arguments(condition=cache_on)
631 def _fctx_size(_repo_id, _revision, _path):
631 def _fctx_size(_repo_id, _revision, _path):
632 repo = self._factory.repo(wire)
632 repo = self._factory.repo(wire)
633 ctx = self._get_ctx(repo, commit_id)
633 ctx = self._get_ctx(repo, commit_id)
634 fctx = ctx.filectx(safe_bytes(path))
634 fctx = ctx.filectx(safe_bytes(path))
635 return fctx.size()
635 return fctx.size()
636 return _fctx_size(repo_id, commit_id, path)
636 return _fctx_size(repo_id, commit_id, path)
637
637
638 @reraise_safe_exceptions
638 @reraise_safe_exceptions
639 def get_all_commit_ids(self, wire, name):
639 def get_all_commit_ids(self, wire, name):
640 cache_on, context_uid, repo_id = self._cache_on(wire)
640 cache_on, context_uid, repo_id = self._cache_on(wire)
641 region = self._region(wire)
641 region = self._region(wire)
642
642
643 @region.conditional_cache_on_arguments(condition=cache_on)
643 @region.conditional_cache_on_arguments(condition=cache_on)
644 def _get_all_commit_ids(_context_uid, _repo_id, _name):
644 def _get_all_commit_ids(_context_uid, _repo_id, _name):
645 repo = self._factory.repo(wire)
645 repo = self._factory.repo(wire)
646 revs = [ascii_str(repo[x].hex()) for x in repo.filtered(b'visible').changelog.revs()]
646 revs = [ascii_str(repo[x].hex()) for x in repo.filtered(b'visible').changelog.revs()]
647 return revs
647 return revs
648 return _get_all_commit_ids(context_uid, repo_id, name)
648 return _get_all_commit_ids(context_uid, repo_id, name)
649
649
650 @reraise_safe_exceptions
650 @reraise_safe_exceptions
651 def get_config_value(self, wire, section, name, untrusted=False):
651 def get_config_value(self, wire, section, name, untrusted=False):
652 repo = self._factory.repo(wire)
652 repo = self._factory.repo(wire)
653 return repo.ui.config(ascii_bytes(section), ascii_bytes(name), untrusted=untrusted)
653 return repo.ui.config(ascii_bytes(section), ascii_bytes(name), untrusted=untrusted)
654
654
655 @reraise_safe_exceptions
655 @reraise_safe_exceptions
656 def is_large_file(self, wire, commit_id, path):
656 def is_large_file(self, wire, commit_id, path):
657 cache_on, context_uid, repo_id = self._cache_on(wire)
657 cache_on, context_uid, repo_id = self._cache_on(wire)
658 region = self._region(wire)
658 region = self._region(wire)
659
659
660 @region.conditional_cache_on_arguments(condition=cache_on)
660 @region.conditional_cache_on_arguments(condition=cache_on)
661 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
661 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
662 return largefiles.lfutil.isstandin(safe_bytes(path))
662 return largefiles.lfutil.isstandin(safe_bytes(path))
663
663
664 return _is_large_file(context_uid, repo_id, commit_id, path)
664 return _is_large_file(context_uid, repo_id, commit_id, path)
665
665
666 @reraise_safe_exceptions
666 @reraise_safe_exceptions
667 def is_binary(self, wire, revision, path):
667 def is_binary(self, wire, revision, path):
668 cache_on, context_uid, repo_id = self._cache_on(wire)
668 cache_on, context_uid, repo_id = self._cache_on(wire)
669 region = self._region(wire)
669 region = self._region(wire)
670
670
671 @region.conditional_cache_on_arguments(condition=cache_on)
671 @region.conditional_cache_on_arguments(condition=cache_on)
672 def _is_binary(_repo_id, _sha, _path):
672 def _is_binary(_repo_id, _sha, _path):
673 repo = self._factory.repo(wire)
673 repo = self._factory.repo(wire)
674 ctx = self._get_ctx(repo, revision)
674 ctx = self._get_ctx(repo, revision)
675 fctx = ctx.filectx(safe_bytes(path))
675 fctx = ctx.filectx(safe_bytes(path))
676 return fctx.isbinary()
676 return fctx.isbinary()
677
677
678 return _is_binary(repo_id, revision, path)
678 return _is_binary(repo_id, revision, path)
679
679
680 @reraise_safe_exceptions
680 @reraise_safe_exceptions
681 def md5_hash(self, wire, revision, path):
681 def md5_hash(self, wire, revision, path):
682 cache_on, context_uid, repo_id = self._cache_on(wire)
682 cache_on, context_uid, repo_id = self._cache_on(wire)
683 region = self._region(wire)
683 region = self._region(wire)
684
684
685 @region.conditional_cache_on_arguments(condition=cache_on)
685 @region.conditional_cache_on_arguments(condition=cache_on)
686 def _md5_hash(_repo_id, _sha, _path):
686 def _md5_hash(_repo_id, _sha, _path):
687 repo = self._factory.repo(wire)
687 repo = self._factory.repo(wire)
688 ctx = self._get_ctx(repo, revision)
688 ctx = self._get_ctx(repo, revision)
689 fctx = ctx.filectx(safe_bytes(path))
689 fctx = ctx.filectx(safe_bytes(path))
690 return hashlib.md5(fctx.data()).hexdigest()
690 return hashlib.md5(fctx.data()).hexdigest()
691
691
692 return _md5_hash(repo_id, revision, path)
692 return _md5_hash(repo_id, revision, path)
693
693
694 @reraise_safe_exceptions
694 @reraise_safe_exceptions
695 def in_largefiles_store(self, wire, sha):
695 def in_largefiles_store(self, wire, sha):
696 repo = self._factory.repo(wire)
696 repo = self._factory.repo(wire)
697 return largefiles.lfutil.instore(repo, sha)
697 return largefiles.lfutil.instore(repo, sha)
698
698
699 @reraise_safe_exceptions
699 @reraise_safe_exceptions
700 def in_user_cache(self, wire, sha):
700 def in_user_cache(self, wire, sha):
701 repo = self._factory.repo(wire)
701 repo = self._factory.repo(wire)
702 return largefiles.lfutil.inusercache(repo.ui, sha)
702 return largefiles.lfutil.inusercache(repo.ui, sha)
703
703
704 @reraise_safe_exceptions
704 @reraise_safe_exceptions
705 def store_path(self, wire, sha):
705 def store_path(self, wire, sha):
706 repo = self._factory.repo(wire)
706 repo = self._factory.repo(wire)
707 return largefiles.lfutil.storepath(repo, sha)
707 return largefiles.lfutil.storepath(repo, sha)
708
708
709 @reraise_safe_exceptions
709 @reraise_safe_exceptions
710 def link(self, wire, sha, path):
710 def link(self, wire, sha, path):
711 repo = self._factory.repo(wire)
711 repo = self._factory.repo(wire)
712 largefiles.lfutil.link(
712 largefiles.lfutil.link(
713 largefiles.lfutil.usercachepath(repo.ui, sha), path)
713 largefiles.lfutil.usercachepath(repo.ui, sha), path)
714
714
715 @reraise_safe_exceptions
715 @reraise_safe_exceptions
716 def localrepository(self, wire, create=False):
716 def localrepository(self, wire, create=False):
717 self._factory.repo(wire, create=create)
717 self._factory.repo(wire, create=create)
718
718
719 @reraise_safe_exceptions
719 @reraise_safe_exceptions
720 def lookup(self, wire, revision, both):
720 def lookup(self, wire, revision, both):
721 cache_on, context_uid, repo_id = self._cache_on(wire)
721 cache_on, context_uid, repo_id = self._cache_on(wire)
722 region = self._region(wire)
722 region = self._region(wire)
723
723
724 @region.conditional_cache_on_arguments(condition=cache_on)
724 @region.conditional_cache_on_arguments(condition=cache_on)
725 def _lookup(_context_uid, _repo_id, _revision, _both):
725 def _lookup(_context_uid, _repo_id, _revision, _both):
726 repo = self._factory.repo(wire)
726 repo = self._factory.repo(wire)
727 rev = _revision
727 rev = _revision
728 if isinstance(rev, int):
728 if isinstance(rev, int):
729 # NOTE(marcink):
729 # NOTE(marcink):
730 # since Mercurial doesn't support negative indexes properly
730 # since Mercurial doesn't support negative indexes properly
731 # we need to shift accordingly by one to get proper index, e.g
731 # we need to shift accordingly by one to get proper index, e.g
732 # repo[-1] => repo[-2]
732 # repo[-1] => repo[-2]
733 # repo[0] => repo[-1]
733 # repo[0] => repo[-1]
734 if rev <= 0:
734 if rev <= 0:
735 rev = rev + -1
735 rev = rev + -1
736 try:
736 try:
737 ctx = self._get_ctx(repo, rev)
737 ctx = self._get_ctx(repo, rev)
738 except (TypeError, RepoLookupError, binascii.Error) as e:
738 except (TypeError, RepoLookupError, binascii.Error) as e:
739 e._org_exc_tb = traceback.format_exc()
739 e._org_exc_tb = traceback.format_exc()
740 raise exceptions.LookupException(e)(rev)
740 raise exceptions.LookupException(e)(rev)
741 except LookupError as e:
741 except LookupError as e:
742 e._org_exc_tb = traceback.format_exc()
742 e._org_exc_tb = traceback.format_exc()
743 raise exceptions.LookupException(e)(e.name)
743 raise exceptions.LookupException(e)(e.name)
744
744
745 if not both:
745 if not both:
746 return ctx.hex()
746 return ctx.hex()
747
747
748 ctx = repo[ctx.hex()]
748 ctx = repo[ctx.hex()]
749 return ctx.hex(), ctx.rev()
749 return ctx.hex(), ctx.rev()
750
750
751 return _lookup(context_uid, repo_id, revision, both)
751 return _lookup(context_uid, repo_id, revision, both)
752
752
753 @reraise_safe_exceptions
753 @reraise_safe_exceptions
754 def sync_push(self, wire, url):
754 def sync_push(self, wire, url):
755 if not self.check_url(url, wire['config']):
755 if not self.check_url(url, wire['config']):
756 return
756 return
757
757
758 repo = self._factory.repo(wire)
758 repo = self._factory.repo(wire)
759
759
760 # Disable any prompts for this repo
760 # Disable any prompts for this repo
761 repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
761 repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
762
762
763 bookmarks = list(dict(repo._bookmarks).keys())
763 bookmarks = list(dict(repo._bookmarks).keys())
764 remote = peer(repo, {}, safe_bytes(url))
764 remote = peer(repo, {}, safe_bytes(url))
765 # Disable any prompts for this remote
765 # Disable any prompts for this remote
766 remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
766 remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
767
767
768 return exchange.push(
768 return exchange.push(
769 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
769 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
770
770
771 @reraise_safe_exceptions
771 @reraise_safe_exceptions
772 def revision(self, wire, rev):
772 def revision(self, wire, rev):
773 repo = self._factory.repo(wire)
773 repo = self._factory.repo(wire)
774 ctx = self._get_ctx(repo, rev)
774 ctx = self._get_ctx(repo, rev)
775 return ctx.rev()
775 return ctx.rev()
776
776
@reraise_safe_exceptions
def rev_range(self, wire, commit_filter):
    """
    Return hex commit ids matching the revset expressions in
    ``commit_filter``, cached per repository context.
    """
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _rev_range(_context_uid, _repo_id, _filter):
        repo = self._factory.repo(wire)
        revisions = [
            ascii_str(repo[rev].hex())
            for rev in revrange(repo, list(map(ascii_bytes, commit_filter)))
        ]
        return revisions

    # filter is sorted so the cache key is stable regardless of input order
    return _rev_range(context_uid, repo_id, sorted(commit_filter))
792
792
@reraise_safe_exceptions
def rev_range_hash(self, wire, node):
    """Return hex commit ids for every revision from ``node`` up to tip."""
    repo = self._factory.repo(wire)

    def _bounds(hg_repo, rev_opt):
        # Resolve the (max, min) revision pair for the given revset option;
        # no option means the whole repo, an empty revset means (null, null).
        if not rev_opt:
            return len(hg_repo) - 1, 0
        found = revrange(hg_repo, rev_opt)
        if len(found) == 0:
            return (nullrev, nullrev)
        return max(found), min(found)

    stop, start = _bounds(repo, [node + ':'])
    return [ascii_str(repo[pos].hex()) for pos in range(start, stop + 1)]
809
809
@reraise_safe_exceptions
def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
    """
    Evaluate a Mercurial revset ``rev_spec`` and return the matching
    revision numbers.

    When ``other_path`` (kwarg) points at a different repository, the
    revset is evaluated against a temporary union of both repositories.
    """
    org_path = safe_bytes(wire["path"])
    other_path = safe_bytes(kwargs.pop('other_path', ''))

    # case when we want to compare two independent repositories
    # BUGFIX: compare bytes to bytes. The previous code compared
    # `other_path` (bytes) against wire["path"] (str), which is never
    # equal on python3, so a union repo was built even when both paths
    # referred to the same repository.
    if other_path and other_path != org_path:
        baseui = self._factory._create_config(wire["config"])
        repo = unionrepo.makeunionrepository(baseui, other_path, org_path)
    else:
        repo = self._factory.repo(wire)
    return list(repo.revs(rev_spec, *args))
822
822
@reraise_safe_exceptions
def verify(self, wire,):
    """Run `hg verify` on the repository and return its textual output."""
    repo = self._factory.repo(wire)
    baseui = self._factory._create_config(wire['config'])

    # capture ui messages into a buffer instead of writing to stdout
    baseui, output = patch_ui_message_output(baseui)

    repo.ui = baseui
    # `verify` here resolves to the module-level mercurial verify module,
    # not this method (method names are not in the function's scope)
    verify.verify(repo)
    return output.getvalue()
833
833
@reraise_safe_exceptions
def hg_update_cache(self, wire,):
    """
    Rebuild all repository caches (equivalent to
    `hg debugupdatecaches --full`) and return the captured ui output.
    """
    repo = self._factory.repo(wire)
    baseui = self._factory._create_config(wire['config'])
    baseui, output = patch_ui_message_output(baseui)

    repo.ui = baseui
    # both the working-dir lock and the store lock are taken for the rebuild
    with repo.wlock(), repo.lock():
        repo.updatecaches(full=True)

    return output.getvalue()
845
845
@reraise_safe_exceptions
def hg_rebuild_fn_cache(self, wire,):
    """
    Rebuild the fncache file (equivalent to `hg debugrebuildfncache`)
    and return the captured ui output.
    """
    repo = self._factory.repo(wire)
    baseui = self._factory._create_config(wire['config'])
    baseui, output = patch_ui_message_output(baseui)

    repo.ui = baseui

    repair.rebuildfncache(baseui, repo)

    return output.getvalue()
857
857
@reraise_safe_exceptions
def tags(self, wire):
    """
    Return a mapping of tag name -> hex commit id, cached per
    repository context.
    """
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _tags(_context_uid, _repo_id):
        repo = self._factory.repo(wire)
        return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo.tags().items()}

    return _tags(context_uid, repo_id)
869
869
@reraise_safe_exceptions
def update(self, wire, node='', clean=False):
    """Update the working directory to ``node`` via the hg update command."""
    repo = self._factory.repo(wire)
    config_ui = self._factory._create_config(wire['config'])
    target = safe_bytes(node)

    commands.update(config_ui, repo, node=target, clean=clean)
877
877
@reraise_safe_exceptions
def identify(self, wire):
    """Return the full hex node id of the working directory parent (bytes)."""
    repo = self._factory.repo(wire)
    config_ui = self._factory._create_config(wire['config'])
    captured = io.BytesIO()
    config_ui.write = captured.write
    # debugflag must be on so `identify` prints the full node id
    config_ui.debugflag = True
    commands.identify(config_ui, repo, id=True)

    return captured.getvalue()
889
889
@reraise_safe_exceptions
def heads(self, wire, branch=None):
    """Return space-separated hex node ids of repository heads (bytes)."""
    repo = self._factory.repo(wire)
    config_ui = self._factory._create_config(wire['config'])
    captured = io.BytesIO()

    def _capture(data, **unused_kwargs):
        captured.write(data)

    config_ui.write = _capture
    # restrict to a single branch when one is requested
    branch_args = [safe_bytes(branch)] if branch else []
    commands.heads(config_ui, repo, template=b'{node} ', *branch_args)

    return captured.getvalue()
907
907
@reraise_safe_exceptions
def ancestor(self, wire, revision1, revision2):
    """Return the hex id of the common ancestor of the two revisions."""
    repo = self._factory.repo(wire)
    resolve = repo.lookup
    node1 = resolve(safe_bytes(revision1))
    node2 = resolve(safe_bytes(revision2))
    return hex(repo.changelog.ancestor(node1, node2))
915
915
@reraise_safe_exceptions
def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
    """Clone ``source`` into ``dest``; skip the working-dir update unless asked."""
    baseui = self._factory._create_config(wire["config"], hooks=hooks)
    skip_update = not update_after_clone
    # `clone` resolves to the module-level mercurial clone function here
    clone(baseui, safe_bytes(source), safe_bytes(dest), noupdate=skip_update)
920
920
@reraise_safe_exceptions
def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
    """
    Create an in-memory commit (memctx) from the given file changes.

    :param message: commit message
    :param parents: parent commit ids
    :param commit_time: commit date, epoch seconds
    :param commit_timezone: timezone offset for the commit date
    :param user: author/committer string
    :param files: all file paths touched by this commit
    :param extra: mapping of extra commit metadata
    :param removed: paths removed by this commit
    :param updated: list of dicts describing added/changed nodes
        (keys used here: 'path', 'content', 'mode')
    :return: hex id of the newly created commit
    """

    repo = self._factory.repo(wire)
    baseui = self._factory._create_config(wire['config'])
    # publishing repos create public commits, otherwise draft
    publishing = baseui.configbool(b'phases', b'publish')

    def _filectxfn(_repo, ctx, path: bytes):
        """
        Marks given path as added/changed/removed in a given _repo. This is
        for internal mercurial commit function.
        """

        # check if this path is removed
        if safe_str(path) in removed:
            # returning None is a way to mark node for removal
            return None

        # check if this path is added
        for node in updated:
            if safe_bytes(node['path']) == path:
                return memfilectx(
                    _repo,
                    changectx=ctx,
                    path=safe_bytes(node['path']),
                    data=safe_bytes(node['content']),
                    islink=False,
                    # executable bit derived from the unix mode
                    isexec=bool(node['mode'] & stat.S_IXUSR),
                    copysource=False)
        # path is neither removed nor updated: caller gave inconsistent input
        abort_exc = exceptions.AbortException()
        raise abort_exc(f"Given path haven't been marked as added, changed or removed ({path})")

    if publishing:
        new_commit_phase = b'public'
    else:
        new_commit_phase = b'draft'
    # phase override only applies while the commit is created
    with repo.ui.configoverride({(b'phases', b'new-commit'): new_commit_phase}):
        kwargs = {safe_bytes(k): safe_bytes(v) for k, v in extra.items()}
        commit_ctx = memctx(
            repo=repo,
            parents=parents,
            text=safe_bytes(message),
            files=[safe_bytes(x) for x in files],
            filectxfn=_filectxfn,
            user=safe_bytes(user),
            date=(commit_time, commit_timezone),
            extra=kwargs)

        n = repo.commitctx(commit_ctx)
        new_id = hex(n)

    return new_id
973
973
@reraise_safe_exceptions
def pull(self, wire, url, commit_ids=None):
    """
    Pull changes from remote ``url`` using the low-level exchange API.

    :param commit_ids: optional list of hex commit ids to limit the pull heads
    :return: changegroup pull result code
    """
    repo = self._factory.repo(wire)
    # Disable any prompts for this repo
    repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')

    remote = peer(repo, {}, safe_bytes(url))
    # Disable any prompts for this remote
    remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')

    if commit_ids:
        # convert hex ids into binary node ids expected by exchange.pull
        commit_ids = [bin(commit_id) for commit_id in commit_ids]

    return exchange.pull(
        repo, remote, heads=commit_ids, force=None).cgresult
989
989
@reraise_safe_exceptions
def pull_cmd(self, wire, source, bookmark='', branch='', revision='', hooks=True):
    """
    Pull from ``source`` via the hg pull command, optionally limited to
    specific bookmarks, branches or revisions. Each of those may be a
    single value or a list of values.
    """
    repo = self._factory.repo(wire)
    baseui = self._factory._create_config(wire['config'], hooks=hooks)

    source = safe_bytes(source)

    # Mercurial internally has a lot of logic that checks ONLY if
    # option is defined, we just pass those if they are defined then
    opts = {}

    if bookmark:
        opts['bookmark'] = [safe_bytes(x) for x in bookmark] \
            if isinstance(bookmark, list) else safe_bytes(bookmark)

    if branch:
        opts['branch'] = [safe_bytes(x) for x in branch] \
            if isinstance(branch, list) else safe_bytes(branch)

    if revision:
        opts['rev'] = [safe_bytes(x) for x in revision] \
            if isinstance(revision, list) else safe_bytes(revision)

    commands.pull(baseui, repo, source, **opts)
1014
1014
@reraise_safe_exceptions
def push(self, wire, revisions, dest_path, hooks: bool = True, push_branches: bool = False):
    """Push the given revision(s) to ``dest_path`` via the hg push command."""
    repo = self._factory.repo(wire)
    baseui = self._factory._create_config(wire['config'], hooks=hooks)

    # normalize to bytes; `revisions` may be a single value or a list
    if isinstance(revisions, list):
        rev_arg = [safe_bytes(item) for item in revisions]
    else:
        rev_arg = safe_bytes(revisions)

    commands.push(baseui, repo, safe_bytes(dest_path),
                  rev=rev_arg,
                  new_branch=push_branches)
1026
1026
@reraise_safe_exceptions
def strip(self, wire, revision, update, backup):
    """
    Strip ``revision`` from the repository via the strip extension.

    :param update: update the working directory after stripping
    :param backup: keep a backup bundle of the stripped changesets
    """
    repo = self._factory.repo(wire)
    ctx = self._get_ctx(repo, revision)
    hgext_strip.strip(
        repo.baseui, repo, ctx.node(), update=update, backup=backup)
1033
1033
@reraise_safe_exceptions
def get_unresolved_files(self, wire):
    """
    Return the output lines of `hg resolve --list` (as bytes), one entry
    per file involved in an unresolved/resolved merge.
    """
    repo = self._factory.repo(wire)

    log.debug('Calculating unresolved files for repo: %s', repo)
    output = io.BytesIO()

    def write(data, **unused_kwargs):
        output.write(data)

    baseui = self._factory._create_config(wire['config'])
    baseui.write = write

    commands.resolve(baseui, repo, list=True)
    # splitlines() instead of the obscure splitlines(0): `keepends` is a
    # boolean flag and its default (False) is exactly what we want.
    unresolved = output.getvalue().splitlines()
    return unresolved
1050
1050
@reraise_safe_exceptions
def merge(self, wire, revision):
    """Merge ``revision`` into the working directory without any prompting."""
    repo = self._factory.repo(wire)
    baseui = self._factory._create_config(wire['config'])
    # dump conflict markers into files instead of launching a merge tool
    repo.ui.setconfig(b'ui', b'merge', b'internal:dump')

    # In case of sub repositories are used mercurial prompts the user in
    # case of merge conflicts or different sub repository sources. By
    # setting the interactive flag to `False` mercurial doesn't prompt the
    # used but instead uses a default value.
    repo.ui.setconfig(b'ui', b'interactive', False)
    commands.merge(baseui, repo, rev=safe_bytes(revision))
1063
1063
@reraise_safe_exceptions
def merge_state(self, wire):
    """Return the list of file paths still unresolved in the merge state."""
    repo = self._factory.repo(wire)
    repo.ui.setconfig(b'ui', b'merge', b'internal:dump')

    # Sub-repositories may make mercurial prompt (merge conflicts or
    # differing subrepo sources); with interactivity off it silently
    # falls back to default answers instead.
    repo.ui.setconfig(b'ui', b'interactive', False)
    merge_st = hg_merge.mergestate(repo)
    return list(merge_st.unresolved())
1076
1076
@reraise_safe_exceptions
def commit(self, wire, message, username, close_branch=False):
    """Commit working-directory changes with the given message and author."""
    repo = self._factory.repo(wire)
    config_ui = self._factory._create_config(wire['config'])
    repo.ui.setconfig(b'ui', b'username', safe_bytes(username))
    commands.commit(config_ui, repo, message=safe_bytes(message), close_branch=close_branch)
1083
1083
@reraise_safe_exceptions
def rebase(self, wire, source='', dest='', abort=False):
    """
    Rebase changesets with base ``source`` onto ``dest``, or abort an
    in-progress rebase when ``abort`` is True (keeping original commits).
    """
    repo = self._factory.repo(wire)
    baseui = self._factory._create_config(wire['config'])
    # dump conflict markers into files instead of launching a merge tool
    repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
    # In case of sub repositories are used mercurial prompts the user in
    # case of merge conflicts or different sub repository sources. By
    # setting the interactive flag to `False` mercurial doesn't prompt the
    # used but instead uses a default value.
    repo.ui.setconfig(b'ui', b'interactive', False)

    rebase.rebase(baseui, repo, base=safe_bytes(source or ''), dest=safe_bytes(dest or ''),
                  abort=abort, keep=not abort)
1097
1097
@reraise_safe_exceptions
def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
    """
    Tag ``revision`` with ``name``.

    :param local: create a local (non-versioned) tag
    :param tag_time: tag date, epoch seconds
    :param tag_timezone: timezone offset for the tag date
    :raises exceptions.AbortException: when mercurial aborts the operation
    """
    repo = self._factory.repo(wire)
    ctx = self._get_ctx(repo, revision)
    node = ctx.node()

    date = (tag_time, tag_timezone)
    try:
        hg_tag.tag(repo, safe_bytes(name), node, safe_bytes(message), local, safe_bytes(user), date)
    except Abort as e:
        log.exception("Tag operation aborted")
        # Exception can contain unicode which we convert
        raise exceptions.AbortException(e)(repr(e))
1111
1111
@reraise_safe_exceptions
def bookmark(self, wire, bookmark, revision=''):
    """Create or force-move a bookmark to ``revision``."""
    repo = self._factory.repo(wire)
    config_ui = self._factory._create_config(wire['config'])
    rev = safe_bytes(revision or '')
    commands.bookmark(config_ui, repo, safe_bytes(bookmark), rev=rev, force=True)
1118
1118
@reraise_safe_exceptions
def install_hooks(self, wire, force=False):
    """No-op for this backend."""
    # we don't need any special hooks for Mercurial
    pass
1123
1123
@reraise_safe_exceptions
def get_hooks_info(self, wire):
    """Report hook versions; for Mercurial both equal the server version."""
    version = vcsserver.__version__
    return {
        'pre_version': version,
        'post_version': version,
    }
1130
1130
@reraise_safe_exceptions
def set_head_ref(self, wire, head_name):
    """No-op for this backend; nothing is done with ``head_name``."""
    pass
1134
1134
@reraise_safe_exceptions
def archive_repo(self, wire, archive_name_key, kind, mtime, archive_at_path,
                 archive_dir_name, commit_id, cache_config):
    """
    Create an archive of the repository at ``commit_id`` and store it in
    the archive cache.

    :param archive_name_key: cache key for the resulting archive
    :param kind: archive format identifier
    :param mtime: modification time used for archive entries
    :param archive_at_path: limit the archive to this sub-path ('' or '/' = whole repo)
    :param archive_dir_name: top-level directory name inside the archive
    :param cache_config: archive-cache configuration mapping
    """

    def file_walker(_commit_id, path):
        # Yield ArchiveNode entries for every file under `path` at _commit_id.
        repo = self._factory.repo(wire)
        ctx = repo[_commit_id]
        is_root = path in ['', '/']
        if is_root:
            matcher = alwaysmatcher(badfn=None)
        else:
            # glob-match everything below the requested sub-path
            matcher = patternmatcher('', [(b'glob', safe_bytes(path)+b'/**', b'')], badfn=None)
        file_iter = ctx.manifest().walk(matcher)

        for fn in file_iter:
            file_path = fn
            flags = ctx.flags(fn)
            # executable flag maps to 0o755, everything else to 0o644
            mode = b'x' in flags and 0o755 or 0o644
            is_link = b'l' in flags

            yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)

    return store_archive_in_cache(
        file_walker, archive_name_key, kind, mtime, archive_at_path, archive_dir_name, commit_id, cache_config=cache_config)
1159
1159
@@ -1,935 +1,935 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19 import os
19 import os
20 import subprocess
20 import subprocess
21 from urllib.error import URLError
21 from urllib.error import URLError
22 import urllib.parse
22 import urllib.parse
23 import logging
23 import logging
24 import posixpath as vcspath
24 import posixpath as vcspath
25 import io
25 import io
26 import urllib.request
26 import urllib.request
27 import urllib.parse
27 import urllib.parse
28 import urllib.error
28 import urllib.error
29 import traceback
29 import traceback
30
30
31
31
32 import svn.client # noqa
32 import svn.client # noqa
33 import svn.core # noqa
33 import svn.core # noqa
34 import svn.delta # noqa
34 import svn.delta # noqa
35 import svn.diff # noqa
35 import svn.diff # noqa
36 import svn.fs # noqa
36 import svn.fs # noqa
37 import svn.repos # noqa
37 import svn.repos # noqa
38
38
39 from vcsserver import svn_diff, exceptions, subprocessio, settings
39 from vcsserver import svn_diff, exceptions, subprocessio, settings
40 from vcsserver.base import RepoFactory, raise_from_original, ArchiveNode, store_archive_in_cache, BytesEnvelope, BinaryEnvelope
40 from vcsserver.base import RepoFactory, raise_from_original, ArchiveNode, store_archive_in_cache, BytesEnvelope, BinaryEnvelope
41 from vcsserver.exceptions import NoContentException
41 from vcsserver.exceptions import NoContentException
42 from vcsserver.str_utils import safe_str, safe_bytes
42 from vcsserver.str_utils import safe_str, safe_bytes
43 from vcsserver.type_utils import assert_bytes
43 from vcsserver.type_utils import assert_bytes
44 from vcsserver.vcs_base import RemoteBase
44 from vcsserver.vcs_base import RemoteBase
45 from vcsserver.lib.svnremoterepo import svnremoterepo
45 from vcsserver.lib.svnremoterepo import svnremoterepo
46 log = logging.getLogger(__name__)
46 log = logging.getLogger(__name__)
47
47
48
48
# Map legacy "pre-X-compatible" repository flags to the concrete svn version
# they stand for; unknown values are passed through verbatim by callers.
svn_compatible_versions_map = {
    'pre-1.4-compatible': '1.3',
    'pre-1.5-compatible': '1.4',
    'pre-1.6-compatible': '1.5',
    'pre-1.8-compatible': '1.7',
    'pre-1.9-compatible': '1.8',
}

# Default filesystem compatibility version for newly created repositories.
current_compatible_version = '1.14'
58
58
59
59
def reraise_safe_exceptions(func):
    """Decorator converting raw svn exceptions into neutral vcsserver ones.

    Exceptions already tagged with a ``_vcs_kind`` attribute are considered
    safe for transport and re-raised unchanged; anything else is logged and
    wrapped into ``exceptions.UnhandledException`` so remote callers never
    see backend-specific exception types.
    """
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            if not hasattr(e, '_vcs_kind'):
                log.exception("Unhandled exception in svn remote call")
                raise_from_original(exceptions.UnhandledException(e), e)
            raise
    # Preserve the wrapped function's identity for logging/introspection;
    # the original wrapper discarded __name__/__doc__.
    wrapper.__name__ = func.__name__
    wrapper.__doc__ = func.__doc__
    return wrapper
71
71
72
72
class SubversionFactory(RepoFactory):
    repo_type = 'svn'

    def _create_repo(self, wire, create, compatible_version):
        """Open (or create) the svn repository located at ``wire['path']``."""
        path = svn.core.svn_path_canonicalize(wire['path'])
        if create:
            fs_config = {'compatible-version': current_compatible_version}
            if compatible_version:
                # accept either a legacy alias or an explicit version string
                fs_config['compatible-version'] = (
                    svn_compatible_versions_map.get(compatible_version)
                    or compatible_version)

            log.debug('Create SVN repo with config `%s`', fs_config)
            repo = svn.repos.create(path, "", "", None, fs_config)
        else:
            repo = svn.repos.open(path)

        log.debug('repository created: got SVN object: %s', repo)
        return repo

    def repo(self, wire, create=False, compatible_version=None):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create, compatible_version)
100
100
101
101
# Translate raw svn node kinds into the generic vcsserver node-type names.
NODE_TYPE_MAPPING = {
    svn.core.svn_node_file: 'file',
    svn.core.svn_node_dir: 'dir',
}
106
106
107
107
108 class SvnRemote(RemoteBase):
108 class SvnRemote(RemoteBase):
109
109
110 def __init__(self, factory, hg_factory=None):
110 def __init__(self, factory, hg_factory=None):
111 self._factory = factory
111 self._factory = factory
112
112
113 self._bulk_methods = {
113 self._bulk_methods = {
114 # NOT supported in SVN ATM...
114 # NOT supported in SVN ATM...
115 }
115 }
116 self._bulk_file_methods = {
116 self._bulk_file_methods = {
117 "size": self.get_file_size,
117 "size": self.get_file_size,
118 "data": self.get_file_content,
118 "data": self.get_file_content,
119 "flags": self.get_node_type,
119 "flags": self.get_node_type,
120 "is_binary": self.is_binary,
120 "is_binary": self.is_binary,
121 "md5": self.md5_hash
121 "md5": self.md5_hash
122 }
122 }
123
123
124 @reraise_safe_exceptions
124 @reraise_safe_exceptions
125 def bulk_file_request(self, wire, commit_id, path, pre_load):
125 def bulk_file_request(self, wire, commit_id, path, pre_load):
126 cache_on, context_uid, repo_id = self._cache_on(wire)
126 cache_on, context_uid, repo_id = self._cache_on(wire)
127 region = self._region(wire)
127 region = self._region(wire)
128
128
129 # since we use unified API, we need to cast from str to in for SVN
129 # since we use unified API, we need to cast from str to in for SVN
130 commit_id = int(commit_id)
130 commit_id = int(commit_id)
131
131
132 @region.conditional_cache_on_arguments(condition=cache_on)
132 @region.conditional_cache_on_arguments(condition=cache_on)
133 def _bulk_file_request(_repo_id, _commit_id, _path, _pre_load):
133 def _bulk_file_request(_repo_id, _commit_id, _path, _pre_load):
134 result = {}
134 result = {}
135 for attr in pre_load:
135 for attr in pre_load:
136 try:
136 try:
137 method = self._bulk_file_methods[attr]
137 method = self._bulk_file_methods[attr]
138 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
138 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
139 result[attr] = method(wire, _commit_id, _path)
139 result[attr] = method(wire, _commit_id, _path)
140 except KeyError as e:
140 except KeyError as e:
141 raise exceptions.VcsException(e)(f'Unknown bulk attribute: "{attr}"')
141 raise exceptions.VcsException(e)(f'Unknown bulk attribute: "{attr}"')
142 return result
142 return result
143
143
144 return BinaryEnvelope(_bulk_file_request(repo_id, commit_id, path, sorted(pre_load)))
144 return BinaryEnvelope(_bulk_file_request(repo_id, commit_id, path, sorted(pre_load)))
145
145
146 @reraise_safe_exceptions
146 @reraise_safe_exceptions
147 def discover_svn_version(self):
147 def discover_svn_version(self):
148 try:
148 try:
149 import svn.core
149 import svn.core
150 svn_ver = svn.core.SVN_VERSION
150 svn_ver = svn.core.SVN_VERSION
151 except ImportError:
151 except ImportError:
152 svn_ver = None
152 svn_ver = None
153 return safe_str(svn_ver)
153 return safe_str(svn_ver)
154
154
155 @reraise_safe_exceptions
155 @reraise_safe_exceptions
156 def is_empty(self, wire):
156 def is_empty(self, wire):
157 try:
157 try:
158 return self.lookup(wire, -1) == 0
158 return self.lookup(wire, -1) == 0
159 except Exception:
159 except Exception:
160 log.exception("failed to read object_store")
160 log.exception("failed to read object_store")
161 return False
161 return False
162
162
163 def check_url(self, url, config):
163 def check_url(self, url, config):
164
164
165 # uuid function gets only valid UUID from proper repo, else
165 # uuid function gets only valid UUID from proper repo, else
166 # throws exception
166 # throws exception
167 username, password, src_url = self.get_url_and_credentials(url)
167 username, password, src_url = self.get_url_and_credentials(url)
168 try:
168 try:
169 svnremoterepo(safe_bytes(username), safe_bytes(password), safe_bytes(src_url)).svn().uuid
169 svnremoterepo(safe_bytes(username), safe_bytes(password), safe_bytes(src_url)).svn().uuid
170 except Exception:
170 except Exception:
171 tb = traceback.format_exc()
171 tb = traceback.format_exc()
172 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
172 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
173 raise URLError(f'"{url}" is not a valid Subversion source url.')
173 raise URLError(f'"{url}" is not a valid Subversion source url.')
174 return True
174 return True
175
175
176 def is_path_valid_repository(self, wire, path):
176 def is_path_valid_repository(self, wire, path):
177
177
178 # NOTE(marcink): short circuit the check for SVN repo
178 # NOTE(marcink): short circuit the check for SVN repo
179 # the repos.open might be expensive to check, but we have one cheap
179 # the repos.open might be expensive to check, but we have one cheap
180 # pre condition that we can use, to check for 'format' file
180 # pre condition that we can use, to check for 'format' file
181
181
182 if not os.path.isfile(os.path.join(path, 'format')):
182 if not os.path.isfile(os.path.join(path, 'format')):
183 return False
183 return False
184
184
185 try:
185 try:
186 svn.repos.open(path)
186 svn.repos.open(path)
187 except svn.core.SubversionException:
187 except svn.core.SubversionException:
188 tb = traceback.format_exc()
188 tb = traceback.format_exc()
189 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
189 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
190 return False
190 return False
191 return True
191 return True
192
192
193 @reraise_safe_exceptions
193 @reraise_safe_exceptions
194 def verify(self, wire,):
194 def verify(self, wire,):
195 repo_path = wire['path']
195 repo_path = wire['path']
196 if not self.is_path_valid_repository(wire, repo_path):
196 if not self.is_path_valid_repository(wire, repo_path):
197 raise Exception(
197 raise Exception(
198 "Path %s is not a valid Subversion repository." % repo_path)
198 "Path %s is not a valid Subversion repository." % repo_path)
199
199
200 cmd = ['svnadmin', 'info', repo_path]
200 cmd = ['svnadmin', 'info', repo_path]
201 stdout, stderr = subprocessio.run_command(cmd)
201 stdout, stderr = subprocessio.run_command(cmd)
202 return stdout
202 return stdout
203
203
204 @reraise_safe_exceptions
204 @reraise_safe_exceptions
205 def lookup(self, wire, revision):
205 def lookup(self, wire, revision):
206 if revision not in [-1, None, 'HEAD']:
206 if revision not in [-1, None, 'HEAD']:
207 raise NotImplementedError
207 raise NotImplementedError
208 repo = self._factory.repo(wire)
208 repo = self._factory.repo(wire)
209 fs_ptr = svn.repos.fs(repo)
209 fs_ptr = svn.repos.fs(repo)
210 head = svn.fs.youngest_rev(fs_ptr)
210 head = svn.fs.youngest_rev(fs_ptr)
211 return head
211 return head
212
212
213 @reraise_safe_exceptions
213 @reraise_safe_exceptions
214 def lookup_interval(self, wire, start_ts, end_ts):
214 def lookup_interval(self, wire, start_ts, end_ts):
215 repo = self._factory.repo(wire)
215 repo = self._factory.repo(wire)
216 fsobj = svn.repos.fs(repo)
216 fsobj = svn.repos.fs(repo)
217 start_rev = None
217 start_rev = None
218 end_rev = None
218 end_rev = None
219 if start_ts:
219 if start_ts:
220 start_ts_svn = apr_time_t(start_ts)
220 start_ts_svn = apr_time_t(start_ts)
221 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
221 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
222 else:
222 else:
223 start_rev = 1
223 start_rev = 1
224 if end_ts:
224 if end_ts:
225 end_ts_svn = apr_time_t(end_ts)
225 end_ts_svn = apr_time_t(end_ts)
226 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
226 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
227 else:
227 else:
228 end_rev = svn.fs.youngest_rev(fsobj)
228 end_rev = svn.fs.youngest_rev(fsobj)
229 return start_rev, end_rev
229 return start_rev, end_rev
230
230
231 @reraise_safe_exceptions
231 @reraise_safe_exceptions
232 def revision_properties(self, wire, revision):
232 def revision_properties(self, wire, revision):
233
233
234 cache_on, context_uid, repo_id = self._cache_on(wire)
234 cache_on, context_uid, repo_id = self._cache_on(wire)
235 region = self._region(wire)
235 region = self._region(wire)
236
236
237 @region.conditional_cache_on_arguments(condition=cache_on)
237 @region.conditional_cache_on_arguments(condition=cache_on)
238 def _revision_properties(_repo_id, _revision):
238 def _revision_properties(_repo_id, _revision):
239 repo = self._factory.repo(wire)
239 repo = self._factory.repo(wire)
240 fs_ptr = svn.repos.fs(repo)
240 fs_ptr = svn.repos.fs(repo)
241 return svn.fs.revision_proplist(fs_ptr, revision)
241 return svn.fs.revision_proplist(fs_ptr, revision)
242 return _revision_properties(repo_id, revision)
242 return _revision_properties(repo_id, revision)
243
243
244 def revision_changes(self, wire, revision):
244 def revision_changes(self, wire, revision):
245
245
246 repo = self._factory.repo(wire)
246 repo = self._factory.repo(wire)
247 fsobj = svn.repos.fs(repo)
247 fsobj = svn.repos.fs(repo)
248 rev_root = svn.fs.revision_root(fsobj, revision)
248 rev_root = svn.fs.revision_root(fsobj, revision)
249
249
250 editor = svn.repos.ChangeCollector(fsobj, rev_root)
250 editor = svn.repos.ChangeCollector(fsobj, rev_root)
251 editor_ptr, editor_baton = svn.delta.make_editor(editor)
251 editor_ptr, editor_baton = svn.delta.make_editor(editor)
252 base_dir = ""
252 base_dir = ""
253 send_deltas = False
253 send_deltas = False
254 svn.repos.replay2(
254 svn.repos.replay2(
255 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
255 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
256 editor_ptr, editor_baton, None)
256 editor_ptr, editor_baton, None)
257
257
258 added = []
258 added = []
259 changed = []
259 changed = []
260 removed = []
260 removed = []
261
261
262 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
262 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
263 for path, change in editor.changes.items():
263 for path, change in editor.changes.items():
264 # TODO: Decide what to do with directory nodes. Subversion can add
264 # TODO: Decide what to do with directory nodes. Subversion can add
265 # empty directories.
265 # empty directories.
266
266
267 if change.item_kind == svn.core.svn_node_dir:
267 if change.item_kind == svn.core.svn_node_dir:
268 continue
268 continue
269 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
269 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
270 added.append(path)
270 added.append(path)
271 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
271 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
272 svn.repos.CHANGE_ACTION_REPLACE]:
272 svn.repos.CHANGE_ACTION_REPLACE]:
273 changed.append(path)
273 changed.append(path)
274 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
274 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
275 removed.append(path)
275 removed.append(path)
276 else:
276 else:
277 raise NotImplementedError(
277 raise NotImplementedError(
278 "Action {} not supported on path {}".format(
278 "Action {} not supported on path {}".format(
279 change.action, path))
279 change.action, path))
280
280
281 changes = {
281 changes = {
282 'added': added,
282 'added': added,
283 'changed': changed,
283 'changed': changed,
284 'removed': removed,
284 'removed': removed,
285 }
285 }
286 return changes
286 return changes
287
287
288 @reraise_safe_exceptions
288 @reraise_safe_exceptions
289 def node_history(self, wire, path, revision, limit):
289 def node_history(self, wire, path, revision, limit):
290 cache_on, context_uid, repo_id = self._cache_on(wire)
290 cache_on, context_uid, repo_id = self._cache_on(wire)
291 region = self._region(wire)
291 region = self._region(wire)
292
292
293 @region.conditional_cache_on_arguments(condition=cache_on)
293 @region.conditional_cache_on_arguments(condition=cache_on)
294 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
294 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
295 cross_copies = False
295 cross_copies = False
296 repo = self._factory.repo(wire)
296 repo = self._factory.repo(wire)
297 fsobj = svn.repos.fs(repo)
297 fsobj = svn.repos.fs(repo)
298 rev_root = svn.fs.revision_root(fsobj, revision)
298 rev_root = svn.fs.revision_root(fsobj, revision)
299
299
300 history_revisions = []
300 history_revisions = []
301 history = svn.fs.node_history(rev_root, path)
301 history = svn.fs.node_history(rev_root, path)
302 history = svn.fs.history_prev(history, cross_copies)
302 history = svn.fs.history_prev(history, cross_copies)
303 while history:
303 while history:
304 __, node_revision = svn.fs.history_location(history)
304 __, node_revision = svn.fs.history_location(history)
305 history_revisions.append(node_revision)
305 history_revisions.append(node_revision)
306 if limit and len(history_revisions) >= limit:
306 if limit and len(history_revisions) >= limit:
307 break
307 break
308 history = svn.fs.history_prev(history, cross_copies)
308 history = svn.fs.history_prev(history, cross_copies)
309 return history_revisions
309 return history_revisions
310 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
310 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
311
311
312 @reraise_safe_exceptions
312 @reraise_safe_exceptions
313 def node_properties(self, wire, path, revision):
313 def node_properties(self, wire, path, revision):
314 cache_on, context_uid, repo_id = self._cache_on(wire)
314 cache_on, context_uid, repo_id = self._cache_on(wire)
315 region = self._region(wire)
315 region = self._region(wire)
316
316
317 @region.conditional_cache_on_arguments(condition=cache_on)
317 @region.conditional_cache_on_arguments(condition=cache_on)
318 def _node_properties(_repo_id, _path, _revision):
318 def _node_properties(_repo_id, _path, _revision):
319 repo = self._factory.repo(wire)
319 repo = self._factory.repo(wire)
320 fsobj = svn.repos.fs(repo)
320 fsobj = svn.repos.fs(repo)
321 rev_root = svn.fs.revision_root(fsobj, revision)
321 rev_root = svn.fs.revision_root(fsobj, revision)
322 return svn.fs.node_proplist(rev_root, path)
322 return svn.fs.node_proplist(rev_root, path)
323 return _node_properties(repo_id, path, revision)
323 return _node_properties(repo_id, path, revision)
324
324
325 def file_annotate(self, wire, path, revision):
325 def file_annotate(self, wire, path, revision):
326 abs_path = 'file://' + urllib.request.pathname2url(
326 abs_path = 'file://' + urllib.request.pathname2url(
327 vcspath.join(wire['path'], path))
327 vcspath.join(wire['path'], path))
328 file_uri = svn.core.svn_path_canonicalize(abs_path)
328 file_uri = svn.core.svn_path_canonicalize(abs_path)
329
329
330 start_rev = svn_opt_revision_value_t(0)
330 start_rev = svn_opt_revision_value_t(0)
331 peg_rev = svn_opt_revision_value_t(revision)
331 peg_rev = svn_opt_revision_value_t(revision)
332 end_rev = peg_rev
332 end_rev = peg_rev
333
333
334 annotations = []
334 annotations = []
335
335
336 def receiver(line_no, revision, author, date, line, pool):
336 def receiver(line_no, revision, author, date, line, pool):
337 annotations.append((line_no, revision, line))
337 annotations.append((line_no, revision, line))
338
338
339 # TODO: Cannot use blame5, missing typemap function in the swig code
339 # TODO: Cannot use blame5, missing typemap function in the swig code
340 try:
340 try:
341 svn.client.blame2(
341 svn.client.blame2(
342 file_uri, peg_rev, start_rev, end_rev,
342 file_uri, peg_rev, start_rev, end_rev,
343 receiver, svn.client.create_context())
343 receiver, svn.client.create_context())
344 except svn.core.SubversionException as exc:
344 except svn.core.SubversionException as exc:
345 log.exception("Error during blame operation.")
345 log.exception("Error during blame operation.")
346 raise Exception(
346 raise Exception(
347 "Blame not supported or file does not exist at path %s. "
347 f"Blame not supported or file does not exist at path {path}. "
348 "Error %s." % (path, exc))
348 f"Error {exc}.")
349
349
350 return annotations
350 return BinaryEnvelope(annotations)
351
351
352 @reraise_safe_exceptions
352 @reraise_safe_exceptions
353 def get_node_type(self, wire, revision=None, path=''):
353 def get_node_type(self, wire, revision=None, path=''):
354
354
355 cache_on, context_uid, repo_id = self._cache_on(wire)
355 cache_on, context_uid, repo_id = self._cache_on(wire)
356 region = self._region(wire)
356 region = self._region(wire)
357
357
358 @region.conditional_cache_on_arguments(condition=cache_on)
358 @region.conditional_cache_on_arguments(condition=cache_on)
359 def _get_node_type(_repo_id, _revision, _path):
359 def _get_node_type(_repo_id, _revision, _path):
360 repo = self._factory.repo(wire)
360 repo = self._factory.repo(wire)
361 fs_ptr = svn.repos.fs(repo)
361 fs_ptr = svn.repos.fs(repo)
362 if _revision is None:
362 if _revision is None:
363 _revision = svn.fs.youngest_rev(fs_ptr)
363 _revision = svn.fs.youngest_rev(fs_ptr)
364 root = svn.fs.revision_root(fs_ptr, _revision)
364 root = svn.fs.revision_root(fs_ptr, _revision)
365 node = svn.fs.check_path(root, path)
365 node = svn.fs.check_path(root, path)
366 return NODE_TYPE_MAPPING.get(node, None)
366 return NODE_TYPE_MAPPING.get(node, None)
367 return _get_node_type(repo_id, revision, path)
367 return _get_node_type(repo_id, revision, path)
368
368
369 @reraise_safe_exceptions
369 @reraise_safe_exceptions
370 def get_nodes(self, wire, revision=None, path=''):
370 def get_nodes(self, wire, revision=None, path=''):
371
371
372 cache_on, context_uid, repo_id = self._cache_on(wire)
372 cache_on, context_uid, repo_id = self._cache_on(wire)
373 region = self._region(wire)
373 region = self._region(wire)
374
374
375 @region.conditional_cache_on_arguments(condition=cache_on)
375 @region.conditional_cache_on_arguments(condition=cache_on)
376 def _get_nodes(_repo_id, _path, _revision):
376 def _get_nodes(_repo_id, _path, _revision):
377 repo = self._factory.repo(wire)
377 repo = self._factory.repo(wire)
378 fsobj = svn.repos.fs(repo)
378 fsobj = svn.repos.fs(repo)
379 if _revision is None:
379 if _revision is None:
380 _revision = svn.fs.youngest_rev(fsobj)
380 _revision = svn.fs.youngest_rev(fsobj)
381 root = svn.fs.revision_root(fsobj, _revision)
381 root = svn.fs.revision_root(fsobj, _revision)
382 entries = svn.fs.dir_entries(root, path)
382 entries = svn.fs.dir_entries(root, path)
383 result = []
383 result = []
384 for entry_path, entry_info in entries.items():
384 for entry_path, entry_info in entries.items():
385 result.append(
385 result.append(
386 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
386 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
387 return result
387 return result
388 return _get_nodes(repo_id, path, revision)
388 return _get_nodes(repo_id, path, revision)
389
389
390 @reraise_safe_exceptions
390 @reraise_safe_exceptions
391 def get_file_content(self, wire, rev=None, path=''):
391 def get_file_content(self, wire, rev=None, path=''):
392 repo = self._factory.repo(wire)
392 repo = self._factory.repo(wire)
393 fsobj = svn.repos.fs(repo)
393 fsobj = svn.repos.fs(repo)
394
394
395 if rev is None:
395 if rev is None:
396 rev = svn.fs.youngest_rev(fsobj)
396 rev = svn.fs.youngest_rev(fsobj)
397
397
398 root = svn.fs.revision_root(fsobj, rev)
398 root = svn.fs.revision_root(fsobj, rev)
399 content = svn.core.Stream(svn.fs.file_contents(root, path))
399 content = svn.core.Stream(svn.fs.file_contents(root, path))
400 return BytesEnvelope(content.read())
400 return BytesEnvelope(content.read())
401
401
402 @reraise_safe_exceptions
402 @reraise_safe_exceptions
403 def get_file_size(self, wire, revision=None, path=''):
403 def get_file_size(self, wire, revision=None, path=''):
404
404
405 cache_on, context_uid, repo_id = self._cache_on(wire)
405 cache_on, context_uid, repo_id = self._cache_on(wire)
406 region = self._region(wire)
406 region = self._region(wire)
407
407
408 @region.conditional_cache_on_arguments(condition=cache_on)
408 @region.conditional_cache_on_arguments(condition=cache_on)
409 def _get_file_size(_repo_id, _revision, _path):
409 def _get_file_size(_repo_id, _revision, _path):
410 repo = self._factory.repo(wire)
410 repo = self._factory.repo(wire)
411 fsobj = svn.repos.fs(repo)
411 fsobj = svn.repos.fs(repo)
412 if _revision is None:
412 if _revision is None:
413 _revision = svn.fs.youngest_revision(fsobj)
413 _revision = svn.fs.youngest_revision(fsobj)
414 root = svn.fs.revision_root(fsobj, _revision)
414 root = svn.fs.revision_root(fsobj, _revision)
415 size = svn.fs.file_length(root, path)
415 size = svn.fs.file_length(root, path)
416 return size
416 return size
417 return _get_file_size(repo_id, revision, path)
417 return _get_file_size(repo_id, revision, path)
418
418
419 def create_repository(self, wire, compatible_version=None):
419 def create_repository(self, wire, compatible_version=None):
420 log.info('Creating Subversion repository in path "%s"', wire['path'])
420 log.info('Creating Subversion repository in path "%s"', wire['path'])
421 self._factory.repo(wire, create=True,
421 self._factory.repo(wire, create=True,
422 compatible_version=compatible_version)
422 compatible_version=compatible_version)
423
423
424 def get_url_and_credentials(self, src_url) -> tuple[str, str, str]:
424 def get_url_and_credentials(self, src_url) -> tuple[str, str, str]:
425 obj = urllib.parse.urlparse(src_url)
425 obj = urllib.parse.urlparse(src_url)
426 username = obj.username or ''
426 username = obj.username or ''
427 password = obj.password or ''
427 password = obj.password or ''
428 return username, password, src_url
428 return username, password, src_url
429
429
430 def import_remote_repository(self, wire, src_url):
430 def import_remote_repository(self, wire, src_url):
431 repo_path = wire['path']
431 repo_path = wire['path']
432 if not self.is_path_valid_repository(wire, repo_path):
432 if not self.is_path_valid_repository(wire, repo_path):
433 raise Exception(
433 raise Exception(
434 "Path %s is not a valid Subversion repository." % repo_path)
434 "Path %s is not a valid Subversion repository." % repo_path)
435
435
436 username, password, src_url = self.get_url_and_credentials(src_url)
436 username, password, src_url = self.get_url_and_credentials(src_url)
437 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
437 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
438 '--trust-server-cert-failures=unknown-ca']
438 '--trust-server-cert-failures=unknown-ca']
439 if username and password:
439 if username and password:
440 rdump_cmd += ['--username', username, '--password', password]
440 rdump_cmd += ['--username', username, '--password', password]
441 rdump_cmd += [src_url]
441 rdump_cmd += [src_url]
442
442
443 rdump = subprocess.Popen(
443 rdump = subprocess.Popen(
444 rdump_cmd,
444 rdump_cmd,
445 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
445 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
446 load = subprocess.Popen(
446 load = subprocess.Popen(
447 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
447 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
448
448
449 # TODO: johbo: This can be a very long operation, might be better
449 # TODO: johbo: This can be a very long operation, might be better
450 # to track some kind of status and provide an api to check if the
450 # to track some kind of status and provide an api to check if the
451 # import is done.
451 # import is done.
452 rdump.wait()
452 rdump.wait()
453 load.wait()
453 load.wait()
454
454
455 log.debug('Return process ended with code: %s', rdump.returncode)
455 log.debug('Return process ended with code: %s', rdump.returncode)
456 if rdump.returncode != 0:
456 if rdump.returncode != 0:
457 errors = rdump.stderr.read()
457 errors = rdump.stderr.read()
458 log.error('svnrdump dump failed: statuscode %s: message: %s', rdump.returncode, errors)
458 log.error('svnrdump dump failed: statuscode %s: message: %s', rdump.returncode, errors)
459
459
460 reason = 'UNKNOWN'
460 reason = 'UNKNOWN'
461 if b'svnrdump: E230001:' in errors:
461 if b'svnrdump: E230001:' in errors:
462 reason = 'INVALID_CERTIFICATE'
462 reason = 'INVALID_CERTIFICATE'
463
463
464 if reason == 'UNKNOWN':
464 if reason == 'UNKNOWN':
465 reason = f'UNKNOWN:{safe_str(errors)}'
465 reason = f'UNKNOWN:{safe_str(errors)}'
466
466
467 raise Exception(
467 raise Exception(
468 'Failed to dump the remote repository from {}. Reason:{}'.format(
468 'Failed to dump the remote repository from {}. Reason:{}'.format(
469 src_url, reason))
469 src_url, reason))
470 if load.returncode != 0:
470 if load.returncode != 0:
471 raise Exception(
471 raise Exception(
472 'Failed to load the dump of remote repository from %s.' %
472 'Failed to load the dump of remote repository from %s.' %
473 (src_url, ))
473 (src_url, ))
474
474
    def commit(self, wire, message, author, timestamp, updated, removed):
        """
        Create a new revision in the repository behind ``wire``.

        :param wire: connection descriptor; ``wire['path']`` is the repo path.
        :param message: commit message; coerced to bytes for the svn bindings.
        :param author: commit author; coerced to bytes for the svn bindings.
        :param timestamp: optional POSIX timestamp; when truthy, the
            ``svn:date`` revision property is rewritten to this time.
        :param updated: iterable of node dicts to add/update via
            ``TxnNodeProcessor.update``.
        :param removed: iterable of node dicts to delete via
            ``TxnNodeProcessor.remove``.
        :return: the value returned by ``svn.repos.fs_commit_txn`` (the
            committed revision id).
        """

        # The low-level svn bindings require bytes for message and author.
        message = safe_bytes(message)
        author = safe_bytes(author)

        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)

        # Open a commit transaction on top of the current youngest revision.
        rev = svn.fs.youngest_rev(fsobj)
        txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
        txn_root = svn.fs.txn_root(txn)

        # Apply all content changes before deletions are not required by svn;
        # this is simply the order the caller's node lists are processed in.
        for node in updated:
            TxnNodeProcessor(node, txn_root).update()
        for node in removed:
            TxnNodeProcessor(node, txn_root).remove()

        commit_id = svn.repos.fs_commit_txn(repo, txn)

        if timestamp:
            # Overwrite svn:date so the revision carries the caller-supplied
            # time instead of the wall-clock time of the commit.
            apr_time = apr_time_t(timestamp)
            ts_formatted = svn.core.svn_time_to_cstring(apr_time)
            svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)

        log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
        return commit_id
501
501
502 @reraise_safe_exceptions
502 @reraise_safe_exceptions
503 def diff(self, wire, rev1, rev2, path1=None, path2=None,
503 def diff(self, wire, rev1, rev2, path1=None, path2=None,
504 ignore_whitespace=False, context=3):
504 ignore_whitespace=False, context=3):
505
505
506 wire.update(cache=False)
506 wire.update(cache=False)
507 repo = self._factory.repo(wire)
507 repo = self._factory.repo(wire)
508 diff_creator = SvnDiffer(
508 diff_creator = SvnDiffer(
509 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
509 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
510 try:
510 try:
511 return BytesEnvelope(diff_creator.generate_diff())
511 return BytesEnvelope(diff_creator.generate_diff())
512 except svn.core.SubversionException as e:
512 except svn.core.SubversionException as e:
513 log.exception(
513 log.exception(
514 "Error during diff operation operation. "
514 "Error during diff operation operation. "
515 "Path might not exist %s, %s", path1, path2)
515 "Path might not exist %s, %s", path1, path2)
516 return BytesEnvelope(b'')
516 return BytesEnvelope(b'')
517
517
518 @reraise_safe_exceptions
518 @reraise_safe_exceptions
519 def is_large_file(self, wire, path):
519 def is_large_file(self, wire, path):
520 return False
520 return False
521
521
    @reraise_safe_exceptions
    def is_binary(self, wire, rev, path):
        """
        Heuristic binary check: a file is treated as binary when its raw
        content contains a NUL byte; empty or missing content counts as text.
        The result is cached per (repo_id, rev, path) when caching is enabled
        for this wire.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _is_binary(_repo_id, _rev, _path):
            # NOTE: the underscore-prefixed arguments only form the cache
            # key; the actual lookup uses the closed-over wire/rev/path.
            raw_bytes = self.get_file_content(wire, rev, path)
            if not raw_bytes:
                return False
            return b'\0' in raw_bytes

        return _is_binary(repo_id, rev, path)
535
535
    @reraise_safe_exceptions
    def md5_hash(self, wire, rev, path):
        """
        Stub kept for API parity with the other VCS backends: the SVN
        backend always returns an empty string (no md5 is computed here).
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _md5_hash(_repo_id, _rev, _path):
            # Arguments exist only to shape the cache key.
            return ''

        return _md5_hash(repo_id, rev, path)
546
546
    @reraise_safe_exceptions
    def run_svn_command(self, wire, cmd, **opts):
        """
        Execute an svn binary command through ``SubprocessIOChunker``.

        :param cmd: argv list for the svn executable.
        :param opts: popen-style options plus two special keys:
            ``_safe`` -- when True, an ``OSError`` is swallowed and returned
            as ``('', <error text>)`` instead of raising;
            ``extra_env`` -- mapping merged into the inherited environment.
        :return: ``(stdout, stderr)`` as bytes on success.
            NOTE(review): the ``_safe`` failure path returns ``str`` values
            while the success path returns ``bytes`` -- callers must cope
            with both; confirm whether this asymmetry is intentional.
        :raises exceptions.VcsException: when the process cannot be started
            and ``_safe`` is not set.
        """
        path = wire.get('path', None)

        # Run inside the repository directory when it exists on disk.
        if path and os.path.isdir(path):
            opts['cwd'] = path

        safe_call = opts.pop('_safe', False)

        # Start from the inherited environment so svn finds its config/PATH.
        svnenv = os.environ.copy()
        svnenv.update(opts.pop('extra_env', {}))

        _opts = {'env': svnenv, 'shell': False}

        try:
            _opts.update(opts)
            proc = subprocessio.SubprocessIOChunker(cmd, **_opts)

            # Draining the chunker also waits for the process to finish.
            return b''.join(proc), b''.join(proc.stderr)
        except OSError as err:
            if safe_call:
                return '', safe_str(err).strip()
            else:
                cmd = ' '.join(map(safe_str, cmd))  # human friendly CMD
                tb_err = ("Couldn't run svn command (%s).\n"
                          "Original error was:%s\n"
                          "Call options:%s\n"
                          % (cmd, err, _opts))
                log.exception(tb_err)
                raise exceptions.VcsException()(tb_err)
577
577
578 @reraise_safe_exceptions
578 @reraise_safe_exceptions
579 def install_hooks(self, wire, force=False):
579 def install_hooks(self, wire, force=False):
580 from vcsserver.hook_utils import install_svn_hooks
580 from vcsserver.hook_utils import install_svn_hooks
581 repo_path = wire['path']
581 repo_path = wire['path']
582 binary_dir = settings.BINARY_DIR
582 binary_dir = settings.BINARY_DIR
583 executable = None
583 executable = None
584 if binary_dir:
584 if binary_dir:
585 executable = os.path.join(binary_dir, 'python3')
585 executable = os.path.join(binary_dir, 'python3')
586 return install_svn_hooks(repo_path, force_create=force)
586 return install_svn_hooks(repo_path, force_create=force)
587
587
588 @reraise_safe_exceptions
588 @reraise_safe_exceptions
589 def get_hooks_info(self, wire):
589 def get_hooks_info(self, wire):
590 from vcsserver.hook_utils import (
590 from vcsserver.hook_utils import (
591 get_svn_pre_hook_version, get_svn_post_hook_version)
591 get_svn_pre_hook_version, get_svn_post_hook_version)
592 repo_path = wire['path']
592 repo_path = wire['path']
593 return {
593 return {
594 'pre_version': get_svn_pre_hook_version(repo_path),
594 'pre_version': get_svn_pre_hook_version(repo_path),
595 'post_version': get_svn_post_hook_version(repo_path),
595 'post_version': get_svn_post_hook_version(repo_path),
596 }
596 }
597
597
    @reraise_safe_exceptions
    def set_head_ref(self, wire, head_name):
        """No-op: Subversion has no movable HEAD reference to update."""
        pass
601
601
    @reraise_safe_exceptions
    def archive_repo(self, wire, archive_name_key, kind, mtime, archive_at_path,
                     archive_dir_name, commit_id, cache_config):
        """
        Build an archive of the repository tree at ``commit_id`` and store
        it through ``store_archive_in_cache``.

        :param archive_name_key: cache key / name for the archive.
        :param kind: archive format selector passed to the cache helper.
        :param mtime: modification time stamped on archive entries.
        :param archive_at_path: sub-path within the repo to archive from.
        :param archive_dir_name: top-level directory name inside the archive.
        :param commit_id: svn revision number (converted with ``int()``).
        :param cache_config: cache backend configuration.
        """

        def walk_tree(root, root_dir, _commit_id):
            """
            Special recursive svn repo walker
            """
            # svn fs paths are handled as bytes throughout this walker.
            root_dir = safe_bytes(root_dir)

            filemode_default = 0o100644
            filemode_executable = 0o100755

            file_iter = svn.fs.dir_entries(root, root_dir)
            for f_name in file_iter:
                f_type = NODE_TYPE_MAPPING.get(file_iter[f_name].kind, None)

                if f_type == 'dir':
                    # return only DIR, and then all entries in that dir
                    yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type
                    new_root = os.path.join(root_dir, f_name)
                    yield from walk_tree(root, new_root, _commit_id)
                else:

                    f_path = os.path.join(root_dir, f_name).rstrip(b'/')
                    prop_list = svn.fs.node_proplist(root, f_path)

                    # svn:executable marks the file mode, svn:special marks
                    # symlinks in Subversion property conventions.
                    f_mode = filemode_default
                    if prop_list.get('svn:executable'):
                        f_mode = filemode_executable

                    f_is_link = False
                    if prop_list.get('svn:special'):
                        f_is_link = True

                    data = {
                        'is_link': f_is_link,
                        'mode': f_mode,
                        # Bound method: content is read lazily by the caller.
                        'content_stream': svn.core.Stream(svn.fs.file_contents(root, f_path)).read
                    }

                    yield f_path, data, f_type

        def file_walker(_commit_id, path):
            # Adapter turning walk_tree output into ArchiveNode objects.
            repo = self._factory.repo(wire)
            root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id))

            def no_content():
                # Directories carry no readable content.
                raise NoContentException()

            for f_name, f_data, f_type in walk_tree(root, path, _commit_id):
                file_path = f_name

                if f_type == 'dir':
                    mode = f_data['mode']
                    yield ArchiveNode(file_path, mode, False, no_content)
                else:
                    mode = f_data['mode']
                    is_link = f_data['is_link']
                    data_stream = f_data['content_stream']
                    yield ArchiveNode(file_path, mode, is_link, data_stream)

        return store_archive_in_cache(
            file_walker, archive_name_key, kind, mtime, archive_at_path, archive_dir_name, commit_id, cache_config=cache_config)
666
666
667
667
class SvnDiffer(object):
    """
    Utility to create diffs based on difflib and the Subversion api

    Produces git-style unified diff bytes between ``src_rev``/``src_path``
    and ``tgt_rev``/``tgt_path`` of one repository.
    """

    # Set per-node while diffing; when True the node is emitted as a
    # "Cannot display: binary" stanza and its lines are never read.
    binary_content = False

    def __init__(
            self, repo, src_rev, src_path, tgt_rev, tgt_path,
            ignore_whitespace, context):
        """
        :param repo: opened svn repository object.
        :param src_rev: source revision number.
        :param src_path: source path; falls back to ``tgt_path`` when empty.
        :param tgt_rev: target revision number.
        :param tgt_path: target path; falls back to ``''`` when empty.
        :param ignore_whitespace: skip blank-line/space-only changes.
        :param context: unified-diff context line count.
        """
        self.repo = repo
        self.ignore_whitespace = ignore_whitespace
        self.context = context

        fsobj = svn.repos.fs(repo)

        self.tgt_rev = tgt_rev
        self.tgt_path = tgt_path or ''
        self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
        self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)

        self.src_rev = src_rev
        self.src_path = src_path or self.tgt_path
        self.src_root = svn.fs.revision_root(fsobj, src_rev)
        self.src_kind = svn.fs.check_path(self.src_root, self.src_path)

        self._validate()

    def _validate(self):
        # Both sides must be the same node kind (file vs dir) unless one
        # side does not exist (add/delete case).
        if (self.tgt_kind != svn.core.svn_node_none and
                self.src_kind != svn.core.svn_node_none and
                self.src_kind != self.tgt_kind):
            # TODO: johbo: proper error handling
            raise Exception(
                "Source and target are not compatible for diff generation. "
                "Source type: %s, target type: %s" %
                (self.src_kind, self.tgt_kind))

    def generate_diff(self) -> bytes:
        """Return the full diff for the configured paths as raw bytes."""
        buf = io.BytesIO()
        if self.tgt_kind == svn.core.svn_node_dir:
            self._generate_dir_diff(buf)
        else:
            self._generate_file_diff(buf)
        return buf.getvalue()

    def _generate_dir_diff(self, buf: io.BytesIO):
        # Collect per-node changes via a delta editor, then emit each node's
        # diff in sorted path order for deterministic output.
        editor = DiffChangeEditor()
        editor_ptr, editor_baton = svn.delta.make_editor(editor)
        svn.repos.dir_delta2(
            self.src_root,
            self.src_path,
            '',  # src_entry
            self.tgt_root,
            self.tgt_path,
            editor_ptr, editor_baton,
            authorization_callback_allow_all,
            False,  # text_deltas
            svn.core.svn_depth_infinity,  # depth
            False,  # entry_props
            False,  # ignore_ancestry
        )

        for path, __, change in sorted(editor.changes):
            self._generate_node_diff(
                buf, change, path, self.tgt_path, path, self.src_path)

    def _generate_file_diff(self, buf: io.BytesIO):
        # Single-file diff: derive the change kind from node existence.
        change = None
        if self.src_kind == svn.core.svn_node_none:
            change = "add"
        elif self.tgt_kind == svn.core.svn_node_none:
            change = "delete"
        tgt_base, tgt_path = vcspath.split(self.tgt_path)
        src_base, src_path = vcspath.split(self.src_path)
        self._generate_node_diff(
            buf, change, tgt_path, tgt_base, src_path, src_base)

    def _generate_node_diff(
            self, buf: io.BytesIO, change, tgt_path, tgt_base, src_path, src_base):
        """
        Write one node's git-style diff (header + hunks) into ``buf``.

        :param change: one of ``'add'``, ``'delete'``, ``'change'`` or None.
        """

        # Keep both representations: bytes for writing into the diff buffer,
        # str for path joining/logging.
        tgt_path_bytes = safe_bytes(tgt_path)
        tgt_path = safe_str(tgt_path)

        src_path_bytes = safe_bytes(src_path)
        src_path = safe_str(src_path)

        if self.src_rev == self.tgt_rev and tgt_base == src_base:
            # makes consistent behaviour with git/hg to return empty diff if
            # we compare same revisions
            return

        tgt_full_path = vcspath.join(tgt_base, tgt_path)
        src_full_path = vcspath.join(src_base, src_path)

        self.binary_content = False
        mime_type = self._get_mime_type(tgt_full_path)

        # Anything with a non-text svn:mime-type is treated as binary and
        # only announced, not diffed line by line.
        if mime_type and not mime_type.startswith(b'text'):
            self.binary_content = True
            buf.write(b"=" * 67 + b'\n')
            buf.write(b"Cannot display: file marked as a binary type.\n")
            buf.write(b"svn:mime-type = %s\n" % mime_type)
            buf.write(b"Index: %b\n" % tgt_path_bytes)
            buf.write(b"=" * 67 + b'\n')
        buf.write(b"diff --git a/%b b/%b\n" % (tgt_path_bytes, tgt_path_bytes))

        if change == 'add':
            # TODO: johbo: SVN is missing a zero here compared to git
            buf.write(b"new file mode 10644\n")

            # TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write(b'GIT binary patch\n')

            buf.write(b"--- /dev/null\t(revision 0)\n")
            src_lines = []
        else:
            if change == 'delete':
                buf.write(b"deleted file mode 10644\n")

            # TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write(b"--- a/%b\t(revision %d)\n" % (src_path_bytes, self.src_rev))
            src_lines = self._svn_readlines(self.src_root, src_full_path)

        if change == 'delete':
            buf.write(b"+++ /dev/null\t(revision %d)\n" % self.tgt_rev)
            tgt_lines = []
        else:
            buf.write(b"+++ b/%b\t(revision %d)\n" % (tgt_path_bytes, self.tgt_rev))
            tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)

        # we made our diff header, time to generate the diff content into our buffer

        if not self.binary_content:
            udiff = svn_diff.unified_diff(
                src_lines, tgt_lines, context=self.context,
                ignore_blank_lines=self.ignore_whitespace,
                ignore_case=False,
                ignore_space_changes=self.ignore_whitespace)

            buf.writelines(udiff)

    def _get_mime_type(self, path) -> bytes:
        # Prefer the target revision's property; fall back to the source
        # side when the node does not exist in the target revision.
        try:
            mime_type = svn.fs.node_prop(
                self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
        except svn.core.SubversionException:
            mime_type = svn.fs.node_prop(
                self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
        return mime_type

    def _svn_readlines(self, fs_root, node_path):
        # Read a node's content split into lines (keeping line endings);
        # returns [] for binary content and non-file nodes.
        if self.binary_content:
            return []
        node_kind = svn.fs.check_path(fs_root, node_path)
        if node_kind not in (
                svn.core.svn_node_file, svn.core.svn_node_symlink):
            return []
        content = svn.core.Stream(
            svn.fs.file_contents(fs_root, node_path)).read()

        return content.splitlines(True)
834
834
835
835
class DiffChangeEditor(svn.delta.Editor):
    """
    Records changes between two given revisions

    A minimal svn delta editor: instead of materializing content it only
    logs ``(path, kind, action)`` tuples into ``self.changes`` while
    ``svn.repos.dir_delta2`` drives it.
    """

    def __init__(self):
        # List of (path, 'file' or None, 'add'|'change'|'delete') tuples.
        self.changes = []

    def delete_entry(self, path, revision, parent_baton, pool=None):
        # Deletions carry no node kind (None).
        self.changes.append((path, None, 'delete'))

    def add_file(
            self, path, parent_baton, copyfrom_path, copyfrom_revision,
            file_pool=None):
        self.changes.append((path, 'file', 'add'))

    def open_file(self, path, parent_baton, base_revision, file_pool=None):
        # open_file is invoked for modified existing files.
        self.changes.append((path, 'file', 'change'))
854
854
855
855
def authorization_callback_allow_all(root, path, pool):
    # svn authz callback used by dir_delta2: grant read access to every path.
    return True
858
858
859
859
class TxnNodeProcessor(object):
    """
    Utility to process the change of one node within a transaction root.

    It encapsulates the knowledge of how to add, update or remove
    a node for a given transaction root. The purpose is to support the method
    `SvnRemote.commit`.

    ``node`` is a dict with at least a bytes ``path`` key; updates also
    require bytes ``content`` and may carry a ``properties`` mapping.
    """

    def __init__(self, node, txn_root):
        # Paths must already be bytes at this layer.
        assert_bytes(node['path'])

        self.node = node
        self.txn_root = txn_root

    def update(self):
        """Create/overwrite the node: dirs, file, content, properties."""
        self._ensure_parent_dirs()
        self._add_file_if_node_does_not_exist()
        self._update_file_content()
        self._update_file_properties()

    def remove(self):
        """Delete the node from the transaction root."""
        svn.fs.delete(self.txn_root, self.node['path'])
        # TODO: Clean up directory if empty

    def _ensure_parent_dirs(self):
        # Walk up from the node's parent until an existing dir is found,
        # then create the missing chain top-down.
        curdir = vcspath.dirname(self.node['path'])
        dirs_to_create = []
        while not self._svn_path_exists(curdir):
            dirs_to_create.append(curdir)
            curdir = vcspath.dirname(curdir)

        for curdir in reversed(dirs_to_create):
            log.debug('Creating missing directory "%s"', curdir)
            svn.fs.make_dir(self.txn_root, curdir)

    def _svn_path_exists(self, path):
        # True when the path exists (as any node kind) in the txn root.
        path_status = svn.fs.check_path(self.txn_root, path)
        return path_status != svn.core.svn_node_none

    def _add_file_if_node_does_not_exist(self):
        kind = svn.fs.check_path(self.txn_root, self.node['path'])
        if kind == svn.core.svn_node_none:
            svn.fs.make_file(self.txn_root, self.node['path'])

    def _update_file_content(self):
        # Content must be bytes; it replaces the file's text via a textdelta.
        assert_bytes(self.node['content'])

        handler, baton = svn.fs.apply_textdelta(
            self.txn_root, self.node['path'], None, None)
        svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)

    def _update_file_properties(self):
        properties = self.node.get('properties', {})
        for key, value in properties.items():
            svn.fs.change_node_prop(
                self.txn_root, self.node['path'], safe_bytes(key), safe_bytes(value))
917
917
918
918
def apr_time_t(timestamp):
    """
    Convert a Python timestamp into APR timestamp type apr_time_t

    APR expresses time as integer microseconds since the epoch, so the
    (possibly fractional) seconds value is scaled and truncated.
    """
    microseconds_per_second = 1E6
    return int(timestamp * microseconds_per_second)
924
924
925
925
def svn_opt_revision_value_t(num):
    """
    Put `num` into a `svn_opt_revision_value_t` structure.

    Returns an ``svn_opt_revision_t`` of kind ``number`` wrapping the value.
    """
    rev_value = svn.core.svn_opt_revision_value_t()
    rev_value.number = num

    opt_revision = svn.core.svn_opt_revision_t()
    opt_revision.kind = svn.core.svn_opt_revision_number
    opt_revision.value = rev_value
    return opt_revision
General Comments 0
You need to be logged in to leave comments. Login now