##// END OF EJS Templates
exc-tracking: use more rich style tracebacks.
super-admin -
r1144:00db014b default
parent child Browse files
Show More
@@ -1,194 +1,193 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import os
17 import os
18 import sys
18 import sys
19 import tempfile
19 import tempfile
20 import traceback
21 import logging
20 import logging
22 import urllib.parse
21 import urllib.parse
23
22
24 from vcsserver.lib.rc_cache.archive_cache import get_archival_cache_store
23 from vcsserver.lib.rc_cache.archive_cache import get_archival_cache_store
25
24
26 from vcsserver import exceptions
25 from vcsserver import exceptions
27 from vcsserver.exceptions import NoContentException
26 from vcsserver.exceptions import NoContentException
28 from vcsserver.hgcompat import archival
27 from vcsserver.hgcompat import archival
29 from vcsserver.str_utils import safe_bytes
28 from vcsserver.str_utils import safe_bytes
30
29 from vcsserver.lib.exc_tracking import format_exc
31 log = logging.getLogger(__name__)
30 log = logging.getLogger(__name__)
32
31
33
32
class RepoFactory(object):
    """
    Base utility for building repository instances.

    Subclasses implement the actual construction; the ``repo`` entry
    point may cache objects based on the :term:`call context`.
    """

    # VCS backend identifier, set by concrete subclasses.
    repo_type = None

    def __init__(self):
        # nothing to initialise at the base level
        pass

    def _create_config(self, path, config):
        # base implementation ignores its inputs and yields an empty config
        return {}

    def _create_repo(self, wire, create):
        raise NotImplementedError()

    def repo(self, wire, create=False):
        raise NotImplementedError()
55
54
56
55
def obfuscate_qs(query_string):
    """
    Return *query_string* with sensitive parameter values masked out.

    ``auth_token`` and ``api_key`` values are replaced with ``*****``;
    blank values are kept as bare keys (no ``=``). ``None`` passes through.
    """
    if query_string is None:
        return None

    masked_pairs = []
    for key, value in urllib.parse.parse_qsl(query_string, keep_blank_values=True):
        if key in ('auth_token', 'api_key'):
            value = "*****"
        masked_pairs.append((key, value))

    # blank values render as a bare key, matching the original query shape
    return '&'.join(
        f'{key}={value}' if value else key
        for key, value in masked_pairs)
69
68
70
69
def raise_from_original(new_type, org_exc: Exception):
    """
    Raise a new exception type with original args and traceback.

    Must be called from inside an ``except`` block: reads the active
    exception via ``sys.exc_info()``, builds ``new_type(*args)`` from the
    original exception's args, attaches a formatted traceback on
    ``_org_exc_tb`` and re-raises with the original traceback.

    :param new_type: exception class to raise instead of the original
    :param org_exc: the original exception (kept for API compatibility;
        the active exception info is what is actually consulted)
    """
    exc_info = sys.exc_info()
    exc_type, exc_value, exc_traceback = exc_info
    new_exc = new_type(*exc_value.args)

    # store the original traceback into the new exc
    new_exc._org_exc_tb = format_exc(exc_info)

    try:
        raise new_exc.with_traceback(exc_traceback)
    finally:
        # Break the frame <-> traceback reference cycle. Deleting only
        # `exc_traceback` is not enough because `exc_info` still holds a
        # reference to the same traceback object (see sys.exc_info docs).
        del exc_traceback, exc_info
86
85
87
86
class ArchiveNode(object):
    """Lightweight record describing one file entry destined for an archive."""

    def __init__(self, path, mode, is_link, raw_bytes):
        # path of the entry inside the archive
        self.path = path
        # unix permission bits for the entry
        self.mode = mode
        # True when the entry is a symbolic link
        self.is_link = is_link
        # provider of the raw file content (called by the archiver)
        self.raw_bytes = raw_bytes
94
93
95
94
def store_archive_in_cache(node_walker, archive_key, kind, mtime, archive_at_path, archive_dir_name,
                           commit_id, write_metadata=True, extra_metadata=None, cache_config=None):
    """
    Function that would store generate archive and send it to a dedicated backend store
    In here we use diskcache

    :param node_walker: a generator returning nodes to add to archive
    :param archive_key: key used to store the path
    :param kind: archive kind
    :param mtime: time of creation
    :param archive_at_path: default '/' the path at archive was started.
        If this is not '/' it means it's a partial archive
    :param archive_dir_name: inside dir name when creating an archive
    :param commit_id: commit sha of revision archive was created at
    :param write_metadata: when True, a ``.archival.txt`` metadata file is
        added to the archive with commit_id/mtime and any extra entries
    :param extra_metadata: optional dict merged into the metadata file
    :param cache_config: configuration passed to the archival cache store

    walker should be a file walker, for example,
    def node_walker():
        for file_info in files:
            yield ArchiveNode(fn, mode, is_link, ctx[fn].data)
    """
    extra_metadata = extra_metadata or {}

    d_cache = get_archival_cache_store(config=cache_config)

    # fast path: archive already cached, hand back the backing file name
    if archive_key in d_cache:
        with d_cache as d_cache_reader:
            reader, tag = d_cache_reader.get(archive_key, read=True, tag=True, retry=True)
            return reader.name

    archive_tmp_path = safe_bytes(tempfile.mkstemp()[1])
    log.debug('Creating new temp archive in %s', archive_tmp_path)

    if kind == "tgz":
        archiver = archival.tarit(archive_tmp_path, mtime, b"gz")
    elif kind == "tbz2":
        archiver = archival.tarit(archive_tmp_path, mtime, b"bz2")
    elif kind == 'zip':
        archiver = archival.zipit(archive_tmp_path, mtime)
    else:
        raise exceptions.ArchiveException()(
            f'Remote does not support: "{kind}" archive type.')

    for f in node_walker(commit_id, archive_at_path):
        f_path = os.path.join(safe_bytes(archive_dir_name), safe_bytes(f.path).lstrip(b'/'))
        try:
            archiver.addfile(f_path, f.mode, f.is_link, f.raw_bytes())
        except NoContentException:
            # NOTE(marcink): this is a special case for SVN so we can create "empty"
            # directories which are not supported by archiver
            archiver.addfile(os.path.join(f_path, b'.dir'), f.mode, f.is_link, b'')

    if write_metadata:
        # dict literal instead of dict([(k, v), ...]) — same content, idiomatic
        metadata = {
            'commit_id': commit_id,
            'mtime': mtime,
        }
        metadata.update(extra_metadata)

        meta = [safe_bytes(f"{f_name}:{value}") for f_name, value in metadata.items()]
        f_path = os.path.join(safe_bytes(archive_dir_name), b'.archival.txt')
        archiver.addfile(f_path, 0o644, False, b'\n'.join(meta))

    archiver.done()

    # ensure set & get are atomic
    with d_cache.transact():

        with open(archive_tmp_path, 'rb') as archive_file:
            add_result = d_cache.set(archive_key, archive_file, read=True, tag='db-name', retry=True)
        if not add_result:
            log.error('Failed to store cache for key=%s', archive_key)

        os.remove(archive_tmp_path)

        reader, tag = d_cache.get(archive_key, read=True, tag=True, retry=True)
        if not reader:
            raise AssertionError(f'empty reader on key={archive_key} added={add_result}')

        return reader.name
178
177
179
178
class BinaryEnvelope(object):
    """Wrapper that tags an arbitrary value as binary payload."""

    def __init__(self, val):
        # the wrapped value, exposed unchanged
        self.val = val
183
182
184
183
class BytesEnvelope(bytes):
    """A ``bytes`` subclass used to explicitly mark raw byte content."""

    def __new__(cls, content):
        # guard clause: only real bytes may be wrapped
        if not isinstance(content, bytes):
            raise TypeError('BytesEnvelope content= param must be bytes. Use BinaryEnvelope to wrap other types')
        return super().__new__(cls, content)
191
190
192
191
class BinaryBytesEnvelope(BytesEnvelope):
    """BytesEnvelope variant flagging the wrapped content as binary data."""
@@ -1,777 +1,774 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import os
19 import os
20 import sys
20 import sys
21 import locale
21 import locale
22 import logging
22 import logging
23 import uuid
23 import uuid
24 import time
24 import time
25 import wsgiref.util
25 import wsgiref.util
26 import traceback
27 import tempfile
26 import tempfile
28 import psutil
27 import psutil
29
28
30 from itertools import chain
29 from itertools import chain
31
30
32 import msgpack
31 import msgpack
33 import configparser
32 import configparser
34
33
35 from pyramid.config import Configurator
34 from pyramid.config import Configurator
36 from pyramid.wsgi import wsgiapp
35 from pyramid.wsgi import wsgiapp
37 from pyramid.response import Response
36 from pyramid.response import Response
38
37
39 from vcsserver.base import BytesEnvelope, BinaryEnvelope
38 from vcsserver.base import BytesEnvelope, BinaryEnvelope
40 from vcsserver.lib.rc_json import json
39 from vcsserver.lib.rc_json import json
41 from vcsserver.config.settings_maker import SettingsMaker
40 from vcsserver.config.settings_maker import SettingsMaker
42 from vcsserver.str_utils import safe_int
41 from vcsserver.str_utils import safe_int
43 from vcsserver.lib.statsd_client import StatsdClient
42 from vcsserver.lib.statsd_client import StatsdClient
44 from vcsserver.tweens.request_wrapper import get_call_context, get_headers_call_context
43 from vcsserver.tweens.request_wrapper import get_call_context, get_headers_call_context
45
44
46 log = logging.getLogger(__name__)
45 log = logging.getLogger(__name__)
47
46
48 # due to Mercurial/glibc2.27 problems we need to detect if locale settings are
47 # due to Mercurial/glibc2.27 problems we need to detect if locale settings are
49 # causing problems and "fix" it in case they do and fallback to LC_ALL = C
48 # causing problems and "fix" it in case they do and fallback to LC_ALL = C
50
49
51 try:
50 try:
52 locale.setlocale(locale.LC_ALL, '')
51 locale.setlocale(locale.LC_ALL, '')
53 except locale.Error as e:
52 except locale.Error as e:
54 log.error(
53 log.error(
55 'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
54 'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
56 os.environ['LC_ALL'] = 'C'
55 os.environ['LC_ALL'] = 'C'
57
56
58
57
59 import vcsserver
58 import vcsserver
60 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
59 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
61 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
60 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
62 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
61 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
63 from vcsserver.echo_stub.echo_app import EchoApp
62 from vcsserver.echo_stub.echo_app import EchoApp
64 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
63 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
65 from vcsserver.lib.exc_tracking import store_exception
64 from vcsserver.lib.exc_tracking import store_exception, format_exc
66 from vcsserver.server import VcsServer
65 from vcsserver.server import VcsServer
67
66
68 strict_vcs = True
67 strict_vcs = True
69
68
70 git_import_err = None
69 git_import_err = None
71 try:
70 try:
72 from vcsserver.remote.git import GitFactory, GitRemote
71 from vcsserver.remote.git import GitFactory, GitRemote
73 except ImportError as e:
72 except ImportError as e:
74 GitFactory = None
73 GitFactory = None
75 GitRemote = None
74 GitRemote = None
76 git_import_err = e
75 git_import_err = e
77 if strict_vcs:
76 if strict_vcs:
78 raise
77 raise
79
78
80
79
81 hg_import_err = None
80 hg_import_err = None
82 try:
81 try:
83 from vcsserver.remote.hg import MercurialFactory, HgRemote
82 from vcsserver.remote.hg import MercurialFactory, HgRemote
84 except ImportError as e:
83 except ImportError as e:
85 MercurialFactory = None
84 MercurialFactory = None
86 HgRemote = None
85 HgRemote = None
87 hg_import_err = e
86 hg_import_err = e
88 if strict_vcs:
87 if strict_vcs:
89 raise
88 raise
90
89
91
90
92 svn_import_err = None
91 svn_import_err = None
93 try:
92 try:
94 from vcsserver.remote.svn import SubversionFactory, SvnRemote
93 from vcsserver.remote.svn import SubversionFactory, SvnRemote
95 except ImportError as e:
94 except ImportError as e:
96 SubversionFactory = None
95 SubversionFactory = None
97 SvnRemote = None
96 SvnRemote = None
98 svn_import_err = e
97 svn_import_err = e
99 if strict_vcs:
98 if strict_vcs:
100 raise
99 raise
101
100
102
101
103 def _is_request_chunked(environ):
102 def _is_request_chunked(environ):
104 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
103 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
105 return stream
104 return stream
106
105
107
106
def log_max_fd():
    """Best-effort: log the process hard limit on open file descriptors."""
    try:
        hard_limit = psutil.Process().rlimit(psutil.RLIMIT_NOFILE)[1]
        log.info('Max file descriptors value: %s', hard_limit)
    except Exception:
        # purely informational — any failure (missing API, platform quirk)
        # is silently ignored
        pass
114
113
115
114
class VCS(object):
    """
    Aggregates the remote objects for the supported VCS backends.

    Each backend remote (git, hg, svn) is instantiated only when its client
    libraries were imported successfully; otherwise the import error is
    logged and the corresponding attribute is left unset.
    """

    def __init__(self, locale_conf=None, cache_config=None):
        # locale string to force (e.g. 'en_US.UTF-8'); falsy means derive
        # the locale from environment variables
        self.locale = locale_conf
        # settings dict made available to cache-configuration consumers
        self.cache_config = cache_config
        self._configure_locale()

        # log the file-descriptor hard limit early, for diagnostics
        log_max_fd()

        if GitFactory and GitRemote:
            git_factory = GitFactory()
            self._git_remote = GitRemote(git_factory)
        else:
            log.error("Git client import failed: %s", git_import_err)

        if MercurialFactory and HgRemote:
            hg_factory = MercurialFactory()
            self._hg_remote = HgRemote(hg_factory)
        else:
            log.error("Mercurial client import failed: %s", hg_import_err)

        if SubversionFactory and SvnRemote:
            svn_factory = SubversionFactory()

            # hg factory is used for svn url validation
            hg_factory = MercurialFactory()
            self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
        else:
            log.error("Subversion client import failed: %s", svn_import_err)

        self._vcsserver = VcsServer()

    def _configure_locale(self):
        """Apply ``self.locale`` (or the environment's locale) to LC_ALL."""
        if self.locale:
            log.info('Settings locale: `LC_ALL` to %s', self.locale)
        else:
            log.info('Configuring locale subsystem based on environment variables')
        try:
            # If self.locale is the empty string, then the locale
            # module will use the environment variables. See the
            # documentation of the package `locale`.
            locale.setlocale(locale.LC_ALL, self.locale)

            language_code, encoding = locale.getlocale()
            log.info(
                'Locale set to language code "%s" with encoding "%s".',
                language_code, encoding)
        except locale.Error:
            log.exception('Cannot set locale, not configuring the locale system')
164
163
165
164
class WsgiProxy(object):
    """
    Msgpack-over-WSGI proxy that forwards decoded calls to a wrapped handler.

    The request body is a msgpack map with ``environment``, ``input_data``,
    ``args`` and ``kwargs`` keys; the response is a stream of msgpack frames:
    error, status, headers, then the payload chunks.
    """

    def __init__(self, wsgi):
        # wrapped handler exposing .handle(environ, input_data, *args, **kwargs)
        self.wsgi = wsgi

    def __call__(self, environ, start_response):
        input_data = environ['wsgi.input'].read()
        input_data = msgpack.unpackb(input_data)

        error = None
        try:
            data, status, headers = self.wsgi.handle(
                input_data['environment'], input_data['input_data'],
                *input_data['args'], **input_data['kwargs'])
        except Exception as e:
            # failures are serialized back to the caller instead of raised here
            data, status, headers = [], None, None
            error = {
                'message': str(e),
                '_vcs_kind': getattr(e, '_vcs_kind', None)
            }

        start_response(200, {})
        return self._iterator(error, status, headers, data)

    def _iterator(self, error, status, headers, data):
        # the first three packed frames carry error/status/headers,
        # followed by the payload chunks
        initial_data = [
            error,
            status,
            headers,
        ]

        for d in chain(initial_data, data):
            yield msgpack.packb(d)
198
197
199
198
def not_found(request):
    """JSON payload returned for routes with no matching view."""
    return {'status': '404 NOT FOUND'}
202
201
203
202
class VCSViewPredicate(object):
    """Pyramid view predicate: matches requests whose backend has a remote."""

    def __init__(self, val, config):
        # mapping of backend name -> remote object
        self.remotes = val

    def text(self):
        return 'vcs view method = {}'.format(list(self.remotes.keys()))

    phash = text

    def __call__(self, context, request):
        """
        View predicate that returns true if given backend is supported by
        defined remotes.
        """
        return request.matchdict.get('backend') in self.remotes
220
219
221
220
222 class HTTPApplication(object):
221 class HTTPApplication(object):
223 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
222 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
224
223
225 remote_wsgi = remote_wsgi
224 remote_wsgi = remote_wsgi
226 _use_echo_app = False
225 _use_echo_app = False
227
226
228 def __init__(self, settings=None, global_config=None):
227 def __init__(self, settings=None, global_config=None):
229
228
230 self.config = Configurator(settings=settings)
229 self.config = Configurator(settings=settings)
231 # Init our statsd at very start
230 # Init our statsd at very start
232 self.config.registry.statsd = StatsdClient.statsd
231 self.config.registry.statsd = StatsdClient.statsd
233 self.config.registry.vcs_call_context = {}
232 self.config.registry.vcs_call_context = {}
234
233
235 self.global_config = global_config
234 self.global_config = global_config
236 self.config.include('vcsserver.lib.rc_cache')
235 self.config.include('vcsserver.lib.rc_cache')
237 self.config.include('vcsserver.lib.rc_cache.archive_cache')
236 self.config.include('vcsserver.lib.rc_cache.archive_cache')
238
237
239 settings_locale = settings.get('locale', '') or 'en_US.UTF-8'
238 settings_locale = settings.get('locale', '') or 'en_US.UTF-8'
240 vcs = VCS(locale_conf=settings_locale, cache_config=settings)
239 vcs = VCS(locale_conf=settings_locale, cache_config=settings)
241 self._remotes = {
240 self._remotes = {
242 'hg': vcs._hg_remote,
241 'hg': vcs._hg_remote,
243 'git': vcs._git_remote,
242 'git': vcs._git_remote,
244 'svn': vcs._svn_remote,
243 'svn': vcs._svn_remote,
245 'server': vcs._vcsserver,
244 'server': vcs._vcsserver,
246 }
245 }
247 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
246 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
248 self._use_echo_app = True
247 self._use_echo_app = True
249 log.warning("Using EchoApp for VCS operations.")
248 log.warning("Using EchoApp for VCS operations.")
250 self.remote_wsgi = remote_wsgi_stub
249 self.remote_wsgi = remote_wsgi_stub
251
250
252 self._configure_settings(global_config, settings)
251 self._configure_settings(global_config, settings)
253
252
254 self._configure()
253 self._configure()
255
254
256 def _configure_settings(self, global_config, app_settings):
255 def _configure_settings(self, global_config, app_settings):
257 """
256 """
258 Configure the settings module.
257 Configure the settings module.
259 """
258 """
260 settings_merged = global_config.copy()
259 settings_merged = global_config.copy()
261 settings_merged.update(app_settings)
260 settings_merged.update(app_settings)
262
261
263 git_path = app_settings.get('git_path', None)
262 git_path = app_settings.get('git_path', None)
264 if git_path:
263 if git_path:
265 settings.GIT_EXECUTABLE = git_path
264 settings.GIT_EXECUTABLE = git_path
266 binary_dir = app_settings.get('core.binary_dir', None)
265 binary_dir = app_settings.get('core.binary_dir', None)
267 if binary_dir:
266 if binary_dir:
268 settings.BINARY_DIR = binary_dir
267 settings.BINARY_DIR = binary_dir
269
268
270 # Store the settings to make them available to other modules.
269 # Store the settings to make them available to other modules.
271 vcsserver.PYRAMID_SETTINGS = settings_merged
270 vcsserver.PYRAMID_SETTINGS = settings_merged
272 vcsserver.CONFIG = settings_merged
271 vcsserver.CONFIG = settings_merged
273
272
274 def _configure(self):
273 def _configure(self):
275 self.config.add_renderer(name='msgpack', factory=self._msgpack_renderer_factory)
274 self.config.add_renderer(name='msgpack', factory=self._msgpack_renderer_factory)
276
275
277 self.config.add_route('service', '/_service')
276 self.config.add_route('service', '/_service')
278 self.config.add_route('status', '/status')
277 self.config.add_route('status', '/status')
279 self.config.add_route('hg_proxy', '/proxy/hg')
278 self.config.add_route('hg_proxy', '/proxy/hg')
280 self.config.add_route('git_proxy', '/proxy/git')
279 self.config.add_route('git_proxy', '/proxy/git')
281
280
282 # rpc methods
281 # rpc methods
283 self.config.add_route('vcs', '/{backend}')
282 self.config.add_route('vcs', '/{backend}')
284
283
285 # streaming rpc remote methods
284 # streaming rpc remote methods
286 self.config.add_route('vcs_stream', '/{backend}/stream')
285 self.config.add_route('vcs_stream', '/{backend}/stream')
287
286
288 # vcs operations clone/push as streaming
287 # vcs operations clone/push as streaming
289 self.config.add_route('stream_git', '/stream/git/*repo_name')
288 self.config.add_route('stream_git', '/stream/git/*repo_name')
290 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
289 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
291
290
292 self.config.add_view(self.status_view, route_name='status', renderer='json')
291 self.config.add_view(self.status_view, route_name='status', renderer='json')
293 self.config.add_view(self.service_view, route_name='service', renderer='msgpack')
292 self.config.add_view(self.service_view, route_name='service', renderer='msgpack')
294
293
295 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
294 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
296 self.config.add_view(self.git_proxy(), route_name='git_proxy')
295 self.config.add_view(self.git_proxy(), route_name='git_proxy')
297 self.config.add_view(self.vcs_view, route_name='vcs', renderer='msgpack',
296 self.config.add_view(self.vcs_view, route_name='vcs', renderer='msgpack',
298 vcs_view=self._remotes)
297 vcs_view=self._remotes)
299 self.config.add_view(self.vcs_stream_view, route_name='vcs_stream',
298 self.config.add_view(self.vcs_stream_view, route_name='vcs_stream',
300 vcs_view=self._remotes)
299 vcs_view=self._remotes)
301
300
302 self.config.add_view(self.hg_stream(), route_name='stream_hg')
301 self.config.add_view(self.hg_stream(), route_name='stream_hg')
303 self.config.add_view(self.git_stream(), route_name='stream_git')
302 self.config.add_view(self.git_stream(), route_name='stream_git')
304
303
305 self.config.add_view_predicate('vcs_view', VCSViewPredicate)
304 self.config.add_view_predicate('vcs_view', VCSViewPredicate)
306
305
307 self.config.add_notfound_view(not_found, renderer='json')
306 self.config.add_notfound_view(not_found, renderer='json')
308
307
309 self.config.add_view(self.handle_vcs_exception, context=Exception)
308 self.config.add_view(self.handle_vcs_exception, context=Exception)
310
309
311 self.config.add_tween(
310 self.config.add_tween(
312 'vcsserver.tweens.request_wrapper.RequestWrapperTween',
311 'vcsserver.tweens.request_wrapper.RequestWrapperTween',
313 )
312 )
314 self.config.add_request_method(
313 self.config.add_request_method(
315 'vcsserver.lib.request_counter.get_request_counter',
314 'vcsserver.lib.request_counter.get_request_counter',
316 'request_count')
315 'request_count')
317
316
318 def wsgi_app(self):
317 def wsgi_app(self):
319 return self.config.make_wsgi_app()
318 return self.config.make_wsgi_app()
320
319
321 def _vcs_view_params(self, request):
320 def _vcs_view_params(self, request):
322 remote = self._remotes[request.matchdict['backend']]
321 remote = self._remotes[request.matchdict['backend']]
323 payload = msgpack.unpackb(request.body, use_list=True)
322 payload = msgpack.unpackb(request.body, use_list=True)
324
323
325 method = payload.get('method')
324 method = payload.get('method')
326 params = payload['params']
325 params = payload['params']
327 wire = params.get('wire')
326 wire = params.get('wire')
328 args = params.get('args')
327 args = params.get('args')
329 kwargs = params.get('kwargs')
328 kwargs = params.get('kwargs')
330 context_uid = None
329 context_uid = None
331
330
332 request.registry.vcs_call_context = {
331 request.registry.vcs_call_context = {
333 'method': method,
332 'method': method,
334 'repo_name': payload.get('_repo_name'),
333 'repo_name': payload.get('_repo_name'),
335 }
334 }
336
335
337 if wire:
336 if wire:
338 try:
337 try:
339 wire['context'] = context_uid = uuid.UUID(wire['context'])
338 wire['context'] = context_uid = uuid.UUID(wire['context'])
340 except KeyError:
339 except KeyError:
341 pass
340 pass
342 args.insert(0, wire)
341 args.insert(0, wire)
343 repo_state_uid = wire.get('repo_state_uid') if wire else None
342 repo_state_uid = wire.get('repo_state_uid') if wire else None
344
343
345 # NOTE(marcink): trading complexity for slight performance
344 # NOTE(marcink): trading complexity for slight performance
346 if log.isEnabledFor(logging.DEBUG):
345 if log.isEnabledFor(logging.DEBUG):
347 # also we SKIP printing out any of those methods args since they maybe excessive
346 # also we SKIP printing out any of those methods args since they maybe excessive
348 just_args_methods = {
347 just_args_methods = {
349 'commitctx': ('content', 'removed', 'updated'),
348 'commitctx': ('content', 'removed', 'updated'),
350 'commit': ('content', 'removed', 'updated')
349 'commit': ('content', 'removed', 'updated')
351 }
350 }
352 if method in just_args_methods:
351 if method in just_args_methods:
353 skip_args = just_args_methods[method]
352 skip_args = just_args_methods[method]
354 call_args = ''
353 call_args = ''
355 call_kwargs = {}
354 call_kwargs = {}
356 for k in kwargs:
355 for k in kwargs:
357 if k in skip_args:
356 if k in skip_args:
358 # replace our skip key with dummy
357 # replace our skip key with dummy
359 call_kwargs[k] = f'RemovedParam({k})'
358 call_kwargs[k] = f'RemovedParam({k})'
360 else:
359 else:
361 call_kwargs[k] = kwargs[k]
360 call_kwargs[k] = kwargs[k]
362 else:
361 else:
363 call_args = args[1:]
362 call_args = args[1:]
364 call_kwargs = kwargs
363 call_kwargs = kwargs
365
364
366 log.debug('Method requested:`%s` with args:%s kwargs:%s context_uid: %s, repo_state_uid:%s',
365 log.debug('Method requested:`%s` with args:%s kwargs:%s context_uid: %s, repo_state_uid:%s',
367 method, call_args, call_kwargs, context_uid, repo_state_uid)
366 method, call_args, call_kwargs, context_uid, repo_state_uid)
368
367
369 statsd = request.registry.statsd
368 statsd = request.registry.statsd
370 if statsd:
369 if statsd:
371 statsd.incr(
370 statsd.incr(
372 'vcsserver_method_total', tags=[
371 'vcsserver_method_total', tags=[
373 f"method:{method}",
372 f"method:{method}",
374 ])
373 ])
375 return payload, remote, method, args, kwargs
374 return payload, remote, method, args, kwargs
376
375
377 def vcs_view(self, request):
376 def vcs_view(self, request):
378
377
379 payload, remote, method, args, kwargs = self._vcs_view_params(request)
378 payload, remote, method, args, kwargs = self._vcs_view_params(request)
380 payload_id = payload.get('id')
379 payload_id = payload.get('id')
381
380
382 try:
381 try:
383 resp = getattr(remote, method)(*args, **kwargs)
382 resp = getattr(remote, method)(*args, **kwargs)
384 except Exception as e:
383 except Exception as e:
385 exc_info = list(sys.exc_info())
384 exc_info = list(sys.exc_info())
386 exc_type, exc_value, exc_traceback = exc_info
385 exc_type, exc_value, exc_traceback = exc_info
387
386
388 org_exc = getattr(e, '_org_exc', None)
387 org_exc = getattr(e, '_org_exc', None)
389 org_exc_name = None
388 org_exc_name = None
390 org_exc_tb = ''
389 org_exc_tb = ''
391 if org_exc:
390 if org_exc:
392 org_exc_name = org_exc.__class__.__name__
391 org_exc_name = org_exc.__class__.__name__
393 org_exc_tb = getattr(e, '_org_exc_tb', '')
392 org_exc_tb = getattr(e, '_org_exc_tb', '')
394 # replace our "faked" exception with our org
393 # replace our "faked" exception with our org
395 exc_info[0] = org_exc.__class__
394 exc_info[0] = org_exc.__class__
396 exc_info[1] = org_exc
395 exc_info[1] = org_exc
397
396
398 should_store_exc = True
397 should_store_exc = True
399 if org_exc:
398 if org_exc:
400 def get_exc_fqn(_exc_obj):
399 def get_exc_fqn(_exc_obj):
401 module_name = getattr(org_exc.__class__, '__module__', 'UNKNOWN')
400 module_name = getattr(org_exc.__class__, '__module__', 'UNKNOWN')
402 return module_name + '.' + org_exc_name
401 return module_name + '.' + org_exc_name
403
402
404 exc_fqn = get_exc_fqn(org_exc)
403 exc_fqn = get_exc_fqn(org_exc)
405
404
406 if exc_fqn in ['mercurial.error.RepoLookupError',
405 if exc_fqn in ['mercurial.error.RepoLookupError',
407 'vcsserver.exceptions.RefNotFoundException']:
406 'vcsserver.exceptions.RefNotFoundException']:
408 should_store_exc = False
407 should_store_exc = False
409
408
410 if should_store_exc:
409 if should_store_exc:
411 store_exception(id(exc_info), exc_info, request_path=request.path)
410 store_exception(id(exc_info), exc_info, request_path=request.path)
412
411
413 tb_info = ''.join(
412 tb_info = format_exc(exc_info)
414 traceback.format_exception(exc_type, exc_value, exc_traceback))
415
413
416 type_ = e.__class__.__name__
414 type_ = e.__class__.__name__
417 if type_ not in self.ALLOWED_EXCEPTIONS:
415 if type_ not in self.ALLOWED_EXCEPTIONS:
418 type_ = None
416 type_ = None
419
417
420 resp = {
418 resp = {
421 'id': payload_id,
419 'id': payload_id,
422 'error': {
420 'error': {
423 'message': str(e),
421 'message': str(e),
424 'traceback': tb_info,
422 'traceback': tb_info,
425 'org_exc': org_exc_name,
423 'org_exc': org_exc_name,
426 'org_exc_tb': org_exc_tb,
424 'org_exc_tb': org_exc_tb,
427 'type': type_
425 'type': type_
428 }
426 }
429 }
427 }
430
428
431 try:
429 try:
432 resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
430 resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
433 except AttributeError:
431 except AttributeError:
434 pass
432 pass
435 else:
433 else:
436 resp = {
434 resp = {
437 'id': payload_id,
435 'id': payload_id,
438 'result': resp
436 'result': resp
439 }
437 }
440 log.debug('Serving data for method %s', method)
438 log.debug('Serving data for method %s', method)
441 return resp
439 return resp
442
440
443 def vcs_stream_view(self, request):
441 def vcs_stream_view(self, request):
444 payload, remote, method, args, kwargs = self._vcs_view_params(request)
442 payload, remote, method, args, kwargs = self._vcs_view_params(request)
445 # this method has a stream: marker we remove it here
443 # this method has a stream: marker we remove it here
446 method = method.split('stream:')[-1]
444 method = method.split('stream:')[-1]
447 chunk_size = safe_int(payload.get('chunk_size')) or 4096
445 chunk_size = safe_int(payload.get('chunk_size')) or 4096
448
446
449 try:
447 try:
450 resp = getattr(remote, method)(*args, **kwargs)
448 resp = getattr(remote, method)(*args, **kwargs)
451 except Exception as e:
449 except Exception as e:
452 raise
450 raise
453
451
454 def get_chunked_data(method_resp):
452 def get_chunked_data(method_resp):
455 stream = io.BytesIO(method_resp)
453 stream = io.BytesIO(method_resp)
456 while 1:
454 while 1:
457 chunk = stream.read(chunk_size)
455 chunk = stream.read(chunk_size)
458 if not chunk:
456 if not chunk:
459 break
457 break
460 yield chunk
458 yield chunk
461
459
462 response = Response(app_iter=get_chunked_data(resp))
460 response = Response(app_iter=get_chunked_data(resp))
463 response.content_type = 'application/octet-stream'
461 response.content_type = 'application/octet-stream'
464
462
465 return response
463 return response
466
464
467 def status_view(self, request):
465 def status_view(self, request):
468 import vcsserver
466 import vcsserver
469 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
467 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
470 'pid': os.getpid()}
468 'pid': os.getpid()}
471
469
472 def service_view(self, request):
470 def service_view(self, request):
473 import vcsserver
471 import vcsserver
474
472
475 payload = msgpack.unpackb(request.body, use_list=True)
473 payload = msgpack.unpackb(request.body, use_list=True)
476 server_config, app_config = {}, {}
474 server_config, app_config = {}, {}
477
475
478 try:
476 try:
479 path = self.global_config['__file__']
477 path = self.global_config['__file__']
480 config = configparser.RawConfigParser()
478 config = configparser.RawConfigParser()
481
479
482 config.read(path)
480 config.read(path)
483
481
484 if config.has_section('server:main'):
482 if config.has_section('server:main'):
485 server_config = dict(config.items('server:main'))
483 server_config = dict(config.items('server:main'))
486 if config.has_section('app:main'):
484 if config.has_section('app:main'):
487 app_config = dict(config.items('app:main'))
485 app_config = dict(config.items('app:main'))
488
486
489 except Exception:
487 except Exception:
490 log.exception('Failed to read .ini file for display')
488 log.exception('Failed to read .ini file for display')
491
489
492 environ = list(os.environ.items())
490 environ = list(os.environ.items())
493
491
494 resp = {
492 resp = {
495 'id': payload.get('id'),
493 'id': payload.get('id'),
496 'result': dict(
494 'result': dict(
497 version=vcsserver.__version__,
495 version=vcsserver.__version__,
498 config=server_config,
496 config=server_config,
499 app_config=app_config,
497 app_config=app_config,
500 environ=environ,
498 environ=environ,
501 payload=payload,
499 payload=payload,
502 )
500 )
503 }
501 }
504 return resp
502 return resp
505
503
506 def _msgpack_renderer_factory(self, info):
504 def _msgpack_renderer_factory(self, info):
507
505
508 def _render(value, system):
506 def _render(value, system):
509 bin_type = False
507 bin_type = False
510 res = value.get('result')
508 res = value.get('result')
511 if isinstance(res, BytesEnvelope):
509 if isinstance(res, BytesEnvelope):
512 log.debug('Result is wrapped in BytesEnvelope type')
510 log.debug('Result is wrapped in BytesEnvelope type')
513 bin_type = True
511 bin_type = True
514 elif isinstance(res, BinaryEnvelope):
512 elif isinstance(res, BinaryEnvelope):
515 log.debug('Result is wrapped in BinaryEnvelope type')
513 log.debug('Result is wrapped in BinaryEnvelope type')
516 value['result'] = res.val
514 value['result'] = res.val
517 bin_type = True
515 bin_type = True
518
516
519 request = system.get('request')
517 request = system.get('request')
520 if request is not None:
518 if request is not None:
521 response = request.response
519 response = request.response
522 ct = response.content_type
520 ct = response.content_type
523 if ct == response.default_content_type:
521 if ct == response.default_content_type:
524 response.content_type = 'application/x-msgpack'
522 response.content_type = 'application/x-msgpack'
525 if bin_type:
523 if bin_type:
526 response.content_type = 'application/x-msgpack-bin'
524 response.content_type = 'application/x-msgpack-bin'
527
525
528 return msgpack.packb(value, use_bin_type=bin_type)
526 return msgpack.packb(value, use_bin_type=bin_type)
529 return _render
527 return _render
530
528
531 def set_env_from_config(self, environ, config):
529 def set_env_from_config(self, environ, config):
532 dict_conf = {}
530 dict_conf = {}
533 try:
531 try:
534 for elem in config:
532 for elem in config:
535 if elem[0] == 'rhodecode':
533 if elem[0] == 'rhodecode':
536 dict_conf = json.loads(elem[2])
534 dict_conf = json.loads(elem[2])
537 break
535 break
538 except Exception:
536 except Exception:
539 log.exception('Failed to fetch SCM CONFIG')
537 log.exception('Failed to fetch SCM CONFIG')
540 return
538 return
541
539
542 username = dict_conf.get('username')
540 username = dict_conf.get('username')
543 if username:
541 if username:
544 environ['REMOTE_USER'] = username
542 environ['REMOTE_USER'] = username
545 # mercurial specific, some extension api rely on this
543 # mercurial specific, some extension api rely on this
546 environ['HGUSER'] = username
544 environ['HGUSER'] = username
547
545
548 ip = dict_conf.get('ip')
546 ip = dict_conf.get('ip')
549 if ip:
547 if ip:
550 environ['REMOTE_HOST'] = ip
548 environ['REMOTE_HOST'] = ip
551
549
552 if _is_request_chunked(environ):
550 if _is_request_chunked(environ):
553 # set the compatibility flag for webob
551 # set the compatibility flag for webob
554 environ['wsgi.input_terminated'] = True
552 environ['wsgi.input_terminated'] = True
555
553
556 def hg_proxy(self):
554 def hg_proxy(self):
557 @wsgiapp
555 @wsgiapp
558 def _hg_proxy(environ, start_response):
556 def _hg_proxy(environ, start_response):
559 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
557 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
560 return app(environ, start_response)
558 return app(environ, start_response)
561 return _hg_proxy
559 return _hg_proxy
562
560
563 def git_proxy(self):
561 def git_proxy(self):
564 @wsgiapp
562 @wsgiapp
565 def _git_proxy(environ, start_response):
563 def _git_proxy(environ, start_response):
566 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
564 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
567 return app(environ, start_response)
565 return app(environ, start_response)
568 return _git_proxy
566 return _git_proxy
569
567
570 def hg_stream(self):
568 def hg_stream(self):
571 if self._use_echo_app:
569 if self._use_echo_app:
572 @wsgiapp
570 @wsgiapp
573 def _hg_stream(environ, start_response):
571 def _hg_stream(environ, start_response):
574 app = EchoApp('fake_path', 'fake_name', None)
572 app = EchoApp('fake_path', 'fake_name', None)
575 return app(environ, start_response)
573 return app(environ, start_response)
576 return _hg_stream
574 return _hg_stream
577 else:
575 else:
578 @wsgiapp
576 @wsgiapp
579 def _hg_stream(environ, start_response):
577 def _hg_stream(environ, start_response):
580 log.debug('http-app: handling hg stream')
578 log.debug('http-app: handling hg stream')
581 call_context = get_headers_call_context(environ)
579 call_context = get_headers_call_context(environ)
582
580
583 repo_path = call_context['repo_path']
581 repo_path = call_context['repo_path']
584 repo_name = call_context['repo_name']
582 repo_name = call_context['repo_name']
585 config = call_context['repo_config']
583 config = call_context['repo_config']
586
584
587 app = scm_app.create_hg_wsgi_app(
585 app = scm_app.create_hg_wsgi_app(
588 repo_path, repo_name, config)
586 repo_path, repo_name, config)
589
587
590 # Consistent path information for hgweb
588 # Consistent path information for hgweb
591 environ['PATH_INFO'] = call_context['path_info']
589 environ['PATH_INFO'] = call_context['path_info']
592 environ['REPO_NAME'] = repo_name
590 environ['REPO_NAME'] = repo_name
593 self.set_env_from_config(environ, config)
591 self.set_env_from_config(environ, config)
594
592
595 log.debug('http-app: starting app handler '
593 log.debug('http-app: starting app handler '
596 'with %s and process request', app)
594 'with %s and process request', app)
597 return app(environ, ResponseFilter(start_response))
595 return app(environ, ResponseFilter(start_response))
598 return _hg_stream
596 return _hg_stream
599
597
600 def git_stream(self):
598 def git_stream(self):
601 if self._use_echo_app:
599 if self._use_echo_app:
602 @wsgiapp
600 @wsgiapp
603 def _git_stream(environ, start_response):
601 def _git_stream(environ, start_response):
604 app = EchoApp('fake_path', 'fake_name', None)
602 app = EchoApp('fake_path', 'fake_name', None)
605 return app(environ, start_response)
603 return app(environ, start_response)
606 return _git_stream
604 return _git_stream
607 else:
605 else:
608 @wsgiapp
606 @wsgiapp
609 def _git_stream(environ, start_response):
607 def _git_stream(environ, start_response):
610 log.debug('http-app: handling git stream')
608 log.debug('http-app: handling git stream')
611
609
612 call_context = get_headers_call_context(environ)
610 call_context = get_headers_call_context(environ)
613
611
614 repo_path = call_context['repo_path']
612 repo_path = call_context['repo_path']
615 repo_name = call_context['repo_name']
613 repo_name = call_context['repo_name']
616 config = call_context['repo_config']
614 config = call_context['repo_config']
617
615
618 environ['PATH_INFO'] = call_context['path_info']
616 environ['PATH_INFO'] = call_context['path_info']
619 self.set_env_from_config(environ, config)
617 self.set_env_from_config(environ, config)
620
618
621 content_type = environ.get('CONTENT_TYPE', '')
619 content_type = environ.get('CONTENT_TYPE', '')
622
620
623 path = environ['PATH_INFO']
621 path = environ['PATH_INFO']
624 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
622 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
625 log.debug(
623 log.debug(
626 'LFS: Detecting if request `%s` is LFS server path based '
624 'LFS: Detecting if request `%s` is LFS server path based '
627 'on content type:`%s`, is_lfs:%s',
625 'on content type:`%s`, is_lfs:%s',
628 path, content_type, is_lfs_request)
626 path, content_type, is_lfs_request)
629
627
630 if not is_lfs_request:
628 if not is_lfs_request:
631 # fallback detection by path
629 # fallback detection by path
632 if GIT_LFS_PROTO_PAT.match(path):
630 if GIT_LFS_PROTO_PAT.match(path):
633 is_lfs_request = True
631 is_lfs_request = True
634 log.debug(
632 log.debug(
635 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
633 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
636 path, is_lfs_request)
634 path, is_lfs_request)
637
635
638 if is_lfs_request:
636 if is_lfs_request:
639 app = scm_app.create_git_lfs_wsgi_app(
637 app = scm_app.create_git_lfs_wsgi_app(
640 repo_path, repo_name, config)
638 repo_path, repo_name, config)
641 else:
639 else:
642 app = scm_app.create_git_wsgi_app(
640 app = scm_app.create_git_wsgi_app(
643 repo_path, repo_name, config)
641 repo_path, repo_name, config)
644
642
645 log.debug('http-app: starting app handler '
643 log.debug('http-app: starting app handler '
646 'with %s and process request', app)
644 'with %s and process request', app)
647
645
648 return app(environ, start_response)
646 return app(environ, start_response)
649
647
650 return _git_stream
648 return _git_stream
651
649
652 def handle_vcs_exception(self, exception, request):
650 def handle_vcs_exception(self, exception, request):
653 _vcs_kind = getattr(exception, '_vcs_kind', '')
651 _vcs_kind = getattr(exception, '_vcs_kind', '')
654
652
655 if _vcs_kind == 'repo_locked':
653 if _vcs_kind == 'repo_locked':
656 headers_call_context = get_headers_call_context(request.environ)
654 headers_call_context = get_headers_call_context(request.environ)
657 status_code = safe_int(headers_call_context['locked_status_code'])
655 status_code = safe_int(headers_call_context['locked_status_code'])
658
656
659 return HTTPRepoLocked(
657 return HTTPRepoLocked(
660 title=str(exception), status_code=status_code, headers=[('X-Rc-Locked', '1')])
658 title=str(exception), status_code=status_code, headers=[('X-Rc-Locked', '1')])
661
659
662 elif _vcs_kind == 'repo_branch_protected':
660 elif _vcs_kind == 'repo_branch_protected':
663 # Get custom repo-branch-protected status code if present.
661 # Get custom repo-branch-protected status code if present.
664 return HTTPRepoBranchProtected(
662 return HTTPRepoBranchProtected(
665 title=str(exception), headers=[('X-Rc-Branch-Protection', '1')])
663 title=str(exception), headers=[('X-Rc-Branch-Protection', '1')])
666
664
667 exc_info = request.exc_info
665 exc_info = request.exc_info
668 store_exception(id(exc_info), exc_info)
666 store_exception(id(exc_info), exc_info)
669
667
670 traceback_info = 'unavailable'
668 traceback_info = 'unavailable'
671 if request.exc_info:
669 if request.exc_info:
672 exc_type, exc_value, exc_tb = request.exc_info
670 traceback_info = format_exc(request.exc_info)
673 traceback_info = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))
674
671
675 log.error(
672 log.error(
676 'error occurred handling this request for path: %s, \n tb: %s',
673 'error occurred handling this request for path: %s, \n%s',
677 request.path, traceback_info)
674 request.path, traceback_info)
678
675
679 statsd = request.registry.statsd
676 statsd = request.registry.statsd
680 if statsd:
677 if statsd:
681 exc_type = f"{exception.__class__.__module__}.{exception.__class__.__name__}"
678 exc_type = f"{exception.__class__.__module__}.{exception.__class__.__name__}"
682 statsd.incr('vcsserver_exception_total',
679 statsd.incr('vcsserver_exception_total',
683 tags=[f"type:{exc_type}"])
680 tags=[f"type:{exc_type}"])
684 raise exception
681 raise exception
685
682
686
683
687 class ResponseFilter(object):
684 class ResponseFilter(object):
688
685
689 def __init__(self, start_response):
686 def __init__(self, start_response):
690 self._start_response = start_response
687 self._start_response = start_response
691
688
692 def __call__(self, status, response_headers, exc_info=None):
689 def __call__(self, status, response_headers, exc_info=None):
693 headers = tuple(
690 headers = tuple(
694 (h, v) for h, v in response_headers
691 (h, v) for h, v in response_headers
695 if not wsgiref.util.is_hop_by_hop(h))
692 if not wsgiref.util.is_hop_by_hop(h))
696 return self._start_response(status, headers, exc_info)
693 return self._start_response(status, headers, exc_info)
697
694
698
695
699 def sanitize_settings_and_apply_defaults(global_config, settings):
696 def sanitize_settings_and_apply_defaults(global_config, settings):
700 global_settings_maker = SettingsMaker(global_config)
697 global_settings_maker = SettingsMaker(global_config)
701 settings_maker = SettingsMaker(settings)
698 settings_maker = SettingsMaker(settings)
702
699
703 settings_maker.make_setting('logging.autoconfigure', False, parser='bool')
700 settings_maker.make_setting('logging.autoconfigure', False, parser='bool')
704
701
705 logging_conf = os.path.join(os.path.dirname(global_config.get('__file__')), 'logging.ini')
702 logging_conf = os.path.join(os.path.dirname(global_config.get('__file__')), 'logging.ini')
706 settings_maker.enable_logging(logging_conf)
703 settings_maker.enable_logging(logging_conf)
707
704
708 # Default includes, possible to change as a user
705 # Default includes, possible to change as a user
709 pyramid_includes = settings_maker.make_setting('pyramid.includes', [], parser='list:newline')
706 pyramid_includes = settings_maker.make_setting('pyramid.includes', [], parser='list:newline')
710 log.debug("Using the following pyramid.includes: %s", pyramid_includes)
707 log.debug("Using the following pyramid.includes: %s", pyramid_includes)
711
708
712 settings_maker.make_setting('__file__', global_config.get('__file__'))
709 settings_maker.make_setting('__file__', global_config.get('__file__'))
713
710
714 settings_maker.make_setting('pyramid.default_locale_name', 'en')
711 settings_maker.make_setting('pyramid.default_locale_name', 'en')
715 settings_maker.make_setting('locale', 'en_US.UTF-8')
712 settings_maker.make_setting('locale', 'en_US.UTF-8')
716
713
717 settings_maker.make_setting('core.binary_dir', '')
714 settings_maker.make_setting('core.binary_dir', '')
718
715
719 temp_store = tempfile.gettempdir()
716 temp_store = tempfile.gettempdir()
720 default_cache_dir = os.path.join(temp_store, 'rc_cache')
717 default_cache_dir = os.path.join(temp_store, 'rc_cache')
721 # save default, cache dir, and use it for all backends later.
718 # save default, cache dir, and use it for all backends later.
722 default_cache_dir = settings_maker.make_setting(
719 default_cache_dir = settings_maker.make_setting(
723 'cache_dir',
720 'cache_dir',
724 default=default_cache_dir, default_when_empty=True,
721 default=default_cache_dir, default_when_empty=True,
725 parser='dir:ensured')
722 parser='dir:ensured')
726
723
727 # exception store cache
724 # exception store cache
728 settings_maker.make_setting(
725 settings_maker.make_setting(
729 'exception_tracker.store_path',
726 'exception_tracker.store_path',
730 default=os.path.join(default_cache_dir, 'exc_store'), default_when_empty=True,
727 default=os.path.join(default_cache_dir, 'exc_store'), default_when_empty=True,
731 parser='dir:ensured'
728 parser='dir:ensured'
732 )
729 )
733
730
734 # repo_object cache defaults
731 # repo_object cache defaults
735 settings_maker.make_setting(
732 settings_maker.make_setting(
736 'rc_cache.repo_object.backend',
733 'rc_cache.repo_object.backend',
737 default='dogpile.cache.rc.file_namespace',
734 default='dogpile.cache.rc.file_namespace',
738 parser='string')
735 parser='string')
739 settings_maker.make_setting(
736 settings_maker.make_setting(
740 'rc_cache.repo_object.expiration_time',
737 'rc_cache.repo_object.expiration_time',
741 default=30 * 24 * 60 * 60, # 30days
738 default=30 * 24 * 60 * 60, # 30days
742 parser='int')
739 parser='int')
743 settings_maker.make_setting(
740 settings_maker.make_setting(
744 'rc_cache.repo_object.arguments.filename',
741 'rc_cache.repo_object.arguments.filename',
745 default=os.path.join(default_cache_dir, 'vcsserver_cache_repo_object.db'),
742 default=os.path.join(default_cache_dir, 'vcsserver_cache_repo_object.db'),
746 parser='string')
743 parser='string')
747
744
748 # statsd
745 # statsd
749 settings_maker.make_setting('statsd.enabled', False, parser='bool')
746 settings_maker.make_setting('statsd.enabled', False, parser='bool')
750 settings_maker.make_setting('statsd.statsd_host', 'statsd-exporter', parser='string')
747 settings_maker.make_setting('statsd.statsd_host', 'statsd-exporter', parser='string')
751 settings_maker.make_setting('statsd.statsd_port', 9125, parser='int')
748 settings_maker.make_setting('statsd.statsd_port', 9125, parser='int')
752 settings_maker.make_setting('statsd.statsd_prefix', '')
749 settings_maker.make_setting('statsd.statsd_prefix', '')
753 settings_maker.make_setting('statsd.statsd_ipv6', False, parser='bool')
750 settings_maker.make_setting('statsd.statsd_ipv6', False, parser='bool')
754
751
755 settings_maker.env_expand()
752 settings_maker.env_expand()
756
753
757
754
758 def main(global_config, **settings):
755 def main(global_config, **settings):
759 start_time = time.time()
756 start_time = time.time()
760 log.info('Pyramid app config starting')
757 log.info('Pyramid app config starting')
761
758
762 if MercurialFactory:
759 if MercurialFactory:
763 hgpatches.patch_largefiles_capabilities()
760 hgpatches.patch_largefiles_capabilities()
764 hgpatches.patch_subrepo_type_mapping()
761 hgpatches.patch_subrepo_type_mapping()
765
762
766 # Fill in and sanitize the defaults & do ENV expansion
763 # Fill in and sanitize the defaults & do ENV expansion
767 sanitize_settings_and_apply_defaults(global_config, settings)
764 sanitize_settings_and_apply_defaults(global_config, settings)
768
765
769 # init and bootstrap StatsdClient
766 # init and bootstrap StatsdClient
770 StatsdClient.setup(settings)
767 StatsdClient.setup(settings)
771
768
772 pyramid_app = HTTPApplication(settings=settings, global_config=global_config).wsgi_app()
769 pyramid_app = HTTPApplication(settings=settings, global_config=global_config).wsgi_app()
773 total_time = time.time() - start_time
770 total_time = time.time() - start_time
774 log.info('Pyramid app created and configured in %.2fs', total_time)
771 log.info('Pyramid app created and configured in %.2fs', total_time)
775 return pyramid_app
772 return pyramid_app
776
773
777
774
@@ -1,172 +1,267 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19 import os
19 import os
20 import time
20 import time
21 import sys
21 import datetime
22 import datetime
22 import msgpack
23 import msgpack
23 import logging
24 import logging
24 import traceback
25 import traceback
25 import tempfile
26 import tempfile
27 import glob
26
28
27 log = logging.getLogger(__name__)
29 log = logging.getLogger(__name__)
28
30
29 # NOTE: Any changes should be synced with exc_tracking at rhodecode.lib.exc_tracking
31 # NOTE: Any changes should be synced with exc_tracking at rhodecode.lib.exc_tracking
30 global_prefix = 'vcsserver'
32 global_prefix = 'vcsserver'
31 exc_store_dir_name = 'rc_exception_store_v1'
33 exc_store_dir_name = 'rc_exception_store_v1'
32
34
33
35
34 def exc_serialize(exc_id, tb, exc_type):
36 def exc_serialize(exc_id, tb, exc_type, extra_data=None):
35
37
36 data = {
38 data = {
37 'version': 'v1',
39 'version': 'v1',
38 'exc_id': exc_id,
40 'exc_id': exc_id,
39 'exc_utc_date': datetime.datetime.utcnow().isoformat(),
41 'exc_utc_date': datetime.datetime.utcnow().isoformat(),
40 'exc_timestamp': repr(time.time()),
42 'exc_timestamp': repr(time.time()),
41 'exc_message': tb,
43 'exc_message': tb,
42 'exc_type': exc_type,
44 'exc_type': exc_type,
43 }
45 }
46 if extra_data:
47 data.update(extra_data)
44 return msgpack.packb(data), data
48 return msgpack.packb(data), data
45
49
46
50
47 def exc_unserialize(tb):
51 def exc_unserialize(tb):
48 return msgpack.unpackb(tb)
52 return msgpack.unpackb(tb)
49
53
50
54
55 _exc_store = None
56
57
51 def get_exc_store():
58 def get_exc_store():
52 """
59 """
53 Get and create exception store if it's not existing
60 Get and create exception store if it's not existing
54 """
61 """
62 global _exc_store
63
64 if _exc_store is not None:
65 # quick global cache
66 return _exc_store
67
55 import vcsserver as app
68 import vcsserver as app
56
69
57 exc_store_dir = app.CONFIG.get('exception_tracker.store_path', '') or tempfile.gettempdir()
70 exc_store_dir = app.CONFIG.get('exception_tracker.store_path', '') or tempfile.gettempdir()
58 _exc_store_path = os.path.join(exc_store_dir, exc_store_dir_name)
71 _exc_store_path = os.path.join(exc_store_dir, exc_store_dir_name)
59
72
60 _exc_store_path = os.path.abspath(_exc_store_path)
73 _exc_store_path = os.path.abspath(_exc_store_path)
61 if not os.path.isdir(_exc_store_path):
74 if not os.path.isdir(_exc_store_path):
62 os.makedirs(_exc_store_path)
75 os.makedirs(_exc_store_path)
63 log.debug('Initializing exceptions store at %s', _exc_store_path)
76 log.debug('Initializing exceptions store at %s', _exc_store_path)
77 _exc_store = _exc_store_path
78
64 return _exc_store_path
79 return _exc_store_path
65
80
66
81
67 def _store_exception(exc_id, exc_info, prefix, request_path=''):
82 def get_detailed_tb(exc_info):
68 exc_type, exc_value, exc_traceback = exc_info
83 from io import StringIO
84
85 try:
86 from pip._vendor.rich import traceback as rich_tb, scope as rich_scope, console as rich_console
87 except ImportError:
88 try:
89 from rich import traceback as rich_tb, scope as rich_scope, console as rich_console
90 except ImportError:
91 return None
92
93 console = rich_console.Console(
94 width=160,
95 file=StringIO()
96 )
97
98 exc = rich_tb.Traceback.extract(*exc_info, show_locals=True)
99
100 tb_rich = rich_tb.Traceback(
101 trace=exc,
102 width=160,
103 extra_lines=3,
104 theme=None,
105 word_wrap=False,
106 show_locals=False,
107 max_frames=100
108 )
109
110 formatted_locals = ""
69
111
70 tb = ''.join(traceback.format_exception(
112 # last_stack = exc.stacks[-1]
71 exc_type, exc_value, exc_traceback, None))
113 # last_frame = last_stack.frames[-1]
114 # if last_frame and last_frame.locals:
115 # console.print(
116 # rich_scope.render_scope(
117 # last_frame.locals,
118 # title=f'{last_frame.filename}:{last_frame.lineno}'))
119
120 console.print(tb_rich)
121 formatted_locals = console.file.getvalue()
122
123 return formatted_locals
124
72
125
73 detailed_tb = getattr(exc_value, '_org_exc_tb', None)
126 def get_request_metadata(request=None) -> dict:
127 request_metadata = {}
128 if not request:
129 from pyramid.threadlocal import get_current_request
130 request = get_current_request()
131
132 # NOTE(marcink): store request information into exc_data
133 if request:
134 request_metadata['client_address'] = getattr(request, 'client_addr', '')
135 request_metadata['user_agent'] = getattr(request, 'user_agent', '')
136 request_metadata['method'] = getattr(request, 'method', '')
137 request_metadata['url'] = getattr(request, 'url', '')
138 return request_metadata
139
140
141 def format_exc(exc_info):
142 exc_type, exc_value, exc_traceback = exc_info
143 tb = "++ TRACEBACK ++\n\n"
144 tb += "".join(traceback.format_exception(exc_type, exc_value, exc_traceback, None))
145
146 detailed_tb = getattr(exc_value, "_org_exc_tb", None)
74
147
75 if detailed_tb:
148 if detailed_tb:
76 remote_tb = detailed_tb
149 remote_tb = detailed_tb
77 if isinstance(detailed_tb, str):
150 if isinstance(detailed_tb, str):
78 remote_tb = [detailed_tb]
151 remote_tb = [detailed_tb]
79
152
80 tb += (
153 tb += (
81 '\n+++ BEG SOURCE EXCEPTION +++\n\n'
154 "\n+++ BEG SOURCE EXCEPTION +++\n\n"
82 '{}\n'
155 "{}\n"
83 '+++ END SOURCE EXCEPTION +++\n'
156 "+++ END SOURCE EXCEPTION +++\n"
84 ''.format('\n'.join(remote_tb))
157 "".format("\n".join(remote_tb))
85 )
158 )
86
159
87 # Avoid that remote_tb also appears in the frame
160 # Avoid that remote_tb also appears in the frame
88 del remote_tb
161 del remote_tb
89
162
163 locals_tb = get_detailed_tb(exc_info)
164 if locals_tb:
165 tb += f"\n+++ DETAILS +++\n\n{locals_tb}\n" ""
166 return tb
167
168
169 def _store_exception(exc_id, exc_info, prefix, request_path=''):
170 """
171 Low level function to store exception in the exception tracker
172 """
173
174 extra_data = {}
175 extra_data.update(get_request_metadata())
176
177 exc_type, exc_value, exc_traceback = exc_info
178 tb = format_exc(exc_info)
179
90 exc_type_name = exc_type.__name__
180 exc_type_name = exc_type.__name__
181 exc_data, org_data = exc_serialize(exc_id, tb, exc_type_name, extra_data=extra_data)
182
183 exc_pref_id = '{}_{}_{}'.format(exc_id, prefix, org_data['exc_timestamp'])
91 exc_store_path = get_exc_store()
184 exc_store_path = get_exc_store()
92 exc_data, org_data = exc_serialize(exc_id, tb, exc_type_name)
93 exc_pref_id = '{}_{}_{}'.format(exc_id, prefix, org_data['exc_timestamp'])
94 if not os.path.isdir(exc_store_path):
185 if not os.path.isdir(exc_store_path):
95 os.makedirs(exc_store_path)
186 os.makedirs(exc_store_path)
96 stored_exc_path = os.path.join(exc_store_path, exc_pref_id)
187 stored_exc_path = os.path.join(exc_store_path, exc_pref_id)
97 with open(stored_exc_path, 'wb') as f:
188 with open(stored_exc_path, 'wb') as f:
98 f.write(exc_data)
189 f.write(exc_data)
99 log.debug('Stored generated exception %s as: %s', exc_id, stored_exc_path)
190 log.debug('Stored generated exception %s as: %s', exc_id, stored_exc_path)
100
191
101 log.error(
192 log.error(
102 'error occurred handling this request.\n'
193 'error occurred handling this request.\n'
103 'Path: `%s`, tb: %s',
194 'Path: `%s`, %s',
104 request_path, tb)
195 request_path, tb)
105
196
106
197
107 def store_exception(exc_id, exc_info, prefix=global_prefix, request_path=''):
198 def store_exception(exc_id, exc_info, prefix=global_prefix, request_path=''):
108 """
199 """
109 Example usage::
200 Example usage::
110
201
111 exc_info = sys.exc_info()
202 exc_info = sys.exc_info()
112 store_exception(id(exc_info), exc_info)
203 store_exception(id(exc_info), exc_info)
113 """
204 """
114
205
115 try:
206 try:
207 exc_type = exc_info[0]
208 exc_type_name = exc_type.__name__
209
116 _store_exception(exc_id=exc_id, exc_info=exc_info, prefix=prefix,
210 _store_exception(exc_id=exc_id, exc_info=exc_info, prefix=prefix,
117 request_path=request_path)
211 request_path=request_path)
212 return exc_id, exc_type_name
118 except Exception:
213 except Exception:
119 log.exception('Failed to store exception `%s` information', exc_id)
214 log.exception('Failed to store exception `%s` information', exc_id)
120 # there's no way this can fail, it will crash server badly if it does.
215 # there's no way this can fail, it will crash server badly if it does.
121 pass
216 pass
122
217
123
218
124 def _find_exc_file(exc_id, prefix=global_prefix):
219 def _find_exc_file(exc_id, prefix=global_prefix):
125 exc_store_path = get_exc_store()
220 exc_store_path = get_exc_store()
126 if prefix:
221 if prefix:
127 exc_id = f'{exc_id}_{prefix}'
222 exc_id = f'{exc_id}_{prefix}'
128 else:
223 else:
129 # search without a prefix
224 # search without a prefix
130 exc_id = f'{exc_id}'
225 exc_id = f'{exc_id}'
131
226
132 # we need to search the store for such start pattern as above
227 found_exc_id = None
133 for fname in os.listdir(exc_store_path):
228 matches = glob.glob(os.path.join(exc_store_path, exc_id) + '*')
134 if fname.startswith(exc_id):
229 if matches:
135 exc_id = os.path.join(exc_store_path, fname)
230 found_exc_id = matches[0]
136 break
137 continue
138 else:
139 exc_id = None
140
231
141 return exc_id
232 return found_exc_id
142
233
143
234
144 def _read_exception(exc_id, prefix):
235 def _read_exception(exc_id, prefix):
145 exc_id_file_path = _find_exc_file(exc_id=exc_id, prefix=prefix)
236 exc_id_file_path = _find_exc_file(exc_id=exc_id, prefix=prefix)
146 if exc_id_file_path:
237 if exc_id_file_path:
147 with open(exc_id_file_path, 'rb') as f:
238 with open(exc_id_file_path, 'rb') as f:
148 return exc_unserialize(f.read())
239 return exc_unserialize(f.read())
149 else:
240 else:
150 log.debug('Exception File `%s` not found', exc_id_file_path)
241 log.debug('Exception File `%s` not found', exc_id_file_path)
151 return None
242 return None
152
243
153
244
154 def read_exception(exc_id, prefix=global_prefix):
245 def read_exception(exc_id, prefix=global_prefix):
155 try:
246 try:
156 return _read_exception(exc_id=exc_id, prefix=prefix)
247 return _read_exception(exc_id=exc_id, prefix=prefix)
157 except Exception:
248 except Exception:
158 log.exception('Failed to read exception `%s` information', exc_id)
249 log.exception('Failed to read exception `%s` information', exc_id)
159 # there's no way this can fail, it will crash server badly if it does.
250 # there's no way this can fail, it will crash server badly if it does.
160 return None
251 return None
161
252
162
253
163 def delete_exception(exc_id, prefix=global_prefix):
254 def delete_exception(exc_id, prefix=global_prefix):
164 try:
255 try:
165 exc_id_file_path = _find_exc_file(exc_id, prefix=prefix)
256 exc_id_file_path = _find_exc_file(exc_id, prefix=prefix)
166 if exc_id_file_path:
257 if exc_id_file_path:
167 os.remove(exc_id_file_path)
258 os.remove(exc_id_file_path)
168
259
169 except Exception:
260 except Exception:
170 log.exception('Failed to remove exception `%s` information', exc_id)
261 log.exception('Failed to remove exception `%s` information', exc_id)
171 # there's no way this can fail, it will crash server badly if it does.
262 # there's no way this can fail, it will crash server badly if it does.
172 pass
263 pass
264
265
266 def generate_id():
267 return id(object())
@@ -1,1164 +1,1199 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import binascii
17 import binascii
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import sys
21 import urllib.request
22 import urllib.request
22 import urllib.parse
23 import urllib.parse
23 import traceback
24 import hashlib
24 import hashlib
25
25
26 from hgext import largefiles, rebase, purge
26 from hgext import largefiles, rebase, purge
27
27
28 from mercurial import commands
28 from mercurial import commands
29 from mercurial import unionrepo
29 from mercurial import unionrepo
30 from mercurial import verify
30 from mercurial import verify
31 from mercurial import repair
31 from mercurial import repair
32 from mercurial.error import AmbiguousPrefixLookupError
32 from mercurial.error import AmbiguousPrefixLookupError
33
33
34 import vcsserver
34 import vcsserver
35 from vcsserver import exceptions
35 from vcsserver import exceptions
36 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, store_archive_in_cache, ArchiveNode, BytesEnvelope, \
36 from vcsserver.base import (
37 BinaryEnvelope
37 RepoFactory,
38 obfuscate_qs,
39 raise_from_original,
40 store_archive_in_cache,
41 ArchiveNode,
42 BytesEnvelope,
43 BinaryEnvelope,
44 )
38 from vcsserver.hgcompat import (
45 from vcsserver.hgcompat import (
39 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
46 archival,
40 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
47 bin,
41 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
48 clone,
42 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
49 config as hgconfig,
43 RepoLookupError, InterventionRequired, RequirementError,
50 diffopts,
44 alwaysmatcher, patternmatcher, hgutil, hgext_strip)
51 hex,
52 get_ctx,
53 hg_url as url_parser,
54 httpbasicauthhandler,
55 httpdigestauthhandler,
56 makepeer,
57 instance,
58 match,
59 memctx,
60 exchange,
61 memfilectx,
62 nullrev,
63 hg_merge,
64 patch,
65 peer,
66 revrange,
67 ui,
68 hg_tag,
69 Abort,
70 LookupError,
71 RepoError,
72 RepoLookupError,
73 InterventionRequired,
74 RequirementError,
75 alwaysmatcher,
76 patternmatcher,
77 hgutil,
78 hgext_strip,
79 )
45 from vcsserver.str_utils import ascii_bytes, ascii_str, safe_str, safe_bytes
80 from vcsserver.str_utils import ascii_bytes, ascii_str, safe_str, safe_bytes
46 from vcsserver.vcs_base import RemoteBase
81 from vcsserver.vcs_base import RemoteBase
47 from vcsserver.config import hooks as hooks_config
82 from vcsserver.config import hooks as hooks_config
48
83 from vcsserver.lib.exc_tracking import format_exc
49
84
50 log = logging.getLogger(__name__)
85 log = logging.getLogger(__name__)
51
86
52
87
53 def make_ui_from_config(repo_config):
88 def make_ui_from_config(repo_config):
54
89
55 class LoggingUI(ui.ui):
90 class LoggingUI(ui.ui):
56
91
57 def status(self, *msg, **opts):
92 def status(self, *msg, **opts):
58 str_msg = map(safe_str, msg)
93 str_msg = map(safe_str, msg)
59 log.info(' '.join(str_msg).rstrip('\n'))
94 log.info(' '.join(str_msg).rstrip('\n'))
60 #super(LoggingUI, self).status(*msg, **opts)
95 #super(LoggingUI, self).status(*msg, **opts)
61
96
62 def warn(self, *msg, **opts):
97 def warn(self, *msg, **opts):
63 str_msg = map(safe_str, msg)
98 str_msg = map(safe_str, msg)
64 log.warning('ui_logger:'+' '.join(str_msg).rstrip('\n'))
99 log.warning('ui_logger:'+' '.join(str_msg).rstrip('\n'))
65 #super(LoggingUI, self).warn(*msg, **opts)
100 #super(LoggingUI, self).warn(*msg, **opts)
66
101
67 def error(self, *msg, **opts):
102 def error(self, *msg, **opts):
68 str_msg = map(safe_str, msg)
103 str_msg = map(safe_str, msg)
69 log.error('ui_logger:'+' '.join(str_msg).rstrip('\n'))
104 log.error('ui_logger:'+' '.join(str_msg).rstrip('\n'))
70 #super(LoggingUI, self).error(*msg, **opts)
105 #super(LoggingUI, self).error(*msg, **opts)
71
106
72 def note(self, *msg, **opts):
107 def note(self, *msg, **opts):
73 str_msg = map(safe_str, msg)
108 str_msg = map(safe_str, msg)
74 log.info('ui_logger:'+' '.join(str_msg).rstrip('\n'))
109 log.info('ui_logger:'+' '.join(str_msg).rstrip('\n'))
75 #super(LoggingUI, self).note(*msg, **opts)
110 #super(LoggingUI, self).note(*msg, **opts)
76
111
77 def debug(self, *msg, **opts):
112 def debug(self, *msg, **opts):
78 str_msg = map(safe_str, msg)
113 str_msg = map(safe_str, msg)
79 log.debug('ui_logger:'+' '.join(str_msg).rstrip('\n'))
114 log.debug('ui_logger:'+' '.join(str_msg).rstrip('\n'))
80 #super(LoggingUI, self).debug(*msg, **opts)
115 #super(LoggingUI, self).debug(*msg, **opts)
81
116
82 baseui = LoggingUI()
117 baseui = LoggingUI()
83
118
84 # clean the baseui object
119 # clean the baseui object
85 baseui._ocfg = hgconfig.config()
120 baseui._ocfg = hgconfig.config()
86 baseui._ucfg = hgconfig.config()
121 baseui._ucfg = hgconfig.config()
87 baseui._tcfg = hgconfig.config()
122 baseui._tcfg = hgconfig.config()
88
123
89 for section, option, value in repo_config:
124 for section, option, value in repo_config:
90 baseui.setconfig(ascii_bytes(section), ascii_bytes(option), ascii_bytes(value))
125 baseui.setconfig(ascii_bytes(section), ascii_bytes(option), ascii_bytes(value))
91
126
92 # make our hgweb quiet so it doesn't print output
127 # make our hgweb quiet so it doesn't print output
93 baseui.setconfig(b'ui', b'quiet', b'true')
128 baseui.setconfig(b'ui', b'quiet', b'true')
94
129
95 baseui.setconfig(b'ui', b'paginate', b'never')
130 baseui.setconfig(b'ui', b'paginate', b'never')
96 # for better Error reporting of Mercurial
131 # for better Error reporting of Mercurial
97 baseui.setconfig(b'ui', b'message-output', b'stderr')
132 baseui.setconfig(b'ui', b'message-output', b'stderr')
98
133
99 # force mercurial to only use 1 thread, otherwise it may try to set a
134 # force mercurial to only use 1 thread, otherwise it may try to set a
100 # signal in a non-main thread, thus generating a ValueError.
135 # signal in a non-main thread, thus generating a ValueError.
101 baseui.setconfig(b'worker', b'numcpus', 1)
136 baseui.setconfig(b'worker', b'numcpus', 1)
102
137
103 # If there is no config for the largefiles extension, we explicitly disable
138 # If there is no config for the largefiles extension, we explicitly disable
104 # it here. This overrides settings from repositories hgrc file. Recent
139 # it here. This overrides settings from repositories hgrc file. Recent
105 # mercurial versions enable largefiles in hgrc on clone from largefile
140 # mercurial versions enable largefiles in hgrc on clone from largefile
106 # repo.
141 # repo.
107 if not baseui.hasconfig(b'extensions', b'largefiles'):
142 if not baseui.hasconfig(b'extensions', b'largefiles'):
108 log.debug('Explicitly disable largefiles extension for repo.')
143 log.debug('Explicitly disable largefiles extension for repo.')
109 baseui.setconfig(b'extensions', b'largefiles', b'!')
144 baseui.setconfig(b'extensions', b'largefiles', b'!')
110
145
111 return baseui
146 return baseui
112
147
113
148
114 def reraise_safe_exceptions(func):
149 def reraise_safe_exceptions(func):
115 """Decorator for converting mercurial exceptions to something neutral."""
150 """Decorator for converting mercurial exceptions to something neutral."""
116
151
117 def wrapper(*args, **kwargs):
152 def wrapper(*args, **kwargs):
118 try:
153 try:
119 return func(*args, **kwargs)
154 return func(*args, **kwargs)
120 except (Abort, InterventionRequired) as e:
155 except (Abort, InterventionRequired) as e:
121 raise_from_original(exceptions.AbortException(e), e)
156 raise_from_original(exceptions.AbortException(e), e)
122 except RepoLookupError as e:
157 except RepoLookupError as e:
123 raise_from_original(exceptions.LookupException(e), e)
158 raise_from_original(exceptions.LookupException(e), e)
124 except RequirementError as e:
159 except RequirementError as e:
125 raise_from_original(exceptions.RequirementException(e), e)
160 raise_from_original(exceptions.RequirementException(e), e)
126 except RepoError as e:
161 except RepoError as e:
127 raise_from_original(exceptions.VcsException(e), e)
162 raise_from_original(exceptions.VcsException(e), e)
128 except LookupError as e:
163 except LookupError as e:
129 raise_from_original(exceptions.LookupException(e), e)
164 raise_from_original(exceptions.LookupException(e), e)
130 except Exception as e:
165 except Exception as e:
131 if not hasattr(e, '_vcs_kind'):
166 if not hasattr(e, '_vcs_kind'):
132 log.exception("Unhandled exception in hg remote call")
167 log.exception("Unhandled exception in hg remote call")
133 raise_from_original(exceptions.UnhandledException(e), e)
168 raise_from_original(exceptions.UnhandledException(e), e)
134
169
135 raise
170 raise
136 return wrapper
171 return wrapper
137
172
138
173
139 class MercurialFactory(RepoFactory):
174 class MercurialFactory(RepoFactory):
140 repo_type = 'hg'
175 repo_type = 'hg'
141
176
142 def _create_config(self, config, hooks=True):
177 def _create_config(self, config, hooks=True):
143 if not hooks:
178 if not hooks:
144
179
145 hooks_to_clean = {
180 hooks_to_clean = {
146
181
147 hooks_config.HOOK_REPO_SIZE,
182 hooks_config.HOOK_REPO_SIZE,
148 hooks_config.HOOK_PRE_PULL,
183 hooks_config.HOOK_PRE_PULL,
149 hooks_config.HOOK_PULL,
184 hooks_config.HOOK_PULL,
150
185
151 hooks_config.HOOK_PRE_PUSH,
186 hooks_config.HOOK_PRE_PUSH,
152 # TODO: what about PRETXT, this was disabled in pre 5.0.0
187 # TODO: what about PRETXT, this was disabled in pre 5.0.0
153 hooks_config.HOOK_PRETX_PUSH,
188 hooks_config.HOOK_PRETX_PUSH,
154
189
155 }
190 }
156 new_config = []
191 new_config = []
157 for section, option, value in config:
192 for section, option, value in config:
158 if section == 'hooks' and option in hooks_to_clean:
193 if section == 'hooks' and option in hooks_to_clean:
159 continue
194 continue
160 new_config.append((section, option, value))
195 new_config.append((section, option, value))
161 config = new_config
196 config = new_config
162
197
163 baseui = make_ui_from_config(config)
198 baseui = make_ui_from_config(config)
164 return baseui
199 return baseui
165
200
166 def _create_repo(self, wire, create):
201 def _create_repo(self, wire, create):
167 baseui = self._create_config(wire["config"])
202 baseui = self._create_config(wire["config"])
168 repo = instance(baseui, safe_bytes(wire["path"]), create)
203 repo = instance(baseui, safe_bytes(wire["path"]), create)
169 log.debug('repository created: got HG object: %s', repo)
204 log.debug('repository created: got HG object: %s', repo)
170 return repo
205 return repo
171
206
172 def repo(self, wire, create=False):
207 def repo(self, wire, create=False):
173 """
208 """
174 Get a repository instance for the given path.
209 Get a repository instance for the given path.
175 """
210 """
176 return self._create_repo(wire, create)
211 return self._create_repo(wire, create)
177
212
178
213
179 def patch_ui_message_output(baseui):
214 def patch_ui_message_output(baseui):
180 baseui.setconfig(b'ui', b'quiet', b'false')
215 baseui.setconfig(b'ui', b'quiet', b'false')
181 output = io.BytesIO()
216 output = io.BytesIO()
182
217
183 def write(data, **unused_kwargs):
218 def write(data, **unused_kwargs):
184 output.write(data)
219 output.write(data)
185
220
186 baseui.status = write
221 baseui.status = write
187 baseui.write = write
222 baseui.write = write
188 baseui.warn = write
223 baseui.warn = write
189 baseui.debug = write
224 baseui.debug = write
190
225
191 return baseui, output
226 return baseui, output
192
227
193
228
194 def get_obfuscated_url(url_obj):
229 def get_obfuscated_url(url_obj):
195 url_obj.passwd = b'*****' if url_obj.passwd else url_obj.passwd
230 url_obj.passwd = b'*****' if url_obj.passwd else url_obj.passwd
196 url_obj.query = obfuscate_qs(url_obj.query)
231 url_obj.query = obfuscate_qs(url_obj.query)
197 obfuscated_uri = str(url_obj)
232 obfuscated_uri = str(url_obj)
198 return obfuscated_uri
233 return obfuscated_uri
199
234
200
235
201 def normalize_url_for_hg(url: str):
236 def normalize_url_for_hg(url: str):
202 _proto = None
237 _proto = None
203
238
204 if '+' in url[:url.find('://')]:
239 if '+' in url[:url.find('://')]:
205 _proto = url[0:url.find('+')]
240 _proto = url[0:url.find('+')]
206 url = url[url.find('+') + 1:]
241 url = url[url.find('+') + 1:]
207 return url, _proto
242 return url, _proto
208
243
209
244
210 class HgRemote(RemoteBase):
245 class HgRemote(RemoteBase):
211
246
212 def __init__(self, factory):
247 def __init__(self, factory):
213 self._factory = factory
248 self._factory = factory
214 self._bulk_methods = {
249 self._bulk_methods = {
215 "affected_files": self.ctx_files,
250 "affected_files": self.ctx_files,
216 "author": self.ctx_user,
251 "author": self.ctx_user,
217 "branch": self.ctx_branch,
252 "branch": self.ctx_branch,
218 "children": self.ctx_children,
253 "children": self.ctx_children,
219 "date": self.ctx_date,
254 "date": self.ctx_date,
220 "message": self.ctx_description,
255 "message": self.ctx_description,
221 "parents": self.ctx_parents,
256 "parents": self.ctx_parents,
222 "status": self.ctx_status,
257 "status": self.ctx_status,
223 "obsolete": self.ctx_obsolete,
258 "obsolete": self.ctx_obsolete,
224 "phase": self.ctx_phase,
259 "phase": self.ctx_phase,
225 "hidden": self.ctx_hidden,
260 "hidden": self.ctx_hidden,
226 "_file_paths": self.ctx_list,
261 "_file_paths": self.ctx_list,
227 }
262 }
228 self._bulk_file_methods = {
263 self._bulk_file_methods = {
229 "size": self.fctx_size,
264 "size": self.fctx_size,
230 "data": self.fctx_node_data,
265 "data": self.fctx_node_data,
231 "flags": self.fctx_flags,
266 "flags": self.fctx_flags,
232 "is_binary": self.is_binary,
267 "is_binary": self.is_binary,
233 "md5": self.md5_hash,
268 "md5": self.md5_hash,
234 }
269 }
235
270
236 def _get_ctx(self, repo, ref):
271 def _get_ctx(self, repo, ref):
237 return get_ctx(repo, ref)
272 return get_ctx(repo, ref)
238
273
239 @reraise_safe_exceptions
274 @reraise_safe_exceptions
240 def discover_hg_version(self):
275 def discover_hg_version(self):
241 from mercurial import util
276 from mercurial import util
242 return safe_str(util.version())
277 return safe_str(util.version())
243
278
244 @reraise_safe_exceptions
279 @reraise_safe_exceptions
245 def is_empty(self, wire):
280 def is_empty(self, wire):
246 repo = self._factory.repo(wire)
281 repo = self._factory.repo(wire)
247
282
248 try:
283 try:
249 return len(repo) == 0
284 return len(repo) == 0
250 except Exception:
285 except Exception:
251 log.exception("failed to read object_store")
286 log.exception("failed to read object_store")
252 return False
287 return False
253
288
254 @reraise_safe_exceptions
289 @reraise_safe_exceptions
255 def bookmarks(self, wire):
290 def bookmarks(self, wire):
256 cache_on, context_uid, repo_id = self._cache_on(wire)
291 cache_on, context_uid, repo_id = self._cache_on(wire)
257 region = self._region(wire)
292 region = self._region(wire)
258
293
259 @region.conditional_cache_on_arguments(condition=cache_on)
294 @region.conditional_cache_on_arguments(condition=cache_on)
260 def _bookmarks(_context_uid, _repo_id):
295 def _bookmarks(_context_uid, _repo_id):
261 repo = self._factory.repo(wire)
296 repo = self._factory.repo(wire)
262 return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo._bookmarks.items()}
297 return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo._bookmarks.items()}
263
298
264 return _bookmarks(context_uid, repo_id)
299 return _bookmarks(context_uid, repo_id)
265
300
266 @reraise_safe_exceptions
301 @reraise_safe_exceptions
267 def branches(self, wire, normal, closed):
302 def branches(self, wire, normal, closed):
268 cache_on, context_uid, repo_id = self._cache_on(wire)
303 cache_on, context_uid, repo_id = self._cache_on(wire)
269 region = self._region(wire)
304 region = self._region(wire)
270
305
271 @region.conditional_cache_on_arguments(condition=cache_on)
306 @region.conditional_cache_on_arguments(condition=cache_on)
272 def _branches(_context_uid, _repo_id, _normal, _closed):
307 def _branches(_context_uid, _repo_id, _normal, _closed):
273 repo = self._factory.repo(wire)
308 repo = self._factory.repo(wire)
274 iter_branches = repo.branchmap().iterbranches()
309 iter_branches = repo.branchmap().iterbranches()
275 bt = {}
310 bt = {}
276 for branch_name, _heads, tip_node, is_closed in iter_branches:
311 for branch_name, _heads, tip_node, is_closed in iter_branches:
277 if normal and not is_closed:
312 if normal and not is_closed:
278 bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
313 bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
279 if closed and is_closed:
314 if closed and is_closed:
280 bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
315 bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
281
316
282 return bt
317 return bt
283
318
284 return _branches(context_uid, repo_id, normal, closed)
319 return _branches(context_uid, repo_id, normal, closed)
285
320
286 @reraise_safe_exceptions
321 @reraise_safe_exceptions
287 def bulk_request(self, wire, commit_id, pre_load):
322 def bulk_request(self, wire, commit_id, pre_load):
288 cache_on, context_uid, repo_id = self._cache_on(wire)
323 cache_on, context_uid, repo_id = self._cache_on(wire)
289 region = self._region(wire)
324 region = self._region(wire)
290
325
291 @region.conditional_cache_on_arguments(condition=cache_on)
326 @region.conditional_cache_on_arguments(condition=cache_on)
292 def _bulk_request(_repo_id, _commit_id, _pre_load):
327 def _bulk_request(_repo_id, _commit_id, _pre_load):
293 result = {}
328 result = {}
294 for attr in pre_load:
329 for attr in pre_load:
295 try:
330 try:
296 method = self._bulk_methods[attr]
331 method = self._bulk_methods[attr]
297 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
332 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
298 result[attr] = method(wire, commit_id)
333 result[attr] = method(wire, commit_id)
299 except KeyError as e:
334 except KeyError as e:
300 raise exceptions.VcsException(e)(
335 raise exceptions.VcsException(e)(
301 'Unknown bulk attribute: "%s"' % attr)
336 'Unknown bulk attribute: "%s"' % attr)
302 return result
337 return result
303
338
304 return _bulk_request(repo_id, commit_id, sorted(pre_load))
339 return _bulk_request(repo_id, commit_id, sorted(pre_load))
305
340
306 @reraise_safe_exceptions
341 @reraise_safe_exceptions
307 def ctx_branch(self, wire, commit_id):
342 def ctx_branch(self, wire, commit_id):
308 cache_on, context_uid, repo_id = self._cache_on(wire)
343 cache_on, context_uid, repo_id = self._cache_on(wire)
309 region = self._region(wire)
344 region = self._region(wire)
310
345
311 @region.conditional_cache_on_arguments(condition=cache_on)
346 @region.conditional_cache_on_arguments(condition=cache_on)
312 def _ctx_branch(_repo_id, _commit_id):
347 def _ctx_branch(_repo_id, _commit_id):
313 repo = self._factory.repo(wire)
348 repo = self._factory.repo(wire)
314 ctx = self._get_ctx(repo, commit_id)
349 ctx = self._get_ctx(repo, commit_id)
315 return ctx.branch()
350 return ctx.branch()
316 return _ctx_branch(repo_id, commit_id)
351 return _ctx_branch(repo_id, commit_id)
317
352
318 @reraise_safe_exceptions
353 @reraise_safe_exceptions
319 def ctx_date(self, wire, commit_id):
354 def ctx_date(self, wire, commit_id):
320 cache_on, context_uid, repo_id = self._cache_on(wire)
355 cache_on, context_uid, repo_id = self._cache_on(wire)
321 region = self._region(wire)
356 region = self._region(wire)
322
357
323 @region.conditional_cache_on_arguments(condition=cache_on)
358 @region.conditional_cache_on_arguments(condition=cache_on)
324 def _ctx_date(_repo_id, _commit_id):
359 def _ctx_date(_repo_id, _commit_id):
325 repo = self._factory.repo(wire)
360 repo = self._factory.repo(wire)
326 ctx = self._get_ctx(repo, commit_id)
361 ctx = self._get_ctx(repo, commit_id)
327 return ctx.date()
362 return ctx.date()
328 return _ctx_date(repo_id, commit_id)
363 return _ctx_date(repo_id, commit_id)
329
364
330 @reraise_safe_exceptions
365 @reraise_safe_exceptions
331 def ctx_description(self, wire, revision):
366 def ctx_description(self, wire, revision):
332 repo = self._factory.repo(wire)
367 repo = self._factory.repo(wire)
333 ctx = self._get_ctx(repo, revision)
368 ctx = self._get_ctx(repo, revision)
334 return ctx.description()
369 return ctx.description()
335
370
336 @reraise_safe_exceptions
371 @reraise_safe_exceptions
337 def ctx_files(self, wire, commit_id):
372 def ctx_files(self, wire, commit_id):
338 cache_on, context_uid, repo_id = self._cache_on(wire)
373 cache_on, context_uid, repo_id = self._cache_on(wire)
339 region = self._region(wire)
374 region = self._region(wire)
340
375
341 @region.conditional_cache_on_arguments(condition=cache_on)
376 @region.conditional_cache_on_arguments(condition=cache_on)
342 def _ctx_files(_repo_id, _commit_id):
377 def _ctx_files(_repo_id, _commit_id):
343 repo = self._factory.repo(wire)
378 repo = self._factory.repo(wire)
344 ctx = self._get_ctx(repo, commit_id)
379 ctx = self._get_ctx(repo, commit_id)
345 return ctx.files()
380 return ctx.files()
346
381
347 return _ctx_files(repo_id, commit_id)
382 return _ctx_files(repo_id, commit_id)
348
383
349 @reraise_safe_exceptions
384 @reraise_safe_exceptions
350 def ctx_list(self, path, revision):
385 def ctx_list(self, path, revision):
351 repo = self._factory.repo(path)
386 repo = self._factory.repo(path)
352 ctx = self._get_ctx(repo, revision)
387 ctx = self._get_ctx(repo, revision)
353 return list(ctx)
388 return list(ctx)
354
389
355 @reraise_safe_exceptions
390 @reraise_safe_exceptions
356 def ctx_parents(self, wire, commit_id):
391 def ctx_parents(self, wire, commit_id):
357 cache_on, context_uid, repo_id = self._cache_on(wire)
392 cache_on, context_uid, repo_id = self._cache_on(wire)
358 region = self._region(wire)
393 region = self._region(wire)
359
394
360 @region.conditional_cache_on_arguments(condition=cache_on)
395 @region.conditional_cache_on_arguments(condition=cache_on)
361 def _ctx_parents(_repo_id, _commit_id):
396 def _ctx_parents(_repo_id, _commit_id):
362 repo = self._factory.repo(wire)
397 repo = self._factory.repo(wire)
363 ctx = self._get_ctx(repo, commit_id)
398 ctx = self._get_ctx(repo, commit_id)
364 return [parent.hex() for parent in ctx.parents()
399 return [parent.hex() for parent in ctx.parents()
365 if not (parent.hidden() or parent.obsolete())]
400 if not (parent.hidden() or parent.obsolete())]
366
401
367 return _ctx_parents(repo_id, commit_id)
402 return _ctx_parents(repo_id, commit_id)
368
403
369 @reraise_safe_exceptions
404 @reraise_safe_exceptions
370 def ctx_children(self, wire, commit_id):
405 def ctx_children(self, wire, commit_id):
371 cache_on, context_uid, repo_id = self._cache_on(wire)
406 cache_on, context_uid, repo_id = self._cache_on(wire)
372 region = self._region(wire)
407 region = self._region(wire)
373
408
374 @region.conditional_cache_on_arguments(condition=cache_on)
409 @region.conditional_cache_on_arguments(condition=cache_on)
375 def _ctx_children(_repo_id, _commit_id):
410 def _ctx_children(_repo_id, _commit_id):
376 repo = self._factory.repo(wire)
411 repo = self._factory.repo(wire)
377 ctx = self._get_ctx(repo, commit_id)
412 ctx = self._get_ctx(repo, commit_id)
378 return [child.hex() for child in ctx.children()
413 return [child.hex() for child in ctx.children()
379 if not (child.hidden() or child.obsolete())]
414 if not (child.hidden() or child.obsolete())]
380
415
381 return _ctx_children(repo_id, commit_id)
416 return _ctx_children(repo_id, commit_id)
382
417
383 @reraise_safe_exceptions
418 @reraise_safe_exceptions
384 def ctx_phase(self, wire, commit_id):
419 def ctx_phase(self, wire, commit_id):
385 cache_on, context_uid, repo_id = self._cache_on(wire)
420 cache_on, context_uid, repo_id = self._cache_on(wire)
386 region = self._region(wire)
421 region = self._region(wire)
387
422
388 @region.conditional_cache_on_arguments(condition=cache_on)
423 @region.conditional_cache_on_arguments(condition=cache_on)
389 def _ctx_phase(_context_uid, _repo_id, _commit_id):
424 def _ctx_phase(_context_uid, _repo_id, _commit_id):
390 repo = self._factory.repo(wire)
425 repo = self._factory.repo(wire)
391 ctx = self._get_ctx(repo, commit_id)
426 ctx = self._get_ctx(repo, commit_id)
392 # public=0, draft=1, secret=3
427 # public=0, draft=1, secret=3
393 return ctx.phase()
428 return ctx.phase()
394 return _ctx_phase(context_uid, repo_id, commit_id)
429 return _ctx_phase(context_uid, repo_id, commit_id)
395
430
396 @reraise_safe_exceptions
431 @reraise_safe_exceptions
397 def ctx_obsolete(self, wire, commit_id):
432 def ctx_obsolete(self, wire, commit_id):
398 cache_on, context_uid, repo_id = self._cache_on(wire)
433 cache_on, context_uid, repo_id = self._cache_on(wire)
399 region = self._region(wire)
434 region = self._region(wire)
400
435
401 @region.conditional_cache_on_arguments(condition=cache_on)
436 @region.conditional_cache_on_arguments(condition=cache_on)
402 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
437 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
403 repo = self._factory.repo(wire)
438 repo = self._factory.repo(wire)
404 ctx = self._get_ctx(repo, commit_id)
439 ctx = self._get_ctx(repo, commit_id)
405 return ctx.obsolete()
440 return ctx.obsolete()
406 return _ctx_obsolete(context_uid, repo_id, commit_id)
441 return _ctx_obsolete(context_uid, repo_id, commit_id)
407
442
408 @reraise_safe_exceptions
443 @reraise_safe_exceptions
409 def ctx_hidden(self, wire, commit_id):
444 def ctx_hidden(self, wire, commit_id):
410 cache_on, context_uid, repo_id = self._cache_on(wire)
445 cache_on, context_uid, repo_id = self._cache_on(wire)
411 region = self._region(wire)
446 region = self._region(wire)
412
447
413 @region.conditional_cache_on_arguments(condition=cache_on)
448 @region.conditional_cache_on_arguments(condition=cache_on)
414 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
449 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
415 repo = self._factory.repo(wire)
450 repo = self._factory.repo(wire)
416 ctx = self._get_ctx(repo, commit_id)
451 ctx = self._get_ctx(repo, commit_id)
417 return ctx.hidden()
452 return ctx.hidden()
418 return _ctx_hidden(context_uid, repo_id, commit_id)
453 return _ctx_hidden(context_uid, repo_id, commit_id)
419
454
420 @reraise_safe_exceptions
455 @reraise_safe_exceptions
421 def ctx_substate(self, wire, revision):
456 def ctx_substate(self, wire, revision):
422 repo = self._factory.repo(wire)
457 repo = self._factory.repo(wire)
423 ctx = self._get_ctx(repo, revision)
458 ctx = self._get_ctx(repo, revision)
424 return ctx.substate
459 return ctx.substate
425
460
426 @reraise_safe_exceptions
461 @reraise_safe_exceptions
427 def ctx_status(self, wire, revision):
462 def ctx_status(self, wire, revision):
428 repo = self._factory.repo(wire)
463 repo = self._factory.repo(wire)
429 ctx = self._get_ctx(repo, revision)
464 ctx = self._get_ctx(repo, revision)
430 status = repo[ctx.p1().node()].status(other=ctx.node())
465 status = repo[ctx.p1().node()].status(other=ctx.node())
431 # object of status (odd, custom named tuple in mercurial) is not
466 # object of status (odd, custom named tuple in mercurial) is not
432 # correctly serializable, we make it a list, as the underling
467 # correctly serializable, we make it a list, as the underling
433 # API expects this to be a list
468 # API expects this to be a list
434 return list(status)
469 return list(status)
435
470
436 @reraise_safe_exceptions
471 @reraise_safe_exceptions
437 def ctx_user(self, wire, revision):
472 def ctx_user(self, wire, revision):
438 repo = self._factory.repo(wire)
473 repo = self._factory.repo(wire)
439 ctx = self._get_ctx(repo, revision)
474 ctx = self._get_ctx(repo, revision)
440 return ctx.user()
475 return ctx.user()
441
476
442 @reraise_safe_exceptions
477 @reraise_safe_exceptions
443 def check_url(self, url, config):
478 def check_url(self, url, config):
444 url, _proto = normalize_url_for_hg(url)
479 url, _proto = normalize_url_for_hg(url)
445 url_obj = url_parser(safe_bytes(url))
480 url_obj = url_parser(safe_bytes(url))
446
481
447 test_uri = safe_str(url_obj.authinfo()[0])
482 test_uri = safe_str(url_obj.authinfo()[0])
448 authinfo = url_obj.authinfo()[1]
483 authinfo = url_obj.authinfo()[1]
449 obfuscated_uri = get_obfuscated_url(url_obj)
484 obfuscated_uri = get_obfuscated_url(url_obj)
450 log.info("Checking URL for remote cloning/import: %s", obfuscated_uri)
485 log.info("Checking URL for remote cloning/import: %s", obfuscated_uri)
451
486
452 handlers = []
487 handlers = []
453 if authinfo:
488 if authinfo:
454 # create a password manager
489 # create a password manager
455 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
490 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
456 passmgr.add_password(*authinfo)
491 passmgr.add_password(*authinfo)
457
492
458 handlers.extend((httpbasicauthhandler(passmgr),
493 handlers.extend((httpbasicauthhandler(passmgr),
459 httpdigestauthhandler(passmgr)))
494 httpdigestauthhandler(passmgr)))
460
495
461 o = urllib.request.build_opener(*handlers)
496 o = urllib.request.build_opener(*handlers)
462 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
497 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
463 ('Accept', 'application/mercurial-0.1')]
498 ('Accept', 'application/mercurial-0.1')]
464
499
465 q = {"cmd": 'between'}
500 q = {"cmd": 'between'}
466 q.update({'pairs': "{}-{}".format('0' * 40, '0' * 40)})
501 q.update({'pairs': "{}-{}".format('0' * 40, '0' * 40)})
467 qs = '?%s' % urllib.parse.urlencode(q)
502 qs = '?%s' % urllib.parse.urlencode(q)
468 cu = f"{test_uri}{qs}"
503 cu = f"{test_uri}{qs}"
469 req = urllib.request.Request(cu, None, {})
504 req = urllib.request.Request(cu, None, {})
470
505
471 try:
506 try:
472 log.debug("Trying to open URL %s", obfuscated_uri)
507 log.debug("Trying to open URL %s", obfuscated_uri)
473 resp = o.open(req)
508 resp = o.open(req)
474 if resp.code != 200:
509 if resp.code != 200:
475 raise exceptions.URLError()('Return Code is not 200')
510 raise exceptions.URLError()('Return Code is not 200')
476 except Exception as e:
511 except Exception as e:
477 log.warning("URL cannot be opened: %s", obfuscated_uri, exc_info=True)
512 log.warning("URL cannot be opened: %s", obfuscated_uri, exc_info=True)
478 # means it cannot be cloned
513 # means it cannot be cloned
479 raise exceptions.URLError(e)(f"[{obfuscated_uri}] org_exc: {e}")
514 raise exceptions.URLError(e)(f"[{obfuscated_uri}] org_exc: {e}")
480
515
481 # now check if it's a proper hg repo, but don't do it for svn
516 # now check if it's a proper hg repo, but don't do it for svn
482 try:
517 try:
483 if _proto == 'svn':
518 if _proto == 'svn':
484 pass
519 pass
485 else:
520 else:
486 # check for pure hg repos
521 # check for pure hg repos
487 log.debug(
522 log.debug(
488 "Verifying if URL is a Mercurial repository: %s", obfuscated_uri)
523 "Verifying if URL is a Mercurial repository: %s", obfuscated_uri)
489 ui = make_ui_from_config(config)
524 ui = make_ui_from_config(config)
490 peer_checker = makepeer(ui, safe_bytes(url))
525 peer_checker = makepeer(ui, safe_bytes(url))
491 peer_checker.lookup(b'tip')
526 peer_checker.lookup(b'tip')
492 except Exception as e:
527 except Exception as e:
493 log.warning("URL is not a valid Mercurial repository: %s",
528 log.warning("URL is not a valid Mercurial repository: %s",
494 obfuscated_uri)
529 obfuscated_uri)
495 raise exceptions.URLError(e)(
530 raise exceptions.URLError(e)(
496 "url [%s] does not look like an hg repo org_exc: %s"
531 "url [%s] does not look like an hg repo org_exc: %s"
497 % (obfuscated_uri, e))
532 % (obfuscated_uri, e))
498
533
499 log.info("URL is a valid Mercurial repository: %s", obfuscated_uri)
534 log.info("URL is a valid Mercurial repository: %s", obfuscated_uri)
500 return True
535 return True
501
536
502 @reraise_safe_exceptions
537 @reraise_safe_exceptions
503 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
538 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
504 repo = self._factory.repo(wire)
539 repo = self._factory.repo(wire)
505
540
506 if file_filter:
541 if file_filter:
507 # unpack the file-filter
542 # unpack the file-filter
508 repo_path, node_path = file_filter
543 repo_path, node_path = file_filter
509 match_filter = match(safe_bytes(repo_path), b'', [safe_bytes(node_path)])
544 match_filter = match(safe_bytes(repo_path), b'', [safe_bytes(node_path)])
510 else:
545 else:
511 match_filter = file_filter
546 match_filter = file_filter
512 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
547 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
513
548
514 try:
549 try:
515 diff_iter = patch.diff(
550 diff_iter = patch.diff(
516 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts)
551 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts)
517 return BytesEnvelope(b"".join(diff_iter))
552 return BytesEnvelope(b"".join(diff_iter))
518 except RepoLookupError as e:
553 except RepoLookupError as e:
519 raise exceptions.LookupException(e)()
554 raise exceptions.LookupException(e)()
520
555
521 @reraise_safe_exceptions
556 @reraise_safe_exceptions
522 def node_history(self, wire, revision, path, limit):
557 def node_history(self, wire, revision, path, limit):
523 cache_on, context_uid, repo_id = self._cache_on(wire)
558 cache_on, context_uid, repo_id = self._cache_on(wire)
524 region = self._region(wire)
559 region = self._region(wire)
525
560
526 @region.conditional_cache_on_arguments(condition=cache_on)
561 @region.conditional_cache_on_arguments(condition=cache_on)
527 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
562 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
528 repo = self._factory.repo(wire)
563 repo = self._factory.repo(wire)
529
564
530 ctx = self._get_ctx(repo, revision)
565 ctx = self._get_ctx(repo, revision)
531 fctx = ctx.filectx(safe_bytes(path))
566 fctx = ctx.filectx(safe_bytes(path))
532
567
533 def history_iter():
568 def history_iter():
534 limit_rev = fctx.rev()
569 limit_rev = fctx.rev()
535 for obj in reversed(list(fctx.filelog())):
570 for obj in reversed(list(fctx.filelog())):
536 obj = fctx.filectx(obj)
571 obj = fctx.filectx(obj)
537 ctx = obj.changectx()
572 ctx = obj.changectx()
538 if ctx.hidden() or ctx.obsolete():
573 if ctx.hidden() or ctx.obsolete():
539 continue
574 continue
540
575
541 if limit_rev >= obj.rev():
576 if limit_rev >= obj.rev():
542 yield obj
577 yield obj
543
578
544 history = []
579 history = []
545 for cnt, obj in enumerate(history_iter()):
580 for cnt, obj in enumerate(history_iter()):
546 if limit and cnt >= limit:
581 if limit and cnt >= limit:
547 break
582 break
548 history.append(hex(obj.node()))
583 history.append(hex(obj.node()))
549
584
550 return [x for x in history]
585 return [x for x in history]
551 return _node_history(context_uid, repo_id, revision, path, limit)
586 return _node_history(context_uid, repo_id, revision, path, limit)
552
587
553 @reraise_safe_exceptions
588 @reraise_safe_exceptions
554 def node_history_untill(self, wire, revision, path, limit):
589 def node_history_untill(self, wire, revision, path, limit):
555 cache_on, context_uid, repo_id = self._cache_on(wire)
590 cache_on, context_uid, repo_id = self._cache_on(wire)
556 region = self._region(wire)
591 region = self._region(wire)
557
592
558 @region.conditional_cache_on_arguments(condition=cache_on)
593 @region.conditional_cache_on_arguments(condition=cache_on)
559 def _node_history_until(_context_uid, _repo_id):
594 def _node_history_until(_context_uid, _repo_id):
560 repo = self._factory.repo(wire)
595 repo = self._factory.repo(wire)
561 ctx = self._get_ctx(repo, revision)
596 ctx = self._get_ctx(repo, revision)
562 fctx = ctx.filectx(safe_bytes(path))
597 fctx = ctx.filectx(safe_bytes(path))
563
598
564 file_log = list(fctx.filelog())
599 file_log = list(fctx.filelog())
565 if limit:
600 if limit:
566 # Limit to the last n items
601 # Limit to the last n items
567 file_log = file_log[-limit:]
602 file_log = file_log[-limit:]
568
603
569 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
604 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
570 return _node_history_until(context_uid, repo_id, revision, path, limit)
605 return _node_history_until(context_uid, repo_id, revision, path, limit)
571
606
572 @reraise_safe_exceptions
607 @reraise_safe_exceptions
573 def bulk_file_request(self, wire, commit_id, path, pre_load):
608 def bulk_file_request(self, wire, commit_id, path, pre_load):
574 cache_on, context_uid, repo_id = self._cache_on(wire)
609 cache_on, context_uid, repo_id = self._cache_on(wire)
575 region = self._region(wire)
610 region = self._region(wire)
576
611
577 @region.conditional_cache_on_arguments(condition=cache_on)
612 @region.conditional_cache_on_arguments(condition=cache_on)
578 def _bulk_file_request(_repo_id, _commit_id, _path, _pre_load):
613 def _bulk_file_request(_repo_id, _commit_id, _path, _pre_load):
579 result = {}
614 result = {}
580 for attr in pre_load:
615 for attr in pre_load:
581 try:
616 try:
582 method = self._bulk_file_methods[attr]
617 method = self._bulk_file_methods[attr]
583 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
618 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
584 result[attr] = method(wire, _commit_id, _path)
619 result[attr] = method(wire, _commit_id, _path)
585 except KeyError as e:
620 except KeyError as e:
586 raise exceptions.VcsException(e)(f'Unknown bulk attribute: "{attr}"')
621 raise exceptions.VcsException(e)(f'Unknown bulk attribute: "{attr}"')
587 return result
622 return result
588
623
589 return BinaryEnvelope(_bulk_file_request(repo_id, commit_id, path, sorted(pre_load)))
624 return BinaryEnvelope(_bulk_file_request(repo_id, commit_id, path, sorted(pre_load)))
590
625
591 @reraise_safe_exceptions
626 @reraise_safe_exceptions
592 def fctx_annotate(self, wire, revision, path):
627 def fctx_annotate(self, wire, revision, path):
593 repo = self._factory.repo(wire)
628 repo = self._factory.repo(wire)
594 ctx = self._get_ctx(repo, revision)
629 ctx = self._get_ctx(repo, revision)
595 fctx = ctx.filectx(safe_bytes(path))
630 fctx = ctx.filectx(safe_bytes(path))
596
631
597 result = []
632 result = []
598 for i, annotate_obj in enumerate(fctx.annotate(), 1):
633 for i, annotate_obj in enumerate(fctx.annotate(), 1):
599 ln_no = i
634 ln_no = i
600 sha = hex(annotate_obj.fctx.node())
635 sha = hex(annotate_obj.fctx.node())
601 content = annotate_obj.text
636 content = annotate_obj.text
602 result.append((ln_no, ascii_str(sha), content))
637 result.append((ln_no, ascii_str(sha), content))
603 return BinaryEnvelope(result)
638 return BinaryEnvelope(result)
604
639
605 @reraise_safe_exceptions
640 @reraise_safe_exceptions
606 def fctx_node_data(self, wire, revision, path):
641 def fctx_node_data(self, wire, revision, path):
607 repo = self._factory.repo(wire)
642 repo = self._factory.repo(wire)
608 ctx = self._get_ctx(repo, revision)
643 ctx = self._get_ctx(repo, revision)
609 fctx = ctx.filectx(safe_bytes(path))
644 fctx = ctx.filectx(safe_bytes(path))
610 return BytesEnvelope(fctx.data())
645 return BytesEnvelope(fctx.data())
611
646
612 @reraise_safe_exceptions
647 @reraise_safe_exceptions
613 def fctx_flags(self, wire, commit_id, path):
648 def fctx_flags(self, wire, commit_id, path):
614 cache_on, context_uid, repo_id = self._cache_on(wire)
649 cache_on, context_uid, repo_id = self._cache_on(wire)
615 region = self._region(wire)
650 region = self._region(wire)
616
651
617 @region.conditional_cache_on_arguments(condition=cache_on)
652 @region.conditional_cache_on_arguments(condition=cache_on)
618 def _fctx_flags(_repo_id, _commit_id, _path):
653 def _fctx_flags(_repo_id, _commit_id, _path):
619 repo = self._factory.repo(wire)
654 repo = self._factory.repo(wire)
620 ctx = self._get_ctx(repo, commit_id)
655 ctx = self._get_ctx(repo, commit_id)
621 fctx = ctx.filectx(safe_bytes(path))
656 fctx = ctx.filectx(safe_bytes(path))
622 return fctx.flags()
657 return fctx.flags()
623
658
624 return _fctx_flags(repo_id, commit_id, path)
659 return _fctx_flags(repo_id, commit_id, path)
625
660
626 @reraise_safe_exceptions
661 @reraise_safe_exceptions
627 def fctx_size(self, wire, commit_id, path):
662 def fctx_size(self, wire, commit_id, path):
628 cache_on, context_uid, repo_id = self._cache_on(wire)
663 cache_on, context_uid, repo_id = self._cache_on(wire)
629 region = self._region(wire)
664 region = self._region(wire)
630
665
631 @region.conditional_cache_on_arguments(condition=cache_on)
666 @region.conditional_cache_on_arguments(condition=cache_on)
632 def _fctx_size(_repo_id, _revision, _path):
667 def _fctx_size(_repo_id, _revision, _path):
633 repo = self._factory.repo(wire)
668 repo = self._factory.repo(wire)
634 ctx = self._get_ctx(repo, commit_id)
669 ctx = self._get_ctx(repo, commit_id)
635 fctx = ctx.filectx(safe_bytes(path))
670 fctx = ctx.filectx(safe_bytes(path))
636 return fctx.size()
671 return fctx.size()
637 return _fctx_size(repo_id, commit_id, path)
672 return _fctx_size(repo_id, commit_id, path)
638
673
639 @reraise_safe_exceptions
674 @reraise_safe_exceptions
640 def get_all_commit_ids(self, wire, name):
675 def get_all_commit_ids(self, wire, name):
641 cache_on, context_uid, repo_id = self._cache_on(wire)
676 cache_on, context_uid, repo_id = self._cache_on(wire)
642 region = self._region(wire)
677 region = self._region(wire)
643
678
644 @region.conditional_cache_on_arguments(condition=cache_on)
679 @region.conditional_cache_on_arguments(condition=cache_on)
645 def _get_all_commit_ids(_context_uid, _repo_id, _name):
680 def _get_all_commit_ids(_context_uid, _repo_id, _name):
646 repo = self._factory.repo(wire)
681 repo = self._factory.repo(wire)
647 revs = [ascii_str(repo[x].hex()) for x in repo.filtered(b'visible').changelog.revs()]
682 revs = [ascii_str(repo[x].hex()) for x in repo.filtered(b'visible').changelog.revs()]
648 return revs
683 return revs
649 return _get_all_commit_ids(context_uid, repo_id, name)
684 return _get_all_commit_ids(context_uid, repo_id, name)
650
685
651 @reraise_safe_exceptions
686 @reraise_safe_exceptions
652 def get_config_value(self, wire, section, name, untrusted=False):
687 def get_config_value(self, wire, section, name, untrusted=False):
653 repo = self._factory.repo(wire)
688 repo = self._factory.repo(wire)
654 return repo.ui.config(ascii_bytes(section), ascii_bytes(name), untrusted=untrusted)
689 return repo.ui.config(ascii_bytes(section), ascii_bytes(name), untrusted=untrusted)
655
690
656 @reraise_safe_exceptions
691 @reraise_safe_exceptions
657 def is_large_file(self, wire, commit_id, path):
692 def is_large_file(self, wire, commit_id, path):
658 cache_on, context_uid, repo_id = self._cache_on(wire)
693 cache_on, context_uid, repo_id = self._cache_on(wire)
659 region = self._region(wire)
694 region = self._region(wire)
660
695
661 @region.conditional_cache_on_arguments(condition=cache_on)
696 @region.conditional_cache_on_arguments(condition=cache_on)
662 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
697 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
663 return largefiles.lfutil.isstandin(safe_bytes(path))
698 return largefiles.lfutil.isstandin(safe_bytes(path))
664
699
665 return _is_large_file(context_uid, repo_id, commit_id, path)
700 return _is_large_file(context_uid, repo_id, commit_id, path)
666
701
667 @reraise_safe_exceptions
702 @reraise_safe_exceptions
668 def is_binary(self, wire, revision, path):
703 def is_binary(self, wire, revision, path):
669 cache_on, context_uid, repo_id = self._cache_on(wire)
704 cache_on, context_uid, repo_id = self._cache_on(wire)
670 region = self._region(wire)
705 region = self._region(wire)
671
706
672 @region.conditional_cache_on_arguments(condition=cache_on)
707 @region.conditional_cache_on_arguments(condition=cache_on)
673 def _is_binary(_repo_id, _sha, _path):
708 def _is_binary(_repo_id, _sha, _path):
674 repo = self._factory.repo(wire)
709 repo = self._factory.repo(wire)
675 ctx = self._get_ctx(repo, revision)
710 ctx = self._get_ctx(repo, revision)
676 fctx = ctx.filectx(safe_bytes(path))
711 fctx = ctx.filectx(safe_bytes(path))
677 return fctx.isbinary()
712 return fctx.isbinary()
678
713
679 return _is_binary(repo_id, revision, path)
714 return _is_binary(repo_id, revision, path)
680
715
681 @reraise_safe_exceptions
716 @reraise_safe_exceptions
682 def md5_hash(self, wire, revision, path):
717 def md5_hash(self, wire, revision, path):
683 cache_on, context_uid, repo_id = self._cache_on(wire)
718 cache_on, context_uid, repo_id = self._cache_on(wire)
684 region = self._region(wire)
719 region = self._region(wire)
685
720
686 @region.conditional_cache_on_arguments(condition=cache_on)
721 @region.conditional_cache_on_arguments(condition=cache_on)
687 def _md5_hash(_repo_id, _sha, _path):
722 def _md5_hash(_repo_id, _sha, _path):
688 repo = self._factory.repo(wire)
723 repo = self._factory.repo(wire)
689 ctx = self._get_ctx(repo, revision)
724 ctx = self._get_ctx(repo, revision)
690 fctx = ctx.filectx(safe_bytes(path))
725 fctx = ctx.filectx(safe_bytes(path))
691 return hashlib.md5(fctx.data()).hexdigest()
726 return hashlib.md5(fctx.data()).hexdigest()
692
727
693 return _md5_hash(repo_id, revision, path)
728 return _md5_hash(repo_id, revision, path)
694
729
695 @reraise_safe_exceptions
730 @reraise_safe_exceptions
696 def in_largefiles_store(self, wire, sha):
731 def in_largefiles_store(self, wire, sha):
697 repo = self._factory.repo(wire)
732 repo = self._factory.repo(wire)
698 return largefiles.lfutil.instore(repo, sha)
733 return largefiles.lfutil.instore(repo, sha)
699
734
700 @reraise_safe_exceptions
735 @reraise_safe_exceptions
701 def in_user_cache(self, wire, sha):
736 def in_user_cache(self, wire, sha):
702 repo = self._factory.repo(wire)
737 repo = self._factory.repo(wire)
703 return largefiles.lfutil.inusercache(repo.ui, sha)
738 return largefiles.lfutil.inusercache(repo.ui, sha)
704
739
705 @reraise_safe_exceptions
740 @reraise_safe_exceptions
706 def store_path(self, wire, sha):
741 def store_path(self, wire, sha):
707 repo = self._factory.repo(wire)
742 repo = self._factory.repo(wire)
708 return largefiles.lfutil.storepath(repo, sha)
743 return largefiles.lfutil.storepath(repo, sha)
709
744
710 @reraise_safe_exceptions
745 @reraise_safe_exceptions
711 def link(self, wire, sha, path):
746 def link(self, wire, sha, path):
712 repo = self._factory.repo(wire)
747 repo = self._factory.repo(wire)
713 largefiles.lfutil.link(
748 largefiles.lfutil.link(
714 largefiles.lfutil.usercachepath(repo.ui, sha), path)
749 largefiles.lfutil.usercachepath(repo.ui, sha), path)
715
750
716 @reraise_safe_exceptions
751 @reraise_safe_exceptions
717 def localrepository(self, wire, create=False):
752 def localrepository(self, wire, create=False):
718 self._factory.repo(wire, create=create)
753 self._factory.repo(wire, create=create)
719
754
@reraise_safe_exceptions
def lookup(self, wire, revision, both):
    """
    Resolve *revision* (int index or hash/prefix) to a commit hash.

    :param revision: integer revision index or a (possibly abbreviated)
        commit id. Integer indexes <= 0 are shifted by one because
        Mercurial's negative indexing is off-by-one vs. ours.
    :param both: when True return ``(hex_id, rev_number)``, otherwise
        just the hex id.
    :raises exceptions.LookupException: when the revision cannot be
        resolved; the original traceback is attached as ``_org_exc_tb``.
    """
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _lookup(_context_uid, _repo_id, _revision, _both):
        repo = self._factory.repo(wire)
        rev = _revision
        if isinstance(rev, int):
            # NOTE(marcink):
            # since Mercurial doesn't support negative indexes properly
            # we need to shift accordingly by one to get proper index, e.g
            # repo[-1] => repo[-2]
            # repo[0] => repo[-1]
            if rev <= 0:
                rev -= 1  # idiomatic form of `rev = rev + -1`
        try:
            ctx = self._get_ctx(repo, rev)
        except AmbiguousPrefixLookupError:
            # re-wrap ambiguous prefixes as a plain lookup failure
            e = RepoLookupError(rev)
            e._org_exc_tb = format_exc(sys.exc_info())
            raise exceptions.LookupException(e)(rev)
        except (TypeError, RepoLookupError, binascii.Error) as e:
            e._org_exc_tb = format_exc(sys.exc_info())
            raise exceptions.LookupException(e)(rev)
        except LookupError as e:
            e._org_exc_tb = format_exc(sys.exc_info())
            raise exceptions.LookupException(e)(e.name)

        if not both:
            return ctx.hex()

        ctx = repo[ctx.hex()]
        return ctx.hex(), ctx.rev()

    return _lookup(context_uid, repo_id, revision, both)
757
792
@reraise_safe_exceptions
def sync_push(self, wire, url):
    """
    Push all bookmarks and new branches of the wire repo to *url*.

    Returns ``exchange.push(...).cgresult``, or None when *url* fails
    the ``check_url`` validation.
    """
    if not self.check_url(url, wire['config']):
        return

    repo = self._factory.repo(wire)

    # Disable any prompts for this repo
    repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')

    bookmarks = list(dict(repo._bookmarks).keys())
    remote = peer(repo, {}, safe_bytes(url))
    # Disable any prompts for this remote
    remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')

    return exchange.push(
        repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
775
810
@reraise_safe_exceptions
def revision(self, wire, rev):
    """Return the integer revision number for commit *rev*."""
    repo = self._factory.repo(wire)
    ctx = self._get_ctx(repo, rev)
    return ctx.rev()
781
816
@reraise_safe_exceptions
def rev_range(self, wire, commit_filter):
    """
    Return hex commit ids matching the revset expressions in
    *commit_filter* (cached; cache key uses the sorted filter).
    """
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _rev_range(_context_uid, _repo_id, _filter):
        repo = self._factory.repo(wire)
        revisions = [
            ascii_str(repo[rev].hex())
            for rev in revrange(repo, list(map(ascii_bytes, commit_filter)))
        ]
        return revisions

    return _rev_range(context_uid, repo_id, sorted(commit_filter))
797
832
@reraise_safe_exceptions
def rev_range_hash(self, wire, node):
    """
    Return hex ids of all revisions from *node* to the repo tip
    (inclusive, ascending).
    """
    repo = self._factory.repo(wire)

    def get_revs(repo, rev_opt):
        # Resolve the revset to a (max, min) pair; empty result maps to
        # (nullrev, nullrev), no revset means the full 0..tip range.
        if rev_opt:
            revs = revrange(repo, rev_opt)
            if len(revs) == 0:
                return (nullrev, nullrev)
            return max(revs), min(revs)
        else:
            return len(repo) - 1, 0

    stop, start = get_revs(repo, [node + ':'])
    revs = [ascii_str(repo[r].hex()) for r in range(start, stop + 1)]
    return revs
814
849
@reraise_safe_exceptions
def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
    """
    Evaluate *rev_spec* against the wire repo, or against a union of this
    repo and ``other_path`` (kwarg) when comparing two repositories.
    Returns a list of revision numbers.
    """
    org_path = safe_bytes(wire["path"])
    other_path = safe_bytes(kwargs.pop('other_path', ''))

    # case when we want to compare two independent repositories
    if other_path and other_path != wire["path"]:
        baseui = self._factory._create_config(wire["config"])
        repo = unionrepo.makeunionrepository(baseui, other_path, org_path)
    else:
        repo = self._factory.repo(wire)
    return list(repo.revs(rev_spec, *args))
827
862
@reraise_safe_exceptions
def verify(self, wire,):
    """Run ``hg verify`` on the repo and return its captured output."""
    repo = self._factory.repo(wire)
    baseui = self._factory._create_config(wire['config'])

    # capture ui messages instead of writing them to stdout
    baseui, output = patch_ui_message_output(baseui)

    repo.ui = baseui
    verify.verify(repo)
    return output.getvalue()
838
873
@reraise_safe_exceptions
def hg_update_cache(self, wire,):
    """Rebuild all repository caches (``updatecaches(full=True)``) and
    return the captured ui output."""
    repo = self._factory.repo(wire)
    baseui = self._factory._create_config(wire['config'])
    baseui, output = patch_ui_message_output(baseui)

    repo.ui = baseui
    # both wlock and lock are required for a full cache update
    with repo.wlock(), repo.lock():
        repo.updatecaches(full=True)

    return output.getvalue()
850
885
@reraise_safe_exceptions
def hg_rebuild_fn_cache(self, wire,):
    """Rebuild the fncache store file and return the captured ui output."""
    repo = self._factory.repo(wire)
    baseui = self._factory._create_config(wire['config'])
    baseui, output = patch_ui_message_output(baseui)

    repo.ui = baseui

    repair.rebuildfncache(baseui, repo)

    return output.getvalue()
862
897
@reraise_safe_exceptions
def tags(self, wire):
    """Return a ``{tag_name: hex_commit_id}`` mapping (cached)."""
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _tags(_context_uid, _repo_id):
        repo = self._factory.repo(wire)
        return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo.tags().items()}

    return _tags(context_uid, repo_id)
874
909
@reraise_safe_exceptions
def update(self, wire, node='', clean=False):
    """Update the working directory to *node* (``hg update``), optionally
    discarding local changes when *clean* is True."""
    repo = self._factory.repo(wire)
    baseui = self._factory._create_config(wire['config'])
    node = safe_bytes(node)

    commands.update(baseui, repo, node=node, clean=clean)
882
917
@reraise_safe_exceptions
def identify(self, wire):
    """Return the full node id of the working directory parent
    (``hg identify --id --debug``), as captured bytes."""
    repo = self._factory.repo(wire)
    baseui = self._factory._create_config(wire['config'])
    output = io.BytesIO()
    baseui.write = output.write
    # This is required to get a full node id
    baseui.debugflag = True
    commands.identify(baseui, repo, id=True)

    return output.getvalue()
894
929
@reraise_safe_exceptions
def heads(self, wire, branch=None):
    """Return repo head node ids as space-separated bytes, optionally
    restricted to *branch*."""
    repo = self._factory.repo(wire)
    baseui = self._factory._create_config(wire['config'])
    output = io.BytesIO()

    def write(data, **unused_kwargs):
        # swallow ui keyword args; only the payload is captured
        output.write(data)

    baseui.write = write
    if branch:
        args = [safe_bytes(branch)]
    else:
        args = []
    commands.heads(baseui, repo, template=b'{node} ', *args)

    return output.getvalue()
912
947
@reraise_safe_exceptions
def ancestor(self, wire, revision1, revision2):
    """Return the hex id of the common ancestor of the two revisions."""
    repo = self._factory.repo(wire)
    changelog = repo.changelog
    lookup = repo.lookup
    a = changelog.ancestor(lookup(safe_bytes(revision1)), lookup(safe_bytes(revision2)))
    return hex(a)
920
955
@reraise_safe_exceptions
def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
    """Clone *source* into *dest*; skip the working-dir update unless
    *update_after_clone* is True."""
    baseui = self._factory._create_config(wire["config"], hooks=hooks)
    clone(baseui, safe_bytes(source), safe_bytes(dest), noupdate=not update_after_clone)
925
960
@reraise_safe_exceptions
def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
    """
    Create an in-memory commit (``memctx``) from the given file changes
    and return the new commit's hex id.

    :param removed: iterable of str paths to delete in the commit.
    :param updated: iterable of dicts with 'path', 'content', 'mode'
        keys describing added/changed files.
    :param extra: mapping merged into the commit's extra fields.
    """

    repo = self._factory.repo(wire)
    baseui = self._factory._create_config(wire['config'])
    # phase of the new commit follows the repo's publish setting
    publishing = baseui.configbool(b'phases', b'publish')

    def _filectxfn(_repo, ctx, path: bytes):
        """
        Marks given path as added/changed/removed in a given _repo. This is
        for internal mercurial commit function.
        """

        # check if this path is removed
        if safe_str(path) in removed:
            # returning None is a way to mark node for removal
            return None

        # check if this path is added
        for node in updated:
            if safe_bytes(node['path']) == path:
                return memfilectx(
                    _repo,
                    changectx=ctx,
                    path=safe_bytes(node['path']),
                    data=safe_bytes(node['content']),
                    islink=False,
                    isexec=bool(node['mode'] & stat.S_IXUSR),
                    copysource=False)
        abort_exc = exceptions.AbortException()
        raise abort_exc(f"Given path haven't been marked as added, changed or removed ({path})")

    if publishing:
        new_commit_phase = b'public'
    else:
        new_commit_phase = b'draft'
    with repo.ui.configoverride({(b'phases', b'new-commit'): new_commit_phase}):
        kwargs = {safe_bytes(k): safe_bytes(v) for k, v in extra.items()}
        commit_ctx = memctx(
            repo=repo,
            parents=parents,
            text=safe_bytes(message),
            files=[safe_bytes(x) for x in files],
            filectxfn=_filectxfn,
            user=safe_bytes(user),
            date=(commit_time, commit_timezone),
            extra=kwargs)

        n = repo.commitctx(commit_ctx)
        new_id = hex(n)

    return new_id
978
1013
@reraise_safe_exceptions
def pull(self, wire, url, commit_ids=None):
    """
    Pull from *url* via the exchange API, optionally limited to the
    given hex *commit_ids*; return ``cgresult``.
    """
    repo = self._factory.repo(wire)
    # Disable any prompts for this repo
    repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')

    remote = peer(repo, {}, safe_bytes(url))
    # Disable any prompts for this remote
    remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')

    if commit_ids:
        # exchange.pull expects binary node ids
        commit_ids = [bin(commit_id) for commit_id in commit_ids]

    return exchange.pull(
        repo, remote, heads=commit_ids, force=None).cgresult
994
1029
@reraise_safe_exceptions
def pull_cmd(self, wire, source, bookmark='', branch='', revision='', hooks=True):
    """
    Run ``hg pull`` from *source*, optionally filtered by bookmark(s),
    branch(es) and revision(s) (each may be a single value or a list).
    """
    repo = self._factory.repo(wire)
    baseui = self._factory._create_config(wire['config'], hooks=hooks)

    source = safe_bytes(source)

    # Mercurial internally has a lot of logic that checks ONLY if
    # option is defined, we just pass those if they are defined then
    opts = {}

    if bookmark:
        opts['bookmark'] = [safe_bytes(x) for x in bookmark] \
            if isinstance(bookmark, list) else safe_bytes(bookmark)

    if branch:
        opts['branch'] = [safe_bytes(x) for x in branch] \
            if isinstance(branch, list) else safe_bytes(branch)

    if revision:
        opts['rev'] = [safe_bytes(x) for x in revision] \
            if isinstance(revision, list) else safe_bytes(revision)

    commands.pull(baseui, repo, source, **opts)
1019
1054
@reraise_safe_exceptions
def push(self, wire, revisions, dest_path, hooks: bool = True, push_branches: bool = False):
    """
    Run ``hg push`` of *revisions* (single value or list) to
    *dest_path*; *push_branches* allows creating new remote branches.
    """
    repo = self._factory.repo(wire)
    baseui = self._factory._create_config(wire['config'], hooks=hooks)

    revisions = [safe_bytes(x) for x in revisions] \
        if isinstance(revisions, list) else safe_bytes(revisions)

    commands.push(baseui, repo, safe_bytes(dest_path),
                  rev=revisions,
                  new_branch=push_branches)
1031
1066
@reraise_safe_exceptions
def strip(self, wire, revision, update, backup):
    """Strip *revision* (and descendants) via the strip extension."""
    repo = self._factory.repo(wire)
    ctx = self._get_ctx(repo, revision)
    hgext_strip.strip(
        repo.baseui, repo, ctx.node(), update=update, backup=backup)
1038
1073
@reraise_safe_exceptions
def get_unresolved_files(self, wire):
    """
    Return the captured output lines of ``hg resolve --list`` — the
    repo's unresolved merge files, one entry per line (bytes).
    """
    repo = self._factory.repo(wire)

    log.debug('Calculating unresolved files for repo: %s', repo)
    output = io.BytesIO()

    def write(data, **unused_kwargs):
        # swallow ui keyword args; only the payload is captured
        output.write(data)

    baseui = self._factory._create_config(wire['config'])
    baseui.write = write

    commands.resolve(baseui, repo, list=True)
    # keepends takes a bool; `splitlines(0)` passed an int for it
    unresolved = output.getvalue().splitlines(False)
    return unresolved
1055
1090
@reraise_safe_exceptions
def merge(self, wire, revision):
    """Run ``hg merge`` with *revision* using the non-interactive
    ``internal:dump`` merge tool."""
    repo = self._factory.repo(wire)
    baseui = self._factory._create_config(wire['config'])
    repo.ui.setconfig(b'ui', b'merge', b'internal:dump')

    # In case of sub repositories are used mercurial prompts the user in
    # case of merge conflicts or different sub repository sources. By
    # setting the interactive flag to `False` mercurial doesn't prompt the
    # used but instead uses a default value.
    repo.ui.setconfig(b'ui', b'interactive', False)
    commands.merge(baseui, repo, rev=safe_bytes(revision))
1068
1103
@reraise_safe_exceptions
def merge_state(self, wire):
    """Return the list of files left unresolved by the current merge."""
    repo = self._factory.repo(wire)
    repo.ui.setconfig(b'ui', b'merge', b'internal:dump')

    # In case of sub repositories are used mercurial prompts the user in
    # case of merge conflicts or different sub repository sources. By
    # setting the interactive flag to `False` mercurial doesn't prompt the
    # used but instead uses a default value.
    repo.ui.setconfig(b'ui', b'interactive', False)
    ms = hg_merge.mergestate(repo)
    # list() instead of the redundant `[x for x in ...]` comprehension
    return list(ms.unresolved())
1081
1116
@reraise_safe_exceptions
def commit(self, wire, message, username, close_branch=False):
    """Run ``hg commit`` with *message* as *username*, optionally
    closing the current branch."""
    repo = self._factory.repo(wire)
    baseui = self._factory._create_config(wire['config'])
    repo.ui.setconfig(b'ui', b'username', safe_bytes(username))
    commands.commit(baseui, repo, message=safe_bytes(message), close_branch=close_branch)
1088
1123
@reraise_safe_exceptions
def rebase(self, wire, source='', dest='', abort=False):
    """Run (or abort) a rebase from *source* onto *dest* with the
    non-interactive ``internal:dump`` merge tool."""
    repo = self._factory.repo(wire)
    baseui = self._factory._create_config(wire['config'])
    repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
    # In case of sub repositories are used mercurial prompts the user in
    # case of merge conflicts or different sub repository sources. By
    # setting the interactive flag to `False` mercurial doesn't prompt the
    # used but instead uses a default value.
    repo.ui.setconfig(b'ui', b'interactive', False)

    rebase.rebase(baseui, repo, base=safe_bytes(source or ''), dest=safe_bytes(dest or ''),
                  abort=abort, keep=not abort)
1102
1137
@reraise_safe_exceptions
def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
    """
    Create tag *name* on *revision* as *user* with the given date;
    re-raises Mercurial ``Abort`` as ``exceptions.AbortException``.
    """
    repo = self._factory.repo(wire)
    ctx = self._get_ctx(repo, revision)
    node = ctx.node()

    date = (tag_time, tag_timezone)
    try:
        hg_tag.tag(repo, safe_bytes(name), node, safe_bytes(message), local, safe_bytes(user), date)
    except Abort as e:
        log.exception("Tag operation aborted")
        # Exception can contain unicode which we convert
        raise exceptions.AbortException(e)(repr(e))
1116
1151
@reraise_safe_exceptions
def bookmark(self, wire, bookmark, revision=''):
    """Force-set *bookmark* at *revision* (``hg bookmark --force``)."""
    repo = self._factory.repo(wire)
    baseui = self._factory._create_config(wire['config'])
    revision = revision or ''
    commands.bookmark(baseui, repo, safe_bytes(bookmark), rev=safe_bytes(revision), force=True)
1123
1158
@reraise_safe_exceptions
def install_hooks(self, wire, force=False):
    # we don't need any special hooks for Mercurial
    pass
1128
1163
@reraise_safe_exceptions
def get_hooks_info(self, wire):
    """Report installed hook versions; for hg these mirror the
    vcsserver version since no real hooks are installed."""
    return {
        'pre_version': vcsserver.__version__,
        'post_version': vcsserver.__version__,
    }
1135
1170
@reraise_safe_exceptions
def set_head_ref(self, wire, head_name):
    # no-op for Mercurial; head refs are a Git concept
    pass
1139
1174
@reraise_safe_exceptions
def archive_repo(self, wire, archive_name_key, kind, mtime, archive_at_path,
                 archive_dir_name, commit_id, cache_config):
    """
    Build (or fetch from cache) an archive of *commit_id*, optionally
    restricted to the subtree at *archive_at_path*; delegates storage to
    ``store_archive_in_cache``.
    """

    def file_walker(_commit_id, path):
        # Yield an ArchiveNode per manifest file under *path* ('' or '/'
        # means the whole tree).
        repo = self._factory.repo(wire)
        ctx = repo[_commit_id]
        is_root = path in ['', '/']
        if is_root:
            matcher = alwaysmatcher(badfn=None)
        else:
            matcher = patternmatcher('', [(b'glob', safe_bytes(path)+b'/**', b'')], badfn=None)
        file_iter = ctx.manifest().walk(matcher)

        for fn in file_iter:
            file_path = fn
            flags = ctx.flags(fn)
            # conditional expression instead of the fragile `and/or` trick
            mode = 0o755 if b'x' in flags else 0o644
            is_link = b'l' in flags

            yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)

    return store_archive_in_cache(
        file_walker, archive_name_key, kind, mtime, archive_at_path, archive_dir_name, commit_id, cache_config=cache_config)
1164
1199
General Comments 0
You need to be logged in to leave comments. Login now