merge: with latest default branch
super-admin
r1041:e1e68d19 merge python3
@@ -0,0 +1,16 b''
1 [DEFAULT]
2 done = false
3
4 [task:bump_version]
5 done = true
6
7 [task:fixes_on_stable]
8 done = true
9
10 [task:pip2nix_generated]
11 done = true
12
13 [release]
14 state = prepared
15 version = 4.27.1
16
@@ -0,0 +1,53 b''
1 ; #####################
2 ; LOGGING CONFIGURATION
3 ; #####################
4 ; Logging template, used to configure the logging
5 ; some variables here are replaced by RhodeCode with default values
6
7 [loggers]
8 keys = root, vcsserver
9
10 [handlers]
11 keys = console
12
13 [formatters]
14 keys = generic, json
15
16 ; #######
17 ; LOGGERS
18 ; #######
19 [logger_root]
20 level = NOTSET
21 handlers = console
22
23 [logger_vcsserver]
24 level = $RC_LOGGING_LEVEL
25 handlers =
26 qualname = vcsserver
27 propagate = 1
28
29 ; ########
30 ; HANDLERS
31 ; ########
32
33 [handler_console]
34 class = StreamHandler
35 args = (sys.stderr, )
36 level = $RC_LOGGING_LEVEL
37 ; To enable JSON formatted logs replace generic with json
38 ; This allows sending properly formatted logs to grafana loki or elasticsearch
39 #formatter = json
40 #formatter = generic
41 formatter = $RC_LOGGING_FORMATTER
42
43 ; ##########
44 ; FORMATTERS
45 ; ##########
46
47 [formatter_generic]
48 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
49 datefmt = %Y-%m-%d %H:%M:%S
50
51 [formatter_json]
52 format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
53 class = vcsserver.lib._vendor.jsonlogger.JsonFormatter
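The `$RC_LOGGING_LEVEL` and `$RC_LOGGING_FORMATTER` placeholders above are not read by Python's logging machinery directly; they get substituted at startup (see `SettingsMaker.enable_logging` later in this commit). A minimal sketch of that substitution, with illustrative values:

```python
# A minimal sketch (not from the diff) of how the template placeholders are
# filled in; mirrors SettingsMaker.enable_logging further down in this commit.
import os
import string

template = "level = $RC_LOGGING_LEVEL\nformatter = $RC_LOGGING_FORMATTER\n"

rendered = string.Template(template).safe_substitute(
    RC_LOGGING_LEVEL=os.environ.get('RC_LOGGING_LEVEL', '') or 'INFO',
    RC_LOGGING_FORMATTER=os.environ.get('RC_LOGGING_FORMATTER', '') or 'generic',
)
print(rendered)  # "level = INFO" / "formatter = generic" unless env overrides
```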
@@ -0,0 +1,10 b''
1 diff -rup configparser-4.0.2-orig/pyproject.toml configparser-4.0.2/pyproject.toml
2 --- configparser-4.0.2-orig/pyproject.toml 2021-03-22 21:28:11.000000000 +0100
3 +++ configparser-4.0.2/pyproject.toml 2021-03-22 21:28:11.000000000 +0100
4 @@ -1,5 +1,5 @@
5 [build-system]
6 -requires = ["setuptools>=40.7", "wheel", "setuptools_scm>=1.15"]
7 +requires = ["setuptools<=42.0", "wheel", "setuptools_scm<6.0.0"]
8 build-backend = "setuptools.build_meta"
9
10 [tool.black]
@@ -0,0 +1,7 b''
1 diff -rup importlib-metadata-1.6.0-orig/pyproject.toml importlib-metadata-1.6.0/pyproject.toml
2 --- importlib-metadata-1.6.0-orig/pyproject.toml 2021-03-22 22:10:33.000000000 +0100
3 +++ importlib-metadata-1.6.0/pyproject.toml 2021-03-22 22:11:09.000000000 +0100
4 @@ -1,3 +1,3 @@
5 [build-system]
6 -requires = ["setuptools>=30.3", "wheel", "setuptools_scm"]
7 +requires = ["setuptools<42.0", "wheel", "setuptools_scm<6.0.0"]
@@ -0,0 +1,12 b''
1 diff -rup pytest-4.6.9-orig/setup.py pytest-4.6.9/setup.py
2 --- pytest-4.6.9-orig/setup.py 2018-04-10 10:23:04.000000000 +0200
3 +++ pytest-4.6.9/setup.py 2018-04-10 10:23:34.000000000 +0200
4 @@ -24,7 +24,7 @@ def main():
5 def main():
6 setup(
7 use_scm_version={"write_to": "src/_pytest/_version.py"},
8 - setup_requires=["setuptools-scm", "setuptools>=40.0"],
9 + setup_requires=["setuptools-scm<6.0.0", "setuptools<=42.0"],
10 package_dir={"": "src"},
11 # fmt: off
12 extras_require={
\ No newline at end of file
@@ -0,0 +1,10 b''
1 diff -rup zip-1.2.0-orig/pyproject.toml zip-1.2.0/pyproject.toml
2 --- zip-1.2.0-orig/pyproject.toml 2021-03-23 10:55:37.000000000 +0100
3 +++ zip-1.2.0/pyproject.toml 2021-03-23 10:56:05.000000000 +0100
4 @@ -1,5 +1,5 @@
5 [build-system]
6 -requires = ["setuptools>=34.4", "wheel", "setuptools_scm>=1.15"]
7 +requires = ["setuptools<42.0", "wheel", "setuptools_scm<6.0.0"]
8 build-backend = "setuptools.build_meta"
9
10 [tool.black]
NO CONTENT: new file 100644
NO CONTENT: new file 100644
@@ -0,0 +1,207 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 import os
22 import textwrap
23 import string
24 import functools
25 import logging
26 import tempfile
27 import logging.config
28 log = logging.getLogger(__name__)
29
30 # skip keys that are set here, so we don't double-process those
31 set_keys = {
32 '__file__': ''
33 }
34
35
36 def str2bool(_str):
37 """
38     returns a True/False value from the given string; it tries to translate
39     the string into a boolean
40
41 :param _str: string value to translate into boolean
42 :rtype: boolean
43 :returns: boolean from given string
44 """
45 if _str is None:
46 return False
47 if _str in (True, False):
48 return _str
49 _str = str(_str).strip().lower()
50 return _str in ('t', 'true', 'y', 'yes', 'on', '1')
51
52
53 def aslist(obj, sep=None, strip=True):
54 """
55     Returns the given string, split by sep, as a list
56
57 :param obj:
58 :param sep:
59 :param strip:
60 """
61     if isinstance(obj, str):
62 if obj in ['', ""]:
63 return []
64
65 lst = obj.split(sep)
66 if strip:
67 lst = [v.strip() for v in lst]
68 return lst
69 elif isinstance(obj, (list, tuple)):
70 return obj
71 elif obj is None:
72 return []
73 else:
74 return [obj]
75
76
77 class SettingsMaker(object):
78
79 def __init__(self, app_settings):
80 self.settings = app_settings
81
82 @classmethod
83 def _bool_func(cls, input_val):
84         if isinstance(input_val, bytes):
85             input_val = input_val.decode('utf8')
86 return str2bool(input_val)
87
88 @classmethod
89 def _int_func(cls, input_val):
90 return int(input_val)
91
92 @classmethod
93 def _list_func(cls, input_val, sep=','):
94 return aslist(input_val, sep=sep)
95
96 @classmethod
97 def _string_func(cls, input_val, lower=True):
98 if lower:
99 input_val = input_val.lower()
100 return input_val
101
102 @classmethod
103 def _float_func(cls, input_val):
104 return float(input_val)
105
106 @classmethod
107 def _dir_func(cls, input_val, ensure_dir=False, mode=0o755):
108
109 # ensure we have our dir created
110 if not os.path.isdir(input_val) and ensure_dir:
111 os.makedirs(input_val, mode=mode)
112
113 if not os.path.isdir(input_val):
114 raise Exception('Dir at {} does not exist'.format(input_val))
115 return input_val
116
117 @classmethod
118 def _file_path_func(cls, input_val, ensure_dir=False, mode=0o755):
119 dirname = os.path.dirname(input_val)
120 cls._dir_func(dirname, ensure_dir=ensure_dir)
121 return input_val
122
123 @classmethod
124 def _key_transformator(cls, key):
125 return "{}_{}".format('RC'.upper(), key.upper().replace('.', '_').replace('-', '_'))
126
127 def maybe_env_key(self, key):
128 # now maybe we have this KEY in env, search and use the value with higher priority.
129 transformed_key = self._key_transformator(key)
130 envvar_value = os.environ.get(transformed_key)
131 if envvar_value:
132 log.debug('using `%s` key instead of `%s` key for config', transformed_key, key)
133
134 return envvar_value
135
136 def env_expand(self):
137 replaced = {}
138 for k, v in self.settings.items():
139 if k not in set_keys:
140 envvar_value = self.maybe_env_key(k)
141 if envvar_value:
142 replaced[k] = envvar_value
143 set_keys[k] = envvar_value
144
145 # replace ALL keys updated
146 self.settings.update(replaced)
147
148 def enable_logging(self, logging_conf=None, level='INFO', formatter='generic'):
149 """
150 Helper to enable debug on running instance
151 :return:
152 """
153
154 if not str2bool(self.settings.get('logging.autoconfigure')):
155 log.info('logging configuration based on main .ini file')
156 return
157
158 if logging_conf is None:
159 logging_conf = self.settings.get('logging.logging_conf_file') or ''
160
161 if not os.path.isfile(logging_conf):
162             log.error('Unable to set up logging based on %s: '
163                       'file does not exist. Specify a path using the logging.logging_conf_file config setting.', logging_conf)
164 return
165
166         with open(logging_conf, 'rt') as f:
167 ini_template = textwrap.dedent(f.read())
168 ini_template = string.Template(ini_template).safe_substitute(
169 RC_LOGGING_LEVEL=os.environ.get('RC_LOGGING_LEVEL', '') or level,
170 RC_LOGGING_FORMATTER=os.environ.get('RC_LOGGING_FORMATTER', '') or formatter
171 )
172
173         with tempfile.NamedTemporaryFile(mode='w', prefix='rc_logging_', suffix='.ini', delete=False) as f:
174 log.info('Saved Temporary LOGGING config at %s', f.name)
175 f.write(ini_template)
176
177 logging.config.fileConfig(f.name)
178 os.remove(f.name)
179
180 def make_setting(self, key, default, lower=False, default_when_empty=False, parser=None):
181 input_val = self.settings.get(key, default)
182
183 if default_when_empty and not input_val:
184 # use default value when value is set in the config but it is empty
185 input_val = default
186
187 parser_func = {
188 'bool': self._bool_func,
189 'int': self._int_func,
190 'list': self._list_func,
191             'list:newline': functools.partial(self._list_func, sep='\n'),
192 'list:spacesep': functools.partial(self._list_func, sep=' '),
193 'string': functools.partial(self._string_func, lower=lower),
194 'dir': self._dir_func,
195 'dir:ensured': functools.partial(self._dir_func, ensure_dir=True),
196 'file': self._file_path_func,
197 'file:ensured': functools.partial(self._file_path_func, ensure_dir=True),
198 None: lambda i: i
199 }[parser]
200
201 envvar_value = self.maybe_env_key(key)
202 if envvar_value:
203 input_val = envvar_value
204 set_keys[key] = input_val
205
206 self.settings[key] = parser_func(input_val)
207 return self.settings[key]
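A hypothetical usage sketch of `SettingsMaker` and the `RC_`-prefixed environment override produced by `_key_transformator`/`maybe_env_key` above (the key names here are illustrative):

```python
# Hypothetical usage sketch: a key from the ini dict overridden through the
# RC_-prefixed environment variable produced by _key_transformator above.
import os

os.environ['RC_VCS_PORT'] = '9901'  # overrides the 'vcs.port' ini key

maker = SettingsMaker({'vcs.port': '9900', 'cache.enabled': 'true'})

port = maker.make_setting('vcs.port', 9900, parser='int')          # env wins -> 9901
cache = maker.make_setting('cache.enabled', False, parser='bool')  # 'true' -> True
assert port == 9901 and cache is True
```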
@@ -0,0 +1,26 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 # This package contains non-RhodeCode-licensed packages that are
19 # vendored for various reasons
20
21 import os
22 import sys
23
24 vendor_dir = os.path.abspath(os.path.dirname(__file__))
25
26 sys.path.append(vendor_dir)
@@ -0,0 +1,243 b''
1 '''
2 This library is provided to allow standard python logging
3 to output log data as JSON formatted strings
4 '''
5 import logging
6 import json
7 import re
8 from datetime import date, datetime, time, tzinfo, timedelta
9 import traceback
10 import importlib
11
12 from inspect import istraceback
13
14 from collections import OrderedDict
15
16
17 def _inject_req_id(record, *args, **kwargs):
18 return record
19
20
21 ExceptionAwareFormatter = logging.Formatter
22
23
24 ZERO = timedelta(0)
25 HOUR = timedelta(hours=1)
26
27
28 class UTC(tzinfo):
29 """UTC"""
30
31 def utcoffset(self, dt):
32 return ZERO
33
34 def tzname(self, dt):
35 return "UTC"
36
37 def dst(self, dt):
38 return ZERO
39
40 utc = UTC()
41
42
43 # skip natural LogRecord attributes
44 # http://docs.python.org/library/logging.html#logrecord-attributes
45 RESERVED_ATTRS = (
46 'args', 'asctime', 'created', 'exc_info', 'exc_text', 'filename',
47 'funcName', 'levelname', 'levelno', 'lineno', 'module',
48 'msecs', 'message', 'msg', 'name', 'pathname', 'process',
49 'processName', 'relativeCreated', 'stack_info', 'thread', 'threadName')
50
51
52 def merge_record_extra(record, target, reserved):
53 """
54 Merges extra attributes from LogRecord object into target dictionary
55
56 :param record: logging.LogRecord
57 :param target: dict to update
58 :param reserved: dict or list with reserved keys to skip
59 """
60 for key, value in record.__dict__.items():
61         # this allows having numeric keys
62 if (key not in reserved
63 and not (hasattr(key, "startswith")
64 and key.startswith('_'))):
65 target[key] = value
66 return target
67
68
69 class JsonEncoder(json.JSONEncoder):
70 """
71 A custom encoder extending the default JSONEncoder
72 """
73
74 def default(self, obj):
75 if isinstance(obj, (date, datetime, time)):
76 return self.format_datetime_obj(obj)
77
78 elif istraceback(obj):
79 return ''.join(traceback.format_tb(obj)).strip()
80
81 elif type(obj) == Exception \
82 or isinstance(obj, Exception) \
83 or type(obj) == type:
84 return str(obj)
85
86 try:
87 return super(JsonEncoder, self).default(obj)
88
89 except TypeError:
90 try:
91 return str(obj)
92
93 except Exception:
94 return None
95
96 def format_datetime_obj(self, obj):
97 return obj.isoformat()
98
99
100 class JsonFormatter(ExceptionAwareFormatter):
101 """
102 A custom formatter to format logging records as json strings.
103 Extra values will be formatted as str() if not supported by
104 json default encoder
105 """
106
107 def __init__(self, *args, **kwargs):
108 """
109 :param json_default: a function for encoding non-standard objects
110 as outlined in http://docs.python.org/2/library/json.html
111 :param json_encoder: optional custom encoder
112 :param json_serializer: a :meth:`json.dumps`-compatible callable
113 that will be used to serialize the log record.
114 :param json_indent: an optional :meth:`json.dumps`-compatible numeric value
115 that will be used to customize the indent of the output json.
116 :param prefix: an optional string prefix added at the beginning of
117 the formatted string
118 :param json_indent: indent parameter for json.dumps
119 :param json_ensure_ascii: ensure_ascii parameter for json.dumps
120 :param reserved_attrs: an optional list of fields that will be skipped when
121 outputting json log record. Defaults to all log record attributes:
122 http://docs.python.org/library/logging.html#logrecord-attributes
123 :param timestamp: an optional string/boolean field to add a timestamp when
124 outputting the json log record. If string is passed, timestamp will be added
125 to log record using string as key. If True boolean is passed, timestamp key
126             will be "timestamp". Defaults to True in this vendored copy.
127 """
128 self.json_default = self._str_to_fn(kwargs.pop("json_default", None))
129 self.json_encoder = self._str_to_fn(kwargs.pop("json_encoder", None))
130 self.json_serializer = self._str_to_fn(kwargs.pop("json_serializer", json.dumps))
131 self.json_indent = kwargs.pop("json_indent", None)
132 self.json_ensure_ascii = kwargs.pop("json_ensure_ascii", True)
133 self.prefix = kwargs.pop("prefix", "")
134 reserved_attrs = kwargs.pop("reserved_attrs", RESERVED_ATTRS)
135 self.reserved_attrs = dict(zip(reserved_attrs, reserved_attrs))
136 self.timestamp = kwargs.pop("timestamp", True)
137
138 # super(JsonFormatter, self).__init__(*args, **kwargs)
139 logging.Formatter.__init__(self, *args, **kwargs)
140 if not self.json_encoder and not self.json_default:
141 self.json_encoder = JsonEncoder
142
143 self._required_fields = self.parse()
144 self._skip_fields = dict(zip(self._required_fields,
145 self._required_fields))
146 self._skip_fields.update(self.reserved_attrs)
147
148 def _str_to_fn(self, fn_as_str):
149 """
150 If the argument is not a string, return whatever was passed in.
151 Parses a string such as package.module.function, imports the module
152 and returns the function.
153
154 :param fn_as_str: The string to parse. If not a string, return it.
155 """
156 if not isinstance(fn_as_str, str):
157 return fn_as_str
158
159 path, _, function = fn_as_str.rpartition('.')
160 module = importlib.import_module(path)
161 return getattr(module, function)
162
163 def parse(self):
164 """
165 Parses format string looking for substitutions
166
167 This method is responsible for returning a list of fields (as strings)
168 to include in all log messages.
169 """
170 standard_formatters = re.compile(r'\((.+?)\)', re.IGNORECASE)
171 return standard_formatters.findall(self._fmt)
172
173 def add_fields(self, log_record, record, message_dict):
174 """
175 Override this method to implement custom logic for adding fields.
176 """
177 for field in self._required_fields:
178 log_record[field] = record.__dict__.get(field)
179 log_record.update(message_dict)
180 merge_record_extra(record, log_record, reserved=self._skip_fields)
181
182 if self.timestamp:
183 key = self.timestamp if type(self.timestamp) == str else 'timestamp'
184 log_record[key] = datetime.fromtimestamp(record.created, tz=utc)
185
186 def process_log_record(self, log_record):
187 """
188 Override this method to implement custom logic
189 on the possibly ordered dictionary.
190 """
191 return log_record
192
193 def jsonify_log_record(self, log_record):
194 """Returns a json string of the log record."""
195 return self.json_serializer(log_record,
196 default=self.json_default,
197 cls=self.json_encoder,
198 indent=self.json_indent,
199 ensure_ascii=self.json_ensure_ascii)
200
201 def serialize_log_record(self, log_record):
202 """Returns the final representation of the log record."""
203 return "%s%s" % (self.prefix, self.jsonify_log_record(log_record))
204
205 def format(self, record):
206 """Formats a log record and serializes to json"""
207 message_dict = {}
208 # FIXME: logging.LogRecord.msg and logging.LogRecord.message in typeshed
209 # are always type of str. We shouldn't need to override that.
210 if isinstance(record.msg, dict):
211 message_dict = record.msg
212 record.message = None
213 else:
214 record.message = record.getMessage()
215 # only format time if needed
216 if "asctime" in self._required_fields:
217 record.asctime = self.formatTime(record, self.datefmt)
218
219 # Display formatted exception, but allow overriding it in the
220 # user-supplied dict.
221 if record.exc_info and not message_dict.get('exc_info'):
222 message_dict['exc_info'] = self.formatException(record.exc_info)
223 if not message_dict.get('exc_info') and record.exc_text:
224 message_dict['exc_info'] = record.exc_text
225 # Display formatted record of stack frames
226 # default format is a string returned from :func:`traceback.print_stack`
227 try:
228 if record.stack_info and not message_dict.get('stack_info'):
229 message_dict['stack_info'] = self.formatStack(record.stack_info)
230 except AttributeError:
231 # Python2.7 doesn't have stack_info.
232 pass
233
234 try:
235 log_record = OrderedDict()
236 except NameError:
237 log_record = {}
238
239 _inject_req_id(record, with_prefix=False)
240 self.add_fields(log_record, record, message_dict)
241 log_record = self.process_log_record(log_record)
242
243 return self.serialize_log_record(log_record)
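A small usage sketch for the vendored `JsonFormatter` (assumed, not part of the diff): attach it to a handler; string messages land in the `message` field, while dict messages are merged into the JSON record key by key:

```python
# Usage sketch: wire JsonFormatter into a stdlib logging handler.
import logging

handler = logging.StreamHandler()
handler.setFormatter(JsonFormatter('%(levelname)s %(name)s %(message)s'))

demo_log = logging.getLogger('demo')
demo_log.addHandler(handler)
demo_log.setLevel(logging.INFO)

demo_log.info('plain message')                    # {"levelname": "INFO", ...}
demo_log.info({'event': 'push', 'repo': 'demo'})  # keys merged into the record
```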
@@ -0,0 +1,390 b''
1 import sys
2 import threading
3 import weakref
4 from base64 import b64encode
5 from logging import getLogger
6 from os import urandom
7
8 from redis import StrictRedis
9
10 __version__ = '3.7.0'
11
12 loggers = {
13 k: getLogger("vcsserver." + ".".join((__name__, k)))
14 for k in [
15 "acquire",
16 "refresh.thread.start",
17 "refresh.thread.stop",
18 "refresh.thread.exit",
19 "refresh.start",
20 "refresh.shutdown",
21 "refresh.exit",
22 "release",
23 ]
24 }
25
26 PY3 = sys.version_info[0] == 3
27
28 if PY3:
29 text_type = str
30 binary_type = bytes
31 else:
32 text_type = unicode # noqa
33 binary_type = str
34
35
36 # Check if the id matches. If not, return an error code.
37 UNLOCK_SCRIPT = b"""
38 if redis.call("get", KEYS[1]) ~= ARGV[1] then
39 return 1
40 else
41 redis.call("del", KEYS[2])
42 redis.call("lpush", KEYS[2], 1)
43 redis.call("pexpire", KEYS[2], ARGV[2])
44 redis.call("del", KEYS[1])
45 return 0
46 end
47 """
48
49 # Covers both cases: when the key doesn't exist and when it doesn't equal the lock's id
50 EXTEND_SCRIPT = b"""
51 if redis.call("get", KEYS[1]) ~= ARGV[1] then
52 return 1
53 elseif redis.call("ttl", KEYS[1]) < 0 then
54 return 2
55 else
56 redis.call("expire", KEYS[1], ARGV[2])
57 return 0
58 end
59 """
60
61 RESET_SCRIPT = b"""
62 redis.call('del', KEYS[2])
63 redis.call('lpush', KEYS[2], 1)
64 redis.call('pexpire', KEYS[2], ARGV[2])
65 return redis.call('del', KEYS[1])
66 """
67
68 RESET_ALL_SCRIPT = b"""
69 local locks = redis.call('keys', 'lock:*')
70 local signal
71 for _, lock in pairs(locks) do
72 signal = 'lock-signal:' .. string.sub(lock, 6)
73 redis.call('del', signal)
74 redis.call('lpush', signal, 1)
75 redis.call('expire', signal, 1)
76 redis.call('del', lock)
77 end
78 return #locks
79 """
80
81
82 class AlreadyAcquired(RuntimeError):
83 pass
84
85
86 class NotAcquired(RuntimeError):
87 pass
88
89
90 class AlreadyStarted(RuntimeError):
91 pass
92
93
94 class TimeoutNotUsable(RuntimeError):
95 pass
96
97
98 class InvalidTimeout(RuntimeError):
99 pass
100
101
102 class TimeoutTooLarge(RuntimeError):
103 pass
104
105
106 class NotExpirable(RuntimeError):
107 pass
108
109
110 class Lock(object):
111 """
112 A Lock context manager implemented via redis SETNX/BLPOP.
113 """
114 unlock_script = None
115 extend_script = None
116 reset_script = None
117 reset_all_script = None
118
119 def __init__(self, redis_client, name, expire=None, id=None, auto_renewal=False, strict=True, signal_expire=1000):
120 """
121 :param redis_client:
122 An instance of :class:`~StrictRedis`.
123 :param name:
124 The name (redis key) the lock should have.
125 :param expire:
126 The lock expiry time in seconds. If left at the default (None)
127 the lock will not expire.
128 :param id:
129 The ID (redis value) the lock should have. A random value is
130 generated when left at the default.
131
132 Note that if you specify this then the lock is marked as "held". Acquires
133 won't be possible.
134 :param auto_renewal:
135 If set to ``True``, Lock will automatically renew the lock so that it
136 doesn't expire for as long as the lock is held (acquire() called
137 or running in a context manager).
138
139 Implementation note: Renewal will happen using a daemon thread with
140 an interval of ``expire*2/3``. If wishing to use a different renewal
141 time, subclass Lock, call ``super().__init__()`` then set
142 ``self._lock_renewal_interval`` to your desired interval.
143 :param strict:
144 If set ``True`` then the ``redis_client`` needs to be an instance of ``redis.StrictRedis``.
145 :param signal_expire:
146 Advanced option to override signal list expiration in milliseconds. Increase it for very slow clients. Default: ``1000``.
147 """
148 if strict and not isinstance(redis_client, StrictRedis):
149 raise ValueError("redis_client must be instance of StrictRedis. "
150 "Use strict=False if you know what you're doing.")
151 if auto_renewal and expire is None:
152 raise ValueError("Expire may not be None when auto_renewal is set")
153
154 self._client = redis_client
155
156 if expire:
157 expire = int(expire)
158 if expire < 0:
159 raise ValueError("A negative expire is not acceptable.")
160 else:
161 expire = None
162 self._expire = expire
163
164 self._signal_expire = signal_expire
165 if id is None:
166 self._id = b64encode(urandom(18)).decode('ascii')
167 elif isinstance(id, binary_type):
168 try:
169 self._id = id.decode('ascii')
170 except UnicodeDecodeError:
171 self._id = b64encode(id).decode('ascii')
172 elif isinstance(id, text_type):
173 self._id = id
174 else:
175 raise TypeError("Incorrect type for `id`. Must be bytes/str not %s." % type(id))
176 self._name = 'lock:' + name
177 self._signal = 'lock-signal:' + name
178 self._lock_renewal_interval = (float(expire) * 2 / 3
179 if auto_renewal
180 else None)
181 self._lock_renewal_thread = None
182
183 self.register_scripts(redis_client)
184
185 @classmethod
186 def register_scripts(cls, redis_client):
187 global reset_all_script
188 if reset_all_script is None:
189 reset_all_script = redis_client.register_script(RESET_ALL_SCRIPT)
190 cls.unlock_script = redis_client.register_script(UNLOCK_SCRIPT)
191 cls.extend_script = redis_client.register_script(EXTEND_SCRIPT)
192 cls.reset_script = redis_client.register_script(RESET_SCRIPT)
193 cls.reset_all_script = redis_client.register_script(RESET_ALL_SCRIPT)
194
195 @property
196 def _held(self):
197 return self.id == self.get_owner_id()
198
199 def reset(self):
200 """
201 Forcibly deletes the lock. Use this with care.
202 """
203 self.reset_script(client=self._client, keys=(self._name, self._signal), args=(self.id, self._signal_expire))
204
205 @property
206 def id(self):
207 return self._id
208
209 def get_owner_id(self):
210 owner_id = self._client.get(self._name)
211 if isinstance(owner_id, binary_type):
212 owner_id = owner_id.decode('ascii', 'replace')
213 return owner_id
214
215 def acquire(self, blocking=True, timeout=None):
216 """
217 :param blocking:
218 Boolean value specifying whether lock should be blocking or not.
219 :param timeout:
220 An integer value specifying the maximum number of seconds to block.
221 """
222 logger = loggers["acquire"]
223
224 logger.debug("Getting blocking: %s acquire on %r ...", blocking, self._name)
225
226 if self._held:
227 owner_id = self.get_owner_id()
228 raise AlreadyAcquired("Already acquired from this Lock instance. Lock id: {}".format(owner_id))
229
230 if not blocking and timeout is not None:
231 raise TimeoutNotUsable("Timeout cannot be used if blocking=False")
232
233 if timeout:
234 timeout = int(timeout)
235 if timeout < 0:
236                 raise InvalidTimeout("Timeout (%d) cannot be less than 0" % timeout)
237
238 if self._expire and not self._lock_renewal_interval and timeout > self._expire:
239 raise TimeoutTooLarge("Timeout (%d) cannot be greater than expire (%d)" % (timeout, self._expire))
240
241 busy = True
242 blpop_timeout = timeout or self._expire or 0
243 timed_out = False
244 while busy:
245 busy = not self._client.set(self._name, self._id, nx=True, ex=self._expire)
246 if busy:
247 if timed_out:
248 return False
249 elif blocking:
250 timed_out = not self._client.blpop(self._signal, blpop_timeout) and timeout
251 else:
252 logger.warning("Failed to get %r.", self._name)
253 return False
254
255 logger.debug("Got lock for %r.", self._name)
256 if self._lock_renewal_interval is not None:
257 self._start_lock_renewer()
258 return True
259
260 def extend(self, expire=None):
261 """Extends expiration time of the lock.
262
263 :param expire:
264 New expiration time. If ``None`` - `expire` provided during
265 lock initialization will be taken.
266 """
267 if expire:
268 expire = int(expire)
269 if expire < 0:
270 raise ValueError("A negative expire is not acceptable.")
271 elif self._expire is not None:
272 expire = self._expire
273 else:
274 raise TypeError(
275 "To extend a lock 'expire' must be provided as an "
276 "argument to extend() method or at initialization time."
277 )
278
279 error = self.extend_script(client=self._client, keys=(self._name, self._signal), args=(self._id, expire))
280 if error == 1:
281 raise NotAcquired("Lock %s is not acquired or it already expired." % self._name)
282 elif error == 2:
283 raise NotExpirable("Lock %s has no assigned expiration time" % self._name)
284 elif error:
285 raise RuntimeError("Unsupported error code %s from EXTEND script" % error)
286
287 @staticmethod
288 def _lock_renewer(lockref, interval, stop):
289 """
290         Renew the lock key in redis every `interval` seconds until the
291         `stop` event is set.
292 """
293 while not stop.wait(timeout=interval):
294 loggers["refresh.thread.start"].debug("Refreshing lock")
295 lock = lockref()
296 if lock is None:
297 loggers["refresh.thread.stop"].debug(
298 "The lock no longer exists, stopping lock refreshing"
299 )
300 break
301 lock.extend(expire=lock._expire)
302 del lock
303 loggers["refresh.thread.exit"].debug("Exit requested, stopping lock refreshing")
304
305 def _start_lock_renewer(self):
306 """
307 Starts the lock refresher thread.
308 """
309 if self._lock_renewal_thread is not None:
310 raise AlreadyStarted("Lock refresh thread already started")
311
312 loggers["refresh.start"].debug(
313 "Starting thread to refresh lock every %s seconds",
314 self._lock_renewal_interval
315 )
316 self._lock_renewal_stop = threading.Event()
317 self._lock_renewal_thread = threading.Thread(
318 group=None,
319 target=self._lock_renewer,
320 kwargs={'lockref': weakref.ref(self),
321 'interval': self._lock_renewal_interval,
322 'stop': self._lock_renewal_stop}
323 )
324         self._lock_renewal_thread.daemon = True
325 self._lock_renewal_thread.start()
326
327 def _stop_lock_renewer(self):
328 """
329 Stop the lock renewer.
330
331 This signals the renewal thread and waits for its exit.
332 """
333 if self._lock_renewal_thread is None or not self._lock_renewal_thread.is_alive():
334 return
335 loggers["refresh.shutdown"].debug("Signalling the lock refresher to stop")
336 self._lock_renewal_stop.set()
337 self._lock_renewal_thread.join()
338 self._lock_renewal_thread = None
339 loggers["refresh.exit"].debug("Lock refresher has stopped")
340
341 def __enter__(self):
342 acquired = self.acquire(blocking=True)
343 assert acquired, "Lock wasn't acquired, but blocking=True"
344 return self
345
346 def __exit__(self, exc_type=None, exc_value=None, traceback=None):
347 self.release()
348
349 def release(self):
350         """Releases the lock that was acquired with this object.
351
352 .. note::
353
354 If you want to release a lock that you acquired in a different place you have two choices:
355
356 * Use ``Lock("name", id=id_from_other_place).release()``
357 * Use ``Lock("name").reset()``
358 """
359 if self._lock_renewal_thread is not None:
360 self._stop_lock_renewer()
361 loggers["release"].debug("Releasing %r.", self._name)
362 error = self.unlock_script(client=self._client, keys=(self._name, self._signal), args=(self._id, self._signal_expire))
363 if error == 1:
364 raise NotAcquired("Lock %s is not acquired or it already expired." % self._name)
365 elif error:
366             raise RuntimeError("Unsupported error code %s from UNLOCK script." % error)
367
368 def locked(self):
369 """
370 Return true if the lock is acquired.
371
372         Checks whether a lock with the same name already exists. This method returns
373         true even if the lock has a different id.
374 """
375 return self._client.exists(self._name) == 1
376
377
378 reset_all_script = None
379
380
381 def reset_all(redis_client):
382 """
383     Forcibly deletes all remaining locks (e.g. left over after a crash). Use this with care.
384
385 :param redis_client:
386 An instance of :class:`~StrictRedis`.
387 """
388 Lock.register_scripts(redis_client)
389
390 reset_all_script(client=redis_client) # noqa
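A hypothetical usage sketch for the vendored `Lock`; it assumes a reachable Redis instance:

```python
# Hypothetical usage sketch for the vendored Lock; assumes a reachable Redis.
from redis import StrictRedis

conn = StrictRedis()

# Blocks until acquired; auto_renewal keeps extending the 60s expiry
# from a daemon thread for as long as the lock is held.
with Lock(conn, "repo-update", expire=60, auto_renewal=True):
    pass  # critical section: a single holder across all processes

# Non-blocking attempt:
lock = Lock(conn, "repo-update", expire=60)
if lock.acquire(blocking=False):
    try:
        pass  # got the lock
    finally:
        lock.release()
```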
@@ -0,0 +1,52 b''
1 from __future__ import absolute_import, division, unicode_literals
2
3 import logging
4
5 from .stream import TCPStatsClient, UnixSocketStatsClient # noqa
6 from .udp import StatsClient # noqa
7
8 HOST = 'localhost'
9 PORT = 8125
10 IPV6 = False
11 PREFIX = None
12 MAXUDPSIZE = 512
13
14 log = logging.getLogger('rhodecode.statsd')
15
16
17 def statsd_config(config, prefix='statsd.'):
18 _config = {}
19 for key in config.keys():
20 if key.startswith(prefix):
21 _config[key[len(prefix):]] = config[key]
22 return _config
23
24
25 def client_from_config(configuration, prefix='statsd.', **kwargs):
26 from pyramid.settings import asbool
27
28 _config = statsd_config(configuration, prefix)
29 statsd_enabled = asbool(_config.pop('enabled', False))
30 if not statsd_enabled:
31         log.debug('statsd client not enabled by the statsd.enabled flag, skipping...')
32 return
33
34 host = _config.pop('statsd_host', HOST)
35 port = _config.pop('statsd_port', PORT)
36 prefix = _config.pop('statsd_prefix', PREFIX)
37 maxudpsize = _config.pop('statsd_maxudpsize', MAXUDPSIZE)
38 ipv6 = asbool(_config.pop('statsd_ipv6', IPV6))
39 log.debug('configured statsd client %s:%s', host, port)
40
41 try:
42 client = StatsClient(
43 host=host, port=port, prefix=prefix, maxudpsize=maxudpsize, ipv6=ipv6)
44 except Exception:
45 log.exception('StatsD is enabled, but failed to connect to statsd server, fallback: disable statsd')
46 client = None
47
48 return client
49
50
51 def get_statsd_client(request):
52 return client_from_config(request.registry.settings)
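A sketch of the configuration shape `client_from_config` expects (keys follow the `statsd.` prefix convention handled by `statsd_config` above; the values are illustrative):

```python
# Sketch of the expected configuration; values are illustrative.
config = {
    'statsd.enabled': 'true',
    'statsd.statsd_host': '127.0.0.1',
    'statsd.statsd_port': 8125,
    'statsd.statsd_prefix': 'vcsserver',
}
client = client_from_config(config)  # None when statsd.enabled is false
if client:
    client.incr('requests_total')
```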
@@ -0,0 +1,156 b''
1 from __future__ import absolute_import, division, unicode_literals
2
3 import re
4 import random
5 from collections import deque
6 from datetime import timedelta
7 from repoze.lru import lru_cache
8
9 from .timer import Timer
10
11 TAG_INVALID_CHARS_RE = re.compile(
12 r"[^\w\d_\-:/\.]",
13 #re.UNICODE
14 )
15 TAG_INVALID_CHARS_SUBS = "_"
16
17 # we save and expose methods called by statsd for discovery
18 buckets_dict = {
19
20 }
21
22
23 @lru_cache(maxsize=500)
24 def _normalize_tags_with_cache(tag_list):
25 return [TAG_INVALID_CHARS_RE.sub(TAG_INVALID_CHARS_SUBS, tag) for tag in tag_list]
26
27
28 def normalize_tags(tag_list):
29 # We have to turn our input tag list into a non-mutable tuple for it to
30 # be hashable (and thus usable) by the @lru_cache decorator.
31 return _normalize_tags_with_cache(tuple(tag_list))
32
33
34 class StatsClientBase(object):
35 """A Base class for various statsd clients."""
36
37 def close(self):
38 """Used to close and clean up any underlying resources."""
39 raise NotImplementedError()
40
41 def _send(self):
42 raise NotImplementedError()
43
44 def pipeline(self):
45 raise NotImplementedError()
46
47 def timer(self, stat, rate=1, tags=None, auto_send=True):
48 """
49 statsd = StatsdClient.statsd
50 with statsd.timer('bucket_name', auto_send=True) as tmr:
51 # This block will be timed.
52             for i in range(0, 100000):
53 i ** 2
54 # you can access time here...
55 elapsed_ms = tmr.ms
56 """
57 return Timer(self, stat, rate, tags, auto_send=auto_send)
58
59 def timing(self, stat, delta, rate=1, tags=None, use_decimals=True):
60 """
61 Send new timing information.
62
63 `delta` can be either a number of milliseconds or a timedelta.
64 """
65 if isinstance(delta, timedelta):
66 # Convert timedelta to number of milliseconds.
67 delta = delta.total_seconds() * 1000.
68 if use_decimals:
69 fmt = '%0.6f|ms'
70 else:
71 fmt = '%s|ms'
72 self._send_stat(stat, fmt % delta, rate, tags)
73
74 def incr(self, stat, count=1, rate=1, tags=None):
75 """Increment a stat by `count`."""
76 self._send_stat(stat, '%s|c' % count, rate, tags)
77
78 def decr(self, stat, count=1, rate=1, tags=None):
79 """Decrement a stat by `count`."""
80 self.incr(stat, -count, rate, tags)
81
82 def gauge(self, stat, value, rate=1, delta=False, tags=None):
83 """Set a gauge value."""
84 if value < 0 and not delta:
85 if rate < 1:
86 if random.random() > rate:
87 return
88 with self.pipeline() as pipe:
89 pipe._send_stat(stat, '0|g', 1)
90 pipe._send_stat(stat, '%s|g' % value, 1)
91 else:
92 prefix = '+' if delta and value >= 0 else ''
93 self._send_stat(stat, '%s%s|g' % (prefix, value), rate, tags)
94
95 def set(self, stat, value, rate=1):
96 """Set a set value."""
97 self._send_stat(stat, '%s|s' % value, rate)
98
99 def histogram(self, stat, value, rate=1, tags=None):
100 """Set a histogram"""
101 self._send_stat(stat, '%s|h' % value, rate, tags)
102
103 def _send_stat(self, stat, value, rate, tags=None):
104 self._after(self._prepare(stat, value, rate, tags))
105
106 def _prepare(self, stat, value, rate, tags=None):
107 global buckets_dict
108 buckets_dict[stat] = 1
109
110 if rate < 1:
111 if random.random() > rate:
112 return
113 value = '%s|@%s' % (value, rate)
114
115 if self._prefix:
116 stat = '%s.%s' % (self._prefix, stat)
117
118 res = '%s:%s%s' % (
119 stat,
120 value,
121 ("|#" + ",".join(normalize_tags(tags))) if tags else "",
122 )
123 return res
124
125 def _after(self, data):
126 if data:
127 self._send(data)
128
129
130 class PipelineBase(StatsClientBase):
131
132 def __init__(self, client):
133 self._client = client
134 self._prefix = client._prefix
135 self._stats = deque()
136
137 def _send(self):
138 raise NotImplementedError()
139
140 def _after(self, data):
141 if data is not None:
142 self._stats.append(data)
143
144 def __enter__(self):
145 return self
146
147 def __exit__(self, typ, value, tb):
148 self.send()
149
150 def send(self):
151 if not self._stats:
152 return
153 self._send()
154
155 def pipeline(self):
156 return self.__class__(self)
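For reference, the statsd wire format that `_prepare`/`_send_stat` above emit, shown as comments with an assumed `vcsserver` prefix:

```python
# Illustrative statsd wire format produced by _prepare/_send_stat,
# assuming a client configured with prefix='vcsserver':
#
#   client.incr('calls')                     -> 'vcsserver.calls:1|c'
#   client.timing('req', 32.5)               -> 'vcsserver.req:32.500000|ms'
#   client.gauge('workers', 5)               -> 'vcsserver.workers:5|g'
#   client.incr('calls', rate=0.1)           -> 'vcsserver.calls:1|c|@0.1'
#   client.incr('calls', tags=['env:prod'])  -> 'vcsserver.calls:1|c|#env:prod'
```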
@@ -0,0 +1,75 b''
1 from __future__ import absolute_import, division, unicode_literals
2
3 import socket
4
5 from .base import StatsClientBase, PipelineBase
6
7
8 class StreamPipeline(PipelineBase):
9 def _send(self):
10 self._client._after('\n'.join(self._stats))
11 self._stats.clear()
12
13
14 class StreamClientBase(StatsClientBase):
15 def connect(self):
16 raise NotImplementedError()
17
18 def close(self):
19 if self._sock and hasattr(self._sock, 'close'):
20 self._sock.close()
21 self._sock = None
22
23 def reconnect(self):
24 self.close()
25 self.connect()
26
27 def pipeline(self):
28 return StreamPipeline(self)
29
30 def _send(self, data):
31 """Send data to statsd."""
32 if not self._sock:
33 self.connect()
34 self._do_send(data)
35
36 def _do_send(self, data):
37 self._sock.sendall(data.encode('ascii') + b'\n')
38
39
40 class TCPStatsClient(StreamClientBase):
41 """TCP version of StatsClient."""
42
43 def __init__(self, host='localhost', port=8125, prefix=None,
44 timeout=None, ipv6=False):
45 """Create a new client."""
46 self._host = host
47 self._port = port
48 self._ipv6 = ipv6
49 self._timeout = timeout
50 self._prefix = prefix
51 self._sock = None
52
53 def connect(self):
54 fam = socket.AF_INET6 if self._ipv6 else socket.AF_INET
55 family, _, _, _, addr = socket.getaddrinfo(
56 self._host, self._port, fam, socket.SOCK_STREAM)[0]
57 self._sock = socket.socket(family, socket.SOCK_STREAM)
58 self._sock.settimeout(self._timeout)
59 self._sock.connect(addr)
60
61
62 class UnixSocketStatsClient(StreamClientBase):
63 """Unix domain socket version of StatsClient."""
64
65 def __init__(self, socket_path, prefix=None, timeout=None):
66 """Create a new client."""
67 self._socket_path = socket_path
68 self._timeout = timeout
69 self._prefix = prefix
70 self._sock = None
71
72 def connect(self):
73 self._sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
74 self._sock.settimeout(self._timeout)
75 self._sock.connect(self._socket_path)
@@ -0,0 +1,75 b''
1 from __future__ import absolute_import, division, unicode_literals
2
3 import functools
4
5 # Use timer that's not susceptible to time of day adjustments.
6 try:
7 # perf_counter is only present on Py3.3+
8 from time import perf_counter as time_now
9 except ImportError:
10 # fall back to using time
11 from time import time as time_now
12
13
14 def safe_wraps(wrapper, *args, **kwargs):
15 """Safely wraps partial functions."""
16 while isinstance(wrapper, functools.partial):
17 wrapper = wrapper.func
18 return functools.wraps(wrapper, *args, **kwargs)
19
20
21 class Timer(object):
22 """A context manager/decorator for statsd.timing()."""
23
24 def __init__(self, client, stat, rate=1, tags=None, use_decimals=True, auto_send=True):
25 self.client = client
26 self.stat = stat
27 self.rate = rate
28 self.tags = tags
29 self.ms = None
30 self._sent = False
31 self._start_time = None
32 self.use_decimals = use_decimals
33 self.auto_send = auto_send
34
35 def __call__(self, f):
36 """Thread-safe timing function decorator."""
37 @safe_wraps(f)
38 def _wrapped(*args, **kwargs):
39 start_time = time_now()
40 try:
41 return f(*args, **kwargs)
42 finally:
43 elapsed_time_ms = 1000.0 * (time_now() - start_time)
44 self.client.timing(self.stat, elapsed_time_ms, self.rate, self.tags, self.use_decimals)
45 self._sent = True
46 return _wrapped
47
48 def __enter__(self):
49 return self.start()
50
51 def __exit__(self, typ, value, tb):
52 self.stop(send=self.auto_send)
53
54 def start(self):
55 self.ms = None
56 self._sent = False
57 self._start_time = time_now()
58 return self
59
60 def stop(self, send=True):
61 if self._start_time is None:
62 raise RuntimeError('Timer has not started.')
63 dt = time_now() - self._start_time
64 self.ms = 1000.0 * dt # Convert to milliseconds.
65 if send:
66 self.send()
67 return self
68
69 def send(self):
70 if self.ms is None:
71 raise RuntimeError('No data recorded.')
72 if self._sent:
73 raise RuntimeError('Already sent data.')
74 self._sent = True
75 self.client.timing(self.stat, self.ms, self.rate, self.tags, self.use_decimals)
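A usage sketch for `Timer`, assuming the UDP `StatsClient` from the sibling udp module (stat names are illustrative; statsd is fire-and-forget UDP, so a missing daemon is harmless):

```python
# Usage sketch: Timer as a decorator and as a context manager.
statsd = StatsClient(prefix='vcsserver')

@statsd.timer('handlers.push')          # decorator form, times every call
def handle_push():
    pass

with statsd.timer('handlers.pull') as tmr:
    pass                                # timed block, sent on exit
print(tmr.ms)                           # elapsed milliseconds
```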
@@ -0,0 +1,55 b''
1 from __future__ import absolute_import, division, unicode_literals
2
3 import socket
4
5 from .base import StatsClientBase, PipelineBase
6
7
8 class Pipeline(PipelineBase):
9
10 def __init__(self, client):
11 super(Pipeline, self).__init__(client)
12 self._maxudpsize = client._maxudpsize
13
14 def _send(self):
15 data = self._stats.popleft()
16 while self._stats:
17 # Use popleft to preserve the order of the stats.
18 stat = self._stats.popleft()
19 if len(stat) + len(data) + 1 >= self._maxudpsize:
20 self._client._after(data)
21 data = stat
22 else:
23 data += '\n' + stat
24 self._client._after(data)
25
26
27 class StatsClient(StatsClientBase):
28 """A client for statsd."""
29
30 def __init__(self, host='localhost', port=8125, prefix=None,
31 maxudpsize=512, ipv6=False):
32 """Create a new client."""
33 fam = socket.AF_INET6 if ipv6 else socket.AF_INET
34 family, _, _, _, addr = socket.getaddrinfo(
35 host, port, fam, socket.SOCK_DGRAM)[0]
36 self._addr = addr
37 self._sock = socket.socket(family, socket.SOCK_DGRAM)
38 self._prefix = prefix
39 self._maxudpsize = maxudpsize
40
41 def _send(self, data):
42 """Send data to statsd."""
43 try:
44 self._sock.sendto(data.encode('ascii'), self._addr)
45 except (socket.error, RuntimeError):
46 # No time for love, Dr. Jones!
47 pass
48
49 def close(self):
50 if self._sock and hasattr(self._sock, 'close'):
51 self._sock.close()
52 self._sock = None
53
54 def pipeline(self):
55 return Pipeline(self)
@@ -0,0 +1,49 b''
1 from vcsserver.lib._vendor.statsd import client_from_config
2
3
4 class StatsdClientNotInitialised(Exception):
5 pass
6
7
8 class _Singleton(type):
9 """A metaclass that creates a Singleton base class when called."""
10
11 _instances = {}
12
13 def __call__(cls, *args, **kwargs):
14 if cls not in cls._instances:
15 cls._instances[cls] = super(_Singleton, cls).__call__(*args, **kwargs)
16 return cls._instances[cls]
17
18
19 class Singleton(_Singleton("SingletonMeta", (object,), {})):
20 pass
21
22
23 class StatsdClientClass(Singleton):
24 setup_run = False
25 statsd_client = None
26 statsd = None
27
28 def __getattribute__(self, name):
29
30 if name.startswith("statsd"):
31 if self.setup_run:
32 return super(StatsdClientClass, self).__getattribute__(name)
33 else:
34 return None
35 #raise StatsdClientNotInitialised("requested key was %s" % name)
36
37 return super(StatsdClientClass, self).__getattribute__(name)
38
39 def setup(self, settings):
40 """
41 Initialize the client
42 """
43 statsd = client_from_config(settings)
44 self.statsd = statsd
45 self.statsd_client = statsd
46 self.setup_run = True
47
48
49 StatsdClient = StatsdClientClass()
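A hypothetical wiring sketch for the `StatsdClient` singleton: call `setup()` once at startup; until then, every attribute starting with `statsd` reads as `None`:

```python
# Hypothetical wiring sketch for the StatsdClient singleton.
settings = {'statsd.enabled': 'true', 'statsd.statsd_host': '127.0.0.1'}

assert StatsdClient.statsd is None  # not initialised yet

StatsdClient.setup(settings)
if StatsdClient.statsd:
    StatsdClient.statsd.incr('app_started')
```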
@@ -1,6 +1,5 b''
1 [bumpversion]
2 current_version = 5.0.0
3 message = release: Bump version {current_version} to {new_version}
4
5 [bumpversion:file:vcsserver/VERSION]
@@ -1,63 +1,82 b''
1 c6fad7d1e61f22b1f4a4863eff207a04c27e9462 v4.0.0
2 77b6e243b4cc5b702c15abd6d737798edbac60dc v4.0.1
3 a359c072337fdd8e1e71df72cc520b8a9b042f80 v4.1.0
4 49aa7ed030a36b7ceba149a21e587cb5d20b4946 v4.1.1
5 f38ed1e1a31dce3c170b4d31585ba43471cf0705 v4.1.2
6 21269ba7bafd8f0c77e79dd86a31eb9bce7643d2 v4.2.0
7 b53930c918c25b2c8f69ceddc6641e511be27fd3 v4.2.1
8 6627ff4119723d8b2b60918e8b1aa49e9f055aab v4.3.0
9 d38f2c2b861dde6c4178923f7cf15ea58b85aa92 v4.3.1
10 1232313f9e6adac5ce5399c2a891dc1e72b79022 v4.4.0
11 cbb9f1d329ae5768379cdec55a62ebdd546c4e27 v4.4.1
12 24ffe44a27fcd1c5b6936144e176b9f6dd2f3a17 v4.4.2
13 beaeeaa440cd17471110d4621b8816506c0dff4a v4.5.0
14 668e5c656f61dd94595611844e1106d1361aa6a7 v4.5.1
15 ae0640240cb7a77e6dc8c77e64dd80d79732cb5b v4.5.2
16 7af06899f426813583fe60449d7517cc49c15b28 v4.6.0
17 8f7f4299bf341b43f94dadafa1ea73d6cea2c9ba v4.6.1
18 de00a831a0709ffaac57f948738ea927b97223a9 v4.7.0
19 57f527e0646d731768fb5e0fe742b12a35bdc63b v4.7.1
20 f9b09787da9845e4a105f4bffdc252099902cefb v4.7.2
21 0b7c790b726f08385e6ebdf4f257c905787b9244 v4.8.0
22 f4123e725b74d0e82fe89982ab8791a66062e2b3 v4.9.0
23 940bac044a0fe1ec839759df81399b50141be720 v4.9.1
24 582d9ebbe46bdddac4b26eacae36ee5ecabca267 v4.10.0
25 12fbd08d0ab57acce9c0bdccee75633cfa08d7f4 v4.10.1
26 78352f95021a9d128f5803fdbca7036daef5dabe v4.10.2
27 a47ccfb020cda78c8680e3844aaf0b82b1390f3b v4.10.3
28 347ae9ae544bba8deb417995285287a3b6be1611 v4.10.4
29 9b257ac49841f850434be0d518baca0827e6c8cc v4.10.5
30 e8bf26eea118694edc4ffe50c6c5aa91022bc434 v4.10.6
31 71fa9274ba59fb982104f0b9b3d0d024c78675f7 v4.11.0
32 92471577ef25636e5babe8001d47fc8e51521522 v4.11.1
33 0277edbcda5a8d075e1e41a95bcee6dcf21f3f77 v4.11.2
34 6c5ecbf0778ef870e5b23d9fad5340135b563356 v4.11.3
35 be788a89a939ebd63606220064bd624fa9d5c9c9 v4.11.4
36 15c90a04098a373ac761fab07695fd80dde3bcdb v4.11.5
37 77aff155b3251cc00394a49f5e8f2c99e33149a7 v4.11.6
38 c218a1ce5d370c2e671d42a91684b3fc2c91b81d v4.12.0
39 80085fb846cc948195a5c76b579ca34cbc49b59b v4.12.1
40 346f04fc8a18df3235defbe6e71bd552c0d46481 v4.12.2
41 764fdd752322f3e0c13ea00957f2d548bf4363a7 v4.12.3
42 b58038974a5cecbb9c100d32ad2e4c68582f1a78 v4.12.4
43 e1d42d92a0fec0c80b56c82f37bc7b5472613706 v4.13.0
44 c3ded3ff17e9bb2a47002a808984a7a946f58a1c v4.13.1
45 7ff81aa47b1b40cdef9dd5bcdd439f59c269db3d v4.13.2
46 628a08e6aaeff2c3f9e0e268e854f870e6778e53 v4.13.3
47 941d675f10cfa7d774815bfacfb37085751b7a0d v4.14.0
48 75e11d32c0be0a457198f07888e7ef650cfa6888 v4.14.1
49 6c6f49fda0191c4641dcd43aa0d4376b8b728d40 v4.15.0
50 184dea5e01c36e6474c83d3bb34719cdfec22b0d v4.15.1
51 a4dc3669345553582296b2ce1485229a6c6f0522 v4.15.2
52 d2a4a1a66f204668841da1cdccfa29083e1ef7a3 v4.16.0
53 744cf8f2c8f23051978fc293404bf475cc5a31f6 v4.16.1
54 e68aff93ce4ad11fea13420e914f7dfb05c39566 v4.16.2
55 647aeff9752dc1aa00796fa280d0d2ce2f511bc9 v4.17.0
56 5e0c2990e095bba1dc903cf0e6ef6ac035e0ccf9 v4.17.1
57 8a824544d95037d76d99b104b5d2363858101d53 v4.17.2
58 ccd806a2d9482f61bd7e8956a02a28eb24a1d46a v4.17.3
59 e533ca02ccc205189b7bad9f227a312212772022 v4.17.4
60 ba6a6dc9ecd7fd8b1dcd6eb0c4ee0210e897c426 v4.18.0
61 17bc818b41bcf6883b9ff0da31f01d8c2a5d0781 v4.18.1
62 1e9f12aa01f82c335abc9017efe94ce1c30b52ba v4.18.2
63 f4cc6b3c5680bdf4541d7d442fbb7086640fb547 v4.18.3
64 5dc0277e4f77bd4cc3042d99625bb5d3ba480c8c v4.19.0
65 3a815eeb1b1efa340dda9b81a8da3cf24a7d605b v4.19.1
66 8841da3680fba841e5a54ebccd8ca56c078f7553 v4.19.2
67 4b0dec7fd80b1ca38e5073e5e562a5a450f73669 v4.19.3
68 1485aa75ffe1b1ec48352dce7b7492d92f85e95f v4.20.0
69 5b740274011766ef2f73803cc196d081e1e7f1d4 v4.20.1
70 5a7835234e2c45e8fb8184c60f548a64b5842af8 v4.21.0
71 26af88343015f8b89d5a66f92bc7547c51fcf0df v4.22.0
72 cf54e5f700fe5dc50af1a1bdf5197c18cf52105f v4.23.0
73 179d989bcfe02c6227f9f6aa9236cbbe1c14c400 v4.23.1
74 383aee8b1652affaa26aefe336a89ee366b2b26d v4.23.2
75 bc1a8141cc51fc23c455ebc50c6609c810b46f8d v4.24.0
76 530a1c03caabc806ea1ef34605f8f67f18c70e55 v4.24.1
77 5908ae65cee1043982e1b26d7b618af5fcfebbb3 v4.25.0
78 cce8bcdf75090d5943a1e9706fe5212d7b5d1fa1 v4.25.1
79 8610c4bf846c63bbc95d3ddfb53fadaaa9c7aa42 v4.25.2
80 d46b7d1be72c76c9f9aaeab6a342951d54459f49 v4.26.0
81 6fba0daab1e20a9e18fb70fa59bd21753e0a5b90 v4.27.0
82 6195da4fc454087173918ae59cae946289458676 v4.27.1
@@ -1,237 +1,275 b''
1 ## -*- coding: utf-8 -*-
1 ## -*- coding: utf-8 -*-
2
2
3 ; #################################
3 ; #################################
4 ; RHODECODE VCSSERVER CONFIGURATION
4 ; RHODECODE VCSSERVER CONFIGURATION
5 ; #################################
5 ; #################################
6
6
7 [server:main]
7 [server:main]
8 ; COMMON HOST/IP CONFIG
8 ; COMMON HOST/IP CONFIG
9 host = 0.0.0.0
9 host = 0.0.0.0
10 port = 9900
10 port = 9900
11
11
12 ; ##################################################
12 ; ##################################################
13 ; WAITRESS WSGI SERVER - Recommended for Development
13 ; WAITRESS WSGI SERVER - Recommended for Development
14 ; ##################################################
14 ; ##################################################
15
15
16 ; use server type
16 ; use server type
17 use = egg:waitress#main
17 use = egg:waitress#main
18
18
19 ; number of worker threads
19 ; number of worker threads
20 threads = 5
20 threads = 5
21
21
22 ; MAX BODY SIZE 100GB
22 ; MAX BODY SIZE 100GB
23 max_request_body_size = 107374182400
23 max_request_body_size = 107374182400
24
24
25 ; Use poll instead of select, fixes file descriptors limits problems.
25 ; Use poll instead of select, fixes file descriptors limits problems.
26 ; May not work on old windows systems.
26 ; May not work on old windows systems.
27 asyncore_use_poll = true
27 asyncore_use_poll = true
28
28
29
29
30 ; ###########################
30 ; ###########################
31 ; GUNICORN APPLICATION SERVER
31 ; GUNICORN APPLICATION SERVER
32 ; ###########################
32 ; ###########################
33
33
34 ; run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
34 ; run with gunicorn --paste rhodecode.ini
35
35
36 ; Module to use, this setting shouldn't be changed
36 ; Module to use, this setting shouldn't be changed
37 #use = egg:gunicorn#main
37 #use = egg:gunicorn#main
38
38
39 ; Sets the number of process workers. More workers means more concurrent connections
39 ; Sets the number of process workers. More workers means more concurrent connections
40 ; RhodeCode can handle at the same time. Each additional worker also it increases
40 ; RhodeCode can handle at the same time. Each additional worker also it increases
41 ; memory usage as each has it's own set of caches.
41 ; memory usage as each has it's own set of caches.
42 ; Recommended value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers, but no more
42 ; Recommended value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers, but no more
43 ; than 8-10 unless for really big deployments .e.g 700-1000 users.
43 ; than 8-10 unless for really big deployments .e.g 700-1000 users.
44 ; `instance_id = *` must be set in the [app:main] section below (which is the default)
44 ; `instance_id = *` must be set in the [app:main] section below (which is the default)
45 ; when using more than 1 worker.
45 ; when using more than 1 worker.
46 #workers = 2
46 #workers = 2

; Gunicorn access log level
#loglevel = info

; Process name visible in process list
#proc_name = rhodecode_vcsserver

; Type of worker class, one of `sync`, `gevent`
; currently `sync` is the only option allowed.
#worker_class = sync

; The maximum number of simultaneous clients. Valid only for gevent
#worker_connections = 10

; Max number of requests that a worker will handle before being gracefully restarted.
; Prevents memory leaks; jitter adds variability so not all workers are restarted at once.
#max_requests = 1000
#max_requests_jitter = 30

; Amount of time a worker can spend handling a request before it
; gets killed and restarted. By default set to 21600 (6hrs)
; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h)
#timeout = 21600

; The maximum size of an HTTP request line in bytes.
; 0 for unlimited
#limit_request_line = 0

; Limit the number of HTTP header fields in a request.
; By default this value is 100 and can't be larger than 32768.
#limit_request_fields = 32768

; Limit the allowed size of an HTTP request header field.
; Value is a positive number or 0.
; Setting it to 0 will allow unlimited header field sizes.
#limit_request_field_size = 0

; Timeout for graceful workers restart.
; After receiving a restart signal, workers have this much time to finish
; serving requests. Workers still alive after the timeout (starting from the
; receipt of the restart signal) are force killed.
; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h)
-#graceful_timeout = 3600
+#graceful_timeout = 21600

# The number of seconds to wait for requests on a Keep-Alive connection.
# Generally set in the 1-5 seconds range.
#keepalive = 2

; Maximum memory usage that each worker can use before it will receive a
; graceful restart signal; 0 = memory monitoring is disabled
; Examples: 268435456 (256MB), 536870912 (512MB)
; 1073741824 (1GB), 2147483648 (2GB), 4294967296 (4GB)
#memory_max_usage = 0

; How often in seconds to check memory usage for each gunicorn worker
#memory_usage_check_interval = 60

; Threshold below which a worker is not recycled if GarbageCollection
; frees up enough resources. Before each restart we try to run GC on the worker;
; if we get enough free memory after that, the restart will not happen.
#memory_usage_recovery_threshold = 0.8
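
To make these three settings concrete, here is a small sketch of the recycle rule they drive (illustrative only; the actual logic lives in the gunicorn config module shown further below):

    memory_max_usage = 1073741824          # 1GB hard limit per worker
    memory_usage_recovery_threshold = 0.8  # ~819MB recovery line

    def should_recycle(rss_after_gc):
        # a worker over the limit first gets a gc.collect(); it is only
        # restarted if its RSS stays above limit * threshold afterwards
        return rss_after_gc > memory_max_usage * memory_usage_recovery_threshold

    should_recycle(900 * 1024 * 1024)  # True: 900MB > ~819MB, worker restarts
    should_recycle(700 * 1024 * 1024)  # False: GC freed enough, worker stays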


[app:main]
; The %(here)s variable will be replaced with the absolute path of parent directory
; of this file
+; Each option in the app:main section can be overridden by an environment variable
+;
+; To override an option:
+;
+; RC_<KeyName>
+; Everything should be uppercase; . and - should be replaced by _.
+; For example, this configuration setting:
+; rc_cache.repo_object.backend = foo
+; can be overridden with:
+; export RC_CACHE_REPO_OBJECT_BACKEND=foo
+
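The ini-key to environment-variable mapping is mechanical, so a small sketch makes it unambiguous (the helper name is made up for illustration; it is not part of the change):

    def ini_key_to_env_var(key):
        # uppercase everything, replace '.' and '-' with '_',
        # and ensure the RC_ prefix, matching the example above
        name = key.upper().replace('.', '_').replace('-', '_')
        return name if name.startswith('RC_') else 'RC_' + name

    ini_key_to_env_var('rc_cache.repo_object.backend')
    # -> 'RC_CACHE_REPO_OBJECT_BACKEND'
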
use = egg:rhodecode-vcsserver


; #############
; DEBUG OPTIONS
; #############

# During development we want to have the debug toolbar enabled
pyramid.includes =
    pyramid_debugtoolbar

debugtoolbar.hosts = 0.0.0.0/0
debugtoolbar.exclude_prefixes =
    /css
    /fonts
    /images
    /js

; #################
; END DEBUG OPTIONS
; #################

; Pyramid default locales, we need this to be set
-pyramid.default_locale_name = en
+#pyramid.default_locale_name = en

; default locale used by VCS systems
-locale = en_US.UTF-8
+#locale = en_US.UTF-8

; path to binaries for vcsserver, it should be set by the installer
-; at installation time, e.g /home/user/vcsserver-1/profile/bin
+; at installation time, e.g. /home/user/.rccontrol/vcsserver-1/profile/bin
; it can also be a path to nix-build output in case of development
core.binary_dir = ""

; Custom exception store path, defaults to TMPDIR
; This is used to store exceptions from RhodeCode in a shared directory
#exception_tracker.store_path =

; #############
; DOGPILE CACHE
; #############

; Default cache dir for caches. Putting this into a ramdisk can boost performance.
; eg. /tmpfs/data_ramdisk; however, this directory might require a large amount of space
-cache_dir = %(here)s/data
+#cache_dir = %(here)s/data

; ***************************************
; `repo_object` cache, default file based
; ***************************************

; `repo_object` cache settings for vcs methods for repositories
-rc_cache.repo_object.backend = dogpile.cache.rc.file_namespace
+#rc_cache.repo_object.backend = dogpile.cache.rc.file_namespace

; cache auto-expires after N seconds
; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days)
-rc_cache.repo_object.expiration_time = 2592000
+#rc_cache.repo_object.expiration_time = 2592000

; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
-#rc_cache.repo_object.arguments.filename = /tmp/vcsserver_cache.db
+#rc_cache.repo_object.arguments.filename = /tmp/vcsserver_cache_repo_object.db

; ***********************************************************
; `repo_object` cache with redis backend
; recommended for larger instances, and for better performance
; ***********************************************************

; `repo_object` cache settings for vcs methods for repositories
#rc_cache.repo_object.backend = dogpile.cache.rc.redis_msgpack

; cache auto-expires after N seconds
; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days)
#rc_cache.repo_object.expiration_time = 2592000

; redis_expiration_time needs to be greater than expiration_time
#rc_cache.repo_object.arguments.redis_expiration_time = 3592000

#rc_cache.repo_object.arguments.host = localhost
#rc_cache.repo_object.arguments.port = 6379
#rc_cache.repo_object.arguments.db = 5
#rc_cache.repo_object.arguments.socket_timeout = 30
; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
#rc_cache.repo_object.arguments.distributed_lock = true

+; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
+#rc_cache.repo_object.arguments.lock_auto_renewal = true
+
+; Statsd client config, this is used to send metrics to statsd
+; We recommend setting up statsd_exporter and scraping metrics with Prometheus
+#statsd.enabled = false
+#statsd.statsd_host = 0.0.0.0
+#statsd.statsd_port = 8125
+#statsd.statsd_prefix =
+#statsd.statsd_ipv6 = false
+
+; Configure logging automatically at server startup; set to false
+; to use the custom logging config below.
+; The RC_LOGGING_FORMATTER and RC_LOGGING_LEVEL env variables
+; can control the logging settings when autoconfigure is used.
+
+#logging.autoconfigure = true
+
+; specify your own custom logging config file to configure logging
+#logging.logging_conf_file = /path/to/custom_logging.ini

; #####################
; LOGGING CONFIGURATION
; #####################

[loggers]
keys = root, vcsserver

[handlers]
keys = console

[formatters]
-keys = generic
+keys = generic, json

; #######
; LOGGERS
; #######
[logger_root]
level = NOTSET
handlers = console

[logger_vcsserver]
level = DEBUG
handlers =
qualname = vcsserver
propagate = 1

; ########
; HANDLERS
; ########

[handler_console]
class = StreamHandler
args = (sys.stderr, )
level = DEBUG
+; To enable JSON formatted logs replace 'generic' with 'json'
+; This allows sending properly formatted logs to Grafana Loki or Elasticsearch
formatter = generic

; ##########
; FORMATTERS
; ##########

[formatter_generic]
format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S
+
+[formatter_json]
+format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
+class = vcsserver.lib._vendor.jsonlogger.JsonFormatter
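
The sections above form a standard `logging.config` ini; a minimal, hand-rolled load looks like this (the path is illustrative; in production the vcsserver .ini itself carries these sections and is consumed by the server):

    import logging.config

    # demonstrates the file format only; not how the server bootstraps itself
    logging.config.fileConfig('vcsserver.ini', disable_existing_loggers=False)
    logging.getLogger('vcsserver').info('logging configured')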
@@ -1,262 +1,393 @@
1 """
1 """
2 Gunicorn config extension and hooks. This config file adds some extra settings and memory management.
2 Gunicorn config extension and hooks. This config file adds some extra settings and memory management.
3 Gunicorn configuration should be managed by .ini files entries of RhodeCode or VCSServer
3 Gunicorn configuration should be managed by .ini files entries of RhodeCode or VCSServer
4 """
4 """
5
5
6 import gc
6 import gc
7 import os
7 import os
8 import sys
8 import sys
9 import math
9 import math
10 import time
10 import time
11 import threading
11 import threading
12 import traceback
12 import traceback
13 import random
13 import random
14 import socket
14 from gunicorn.glogging import Logger
15 from gunicorn.glogging import Logger
15
16
16
17
def get_workers():
    import multiprocessing
    return multiprocessing.cpu_count() * 2 + 1


# GLOBAL
errorlog = '-'
accesslog = '-'


# SERVER MECHANICS
# None == system temp dir
# worker_tmp_dir is recommended to be set to some tmpfs
worker_tmp_dir = None
tmp_upload_dir = None

+#reuse_port = True
+
# Custom log format
+#access_log_format = (
+#    '%(t)s %(p)s INFO [GNCRN] %(h)-15s rqt:%(L)s %(s)s %(b)-6s "%(m)s:%(U)s %(q)s" usr:%(u)s "%(f)s" "%(a)s"')
+
+# loki format for easier parsing in grafana
access_log_format = (
-    '%(t)s %(p)s INFO [GNCRN] %(h)-15s rqt:%(L)s %(s)s %(b)-6s "%(m)s:%(U)s %(q)s" usr:%(u)s "%(f)s" "%(a)s"')
+    'time="%(t)s" pid=%(p)s level="INFO" type="[GNCRN]" ip="%(h)-15s" rqt="%(L)s" response_code="%(s)s" response_bytes="%(b)-6s" uri="%(m)s:%(U)s %(q)s" user=":%(u)s" user_agent="%(a)s"')

# self adjust workers based on CPU count
# workers = get_workers()

def _get_process_rss(pid=None):
    try:
        import psutil
        if pid:
            proc = psutil.Process(pid)
        else:
            proc = psutil.Process()
        return proc.memory_info().rss
    except Exception:
        return None


def _get_config(ini_path):
    import configparser

    try:
        config = configparser.RawConfigParser()
        config.read(ini_path)
        return config
    except Exception:
        return None


def _time_with_offset(memory_usage_check_interval):
    return time.time() - random.randint(0, memory_usage_check_interval/2.0)

def pre_fork(server, worker):
    pass


def post_fork(server, worker):

    # memory spec defaults
    _memory_max_usage = 0
    _memory_usage_check_interval = 60
    _memory_usage_recovery_threshold = 0.8

    ini_path = os.path.abspath(server.cfg.paste)
    conf = _get_config(ini_path)

    section = 'server:main'
    if conf and conf.has_section(section):

        if conf.has_option(section, 'memory_max_usage'):
            _memory_max_usage = conf.getint(section, 'memory_max_usage')

        if conf.has_option(section, 'memory_usage_check_interval'):
            _memory_usage_check_interval = conf.getint(section, 'memory_usage_check_interval')

        if conf.has_option(section, 'memory_usage_recovery_threshold'):
            _memory_usage_recovery_threshold = conf.getfloat(section, 'memory_usage_recovery_threshold')

-    worker._memory_max_usage = _memory_max_usage
-    worker._memory_usage_check_interval = _memory_usage_check_interval
-    worker._memory_usage_recovery_threshold = _memory_usage_recovery_threshold
+    worker._memory_max_usage = int(os.environ.get('RC_GUNICORN_MEMORY_MAX_USAGE', '')
+                                   or _memory_max_usage)
+    worker._memory_usage_check_interval = int(os.environ.get('RC_GUNICORN_MEMORY_USAGE_CHECK_INTERVAL', '')
+                                              or _memory_usage_check_interval)
+    worker._memory_usage_recovery_threshold = float(os.environ.get('RC_GUNICORN_MEMORY_USAGE_RECOVERY_THRESHOLD', '')
+                                                    or _memory_usage_recovery_threshold)

    # register memory last check time, with some random offset so we don't recycle all
    # at once
    worker._last_memory_check_time = _time_with_offset(_memory_usage_check_interval)

    if _memory_max_usage:
        server.log.info("[%-10s] WORKER spawned with max memory set at %s", worker.pid,
                        _format_data_size(_memory_max_usage))
    else:
        server.log.info("[%-10s] WORKER spawned", worker.pid)
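
The environment overrides introduced here take plain strings and fall back to the .ini-derived values when unset or empty; the pattern is simply (values here are made up):

    import os

    # e.g. RC_GUNICORN_MEMORY_MAX_USAGE=1073741824 set in the service environment
    int(os.environ.get('RC_GUNICORN_MEMORY_MAX_USAGE', '') or 0)              # -> 1073741824 when set
    int(os.environ.get('RC_GUNICORN_MEMORY_USAGE_CHECK_INTERVAL', '') or 60)  # -> 60 when unset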


def pre_exec(server):
    server.log.info("Forked child, re-executing.")


def on_starting(server):
    server_lbl = '{} {}'.format(server.proc_name, server.address)
    server.log.info("Server %s is starting.", server_lbl)


def when_ready(server):
    server.log.info("Server %s is ready. Spawning workers", server)


def on_reload(server):
    pass

def _format_data_size(size, unit="B", precision=1, binary=True):
    """Format a number using SI units (kilo, mega, etc.).

    ``size``: The number as a float or int.

    ``unit``: The unit name in plural form. Examples: "bytes", "B".

    ``precision``: How many digits to the right of the decimal point. Default
    is 1. 0 suppresses the decimal point.

    ``binary``: If false, use base-10 decimal prefixes (kilo = K = 1000).
    If true, use base-2 binary prefixes (kibi = Ki = 1024).

    ``full_name``: If false (default), use the prefix abbreviation ("k" or
    "Ki"). If true, use the full prefix ("kilo" or "kibi").

    """

    if not binary:
        base = 1000
        multiples = ('', 'k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
    else:
        base = 1024
        multiples = ('', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi')

    sign = ""
    if size > 0:
        m = int(math.log(size, base))
    elif size < 0:
        sign = "-"
        size = -size
        m = int(math.log(size, base))
    else:
        m = 0
    if m > 8:
        m = 8

    if m == 0:
        precision = '%.0f'
    else:
        precision = '%%.%df' % precision

    size = precision % (size / math.pow(base, m))

    return '%s%s %s%s' % (sign, size.strip(), multiples[m], unit)
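
A few sample values, computed from the function above, show the expected output shape:

    _format_data_size(536870912)           # -> '512.0 MiB'
    _format_data_size(2048, binary=False)  # -> '2.0 kB'
    _format_data_size(0)                   # -> '0 B'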


def _check_memory_usage(worker):
    memory_max_usage = worker._memory_max_usage
    if not memory_max_usage:
        return

    memory_usage_check_interval = worker._memory_usage_check_interval
    memory_usage_recovery_threshold = memory_max_usage * worker._memory_usage_recovery_threshold

    elapsed = time.time() - worker._last_memory_check_time
    if elapsed > memory_usage_check_interval:
        mem_usage = _get_process_rss()
        if mem_usage and mem_usage > memory_max_usage:
            worker.log.info(
                "memory usage %s > %s, forcing gc",
                _format_data_size(mem_usage), _format_data_size(memory_max_usage))
            # Try to clean it up by forcing a full collection.
            gc.collect()
            mem_usage = _get_process_rss()
            if mem_usage > memory_usage_recovery_threshold:
                # Didn't clean up enough, we'll have to terminate.
                worker.log.warning(
                    "memory usage %s > %s after gc, quitting",
                    _format_data_size(mem_usage), _format_data_size(memory_max_usage))
                # This will cause worker to auto-restart itself
                worker.alive = False
        worker._last_memory_check_time = time.time()


def worker_int(worker):
    worker.log.info("[%-10s] worker received INT or QUIT signal", worker.pid)

    # get traceback info, on worker crash
    id2name = dict([(th.ident, th.name) for th in threading.enumerate()])
    code = []
    for thread_id, stack in sys._current_frames().items():
        code.append(
            "\n# Thread: %s(%d)" % (id2name.get(thread_id, ""), thread_id))
        for fname, lineno, name, line in traceback.extract_stack(stack):
            code.append('File: "%s", line %d, in %s' % (fname, lineno, name))
            if line:
                code.append(" %s" % (line.strip()))
    worker.log.debug("\n".join(code))


def worker_abort(worker):
    worker.log.info("[%-10s] worker received SIGABRT signal", worker.pid)


def worker_exit(server, worker):
    worker.log.info("[%-10s] worker exit", worker.pid)


def child_exit(server, worker):
    worker.log.info("[%-10s] worker child exit", worker.pid)


def pre_request(worker, req):
    worker.start_time = time.time()
    worker.log.debug(
        "GNCRN PRE WORKER [cnt:%s]: %s %s", worker.nr, req.method, req.path)


def post_request(worker, req, environ, resp):
    total_time = time.time() - worker.start_time
    # Gunicorn sometimes has problems with reading the status_code
    status_code = getattr(resp, 'status_code', '')
    worker.log.debug(
        "GNCRN POST WORKER [cnt:%s]: %s %s resp: %s, Load Time: %.4fs",
        worker.nr, req.method, req.path, status_code, total_time)
    _check_memory_usage(worker)

+def _filter_proxy(ip):
+    """
+    Passed in IP addresses in HEADERS can be in a special format of multiple
+    ips. Those comma separated IPs are passed from various proxies in the
+    chain of request processing. The left-most being the original client.
+    We only care about the first IP, which came from the original client.
+
+    :param ip: ip string from headers
+    """
+    if ',' in ip:
+        _ips = ip.split(',')
+        _first_ip = _ips[0].strip()
+        return _first_ip
+    return ip
+
+
+def _filter_port(ip):
+    """
+    Removes a port from ip, there are 4 main cases to handle here.
+    - ipv4 eg. 127.0.0.1
+    - ipv6 eg. ::1
+    - ipv4+port eg. 127.0.0.1:8080
+    - ipv6+port eg. [::1]:8080
+
+    :param ip:
+    """
+    def is_ipv6(ip_addr):
+        if hasattr(socket, 'inet_pton'):
+            try:
+                socket.inet_pton(socket.AF_INET6, ip_addr)
+            except socket.error:
+                return False
+        else:
+            return False
+        return True
+
+    if ':' not in ip:  # must be ipv4 pure ip
+        return ip
+
+    if '[' in ip and ']' in ip:  # ipv6 with port
+        return ip.split(']')[0][1:].lower()
+
+    # must be ipv6 or ipv4 with port
+    if is_ipv6(ip):
+        return ip
+    else:
+        ip, _port = ip.split(':')[:2]  # means ipv4+port
+        return ip
+
+
+def get_ip_addr(environ):
+    proxy_key = 'HTTP_X_REAL_IP'
+    proxy_key2 = 'HTTP_X_FORWARDED_FOR'
+    def_key = 'REMOTE_ADDR'
+    _filters = lambda x: _filter_port(_filter_proxy(x))
+
+    ip = environ.get(proxy_key)
+    if ip:
+        return _filters(ip)
+
+    ip = environ.get(proxy_key2)
+    if ip:
+        return _filters(ip)
+
+    ip = environ.get(def_key, '0.0.0.0')
+    return _filters(ip)

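A few worked inputs show what these helpers return (derived from the code above; the addresses are made up):

    _filter_proxy('10.0.0.1, 172.16.0.2')  # -> '10.0.0.1' (left-most, original client)
    _filter_port('127.0.0.1:8080')         # -> '127.0.0.1'
    _filter_port('[::1]:8080')             # -> '::1'
    _filter_port('::1')                    # -> '::1'
    get_ip_addr({'REMOTE_ADDR': '127.0.0.1:5000'})  # -> '127.0.0.1'
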
class RhodeCodeLogger(Logger):
    """
    Custom Logger that allows some customization that gunicorn doesn't allow
    """

    datefmt = r"%Y-%m-%d %H:%M:%S"

    def __init__(self, cfg):
        Logger.__init__(self, cfg)

    def now(self):
        """ return date in RhodeCode Log format """
        now = time.time()
        msecs = int((now - long(now)) * 1000)
        return time.strftime(self.datefmt, time.localtime(now)) + '.{0:03d}'.format(msecs)

+    def atoms(self, resp, req, environ, request_time):
+        """ Gets atoms for log formatting.
+        """
+        status = resp.status
+        if isinstance(status, str):
+            status = status.split(None, 1)[0]
+        atoms = {
+            'h': get_ip_addr(environ),
+            'l': '-',
+            'u': self._get_user(environ) or '-',
+            't': self.now(),
+            'r': "%s %s %s" % (environ['REQUEST_METHOD'],
+                               environ['RAW_URI'],
+                               environ["SERVER_PROTOCOL"]),
+            's': status,
+            'm': environ.get('REQUEST_METHOD'),
+            'U': environ.get('PATH_INFO'),
+            'q': environ.get('QUERY_STRING'),
+            'H': environ.get('SERVER_PROTOCOL'),
+            'b': getattr(resp, 'sent', None) is not None and str(resp.sent) or '-',
+            'B': getattr(resp, 'sent', None),
+            'f': environ.get('HTTP_REFERER', '-'),
+            'a': environ.get('HTTP_USER_AGENT', '-'),
+            'T': request_time.seconds,
+            'D': (request_time.seconds * 1000000) + request_time.microseconds,
+            'M': (request_time.seconds * 1000) + int(request_time.microseconds/1000),
+            'L': "%d.%06d" % (request_time.seconds, request_time.microseconds),
+            'p': "<%s>" % os.getpid()
+        }
+
+        # add request headers
+        if hasattr(req, 'headers'):
+            req_headers = req.headers
+        else:
+            req_headers = req
+
+        if hasattr(req_headers, "items"):
+            req_headers = req_headers.items()
+
+        atoms.update({"{%s}i" % k.lower(): v for k, v in req_headers})
+
+        resp_headers = resp.headers
+        if hasattr(resp_headers, "items"):
+            resp_headers = resp_headers.items()
+
+        # add response headers
+        atoms.update({"{%s}o" % k.lower(): v for k, v in resp_headers})
+
+        # add environ variables
+        environ_variables = environ.items()
+        atoms.update({"{%s}e" % k.lower(): v for k, v in environ_variables})
+
+        return atoms

logger_class = RhodeCodeLogger
@@ -1,200 +1,238 @@
## -*- coding: utf-8 -*-

; #################################
; RHODECODE VCSSERVER CONFIGURATION
; #################################

[server:main]
; COMMON HOST/IP CONFIG
host = 127.0.0.1
port = 9900


; ###########################
; GUNICORN APPLICATION SERVER
; ###########################

-; run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
+; run with gunicorn --paste rhodecode.ini

; Module to use, this setting shouldn't be changed
use = egg:gunicorn#main

; Sets the number of process workers. More workers means more concurrent connections
; RhodeCode can handle at the same time. Each additional worker also increases
; memory usage, as each has its own set of caches.
; Recommended value is (2 * NUMBER_OF_CPUS + 1), e.g. 2 CPUs = 5 workers, but no more
; than 8-10 unless for really big deployments, e.g. 700-1000 users.
; `instance_id = *` must be set in the [app:main] section below (which is the default)
; when using more than 1 worker.
workers = 2

; Gunicorn access log level
loglevel = info

; Process name visible in process list
proc_name = rhodecode_vcsserver

; Type of worker class, one of `sync`, `gevent`
; currently `sync` is the only option allowed.
worker_class = sync

; The maximum number of simultaneous clients. Valid only for gevent
worker_connections = 10

; Max number of requests that a worker will handle before being gracefully restarted.
; Prevents memory leaks; jitter adds variability so not all workers are restarted at once.
max_requests = 1000
max_requests_jitter = 30

; Amount of time a worker can spend handling a request before it
; gets killed and restarted. By default set to 21600 (6hrs)
; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h)
timeout = 21600

; The maximum size of an HTTP request line in bytes.
; 0 for unlimited
limit_request_line = 0

; Limit the number of HTTP header fields in a request.
; By default this value is 100 and can't be larger than 32768.
limit_request_fields = 32768

; Limit the allowed size of an HTTP request header field.
; Value is a positive number or 0.
; Setting it to 0 will allow unlimited header field sizes.
limit_request_field_size = 0

; Timeout for graceful workers restart.
; After receiving a restart signal, workers have this much time to finish
; serving requests. Workers still alive after the timeout (starting from the
; receipt of the restart signal) are force killed.
; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h)
-graceful_timeout = 3600
+graceful_timeout = 21600

# The number of seconds to wait for requests on a Keep-Alive connection.
# Generally set in the 1-5 seconds range.
keepalive = 2

; Maximum memory usage that each worker can use before it will receive a
; graceful restart signal; 0 = memory monitoring is disabled
; Examples: 268435456 (256MB), 536870912 (512MB)
; 1073741824 (1GB), 2147483648 (2GB), 4294967296 (4GB)
memory_max_usage = 0

; How often in seconds to check memory usage for each gunicorn worker
memory_usage_check_interval = 60

; Threshold below which a worker is not recycled if GarbageCollection
; frees up enough resources. Before each restart we try to run GC on the worker;
; if we get enough free memory after that, the restart will not happen.
memory_usage_recovery_threshold = 0.8


[app:main]
; The %(here)s variable will be replaced with the absolute path of parent directory
; of this file
+; Each option in the app:main section can be overridden by an environment variable
+;
+; To override an option:
+;
+; RC_<KeyName>
+; Everything should be uppercase; . and - should be replaced by _.
+; For example, this configuration setting:
+; rc_cache.repo_object.backend = foo
+; can be overridden with:
+; export RC_CACHE_REPO_OBJECT_BACKEND=foo
+
use = egg:rhodecode-vcsserver

; Pyramid default locales, we need this to be set
-pyramid.default_locale_name = en
+#pyramid.default_locale_name = en

; default locale used by VCS systems
-locale = en_US.UTF-8
+#locale = en_US.UTF-8

; path to binaries for vcsserver, it should be set by the installer
-; at installation time, e.g /home/user/vcsserver-1/profile/bin
+; at installation time, e.g. /home/user/.rccontrol/vcsserver-1/profile/bin
; it can also be a path to nix-build output in case of development
core.binary_dir = ""

; Custom exception store path, defaults to TMPDIR
; This is used to store exceptions from RhodeCode in a shared directory
#exception_tracker.store_path =

; #############
; DOGPILE CACHE
; #############

; Default cache dir for caches. Putting this into a ramdisk can boost performance.
; eg. /tmpfs/data_ramdisk; however, this directory might require a large amount of space
-cache_dir = %(here)s/data
+#cache_dir = %(here)s/data

; ***************************************
; `repo_object` cache, default file based
; ***************************************

; `repo_object` cache settings for vcs methods for repositories
-rc_cache.repo_object.backend = dogpile.cache.rc.file_namespace
+#rc_cache.repo_object.backend = dogpile.cache.rc.file_namespace

; cache auto-expires after N seconds
; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days)
-rc_cache.repo_object.expiration_time = 2592000
+#rc_cache.repo_object.expiration_time = 2592000

; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
-#rc_cache.repo_object.arguments.filename = /tmp/vcsserver_cache.db
+#rc_cache.repo_object.arguments.filename = /tmp/vcsserver_cache_repo_object.db

; ***********************************************************
; `repo_object` cache with redis backend
; recommended for larger instances, and for better performance
; ***********************************************************

; `repo_object` cache settings for vcs methods for repositories
#rc_cache.repo_object.backend = dogpile.cache.rc.redis_msgpack

; cache auto-expires after N seconds
; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days)
#rc_cache.repo_object.expiration_time = 2592000

; redis_expiration_time needs to be greater than expiration_time
#rc_cache.repo_object.arguments.redis_expiration_time = 3592000

#rc_cache.repo_object.arguments.host = localhost
#rc_cache.repo_object.arguments.port = 6379
#rc_cache.repo_object.arguments.db = 5
#rc_cache.repo_object.arguments.socket_timeout = 30
; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
#rc_cache.repo_object.arguments.distributed_lock = true

+; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
+#rc_cache.repo_object.arguments.lock_auto_renewal = true
+
+; Statsd client config, this is used to send metrics to statsd
+; We recommend setting up statsd_exporter and scraping metrics with Prometheus
+#statsd.enabled = false
+#statsd.statsd_host = 0.0.0.0
+#statsd.statsd_port = 8125
+#statsd.statsd_prefix =
+#statsd.statsd_ipv6 = false
+
+; Configure logging automatically at server startup; set to false
+; to use the custom logging config below.
+; The RC_LOGGING_FORMATTER and RC_LOGGING_LEVEL env variables
+; can control the logging settings when autoconfigure is used.
+
+#logging.autoconfigure = true
+
+; specify your own custom logging config file to configure logging
+#logging.logging_conf_file = /path/to/custom_logging.ini

; #####################
; LOGGING CONFIGURATION
; #####################

[loggers]
keys = root, vcsserver

[handlers]
keys = console

[formatters]
-keys = generic
+keys = generic, json

; #######
; LOGGERS
; #######
[logger_root]
level = NOTSET
handlers = console

[logger_vcsserver]
-level = DEBUG
+level = INFO
handlers =
qualname = vcsserver
propagate = 1

; ########
; HANDLERS
; ########

[handler_console]
class = StreamHandler
args = (sys.stderr, )
level = INFO
+; To enable JSON formatted logs replace 'generic' with 'json'
+; This allows sending properly formatted logs to Grafana Loki or Elasticsearch
formatter = generic

; ##########
; FORMATTERS
; ##########

[formatter_generic]
format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S
+
+[formatter_json]
+format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
+class = vcsserver.lib._vendor.jsonlogger.JsonFormatter
@@ -1,134 +1,163 @@
# -*- coding: utf-8 -*-
# RhodeCode VCSServer provides access to different vcs backends via network.
# Copyright (C) 2014-2019 RhodeCode GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

# Import early to make sure things are patched up properly
from setuptools import setup, find_packages

import os
+import re
import sys
import pkgutil
import platform
import codecs

-try: # for pip >= 10
-    from pip._internal.req import parse_requirements
-except ImportError: # for pip <= 9.0.3
-    from pip.req import parse_requirements
-
-try: # for pip >= 10
-    from pip._internal.download import PipSession
-except ImportError: # for pip <= 9.0.3
-    from pip.download import PipSession
+import pip
+
+pip_major_version = int(pip.__version__.split(".")[0])
+if pip_major_version >= 20:
+    from pip._internal.req import parse_requirements
+    from pip._internal.network.session import PipSession
+elif pip_major_version >= 10:
+    from pip._internal.req import parse_requirements
+    from pip._internal.download import PipSession
+else:
+    from pip.req import parse_requirements
+    from pip.download import PipSession
+
+
+def get_package_name(req_object):
+    package_name = None
+    try:
+        from pip._internal.req.constructors import install_req_from_parsed_requirement
+    except ImportError:
+        install_req_from_parsed_requirement = None
+
+    # In 20.1 of pip, the requirements object changed
+    if hasattr(req_object, 'req'):
+        package_name = req_object.req.name
+
+    if package_name is None:
+        if install_req_from_parsed_requirement:
+            package = install_req_from_parsed_requirement(req_object)
+            package_name = package.req.name
+
+    if package_name is None:
+        # fallback for older pip
+        package_name = re.split('===|<=|!=|==|>=|~=|<|>', req_object.requirement)[0]
+
+    return package_name

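The regex fallback for older pip is easiest to see with a worked example (the requirement strings are illustrative):

    import re
    OPERATORS = '===|<=|!=|==|>=|~=|<|>'
    re.split(OPERATORS, 'Jinja2>=2.10,<3')[0]       # -> 'Jinja2'
    re.split(OPERATORS, 'dogpile.cache==0.9.0')[0]  # -> 'dogpile.cache'
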
if sys.version_info < (2, 7):
    raise Exception('VCSServer requires Python 2.7 or later')

here = os.path.abspath(os.path.dirname(__file__))

# defines current platform
__platform__ = platform.system()
__license__ = 'GPL V3'
__author__ = 'RhodeCode GmbH'
__url__ = 'https://code.rhodecode.com'
is_windows = __platform__ in ('Windows',)


def _get_requirements(req_filename, exclude=None, extras=None):
    extras = extras or []
    exclude = exclude or []

    try:
        parsed = parse_requirements(
            os.path.join(here, req_filename), session=PipSession())
    except TypeError:
        # try pip < 6.0.0, that doesn't support session
        parsed = parse_requirements(os.path.join(here, req_filename))

    requirements = []
-    for ir in parsed:
-        if ir.req and ir.name not in exclude:
-            requirements.append(str(ir.req))
+    for int_req in parsed:
+        req_name = get_package_name(int_req)
+        if req_name not in exclude:
+            requirements.append(req_name)
    return requirements + extras


# requirements extract
-setup_requirements = ['pytest-runner']
+setup_requirements = []
install_requirements = _get_requirements(
    'requirements.txt', exclude=['setuptools'])
test_requirements = _get_requirements(
    'requirements_test.txt', extras=['configobj'])


def get_version():
    version = pkgutil.get_data('vcsserver', 'VERSION')
    return version.decode().strip()


# additional files that go into the package itself
package_data = {
    '': ['*.txt', '*.rst'],
    'configs': ['*.ini'],
    'vcsserver': ['VERSION'],
}

description = 'Version Control System Server'
keywords = ' '.join(['Version Control System'])

# README/DESCRIPTION generation
readme_file = 'README.rst'
changelog_file = 'CHANGES.rst'
try:
    long_description = codecs.open(readme_file).read() + '\n\n' + \
        codecs.open(changelog_file).read()
except IOError as err:
    sys.stderr.write(
        "[WARNING] Cannot find file specified as long_description (%s)\n "
        "or changelog (%s), skipping that file" % (readme_file, changelog_file))
    long_description = description


setup(
    name='rhodecode-vcsserver',
    version=get_version(),
    description=description,
    long_description=long_description,
    keywords=keywords,
    license=__license__,
    author=__author__,
    author_email='support@rhodecode.com',
    url=__url__,
    setup_requires=setup_requirements,
    install_requires=install_requirements,
    tests_require=test_requirements,
    zip_safe=False,
    packages=find_packages(exclude=["docs", "tests*"]),
    package_data=package_data,
    include_package_data=True,
    classifiers=[
        'Development Status :: 6 - Mature',
        'Intended Audience :: Developers',
        'Operating System :: OS Independent',
        'Topic :: Software Development :: Version Control',
        'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
        'Programming Language :: Python :: 2.7',
    ],
    entry_points={
        'paste.app_factory': ['main=vcsserver.http_main:main']
    },
)
@@ -1,76 +1,132 @@
# RhodeCode VCSServer provides access to different vcs backends via network.
# Copyright (C) 2014-2020 RhodeCode GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

+import os
import sys
import traceback
import logging
import urllib.parse

from vcsserver.lib.rc_cache import region_meta
+
+from vcsserver import exceptions
+from vcsserver.exceptions import NoContentException
+from vcsserver.hgcompat import (archival)
+
log = logging.getLogger(__name__)
25
30
26
31
27 class RepoFactory(object):
32 class RepoFactory(object):
28 """
33 """
29 Utility to create repository instances
34 Utility to create repository instances
30
35
31 It provides internal caching of the `repo` object based on
36 It provides internal caching of the `repo` object based on
32 the :term:`call context`.
37 the :term:`call context`.
33 """
38 """
34 repo_type = None
39 repo_type = None
35
40
36 def __init__(self):
41 def __init__(self):
37 self._cache_region = region_meta.dogpile_cache_regions['repo_object']
42 self._cache_region = region_meta.dogpile_cache_regions['repo_object']
38
43
39 def _create_config(self, path, config):
44 def _create_config(self, path, config):
40 config = {}
45 config = {}
41 return config
46 return config
42
47
43 def _create_repo(self, wire, create):
48 def _create_repo(self, wire, create):
44 raise NotImplementedError()
49 raise NotImplementedError()
45
50
46 def repo(self, wire, create=False):
51 def repo(self, wire, create=False):
47 raise NotImplementedError()
52 raise NotImplementedError()
48
53
49
54
50 def obfuscate_qs(query_string):
55 def obfuscate_qs(query_string):
51 if query_string is None:
56 if query_string is None:
52 return None
57 return None
53
58
54 parsed = []
59 parsed = []
55 for k, v in urllib.parse.parse_qsl(query_string, keep_blank_values=True):
60 for k, v in urllib.parse.parse_qsl(query_string, keep_blank_values=True):
56 if k in ['auth_token', 'api_key']:
61 if k in ['auth_token', 'api_key']:
57 v = "*****"
62 v = "*****"
58 parsed.append((k, v))
63 parsed.append((k, v))
59
64
60 return '&'.join('{}{}'.format(
65 return '&'.join('{}{}'.format(
61 k, '={}'.format(v) if v else '') for k, v in parsed)
66 k, '={}'.format(v) if v else '') for k, v in parsed)
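Usage sketch for obfuscate_qs (the token value is invented): only the sensitive keys are masked, and blank values keep no trailing '='.

    qs = 'auth_token=secret123&branch=default&empty='
    obfuscate_qs(qs)
    # -> 'auth_token=*****&branch=default&empty'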
62
67
63
68
64 def raise_from_original(new_type):
69 def raise_from_original(new_type):
65 """
70 """
66 Raise a new exception type with original args and traceback.
71 Raise a new exception type with original args and traceback.
67 """
72 """
68 exc_type, exc_value, exc_traceback = sys.exc_info()
73 exc_type, exc_value, exc_traceback = sys.exc_info()
69 new_exc = new_type(*exc_value.args)
74 new_exc = new_type(*exc_value.args)
70 # store the original traceback into the new exc
75 # store the original traceback into the new exc
71 new_exc._org_exc_tb = traceback.format_exc()
76 new_exc._org_exc_tb = traceback.format_exc()
72
77
73 try:
78 try:
74 raise new_exc.with_traceback(exc_traceback)
79 raise new_exc.with_traceback(exc_traceback)
75 finally:
80 finally:
76 del exc_traceback
81 del exc_traceback
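A small sketch of raise_from_original converting a low-level error into a neutral type while keeping the original args and a formatted traceback. RemoteKeyError is an invented name for illustration.

    class RemoteKeyError(Exception):
        pass

    try:
        {}['missing']
    except KeyError:
        try:
            raise_from_original(RemoteKeyError)
        except RemoteKeyError as e:
            assert e.args == ('missing',)
            assert hasattr(e, '_org_exc_tb')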
82
83
84 class ArchiveNode(object):
85 def __init__(self, path, mode, is_link, raw_bytes):
86 self.path = path
87 self.mode = mode
88 self.is_link = is_link
89 self.raw_bytes = raw_bytes
90
91
92 def archive_repo(walker, archive_dest_path, kind, mtime, archive_at_path,
93 archive_dir_name, commit_id, write_metadata=True, extra_metadata=None):
94 """
95 walker should be a file walker, for example:
96 def walker(commit_id, archive_at_path):
97 for file_info in files:
98 yield ArchiveNode(fn, mode, is_link, ctx[fn].data)
99 """
100 extra_metadata = extra_metadata or {}
101
102 if kind == "tgz":
103 archiver = archival.tarit(archive_dest_path, mtime, "gz")
104 elif kind == "tbz2":
105 archiver = archival.tarit(archive_dest_path, mtime, "bz2")
106 elif kind == 'zip':
107 archiver = archival.zipit(archive_dest_path, mtime)
108 else:
109 raise exceptions.ArchiveException()(
110 'Remote does not support: "%s" archive type.' % kind)
111
112 for f in walker(commit_id, archive_at_path):
113 f_path = os.path.join(archive_dir_name, f.path.lstrip('/'))
114 try:
115 archiver.addfile(f_path, f.mode, f.is_link, f.raw_bytes())
116 except NoContentException:
117 # NOTE(marcink): this is a special case for SVN so we can create "empty"
118 # directories which aren't supported by the archiver
119 archiver.addfile(os.path.join(f_path, '.dir'), f.mode, f.is_link, '')
120
121 if write_metadata:
122 metadata = dict([
123 ('commit_id', commit_id),
124 ('mtime', mtime),
125 ])
126 metadata.update(extra_metadata)
127
128 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata.items()]
129 f_path = os.path.join(archive_dir_name, '.archival.txt')
130 archiver.addfile(f_path, 0o644, False, '\n'.join(meta))
131
132 return archiver.done()
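An illustrative walker for archive_repo; the path, mode, and data below are invented, and the final call is left commented because it needs a real archival backend. Note that raw_bytes must be callable, since archive_repo invokes f.raw_bytes().

    files = [('docs/readme.txt', 0o100644, False, lambda: b'hello')]

    def walker(_commit_id, _archive_at_path):
        for path, mode, is_link, data in files:
            yield ArchiveNode(path, mode, is_link, data)

    # archive_repo(walker, '/tmp/out.zip', 'zip', mtime=0,
    #              archive_at_path='/', archive_dir_name='repo',
    #              commit_id='deadbeef')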
@@ -1,121 +1,125 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """
18 """
19 Special exception handling over the wire.
19 Special exception handling over the wire.
20
20
21 Since we cannot assume that our client is able to import our exception classes,
21 Since we cannot assume that our client is able to import our exception classes,
22 this module provides a "wrapping" mechanism to raise plain exceptions
22 this module provides a "wrapping" mechanism to raise plain exceptions
23 which contain an extra attribute `_vcs_kind` to allow a client to distinguish
23 which contain an extra attribute `_vcs_kind` to allow a client to distinguish
24 different error conditions.
24 different error conditions.
25 """
25 """
26
26
27 from pyramid.httpexceptions import HTTPLocked, HTTPForbidden
27 from pyramid.httpexceptions import HTTPLocked, HTTPForbidden
28
28
29
29
30 def _make_exception(kind, org_exc, *args):
30 def _make_exception(kind, org_exc, *args):
31 """
31 """
32 Prepares a base `Exception` instance to be sent over the wire.
32 Prepares a base `Exception` instance to be sent over the wire.
33
33
34 To give our caller a hint what this is about, it will attach an attribute
34 To give our caller a hint what this is about, it will attach an attribute
35 `_vcs_kind` to the exception.
35 `_vcs_kind` to the exception.
36 """
36 """
37 exc = Exception(*args)
37 exc = Exception(*args)
38 exc._vcs_kind = kind
38 exc._vcs_kind = kind
39 exc._org_exc = org_exc
39 exc._org_exc = org_exc
40 exc._org_exc_tb = getattr(org_exc, '_org_exc_tb', '')
40 exc._org_exc_tb = getattr(org_exc, '_org_exc_tb', '')
41 return exc
41 return exc
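What travels over the wire is a plain Exception plus the discriminator attributes; a quick sketch with an invented message:

    exc = _make_exception('lookup', None, 'commit abc123 not found')
    assert type(exc) is Exception          # import-safe on the client side
    assert exc._vcs_kind == 'lookup'       # used to route error handling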
42
42
43
43
44 def AbortException(org_exc=None):
44 def AbortException(org_exc=None):
45 def _make_exception_wrapper(*args):
45 def _make_exception_wrapper(*args):
46 return _make_exception('abort', org_exc, *args)
46 return _make_exception('abort', org_exc, *args)
47 return _make_exception_wrapper
47 return _make_exception_wrapper
48
48
49
49
50 def ArchiveException(org_exc=None):
50 def ArchiveException(org_exc=None):
51 def _make_exception_wrapper(*args):
51 def _make_exception_wrapper(*args):
52 return _make_exception('archive', org_exc, *args)
52 return _make_exception('archive', org_exc, *args)
53 return _make_exception_wrapper
53 return _make_exception_wrapper
54
54
55
55
56 def LookupException(org_exc=None):
56 def LookupException(org_exc=None):
57 def _make_exception_wrapper(*args):
57 def _make_exception_wrapper(*args):
58 return _make_exception('lookup', org_exc, *args)
58 return _make_exception('lookup', org_exc, *args)
59 return _make_exception_wrapper
59 return _make_exception_wrapper
60
60
61
61
62 def VcsException(org_exc=None):
62 def VcsException(org_exc=None):
63 def _make_exception_wrapper(*args):
63 def _make_exception_wrapper(*args):
64 return _make_exception('error', org_exc, *args)
64 return _make_exception('error', org_exc, *args)
65 return _make_exception_wrapper
65 return _make_exception_wrapper
66
66
67
67
68 def RepositoryLockedException(org_exc=None):
68 def RepositoryLockedException(org_exc=None):
69 def _make_exception_wrapper(*args):
69 def _make_exception_wrapper(*args):
70 return _make_exception('repo_locked', org_exc, *args)
70 return _make_exception('repo_locked', org_exc, *args)
71 return _make_exception_wrapper
71 return _make_exception_wrapper
72
72
73
73
74 def RepositoryBranchProtectedException(org_exc=None):
74 def RepositoryBranchProtectedException(org_exc=None):
75 def _make_exception_wrapper(*args):
75 def _make_exception_wrapper(*args):
76 return _make_exception('repo_branch_protected', org_exc, *args)
76 return _make_exception('repo_branch_protected', org_exc, *args)
77 return _make_exception_wrapper
77 return _make_exception_wrapper
78
78
79
79
80 def RequirementException(org_exc=None):
80 def RequirementException(org_exc=None):
81 def _make_exception_wrapper(*args):
81 def _make_exception_wrapper(*args):
82 return _make_exception('requirement', org_exc, *args)
82 return _make_exception('requirement', org_exc, *args)
83 return _make_exception_wrapper
83 return _make_exception_wrapper
84
84
85
85
86 def UnhandledException(org_exc=None):
86 def UnhandledException(org_exc=None):
87 def _make_exception_wrapper(*args):
87 def _make_exception_wrapper(*args):
88 return _make_exception('unhandled', org_exc, *args)
88 return _make_exception('unhandled', org_exc, *args)
89 return _make_exception_wrapper
89 return _make_exception_wrapper
90
90
91
91
92 def URLError(org_exc=None):
92 def URLError(org_exc=None):
93 def _make_exception_wrapper(*args):
93 def _make_exception_wrapper(*args):
94 return _make_exception('url_error', org_exc, *args)
94 return _make_exception('url_error', org_exc, *args)
95 return _make_exception_wrapper
95 return _make_exception_wrapper
96
96
97
97
98 def SubrepoMergeException(org_exc=None):
98 def SubrepoMergeException(org_exc=None):
99 def _make_exception_wrapper(*args):
99 def _make_exception_wrapper(*args):
100 return _make_exception('subrepo_merge_error', org_exc, *args)
100 return _make_exception('subrepo_merge_error', org_exc, *args)
101 return _make_exception_wrapper
101 return _make_exception_wrapper
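The factories above are used in a two-step call: the first call binds the original exception, the second builds the wire-safe Exception. A sketch with invented values:

    try:
        raise KeyError('deadbeef')
    except KeyError as e:
        wire_exc = LookupException(org_exc=e)('commit not found: deadbeef')
        # wire_exc._vcs_kind == 'lookup'; wire_exc._org_exc is the KeyError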
102
102
103
103
104 class HTTPRepoLocked(HTTPLocked):
104 class HTTPRepoLocked(HTTPLocked):
105 """
105 """
106 Subclass of HTTPLocked response that allows setting the title and status
106 Subclass of HTTPLocked response that allows setting the title and status
107 code via constructor arguments.
107 code via constructor arguments.
108 """
108 """
109 def __init__(self, title, status_code=None, **kwargs):
109 def __init__(self, title, status_code=None, **kwargs):
110 self.code = status_code or HTTPLocked.code
110 self.code = status_code or HTTPLocked.code
111 self.title = title
111 self.title = title
112 super(HTTPRepoLocked, self).__init__(**kwargs)
112 super(HTTPRepoLocked, self).__init__(**kwargs)
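Construction sketch (the title is invented; 423 is the HTTPLocked default that status_code overrides when given):

    resp = HTTPRepoLocked(title='Repository `foo` locked by `bar`')
    # resp.code == 423, resp.title == 'Repository `foo` locked by `bar`'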
113
113
114
114
115 class HTTPRepoBranchProtected(HTTPForbidden):
115 class HTTPRepoBranchProtected(HTTPForbidden):
116 def __init__(self, *args, **kwargs):
116 def __init__(self, *args, **kwargs):
117 super(HTTPForbidden, self).__init__(*args, **kwargs)
117 super(HTTPForbidden, self).__init__(*args, **kwargs)
118
118
119
119
120 class RefNotFoundException(KeyError):
120 class RefNotFoundException(KeyError):
121 pass
121 pass
122
123
124 class NoContentException(ValueError):
125 pass
@@ -1,1192 +1,1281 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import collections
18 import collections
19 import logging
19 import logging
20 import os
20 import os
21 import posixpath as vcspath
21 import posixpath as vcspath
22 import re
22 import re
23 import stat
23 import stat
24 import traceback
24 import traceback
25 import urllib.request, urllib.parse, urllib.error
25 import urllib.request, urllib.parse, urllib.error
26 import urllib.request, urllib.error, urllib.parse
26 import urllib.request, urllib.error, urllib.parse
27 from functools import wraps
27 from functools import wraps
28
28
29 import more_itertools
29 import more_itertools
30 import pygit2
30 import pygit2
31 from pygit2 import Repository as LibGit2Repo
31 from pygit2 import Repository as LibGit2Repo
32 from pygit2 import index as LibGit2Index
32 from dulwich import index, objects
33 from dulwich import index, objects
33 from dulwich.client import HttpGitClient, LocalGitClient
34 from dulwich.client import HttpGitClient, LocalGitClient
34 from dulwich.errors import (
35 from dulwich.errors import (
35 NotGitRepository, ChecksumMismatch, WrongObjectException,
36 NotGitRepository, ChecksumMismatch, WrongObjectException,
36 MissingCommitError, ObjectMissing, HangupException,
37 MissingCommitError, ObjectMissing, HangupException,
37 UnexpectedCommandError)
38 UnexpectedCommandError)
38 from dulwich.repo import Repo as DulwichRepo
39 from dulwich.repo import Repo as DulwichRepo
39 from dulwich.server import update_server_info
40 from dulwich.server import update_server_info
40
41
41 from vcsserver import exceptions, settings, subprocessio
42 from vcsserver import exceptions, settings, subprocessio
42 from vcsserver.utils import safe_str, safe_int, safe_unicode
43 from vcsserver.utils import safe_str, safe_int, safe_unicode
43 from vcsserver.base import RepoFactory, obfuscate_qs
44 from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, archive_repo
44 from vcsserver.hgcompat import (
45 from vcsserver.hgcompat import (
45 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
46 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
46 from vcsserver.git_lfs.lib import LFSOidStore
47 from vcsserver.git_lfs.lib import LFSOidStore
47 from vcsserver.vcs_base import RemoteBase
48 from vcsserver.vcs_base import RemoteBase
48
49
49 DIR_STAT = stat.S_IFDIR
50 DIR_STAT = stat.S_IFDIR
50 FILE_MODE = stat.S_IFMT
51 FILE_MODE = stat.S_IFMT
51 GIT_LINK = objects.S_IFGITLINK
52 GIT_LINK = objects.S_IFGITLINK
52 PEELED_REF_MARKER = '^{}'
53 PEELED_REF_MARKER = '^{}'
53
54
54
55
55 log = logging.getLogger(__name__)
56 log = logging.getLogger(__name__)
56
57
57
58
58 def str_to_dulwich(value):
59 def str_to_dulwich(value):
59 """
60 """
60 Dulwich 0.10.1a requires `unicode` objects to be passed in.
61 Dulwich 0.10.1a requires `unicode` objects to be passed in.
61 """
62 """
62 return value.decode(settings.WIRE_ENCODING)
63 return value.decode(settings.WIRE_ENCODING)
63
64
64
65
65 def reraise_safe_exceptions(func):
66 def reraise_safe_exceptions(func):
66 """Converts Dulwich exceptions to something neutral."""
67 """Converts Dulwich exceptions to something neutral."""
67
68
68 @wraps(func)
69 @wraps(func)
69 def wrapper(*args, **kwargs):
70 def wrapper(*args, **kwargs):
70 try:
71 try:
71 return func(*args, **kwargs)
72 return func(*args, **kwargs)
72 except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
73 except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
73 exc = exceptions.LookupException(org_exc=e)
74 exc = exceptions.LookupException(org_exc=e)
74 raise exc(safe_str(e))
75 raise exc(safe_str(e))
75 except (HangupException, UnexpectedCommandError) as e:
76 except (HangupException, UnexpectedCommandError) as e:
76 exc = exceptions.VcsException(org_exc=e)
77 exc = exceptions.VcsException(org_exc=e)
77 raise exc(safe_str(e))
78 raise exc(safe_str(e))
78 except Exception as e:
79 except Exception as e:
79 # NOTE(marcink): because of how dulwich handles some exceptions
80 # NOTE(marcink): because of how dulwich handles some exceptions
80 # (KeyError on empty repos), we cannot track this and catch all
81 # (KeyError on empty repos), we cannot track this and catch all
81 # exceptions; these are exceptions from other handlers
82 # exceptions; these are exceptions from other handlers
82 #if not hasattr(e, '_vcs_kind'):
83 #if not hasattr(e, '_vcs_kind'):
83 #log.exception("Unhandled exception in git remote call")
84 #log.exception("Unhandled exception in git remote call")
84 #raise_from_original(exceptions.UnhandledException)
85 #raise_from_original(exceptions.UnhandledException)
85 raise
86 raise
86 return wrapper
87 return wrapper
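Decorator sketch: a stand-in backend call whose dulwich error surfaces as the neutral 'lookup' kind. The function name and sha are invented.

    @reraise_safe_exceptions
    def demo_lookup(sha):
        raise ObjectMissing(sha)   # simulated dulwich failure

    try:
        demo_lookup('deadbeef')
    except Exception as e:
        assert getattr(e, '_vcs_kind', None) == 'lookup'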
87
88
88
89
89 class Repo(DulwichRepo):
90 class Repo(DulwichRepo):
90 """
91 """
91 A wrapper for dulwich Repo class.
92 A wrapper for dulwich Repo class.
92
93
93 Since dulwich sometimes keeps .idx file descriptors open, this leads to
94 Since dulwich sometimes keeps .idx file descriptors open, this leads to
94 "Too many open files" error. We need to close all opened file descriptors
95 "Too many open files" error. We need to close all opened file descriptors
95 once the repo object is destroyed.
96 once the repo object is destroyed.
96 """
97 """
97 def __del__(self):
98 def __del__(self):
98 if hasattr(self, 'object_store'):
99 if hasattr(self, 'object_store'):
99 self.close()
100 self.close()
100
101
101
102
102 class Repository(LibGit2Repo):
103 class Repository(LibGit2Repo):
103
104
104 def __enter__(self):
105 def __enter__(self):
105 return self
106 return self
106
107
107 def __exit__(self, exc_type, exc_val, exc_tb):
108 def __exit__(self, exc_type, exc_val, exc_tb):
108 self.free()
109 self.free()
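Context-manager sketch: free() is guaranteed to run on exit, releasing the libgit2 handle. The throwaway repository below is created only for illustration.

    import tempfile
    import pygit2

    path = tempfile.mkdtemp()
    pygit2.init_repository(path, bare=True)   # disposable bare repo
    with Repository(path) as repo:            # free() runs when the block exits
        assert repo.is_bare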
109
110
110
111
111 class GitFactory(RepoFactory):
112 class GitFactory(RepoFactory):
112 repo_type = 'git'
113 repo_type = 'git'
113
114
114 def _create_repo(self, wire, create, use_libgit2=False):
115 def _create_repo(self, wire, create, use_libgit2=False):
115 if use_libgit2:
116 if use_libgit2:
116 return Repository(wire['path'])
117 return Repository(wire['path'])
117 else:
118 else:
118 repo_path = str_to_dulwich(wire['path'])
119 repo_path = str_to_dulwich(wire['path'])
119 return Repo(repo_path)
120 return Repo(repo_path)
120
121
121 def repo(self, wire, create=False, use_libgit2=False):
122 def repo(self, wire, create=False, use_libgit2=False):
122 """
123 """
123 Get a repository instance for the given path.
124 Get a repository instance for the given path.
124 """
125 """
125 return self._create_repo(wire, create, use_libgit2)
126 return self._create_repo(wire, create, use_libgit2)
126
127
127 def repo_libgit2(self, wire):
128 def repo_libgit2(self, wire):
128 return self.repo(wire, use_libgit2=True)
129 return self.repo(wire, use_libgit2=True)
129
130
130
131
131 class GitRemote(RemoteBase):
132 class GitRemote(RemoteBase):
132
133
133 def __init__(self, factory):
134 def __init__(self, factory):
134 self._factory = factory
135 self._factory = factory
135 self._bulk_methods = {
136 self._bulk_methods = {
136 "date": self.date,
137 "date": self.date,
137 "author": self.author,
138 "author": self.author,
138 "branch": self.branch,
139 "branch": self.branch,
139 "message": self.message,
140 "message": self.message,
140 "parents": self.parents,
141 "parents": self.parents,
141 "_commit": self.revision,
142 "_commit": self.revision,
142 }
143 }
143
144
144 def _wire_to_config(self, wire):
145 def _wire_to_config(self, wire):
145 if 'config' in wire:
146 if 'config' in wire:
146 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
147 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
147 return {}
148 return {}
148
149
149 def _remote_conf(self, config):
150 def _remote_conf(self, config):
150 params = [
151 params = [
151 '-c', 'core.askpass=""',
152 '-c', 'core.askpass=""',
152 ]
153 ]
153 ssl_cert_dir = config.get('vcs_ssl_dir')
154 ssl_cert_dir = config.get('vcs_ssl_dir')
154 if ssl_cert_dir:
155 if ssl_cert_dir:
155 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
156 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
156 return params
157 return params
157
158
158 @reraise_safe_exceptions
159 @reraise_safe_exceptions
159 def discover_git_version(self):
160 def discover_git_version(self):
160 stdout, _ = self.run_git_command(
161 stdout, _ = self.run_git_command(
161 {}, ['--version'], _bare=True, _safe=True)
162 {}, ['--version'], _bare=True, _safe=True)
162 prefix = 'git version'
163 prefix = 'git version'
163 if stdout.startswith(prefix):
164 if stdout.startswith(prefix):
164 stdout = stdout[len(prefix):]
165 stdout = stdout[len(prefix):]
165 return stdout.strip()
166 return stdout.strip()
166
167
167 @reraise_safe_exceptions
168 @reraise_safe_exceptions
168 def is_empty(self, wire):
169 def is_empty(self, wire):
169 repo_init = self._factory.repo_libgit2(wire)
170 repo_init = self._factory.repo_libgit2(wire)
170 with repo_init as repo:
171 with repo_init as repo:
171
172
172 try:
173 try:
173 has_head = repo.head.name
174 has_head = repo.head.name
174 if has_head:
175 if has_head:
175 return False
176 return False
176
177
177 # NOTE(marcink): check again using more expensive method
178 # NOTE(marcink): check again using more expensive method
178 return repo.is_empty
179 return repo.is_empty
179 except Exception:
180 except Exception:
180 pass
181 pass
181
182
182 return True
183 return True
183
184
184 @reraise_safe_exceptions
185 @reraise_safe_exceptions
185 def assert_correct_path(self, wire):
186 def assert_correct_path(self, wire):
186 cache_on, context_uid, repo_id = self._cache_on(wire)
187 cache_on, context_uid, repo_id = self._cache_on(wire)
187 @self.region.conditional_cache_on_arguments(condition=cache_on)
188 region = self._region(wire)
189 @region.conditional_cache_on_arguments(condition=cache_on)
188 def _assert_correct_path(_context_uid, _repo_id):
190 def _assert_correct_path(_context_uid, _repo_id):
189 try:
191 try:
190 repo_init = self._factory.repo_libgit2(wire)
192 repo_init = self._factory.repo_libgit2(wire)
191 with repo_init as repo:
193 with repo_init as repo:
192 pass
194 pass
193 except pygit2.GitError:
195 except pygit2.GitError:
194 path = wire.get('path')
196 path = wire.get('path')
195 tb = traceback.format_exc()
197 tb = traceback.format_exc()
196 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
198 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
197 return False
199 return False
198
200
199 return True
201 return True
200 return _assert_correct_path(context_uid, repo_id)
202 return _assert_correct_path(context_uid, repo_id)
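The recurring change in this hunk swaps the shared self.region for a per-wire region lookup via self._region(wire). Underneath sits the dogpile.cache decorator pattern; a vanilla sketch follows (conditional_cache_on_arguments itself is RhodeCode's own extension that adds the condition= switch on top of cache_on_arguments; region backend and expiry below are invented).

    from dogpile.cache import make_region

    region = make_region().configure(
        'dogpile.cache.memory', expiration_time=60)

    @region.cache_on_arguments()
    def _expensive(repo_id, sha):
        return len(repo_id) + len(sha)

    _expensive('repo-1', 'deadbeef')   # a second identical call hits the cache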
201
203
202 @reraise_safe_exceptions
204 @reraise_safe_exceptions
203 def bare(self, wire):
205 def bare(self, wire):
204 repo_init = self._factory.repo_libgit2(wire)
206 repo_init = self._factory.repo_libgit2(wire)
205 with repo_init as repo:
207 with repo_init as repo:
206 return repo.is_bare
208 return repo.is_bare
207
209
208 @reraise_safe_exceptions
210 @reraise_safe_exceptions
209 def blob_as_pretty_string(self, wire, sha):
211 def blob_as_pretty_string(self, wire, sha):
210 repo_init = self._factory.repo_libgit2(wire)
212 repo_init = self._factory.repo_libgit2(wire)
211 with repo_init as repo:
213 with repo_init as repo:
212 blob_obj = repo[sha]
214 blob_obj = repo[sha]
213 blob = blob_obj.data
215 blob = blob_obj.data
214 return blob
216 return blob
215
217
216 @reraise_safe_exceptions
218 @reraise_safe_exceptions
217 def blob_raw_length(self, wire, sha):
219 def blob_raw_length(self, wire, sha):
218 cache_on, context_uid, repo_id = self._cache_on(wire)
220 cache_on, context_uid, repo_id = self._cache_on(wire)
219 @self.region.conditional_cache_on_arguments(condition=cache_on)
221 region = self._region(wire)
222 @region.conditional_cache_on_arguments(condition=cache_on)
220 def _blob_raw_length(_repo_id, _sha):
223 def _blob_raw_length(_repo_id, _sha):
221
224
222 repo_init = self._factory.repo_libgit2(wire)
225 repo_init = self._factory.repo_libgit2(wire)
223 with repo_init as repo:
226 with repo_init as repo:
224 blob = repo[sha]
227 blob = repo[sha]
225 return blob.size
228 return blob.size
226
229
227 return _blob_raw_length(repo_id, sha)
230 return _blob_raw_length(repo_id, sha)
228
231
229 def _parse_lfs_pointer(self, raw_content):
232 def _parse_lfs_pointer(self, raw_content):
230
233
231 spec_string = 'version https://git-lfs.github.com/spec'
234 spec_string = 'version https://git-lfs.github.com/spec'
232 if raw_content and raw_content.startswith(spec_string):
235 if raw_content and raw_content.startswith(spec_string):
233 pattern = re.compile(r"""
236 pattern = re.compile(r"""
234 (?:\n)?
237 (?:\n)?
235 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
238 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
236 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
239 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
237 ^size[ ](?P<oid_size>[0-9]+)\n
240 ^size[ ](?P<oid_size>[0-9]+)\n
238 (?:\n)?
241 (?:\n)?
239 """, re.VERBOSE | re.MULTILINE)
242 """, re.VERBOSE | re.MULTILINE)
240 match = pattern.match(raw_content)
243 match = pattern.match(raw_content)
241 if match:
244 if match:
242 return match.groupdict()
245 return match.groupdict()
243
246
244 return {}
247 return {}
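A well-formed pointer for _parse_lfs_pointer (hash and size invented); with re.VERBOSE the spaces inside the pattern are ignored, so 'oid[ ] sha256:' matches 'oid sha256:'.

    pointer = (
        'version https://git-lfs.github.com/spec/v1\n'
        'oid sha256:' + '0' * 64 + '\n'
        'size 12345\n'
    )
    # parsed as {'spec_ver': 'v1', 'oid_hash': '0' * 64, 'oid_size': '12345'}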
245
248
246 @reraise_safe_exceptions
249 @reraise_safe_exceptions
247 def is_large_file(self, wire, commit_id):
250 def is_large_file(self, wire, commit_id):
248 cache_on, context_uid, repo_id = self._cache_on(wire)
251 cache_on, context_uid, repo_id = self._cache_on(wire)
249
252
250 @self.region.conditional_cache_on_arguments(condition=cache_on)
253 region = self._region(wire)
254 @region.conditional_cache_on_arguments(condition=cache_on)
251 def _is_large_file(_repo_id, _sha):
255 def _is_large_file(_repo_id, _sha):
252 repo_init = self._factory.repo_libgit2(wire)
256 repo_init = self._factory.repo_libgit2(wire)
253 with repo_init as repo:
257 with repo_init as repo:
254 blob = repo[commit_id]
258 blob = repo[commit_id]
255 if blob.is_binary:
259 if blob.is_binary:
256 return {}
260 return {}
257
261
258 return self._parse_lfs_pointer(blob.data)
262 return self._parse_lfs_pointer(blob.data)
259
263
260 return _is_large_file(repo_id, commit_id)
264 return _is_large_file(repo_id, commit_id)
261
265
262 @reraise_safe_exceptions
266 @reraise_safe_exceptions
263 def is_binary(self, wire, tree_id):
267 def is_binary(self, wire, tree_id):
264 cache_on, context_uid, repo_id = self._cache_on(wire)
268 cache_on, context_uid, repo_id = self._cache_on(wire)
265
269
266 @self.region.conditional_cache_on_arguments(condition=cache_on)
270 region = self._region(wire)
271 @region.conditional_cache_on_arguments(condition=cache_on)
267 def _is_binary(_repo_id, _tree_id):
272 def _is_binary(_repo_id, _tree_id):
268 repo_init = self._factory.repo_libgit2(wire)
273 repo_init = self._factory.repo_libgit2(wire)
269 with repo_init as repo:
274 with repo_init as repo:
270 blob_obj = repo[tree_id]
275 blob_obj = repo[tree_id]
271 return blob_obj.is_binary
276 return blob_obj.is_binary
272
277
273 return _is_binary(repo_id, tree_id)
278 return _is_binary(repo_id, tree_id)
274
279
275 @reraise_safe_exceptions
280 @reraise_safe_exceptions
276 def in_largefiles_store(self, wire, oid):
281 def in_largefiles_store(self, wire, oid):
277 conf = self._wire_to_config(wire)
282 conf = self._wire_to_config(wire)
278 repo_init = self._factory.repo_libgit2(wire)
283 repo_init = self._factory.repo_libgit2(wire)
279 with repo_init as repo:
284 with repo_init as repo:
280 repo_name = repo.path
285 repo_name = repo.path
281
286
282 store_location = conf.get('vcs_git_lfs_store_location')
287 store_location = conf.get('vcs_git_lfs_store_location')
283 if store_location:
288 if store_location:
284
289
285 store = LFSOidStore(
290 store = LFSOidStore(
286 oid=oid, repo=repo_name, store_location=store_location)
291 oid=oid, repo=repo_name, store_location=store_location)
287 return store.has_oid()
292 return store.has_oid()
288
293
289 return False
294 return False
290
295
291 @reraise_safe_exceptions
296 @reraise_safe_exceptions
292 def store_path(self, wire, oid):
297 def store_path(self, wire, oid):
293 conf = self._wire_to_config(wire)
298 conf = self._wire_to_config(wire)
294 repo_init = self._factory.repo_libgit2(wire)
299 repo_init = self._factory.repo_libgit2(wire)
295 with repo_init as repo:
300 with repo_init as repo:
296 repo_name = repo.path
301 repo_name = repo.path
297
302
298 store_location = conf.get('vcs_git_lfs_store_location')
303 store_location = conf.get('vcs_git_lfs_store_location')
299 if store_location:
304 if store_location:
300 store = LFSOidStore(
305 store = LFSOidStore(
301 oid=oid, repo=repo_name, store_location=store_location)
306 oid=oid, repo=repo_name, store_location=store_location)
302 return store.oid_path
307 return store.oid_path
303 raise ValueError('Unable to fetch oid with path {}'.format(oid))
308 raise ValueError('Unable to fetch oid with path {}'.format(oid))
304
309
305 @reraise_safe_exceptions
310 @reraise_safe_exceptions
306 def bulk_request(self, wire, rev, pre_load):
311 def bulk_request(self, wire, rev, pre_load):
307 cache_on, context_uid, repo_id = self._cache_on(wire)
312 cache_on, context_uid, repo_id = self._cache_on(wire)
308 @self.region.conditional_cache_on_arguments(condition=cache_on)
313 region = self._region(wire)
314 @region.conditional_cache_on_arguments(condition=cache_on)
309 def _bulk_request(_repo_id, _rev, _pre_load):
315 def _bulk_request(_repo_id, _rev, _pre_load):
310 result = {}
316 result = {}
311 for attr in pre_load:
317 for attr in pre_load:
312 try:
318 try:
313 method = self._bulk_methods[attr]
319 method = self._bulk_methods[attr]
314 args = [wire, rev]
320 args = [wire, rev]
315 result[attr] = method(*args)
321 result[attr] = method(*args)
316 except KeyError as e:
322 except KeyError as e:
317 raise exceptions.VcsException(e)(
323 raise exceptions.VcsException(e)(
318 "Unknown bulk attribute: %s" % attr)
324 "Unknown bulk attribute: %s" % attr)
319 return result
325 return result
320
326
321 return _bulk_request(repo_id, rev, sorted(pre_load))
327 return _bulk_request(repo_id, rev, sorted(pre_load))
322
328
323 def _build_opener(self, url):
329 def _build_opener(self, url):
324 handlers = []
330 handlers = []
325 url_obj = url_parser(url)
331 url_obj = url_parser(url)
326 _, authinfo = url_obj.authinfo()
332 _, authinfo = url_obj.authinfo()
327
333
328 if authinfo:
334 if authinfo:
329 # create a password manager
335 # create a password manager
330 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
336 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
331 passmgr.add_password(*authinfo)
337 passmgr.add_password(*authinfo)
332
338
333 handlers.extend((httpbasicauthhandler(passmgr),
339 handlers.extend((httpbasicauthhandler(passmgr),
334 httpdigestauthhandler(passmgr)))
340 httpdigestauthhandler(passmgr)))
335
341
336 return urllib.request.build_opener(*handlers)
342 return urllib.request.build_opener(*handlers)
337
343
338 def _type_id_to_name(self, type_id):
344 def _type_id_to_name(self, type_id):
339 return {
345 return {
340 1: b'commit',
346 1: b'commit',
341 2: b'tree',
347 2: b'tree',
342 3: b'blob',
348 3: b'blob',
343 4: b'tag'
349 4: b'tag'
344 }[type_id]
350 }[type_id]
345
351
346 @reraise_safe_exceptions
352 @reraise_safe_exceptions
347 def check_url(self, url, config):
353 def check_url(self, url, config):
348 url_obj = url_parser(url)
354 url_obj = url_parser(url)
349 test_uri, _ = url_obj.authinfo()
355 test_uri, _ = url_obj.authinfo()
350 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
356 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
351 url_obj.query = obfuscate_qs(url_obj.query)
357 url_obj.query = obfuscate_qs(url_obj.query)
352 cleaned_uri = str(url_obj)
358 cleaned_uri = str(url_obj)
353 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
359 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
354
360
355 if not test_uri.endswith('info/refs'):
361 if not test_uri.endswith('info/refs'):
356 test_uri = test_uri.rstrip('/') + '/info/refs'
362 test_uri = test_uri.rstrip('/') + '/info/refs'
357
363
358 o = self._build_opener(url)
364 o = self._build_opener(url)
359 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake a git client
365 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake a git client
360
366
361 q = {"service": 'git-upload-pack'}
367 q = {"service": 'git-upload-pack'}
362 qs = '?%s' % urllib.parse.urlencode(q)
368 qs = '?%s' % urllib.parse.urlencode(q)
363 cu = "%s%s" % (test_uri, qs)
369 cu = "%s%s" % (test_uri, qs)
364 req = urllib.request.Request(cu, None, {})
370 req = urllib.request.Request(cu, None, {})
365
371
366 try:
372 try:
367 log.debug("Trying to open URL %s", cleaned_uri)
373 log.debug("Trying to open URL %s", cleaned_uri)
368 resp = o.open(req)
374 resp = o.open(req)
369 if resp.code != 200:
375 if resp.code != 200:
370 raise exceptions.URLError()('Return Code is not 200')
376 raise exceptions.URLError()('Return Code is not 200')
371 except Exception as e:
377 except Exception as e:
372 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
378 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
373 # means it cannot be cloned
379 # means it cannot be cloned
374 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
380 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
375
381
376 # now detect if it's proper git repo
382 # now detect if it's proper git repo
377 gitdata = resp.read()
383 gitdata = resp.read()
378 if 'service=git-upload-pack' in gitdata:
384 if 'service=git-upload-pack' in gitdata:
379 pass
385 pass
380 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
386 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
381 # old style git can return some other format !
387 # old style git can return some other format !
382 pass
388 pass
383 else:
389 else:
384 raise exceptions.URLError()(
390 raise exceptions.URLError()(
385 "url [%s] does not look like an git" % (cleaned_uri,))
391 "url [%s] does not look like an git" % (cleaned_uri,))
386
392
387 return True
393 return True
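The probe URL check_url builds, reproduced standalone (the host is invented):

    import urllib.parse

    test_uri = 'https://example.com/repo.git'
    if not test_uri.endswith('info/refs'):
        test_uri = test_uri.rstrip('/') + '/info/refs'
    probe = test_uri + '?' + urllib.parse.urlencode({'service': 'git-upload-pack'})
    # -> 'https://example.com/repo.git/info/refs?service=git-upload-pack'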
388
394
389 @reraise_safe_exceptions
395 @reraise_safe_exceptions
390 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
396 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
391 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
397 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
392 remote_refs = self.pull(wire, url, apply_refs=False)
398 remote_refs = self.pull(wire, url, apply_refs=False)
393 repo = self._factory.repo(wire)
399 repo = self._factory.repo(wire)
394 if isinstance(valid_refs, list):
400 if isinstance(valid_refs, list):
395 valid_refs = tuple(valid_refs)
401 valid_refs = tuple(valid_refs)
396
402
397 for k in remote_refs:
403 for k in remote_refs:
398 # only parse heads/tags and skip so-called deferred tags
404 # only parse heads/tags and skip so-called deferred tags
399 if k.startswith(valid_refs) and not k.endswith(deferred):
405 if k.startswith(valid_refs) and not k.endswith(deferred):
400 repo[k] = remote_refs[k]
406 repo[k] = remote_refs[k]
401
407
402 if update_after_clone:
408 if update_after_clone:
403 # we want to checkout HEAD
409 # we want to checkout HEAD
404 repo["HEAD"] = remote_refs["HEAD"]
410 repo["HEAD"] = remote_refs["HEAD"]
405 index.build_index_from_tree(repo.path, repo.index_path(),
411 index.build_index_from_tree(repo.path, repo.index_path(),
406 repo.object_store, repo["HEAD"].tree)
412 repo.object_store, repo["HEAD"].tree)
407
413
408 @reraise_safe_exceptions
414 @reraise_safe_exceptions
409 def branch(self, wire, commit_id):
415 def branch(self, wire, commit_id):
410 cache_on, context_uid, repo_id = self._cache_on(wire)
416 cache_on, context_uid, repo_id = self._cache_on(wire)
411 @self.region.conditional_cache_on_arguments(condition=cache_on)
417 region = self._region(wire)
418 @region.conditional_cache_on_arguments(condition=cache_on)
412 def _branch(_context_uid, _repo_id, _commit_id):
419 def _branch(_context_uid, _repo_id, _commit_id):
413 regex = re.compile('^refs/heads')
420 regex = re.compile('^refs/heads')
414
421
415 def filter_with(ref):
422 def filter_with(ref):
416 return regex.match(ref[0]) and ref[1] == _commit_id
423 return regex.match(ref[0]) and ref[1] == _commit_id
417
424
418 branches = list(filter(filter_with, list(self.get_refs(wire).items())))
425 branches = list(filter(filter_with, list(self.get_refs(wire).items())))
419 return [x[0].split('refs/heads/')[-1] for x in branches]
426 return [x[0].split('refs/heads/')[-1] for x in branches]
420
427
421 return _branch(context_uid, repo_id, commit_id)
428 return _branch(context_uid, repo_id, commit_id)
422
429
423 @reraise_safe_exceptions
430 @reraise_safe_exceptions
424 def commit_branches(self, wire, commit_id):
431 def commit_branches(self, wire, commit_id):
425 cache_on, context_uid, repo_id = self._cache_on(wire)
432 cache_on, context_uid, repo_id = self._cache_on(wire)
426 @self.region.conditional_cache_on_arguments(condition=cache_on)
433 region = self._region(wire)
434 @region.conditional_cache_on_arguments(condition=cache_on)
427 def _commit_branches(_context_uid, _repo_id, _commit_id):
435 def _commit_branches(_context_uid, _repo_id, _commit_id):
428 repo_init = self._factory.repo_libgit2(wire)
436 repo_init = self._factory.repo_libgit2(wire)
429 with repo_init as repo:
437 with repo_init as repo:
430 branches = [x for x in repo.branches.with_commit(_commit_id)]
438 branches = [x for x in repo.branches.with_commit(_commit_id)]
431 return branches
439 return branches
432
440
433 return _commit_branches(context_uid, repo_id, commit_id)
441 return _commit_branches(context_uid, repo_id, commit_id)
434
442
435 @reraise_safe_exceptions
443 @reraise_safe_exceptions
436 def add_object(self, wire, content):
444 def add_object(self, wire, content):
437 repo_init = self._factory.repo_libgit2(wire)
445 repo_init = self._factory.repo_libgit2(wire)
438 with repo_init as repo:
446 with repo_init as repo:
439 blob = objects.Blob()
447 blob = objects.Blob()
440 blob.set_raw_string(content)
448 blob.set_raw_string(content)
441 repo.object_store.add_object(blob)
449 repo.object_store.add_object(blob)
442 return blob.id
450 return blob.id
443
451
444 # TODO: this is quite complex, check if that can be simplified
452 # TODO: this is quite complex, check if that can be simplified
445 @reraise_safe_exceptions
453 @reraise_safe_exceptions
446 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
454 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
455 # Defines the root tree
456 class _Root(object):
457 def __repr__(self):
458 return 'ROOT TREE'
459 ROOT = _Root()
460
447 repo = self._factory.repo(wire)
461 repo = self._factory.repo(wire)
448 object_store = repo.object_store
462 object_store = repo.object_store
449
463
450 # Create tree and populates it with blobs
464 # Create tree and populates it with blobs
451 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
465
466 if commit_tree and repo[commit_tree]:
467 git_commit = repo[commit_data['parents'][0]]
468 commit_tree = repo[git_commit.tree] # root tree
469 else:
470 commit_tree = objects.Tree()
452
471
453 for node in updated:
472 for node in updated:
454 # Compute subdirs if needed
473 # Compute subdirs if needed
455 dirpath, nodename = vcspath.split(node['path'])
474 dirpath, nodename = vcspath.split(node['path'])
456 dirnames = list(map(safe_str, dirpath and dirpath.split('/') or []))
475 dirnames = list(map(safe_str, dirpath and dirpath.split('/') or []))
457 parent = commit_tree
476 parent = commit_tree
458 ancestors = [('', parent)]
477 ancestors = [('', parent)]
459
478
460 # Tries to dig for the deepest existing tree
479 # Tries to dig for the deepest existing tree
461 while dirnames:
480 while dirnames:
462 curdir = dirnames.pop(0)
481 curdir = dirnames.pop(0)
463 try:
482 try:
464 dir_id = parent[curdir][1]
483 dir_id = parent[curdir][1]
465 except KeyError:
484 except KeyError:
466 # put curdir back into dirnames and stops
485 # put curdir back into dirnames and stops
467 dirnames.insert(0, curdir)
486 dirnames.insert(0, curdir)
468 break
487 break
469 else:
488 else:
470 # If found, updates parent
489 # If found, updates parent
471 parent = repo[dir_id]
490 parent = repo[dir_id]
472 ancestors.append((curdir, parent))
491 ancestors.append((curdir, parent))
473 # Now parent is deepest existing tree and we need to create
492 # Now parent is deepest existing tree and we need to create
474 # subtrees for dirnames (in reverse order)
493 # subtrees for dirnames (in reverse order)
475 # [this only applies for nodes from added]
494 # [this only applies for nodes from added]
476 new_trees = []
495 new_trees = []
477
496
478 blob = objects.Blob.from_string(node['content'])
497 blob = objects.Blob.from_string(node['content'])
479
498
480 if dirnames:
499 if dirnames:
481 # If there are trees which should be created we need to build
500 # If there are trees which should be created we need to build
482 # them now (in reverse order)
501 # them now (in reverse order)
483 reversed_dirnames = list(reversed(dirnames))
502 reversed_dirnames = list(reversed(dirnames))
484 curtree = objects.Tree()
503 curtree = objects.Tree()
485 curtree[node['node_path']] = node['mode'], blob.id
504 curtree[node['node_path']] = node['mode'], blob.id
486 new_trees.append(curtree)
505 new_trees.append(curtree)
487 for dirname in reversed_dirnames[:-1]:
506 for dirname in reversed_dirnames[:-1]:
488 newtree = objects.Tree()
507 newtree = objects.Tree()
489 newtree[dirname] = (DIR_STAT, curtree.id)
508 newtree[dirname] = (DIR_STAT, curtree.id)
490 new_trees.append(newtree)
509 new_trees.append(newtree)
491 curtree = newtree
510 curtree = newtree
492 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
511 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
493 else:
512 else:
494 parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)
513 parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)
495
514
496 new_trees.append(parent)
515 new_trees.append(parent)
497 # Update ancestors
516 # Update ancestors
498 reversed_ancestors = reversed(
517 reversed_ancestors = reversed(
499 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
518 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
500 for parent, tree, path in reversed_ancestors:
519 for parent, tree, path in reversed_ancestors:
501 parent[path] = (DIR_STAT, tree.id)
520 parent[path] = (DIR_STAT, tree.id)
502 object_store.add_object(tree)
521 object_store.add_object(tree)
503
522
504 object_store.add_object(blob)
523 object_store.add_object(blob)
505 for tree in new_trees:
524 for tree in new_trees:
506 object_store.add_object(tree)
525 object_store.add_object(tree)
507
526
508 for node_path in removed:
527 for node_path in removed:
509 paths = node_path.split('/')
528 paths = node_path.split('/')
510 tree = commit_tree
529 tree = commit_tree # start with top-level
511 trees = [tree]
530 trees = [{'tree': tree, 'path': ROOT}]
512 # Traverse deep into the forest...
531 # Traverse deep into the forest...
532 # resolve final tree by iterating the path.
533 # e.g. a/b/c.txt will get
534 # - root as tree then
535 # - 'a' as tree,
536 # - 'b' as tree,
537 # - stop at c as blob.
513 for path in paths:
538 for path in paths:
514 try:
539 try:
515 obj = repo[tree[path][1]]
540 obj = repo[tree[path][1]]
516 if isinstance(obj, objects.Tree):
541 if isinstance(obj, objects.Tree):
517 trees.append(obj)
542 trees.append({'tree': obj, 'path': path})
518 tree = obj
543 tree = obj
519 except KeyError:
544 except KeyError:
520 break
545 break
546 # PROBLEM:
547 """
548 We're not editing the same reference tree object
549 """
521 # Cut down the blob and all rotten trees on the way back...
550 # Cut down the blob and all rotten trees on the way back...
522 for path, tree in reversed(list(zip(paths, trees))):
551 for path, tree_data in reversed(list(zip(paths, trees))):
523 del tree[path]
552 tree = tree_data['tree']
524 if tree:
553 tree.__delitem__(path)
554 # This operation edits the tree in place; the change must be carried back into the new commit
555
556 if len(tree) > 0:
525 # This tree still has elements - don't remove it or any
557 # This tree still has elements - don't remove it or any
526 # of it's parents
526 # of its parents
558 # of its parents
559 break
528
560
529 object_store.add_object(commit_tree)
561 object_store.add_object(commit_tree)
530
562
531 # Create commit
563 # Create commit
532 commit = objects.Commit()
564 commit = objects.Commit()
533 commit.tree = commit_tree.id
565 commit.tree = commit_tree.id
534 for k, v in commit_data.items():
566 for k, v in commit_data.items():
535 setattr(commit, k, v)
567 setattr(commit, k, v)
536 object_store.add_object(commit)
568 object_store.add_object(commit)
537
569
538 self.create_branch(wire, branch, commit.id)
570 self.create_branch(wire, branch, commit.id)
539
571
540 # dulwich set-ref
572 # dulwich set-ref
541 ref = 'refs/heads/%s' % branch
573 ref = 'refs/heads/%s' % branch
542 repo.refs[ref] = commit.id
574 repo.refs[ref] = commit.id
543
575
544 return commit.id
576 return commit.id
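The payload shapes commit() reads, inferred from how the method body above accesses them; all field values are invented, and the call is left commented because it needs a live wire/repo. With parents=[] and commit_tree=None a fresh root Tree is built.

    commit_data = {
        'author': b'dev <dev@example.com>',
        'committer': b'dev <dev@example.com>',
        'message': b'add readme',
        'encoding': b'UTF-8',
        'parents': [],
    }
    updated = [{'path': 'docs/readme.txt', 'node_path': 'readme.txt',
                'content': b'hello', 'mode': 0o100644}]
    removed = []
    # remote.commit(wire, commit_data, 'master', None, updated, removed)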
545
577
546 @reraise_safe_exceptions
578 @reraise_safe_exceptions
547 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
579 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
548 if url != 'default' and '://' not in url:
580 if url != 'default' and '://' not in url:
549 client = LocalGitClient(url)
581 client = LocalGitClient(url)
550 else:
582 else:
551 url_obj = url_parser(url)
583 url_obj = url_parser(url)
552 o = self._build_opener(url)
584 o = self._build_opener(url)
553 url, _ = url_obj.authinfo()
585 url, _ = url_obj.authinfo()
554 client = HttpGitClient(base_url=url, opener=o)
586 client = HttpGitClient(base_url=url, opener=o)
555 repo = self._factory.repo(wire)
587 repo = self._factory.repo(wire)
556
588
557 determine_wants = repo.object_store.determine_wants_all
589 determine_wants = repo.object_store.determine_wants_all
558 if refs:
590 if refs:
559 def determine_wants_requested(references):
591 def determine_wants_requested(references):
560 return [references[r] for r in references if r in refs]
592 return [references[r] for r in references if r in refs]
561 determine_wants = determine_wants_requested
593 determine_wants = determine_wants_requested
562
594
563 try:
595 try:
564 remote_refs = client.fetch(
596 remote_refs = client.fetch(
565 path=url, target=repo, determine_wants=determine_wants)
597 path=url, target=repo, determine_wants=determine_wants)
566 except NotGitRepository as e:
598 except NotGitRepository as e:
567 log.warning(
599 log.warning(
568 'Trying to fetch from "%s" failed, not a Git repository.', url)
600 'Trying to fetch from "%s" failed, not a Git repository.', url)
569 # Exception can contain unicode which we convert
601 # Exception can contain unicode which we convert
570 raise exceptions.AbortException(e)(repr(e))
602 raise exceptions.AbortException(e)(repr(e))
571
603
572 # mikhail: client.fetch() returns all the remote refs, but fetches only
604 # mikhail: client.fetch() returns all the remote refs, but fetches only
573 # refs filtered by `determine_wants` function. We need to filter result
605 # refs filtered by `determine_wants` function. We need to filter result
574 # as well
606 # as well
575 if refs:
607 if refs:
576 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
608 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
577
609
578 if apply_refs:
610 if apply_refs:
579 # TODO: johbo: Needs proper test coverage with a git repository
611 # TODO: johbo: Needs proper test coverage with a git repository
580 # that contains a tag object, so that we would end up with
612 # that contains a tag object, so that we would end up with
581 # a peeled ref at this point.
613 # a peeled ref at this point.
582 for k in remote_refs:
614 for k in remote_refs:
583 if k.endswith(PEELED_REF_MARKER):
615 if k.endswith(PEELED_REF_MARKER):
584 log.debug("Skipping peeled reference %s", k)
616 log.debug("Skipping peeled reference %s", k)
585 continue
617 continue
586 repo[k] = remote_refs[k]
618 repo[k] = remote_refs[k]
587
619
588 if refs and not update_after:
620 if refs and not update_after:
589 # mikhail: explicitly set the head to the last ref.
621 # mikhail: explicitly set the head to the last ref.
590 repo['HEAD'] = remote_refs[refs[-1]]
622 repo["HEAD"] = remote_refs[refs[-1]]
591
623
592 if update_after:
624 if update_after:
593 # we want to checkout HEAD
625 # we want to checkout HEAD
594 repo["HEAD"] = remote_refs["HEAD"]
626 repo["HEAD"] = remote_refs["HEAD"]
595 index.build_index_from_tree(repo.path, repo.index_path(),
627 index.build_index_from_tree(repo.path, repo.index_path(),
596 repo.object_store, repo["HEAD"].tree)
628 repo.object_store, repo["HEAD"].tree)
597 return remote_refs
629 return remote_refs
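The determine_wants hook is called by dulwich with every remote ref, and only the requested ones are fetched. A standalone sketch with invented refs (dulwich itself passes bytes ref names):

    refs = ['refs/heads/stable']
    references = {'refs/heads/stable': 'a' * 40, 'refs/heads/dev': 'b' * 40}

    def determine_wants_requested(references):
        return [references[r] for r in references if r in refs]

    determine_wants_requested(references)   # -> ['aaaa...aaaa']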
598
630
599 @reraise_safe_exceptions
631 @reraise_safe_exceptions
600 def sync_fetch(self, wire, url, refs=None, all_refs=False):
632 def sync_fetch(self, wire, url, refs=None, all_refs=False):
601 repo = self._factory.repo(wire)
633 repo = self._factory.repo(wire)
602 if refs and not isinstance(refs, (list, tuple)):
634 if refs and not isinstance(refs, (list, tuple)):
603 refs = [refs]
635 refs = [refs]
604
636
605 config = self._wire_to_config(wire)
637 config = self._wire_to_config(wire)
606 # get all remote refs we'll use to fetch later
638 # get all remote refs we'll use to fetch later
607 cmd = ['ls-remote']
639 cmd = ['ls-remote']
608 if not all_refs:
640 if not all_refs:
609 cmd += ['--heads', '--tags']
641 cmd += ['--heads', '--tags']
610 cmd += [url]
642 cmd += [url]
611 output, __ = self.run_git_command(
643 output, __ = self.run_git_command(
612 wire, cmd, fail_on_stderr=False,
644 wire, cmd, fail_on_stderr=False,
613 _copts=self._remote_conf(config),
645 _copts=self._remote_conf(config),
614 extra_env={'GIT_TERMINAL_PROMPT': '0'})
646 extra_env={'GIT_TERMINAL_PROMPT': '0'})
615
647
616 remote_refs = collections.OrderedDict()
648 remote_refs = collections.OrderedDict()
617 fetch_refs = []
649 fetch_refs = []
618
650
619 for ref_line in output.splitlines():
651 for ref_line in output.splitlines():
620 sha, ref = ref_line.split('\t')
652 sha, ref = ref_line.split('\t')
621 sha = sha.strip()
653 sha = sha.strip()
622 if ref in remote_refs:
654 if ref in remote_refs:
623 # duplicate, skip
655 # duplicate, skip
624 continue
656 continue
625 if ref.endswith(PEELED_REF_MARKER):
657 if ref.endswith(PEELED_REF_MARKER):
626 log.debug("Skipping peeled reference %s", ref)
658 log.debug("Skipping peeled reference %s", ref)
627 continue
659 continue
628 # don't sync HEAD
660 # don't sync HEAD
629 if ref in ['HEAD']:
661 if ref in ['HEAD']:
630 continue
662 continue
631
663
632 remote_refs[ref] = sha
664 remote_refs[ref] = sha
633
665
634 if refs and sha in refs:
666 if refs and sha in refs:
635 # we filter fetch using our specified refs
667 # we filter fetch using our specified refs
636 fetch_refs.append('{}:{}'.format(ref, ref))
668 fetch_refs.append('{}:{}'.format(ref, ref))
637 elif not refs:
669 elif not refs:
638 fetch_refs.append('{}:{}'.format(ref, ref))
670 fetch_refs.append('{}:{}'.format(ref, ref))
639 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
671 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
640
672
641 if fetch_refs:
673 if fetch_refs:
642 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
674 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
643 fetch_refs_chunks = list(chunk)
675 fetch_refs_chunks = list(chunk)
644 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
676 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
645 _out, _err = self.run_git_command(
677 _out, _err = self.run_git_command(
646 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
678 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
647 fail_on_stderr=False,
679 fail_on_stderr=False,
648 _copts=self._remote_conf(config),
680 _copts=self._remote_conf(config),
649 extra_env={'GIT_TERMINAL_PROMPT': '0'})
681 extra_env={'GIT_TERMINAL_PROMPT': '0'})
650
682
651 return remote_refs
683 return remote_refs
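The 4096-refspec chunking above keeps any single `git fetch` invocation's argument list bounded; in isolation (refs invented):

    import more_itertools

    fetch_refs = ['refs/heads/b{0}:refs/heads/b{0}'.format(i) for i in range(10000)]
    chunks = list(more_itertools.chunked(fetch_refs, 1024 * 4))
    # 3 chunks: 4096 + 4096 + 1808 refspecs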
652
684
    @reraise_safe_exceptions
    def sync_push(self, wire, url, refs=None):
        if not self.check_url(url, wire):
            return
        config = self._wire_to_config(wire)
        self._factory.repo(wire)
        self.run_git_command(
            wire, ['push', url, '--mirror'], fail_on_stderr=False,
            _copts=self._remote_conf(config),
            extra_env={'GIT_TERMINAL_PROMPT': '0'})

    @reraise_safe_exceptions
    def get_remote_refs(self, wire, url):
        repo = Repo(url)
        return repo.get_refs()

    @reraise_safe_exceptions
    def get_description(self, wire):
        repo = self._factory.repo(wire)
        return repo.get_description()

    @reraise_safe_exceptions
    def get_missing_revs(self, wire, rev1, rev2, path2):
        repo = self._factory.repo(wire)
        LocalGitClient(thin_packs=False).fetch(path2, repo)

        wire_remote = wire.copy()
        wire_remote['path'] = path2
        repo_remote = self._factory.repo(wire_remote)
        LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)

        revs = [
            x.commit.id
            for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
        return revs

    @reraise_safe_exceptions
    def get_object(self, wire, sha, maybe_unreachable=False):
        cache_on, context_uid, repo_id = self._cache_on(wire)
-        @self.region.conditional_cache_on_arguments(condition=cache_on)
+        region = self._region(wire)
+        @region.conditional_cache_on_arguments(condition=cache_on)
        def _get_object(_context_uid, _repo_id, _sha):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:

                missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
                try:
                    commit = repo.revparse_single(sha)
                except KeyError:
                    # NOTE(marcink): KeyError doesn't give us any meaningful information
                    # here, we instead raise something more explicit
                    e = exceptions.RefNotFoundException('SHA: %s not found', sha)
                    raise exceptions.LookupException(e)(missing_commit_err)
                except ValueError as e:
                    raise exceptions.LookupException(e)(missing_commit_err)

                is_tag = False
                if isinstance(commit, pygit2.Tag):
                    commit = repo.get(commit.target)
                    is_tag = True

                check_dangling = True
                if is_tag:
                    check_dangling = False

                if check_dangling and maybe_unreachable:
                    check_dangling = False

                # if sha was given as a reference and it resolved, the commit is not dangling
                if sha != commit.hex:
                    check_dangling = False

                if check_dangling:
                    # check for dangling commit
                    for branch in repo.branches.with_commit(commit.hex):
                        if branch:
                            break
                    else:
                        # NOTE(marcink): an empty error doesn't give us any meaningful information
                        # here, we instead raise something more explicit
                        e = exceptions.RefNotFoundException('SHA: %s not found in branches', sha)
                        raise exceptions.LookupException(e)(missing_commit_err)

                commit_id = commit.hex
                type_id = commit.type_str

                return {
                    'id': commit_id,
                    'type': self._type_id_to_name(type_id),
                    'commit_id': commit_id,
                    'idx': 0
                }

        return _get_object(context_uid, repo_id, sha)

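# The `region = self._region(wire)` lines introduced throughout this change
# swap the single class-level dogpile.cache region for one resolved per
# repository. `conditional_cache_on_arguments` is vcsserver's own wrapper that
# adds the `condition` switch on top of dogpile; the underlying dogpile.cache
# pattern looks like this minimal, illustrative sketch (in-memory backend and
# key prefix are made up for the demo):
from dogpile.cache import make_region

demo_region = make_region(key_mangler=lambda key: 'demo:' + key)
demo_region.configure('dogpile.cache.memory')


@demo_region.cache_on_arguments()
def _expensive_lookup(_repo_id, _sha):
    # the body runs once per distinct argument tuple, later calls hit the cache
    return {'id': _sha, 'repo': _repo_id}


_expensive_lookup('repo-1', 'deadbeef')              # computed
_expensive_lookup('repo-1', 'deadbeef')              # served from cache
_expensive_lookup.invalidate('repo-1', 'deadbeef')   # explicit eviction
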
    @reraise_safe_exceptions
    def get_refs(self, wire):
        cache_on, context_uid, repo_id = self._cache_on(wire)
-        @self.region.conditional_cache_on_arguments(condition=cache_on)
+        region = self._region(wire)
+        @region.conditional_cache_on_arguments(condition=cache_on)
        def _get_refs(_context_uid, _repo_id):

            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                regex = re.compile('^refs/(heads|tags)/')
                return {x.name: x.target.hex for x in
                        [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]}

        return _get_refs(context_uid, repo_id)

    @reraise_safe_exceptions
    def get_branch_pointers(self, wire):
        cache_on, context_uid, repo_id = self._cache_on(wire)
-        @self.region.conditional_cache_on_arguments(condition=cache_on)
+        region = self._region(wire)
+        @region.conditional_cache_on_arguments(condition=cache_on)
        def _get_branch_pointers(_context_uid, _repo_id):

            repo_init = self._factory.repo_libgit2(wire)
            regex = re.compile('^refs/heads')
            with repo_init as repo:
                branches = [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]
                return {x.target.hex: x.shorthand for x in branches}

        return _get_branch_pointers(context_uid, repo_id)

    @reraise_safe_exceptions
    def head(self, wire, show_exc=True):
        cache_on, context_uid, repo_id = self._cache_on(wire)
-        @self.region.conditional_cache_on_arguments(condition=cache_on)
+        region = self._region(wire)
+        @region.conditional_cache_on_arguments(condition=cache_on)
        def _head(_context_uid, _repo_id, _show_exc):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                try:
                    return repo.head.peel().hex
                except Exception:
                    if show_exc:
                        raise
        return _head(context_uid, repo_id, show_exc)

    @reraise_safe_exceptions
    def init(self, wire):
        repo_path = str_to_dulwich(wire['path'])
        self.repo = Repo.init(repo_path)

    @reraise_safe_exceptions
    def init_bare(self, wire):
        repo_path = str_to_dulwich(wire['path'])
        self.repo = Repo.init_bare(repo_path)

    @reraise_safe_exceptions
    def revision(self, wire, rev):

        cache_on, context_uid, repo_id = self._cache_on(wire)
-        @self.region.conditional_cache_on_arguments(condition=cache_on)
+        region = self._region(wire)
+        @region.conditional_cache_on_arguments(condition=cache_on)
        def _revision(_context_uid, _repo_id, _rev):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[rev]
                obj_data = {
                    'id': commit.id.hex,
                }
                # tree objects themselves don't have a tree_id attribute
                if hasattr(commit, 'tree_id'):
                    obj_data['tree'] = commit.tree_id.hex

                return obj_data
        return _revision(context_uid, repo_id, rev)

    @reraise_safe_exceptions
    def date(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
-        @self.region.conditional_cache_on_arguments(condition=cache_on)
+        region = self._region(wire)
+        @region.conditional_cache_on_arguments(condition=cache_on)
        def _date(_repo_id, _commit_id):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[commit_id]

                if hasattr(commit, 'commit_time'):
                    commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
                else:
                    commit = commit.get_object()
                    commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset

                # TODO(marcink): check dulwich difference of offset vs timezone
                return [commit_time, commit_time_offset]
        return _date(repo_id, commit_id)

    @reraise_safe_exceptions
    def author(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
-        @self.region.conditional_cache_on_arguments(condition=cache_on)
+        region = self._region(wire)
+        @region.conditional_cache_on_arguments(condition=cache_on)
        def _author(_repo_id, _commit_id):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[commit_id]

                if hasattr(commit, 'author'):
                    author = commit.author
                else:
                    author = commit.get_object().author

                if author.email:
                    return "{} <{}>".format(author.name, author.email)

                try:
                    return "{}".format(author.name)
                except Exception:
                    return "{}".format(safe_unicode(author.raw_name))

        return _author(repo_id, commit_id)

    @reraise_safe_exceptions
    def message(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
-        @self.region.conditional_cache_on_arguments(condition=cache_on)
+        region = self._region(wire)
+        @region.conditional_cache_on_arguments(condition=cache_on)
        def _message(_repo_id, _commit_id):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[commit_id]
                return commit.message
        return _message(repo_id, commit_id)

    @reraise_safe_exceptions
    def parents(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
-        @self.region.conditional_cache_on_arguments(condition=cache_on)
+        region = self._region(wire)
+        @region.conditional_cache_on_arguments(condition=cache_on)
        def _parents(_repo_id, _commit_id):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[commit_id]
                if hasattr(commit, 'parent_ids'):
                    parent_ids = commit.parent_ids
                else:
                    parent_ids = commit.get_object().parent_ids

                return [x.hex for x in parent_ids]
        return _parents(repo_id, commit_id)

    @reraise_safe_exceptions
    def children(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
-        @self.region.conditional_cache_on_arguments(condition=cache_on)
+        region = self._region(wire)
+        @region.conditional_cache_on_arguments(condition=cache_on)
        def _children(_repo_id, _commit_id):
            output, __ = self.run_git_command(
                wire, ['rev-list', '--all', '--children'])

            child_ids = []
            pat = re.compile(r'^%s' % commit_id)
            for l in output.splitlines():
                if pat.match(l):
                    found_ids = l.split(' ')[1:]
                    child_ids.extend(found_ids)

            return child_ids
        return _children(repo_id, commit_id)

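# The `children` parsing above relies on the `git rev-list --all --children`
# output format: each line starts with a commit id, followed by the ids of its
# children. A self-contained sketch with invented SHAs (note re.escape, which
# hardens the pattern against regex metacharacters):
import re


def children_of(rev_list_output, commit_id):
    pat = re.compile(r'^%s' % re.escape(commit_id))
    child_ids = []
    for line in rev_list_output.splitlines():
        if pat.match(line):
            child_ids.extend(line.split(' ')[1:])
    return child_ids


sample = ('aaaa1111aaaa1111aaaa1111aaaa1111aaaa1111 '
          'bbbb2222bbbb2222bbbb2222bbbb2222bbbb2222\n'
          'bbbb2222bbbb2222bbbb2222bbbb2222bbbb2222\n')
assert children_of(sample, 'aaaa1111aaaa1111aaaa1111aaaa1111aaaa1111') == \
    ['bbbb2222bbbb2222bbbb2222bbbb2222bbbb2222']
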
    @reraise_safe_exceptions
    def set_refs(self, wire, key, value):
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            repo.references.create(key, value, force=True)

    @reraise_safe_exceptions
    def create_branch(self, wire, branch_name, commit_id, force=False):
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            commit = repo[commit_id]

            if force:
                repo.branches.local.create(branch_name, commit, force=force)
            elif not repo.branches.get(branch_name):
                # create only if the branch doesn't exist yet
                repo.branches.local.create(branch_name, commit, force=force)

    @reraise_safe_exceptions
    def remove_ref(self, wire, key):
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            repo.references.delete(key)

    @reraise_safe_exceptions
    def tag_remove(self, wire, tag_name):
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            key = 'refs/tags/{}'.format(tag_name)
            repo.references.delete(key)

    @reraise_safe_exceptions
    def tree_changes(self, wire, source_id, target_id):
        # TODO(marcink): remove this, it seems to be used only by tests
        repo = self._factory.repo(wire)
        source = repo[source_id].tree if source_id else None
        target = repo[target_id].tree
        result = repo.object_store.tree_changes(source, target)
        return list(result)

    @reraise_safe_exceptions
    def tree_and_type_for_path(self, wire, commit_id, path):

        cache_on, context_uid, repo_id = self._cache_on(wire)
-        @self.region.conditional_cache_on_arguments(condition=cache_on)
+        region = self._region(wire)
+        @region.conditional_cache_on_arguments(condition=cache_on)
        def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
            repo_init = self._factory.repo_libgit2(wire)

            with repo_init as repo:
                commit = repo[commit_id]
                try:
                    tree = commit.tree[path]
                except KeyError:
                    return None, None, None

                return tree.id.hex, tree.type_str, tree.filemode
        return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)

    @reraise_safe_exceptions
    def tree_items(self, wire, tree_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
-        @self.region.conditional_cache_on_arguments(condition=cache_on)
+        region = self._region(wire)
+        @region.conditional_cache_on_arguments(condition=cache_on)
        def _tree_items(_repo_id, _tree_id):

            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                try:
                    tree = repo[tree_id]
                except KeyError:
                    raise ObjectMissing('No tree with id: {}'.format(tree_id))

                result = []
                for item in tree:
                    item_sha = item.hex
                    item_mode = item.filemode
                    item_type = item.type_str

                    if item_type == 'commit':
                        # NOTE(marcink): submodules are translated to 'link' for backward compat
                        item_type = 'link'

                    result.append((item.name, item_mode, item_sha, item_type))
                return result
        return _tree_items(repo_id, tree_id)

    @reraise_safe_exceptions
    def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
        """
        Old version that uses subprocess to call diff
        """

        flags = [
            '-U%s' % context, '--patch',
            '--binary',
            '--find-renames',
            '--no-indent-heuristic',
            # '--indent-heuristic',
            #'--full-index',
            #'--abbrev=40'
        ]

        if opt_ignorews:
            flags.append('--ignore-all-space')

        if commit_id_1 == self.EMPTY_COMMIT:
            cmd = ['show'] + flags + [commit_id_2]
        else:
            cmd = ['diff'] + flags + [commit_id_1, commit_id_2]

        if file_filter:
            cmd.extend(['--', file_filter])

        diff, __ = self.run_git_command(wire, cmd)
        # If we used the 'show' command, strip the first few lines
        # (until the actual diff starts)
        if commit_id_1 == self.EMPTY_COMMIT:
            lines = diff.splitlines()
            x = 0
            for line in lines:
                if line.startswith('diff'):
                    break
                x += 1
            # append a newline, just like the 'diff' command does
            diff = '\n'.join(lines[x:]) + '\n'
        return diff

    @reraise_safe_exceptions
    def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            swap = True
            flags = 0
            flags |= pygit2.GIT_DIFF_SHOW_BINARY

            if opt_ignorews:
                flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE

            if commit_id_1 == self.EMPTY_COMMIT:
                comm1 = repo[commit_id_2]
                diff_obj = comm1.tree.diff_to_tree(
                    flags=flags, context_lines=context, swap=swap)

            else:
                comm1 = repo[commit_id_2]
                comm2 = repo[commit_id_1]
                diff_obj = comm1.tree.diff_to_tree(
                    comm2.tree, flags=flags, context_lines=context, swap=swap)
            similar_flags = 0
            similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
            diff_obj.find_similar(flags=similar_flags)

            if file_filter:
                for p in diff_obj:
                    if p.delta.old_file.path == file_filter:
                        return p.patch or ''
                # no matching path == no diff
                return ''
            return diff_obj.patch or ''

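# For reference, the libgit2-based `diff` above boils down to this pygit2
# pattern, shown here without the swap trick by diffing the old tree to the
# new tree directly (repository path and commit ids are placeholders):
import pygit2


def patch_between(repo_path, old_commit_id, new_commit_id, context_lines=3):
    repo = pygit2.Repository(repo_path)
    old_tree = repo[old_commit_id].tree
    new_tree = repo[new_commit_id].tree
    diff = old_tree.diff_to_tree(
        new_tree, flags=pygit2.GIT_DIFF_SHOW_BINARY, context_lines=context_lines)
    # let libgit2 pair up renamed files before rendering the patch
    diff.find_similar(flags=pygit2.GIT_DIFF_FIND_RENAMES)
    return diff.patch or ''
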
    @reraise_safe_exceptions
    def node_history(self, wire, commit_id, path, limit):
        cache_on, context_uid, repo_id = self._cache_on(wire)
-        @self.region.conditional_cache_on_arguments(condition=cache_on)
+        region = self._region(wire)
+        @region.conditional_cache_on_arguments(condition=cache_on)
        def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
            # optimize for n==1, rev-list is much faster for that use-case
            if limit == 1:
                cmd = ['rev-list', '-1', commit_id, '--', path]
            else:
                cmd = ['log']
                if limit:
                    cmd.extend(['-n', str(safe_int(limit, 0))])
                cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])

            output, __ = self.run_git_command(wire, cmd)
            commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)

            return [x for x in commit_ids]
        return _node_history(context_uid, repo_id, commit_id, path, limit)

    @reraise_safe_exceptions
    def node_annotate(self, wire, commit_id, path):

        cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
        # -l ==> outputs long shas (and we need all 40 characters)
        # --root ==> doesn't put '^' character for boundaries
        # -r commit_id ==> blames for the given commit
        output, __ = self.run_git_command(wire, cmd)

        result = []
        for i, blame_line in enumerate(output.split('\n')[:-1]):
            line_no = i + 1
            commit_id, line = re.split(r' ', blame_line, 1)
            result.append((line_no, commit_id, line))
        return result

    @reraise_safe_exceptions
    def update_server_info(self, wire):
        repo = self._factory.repo(wire)
        update_server_info(repo)

    @reraise_safe_exceptions
    def get_all_commit_ids(self, wire):

        cache_on, context_uid, repo_id = self._cache_on(wire)
-        @self.region.conditional_cache_on_arguments(condition=cache_on)
+        region = self._region(wire)
+        @region.conditional_cache_on_arguments(condition=cache_on)
        def _get_all_commit_ids(_context_uid, _repo_id):

            cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
            try:
                output, __ = self.run_git_command(wire, cmd)
                return output.splitlines()
            except Exception:
                # Can be raised for empty repositories
                return []
        return _get_all_commit_ids(context_uid, repo_id)

    @reraise_safe_exceptions
    def run_git_command(self, wire, cmd, **opts):
        path = wire.get('path', None)

        if path and os.path.isdir(path):
            opts['cwd'] = path

        if '_bare' in opts:
            _copts = []
            del opts['_bare']
        else:
            _copts = ['-c', 'core.quotepath=false', ]
        safe_call = False
        if '_safe' in opts:
            # no exc on failure
            del opts['_safe']
            safe_call = True

        if '_copts' in opts:
            _copts.extend(opts['_copts'] or [])
            del opts['_copts']

        gitenv = os.environ.copy()
        gitenv.update(opts.pop('extra_env', {}))
        # GIT_DIR must not leak into the subprocess environment
        if 'GIT_DIR' in gitenv:
            del gitenv['GIT_DIR']
        gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
        gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'

        cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
        _opts = {'env': gitenv, 'shell': False}

        proc = None
        try:
            _opts.update(opts)
            proc = subprocessio.SubprocessIOChunker(cmd, **_opts)

            return ''.join(proc), ''.join(proc.error)
        except (EnvironmentError, OSError) as err:
            cmd = ' '.join(cmd)  # human friendly CMD
            tb_err = ("Couldn't run git command (%s).\n"
                      "Original error was:%s\n"
                      "Call options:%s\n"
                      % (cmd, err, _opts))
            log.exception(tb_err)
            if safe_call:
                return '', err
            else:
                raise exceptions.VcsException()(tb_err)
        finally:
            if proc:
                proc.close()

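# Usage sketch for run_git_command's keyword protocol as implemented above;
# `remote` stands for a GitRemote instance and the path is hypothetical:
def fetch_quietly(remote):
    wire = {'path': '/srv/repos/example.git'}

    # default call: a failing command raises exceptions.VcsException()
    out, err = remote.run_git_command(wire, ['rev-parse', '--verify', 'HEAD'])

    # _safe=True returns ('', err) instead of raising; _copts turn into
    # `git -c key=value` options; extra_env is layered over a sanitized
    # copy of os.environ (GIT_DIR removed, global config disabled)
    return remote.run_git_command(
        wire, ['fetch', 'origin'],
        _safe=True,
        _copts=['-c', 'protocol.version=2'],
        extra_env={'GIT_TERMINAL_PROMPT': '0'})
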
    @reraise_safe_exceptions
    def install_hooks(self, wire, force=False):
        from vcsserver.hook_utils import install_git_hooks
        bare = self.bare(wire)
        path = wire['path']
        return install_git_hooks(path, bare, force_create=force)

    @reraise_safe_exceptions
    def get_hooks_info(self, wire):
        from vcsserver.hook_utils import (
            get_git_pre_hook_version, get_git_post_hook_version)
        bare = self.bare(wire)
        path = wire['path']
        return {
            'pre_version': get_git_pre_hook_version(path, bare),
            'post_version': get_git_post_hook_version(path, bare),
        }
+
+    @reraise_safe_exceptions
+    def set_head_ref(self, wire, head_name):
+        log.debug('Setting refs/head to `%s`', head_name)
+        cmd = ['symbolic-ref', '"HEAD"', '"refs/heads/%s"' % head_name]
+        output, __ = self.run_git_command(wire, cmd)
+        return [head_name] + output.splitlines()
+
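# set_head_ref drives `git symbolic-ref`; the equivalent direct call, with
# the ref names passed as plain arguments (path and branch are placeholders):
import subprocess


def set_default_branch(repo_path, branch):
    # re-points HEAD without touching the refs themselves; this is how the
    # default branch of a bare repository is changed
    subprocess.check_call(
        ['git', 'symbolic-ref', 'HEAD', 'refs/heads/%s' % branch],
        cwd=repo_path)
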
+    @reraise_safe_exceptions
+    def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
+                     archive_dir_name, commit_id):
+
+        def file_walker(_commit_id, path):
+            repo_init = self._factory.repo_libgit2(wire)
+
+            with repo_init as repo:
+                commit = repo[commit_id]
+
+                if path in ['', '/']:
+                    tree = commit.tree
+                else:
+                    tree = commit.tree[path.rstrip('/')]
+                    tree_id = tree.id.hex
+                    try:
+                        tree = repo[tree_id]
+                    except KeyError:
+                        raise ObjectMissing('No tree with id: {}'.format(tree_id))
+
+                index = LibGit2Index.Index()
+                index.read_tree(tree)
+                file_iter = index
+
+                for fn in file_iter:
+                    file_path = fn.path
+                    mode = fn.mode
+                    is_link = stat.S_ISLNK(mode)
+                    if mode == pygit2.GIT_FILEMODE_COMMIT:
+                        log.debug('Skipping path %s as a commit node', file_path)
+                        continue
+                    yield ArchiveNode(file_path, mode, is_link, repo[fn.hex].read_raw)
+
+        return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
+                            archive_dir_name, commit_id)
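# archive_repo hands `file_walker` to the shared vcsserver.base.archive_repo
# helper, which only needs an iterable of (path, mode, is_link, raw-bytes
# reader) nodes. An illustrative walker over an in-memory mapping instead of a
# git tree (the namedtuple stands in for vcsserver's ArchiveNode):
import collections
import stat

Node = collections.namedtuple('Node', ['path', 'mode', 'is_link', 'read_raw'])


def walk_in_memory_tree(files):
    # files: {path: bytes}; every entry becomes a regular, non-link node
    for path, data in sorted(files.items()):
        mode = 0o100644
        yield Node(path, mode, stat.S_ISLNK(mode), lambda d=data: d)


for node in walk_in_memory_tree({'README.md': b'# demo\n'}):
    print(node.path, oct(node.mode), node.read_raw())
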
@@ -1,1009 +1,1047 b''
# RhodeCode VCSServer provides access to different vcs backends via network.
# Copyright (C) 2014-2020 RhodeCode GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

+import functools
import io
import logging
+import os
import stat
import urllib.request, urllib.parse, urllib.error
import urllib.request, urllib.error, urllib.parse
import traceback

from hgext import largefiles, rebase, purge
from hgext.strip import strip as hgext_strip
from mercurial import commands
from mercurial import unionrepo
from mercurial import verify
from mercurial import repair

import vcsserver
from vcsserver import exceptions
-from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
+from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, archive_repo, ArchiveNode
from vcsserver.hgcompat import (
    archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
    hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
    makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
    patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
-    RepoLookupError, InterventionRequired, RequirementError)
+    RepoLookupError, InterventionRequired, RequirementError,
+    alwaysmatcher, patternmatcher, hgutil)
from vcsserver.vcs_base import RemoteBase

log = logging.getLogger(__name__)

def make_ui_from_config(repo_config):

    class LoggingUI(ui.ui):
        def status(self, *msg, **opts):
            log.info(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).status(*msg, **opts)

        def warn(self, *msg, **opts):
            log.warn(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).warn(*msg, **opts)

        def error(self, *msg, **opts):
            log.error(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).error(*msg, **opts)

        def note(self, *msg, **opts):
            log.info(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).note(*msg, **opts)

        def debug(self, *msg, **opts):
            log.debug(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).debug(*msg, **opts)

    baseui = LoggingUI()

    # clean the baseui object
    baseui._ocfg = hgconfig.config()
    baseui._ucfg = hgconfig.config()
    baseui._tcfg = hgconfig.config()

    for section, option, value in repo_config:
        baseui.setconfig(section, option, value)

    # make our hgweb quiet so it doesn't print output
    baseui.setconfig('ui', 'quiet', 'true')

    baseui.setconfig('ui', 'paginate', 'never')
    # for better error reporting from Mercurial
    baseui.setconfig('ui', 'message-output', 'stderr')

    # force mercurial to only use 1 thread, otherwise it may try to set a
    # signal in a non-main thread, thus generating a ValueError.
    baseui.setconfig('worker', 'numcpus', 1)

    # If there is no config for the largefiles extension, we explicitly disable
    # it here. This overrides settings from repositories hgrc file. Recent
    # mercurial versions enable largefiles in hgrc on clone from largefile
    # repo.
    if not baseui.hasconfig('extensions', 'largefiles'):
        log.debug('Explicitly disable largefiles extension for repo.')
        baseui.setconfig('extensions', 'largefiles', '!')

    return baseui

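# make_ui_from_config consumes (section, option, value) triples; a minimal,
# purely illustrative configuration for an ephemeral ui object could be:
repo_config = [
    ('extensions', 'largefiles', '!'),     # keep largefiles disabled
    ('phases', 'publish', 'false'),        # leave new commits in draft phase
]
baseui = make_ui_from_config(repo_config)
assert baseui.hasconfig('extensions', 'largefiles')
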
def reraise_safe_exceptions(func):
    """Decorator for converting mercurial exceptions to something neutral."""

    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except (Abort, InterventionRequired) as e:
            raise_from_original(exceptions.AbortException(e))
        except RepoLookupError as e:
            raise_from_original(exceptions.LookupException(e))
        except RequirementError as e:
            raise_from_original(exceptions.RequirementException(e))
        except RepoError as e:
            raise_from_original(exceptions.VcsException(e))
        except LookupError as e:
            raise_from_original(exceptions.LookupException(e))
        except Exception as e:
            if not hasattr(e, '_vcs_kind'):
                log.exception("Unhandled exception in hg remote call")
                raise_from_original(exceptions.UnhandledException(e))

            raise
    return wrapper

class MercurialFactory(RepoFactory):
    repo_type = 'hg'

    def _create_config(self, config, hooks=True):
        if not hooks:
            hooks_to_clean = frozenset((
                'changegroup.repo_size', 'preoutgoing.pre_pull',
                'outgoing.pull_logger', 'prechangegroup.pre_push'))
            new_config = []
            for section, option, value in config:
                if section == 'hooks' and option in hooks_to_clean:
                    continue
                new_config.append((section, option, value))
            config = new_config

        baseui = make_ui_from_config(config)
        return baseui

    def _create_repo(self, wire, create):
        baseui = self._create_config(wire["config"])
        return instance(baseui, wire["path"], create)

    def repo(self, wire, create=False):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create)

156 baseui.setconfig('ui', 'quiet', 'false')
158 baseui.setconfig('ui', 'quiet', 'false')
157 output = io.BytesIO()
159 output = io.BytesIO()
158
160
159 def write(data, **unused_kwargs):
161 def write(data, **unused_kwargs):
160 output.write(data)
162 output.write(data)
161
163
162 baseui.status = write
164 baseui.status = write
163 baseui.write = write
165 baseui.write = write
164 baseui.warn = write
166 baseui.warn = write
165 baseui.debug = write
167 baseui.debug = write
166
168
167 return baseui, output
169 return baseui, output
168
170
169
171
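# Typical use of patch_ui_message_output: run a Mercurial operation with the
# patched ui, then read back everything it tried to print (sketch):
captured_ui, output = patch_ui_message_output(make_ui_from_config([]))

# ... invoke a mercurial operation with captured_ui here ...

messages = output.getvalue()  # bytes written via status/write/warn/debug
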
class HgRemote(RemoteBase):

    def __init__(self, factory):
        self._factory = factory
        self._bulk_methods = {
            "affected_files": self.ctx_files,
            "author": self.ctx_user,
            "branch": self.ctx_branch,
            "children": self.ctx_children,
            "date": self.ctx_date,
            "message": self.ctx_description,
            "parents": self.ctx_parents,
            "status": self.ctx_status,
            "obsolete": self.ctx_obsolete,
            "phase": self.ctx_phase,
            "hidden": self.ctx_hidden,
            "_file_paths": self.ctx_list,
        }

    def _get_ctx(self, repo, ref):
        return get_ctx(repo, ref)

    @reraise_safe_exceptions
    def discover_hg_version(self):
        from mercurial import util
        return util.version()

    @reraise_safe_exceptions
    def is_empty(self, wire):
        repo = self._factory.repo(wire)

        try:
            return len(repo) == 0
        except Exception:
            log.exception("failed to read object_store")
            return False

    @reraise_safe_exceptions
-    def archive_repo(self, archive_path, mtime, file_info, kind):
-        if kind == "tgz":
-            archiver = archival.tarit(archive_path, mtime, "gz")
-        elif kind == "tbz2":
-            archiver = archival.tarit(archive_path, mtime, "bz2")
-        elif kind == 'zip':
-            archiver = archival.zipit(archive_path, mtime)
-        else:
-            raise exceptions.ArchiveException()(
-                'Remote does not support: "%s".' % kind)
-
-        for f_path, f_mode, f_is_link, f_content in file_info:
-            archiver.addfile(f_path, f_mode, f_is_link, f_content)
-        archiver.done()
-
-    @reraise_safe_exceptions
    def bookmarks(self, wire):
        cache_on, context_uid, repo_id = self._cache_on(wire)
-        @self.region.conditional_cache_on_arguments(condition=cache_on)
+        region = self._region(wire)
+        @region.conditional_cache_on_arguments(condition=cache_on)
        def _bookmarks(_context_uid, _repo_id):
            repo = self._factory.repo(wire)
            return dict(repo._bookmarks)

        return _bookmarks(context_uid, repo_id)

    @reraise_safe_exceptions
    def branches(self, wire, normal, closed):
        cache_on, context_uid, repo_id = self._cache_on(wire)
-        @self.region.conditional_cache_on_arguments(condition=cache_on)
+        region = self._region(wire)
+        @region.conditional_cache_on_arguments(condition=cache_on)
        def _branches(_context_uid, _repo_id, _normal, _closed):
            repo = self._factory.repo(wire)
            iter_branches = repo.branchmap().iterbranches()
            bt = {}
            for branch_name, _heads, tip, is_closed in iter_branches:
                if normal and not is_closed:
                    bt[branch_name] = tip
                if closed and is_closed:
                    bt[branch_name] = tip

            return bt

        return _branches(context_uid, repo_id, normal, closed)

    @reraise_safe_exceptions
    def bulk_request(self, wire, commit_id, pre_load):
        cache_on, context_uid, repo_id = self._cache_on(wire)
-        @self.region.conditional_cache_on_arguments(condition=cache_on)
+        region = self._region(wire)
+        @region.conditional_cache_on_arguments(condition=cache_on)
        def _bulk_request(_repo_id, _commit_id, _pre_load):
            result = {}
            for attr in pre_load:
                try:
                    method = self._bulk_methods[attr]
                    result[attr] = method(wire, commit_id)
                except KeyError as e:
                    raise exceptions.VcsException(e)(
                        'Unknown bulk attribute: "%s"' % attr)
            return result

        return _bulk_request(repo_id, commit_id, sorted(pre_load))

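# bulk_request resolves several commit attributes in one cached call; every
# name in pre_load must be a key of self._bulk_methods, anything else raises
# VcsException. Hypothetical usage (`remote` is an HgRemote, ids invented):
def describe_commit(remote, commit_id):
    wire = {'path': '/srv/repos/example-hg'}
    return remote.bulk_request(
        wire, commit_id, pre_load=['author', 'branch', 'parents'])
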
268 @reraise_safe_exceptions
257 @reraise_safe_exceptions
269 def ctx_branch(self, wire, commit_id):
258 def ctx_branch(self, wire, commit_id):
270 cache_on, context_uid, repo_id = self._cache_on(wire)
259 cache_on, context_uid, repo_id = self._cache_on(wire)
271 @self.region.conditional_cache_on_arguments(condition=cache_on)
260 region = self._region(wire)
261 @region.conditional_cache_on_arguments(condition=cache_on)
272 def _ctx_branch(_repo_id, _commit_id):
262 def _ctx_branch(_repo_id, _commit_id):
273 repo = self._factory.repo(wire)
263 repo = self._factory.repo(wire)
274 ctx = self._get_ctx(repo, commit_id)
264 ctx = self._get_ctx(repo, commit_id)
275 return ctx.branch()
265 return ctx.branch()
276 return _ctx_branch(repo_id, commit_id)
266 return _ctx_branch(repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_date(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_date(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.date()
        return _ctx_date(repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_description(self, wire, revision):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        return ctx.description()

    @reraise_safe_exceptions
    def ctx_files(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_files(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.files()

        return _ctx_files(repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_list(self, path, revision):
        repo = self._factory.repo(path)
        ctx = self._get_ctx(repo, revision)
        return list(ctx)

    @reraise_safe_exceptions
    def ctx_parents(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_parents(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return [parent.hex() for parent in ctx.parents()
                    if not (parent.hidden() or parent.obsolete())]

        return _ctx_parents(repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_children(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_children(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return [child.hex() for child in ctx.children()
                    if not (child.hidden() or child.obsolete())]

        return _ctx_children(repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_phase(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_phase(_context_uid, _repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            # public=0, draft=1, secret=3
            return ctx.phase()
        return _ctx_phase(context_uid, repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_obsolete(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.obsolete()
        return _ctx_obsolete(context_uid, repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_hidden(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_hidden(_context_uid, _repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.hidden()
        return _ctx_hidden(context_uid, repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_substate(self, wire, revision):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        return ctx.substate

    @reraise_safe_exceptions
    def ctx_status(self, wire, revision):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        status = repo[ctx.p1().node()].status(other=ctx.node())
        # the status object (an odd, custom named tuple in mercurial) is not
        # correctly serializable; we turn it into a plain list, as the
        # underlying API expects a list
        return list(status)

    @reraise_safe_exceptions
    def ctx_user(self, wire, revision):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        return ctx.user()

    @reraise_safe_exceptions
    def check_url(self, url, config):
        _proto = None
        if '+' in url[:url.find('://')]:
            _proto = url[0:url.find('+')]
            url = url[url.find('+') + 1:]
        handlers = []
        url_obj = url_parser(url)
        test_uri, authinfo = url_obj.authinfo()
        url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
        url_obj.query = obfuscate_qs(url_obj.query)

        cleaned_uri = str(url_obj)
        log.info("Checking URL for remote cloning/import: %s", cleaned_uri)

        if authinfo:
            # create a password manager
            passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
            passmgr.add_password(*authinfo)

            handlers.extend((httpbasicauthhandler(passmgr),
                             httpdigestauthhandler(passmgr)))

        o = urllib.request.build_opener(*handlers)
        o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
                        ('Accept', 'application/mercurial-0.1')]

        q = {"cmd": 'between'}
        q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
        qs = '?%s' % urllib.parse.urlencode(q)
        cu = "%s%s" % (test_uri, qs)
        req = urllib.request.Request(cu, None, {})

        try:
            log.debug("Trying to open URL %s", cleaned_uri)
            resp = o.open(req)
            if resp.code != 200:
                raise exceptions.URLError()('Return Code is not 200')
        except Exception as e:
            log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
            # means it cannot be cloned
            raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))

        # now check if it's a proper hg repo, but don't do it for svn
        try:
            if _proto != 'svn':
                # check for pure hg repos
                log.debug(
                    "Verifying if URL is a Mercurial repository: %s",
                    cleaned_uri)
                ui = make_ui_from_config(config)
                peer_checker = makepeer(ui, url)
                peer_checker.lookup('tip')
        except Exception as e:
            log.warning("URL is not a valid Mercurial repository: %s",
                        cleaned_uri)
            raise exceptions.URLError(e)(
                "url [%s] does not look like an hg repo org_exc: %s"
                % (cleaned_uri, e))

        log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
        return True
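
    # For illustration only (the URL is an assumed example): for
    # url='https://code.example.com/repo' the probe above issues the
    # Mercurial wire-protocol 'between' command
    #
    #   https://code.example.com/repo?cmd=between&pairs=0000...0000-0000...0000
    #
    # (the 40-zero null node on both sides), which a real hg server answers
    # cheaply; non-hg endpoints are then caught by the peer lookup('tip').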

    @reraise_safe_exceptions
    def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
        repo = self._factory.repo(wire)

        if file_filter:
            match_filter = match(file_filter[0], '', [file_filter[1]])
        else:
            match_filter = file_filter
        opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)

        try:
            return "".join(patch.diff(
                repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
        except RepoLookupError as e:
            raise exceptions.LookupException(e)()

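    # NOTE (an assumption inferred from the call above, not documented API):
    # `file_filter` appears to arrive as a (root, pattern) pair, which
    # mercurial.match.match(root, cwd, patterns) turns into a matcher that
    # restricts the diff to the matching paths.
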
    @reraise_safe_exceptions
    def node_history(self, wire, revision, path, limit):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
            repo = self._factory.repo(wire)

            ctx = self._get_ctx(repo, revision)
            fctx = ctx.filectx(path)

            def history_iter():
                limit_rev = fctx.rev()
                # walk the filelog newest to oldest, skipping hidden or
                # obsolete changesets, and only yield entries at or below
                # the starting file revision
                for obj in reversed(list(fctx.filelog())):
                    obj = fctx.filectx(obj)
                    ctx = obj.changectx()
                    if ctx.hidden() or ctx.obsolete():
                        continue

                    if limit_rev >= obj.rev():
                        yield obj

            history = []
            for cnt, obj in enumerate(history_iter()):
                if limit and cnt >= limit:
                    break
                history.append(hex(obj.node()))

            return history
        return _node_history(context_uid, repo_id, revision, path, limit)

    @reraise_safe_exceptions
    def node_history_untill(self, wire, revision, path, limit):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _node_history_until(_context_uid, _repo_id, _revision, _path, _limit):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, revision)
            fctx = ctx.filectx(path)

            file_log = list(fctx.filelog())
            if limit:
                # Limit to the last n items
                file_log = file_log[-limit:]

            return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
        return _node_history_until(context_uid, repo_id, revision, path, limit)

    @reraise_safe_exceptions
    def fctx_annotate(self, wire, revision, path):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        fctx = ctx.filectx(path)

        result = []
        for i, annotate_obj in enumerate(fctx.annotate(), 1):
            ln_no = i
            sha = hex(annotate_obj.fctx.node())
            content = annotate_obj.text
            result.append((ln_no, sha, content))
        return result

    @reraise_safe_exceptions
    def fctx_node_data(self, wire, revision, path):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        fctx = ctx.filectx(path)
        return fctx.data()

    @reraise_safe_exceptions
    def fctx_flags(self, wire, commit_id, path):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _fctx_flags(_repo_id, _commit_id, _path):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            fctx = ctx.filectx(path)
            return fctx.flags()

        return _fctx_flags(repo_id, commit_id, path)

    @reraise_safe_exceptions
    def fctx_size(self, wire, commit_id, path):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _fctx_size(_repo_id, _revision, _path):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            fctx = ctx.filectx(path)
            return fctx.size()
        return _fctx_size(repo_id, commit_id, path)

    @reraise_safe_exceptions
    def get_all_commit_ids(self, wire, name):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _get_all_commit_ids(_context_uid, _repo_id, _name):
            repo = self._factory.repo(wire)
            repo = repo.filtered(name)
            # each changelog index entry is a tuple whose 8th field (index 7)
            # holds the binary node id, so hex(x[7]) yields the commit hash
            revs = [hex(x[7]) for x in repo.changelog.index]
            return revs
        return _get_all_commit_ids(context_uid, repo_id, name)

    @reraise_safe_exceptions
    def get_config_value(self, wire, section, name, untrusted=False):
        repo = self._factory.repo(wire)
        return repo.ui.config(section, name, untrusted=untrusted)

    @reraise_safe_exceptions
    def is_large_file(self, wire, commit_id, path):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
            return largefiles.lfutil.isstandin(path)

        return _is_large_file(context_uid, repo_id, commit_id, path)

    @reraise_safe_exceptions
    def is_binary(self, wire, revision, path):
        cache_on, context_uid, repo_id = self._cache_on(wire)

        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _is_binary(_repo_id, _sha, _path):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, revision)
            fctx = ctx.filectx(path)
            return fctx.isbinary()

        return _is_binary(repo_id, revision, path)

    @reraise_safe_exceptions
    def in_largefiles_store(self, wire, sha):
        repo = self._factory.repo(wire)
        return largefiles.lfutil.instore(repo, sha)

    @reraise_safe_exceptions
    def in_user_cache(self, wire, sha):
        repo = self._factory.repo(wire)
        return largefiles.lfutil.inusercache(repo.ui, sha)

    @reraise_safe_exceptions
    def store_path(self, wire, sha):
        repo = self._factory.repo(wire)
        return largefiles.lfutil.storepath(repo, sha)

    @reraise_safe_exceptions
    def link(self, wire, sha, path):
        repo = self._factory.repo(wire)
        largefiles.lfutil.link(
            largefiles.lfutil.usercachepath(repo.ui, sha), path)
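
    # Background note on the largefiles helpers above: mercurial's largefiles
    # extension keeps blobs both in a per-repo store and in a user-level
    # cache. instore()/storepath() address the former, inusercache() and
    # usercachepath() the latter, and link() hardlinks a cached blob into
    # place.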

    @reraise_safe_exceptions
    def localrepository(self, wire, create=False):
        self._factory.repo(wire, create=create)

    @reraise_safe_exceptions
    def lookup(self, wire, revision, both):
        cache_on, context_uid, repo_id = self._cache_on(wire)

        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _lookup(_context_uid, _repo_id, _revision, _both):

            repo = self._factory.repo(wire)
            rev = _revision
            if isinstance(rev, int):
                # NOTE(marcink):
                # since Mercurial doesn't support negative indexes properly
                # we need to shift by one to get the proper index, e.g.
                # repo[-1] => repo[-2]
                # repo[0] => repo[-1]
                if rev <= 0:
                    rev = rev - 1
            try:
                ctx = self._get_ctx(repo, rev)
            except (TypeError, RepoLookupError) as e:
                e._org_exc_tb = traceback.format_exc()
                raise exceptions.LookupException(e)(rev)
            except LookupError as e:
                e._org_exc_tb = traceback.format_exc()
                raise exceptions.LookupException(e)(e.name)

            if not both:
                return ctx.hex()

            ctx = repo[ctx.hex()]
            return ctx.hex(), ctx.rev()

        return _lookup(context_uid, repo_id, revision, both)
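
    # Illustrative mapping (mirroring the NOTE above) of the index shift that
    # _lookup performs for integer revisions:
    #
    #   lookup(wire, 0, both=False)   # resolved as repo[-1], the tip
    #   lookup(wire, -1, both=False)  # resolved as repo[-2]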

    @reraise_safe_exceptions
    def sync_push(self, wire, url):
        if not self.check_url(url, wire['config']):
            return

        repo = self._factory.repo(wire)

        # Disable any prompts for this repo
        repo.ui.setconfig('ui', 'interactive', 'off', '-y')

        bookmarks = list(dict(repo._bookmarks).keys())
        remote = peer(repo, {}, url)
        # Disable any prompts for this remote
        remote.ui.setconfig('ui', 'interactive', 'off', '-y')

        return exchange.push(
            repo, remote, newbranch=True, bookmarks=bookmarks).cgresult

    @reraise_safe_exceptions
    def revision(self, wire, rev):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, rev)
        return ctx.rev()

    @reraise_safe_exceptions
    def rev_range(self, wire, commit_filter):
        cache_on, context_uid, repo_id = self._cache_on(wire)

        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _rev_range(_context_uid, _repo_id, _filter):
            repo = self._factory.repo(wire)
            revisions = [rev for rev in revrange(repo, commit_filter)]
            return revisions

        return _rev_range(context_uid, repo_id, sorted(commit_filter))

    @reraise_safe_exceptions
    def rev_range_hash(self, wire, node):
        repo = self._factory.repo(wire)

        def get_revs(repo, rev_opt):
            if rev_opt:
                revs = revrange(repo, rev_opt)
                if len(revs) == 0:
                    return (nullrev, nullrev)
                return max(revs), min(revs)
            else:
                return len(repo) - 1, 0

        stop, start = get_revs(repo, [node + ':'])
        revs = [hex(repo[r].node()) for r in range(start, stop + 1)]
        return revs

    @reraise_safe_exceptions
    def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
        other_path = kwargs.pop('other_path', None)

        # case when we want to compare two independent repositories
        if other_path and other_path != wire["path"]:
            baseui = self._factory._create_config(wire["config"])
            repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
        else:
            repo = self._factory.repo(wire)
        return list(repo.revs(rev_spec, *args))

    @reraise_safe_exceptions
    def verify(self, wire):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])

        baseui, output = patch_ui_message_output(baseui)

        repo.ui = baseui
        verify.verify(repo)
        return output.getvalue()

    @reraise_safe_exceptions
    def hg_update_cache(self, wire):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        baseui, output = patch_ui_message_output(baseui)

        repo.ui = baseui
        with repo.wlock(), repo.lock():
            repo.updatecaches(full=True)

        return output.getvalue()

    @reraise_safe_exceptions
    def hg_rebuild_fn_cache(self, wire):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        baseui, output = patch_ui_message_output(baseui)

        repo.ui = baseui

        repair.rebuildfncache(baseui, repo)

        return output.getvalue()

    @reraise_safe_exceptions
    def tags(self, wire):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _tags(_context_uid, _repo_id):
            repo = self._factory.repo(wire)
            return repo.tags()

        return _tags(context_uid, repo_id)

    @reraise_safe_exceptions
    def update(self, wire, node=None, clean=False):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        commands.update(baseui, repo, node=node, clean=clean)

    @reraise_safe_exceptions
    def identify(self, wire):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        output = io.BytesIO()
        baseui.write = output.write
        # This is required to get a full node id
        baseui.debugflag = True
        commands.identify(baseui, repo, id=True)

        return output.getvalue()

    @reraise_safe_exceptions
    def heads(self, wire, branch=None):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        output = io.BytesIO()

        def write(data, **unused_kwargs):
            output.write(data)

        baseui.write = write
        if branch:
            args = [branch]
        else:
            args = []
        commands.heads(baseui, repo, template='{node} ', *args)

        return output.getvalue()
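
    # For illustration: with template='{node} ' the heads() call above
    # captures a space-separated run of full 40-character node hashes, e.g.
    # b'e1e68d19... 9f8a3b21... ' (hashes abbreviated here).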

    @reraise_safe_exceptions
    def ancestor(self, wire, revision1, revision2):
        repo = self._factory.repo(wire)
        changelog = repo.changelog
        lookup = repo.lookup
        a = changelog.ancestor(lookup(revision1), lookup(revision2))
        return hex(a)

    @reraise_safe_exceptions
    def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
        baseui = self._factory._create_config(wire["config"], hooks=hooks)
        clone(baseui, source, dest, noupdate=not update_after_clone)

    @reraise_safe_exceptions
    def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):

        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        publishing = baseui.configbool('phases', 'publish')
        if publishing:
            new_commit = 'public'
        else:
            new_commit = 'draft'

        def _filectxfn(_repo, ctx, path):
            """
            Marks the given path as added/changed/removed in the given _repo.
            This is used by Mercurial's internal commit function.
            """

            # check if this path is removed
            if path in removed:
                # returning None is a way to mark node for removal
                return None

            # check if this path is added
            for node in updated:
                if node['path'] == path:
                    return memfilectx(
                        _repo,
                        changectx=ctx,
                        path=node['path'],
                        data=node['content'],
                        islink=False,
                        isexec=bool(node['mode'] & stat.S_IXUSR),
                        copysource=False)

            raise exceptions.AbortException()(
                "Given path hasn't been marked as added, "
                "changed or removed (%s)" % path)

        with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):

            commit_ctx = memctx(
                repo=repo,
                parents=parents,
                text=message,
                files=files,
                filectxfn=_filectxfn,
                user=user,
                date=(commit_time, commit_timezone),
                extra=extra)

            n = repo.commitctx(commit_ctx)
            new_id = hex(n)

        return new_id
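
    # Illustrative shape (an assumption inferred from the access patterns in
    # _filectxfn above, not an authoritative schema) of the commitctx inputs:
    #
    #   updated = [{'path': 'docs/readme.rst',   # file to write
    #               'content': b'new text',      # raw bytes
    #               'mode': 0o100644}]           # checked against stat.S_IXUSR
    #   removed = ['old/obsolete.txt']           # plain list of paths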

    @reraise_safe_exceptions
    def pull(self, wire, url, commit_ids=None):
        repo = self._factory.repo(wire)
        # Disable any prompts for this repo
        repo.ui.setconfig('ui', 'interactive', 'off', '-y')

        remote = peer(repo, {}, url)
        # Disable any prompts for this remote
        remote.ui.setconfig('ui', 'interactive', 'off', '-y')

        if commit_ids:
            commit_ids = [bin(commit_id) for commit_id in commit_ids]

        return exchange.pull(
            repo, remote, heads=commit_ids, force=None).cgresult

    @reraise_safe_exceptions
    def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'], hooks=hooks)

        # Mercurial internally has a lot of logic that checks ONLY whether an
        # option is defined, so we only pass options that are actually set
        opts = {}
        if bookmark:
            opts['bookmark'] = bookmark
        if branch:
            opts['branch'] = branch
        if revision:
            opts['rev'] = revision

        commands.pull(baseui, repo, source, **opts)

    @reraise_safe_exceptions
    def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'], hooks=hooks)
        commands.push(baseui, repo, dest=dest_path, rev=revisions,
                      new_branch=push_branches)

    @reraise_safe_exceptions
    def strip(self, wire, revision, update, backup):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        hgext_strip(
            repo.baseui, repo, ctx.node(), update=update, backup=backup)

    @reraise_safe_exceptions
    def get_unresolved_files(self, wire):
        repo = self._factory.repo(wire)

        log.debug('Calculating unresolved files for repo: %s', repo)
        output = io.BytesIO()

        def write(data, **unused_kwargs):
            output.write(data)

        baseui = self._factory._create_config(wire['config'])
        baseui.write = write

        commands.resolve(baseui, repo, list=True)
        unresolved = output.getvalue().splitlines(False)
        return unresolved

    @reraise_safe_exceptions
    def merge(self, wire, revision):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        repo.ui.setconfig('ui', 'merge', 'internal:dump')

        # When sub repositories are used, Mercurial prompts the user on merge
        # conflicts or differing sub repository sources. Setting the
        # interactive flag to `False` makes Mercurial use a default value
        # instead of prompting.
        repo.ui.setconfig('ui', 'interactive', False)
        commands.merge(baseui, repo, rev=revision)

    @reraise_safe_exceptions
    def merge_state(self, wire):
        repo = self._factory.repo(wire)
        repo.ui.setconfig('ui', 'merge', 'internal:dump')

        # When sub repositories are used, Mercurial prompts the user on merge
        # conflicts or differing sub repository sources. Setting the
        # interactive flag to `False` makes Mercurial use a default value
        # instead of prompting.
        repo.ui.setconfig('ui', 'interactive', False)
        ms = hg_merge.mergestate(repo)
        return [x for x in ms.unresolved()]

    @reraise_safe_exceptions
    def commit(self, wire, message, username, close_branch=False):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        repo.ui.setconfig('ui', 'username', username)
        commands.commit(baseui, repo, message=message, close_branch=close_branch)

    @reraise_safe_exceptions
    def rebase(self, wire, source=None, dest=None, abort=False):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        repo.ui.setconfig('ui', 'merge', 'internal:dump')
        # When sub repositories are used, Mercurial prompts the user on merge
        # conflicts or differing sub repository sources. Setting the
        # interactive flag to `False` makes Mercurial use a default value
        # instead of prompting.
        repo.ui.setconfig('ui', 'interactive', False)
        rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)

    @reraise_safe_exceptions
    def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        node = ctx.node()

        date = (tag_time, tag_timezone)
        try:
            hg_tag.tag(repo, name, node, message, local, user, date)
        except Abort as e:
            log.exception("Tag operation aborted")
            # Exception can contain unicode which we convert
            raise exceptions.AbortException(e)(repr(e))

    @reraise_safe_exceptions
    def bookmark(self, wire, bookmark, revision=None):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)

    @reraise_safe_exceptions
    def install_hooks(self, wire, force=False):
        # we don't need any special hooks for Mercurial
        pass

    @reraise_safe_exceptions
    def get_hooks_info(self, wire):
        return {
            'pre_version': vcsserver.__version__,
            'post_version': vcsserver.__version__,
        }

    @reraise_safe_exceptions
    def set_head_ref(self, wire, head_name):
        pass

    @reraise_safe_exceptions
    def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
                     archive_dir_name, commit_id):

        def file_walker(_commit_id, path):
            repo = self._factory.repo(wire)
            ctx = repo[_commit_id]
            is_root = path in ['', '/']
            if is_root:
                matcher = alwaysmatcher(badfn=None)
            else:
                matcher = patternmatcher('', [(b'glob', path + '/**', b'')], badfn=None)
            file_iter = ctx.manifest().walk(matcher)

            for fn in file_iter:
                file_path = fn
                flags = ctx.flags(fn)
                mode = 0o755 if b'x' in flags else 0o644
                is_link = b'l' in flags

                yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)

        return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
                            archive_dir_name, commit_id)

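    # Example (assumed path value) of the matcher built in file_walker above:
    # for path='docs' the glob pattern b'docs/**' selects every manifest
    # entry below that directory, while the root path falls back to
    # alwaysmatcher and archives the whole tree.
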
@@ -1,79 +1,79 b''
# RhodeCode VCSServer provides access to different vcs backends via network.
# Copyright (C) 2014-2020 RhodeCode GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""
Mercurial libs compatibility
"""

import mercurial
from mercurial import demandimport
# patch demandimport, due to a bug in mercurial that always triggers
# demandimport.enable()
demandimport.enable = lambda *args, **kwargs: 1

from mercurial import ui
from mercurial import patch
from mercurial import config
from mercurial import extensions
from mercurial import scmutil
from mercurial import archival
from mercurial import discovery
from mercurial import unionrepo
from mercurial import localrepo
from mercurial import merge as hg_merge
from mercurial import subrepo
from mercurial import subrepoutil
from mercurial import tags as hg_tag
from mercurial import util as hgutil
from mercurial.commands import clone, nullid, pull
from mercurial.context import memctx, memfilectx
from mercurial.error import (
    LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
    RequirementError, ProgrammingError)
from mercurial.hgweb import hgweb_mod
from mercurial.localrepo import instance
from mercurial.match import match, alwaysmatcher, patternmatcher
from mercurial.mdiff import diffopts
from mercurial.node import bin, hex
from mercurial.encoding import tolocal
from mercurial.discovery import findcommonoutgoing
from mercurial.hg import peer
from mercurial.httppeer import makepeer
from mercurial.util import url as hg_url
from mercurial.scmutil import revrange, revsymbol
from mercurial.node import nullrev
from mercurial import exchange
from hgext import largefiles

# these auth handlers are patched for a python 2.6.5 bug causing
# infinite looping when given invalid resources
from mercurial.url import httpbasicauthhandler, httpdigestauthhandler


def get_ctx(repo, ref):
    try:
        ctx = repo[ref]
    except (ProgrammingError, TypeError):
        # we're unable to find the rev using a regular lookup, we fallback
        # to slower, but backward compat revsymbol usage
        ctx = revsymbol(repo, ref)
    except (LookupError, RepoLookupError):
        # Similar case as above but only for refs that are not numeric
        if isinstance(ref, int):
            raise
        ctx = revsymbol(repo, ref)
    return ctx
@@ -1,729 +1,729 b''
# -*- coding: utf-8 -*-

# RhodeCode VCSServer provides access to different vcs backends via network.
# Copyright (C) 2014-2020 RhodeCode GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

import io
import os
import sys
import logging
import collections
import importlib
import base64

from http.client import HTTPConnection


import mercurial.scmutil
import mercurial.node
import simplejson as json

from vcsserver import exceptions, subprocessio, settings

log = logging.getLogger(__name__)


class HooksHttpClient(object):
    connection = None

    def __init__(self, hooks_uri):
        self.hooks_uri = hooks_uri

    def __call__(self, method, extras):
        connection = HTTPConnection(self.hooks_uri)
        body = self._serialize(method, extras)
        try:
            connection.request('POST', '/', body)
        except Exception:
-            log.error('Connection failed on %s', connection)
+            log.error('Hooks calling Connection failed on %s', connection.__dict__)
            raise
        response = connection.getresponse()

        response_data = response.read()

        try:
            return json.loads(response_data)
        except Exception:
            log.exception('Failed to decode hook response json data. '
                          'response_code:%s, raw_data:%s',
                          response.status, response_data)
            raise

    def _serialize(self, hook_name, extras):
        data = {
            'method': hook_name,
            'extras': extras
        }
        return json.dumps(data)


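To make the wire format of `HooksHttpClient` concrete: it POSTs a JSON body of `{"method": ..., "extras": ...}` to `hooks_uri` and expects a JSON reply. A minimal, hypothetical counterpart endpoint (not part of this codebase, shown only to illustrate the protocol):

    import json
    from http.server import BaseHTTPRequestHandler

    class StubHooksHandler(BaseHTTPRequestHandler):
        def do_POST(self):
            length = int(self.headers['Content-Length'])
            payload = json.loads(self.rfile.read(length))
            # echo back the minimal result shape the client expects
            reply = {'status': 0, 'output': 'ran %s\n' % payload['method']}
            body = json.dumps(reply).encode('utf-8')
            self.send_response(200)
            self.send_header('Content-Type', 'application/json')
            self.send_header('Content-Length', str(len(body)))
            self.end_headers()
            self.wfile.write(body)

    # HooksHttpClient('127.0.0.1:8888')('pre_push', extras) would hit this.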
class HooksDummyClient(object):
    def __init__(self, hooks_module):
        self._hooks_module = importlib.import_module(hooks_module)

    def __call__(self, hook_name, extras):
        with self._hooks_module.Hooks() as hooks:
            return getattr(hooks, hook_name)(extras)


class HooksShadowRepoClient(object):

    def __call__(self, hook_name, extras):
        return {'output': '', 'status': 0}


class RemoteMessageWriter(object):
    """Writer base class."""
    def write(self, message):
        raise NotImplementedError()


class HgMessageWriter(RemoteMessageWriter):
    """Writer that knows how to send messages to mercurial clients."""

    def __init__(self, ui):
        self.ui = ui

    def write(self, message):
        # TODO: Check why the quiet flag is set by default.
        old = self.ui.quiet
        self.ui.quiet = False
        self.ui.status(message.encode('utf-8'))
        self.ui.quiet = old


class GitMessageWriter(RemoteMessageWriter):
    """Writer that knows how to send messages to git clients."""

    def __init__(self, stdout=None):
        self.stdout = stdout or sys.stdout

    def write(self, message):
        self.stdout.write(message.encode('utf-8'))


class SvnMessageWriter(RemoteMessageWriter):
    """Writer that knows how to send messages to svn clients."""

    def __init__(self, stderr=None):
        # SVN needs data sent to stderr for back-to-client messaging
        self.stderr = stderr or sys.stderr

    def write(self, message):
        self.stderr.write(message.encode('utf-8'))


def _handle_exception(result):
    exception_class = result.get('exception')
    exception_traceback = result.get('exception_traceback')

    if exception_traceback:
        log.error('Got traceback from remote call:%s', exception_traceback)

    if exception_class == 'HTTPLockedRC':
        raise exceptions.RepositoryLockedException()(*result['exception_args'])
    elif exception_class == 'HTTPBranchProtected':
        raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
    elif exception_class == 'RepositoryError':
        raise exceptions.VcsException()(*result['exception_args'])
    elif exception_class:
        raise Exception('Got remote exception "%s" with args "%s"' %
                        (exception_class, result['exception_args']))


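For reference, the shape of the `result` dictionary that `_handle_exception` and `_call_hook` expect back from a hooks client; the values here are illustrative, not taken from a real call:

    result = {
        'status': 1,                             # non-zero rejects the operation
        'output': 'push rejected by rule\n',     # relayed to the client via a writer
        'exception': 'HTTPBranchProtected',      # optional remote exception class name
        'exception_args': ('branch is protected',),
        'exception_traceback': '',               # optional, logged when present
    }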
def _get_hooks_client(extras):
    hooks_uri = extras.get('hooks_uri')
    is_shadow_repo = extras.get('is_shadow_repo')
    if hooks_uri:
        return HooksHttpClient(extras['hooks_uri'])
    elif is_shadow_repo:
        return HooksShadowRepoClient()
    else:
        return HooksDummyClient(extras['hooks_module'])


def _call_hook(hook_name, extras, writer):
    hooks_client = _get_hooks_client(extras)
    log.debug('Hooks, using client:%s', hooks_client)
    result = hooks_client(hook_name, extras)
    log.debug('Hooks got result: %s', result)

    _handle_exception(result)
    writer.write(result['output'])

    return result['status']


def _extras_from_ui(ui):
    hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
    if not hook_data:
        # maybe it's inside environ?
        env_hook_data = os.environ.get('RC_SCM_DATA')
        if env_hook_data:
            hook_data = env_hook_data

    extras = {}
    if hook_data:
        extras = json.loads(hook_data)
    return extras


def _rev_range_hash(repo, node, check_heads=False):
    from vcsserver.hgcompat import get_ctx

    commits = []
    revs = []
    start = get_ctx(repo, node).rev()
    end = len(repo)
    for rev in range(start, end):
        revs.append(rev)
        ctx = get_ctx(repo, rev)
        commit_id = mercurial.node.hex(ctx.node())
        branch = ctx.branch()
        commits.append((commit_id, branch))

    parent_heads = []
    if check_heads:
        parent_heads = _check_heads(repo, start, end, revs)
    return commits, parent_heads


def _check_heads(repo, start, end, commits):
    from vcsserver.hgcompat import get_ctx
    changelog = repo.changelog
    parents = set()

    for new_rev in commits:
        for p in changelog.parentrevs(new_rev):
            if p == mercurial.node.nullrev:
                continue
            if p < start:
                parents.add(p)

    for p in parents:
        branch = get_ctx(repo, p).branch()
        # The heads descending from that parent, on the same branch
        parent_heads = set([p])
        reachable = set([p])
        for x in range(p + 1, end):
            if get_ctx(repo, x).branch() != branch:
                continue
            for pp in changelog.parentrevs(x):
                if pp in reachable:
                    reachable.add(x)
                    parent_heads.discard(pp)
                    parent_heads.add(x)
        # More than one head? Suggest merging
        if len(parent_heads) > 1:
            return list(parent_heads)

    return []


def _get_git_env():
    env = {}
    for k, v in os.environ.items():
        if k.startswith('GIT'):
            env[k] = v

    # serialized version
    return [(k, v) for k, v in env.items()]


def _get_hg_env(old_rev, new_rev, txnid, repo_path):
    env = {}
    for k, v in os.environ.items():
        if k.startswith('HG'):
            env[k] = v

    env['HG_NODE'] = old_rev
    env['HG_NODE_LAST'] = new_rev
    env['HG_TXNID'] = txnid
    env['HG_PENDING'] = repo_path

    return [(k, v) for k, v in env.items()]


def repo_size(ui, repo, **kwargs):
    extras = _extras_from_ui(ui)
    return _call_hook('repo_size', extras, HgMessageWriter(ui))


def pre_pull(ui, repo, **kwargs):
    extras = _extras_from_ui(ui)
    return _call_hook('pre_pull', extras, HgMessageWriter(ui))


def pre_pull_ssh(ui, repo, **kwargs):
    extras = _extras_from_ui(ui)
    if extras and extras.get('SSH'):
        return pre_pull(ui, repo, **kwargs)
    return 0


def post_pull(ui, repo, **kwargs):
    extras = _extras_from_ui(ui)
    return _call_hook('post_pull', extras, HgMessageWriter(ui))


def post_pull_ssh(ui, repo, **kwargs):
    extras = _extras_from_ui(ui)
    if extras and extras.get('SSH'):
        return post_pull(ui, repo, **kwargs)
    return 0


def pre_push(ui, repo, node=None, **kwargs):
    """
    Mercurial pre_push hook
    """
    extras = _extras_from_ui(ui)
    detect_force_push = extras.get('detect_force_push')

    rev_data = []
    if node and kwargs.get('hooktype') == 'pretxnchangegroup':
        branches = collections.defaultdict(list)
        commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
        for commit_id, branch in commits:
            branches[branch].append(commit_id)

        for branch, commits in branches.items():
            old_rev = kwargs.get('node_last') or commits[0]
            rev_data.append({
                'total_commits': len(commits),
                'old_rev': old_rev,
                'new_rev': commits[-1],
                'ref': '',
                'type': 'branch',
                'name': branch,
            })

        for push_ref in rev_data:
            push_ref['multiple_heads'] = _heads

            repo_path = os.path.join(
                extras.get('repo_store', ''), extras.get('repository', ''))
            push_ref['hg_env'] = _get_hg_env(
                old_rev=push_ref['old_rev'],
                new_rev=push_ref['new_rev'], txnid=kwargs.get('txnid'),
                repo_path=repo_path)

    extras['hook_type'] = kwargs.get('hooktype', 'pre_push')
    extras['commit_ids'] = rev_data

    return _call_hook('pre_push', extras, HgMessageWriter(ui))


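An illustrative `rev_data` entry as built by `pre_push` above (the hashes are invented and the `hg_env` list is trimmed; the real list carries every `HG*` variable):

    push_ref = {
        'total_commits': 2,
        'old_rev': 'a' * 40,
        'new_rev': 'b' * 40,
        'ref': '',
        'type': 'branch',
        'name': 'default',
        'multiple_heads': [],               # filled when detect_force_push is on
        'hg_env': [('HG_NODE', 'a' * 40)],
    }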
def pre_push_ssh(ui, repo, node=None, **kwargs):
    extras = _extras_from_ui(ui)
    if extras.get('SSH'):
        return pre_push(ui, repo, node, **kwargs)

    return 0


def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
    """
    Mercurial pre_push hook for SSH
    """
    extras = _extras_from_ui(ui)
    if extras.get('SSH'):
        permission = extras['SSH_PERMISSIONS']

        if 'repository.write' == permission or 'repository.admin' == permission:
            return 0

        # non-zero ret code
        return 1

    return 0


def post_push(ui, repo, node, **kwargs):
    """
    Mercurial post_push hook
    """
    extras = _extras_from_ui(ui)

    commit_ids = []
    branches = []
    bookmarks = []
    tags = []

    commits, _heads = _rev_range_hash(repo, node)
    for commit_id, branch in commits:
        commit_ids.append(commit_id)
        if branch not in branches:
            branches.append(branch)

    if hasattr(ui, '_rc_pushkey_branches'):
        bookmarks = ui._rc_pushkey_branches

    extras['hook_type'] = kwargs.get('hooktype', 'post_push')
    extras['commit_ids'] = commit_ids
    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': bookmarks,
        'tags': tags
    }

    return _call_hook('post_push', extras, HgMessageWriter(ui))


def post_push_ssh(ui, repo, node, **kwargs):
    """
    Mercurial post_push hook for SSH
    """
    if _extras_from_ui(ui).get('SSH'):
        return post_push(ui, repo, node, **kwargs)
    return 0


def key_push(ui, repo, **kwargs):
    from vcsserver.hgcompat import get_ctx
    if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
        # store new bookmarks in our UI object propagated later to post_push
        ui._rc_pushkey_branches = get_ctx(repo, kwargs['key']).bookmarks()
    return


# backward compat
log_pull_action = post_pull

# backward compat
log_push_action = post_push


def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
    """
    Old hook name: keep here for backward compatibility.

    This is only required when the installed git hooks are not upgraded.
    """
    pass


def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
    """
    Old hook name: keep here for backward compatibility.

    This is only required when the installed git hooks are not upgraded.
    """
    pass


HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))


def git_pre_pull(extras):
    """
    Pre pull hook.

    :param extras: dictionary containing the keys defined in simplevcs
    :type extras: dict

    :return: status code of the hook. 0 for success.
    :rtype: int
    """
    if 'pull' not in extras['hooks']:
        return HookResponse(0, '')

    stdout = io.BytesIO()
    try:
        status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
    except Exception as error:
        status = 128
        stdout.write('ERROR: %s\n' % str(error))

    return HookResponse(status, stdout.getvalue())


def git_post_pull(extras):
    """
    Post pull hook.

    :param extras: dictionary containing the keys defined in simplevcs
    :type extras: dict

    :return: status code of the hook. 0 for success.
    :rtype: int
    """
    if 'pull' not in extras['hooks']:
        return HookResponse(0, '')

    stdout = io.BytesIO()
    try:
        status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
    except Exception as error:
        status = 128
        stdout.write('ERROR: %s\n' % error)

    return HookResponse(status, stdout.getvalue())


def _parse_git_ref_lines(revision_lines):
    rev_data = []
    for revision_line in revision_lines or []:
        old_rev, new_rev, ref = revision_line.strip().split(' ')
        ref_data = ref.split('/', 2)
        if ref_data[1] in ('tags', 'heads'):
            rev_data.append({
                # NOTE(marcink):
                # we're unable to tell total_commits for git at this point
                # but we set the variable for consistency with GIT
                'total_commits': -1,
                'old_rev': old_rev,
                'new_rev': new_rev,
                'ref': ref,
                'type': ref_data[1],
                'name': ref_data[2],
            })
    return rev_data


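A quick sketch of the pre-/post-receive input `_parse_git_ref_lines` consumes (one line per updated ref, in the format `<old-sha> <new-sha> <ref>`; the values are invented):

    line = '%s %s refs/heads/main' % ('0' * 40, 'f' * 40)
    rev_data = _parse_git_ref_lines([line])
    # rev_data[0] -> {'total_commits': -1, 'old_rev': '00...0',
    #                 'new_rev': 'ff...f', 'ref': 'refs/heads/main',
    #                 'type': 'heads', 'name': 'main'}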
def git_pre_receive(unused_repo_path, revision_lines, env):
    """
    Pre push hook.

    :param extras: dictionary containing the keys defined in simplevcs
    :type extras: dict

    :return: status code of the hook. 0 for success.
    :rtype: int
    """
    extras = json.loads(env['RC_SCM_DATA'])
    rev_data = _parse_git_ref_lines(revision_lines)
    if 'push' not in extras['hooks']:
        return 0
    empty_commit_id = '0' * 40

    detect_force_push = extras.get('detect_force_push')

    for push_ref in rev_data:
        # store our git-env which holds the temp store
        push_ref['git_env'] = _get_git_env()
        push_ref['pruned_sha'] = ''
        if not detect_force_push:
            # don't check for forced-push when we don't need to
            continue

        type_ = push_ref['type']
        new_branch = push_ref['old_rev'] == empty_commit_id
        delete_branch = push_ref['new_rev'] == empty_commit_id
        if type_ == 'heads' and not (new_branch or delete_branch):
            old_rev = push_ref['old_rev']
            new_rev = push_ref['new_rev']
            cmd = [settings.GIT_EXECUTABLE, 'rev-list', old_rev, '^{}'.format(new_rev)]
            stdout, stderr = subprocessio.run_command(
                cmd, env=os.environ.copy())
            # non-empty output means some objects become unreachable,
            # i.e. a forced push was used
            if stdout:
                push_ref['pruned_sha'] = stdout.splitlines()

    extras['hook_type'] = 'pre_receive'
    extras['commit_ids'] = rev_data
    return _call_hook('pre_push', extras, GitMessageWriter())


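The force-push detection above hinges on `git rev-list <old> ^<new>`: any commit reachable from the old tip but not from the new one would be pruned by the update. A standalone sketch of the same check (a hypothetical helper, not part of this module):

    import subprocess

    def is_force_push(repo_path, old_rev, new_rev, git_bin='git'):
        # non-empty output: old history is not contained in the new tip
        out = subprocess.check_output(
            [git_bin, '-C', repo_path, 'rev-list', old_rev, '^' + new_rev])
        return bool(out.strip())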
def git_post_receive(unused_repo_path, revision_lines, env):
    """
    Post push hook.

    :param extras: dictionary containing the keys defined in simplevcs
    :type extras: dict

    :return: status code of the hook. 0 for success.
    :rtype: int
    """
    extras = json.loads(env['RC_SCM_DATA'])
    if 'push' not in extras['hooks']:
        return 0

    rev_data = _parse_git_ref_lines(revision_lines)

    git_revs = []

    # N.B.(skreft): it is ok to just call git, as git before calling a
    # subcommand sets the PATH environment variable so that it points to the
    # correct version of the git executable.
    empty_commit_id = '0' * 40
    branches = []
    tags = []
    for push_ref in rev_data:
        type_ = push_ref['type']

        if type_ == 'heads':
            if push_ref['old_rev'] == empty_commit_id:
                # starting new branch case
                if push_ref['name'] not in branches:
                    branches.append(push_ref['name'])

                # Fix up head revision if needed
                cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
                try:
                    subprocessio.run_command(cmd, env=os.environ.copy())
                except Exception:
-                    cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
-                           'refs/heads/%s' % push_ref['name']]
+                    cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', '"HEAD"',
+                           '"refs/heads/%s"' % push_ref['name']]
                    print("Setting default branch to %s" % push_ref['name'])
                    subprocessio.run_command(cmd, env=os.environ.copy())

                cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
                       '--format=%(refname)', 'refs/heads/*']
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                heads = stdout
                heads = heads.replace(push_ref['ref'], '')
                heads = ' '.join(head for head
                                 in heads.splitlines() if head) or '.'
                cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
                       '--pretty=format:%H', '--', push_ref['new_rev'],
                       '--not', heads]
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                git_revs.extend(stdout.splitlines())
            elif push_ref['new_rev'] == empty_commit_id:
                # delete branch case
                git_revs.append('delete_branch=>%s' % push_ref['name'])
            else:
                if push_ref['name'] not in branches:
                    branches.append(push_ref['name'])

                cmd = [settings.GIT_EXECUTABLE, 'log',
                       '{old_rev}..{new_rev}'.format(**push_ref),
                       '--reverse', '--pretty=format:%H']
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                git_revs.extend(stdout.splitlines())
        elif type_ == 'tags':
            if push_ref['name'] not in tags:
                tags.append(push_ref['name'])
            git_revs.append('tag=>%s' % push_ref['name'])

    extras['hook_type'] = 'post_receive'
    extras['commit_ids'] = git_revs
    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': [],
        'tags': tags,
    }

    if 'repo_size' in extras['hooks']:
        try:
            _call_hook('repo_size', extras, GitMessageWriter())
        except:
            pass

    return _call_hook('post_push', extras, GitMessageWriter())


def _get_extras_from_txn_id(path, txn_id):
    extras = {}
    try:
        cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
               '-t', txn_id,
               '--revprop', path, 'rc-scm-extras']
        stdout, stderr = subprocessio.run_command(
            cmd, env=os.environ.copy())
        extras = json.loads(base64.urlsafe_b64decode(stdout))
    except Exception:
        log.exception('Failed to extract extras info from txn_id')

    return extras


def _get_extras_from_commit_id(commit_id, path):
    extras = {}
    try:
        cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
               '-r', commit_id,
               '--revprop', path, 'rc-scm-extras']
        stdout, stderr = subprocessio.run_command(
            cmd, env=os.environ.copy())
        extras = json.loads(base64.urlsafe_b64decode(stdout))
    except Exception:
        log.exception('Failed to extract extras info from commit_id')

    return extras


def svn_pre_commit(repo_path, commit_data, env):
    path, txn_id = commit_data
    branches = []
    tags = []

    if env.get('RC_SCM_DATA'):
        extras = json.loads(env['RC_SCM_DATA'])
    else:
        # fallback method to read from TXN-ID stored data
        extras = _get_extras_from_txn_id(path, txn_id)
    if not extras:
        return 0

    extras['hook_type'] = 'pre_commit'
    extras['commit_ids'] = [txn_id]
    extras['txn_id'] = txn_id
    extras['new_refs'] = {
        'total_commits': 1,
        'branches': branches,
        'bookmarks': [],
        'tags': tags,
    }

    return _call_hook('pre_push', extras, SvnMessageWriter())


def svn_post_commit(repo_path, commit_data, env):
    """
    commit_data is path, rev, txn_id
    """
    if len(commit_data) == 3:
        path, commit_id, txn_id = commit_data
    elif len(commit_data) == 2:
        log.error('Failed to extract txn_id from commit_data using legacy method. '
                  'Some functionality might be limited')
        path, commit_id = commit_data
        txn_id = None

    branches = []
    tags = []

    if env.get('RC_SCM_DATA'):
        extras = json.loads(env['RC_SCM_DATA'])
    else:
        # fallback method to read from TXN-ID stored data
        extras = _get_extras_from_commit_id(commit_id, path)
    if not extras:
        return 0

    extras['hook_type'] = 'post_commit'
    extras['commit_ids'] = [commit_id]
    extras['txn_id'] = txn_id
    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': [],
        'tags': tags,
        'total_commits': 1,
    }

    if 'repo_size' in extras['hooks']:
        try:
            _call_hook('repo_size', extras, SvnMessageWriter())
        except Exception:
            pass

    return _call_hook('post_push', extras, SvnMessageWriter())
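How the `rc-scm-extras` revision property round-trips, as implied by the svnlook calls above (a sketch; the payload is invented and the writer side lives outside this file):

    import json, base64

    extras = {'hooks': ['push'], 'repository': 'example-repo'}
    encoded = base64.urlsafe_b64encode(json.dumps(extras).encode('utf-8'))
    # stored on the revision/txn as 'rc-scm-extras'; the hooks decode it back:
    assert json.loads(base64.urlsafe_b64decode(encoded)) == extras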
@@ -1,702 +1,740 b''
# RhodeCode VCSServer provides access to different vcs backends via network.
# Copyright (C) 2014-2020 RhodeCode GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

import os
import sys
import base64
import locale
import logging
import uuid
+import time
import wsgiref.util
import traceback
import tempfile
+import psutil
+
from itertools import chain
from io import StringIO

import simplejson as json
import msgpack
import configparser
+
from pyramid.config import Configurator
from pyramid.settings import asbool, aslist
from pyramid.wsgi import wsgiapp
from pyramid.response import Response

+from vcsserver.config.settings_maker import SettingsMaker
from vcsserver.utils import safe_int
+from vcsserver.lib.statsd_client import StatsdClient

log = logging.getLogger(__name__)

# due to Mercurial/glibc2.27 problems we need to detect if locale settings are
# causing problems and "fix" it in case they do and fallback to LC_ALL = C

try:
    locale.setlocale(locale.LC_ALL, '')
except locale.Error as e:
    log.error(
        'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
    os.environ['LC_ALL'] = 'C'

+
import vcsserver
from vcsserver import remote_wsgi, scm_app, settings, hgpatches
from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
from vcsserver.echo_stub.echo_app import EchoApp
from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
from vcsserver.lib.exc_tracking import store_exception
from vcsserver.server import VcsServer

strict_vcs = True

git_import_err = None
try:
    from vcsserver.git import GitFactory, GitRemote
except ImportError as e:
    GitFactory = None
    GitRemote = None
    git_import_err = e
    if strict_vcs:
        raise


hg_import_err = None
try:
    from vcsserver.hg import MercurialFactory, HgRemote
except ImportError as e:
    MercurialFactory = None
    HgRemote = None
    hg_import_err = e
    if strict_vcs:
        raise


svn_import_err = None
try:
    from vcsserver.svn import SubversionFactory, SvnRemote
except ImportError as e:
    SubversionFactory = None
    SvnRemote = None
    svn_import_err = e
    if strict_vcs:
        raise


def _is_request_chunked(environ):
    stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
    return stream


-def _int_setting(settings, name, default):
-    settings[name] = int(settings.get(name, default))
-    return settings[name]
-
-
-def _bool_setting(settings, name, default):
-    input_val = settings.get(name, default)
-    if isinstance(input_val, str):
-        input_val = input_val.encode('utf8')
-    settings[name] = asbool(input_val)
-    return settings[name]
-
-
-def _list_setting(settings, name, default):
-    raw_value = settings.get(name, default)
-
-    # Otherwise we assume it uses pyramids space/newline separation.
-    settings[name] = aslist(raw_value)
-    return settings[name]
-
-
-def _string_setting(settings, name, default, lower=True, default_when_empty=False):
-    value = settings.get(name, default)
-
-    if default_when_empty and not value:
-        # use default value when value is empty
-        value = default
-
-    if lower:
-        value = value.lower()
-    settings[name] = value
-    return settings[name]
+def log_max_fd():
+    try:
+        maxfd = psutil.Process().rlimit(psutil.RLIMIT_NOFILE)[1]
+        log.info('Max file descriptors value: %s', maxfd)
+    except Exception:
+        pass


class VCS(object):
    def __init__(self, locale_conf=None, cache_config=None):
        self.locale = locale_conf
        self.cache_config = cache_config
        self._configure_locale()

+        log_max_fd()
+
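The new `log_max_fd` helper reads the hard limit on open file descriptors at startup; a standalone equivalent (assumes `psutil` is installed and a platform where `Process.rlimit` is available, i.e. Linux):

    import psutil

    soft, hard = psutil.Process().rlimit(psutil.RLIMIT_NOFILE)
    print('fd limits: soft=%s hard=%s' % (soft, hard))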
        if GitFactory and GitRemote:
            git_factory = GitFactory()
            self._git_remote = GitRemote(git_factory)
        else:
            log.error("Git client import failed: %s", git_import_err)

        if MercurialFactory and HgRemote:
            hg_factory = MercurialFactory()
            self._hg_remote = HgRemote(hg_factory)
        else:
            log.error("Mercurial client import failed: %s", hg_import_err)

        if SubversionFactory and SvnRemote:
            svn_factory = SubversionFactory()

            # hg factory is used for svn url validation
            hg_factory = MercurialFactory()
            self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
        else:
            log.error("Subversion client import failed: %s", svn_import_err)

        self._vcsserver = VcsServer()

    def _configure_locale(self):
        if self.locale:
            log.info('Settings locale: `LC_ALL` to %s', self.locale)
        else:
            log.info('Configuring locale subsystem based on environment variables')
        try:
            # If self.locale is the empty string, then the locale
            # module will use the environment variables. See the
            # documentation of the package `locale`.
            locale.setlocale(locale.LC_ALL, self.locale)

            language_code, encoding = locale.getlocale()
            log.info(
                'Locale set to language code "%s" with encoding "%s".',
                language_code, encoding)
        except locale.Error:
            log.exception('Cannot set locale, not configuring the locale system')


class WsgiProxy(object):
    def __init__(self, wsgi):
        self.wsgi = wsgi

    def __call__(self, environ, start_response):
        input_data = environ['wsgi.input'].read()
        input_data = msgpack.unpackb(input_data)

        error = None
        try:
            data, status, headers = self.wsgi.handle(
                input_data['environment'], input_data['input_data'],
                *input_data['args'], **input_data['kwargs'])
        except Exception as e:
            data, status, headers = [], None, None
            error = {
                'message': str(e),
                '_vcs_kind': getattr(e, '_vcs_kind', None)
            }

        start_response(200, {})
        return self._iterator(error, status, headers, data)

    def _iterator(self, error, status, headers, data):
        initial_data = [
            error,
            status,
            headers,
        ]

        for d in chain(initial_data, data):
            yield msgpack.packb(d)


def not_found(request):
    return {'status': '404 NOT FOUND'}


class VCSViewPredicate(object):
    def __init__(self, val, config):
        self.remotes = val

    def text(self):
        return 'vcs view method = %s' % (list(self.remotes.keys()),)

    phash = text

    def __call__(self, context, request):
        """
        View predicate that returns true if given backend is supported by
        defined remotes.
        """
        backend = request.matchdict.get('backend')
        return backend in self.remotes


class HTTPApplication(object):
    ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')

    remote_wsgi = remote_wsgi
    _use_echo_app = False

    def __init__(self, settings=None, global_config=None):
-        self._sanitize_settings_and_apply_defaults(settings)

        self.config = Configurator(settings=settings)
+        # Init our statsd at very start
+        self.config.registry.statsd = StatsdClient.statsd
+
        self.global_config = global_config
        self.config.include('vcsserver.lib.rc_cache')

        settings_locale = settings.get('locale', '') or 'en_US.UTF-8'
        vcs = VCS(locale_conf=settings_locale, cache_config=settings)
        self._remotes = {
            'hg': vcs._hg_remote,
            'git': vcs._git_remote,
            'svn': vcs._svn_remote,
            'server': vcs._vcsserver,
        }
        if settings.get('dev.use_echo_app', 'false').lower() == 'true':
            self._use_echo_app = True
            log.warning("Using EchoApp for VCS operations.")
            self.remote_wsgi = remote_wsgi_stub

        self._configure_settings(global_config, settings)
+
        self._configure()

    def _configure_settings(self, global_config, app_settings):
        """
        Configure the settings module.
        """
        settings_merged = global_config.copy()
        settings_merged.update(app_settings)

        git_path = app_settings.get('git_path', None)
        if git_path:
            settings.GIT_EXECUTABLE = git_path
        binary_dir = app_settings.get('core.binary_dir', None)
        if binary_dir:
            settings.BINARY_DIR = binary_dir

        # Store the settings to make them available to other modules.
        vcsserver.PYRAMID_SETTINGS = settings_merged
        vcsserver.CONFIG = settings_merged

-    def _sanitize_settings_and_apply_defaults(self, settings):
-        temp_store = tempfile.gettempdir()
-        default_cache_dir = os.path.join(temp_store, 'rc_cache')
-
-        # save default, cache dir, and use it for all backends later.
-        default_cache_dir = _string_setting(
-            settings,
-            'cache_dir',
-            default_cache_dir, lower=False, default_when_empty=True)
-
-        # ensure we have our dir created
-        if not os.path.isdir(default_cache_dir):
-            os.makedirs(default_cache_dir, mode=0o755)
-
-        # exception store cache
-        _string_setting(
-            settings,
-            'exception_tracker.store_path',
-            temp_store, lower=False, default_when_empty=True)
-
-        # repo_object cache
-        _string_setting(
-            settings,
-            'rc_cache.repo_object.backend',
-            'dogpile.cache.rc.file_namespace', lower=False)
-        _int_setting(
-            settings,
-            'rc_cache.repo_object.expiration_time',
-            30 * 24 * 60 * 60)
-        _string_setting(
-            settings,
-            'rc_cache.repo_object.arguments.filename',
-            os.path.join(default_cache_dir, 'vcsserver_cache_1'), lower=False)
-
    def _configure(self):
        self.config.add_renderer(name='msgpack', factory=self._msgpack_renderer_factory)

        self.config.add_route('service', '/_service')
        self.config.add_route('status', '/status')
        self.config.add_route('hg_proxy', '/proxy/hg')
        self.config.add_route('git_proxy', '/proxy/git')

        # rpc methods
        self.config.add_route('vcs', '/{backend}')

        # streaming rpc remote methods
        self.config.add_route('vcs_stream', '/{backend}/stream')

        # vcs operations clone/push as streaming
        self.config.add_route('stream_git', '/stream/git/*repo_name')
        self.config.add_route('stream_hg', '/stream/hg/*repo_name')

        self.config.add_view(self.status_view, route_name='status', renderer='json')
        self.config.add_view(self.service_view, route_name='service', renderer='msgpack')

        self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
        self.config.add_view(self.git_proxy(), route_name='git_proxy')
        self.config.add_view(self.vcs_view, route_name='vcs', renderer='msgpack',
                             vcs_view=self._remotes)
        self.config.add_view(self.vcs_stream_view, route_name='vcs_stream',
                             vcs_view=self._remotes)

        self.config.add_view(self.hg_stream(), route_name='stream_hg')
        self.config.add_view(self.git_stream(), route_name='stream_git')

        self.config.add_view_predicate('vcs_view', VCSViewPredicate)

        self.config.add_notfound_view(not_found, renderer='json')

        self.config.add_view(self.handle_vcs_exception, context=Exception)

        self.config.add_tween(
            'vcsserver.tweens.request_wrapper.RequestWrapperTween',
        )
        self.config.add_request_method(
            'vcsserver.lib.request_counter.get_request_counter',
            'request_count')

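The routes above define the server's whole HTTP surface. As a quick illustration, the `status` route can be probed with any HTTP client; a minimal sketch (the bind address is an assumption, use whatever `[server:main]` configures):

# Hypothetical smoke test for the `/status` route wired above.
import requests

resp = requests.get('http://127.0.0.1:9900/status')
print(resp.json())  # e.g. {'status': 'OK', 'vcsserver_version': '...', 'pid': 12345}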
    def wsgi_app(self):
        return self.config.make_wsgi_app()

    def _vcs_view_params(self, request):
        remote = self._remotes[request.matchdict['backend']]
        payload = msgpack.unpackb(request.body, use_list=True)
        method = payload.get('method')
        params = payload['params']
        wire = params.get('wire')
        args = params.get('args')
        kwargs = params.get('kwargs')
        context_uid = None

        if wire:
            try:
                wire['context'] = context_uid = uuid.UUID(wire['context'])
            except KeyError:
                pass
            args.insert(0, wire)
        repo_state_uid = wire.get('repo_state_uid') if wire else None

        # NOTE(marcink): trading complexity for slight performance
        if log.isEnabledFor(logging.DEBUG):
            no_args_methods = [
-                'archive_repo'
+
            ]
            if method in no_args_methods:
                call_args = ''
            else:
                call_args = args[1:]

-            log.debug('method requested:%s with args:%s kwargs:%s context_uid: %s, repo_state_uid:%s',
+            log.debug('Method requested:`%s` with args:%s kwargs:%s context_uid: %s, repo_state_uid:%s',
                      method, call_args, kwargs, context_uid, repo_state_uid)

+        statsd = request.registry.statsd
+        if statsd:
+            statsd.incr(
+                'vcsserver_method_total', tags=[
+                    "method:{}".format(method),
+                ])
        return payload, remote, method, args, kwargs

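For reference, a sketch of the msgpack body `_vcs_view_params` unpacks; the field layout follows the code above, while the method name is a placeholder, not taken from this diff:

# Client-side RPC payload for the `/{backend}` route (hypothetical method).
import uuid
import msgpack

payload = {
    'id': str(uuid.uuid4()),
    'method': 'is_empty',  # placeholder remote method name
    'params': {
        'wire': {'context': str(uuid.uuid4()), 'repo_state_uid': None},
        'args': [],
        'kwargs': {},
    },
}
body = msgpack.packb(payload)  # POST this to http://<vcsserver>/git, /hg or /svn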
    def vcs_view(self, request):

        payload, remote, method, args, kwargs = self._vcs_view_params(request)
        payload_id = payload.get('id')

        try:
            resp = getattr(remote, method)(*args, **kwargs)
        except Exception as e:
            exc_info = list(sys.exc_info())
            exc_type, exc_value, exc_traceback = exc_info

            org_exc = getattr(e, '_org_exc', None)
            org_exc_name = None
            org_exc_tb = ''
            if org_exc:
                org_exc_name = org_exc.__class__.__name__
                org_exc_tb = getattr(e, '_org_exc_tb', '')
                # replace our "faked" exception with our org
                exc_info[0] = org_exc.__class__
                exc_info[1] = org_exc

            should_store_exc = True
            if org_exc:
                def get_exc_fqn(_exc_obj):
                    module_name = getattr(org_exc.__class__, '__module__', 'UNKNOWN')
                    return module_name + '.' + org_exc_name

                exc_fqn = get_exc_fqn(org_exc)

                if exc_fqn in ['mercurial.error.RepoLookupError',
                               'vcsserver.exceptions.RefNotFoundException']:
                    should_store_exc = False

            if should_store_exc:
-                store_exception(id(exc_info), exc_info)
+                store_exception(id(exc_info), exc_info, request_path=request.path)

            tb_info = ''.join(
                traceback.format_exception(exc_type, exc_value, exc_traceback))

            type_ = e.__class__.__name__
            if type_ not in self.ALLOWED_EXCEPTIONS:
                type_ = None

            resp = {
                'id': payload_id,
                'error': {
                    'message': e.message,
                    'traceback': tb_info,
                    'org_exc': org_exc_name,
                    'org_exc_tb': org_exc_tb,
                    'type': type_
                }
            }
+
            try:
                resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
            except AttributeError:
                pass
        else:
            resp = {
                'id': payload_id,
                'result': resp
            }

        return resp

    def vcs_stream_view(self, request):
        payload, remote, method, args, kwargs = self._vcs_view_params(request)
        # this method carries a `stream:` marker; strip it here
        method = method.split('stream:')[-1]
        chunk_size = safe_int(payload.get('chunk_size')) or 4096

        try:
            resp = getattr(remote, method)(*args, **kwargs)
        except Exception as e:
            raise

        def get_chunked_data(method_resp):
            stream = StringIO(method_resp)
            while 1:
                chunk = stream.read(chunk_size)
                if not chunk:
                    break
                yield chunk

        response = Response(app_iter=get_chunked_data(resp))
        response.content_type = 'application/octet-stream'

        return response

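A matching client consumes the octet-stream response incrementally; a sketch with the requests library (URL and chunk size are illustrative, and the payload's method field would carry the `stream:` prefix discussed above):

# Hypothetical consumer of the chunked stream response.
import requests

with requests.post('http://127.0.0.1:9900/git/stream', data=body, stream=True) as r:
    for chunk in r.iter_content(chunk_size=4096):
        handle(chunk)  # `handle` is a placeholder consumer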
    def status_view(self, request):
        import vcsserver
        return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
                'pid': os.getpid()}

    def service_view(self, request):
        import vcsserver

        payload = msgpack.unpackb(request.body, use_list=True)
        server_config, app_config = {}, {}

        try:
            path = self.global_config['__file__']
            config = configparser.RawConfigParser()

            config.read(path)

            if config.has_section('server:main'):
                server_config = dict(config.items('server:main'))
            if config.has_section('app:main'):
                app_config = dict(config.items('app:main'))

        except Exception:
            log.exception('Failed to read .ini file for display')

        environ = list(os.environ.items())

        resp = {
            'id': payload.get('id'),
            'result': dict(
                version=vcsserver.__version__,
                config=server_config,
                app_config=app_config,
                environ=environ,
                payload=payload,
            )
        }
        return resp

    def _msgpack_renderer_factory(self, info):
        def _render(value, system):
            request = system.get('request')
            if request is not None:
                response = request.response
                ct = response.content_type
                if ct == response.default_content_type:
                    response.content_type = 'application/x-msgpack'
            return msgpack.packb(value)
        return _render

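Responses produced by this renderer are decoded on the client side with plain msgpack; `raw_response_body` below stands in for the bytes read from the HTTP response:

# Decoding an application/x-msgpack response from the renderer above.
import msgpack

result = msgpack.unpackb(raw_response_body, raw=False)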
    def set_env_from_config(self, environ, config):
        dict_conf = {}
        try:
            for elem in config:
                if elem[0] == 'rhodecode':
                    dict_conf = json.loads(elem[2])
                    break
        except Exception:
            log.exception('Failed to fetch SCM CONFIG')
            return

        username = dict_conf.get('username')
        if username:
            environ['REMOTE_USER'] = username
            # mercurial specific, some extension APIs rely on this
            environ['HGUSER'] = username

        ip = dict_conf.get('ip')
        if ip:
            environ['REMOTE_HOST'] = ip

        if _is_request_chunked(environ):
            # set the compatibility flag for webob
            environ['wsgi.input_terminated'] = True

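The config iterable scanned above holds 3-tuples whose first element is a section name and whose third is a JSON blob; a sketch of the shape (the middle key name and the values are placeholders):

# Illustrative config entry consumed by set_env_from_config.
import json

config = [
    ('rhodecode', 'placeholder_key', json.dumps({'username': 'admin', 'ip': '127.0.0.1'})),
]
environ = {}
# after set_env_from_config(environ, config):
#   environ['REMOTE_USER'] == 'admin', environ['HGUSER'] == 'admin'
#   environ['REMOTE_HOST'] == '127.0.0.1'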
    def hg_proxy(self):
        @wsgiapp
        def _hg_proxy(environ, start_response):
            app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
            return app(environ, start_response)
        return _hg_proxy

    def git_proxy(self):
        @wsgiapp
        def _git_proxy(environ, start_response):
            app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
            return app(environ, start_response)
        return _git_proxy

    def hg_stream(self):
        if self._use_echo_app:
            @wsgiapp
            def _hg_stream(environ, start_response):
                app = EchoApp('fake_path', 'fake_name', None)
                return app(environ, start_response)
            return _hg_stream
        else:
            @wsgiapp
            def _hg_stream(environ, start_response):
                log.debug('http-app: handling hg stream')
                repo_path = environ['HTTP_X_RC_REPO_PATH']
                repo_name = environ['HTTP_X_RC_REPO_NAME']
                packed_config = base64.b64decode(
                    environ['HTTP_X_RC_REPO_CONFIG'])
                config = msgpack.unpackb(packed_config)
                app = scm_app.create_hg_wsgi_app(
                    repo_path, repo_name, config)

                # Consistent path information for hgweb
                environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
                environ['REPO_NAME'] = repo_name
                self.set_env_from_config(environ, config)

                log.debug('http-app: starting app handler '
                          'with %s and process request', app)
                return app(environ, ResponseFilter(start_response))
            return _hg_stream

    def git_stream(self):
        if self._use_echo_app:
            @wsgiapp
            def _git_stream(environ, start_response):
                app = EchoApp('fake_path', 'fake_name', None)
                return app(environ, start_response)
            return _git_stream
        else:
            @wsgiapp
            def _git_stream(environ, start_response):
                log.debug('http-app: handling git stream')
                repo_path = environ['HTTP_X_RC_REPO_PATH']
                repo_name = environ['HTTP_X_RC_REPO_NAME']
                packed_config = base64.b64decode(
                    environ['HTTP_X_RC_REPO_CONFIG'])
                config = msgpack.unpackb(packed_config)

                environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
                self.set_env_from_config(environ, config)

                content_type = environ.get('CONTENT_TYPE', '')

                path = environ['PATH_INFO']
                is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
                log.debug(
                    'LFS: Detecting if request `%s` is LFS server path based '
                    'on content type:`%s`, is_lfs:%s',
                    path, content_type, is_lfs_request)

                if not is_lfs_request:
                    # fallback detection by path
                    if GIT_LFS_PROTO_PAT.match(path):
                        is_lfs_request = True
                        log.debug(
                            'LFS: fallback detection by path of: `%s`, is_lfs:%s',
                            path, is_lfs_request)

                if is_lfs_request:
                    app = scm_app.create_git_lfs_wsgi_app(
                        repo_path, repo_name, config)
                else:
                    app = scm_app.create_git_wsgi_app(
                        repo_path, repo_name, config)

                log.debug('http-app: starting app handler '
                          'with %s and process request', app)

                return app(environ, start_response)

        return _git_stream
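The stream handlers are driven entirely by X-RC-* headers injected by RhodeCode; a sketch of the relevant WSGI environ entries (paths, repo name, and the content type value are placeholders):

# Illustrative environ for the hg/git stream handlers above.
import base64
import msgpack

environ = {
    'HTTP_X_RC_REPO_PATH': '/srv/repos/demo',
    'HTTP_X_RC_REPO_NAME': 'demo',
    'HTTP_X_RC_PATH_INFO': '/demo/info/refs',
    'HTTP_X_RC_REPO_CONFIG': base64.b64encode(msgpack.packb([])).decode('ascii'),
    'CONTENT_TYPE': 'application/x-git-upload-pack-request',  # placeholder
}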

    def handle_vcs_exception(self, exception, request):
        _vcs_kind = getattr(exception, '_vcs_kind', '')
        if _vcs_kind == 'repo_locked':
            # Get custom repo-locked status code if present.
            status_code = request.headers.get('X-RC-Locked-Status-Code')
            return HTTPRepoLocked(
                title=exception.message, status_code=status_code)

        elif _vcs_kind == 'repo_branch_protected':
            # Get custom repo-branch-protected status code if present.
            return HTTPRepoBranchProtected(title=exception.message)

        exc_info = request.exc_info
        store_exception(id(exc_info), exc_info)

        traceback_info = 'unavailable'
        if request.exc_info:
            exc_type, exc_value, exc_tb = request.exc_info
            traceback_info = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))

        log.error(
            'error occurred handling this request for path: %s, \n tb: %s',
            request.path, traceback_info)
+
+        statsd = request.registry.statsd
+        if statsd:
+            exc_type = "{}.{}".format(exception.__class__.__module__, exception.__class__.__name__)
+            statsd.incr('vcsserver_exception_total',
+                        tags=["type:{}".format(exc_type)])
        raise exception


class ResponseFilter(object):

    def __init__(self, start_response):
        self._start_response = start_response

    def __call__(self, status, response_headers, exc_info=None):
        headers = tuple(
            (h, v) for h, v in response_headers
            if not wsgiref.util.is_hop_by_hop(h))
        return self._start_response(status, headers, exc_info)

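`wsgiref.util.is_hop_by_hop`, used by ResponseFilter above, matches the RFC 2616 connection-level headers, which must not be forwarded by a WSGI gateway:

# How the hop-by-hop filter behaves.
import wsgiref.util

wsgiref.util.is_hop_by_hop('Transfer-Encoding')  # True  -> dropped
wsgiref.util.is_hop_by_hop('Content-Type')       # False -> passed through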
+def sanitize_settings_and_apply_defaults(global_config, settings):
+    global_settings_maker = SettingsMaker(global_config)
+    settings_maker = SettingsMaker(settings)
+
+    settings_maker.make_setting('logging.autoconfigure', False, parser='bool')
+
+    logging_conf = os.path.join(os.path.dirname(global_config.get('__file__')), 'logging.ini')
+    settings_maker.enable_logging(logging_conf)
+
+    # Default includes, possible to change as a user
+    pyramid_includes = settings_maker.make_setting('pyramid.includes', [], parser='list:newline')
+    log.debug("Using the following pyramid.includes: %s", pyramid_includes)
+
+    settings_maker.make_setting('__file__', global_config.get('__file__'))
+
+    settings_maker.make_setting('pyramid.default_locale_name', 'en')
+    settings_maker.make_setting('locale', 'en_US.UTF-8')
+
+    settings_maker.make_setting('core.binary_dir', '')
+
+    temp_store = tempfile.gettempdir()
+    default_cache_dir = os.path.join(temp_store, 'rc_cache')
+    # save default, cache dir, and use it for all backends later.
+    default_cache_dir = settings_maker.make_setting(
+        'cache_dir',
+        default=default_cache_dir, default_when_empty=True,
+        parser='dir:ensured')
+
+    # exception store cache
+    settings_maker.make_setting(
+        'exception_tracker.store_path',
+        default=os.path.join(default_cache_dir, 'exc_store'), default_when_empty=True,
+        parser='dir:ensured'
+    )
+
+    # repo_object cache defaults
+    settings_maker.make_setting(
+        'rc_cache.repo_object.backend',
+        default='dogpile.cache.rc.file_namespace',
+        parser='string')
+    settings_maker.make_setting(
+        'rc_cache.repo_object.expiration_time',
+        default=30 * 24 * 60 * 60,  # 30days
+        parser='int')
+    settings_maker.make_setting(
+        'rc_cache.repo_object.arguments.filename',
+        default=os.path.join(default_cache_dir, 'vcsserver_cache_repo_object.db'),
+        parser='string')
+
+    # statsd
+    settings_maker.make_setting('statsd.enabled', False, parser='bool')
+    settings_maker.make_setting('statsd.statsd_host', 'statsd-exporter', parser='string')
+    settings_maker.make_setting('statsd.statsd_port', 9125, parser='int')
+    settings_maker.make_setting('statsd.statsd_prefix', '')
+    settings_maker.make_setting('statsd.statsd_ipv6', False, parser='bool')
+
+    settings_maker.env_expand()
+
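A rough usage sketch: paste deploy hands `main()` the two mappings this function mutates in place. The paths and values below are placeholders, and the logging.ini is expected next to the main ini file:

# Hypothetical inputs for sanitize_settings_and_apply_defaults.
global_config = {'__file__': '/etc/rhodecode/vcsserver.ini'}
settings = {'cache_dir': '', 'statsd.enabled': 'false'}
sanitize_settings_and_apply_defaults(global_config, settings)
# settings now carries parsed defaults, e.g.
# settings['rc_cache.repo_object.expiration_time'] == 2592000  (30 days)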
def main(global_config, **settings):
+    start_time = time.time()
+    log.info('Pyramid app config starting')
+
    if MercurialFactory:
        hgpatches.patch_largefiles_capabilities()
        hgpatches.patch_subrepo_type_mapping()

-    app = HTTPApplication(settings=settings, global_config=global_config)
-    return app.wsgi_app()
+    # Fill in and sanitize the defaults & do ENV expansion
+    sanitize_settings_and_apply_defaults(global_config, settings)
+
+    # init and bootstrap StatsdClient
+    StatsdClient.setup(settings)
+
+    pyramid_app = HTTPApplication(settings=settings, global_config=global_config).wsgi_app()
+    total_time = time.time() - start_time
+    log.info('Pyramid app `%s` created and configured in %.2fs',
+             getattr(pyramid_app, 'func_name', 'pyramid_app'), total_time)
+    return pyramid_app

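`main` is the standard Pyramid entry point, so the app can also be bootstrapped programmatically; a sketch (the ini path is a placeholder):

# Hypothetical programmatic bootstrap of the WSGI app defined by main().
from pyramid.paster import get_app

app = get_app('/etc/rhodecode/vcsserver.ini', name='main')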
@@ -1,167 +1,173 b''
# -*- coding: utf-8 -*-

# RhodeCode VCSServer provides access to different vcs backends via network.
# Copyright (C) 2014-2020 RhodeCode GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA


import os
import time
import datetime
import msgpack
import logging
import traceback
import tempfile

log = logging.getLogger(__name__)

# NOTE: Any changes should be synced with exc_tracking at rhodecode.lib.exc_tracking
global_prefix = 'vcsserver'
exc_store_dir_name = 'rc_exception_store_v1'


def exc_serialize(exc_id, tb, exc_type):

    data = {
        'version': 'v1',
        'exc_id': exc_id,
        'exc_utc_date': datetime.datetime.utcnow().isoformat(),
        'exc_timestamp': repr(time.time()),
        'exc_message': tb,
        'exc_type': exc_type,
    }
    return msgpack.packb(data), data


def exc_unserialize(tb):
    return msgpack.unpackb(tb)


def get_exc_store():
    """
    Get the exception store path, creating it if it does not exist yet.
    """
    import vcsserver as app

    exc_store_dir = app.CONFIG.get('exception_tracker.store_path', '') or tempfile.gettempdir()
    _exc_store_path = os.path.join(exc_store_dir, exc_store_dir_name)

    _exc_store_path = os.path.abspath(_exc_store_path)
    if not os.path.isdir(_exc_store_path):
        os.makedirs(_exc_store_path)
        log.debug('Initializing exceptions store at %s', _exc_store_path)
    return _exc_store_path


-def _store_exception(exc_id, exc_info, prefix):
+def _store_exception(exc_id, exc_info, prefix, request_path=''):
    exc_type, exc_value, exc_traceback = exc_info

    tb = ''.join(traceback.format_exception(
        exc_type, exc_value, exc_traceback, None))

    detailed_tb = getattr(exc_value, '_org_exc_tb', None)

    if detailed_tb:
        if isinstance(detailed_tb, str):
            remote_tb = [detailed_tb]

        tb += (
            '\n+++ BEG SOURCE EXCEPTION +++\n\n'
            '{}\n'
            '+++ END SOURCE EXCEPTION +++\n'
            ''.format('\n'.join(remote_tb))
        )

        # Avoid that remote_tb also appears in the frame
        del remote_tb

    exc_type_name = exc_type.__name__
    exc_store_path = get_exc_store()
    exc_data, org_data = exc_serialize(exc_id, tb, exc_type_name)
    exc_pref_id = '{}_{}_{}'.format(exc_id, prefix, org_data['exc_timestamp'])
    if not os.path.isdir(exc_store_path):
        os.makedirs(exc_store_path)
    stored_exc_path = os.path.join(exc_store_path, exc_pref_id)
    with open(stored_exc_path, 'wb') as f:
        f.write(exc_data)
    log.debug('Stored generated exception %s as: %s', exc_id, stored_exc_path)

+    log.error(
+        'error occurred handling this request.\n'
+        'Path: `%s`, tb: %s',
+        request_path, tb)

+
-def store_exception(exc_id, exc_info, prefix=global_prefix):
+def store_exception(exc_id, exc_info, prefix=global_prefix, request_path=''):
    """
    Example usage::

        exc_info = sys.exc_info()
        store_exception(id(exc_info), exc_info)
    """

    try:
-        _store_exception(exc_id=exc_id, exc_info=exc_info, prefix=prefix)
+        _store_exception(exc_id=exc_id, exc_info=exc_info, prefix=prefix,
+                         request_path=request_path)
    except Exception:
        log.exception('Failed to store exception `%s` information', exc_id)
        # this should never fail; if it did, it would crash the server badly.
        pass


def _find_exc_file(exc_id, prefix=global_prefix):
    exc_store_path = get_exc_store()
    if prefix:
        exc_id = '{}_{}'.format(exc_id, prefix)
    else:
        # search without a prefix
        exc_id = '{}'.format(exc_id)

    # we need to search the store for such start pattern as above
    for fname in os.listdir(exc_store_path):
        if fname.startswith(exc_id):
            exc_id = os.path.join(exc_store_path, fname)
            break
        continue
    else:
        exc_id = None

    return exc_id


def _read_exception(exc_id, prefix):
    exc_id_file_path = _find_exc_file(exc_id=exc_id, prefix=prefix)
    if exc_id_file_path:
        with open(exc_id_file_path, 'rb') as f:
            return exc_unserialize(f.read())
    else:
        log.debug('Exception File `%s` not found', exc_id_file_path)
    return None


def read_exception(exc_id, prefix=global_prefix):
    try:
        return _read_exception(exc_id=exc_id, prefix=prefix)
    except Exception:
        log.exception('Failed to read exception `%s` information', exc_id)
        # this should never fail; if it did, it would crash the server badly.
    return None


def delete_exception(exc_id, prefix=global_prefix):
    try:
        exc_id_file_path = _find_exc_file(exc_id, prefix=prefix)
        if exc_id_file_path:
            os.remove(exc_id_file_path)

    except Exception:
        log.exception('Failed to remove exception `%s` information', exc_id)
        # this should never fail; if it did, it would crash the server badly.
        pass
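An end-to-end sketch of the store/read/delete API above, using the docstring's own pattern:

# Full lifecycle of a tracked exception.
import sys

try:
    raise ValueError('boom')
except ValueError:
    exc_info = sys.exc_info()
    exc_id = id(exc_info)
    store_exception(exc_id, exc_info, request_path='/demo')
    data = read_exception(exc_id)   # msgpack-decoded dict, or None
    delete_exception(exc_id)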
@@ -1,72 +1,79 b''
# RhodeCode VCSServer provides access to different vcs backends via network.
# Copyright (C) 2014-2020 RhodeCode GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

import logging
from dogpile.cache import register_backend

register_backend(
    "dogpile.cache.rc.memory_lru", "vcsserver.lib.rc_cache.backends",
    "LRUMemoryBackend")

register_backend(
    "dogpile.cache.rc.file_namespace", "vcsserver.lib.rc_cache.backends",
    "FileNamespaceBackend")

register_backend(
    "dogpile.cache.rc.redis", "vcsserver.lib.rc_cache.backends",
    "RedisPickleBackend")

register_backend(
    "dogpile.cache.rc.redis_msgpack", "vcsserver.lib.rc_cache.backends",
    "RedisMsgPackBackend")


log = logging.getLogger(__name__)

from . import region_meta
-from .utils import (get_default_cache_settings, backend_key_generator, make_region)
+from .utils import (
+    get_default_cache_settings, backend_key_generator, get_or_create_region,
+    clear_cache_namespace, make_region)


def configure_dogpile_cache(settings):
    cache_dir = settings.get('cache_dir')
    if cache_dir:
        region_meta.dogpile_config_defaults['cache_dir'] = cache_dir

    rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])

    # inspect available namespaces
    avail_regions = set()
    for key in rc_cache_data.keys():
        namespace_name = key.split('.', 1)[0]
-        avail_regions.add(namespace_name)
-    log.debug('dogpile: found following cache regions: %s', avail_regions)
-
-    # register them into namespace
-    for region_name in avail_regions:
-        new_region = make_region(
-            name=region_name,
-            function_key_generator=None
-        )
-
-        new_region.configure_from_config(settings, 'rc_cache.{}.'.format(region_name))
-        new_region.function_key_generator = backend_key_generator(new_region.actual_backend)
-        log.debug('dogpile: registering a new region %s[%s]', region_name, new_region.__dict__)
-        region_meta.dogpile_cache_regions[region_name] = new_region
+        if namespace_name in avail_regions:
+            continue
+
+        avail_regions.add(namespace_name)
+        log.debug('dogpile: found following cache regions: %s', namespace_name)
+
+        new_region = make_region(
+            name=namespace_name,
+            function_key_generator=None
+        )
+
+        new_region.configure_from_config(settings, 'rc_cache.{}.'.format(namespace_name))
+        new_region.function_key_generator = backend_key_generator(new_region.actual_backend)
+        if log.isEnabledFor(logging.DEBUG):
+            region_args = dict(backend=new_region.actual_backend.__class__,
+                               region_invalidator=new_region.region_invalidator.__class__)
+            log.debug('dogpile: registering a new region `%s` %s', namespace_name, region_args)
+
+        region_meta.dogpile_cache_regions[namespace_name] = new_region


def includeme(config):
    configure_dogpile_cache(config.registry.settings)
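Example settings consumed by `configure_dogpile_cache` above: one region named `repo_object`, backed by the file namespace backend registered earlier (the paths are placeholders):

# Hypothetical settings for a single file-backed cache region.
settings = {
    'cache_dir': '/tmp/rc_cache',
    'rc_cache.repo_object.backend': 'dogpile.cache.rc.file_namespace',
    'rc_cache.repo_object.expiration_time': '2592000',
    'rc_cache.repo_object.arguments.filename': '/tmp/rc_cache/vcsserver_cache_repo_object.db',
}
configure_dogpile_cache(settings)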
@@ -1,253 +1,329 b''
# RhodeCode VCSServer provides access to different vcs backends via network.
# Copyright (C) 2014-2020 RhodeCode GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

import time
import errno
import logging

import msgpack
import redis

from dogpile.cache.api import CachedValue
from dogpile.cache.backends import memory as memory_backend
from dogpile.cache.backends import file as file_backend
from dogpile.cache.backends import redis as redis_backend
from dogpile.cache.backends.file import NO_VALUE, FileLock
from dogpile.cache.util import memoized_property

+from pyramid.settings import asbool
+
from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
+from vcsserver.utils import safe_str, safe_unicode


_default_max_size = 1024

log = logging.getLogger(__name__)


class LRUMemoryBackend(memory_backend.MemoryBackend):
    key_prefix = 'lru_mem_backend'
    pickle_values = False

    def __init__(self, arguments):
        max_size = arguments.pop('max_size', _default_max_size)

        LRUDictClass = LRUDict
        if arguments.pop('log_key_count', None):
            LRUDictClass = LRUDictDebug

        arguments['cache_dict'] = LRUDictClass(max_size)
        super(LRUMemoryBackend, self).__init__(arguments)

    def delete(self, key):
        try:
            del self._cache[key]
        except KeyError:
            # we don't care if key isn't there at deletion
            pass

    def delete_multi(self, keys):
        for key in keys:
            self.delete(key)


class PickleSerializer(object):

    def _dumps(self, value, safe=False):
        try:
            return pickle.dumps(value)
        except Exception:
            if safe:
                return NO_VALUE
            else:
                raise

    def _loads(self, value, safe=True):
        try:
            return pickle.loads(value)
        except Exception:
            if safe:
                return NO_VALUE
            else:
                raise


class MsgPackSerializer(object):

    def _dumps(self, value, safe=False):
        try:
            return msgpack.packb(value)
        except Exception:
            if safe:
                return NO_VALUE
            else:
                raise

    def _loads(self, value, safe=True):
        """
        pickle maintains the `CachedValue` wrapper of the tuple;
        msgpack does not, so it must be added back in.
        """
        try:
            value = msgpack.unpackb(value, use_list=False)
            return CachedValue(*value)
        except Exception:
            if safe:
                return NO_VALUE
            else:
                raise

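Why `CachedValue(*value)` is needed: msgpack only round-trips the plain tuple, so the dogpile wrapper has to be rebuilt by hand. A sketch (metadata keys are illustrative):

# CachedValue round-trip through msgpack.
from dogpile.cache.api import CachedValue
import msgpack

cv = CachedValue('payload', {'ct': 1234567890.0, 'v': 1})
raw = msgpack.packb(tuple(cv))
restored = CachedValue(*msgpack.unpackb(raw, use_list=False))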
113 import fcntl
116 import fcntl
114 flock_org = fcntl.flock
117 flock_org = fcntl.flock
115
118
116
119
117 class CustomLockFactory(FileLock):
120 class CustomLockFactory(FileLock):
118
121
119 pass
122 pass
120
123
121
124
122 class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
125 class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
123 key_prefix = 'file_backend'
126 key_prefix = 'file_backend'
124
127
125 def __init__(self, arguments):
128 def __init__(self, arguments):
126 arguments['lock_factory'] = CustomLockFactory
129 arguments['lock_factory'] = CustomLockFactory
130 db_file = arguments.get('filename')
131
132 log.debug('initialing %s DB in %s', self.__class__.__name__, db_file)
133 try:
127 super(FileNamespaceBackend, self).__init__(arguments)
134 super(FileNamespaceBackend, self).__init__(arguments)
135 except Exception:
136 log.exception('Failed to initialize db at: %s', db_file)
137 raise
128
138
129 def __repr__(self):
139 def __repr__(self):
130 return '{} `{}`'.format(self.__class__, self.filename)
140 return '{} `{}`'.format(self.__class__, self.filename)
131
141
132 def list_keys(self, prefix=''):
142 def list_keys(self, prefix=''):
133 prefix = '{}:{}'.format(self.key_prefix, prefix)
143 prefix = '{}:{}'.format(self.key_prefix, prefix)
134
144
135 def cond(v):
145 def cond(v):
136 if not prefix:
146 if not prefix:
137 return True
147 return True
138
148
139 if v.startswith(prefix):
149 if v.startswith(prefix):
140 return True
150 return True
141 return False
151 return False
142
152
143 with self._dbm_file(True) as dbm:
153 with self._dbm_file(True) as dbm:
144
154 try:
145 return filter(cond, dbm.keys())
155 return filter(cond, dbm.keys())
156 except Exception:
157 log.error('Failed to fetch DBM keys from DB: %s', self.get_store())
158 raise
146
159
147 def get_store(self):
160 def get_store(self):
148 return self.filename
161 return self.filename
149
162
150 def get(self, key):
163 def _dbm_get(self, key):
151 with self._dbm_file(False) as dbm:
164 with self._dbm_file(False) as dbm:
152 if hasattr(dbm, 'get'):
165 if hasattr(dbm, 'get'):
153 value = dbm.get(key, NO_VALUE)
166 value = dbm.get(key, NO_VALUE)
154 else:
167 else:
155 # gdbm objects lack a .get method
168 # gdbm objects lack a .get method
156 try:
169 try:
157 value = dbm[key]
170 value = dbm[key]
158 except KeyError:
171 except KeyError:
159 value = NO_VALUE
172 value = NO_VALUE
160 if value is not NO_VALUE:
173 if value is not NO_VALUE:
161 value = self._loads(value)
174 value = self._loads(value)
162 return value
175 return value
163
176
177 def get(self, key):
178 try:
179 return self._dbm_get(key)
180 except Exception:
181 log.error('Failed to fetch DBM key %s from DB: %s', key, self.get_store())
182 raise
183
164 def set(self, key, value):
184 def set(self, key, value):
165 with self._dbm_file(True) as dbm:
185 with self._dbm_file(True) as dbm:
166 dbm[key] = self._dumps(value)
186 dbm[key] = self._dumps(value)
167
187
168 def set_multi(self, mapping):
188 def set_multi(self, mapping):
169 with self._dbm_file(True) as dbm:
189 with self._dbm_file(True) as dbm:
170 for key, value in mapping.items():
190 for key, value in mapping.items():
171 dbm[key] = self._dumps(value)
191 dbm[key] = self._dumps(value)
172
192
173
193
174 class BaseRedisBackend(redis_backend.RedisBackend):
194 class BaseRedisBackend(redis_backend.RedisBackend):
195 key_prefix = ''
196
197 def __init__(self, arguments):
198 super(BaseRedisBackend, self).__init__(arguments)
199 self._lock_timeout = self.lock_timeout
200 self._lock_auto_renewal = asbool(arguments.pop("lock_auto_renewal", True))
201
202 if self._lock_auto_renewal and not self._lock_timeout:
203 # set default timeout for auto_renewal
204 self._lock_timeout = 30
175
205
176 def _create_client(self):
206 def _create_client(self):
177 args = {}
207 args = {}
178
208
179 if self.url is not None:
209 if self.url is not None:
180 args.update(url=self.url)
210 args.update(url=self.url)
181
211
182 else:
212 else:
183 args.update(
213 args.update(
184 host=self.host, password=self.password,
214 host=self.host, password=self.password,
185 port=self.port, db=self.db
215 port=self.port, db=self.db
186 )
216 )
187
217
188 connection_pool = redis.ConnectionPool(**args)
218 connection_pool = redis.ConnectionPool(**args)
189
219
190 return redis.StrictRedis(connection_pool=connection_pool)
220 return redis.StrictRedis(connection_pool=connection_pool)
191
221
192 def list_keys(self, prefix=''):
222 def list_keys(self, prefix=''):
193 prefix = '{}:{}*'.format(self.key_prefix, prefix)
223 prefix = '{}:{}*'.format(self.key_prefix, prefix)
194 return self.client.keys(prefix)
224 return self.client.keys(prefix)
195
225
196 def get_store(self):
226 def get_store(self):
197 return self.client.connection_pool
227 return self.client.connection_pool
198
228
199 def get(self, key):
229 def get(self, key):
200 value = self.client.get(key)
230 value = self.client.get(key)
201 if value is None:
231 if value is None:
202 return NO_VALUE
232 return NO_VALUE
203 return self._loads(value)
233 return self._loads(value)
204
234
205 def get_multi(self, keys):
235 def get_multi(self, keys):
206 if not keys:
236 if not keys:
207 return []
237 return []
208 values = self.client.mget(keys)
238 values = self.client.mget(keys)
209 loads = self._loads
239 loads = self._loads
210 return [
240 return [
211 loads(v) if v is not None else NO_VALUE
241 loads(v) if v is not None else NO_VALUE
212 for v in values]
242 for v in values]
213
243
214 def set(self, key, value):
244 def set(self, key, value):
215 if self.redis_expiration_time:
245 if self.redis_expiration_time:
216 self.client.setex(key, self.redis_expiration_time,
246 self.client.setex(key, self.redis_expiration_time,
217 self._dumps(value))
247 self._dumps(value))
218 else:
248 else:
219 self.client.set(key, self._dumps(value))
249 self.client.set(key, self._dumps(value))
220
250
221 def set_multi(self, mapping):
251 def set_multi(self, mapping):
222 dumps = self._dumps
252 dumps = self._dumps
223 mapping = dict(
253 mapping = dict(
224 (k, dumps(v))
254 (k, dumps(v))
225 for k, v in mapping.items()
255 for k, v in mapping.items()
226 )
256 )
227
257
228 if not self.redis_expiration_time:
258 if not self.redis_expiration_time:
229 self.client.mset(mapping)
259 self.client.mset(mapping)
230 else:
260 else:
231 pipe = self.client.pipeline()
261 pipe = self.client.pipeline()
232 for key, value in mapping.items():
262 for key, value in mapping.items():
233 pipe.setex(key, self.redis_expiration_time, value)
263 pipe.setex(key, self.redis_expiration_time, value)
234 pipe.execute()
264 pipe.execute()
235
265
236 def get_mutex(self, key):
266 def get_mutex(self, key):
237 u = redis_backend.u
238 if self.distributed_lock:
267 if self.distributed_lock:
239 lock_key = u('_lock_{0}').format(key)
268 lock_key = u'_lock_{0}'.format(safe_unicode(key))
240 log.debug('Trying to acquire Redis lock for key %s', lock_key)
269 return get_mutex_lock(self.client, lock_key, self._lock_timeout,
241 return self.client.lock(lock_key, self.lock_timeout, self.lock_sleep)
270 auto_renewal=self._lock_auto_renewal)
242 else:
271 else:
243 return None
272 return None
244
273
245
274
246 class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
275 class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
247 key_prefix = 'redis_pickle_backend'
276 key_prefix = 'redis_pickle_backend'
248 pass
277 pass
249
278
250
279
251 class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
280 class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
252 key_prefix = 'redis_msgpack_backend'
281 key_prefix = 'redis_msgpack_backend'
253 pass
282 pass
283
284
285 def get_mutex_lock(client, lock_key, lock_timeout, auto_renewal=False):
286 import redis_lock
287
288 class _RedisLockWrapper(object):
289 """LockWrapper for redis_lock"""
290
291 @classmethod
292 def get_lock(cls):
293 return redis_lock.Lock(
294 redis_client=client,
295 name=lock_key,
296 expire=lock_timeout,
297 auto_renewal=auto_renewal,
298 strict=True,
299 )
300
301 def __repr__(self):
302 return "{}:{}".format(self.__class__.__name__, lock_key)
303
304 def __str__(self):
305 return "{}:{}".format(self.__class__.__name__, lock_key)
306
307 def __init__(self):
308 self.lock = self.get_lock()
309 self.lock_key = lock_key
310
311 def acquire(self, wait=True):
312 log.debug('Trying to acquire Redis lock for key %s', self.lock_key)
313 try:
314 acquired = self.lock.acquire(wait)
315 log.debug('Got lock for key %s, %s', self.lock_key, acquired)
316 return acquired
317 except redis_lock.AlreadyAcquired:
318 return False
319 except redis_lock.AlreadyStarted:
320 # refresh thread exists, but it also means we acquired the lock
321 return True
322
323 def release(self):
324 try:
325 self.lock.release()
326 except redis_lock.NotAcquired:
327 pass
328
329 return _RedisLockWrapper()
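The wrapper above adapts python-redis-lock to the plain acquire/release mutex interface dogpile expects. A hedged usage sketch (client settings, key name and timeout are assumptions, not values from this codebase):

    import redis

    client = redis.StrictRedis()  # assumed local Redis instance
    mutex = get_mutex_lock(client, '_lock_repo_object', lock_timeout=60,
                           auto_renewal=True)
    if mutex.acquire(wait=True):
        try:
            pass  # compute and store the guarded value here
        finally:
            mutex.release()

With auto_renewal=True, python-redis-lock keeps extending the expire time in a background thread while the holder is alive, so a slow creator does not lose the lock mid-computation.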
@@ -1,153 +1,263 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import time
19 import logging
20 import logging
20 import functools
21 import functools
21 from decorator import decorate
22
22
23 from dogpile.cache import CacheRegion
23 from dogpile.cache import CacheRegion
24 from dogpile.cache.util import compat
24 from dogpile.cache.util import compat
25
25
26 from vcsserver.utils import safe_str, sha1
26 from vcsserver.utils import safe_str, sha1
27
27
28 from vcsserver.lib.rc_cache import region_meta
28
29
29 log = logging.getLogger(__name__)
30 log = logging.getLogger(__name__)
30
31
31
32
32 class RhodeCodeCacheRegion(CacheRegion):
33 class RhodeCodeCacheRegion(CacheRegion):
33
34
34 def conditional_cache_on_arguments(
35 def conditional_cache_on_arguments(
35 self, namespace=None,
36 self, namespace=None,
36 expiration_time=None,
37 expiration_time=None,
37 should_cache_fn=None,
38 should_cache_fn=None,
38 to_str=compat.string_type,
39 to_str=compat.string_type,
39 function_key_generator=None,
40 function_key_generator=None,
40 condition=True):
41 condition=True):
41 """
42 """
42 Custom conditional decorator that will not touch any dogpile internals if
43 Custom conditional decorator that will not touch any dogpile internals if
43 the condition isn't met. This works a bit differently than should_cache_fn,
44 the condition isn't met. This works a bit differently than should_cache_fn,
44 and it's faster in cases where we don't ever want to compute cached values.
45 and it's faster in cases where we don't ever want to compute cached values.
45 """
46 """
46 expiration_time_is_callable = compat.callable(expiration_time)
47 expiration_time_is_callable = compat.callable(expiration_time)
47
48
48 if function_key_generator is None:
49 if function_key_generator is None:
49 function_key_generator = self.function_key_generator
50 function_key_generator = self.function_key_generator
50
51
52 # workaround for py2 and cython problems, this block should be removed
53 # once we've migrated to py3
54 if 'cython' == 'cython':
55 def decorator(fn):
56 if to_str is compat.string_type:
57 # backwards compatible
58 key_generator = function_key_generator(namespace, fn)
59 else:
60 key_generator = function_key_generator(namespace, fn, to_str=to_str)
61
62 @functools.wraps(fn)
63 def decorate(*arg, **kw):
64 key = key_generator(*arg, **kw)
65
66 @functools.wraps(fn)
67 def creator():
68 return fn(*arg, **kw)
69
70 if not condition:
71 return creator()
72
73 timeout = expiration_time() if expiration_time_is_callable \
74 else expiration_time
75
76 return self.get_or_create(key, creator, timeout, should_cache_fn)
77
78 def invalidate(*arg, **kw):
79 key = key_generator(*arg, **kw)
80 self.delete(key)
81
82 def set_(value, *arg, **kw):
83 key = key_generator(*arg, **kw)
84 self.set(key, value)
85
86 def get(*arg, **kw):
87 key = key_generator(*arg, **kw)
88 return self.get(key)
89
90 def refresh(*arg, **kw):
91 key = key_generator(*arg, **kw)
92 value = fn(*arg, **kw)
93 self.set(key, value)
94 return value
95
96 decorate.set = set_
97 decorate.invalidate = invalidate
98 decorate.refresh = refresh
99 decorate.get = get
100 decorate.original = fn
101 decorate.key_generator = key_generator
102 decorate.__wrapped__ = fn
103
104 return decorate
105 return decorator
106
51 def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):
107 def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):
52
108
53 if not condition:
109 if not condition:
54 log.debug('Calling un-cached func:%s', user_func.func_name)
110 log.debug('Calling un-cached method:%s', user_func.__name__)
55 return user_func(*arg, **kw)
111 start = time.time()
112 result = user_func(*arg, **kw)
113 total = time.time() - start
114 log.debug('un-cached method:%s took %.4fs', user_func.__name__, total)
115 return result
56
116
57 key = key_generator(*arg, **kw)
117 key = key_generator(*arg, **kw)
58
118
59 timeout = expiration_time() if expiration_time_is_callable \
119 timeout = expiration_time() if expiration_time_is_callable \
60 else expiration_time
120 else expiration_time
61
121
62 log.debug('Calling cached fn:%s', user_func.func_name)
122 log.debug('Calling cached method:`%s`', user_func.__name__)
63 return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))
123 return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))
64
124
65 def cache_decorator(user_func):
125 def cache_decorator(user_func):
66 if to_str is compat.string_type:
126 if to_str is compat.string_type:
67 # backwards compatible
127 # backwards compatible
68 key_generator = function_key_generator(namespace, user_func)
128 key_generator = function_key_generator(namespace, user_func)
69 else:
129 else:
70 key_generator = function_key_generator(namespace, user_func, to_str=to_str)
130 key_generator = function_key_generator(namespace, user_func, to_str=to_str)
71
131
72 def refresh(*arg, **kw):
132 def refresh(*arg, **kw):
73 """
133 """
74 Like invalidate, but regenerates the value instead
134 Like invalidate, but regenerates the value instead
75 """
135 """
76 key = key_generator(*arg, **kw)
136 key = key_generator(*arg, **kw)
77 value = user_func(*arg, **kw)
137 value = user_func(*arg, **kw)
78 self.set(key, value)
138 self.set(key, value)
79 return value
139 return value
80
140
81 def invalidate(*arg, **kw):
141 def invalidate(*arg, **kw):
82 key = key_generator(*arg, **kw)
142 key = key_generator(*arg, **kw)
83 self.delete(key)
143 self.delete(key)
84
144
85 def set_(value, *arg, **kw):
145 def set_(value, *arg, **kw):
86 key = key_generator(*arg, **kw)
146 key = key_generator(*arg, **kw)
87 self.set(key, value)
147 self.set(key, value)
88
148
89 def get(*arg, **kw):
149 def get(*arg, **kw):
90 key = key_generator(*arg, **kw)
150 key = key_generator(*arg, **kw)
91 return self.get(key)
151 return self.get(key)
92
152
93 user_func.set = set_
153 user_func.set = set_
94 user_func.invalidate = invalidate
154 user_func.invalidate = invalidate
95 user_func.get = get
155 user_func.get = get
96 user_func.refresh = refresh
156 user_func.refresh = refresh
97 user_func.key_generator = key_generator
157 user_func.key_generator = key_generator
98 user_func.original = user_func
158 user_func.original = user_func
99
159
100 # Use `decorate` to preserve the signature of :param:`user_func`.
160 # Use `decorate` to preserve the signature of :param:`user_func`.
101
161 return decorator.decorate(user_func, functools.partial(
102 return decorate(user_func, functools.partial(
103 get_or_create_for_user_func, key_generator))
162 get_or_create_for_user_func, key_generator))
104
163
105 return cache_decorator
164 return cache_decorator
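A hedged sketch of how the decorator is consumed (the in-memory backend and function body are illustrative stand-ins; in this codebase the condition flag comes from _cache_on(wire)):

    region = make_region()                    # RhodeCodeCacheRegion
    region.configure('dogpile.cache.memory')  # illustrative backend

    use_cache = True

    @region.conditional_cache_on_arguments(condition=use_cache)
    def _node_type(_repo_id, _path):
        return ('repo', _repo_id, _path)  # stand-in for an expensive lookup

    _node_type('repo-1', 'README.rst')             # computed once, then cached
    _node_type.invalidate('repo-1', 'README.rst')  # drop just this key
    _node_type.refresh('repo-1', 'README.rst')     # recompute and store

With condition=False the wrapped function is called directly and dogpile is never touched, which is the whole point of this variant.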
106
165
107
166
108 def make_region(*arg, **kw):
167 def make_region(*arg, **kw):
109 return RhodeCodeCacheRegion(*arg, **kw)
168 return RhodeCodeCacheRegion(*arg, **kw)
110
169
111
170
112 def get_default_cache_settings(settings, prefixes=None):
171 def get_default_cache_settings(settings, prefixes=None):
113 prefixes = prefixes or []
172 prefixes = prefixes or []
114 cache_settings = {}
173 cache_settings = {}
115 for key in settings.keys():
174 for key in settings.keys():
116 for prefix in prefixes:
175 for prefix in prefixes:
117 if key.startswith(prefix):
176 if key.startswith(prefix):
118 name = key.split(prefix)[1].strip()
177 name = key.split(prefix)[1].strip()
119 val = settings[key]
178 val = settings[key]
120 if isinstance(val, compat.string_types):
179 if isinstance(val, compat.string_types):
121 val = val.strip()
180 val = val.strip()
122 cache_settings[name] = val
181 cache_settings[name] = val
123 return cache_settings
182 return cache_settings
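For example, handing the helper an ini-derived settings dict strips a matching prefix and keeps only the suffixed names (input values are illustrative, not from a shipped config):

    settings = {
        'rc_cache.repo_object.backend': 'dogpile.cache.rc.redis',
        'rc_cache.repo_object.expiration_time': '3600',
        'unrelated.option': 'ignored',
    }
    get_default_cache_settings(settings, prefixes=['rc_cache.repo_object.'])
    # -> {'backend': 'dogpile.cache.rc.redis', 'expiration_time': '3600'}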
124
183
125
184
126 def compute_key_from_params(*args):
185 def compute_key_from_params(*args):
127 """
186 """
128 Helper to compute key from given params to be used in cache manager
187 Helper to compute key from given params to be used in cache manager
129 """
188 """
130 return sha1("_".join(map(safe_str, args)))
189 return sha1("_".join(map(safe_str, args)))
131
190
132
191
133 def backend_key_generator(backend):
192 def backend_key_generator(backend):
134 """
193 """
135 Special wrapper that also sends over the backend to the key generator
194 Special wrapper that also sends over the backend to the key generator
136 """
195 """
137 def wrapper(namespace, fn):
196 def wrapper(namespace, fn):
138 return key_generator(backend, namespace, fn)
197 return key_generator(backend, namespace, fn)
139 return wrapper
198 return wrapper
140
199
141
200
142 def key_generator(backend, namespace, fn):
201 def key_generator(backend, namespace, fn):
143 fname = fn.__name__
202 fname = fn.__name__
144
203
145 def generate_key(*args):
204 def generate_key(*args):
146 backend_prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
205 backend_prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
147 namespace_pref = namespace or 'default_namespace'
206 namespace_pref = namespace or 'default_namespace'
148 arg_key = compute_key_from_params(*args)
207 arg_key = compute_key_from_params(*args)
149 final_key = "{}:{}:{}_{}".format(backend_prefix, namespace_pref, fname, arg_key)
208 final_key = "{}:{}:{}_{}".format(backend_prefix, namespace_pref, fname, arg_key)
150
209
151 return final_key
210 return final_key
152
211
153 return generate_key
212 return generate_key
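Cache keys therefore take the form backend_prefix:namespace:funcname_argshash. An illustrative run (the target function is a placeholder):

    def get_nodes(_repo_id, _path):  # illustrative target function
        pass

    gen = key_generator(None, 'repo_object', get_nodes)
    gen('repo-1', 'trunk')
    # -> 'backend_prefix:repo_object:get_nodes_' + sha1('repo-1_trunk')
    #    (backend=None falls back to the literal 'backend_prefix')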
213
214
215 def get_or_create_region(region_name, region_namespace=None):
216 from vcsserver.lib.rc_cache.backends import FileNamespaceBackend
217 region_obj = region_meta.dogpile_cache_regions.get(region_name)
218 if not region_obj:
219 raise EnvironmentError(
220 'Region `{}` not found in configured regions: {}.'.format(
221 region_name, region_meta.dogpile_cache_regions.keys()))
222
223 region_uid_name = '{}:{}'.format(region_name, region_namespace)
224 if isinstance(region_obj.actual_backend, FileNamespaceBackend):
225 region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
226 if region_exist:
227 log.debug('Using already configured region: %s', region_namespace)
228 return region_exist
229 cache_dir = region_meta.dogpile_config_defaults['cache_dir']
230 expiration_time = region_obj.expiration_time
231
232 if not os.path.isdir(cache_dir):
233 os.makedirs(cache_dir)
234 new_region = make_region(
235 name=region_uid_name,
236 function_key_generator=backend_key_generator(region_obj.actual_backend)
237 )
238 namespace_filename = os.path.join(
239 cache_dir, "{}.cache.dbm".format(region_namespace))
240 # special type that allows 1db per namespace
241 new_region.configure(
242 backend='dogpile.cache.rc.file_namespace',
243 expiration_time=expiration_time,
244 arguments={"filename": namespace_filename}
245 )
246
247 # create and save in region caches
248 log.debug('configuring new region: %s', region_uid_name)
249 region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region
250
251 return region_obj
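Assuming a region named 'repo_object' was registered in region_meta at startup, a caller gets a namespace-scoped region like so (the namespace is illustrative):

    region = get_or_create_region('repo_object', 'repo_id:42')
    # with a FileNamespaceBackend this lazily configures and caches a
    # region backed by <cache_dir>/repo_id:42.cache.dbm, so each
    # namespace gets its own DBM file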
252
253
254 def clear_cache_namespace(cache_region, cache_namespace_uid, invalidate=False):
255 region = get_or_create_region(cache_region, cache_namespace_uid)
256 cache_keys = region.backend.list_keys(prefix=cache_namespace_uid)
257 num_delete_keys = len(cache_keys)
258 if invalidate:
259 region.invalidate(hard=False)
260 else:
261 if num_delete_keys:
262 region.delete_multi(cache_keys)
263 return num_delete_keys
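Taken together, dropping every cached key that belongs to one namespace is a single call (region and namespace names are illustrative):

    deleted = clear_cache_namespace('repo_object', 'repo_id:42')
    log.debug('removed %s cache keys', deleted)

With invalidate=True the region is soft-invalidated instead: existing entries are merely treated as stale and regenerated on next access rather than physically deleted.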
@@ -1,791 +1,867 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19
19
20 import os
20 import os
21 import subprocess
21 import subprocess
22 import time
22 from urllib.error import URLError
23 from urllib.error import URLError
23 import urllib.parse
24 import urllib.parse
24 import logging
25 import logging
25 import posixpath as vcspath
26 import posixpath as vcspath
26 import io
27 import io
27 import urllib.request, urllib.parse, urllib.error
28 import urllib.request, urllib.parse, urllib.error
28 import traceback
29 import traceback
29
30
30 import svn.client
31 import svn.client
31 import svn.core
32 import svn.core
32 import svn.delta
33 import svn.delta
33 import svn.diff
34 import svn.diff
34 import svn.fs
35 import svn.fs
35 import svn.repos
36 import svn.repos
36
37
37 from vcsserver import svn_diff, exceptions, subprocessio, settings
38 from vcsserver import svn_diff, exceptions, subprocessio, settings
38 from vcsserver.base import RepoFactory, raise_from_original
39 from vcsserver.base import RepoFactory, raise_from_original, ArchiveNode, archive_repo
40 from vcsserver.exceptions import NoContentException
41 from vcsserver.utils import safe_str
39 from vcsserver.vcs_base import RemoteBase
42 from vcsserver.vcs_base import RemoteBase
40
43
41 log = logging.getLogger(__name__)
44 log = logging.getLogger(__name__)
42
45
43
46
44 svn_compatible_versions_map = {
47 svn_compatible_versions_map = {
45 'pre-1.4-compatible': '1.3',
48 'pre-1.4-compatible': '1.3',
46 'pre-1.5-compatible': '1.4',
49 'pre-1.5-compatible': '1.4',
47 'pre-1.6-compatible': '1.5',
50 'pre-1.6-compatible': '1.5',
48 'pre-1.8-compatible': '1.7',
51 'pre-1.8-compatible': '1.7',
49 'pre-1.9-compatible': '1.8',
52 'pre-1.9-compatible': '1.8',
50 }
53 }
51
54
52 current_compatible_version = '1.12'
55 current_compatible_version = '1.12'
53
56
54
57
55 def reraise_safe_exceptions(func):
58 def reraise_safe_exceptions(func):
56 """Decorator for converting svn exceptions to something neutral."""
59 """Decorator for converting svn exceptions to something neutral."""
57 def wrapper(*args, **kwargs):
60 def wrapper(*args, **kwargs):
58 try:
61 try:
59 return func(*args, **kwargs)
62 return func(*args, **kwargs)
60 except Exception as e:
63 except Exception as e:
61 if not hasattr(e, '_vcs_kind'):
64 if not hasattr(e, '_vcs_kind'):
62 log.exception("Unhandled exception in svn remote call")
65 log.exception("Unhandled exception in svn remote call")
63 raise_from_original(exceptions.UnhandledException(e))
66 raise_from_original(exceptions.UnhandledException(e))
64 raise
67 raise
65 return wrapper
68 return wrapper
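The effect of the decorator: anything without a _vcs_kind marker is logged and re-raised as a neutral UnhandledException, so Subversion-specific error types never cross the wire, while pre-tagged exceptions pass through untouched. A minimal sketch (the decorated function is hypothetical):

    @reraise_safe_exceptions
    def fragile(wire):
        raise RuntimeError('boom')  # carries no _vcs_kind attribute

    # fragile({}) logs the traceback and re-raises the error wrapped in
    # exceptions.UnhandledException via raise_from_original.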
66
69
67
70
68 class SubversionFactory(RepoFactory):
71 class SubversionFactory(RepoFactory):
69 repo_type = 'svn'
72 repo_type = 'svn'
70
73
71 def _create_repo(self, wire, create, compatible_version):
74 def _create_repo(self, wire, create, compatible_version):
72 path = svn.core.svn_path_canonicalize(wire['path'])
75 path = svn.core.svn_path_canonicalize(wire['path'])
73 if create:
76 if create:
74 fs_config = {'compatible-version': current_compatible_version}
77 fs_config = {'compatible-version': current_compatible_version}
75 if compatible_version:
78 if compatible_version:
76
79
77 compatible_version_string = \
80 compatible_version_string = \
78 svn_compatible_versions_map.get(compatible_version) \
81 svn_compatible_versions_map.get(compatible_version) \
79 or compatible_version
82 or compatible_version
80 fs_config['compatible-version'] = compatible_version_string
83 fs_config['compatible-version'] = compatible_version_string
81
84
82 log.debug('Create SVN repo with config "%s"', fs_config)
85 log.debug('Create SVN repo with config "%s"', fs_config)
83 repo = svn.repos.create(path, "", "", None, fs_config)
86 repo = svn.repos.create(path, "", "", None, fs_config)
84 else:
87 else:
85 repo = svn.repos.open(path)
88 repo = svn.repos.open(path)
86
89
87 log.debug('Got SVN object: %s', repo)
90 log.debug('Got SVN object: %s', repo)
88 return repo
91 return repo
89
92
90 def repo(self, wire, create=False, compatible_version=None):
93 def repo(self, wire, create=False, compatible_version=None):
91 """
94 """
92 Get a repository instance for the given path.
95 Get a repository instance for the given path.
93 """
96 """
94 return self._create_repo(wire, create, compatible_version)
97 return self._create_repo(wire, create, compatible_version)
95
98
96
99
97 NODE_TYPE_MAPPING = {
100 NODE_TYPE_MAPPING = {
98 svn.core.svn_node_file: 'file',
101 svn.core.svn_node_file: 'file',
99 svn.core.svn_node_dir: 'dir',
102 svn.core.svn_node_dir: 'dir',
100 }
103 }
101
104
102
105
103 class SvnRemote(RemoteBase):
106 class SvnRemote(RemoteBase):
104
107
105 def __init__(self, factory, hg_factory=None):
108 def __init__(self, factory, hg_factory=None):
106 self._factory = factory
109 self._factory = factory
107 # TODO: Remove once we do not use internal Mercurial objects anymore
110 # TODO: Remove once we do not use internal Mercurial objects anymore
108 # for subversion
111 # for subversion
109 self._hg_factory = hg_factory
112 self._hg_factory = hg_factory
110
113
111 @reraise_safe_exceptions
114 @reraise_safe_exceptions
112 def discover_svn_version(self):
115 def discover_svn_version(self):
113 try:
116 try:
114 import svn.core
117 import svn.core
115 svn_ver = svn.core.SVN_VERSION
118 svn_ver = svn.core.SVN_VERSION
116 except ImportError:
119 except ImportError:
117 svn_ver = None
120 svn_ver = None
118 return svn_ver
121 return svn_ver
119
122
120 @reraise_safe_exceptions
123 @reraise_safe_exceptions
121 def is_empty(self, wire):
124 def is_empty(self, wire):
122
125
123 try:
126 try:
124 return self.lookup(wire, -1) == 0
127 return self.lookup(wire, -1) == 0
125 except Exception:
128 except Exception:
126 log.exception("failed to read object_store")
129 log.exception("failed to read object_store")
127 return False
130 return False
128
131
129 def check_url(self, url, config_items):
132 def check_url(self, url, config_items):
130 # this can throw exception if not installed, but we detect this
133 # this can throw exception if not installed, but we detect this
131 from hgsubversion import svnrepo
134 from hgsubversion import svnrepo
132
135
133 baseui = self._hg_factory._create_config(config_items)
136 baseui = self._hg_factory._create_config(config_items)
134 # the uuid function gets a valid UUID only from a proper repo,
137 # the uuid function gets a valid UUID only from a proper repo,
135 # otherwise it throws an exception
138 # otherwise it throws an exception
136 try:
139 try:
137 svnrepo.svnremoterepo(baseui, url).svn.uuid
140 svnrepo.svnremoterepo(baseui, url).svn.uuid
138 except Exception:
141 except Exception:
139 tb = traceback.format_exc()
142 tb = traceback.format_exc()
140 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
143 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
141 raise URLError(
144 raise URLError(
142 '"%s" is not a valid Subversion source url.' % (url, ))
145 '"%s" is not a valid Subversion source url.' % (url, ))
143 return True
146 return True
144
147
145 def is_path_valid_repository(self, wire, path):
148 def is_path_valid_repository(self, wire, path):
146
149
147 # NOTE(marcink): short circuit the check for SVN repo
150 # NOTE(marcink): short circuit the check for SVN repo
148 # repos.open might be expensive to run, but we have one cheap
151 # repos.open might be expensive to run, but we have one cheap
149 # precondition we can use: checking for the 'format' file
152 # precondition we can use: checking for the 'format' file
150
153
151 if not os.path.isfile(os.path.join(path, 'format')):
154 if not os.path.isfile(os.path.join(path, 'format')):
152 return False
155 return False
153
156
154 try:
157 try:
155 svn.repos.open(path)
158 svn.repos.open(path)
156 except svn.core.SubversionException:
159 except svn.core.SubversionException:
157 tb = traceback.format_exc()
160 tb = traceback.format_exc()
158 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
161 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
159 return False
162 return False
160 return True
163 return True
161
164
162 @reraise_safe_exceptions
165 @reraise_safe_exceptions
163 def verify(self, wire):
166 def verify(self, wire):
164 repo_path = wire['path']
167 repo_path = wire['path']
165 if not self.is_path_valid_repository(wire, repo_path):
168 if not self.is_path_valid_repository(wire, repo_path):
166 raise Exception(
169 raise Exception(
167 "Path %s is not a valid Subversion repository." % repo_path)
170 "Path %s is not a valid Subversion repository." % repo_path)
168
171
169 cmd = ['svnadmin', 'info', repo_path]
172 cmd = ['svnadmin', 'info', repo_path]
170 stdout, stderr = subprocessio.run_command(cmd)
173 stdout, stderr = subprocessio.run_command(cmd)
171 return stdout
174 return stdout
172
175
173 def lookup(self, wire, revision):
176 def lookup(self, wire, revision):
174 if revision not in [-1, None, 'HEAD']:
177 if revision not in [-1, None, 'HEAD']:
175 raise NotImplementedError
178 raise NotImplementedError
176 repo = self._factory.repo(wire)
179 repo = self._factory.repo(wire)
177 fs_ptr = svn.repos.fs(repo)
180 fs_ptr = svn.repos.fs(repo)
178 head = svn.fs.youngest_rev(fs_ptr)
181 head = svn.fs.youngest_rev(fs_ptr)
179 return head
182 return head
180
183
181 def lookup_interval(self, wire, start_ts, end_ts):
184 def lookup_interval(self, wire, start_ts, end_ts):
182 repo = self._factory.repo(wire)
185 repo = self._factory.repo(wire)
183 fsobj = svn.repos.fs(repo)
186 fsobj = svn.repos.fs(repo)
184 start_rev = None
187 start_rev = None
185 end_rev = None
188 end_rev = None
186 if start_ts:
189 if start_ts:
187 start_ts_svn = apr_time_t(start_ts)
190 start_ts_svn = apr_time_t(start_ts)
188 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
191 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
189 else:
192 else:
190 start_rev = 1
193 start_rev = 1
191 if end_ts:
194 if end_ts:
192 end_ts_svn = apr_time_t(end_ts)
195 end_ts_svn = apr_time_t(end_ts)
193 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
196 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
194 else:
197 else:
195 end_rev = svn.fs.youngest_rev(fsobj)
198 end_rev = svn.fs.youngest_rev(fsobj)
196 return start_rev, end_rev
199 return start_rev, end_rev
197
200
198 def revision_properties(self, wire, revision):
201 def revision_properties(self, wire, revision):
199
202
200 cache_on, context_uid, repo_id = self._cache_on(wire)
203 cache_on, context_uid, repo_id = self._cache_on(wire)
201 @self.region.conditional_cache_on_arguments(condition=cache_on)
204 region = self._region(wire)
205 @region.conditional_cache_on_arguments(condition=cache_on)
202 def _revision_properties(_repo_id, _revision):
206 def _revision_properties(_repo_id, _revision):
203 repo = self._factory.repo(wire)
207 repo = self._factory.repo(wire)
204 fs_ptr = svn.repos.fs(repo)
208 fs_ptr = svn.repos.fs(repo)
205 return svn.fs.revision_proplist(fs_ptr, revision)
209 return svn.fs.revision_proplist(fs_ptr, revision)
206 return _revision_properties(repo_id, revision)
210 return _revision_properties(repo_id, revision)
207
211
208 def revision_changes(self, wire, revision):
212 def revision_changes(self, wire, revision):
209
213
210 repo = self._factory.repo(wire)
214 repo = self._factory.repo(wire)
211 fsobj = svn.repos.fs(repo)
215 fsobj = svn.repos.fs(repo)
212 rev_root = svn.fs.revision_root(fsobj, revision)
216 rev_root = svn.fs.revision_root(fsobj, revision)
213
217
214 editor = svn.repos.ChangeCollector(fsobj, rev_root)
218 editor = svn.repos.ChangeCollector(fsobj, rev_root)
215 editor_ptr, editor_baton = svn.delta.make_editor(editor)
219 editor_ptr, editor_baton = svn.delta.make_editor(editor)
216 base_dir = ""
220 base_dir = ""
217 send_deltas = False
221 send_deltas = False
218 svn.repos.replay2(
222 svn.repos.replay2(
219 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
223 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
220 editor_ptr, editor_baton, None)
224 editor_ptr, editor_baton, None)
221
225
222 added = []
226 added = []
223 changed = []
227 changed = []
224 removed = []
228 removed = []
225
229
226 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
230 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
227 for path, change in editor.changes.items():
231 for path, change in editor.changes.items():
228 # TODO: Decide what to do with directory nodes. Subversion can add
232 # TODO: Decide what to do with directory nodes. Subversion can add
229 # empty directories.
233 # empty directories.
230
234
231 if change.item_kind == svn.core.svn_node_dir:
235 if change.item_kind == svn.core.svn_node_dir:
232 continue
236 continue
233 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
237 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
234 added.append(path)
238 added.append(path)
235 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
239 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
236 svn.repos.CHANGE_ACTION_REPLACE]:
240 svn.repos.CHANGE_ACTION_REPLACE]:
237 changed.append(path)
241 changed.append(path)
238 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
242 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
239 removed.append(path)
243 removed.append(path)
240 else:
244 else:
241 raise NotImplementedError(
245 raise NotImplementedError(
242 "Action %s not supported on path %s" % (
246 "Action %s not supported on path %s" % (
243 change.action, path))
247 change.action, path))
244
248
245 changes = {
249 changes = {
246 'added': added,
250 'added': added,
247 'changed': changed,
251 'changed': changed,
248 'removed': removed,
252 'removed': removed,
249 }
253 }
250 return changes
254 return changes
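A replayed revision thus flattens into three path lists; since directory nodes are skipped, an empty directory added in a revision appears in none of them. The return value is shaped like (paths illustrative):

    {'added': ['trunk/docs/api.rst'],
     'changed': ['trunk/setup.py'],
     'removed': []}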
251
255
252 @reraise_safe_exceptions
256 @reraise_safe_exceptions
253 def node_history(self, wire, path, revision, limit):
257 def node_history(self, wire, path, revision, limit):
254 cache_on, context_uid, repo_id = self._cache_on(wire)
258 cache_on, context_uid, repo_id = self._cache_on(wire)
255 @self.region.conditional_cache_on_arguments(condition=cache_on)
259 region = self._region(wire)
260 @region.conditional_cache_on_arguments(condition=cache_on)
256 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
261 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
257 cross_copies = False
262 cross_copies = False
258 repo = self._factory.repo(wire)
263 repo = self._factory.repo(wire)
259 fsobj = svn.repos.fs(repo)
264 fsobj = svn.repos.fs(repo)
260 rev_root = svn.fs.revision_root(fsobj, revision)
265 rev_root = svn.fs.revision_root(fsobj, revision)
261
266
262 history_revisions = []
267 history_revisions = []
263 history = svn.fs.node_history(rev_root, path)
268 history = svn.fs.node_history(rev_root, path)
264 history = svn.fs.history_prev(history, cross_copies)
269 history = svn.fs.history_prev(history, cross_copies)
265 while history:
270 while history:
266 __, node_revision = svn.fs.history_location(history)
271 __, node_revision = svn.fs.history_location(history)
267 history_revisions.append(node_revision)
272 history_revisions.append(node_revision)
268 if limit and len(history_revisions) >= limit:
273 if limit and len(history_revisions) >= limit:
269 break
274 break
270 history = svn.fs.history_prev(history, cross_copies)
275 history = svn.fs.history_prev(history, cross_copies)
271 return history_revisions
276 return history_revisions
272 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
277 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
273
278
274 def node_properties(self, wire, path, revision):
279 def node_properties(self, wire, path, revision):
275 cache_on, context_uid, repo_id = self._cache_on(wire)
280 cache_on, context_uid, repo_id = self._cache_on(wire)
276 @self.region.conditional_cache_on_arguments(condition=cache_on)
281 region = self._region(wire)
282 @region.conditional_cache_on_arguments(condition=cache_on)
277 def _node_properties(_repo_id, _path, _revision):
283 def _node_properties(_repo_id, _path, _revision):
278 repo = self._factory.repo(wire)
284 repo = self._factory.repo(wire)
279 fsobj = svn.repos.fs(repo)
285 fsobj = svn.repos.fs(repo)
280 rev_root = svn.fs.revision_root(fsobj, revision)
286 rev_root = svn.fs.revision_root(fsobj, revision)
281 return svn.fs.node_proplist(rev_root, path)
287 return svn.fs.node_proplist(rev_root, path)
282 return _node_properties(repo_id, path, revision)
288 return _node_properties(repo_id, path, revision)
283
289
284 def file_annotate(self, wire, path, revision):
290 def file_annotate(self, wire, path, revision):
285 abs_path = 'file://' + urllib.request.pathname2url(
291 abs_path = 'file://' + urllib.request.pathname2url(
286 vcspath.join(wire['path'], path))
292 vcspath.join(wire['path'], path))
287 file_uri = svn.core.svn_path_canonicalize(abs_path)
293 file_uri = svn.core.svn_path_canonicalize(abs_path)
288
294
289 start_rev = svn_opt_revision_value_t(0)
295 start_rev = svn_opt_revision_value_t(0)
290 peg_rev = svn_opt_revision_value_t(revision)
296 peg_rev = svn_opt_revision_value_t(revision)
291 end_rev = peg_rev
297 end_rev = peg_rev
292
298
293 annotations = []
299 annotations = []
294
300
295 def receiver(line_no, revision, author, date, line, pool):
301 def receiver(line_no, revision, author, date, line, pool):
296 annotations.append((line_no, revision, line))
302 annotations.append((line_no, revision, line))
297
303
298 # TODO: Cannot use blame5, missing typemap function in the swig code
304 # TODO: Cannot use blame5, missing typemap function in the swig code
299 try:
305 try:
300 svn.client.blame2(
306 svn.client.blame2(
301 file_uri, peg_rev, start_rev, end_rev,
307 file_uri, peg_rev, start_rev, end_rev,
302 receiver, svn.client.create_context())
308 receiver, svn.client.create_context())
303 except svn.core.SubversionException as exc:
309 except svn.core.SubversionException as exc:
304 log.exception("Error during blame operation.")
310 log.exception("Error during blame operation.")
305 raise Exception(
311 raise Exception(
306 "Blame not supported or file does not exist at path %s. "
312 "Blame not supported or file does not exist at path %s. "
307 "Error %s." % (path, exc))
313 "Error %s." % (path, exc))
308
314
309 return annotations
315 return annotations
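Each receiver invocation appends one (line_no, revision, line) triple, so annotating a three-line file yields something shaped like (values illustrative):

    [(0, 5, 'first line\n'),
     (1, 7, 'second line\n'),
     (2, 2, 'third line\n')]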
310
316
311 def get_node_type(self, wire, path, revision=None):
317 def get_node_type(self, wire, path, revision=None):
312
318
313 cache_on, context_uid, repo_id = self._cache_on(wire)
319 cache_on, context_uid, repo_id = self._cache_on(wire)
314 @self.region.conditional_cache_on_arguments(condition=cache_on)
320 region = self._region(wire)
321 @region.conditional_cache_on_arguments(condition=cache_on)
315 def _get_node_type(_repo_id, _path, _revision):
322 def _get_node_type(_repo_id, _path, _revision):
316 repo = self._factory.repo(wire)
323 repo = self._factory.repo(wire)
317 fs_ptr = svn.repos.fs(repo)
324 fs_ptr = svn.repos.fs(repo)
318 if _revision is None:
325 if _revision is None:
319 _revision = svn.fs.youngest_rev(fs_ptr)
326 _revision = svn.fs.youngest_rev(fs_ptr)
320 root = svn.fs.revision_root(fs_ptr, _revision)
327 root = svn.fs.revision_root(fs_ptr, _revision)
321 node = svn.fs.check_path(root, path)
328 node = svn.fs.check_path(root, path)
322 return NODE_TYPE_MAPPING.get(node, None)
329 return NODE_TYPE_MAPPING.get(node, None)
323 return _get_node_type(repo_id, path, revision)
330 return _get_node_type(repo_id, path, revision)
324
331
325 def get_nodes(self, wire, path, revision=None):
332 def get_nodes(self, wire, path, revision=None):
326
333
327 cache_on, context_uid, repo_id = self._cache_on(wire)
334 cache_on, context_uid, repo_id = self._cache_on(wire)
328 @self.region.conditional_cache_on_arguments(condition=cache_on)
335 region = self._region(wire)
336 @region.conditional_cache_on_arguments(condition=cache_on)
329 def _get_nodes(_repo_id, _path, _revision):
337 def _get_nodes(_repo_id, _path, _revision):
330 repo = self._factory.repo(wire)
338 repo = self._factory.repo(wire)
331 fsobj = svn.repos.fs(repo)
339 fsobj = svn.repos.fs(repo)
332 if _revision is None:
340 if _revision is None:
333 _revision = svn.fs.youngest_rev(fsobj)
341 _revision = svn.fs.youngest_rev(fsobj)
334 root = svn.fs.revision_root(fsobj, _revision)
342 root = svn.fs.revision_root(fsobj, _revision)
335 entries = svn.fs.dir_entries(root, path)
343 entries = svn.fs.dir_entries(root, path)
336 result = []
344 result = []
337 for entry_path, entry_info in entries.items():
345 for entry_path, entry_info in entries.items():
338 result.append(
346 result.append(
339 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
347 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
340 return result
348 return result
341 return _get_nodes(repo_id, path, revision)
349 return _get_nodes(repo_id, path, revision)
342
350
343 def get_file_content(self, wire, path, rev=None):
351 def get_file_content(self, wire, path, rev=None):
344 repo = self._factory.repo(wire)
352 repo = self._factory.repo(wire)
345 fsobj = svn.repos.fs(repo)
353 fsobj = svn.repos.fs(repo)
346 if rev is None:
354 if rev is None:
347 rev = svn.fs.youngest_revision(fsobj)
355 rev = svn.fs.youngest_revision(fsobj)
348 root = svn.fs.revision_root(fsobj, rev)
356 root = svn.fs.revision_root(fsobj, rev)
349 content = svn.core.Stream(svn.fs.file_contents(root, path))
357 content = svn.core.Stream(svn.fs.file_contents(root, path))
350 return content.read()
358 return content.read()
351
359
352 def get_file_size(self, wire, path, revision=None):
360 def get_file_size(self, wire, path, revision=None):
353
361
354 cache_on, context_uid, repo_id = self._cache_on(wire)
362 cache_on, context_uid, repo_id = self._cache_on(wire)
355 @self.region.conditional_cache_on_arguments(condition=cache_on)
363 region = self._region(wire)
364 @region.conditional_cache_on_arguments(condition=cache_on)
356 def _get_file_size(_repo_id, _path, _revision):
365 def _get_file_size(_repo_id, _path, _revision):
357 repo = self._factory.repo(wire)
366 repo = self._factory.repo(wire)
358 fsobj = svn.repos.fs(repo)
367 fsobj = svn.repos.fs(repo)
359 if _revision is None:
368 if _revision is None:
360 _revision = svn.fs.youngest_revision(fsobj)
369 _revision = svn.fs.youngest_revision(fsobj)
361 root = svn.fs.revision_root(fsobj, _revision)
370 root = svn.fs.revision_root(fsobj, _revision)
362 size = svn.fs.file_length(root, path)
371 size = svn.fs.file_length(root, path)
363 return size
372 return size
364 return _get_file_size(repo_id, path, revision)
373 return _get_file_size(repo_id, path, revision)
365
374
366 def create_repository(self, wire, compatible_version=None):
375 def create_repository(self, wire, compatible_version=None):
367 log.info('Creating Subversion repository in path "%s"', wire['path'])
376 log.info('Creating Subversion repository in path "%s"', wire['path'])
368 self._factory.repo(wire, create=True,
377 self._factory.repo(wire, create=True,
369 compatible_version=compatible_version)
378 compatible_version=compatible_version)
370
379
371 def get_url_and_credentials(self, src_url):
380 def get_url_and_credentials(self, src_url):
372 obj = urllib.parse.urlparse(src_url)
381 obj = urllib.parse.urlparse(src_url)
373 username = obj.username or None
382 username = obj.username or None
374 password = obj.password or None
383 password = obj.password or None
375 return username, password, src_url
384 return username, password, src_url
376
385
377 def import_remote_repository(self, wire, src_url):
386 def import_remote_repository(self, wire, src_url):
378 repo_path = wire['path']
387 repo_path = wire['path']
379 if not self.is_path_valid_repository(wire, repo_path):
388 if not self.is_path_valid_repository(wire, repo_path):
380 raise Exception(
389 raise Exception(
381 "Path %s is not a valid Subversion repository." % repo_path)
390 "Path %s is not a valid Subversion repository." % repo_path)
382
391
383 username, password, src_url = self.get_url_and_credentials(src_url)
392 username, password, src_url = self.get_url_and_credentials(src_url)
384 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
393 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
385 '--trust-server-cert-failures=unknown-ca']
394 '--trust-server-cert-failures=unknown-ca']
386 if username and password:
395 if username and password:
387 rdump_cmd += ['--username', username, '--password', password]
396 rdump_cmd += ['--username', username, '--password', password]
388 rdump_cmd += [src_url]
397 rdump_cmd += [src_url]
389
398
390 rdump = subprocess.Popen(
399 rdump = subprocess.Popen(
391 rdump_cmd,
400 rdump_cmd,
392 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
401 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
393 load = subprocess.Popen(
402 load = subprocess.Popen(
394 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
403 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
395
404
396 # TODO: johbo: This can be a very long operation, might be better
405 # TODO: johbo: This can be a very long operation, might be better
397 # to track some kind of status and provide an api to check if the
406 # to track some kind of status and provide an api to check if the
398 # import is done.
407 # import is done.
399 rdump.wait()
408 rdump.wait()
400 load.wait()
409 load.wait()
401
410
402 log.debug('Dump process ended with code: %s', rdump.returncode)
411 log.debug('Dump process ended with code: %s', rdump.returncode)
403 if rdump.returncode != 0:
412 if rdump.returncode != 0:
404 errors = rdump.stderr.read()
413 errors = rdump.stderr.read()
405 log.error('svnrdump dump failed: statuscode %s: message: %s',
414 log.error('svnrdump dump failed: statuscode %s: message: %s',
406 rdump.returncode, errors)
415 rdump.returncode, errors)
407 reason = 'UNKNOWN'
416 reason = 'UNKNOWN'
408 if 'svnrdump: E230001:' in errors:
417 if 'svnrdump: E230001:' in errors:
409 reason = 'INVALID_CERTIFICATE'
418 reason = 'INVALID_CERTIFICATE'
410
419
411 if reason == 'UNKNOWN':
420 if reason == 'UNKNOWN':
412 reason = 'UNKNOWN:{}'.format(errors)
421 reason = 'UNKNOWN:{}'.format(errors)
413 raise Exception(
422 raise Exception(
414 'Failed to dump the remote repository from %s. Reason:%s' % (
423 'Failed to dump the remote repository from %s. Reason:%s' % (
415 src_url, reason))
424 src_url, reason))
416 if load.returncode != 0:
425 if load.returncode != 0:
417 raise Exception(
426 raise Exception(
418 'Failed to load the dump of remote repository from %s.' %
427 'Failed to load the dump of remote repository from %s.' %
419 (src_url, ))
428 (src_url, ))
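In effect this streams `svnrdump dump --non-interactive <src_url> | svnadmin load <repo_path>` without the dump ever touching disk; credentials parsed out of the source URL travel as --username/--password flags, and svnrdump's E230001 error code is singled out to report certificate failures as INVALID_CERTIFICATE.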
420
429
421 def commit(self, wire, message, author, timestamp, updated, removed):
430 def commit(self, wire, message, author, timestamp, updated, removed):
422 assert isinstance(message, str)
431 assert isinstance(message, str)
423 assert isinstance(author, str)
432 assert isinstance(author, str)
424
433
425 repo = self._factory.repo(wire)
434 repo = self._factory.repo(wire)
426 fsobj = svn.repos.fs(repo)
435 fsobj = svn.repos.fs(repo)
427
436
428 rev = svn.fs.youngest_rev(fsobj)
437 rev = svn.fs.youngest_rev(fsobj)
429 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
438 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
430 txn_root = svn.fs.txn_root(txn)
439 txn_root = svn.fs.txn_root(txn)
431
440
432 for node in updated:
441 for node in updated:
433 TxnNodeProcessor(node, txn_root).update()
442 TxnNodeProcessor(node, txn_root).update()
434 for node in removed:
443 for node in removed:
435 TxnNodeProcessor(node, txn_root).remove()
444 TxnNodeProcessor(node, txn_root).remove()
436
445
437 commit_id = svn.repos.fs_commit_txn(repo, txn)
446 commit_id = svn.repos.fs_commit_txn(repo, txn)
438
447
439 if timestamp:
448 if timestamp:
440 apr_time = apr_time_t(timestamp)
449 apr_time = apr_time_t(timestamp)
441 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
450 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
442 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
451 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
443
452
444 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
453 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
445 return commit_id
454 return commit_id
446
455
447 def diff(self, wire, rev1, rev2, path1=None, path2=None,
456 def diff(self, wire, rev1, rev2, path1=None, path2=None,
448 ignore_whitespace=False, context=3):
457 ignore_whitespace=False, context=3):
449
458
450 wire.update(cache=False)
459 wire.update(cache=False)
451 repo = self._factory.repo(wire)
460 repo = self._factory.repo(wire)
452 diff_creator = SvnDiffer(
461 diff_creator = SvnDiffer(
453 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
462 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
454 try:
463 try:
455 return diff_creator.generate_diff()
464 return diff_creator.generate_diff()
456 except svn.core.SubversionException as e:
465 except svn.core.SubversionException as e:
457 log.exception(
466 log.exception(
458 "Error during diff operation operation. "
467 "Error during diff operation operation. "
459 "Path might not exist %s, %s" % (path1, path2))
468 "Path might not exist %s, %s" % (path1, path2))
460 return ""
469 return ""
461
470
462 @reraise_safe_exceptions
471 @reraise_safe_exceptions
463 def is_large_file(self, wire, path):
472 def is_large_file(self, wire, path):
464 return False
473 return False
465
474
466 @reraise_safe_exceptions
475 @reraise_safe_exceptions
467 def is_binary(self, wire, rev, path):
476 def is_binary(self, wire, rev, path):
468 cache_on, context_uid, repo_id = self._cache_on(wire)
477 cache_on, context_uid, repo_id = self._cache_on(wire)
469
478
470 @self.region.conditional_cache_on_arguments(condition=cache_on)
479 region = self._region(wire)
480 @region.conditional_cache_on_arguments(condition=cache_on)
471 def _is_binary(_repo_id, _rev, _path):
481 def _is_binary(_repo_id, _rev, _path):
472 raw_bytes = self.get_file_content(wire, path, rev)
482 raw_bytes = self.get_file_content(wire, path, rev)
473 return raw_bytes and '\0' in raw_bytes
483 return raw_bytes and b'\0' in raw_bytes
474
484
475 return _is_binary(repo_id, rev, path)
485 return _is_binary(repo_id, rev, path)
476
486
477 @reraise_safe_exceptions
487 @reraise_safe_exceptions
478 def run_svn_command(self, wire, cmd, **opts):
488 def run_svn_command(self, wire, cmd, **opts):
479 path = wire.get('path', None)
489 path = wire.get('path', None)
480
490
481 if path and os.path.isdir(path):
491 if path and os.path.isdir(path):
482 opts['cwd'] = path
492 opts['cwd'] = path
483
493
484 safe_call = False
494 safe_call = opts.pop('_safe', False)
485 if '_safe' in opts:
486 safe_call = True
487
495
488 svnenv = os.environ.copy()
496 svnenv = os.environ.copy()
489 svnenv.update(opts.pop('extra_env', {}))
497 svnenv.update(opts.pop('extra_env', {}))
490
498
491 _opts = {'env': svnenv, 'shell': False}
499 _opts = {'env': svnenv, 'shell': False}
492
500
493 try:
501 try:
494 _opts.update(opts)
502 _opts.update(opts)
495 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
503 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
496
504
497 return ''.join(p), ''.join(p.error)
505 return ''.join(p), ''.join(p.error)
498 except (EnvironmentError, OSError) as err:
506 except (EnvironmentError, OSError) as err:
507 if safe_call:
508 return '', safe_str(err).strip()
509 else:
499 cmd = ' '.join(cmd) # human friendly CMD
510 cmd = ' '.join(cmd) # human friendly CMD
500 tb_err = ("Couldn't run svn command (%s).\n"
511 tb_err = ("Couldn't run svn command (%s).\n"
501 "Original error was:%s\n"
512 "Original error was:%s\n"
502 "Call options:%s\n"
513 "Call options:%s\n"
503 % (cmd, err, _opts))
514 % (cmd, err, _opts))
504 log.exception(tb_err)
515 log.exception(tb_err)
505 if safe_call:
506 return '', err
507 else:
508 raise exceptions.VcsException()(tb_err)
516 raise exceptions.VcsException()(tb_err)
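After the refactor above, _safe is popped from opts before the spawn, so a failing command degrades to an ('', error_message) tuple instead of raising. An illustrative call (remote stands for a hypothetical SvnRemote instance; the path is assumed):

    wire = {'path': '/srv/repos/example'}
    stdout, stderr = remote.run_svn_command(
        wire, ['svn', 'info', wire['path']], _safe=True)
    if stderr:
        log.warning('svn command failed softly: %s', stderr)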
509
517
510 @reraise_safe_exceptions
518 @reraise_safe_exceptions
511 def install_hooks(self, wire, force=False):
519 def install_hooks(self, wire, force=False):
512 from vcsserver.hook_utils import install_svn_hooks
520 from vcsserver.hook_utils import install_svn_hooks
513 repo_path = wire['path']
521 repo_path = wire['path']
514 binary_dir = settings.BINARY_DIR
522 binary_dir = settings.BINARY_DIR
515 executable = None
523 executable = None
516 if binary_dir:
524 if binary_dir:
517 executable = os.path.join(binary_dir, 'python')
525 executable = os.path.join(binary_dir, 'python')
518 return install_svn_hooks(
526 return install_svn_hooks(
519 repo_path, executable=executable, force_create=force)
527 repo_path, executable=executable, force_create=force)
520
528
521 @reraise_safe_exceptions
529 @reraise_safe_exceptions
522 def get_hooks_info(self, wire):
530 def get_hooks_info(self, wire):
523 from vcsserver.hook_utils import (
531 from vcsserver.hook_utils import (
524 get_svn_pre_hook_version, get_svn_post_hook_version)
532 get_svn_pre_hook_version, get_svn_post_hook_version)
525 repo_path = wire['path']
533 repo_path = wire['path']
526 return {
534 return {
527 'pre_version': get_svn_pre_hook_version(repo_path),
535 'pre_version': get_svn_pre_hook_version(repo_path),
528 'post_version': get_svn_post_hook_version(repo_path),
536 'post_version': get_svn_post_hook_version(repo_path),
529 }
537 }
530
538
539 @reraise_safe_exceptions
540 def set_head_ref(self, wire, head_name):
541 pass
542
543 @reraise_safe_exceptions
544 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
545 archive_dir_name, commit_id):
546
547 def walk_tree(root, root_dir, _commit_id):
548 """
549 Special recursive svn repo walker
550 """
551
552 filemode_default = 0o100644
553 filemode_executable = 0o100755
554
555 file_iter = svn.fs.dir_entries(root, root_dir)
556 for f_name in file_iter:
557 f_type = NODE_TYPE_MAPPING.get(file_iter[f_name].kind, None)
558
559 if f_type == 'dir':
560 # return only DIR, and then all entries in that dir
561 yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type
562 new_root = os.path.join(root_dir, f_name)
563 for _f_name, _f_data, _f_type in walk_tree(root, new_root, _commit_id):
564 yield _f_name, _f_data, _f_type
565 else:
566 f_path = os.path.join(root_dir, f_name).rstrip('/')
567 prop_list = svn.fs.node_proplist(root, f_path)
568
569 f_mode = filemode_default
570 if prop_list.get('svn:executable'):
571 f_mode = filemode_executable
572
573 f_is_link = False
574 if prop_list.get('svn:special'):
575 f_is_link = True
576
577 data = {
578 'is_link': f_is_link,
579 'mode': f_mode,
580 'content_stream': svn.core.Stream(svn.fs.file_contents(root, f_path)).read
581 }
582
583 yield f_path, data, f_type
584
585 def file_walker(_commit_id, path):
586 repo = self._factory.repo(wire)
587 root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id))
588
589 def no_content():
590 raise NoContentException()
591
592 for f_name, f_data, f_type in walk_tree(root, path, _commit_id):
593 file_path = f_name
594
595 if f_type == 'dir':
596 mode = f_data['mode']
597 yield ArchiveNode(file_path, mode, False, no_content)
598 else:
599 mode = f_data['mode']
600 is_link = f_data['is_link']
601 data_stream = f_data['content_stream']
602 yield ArchiveNode(file_path, mode, is_link, data_stream)
603
604 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
605 archive_dir_name, commit_id)
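The walker maps Subversion node properties onto POSIX-style file modes: svn:executable flips the default 0o100644 to 0o100755 and svn:special marks symlinks. A node handed to ArchiveNode is therefore shaped like (values illustrative; stream stands for the svn.core.Stream built in walk_tree):

    f_path = 'trunk/tools/run.sh'
    data = {
        'is_link': False,               # svn:special unset
        'mode': 0o100755,               # svn:executable set
        'content_stream': stream.read,  # callable returning the file bytes
    }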
606
531
607
532 class SvnDiffer(object):
608 class SvnDiffer(object):
533 """
609 """
534 Utility to create diffs based on difflib and the Subversion api
610 Utility to create diffs based on difflib and the Subversion api
535 """
611 """
536
612
537 binary_content = False
613 binary_content = False
538
614
539 def __init__(
615 def __init__(
540 self, repo, src_rev, src_path, tgt_rev, tgt_path,
616 self, repo, src_rev, src_path, tgt_rev, tgt_path,
541 ignore_whitespace, context):
617 ignore_whitespace, context):
542 self.repo = repo
618 self.repo = repo
543 self.ignore_whitespace = ignore_whitespace
619 self.ignore_whitespace = ignore_whitespace
544 self.context = context
620 self.context = context
545
621
546 fsobj = svn.repos.fs(repo)
622 fsobj = svn.repos.fs(repo)
547
623
548 self.tgt_rev = tgt_rev
624 self.tgt_rev = tgt_rev
549 self.tgt_path = tgt_path or ''
625 self.tgt_path = tgt_path or ''
550 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
626 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
551 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
627 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
552
628
553 self.src_rev = src_rev
629 self.src_rev = src_rev
554 self.src_path = src_path or self.tgt_path
630 self.src_path = src_path or self.tgt_path
555 self.src_root = svn.fs.revision_root(fsobj, src_rev)
631 self.src_root = svn.fs.revision_root(fsobj, src_rev)
556 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
632 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
557
633
558 self._validate()
634 self._validate()
559
635
560 def _validate(self):
636 def _validate(self):
561 if (self.tgt_kind != svn.core.svn_node_none and
637 if (self.tgt_kind != svn.core.svn_node_none and
562 self.src_kind != svn.core.svn_node_none and
638 self.src_kind != svn.core.svn_node_none and
563 self.src_kind != self.tgt_kind):
639 self.src_kind != self.tgt_kind):
564 # TODO: johbo: proper error handling
640 # TODO: johbo: proper error handling
565 raise Exception(
641 raise Exception(
566 "Source and target are not compatible for diff generation. "
642 "Source and target are not compatible for diff generation. "
567 "Source type: %s, target type: %s" %
643 "Source type: %s, target type: %s" %
568 (self.src_kind, self.tgt_kind))
644 (self.src_kind, self.tgt_kind))
569
645
570 def generate_diff(self):
646 def generate_diff(self):
571 buf = io.StringIO()
647 buf = io.StringIO()
572 if self.tgt_kind == svn.core.svn_node_dir:
648 if self.tgt_kind == svn.core.svn_node_dir:
573 self._generate_dir_diff(buf)
649 self._generate_dir_diff(buf)
574 else:
650 else:
575 self._generate_file_diff(buf)
651 self._generate_file_diff(buf)
576 return buf.getvalue()
652 return buf.getvalue()
577
653
578 def _generate_dir_diff(self, buf):
654 def _generate_dir_diff(self, buf):
579 editor = DiffChangeEditor()
655 editor = DiffChangeEditor()
580 editor_ptr, editor_baton = svn.delta.make_editor(editor)
656 editor_ptr, editor_baton = svn.delta.make_editor(editor)
581 svn.repos.dir_delta2(
657 svn.repos.dir_delta2(
582 self.src_root,
658 self.src_root,
583 self.src_path,
659 self.src_path,
584 '', # src_entry
660 '', # src_entry
585 self.tgt_root,
661 self.tgt_root,
586 self.tgt_path,
662 self.tgt_path,
587 editor_ptr, editor_baton,
663 editor_ptr, editor_baton,
588 authorization_callback_allow_all,
664 authorization_callback_allow_all,
589 False, # text_deltas
665 False, # text_deltas
590 svn.core.svn_depth_infinity, # depth
666 svn.core.svn_depth_infinity, # depth
591 False, # entry_props
667 False, # entry_props
592 False, # ignore_ancestry
668 False, # ignore_ancestry
593 )
669 )
594
670
595 for path, __, change in sorted(editor.changes):
671 for path, __, change in sorted(editor.changes):
596 self._generate_node_diff(
672 self._generate_node_diff(
597 buf, change, path, self.tgt_path, path, self.src_path)
673 buf, change, path, self.tgt_path, path, self.src_path)
598
674
599 def _generate_file_diff(self, buf):
675 def _generate_file_diff(self, buf):
600 change = None
676 change = None
601 if self.src_kind == svn.core.svn_node_none:
677 if self.src_kind == svn.core.svn_node_none:
602 change = "add"
678 change = "add"
603 elif self.tgt_kind == svn.core.svn_node_none:
679 elif self.tgt_kind == svn.core.svn_node_none:
604 change = "delete"
680 change = "delete"
605 tgt_base, tgt_path = vcspath.split(self.tgt_path)
681 tgt_base, tgt_path = vcspath.split(self.tgt_path)
606 src_base, src_path = vcspath.split(self.src_path)
682 src_base, src_path = vcspath.split(self.src_path)
607 self._generate_node_diff(
683 self._generate_node_diff(
608 buf, change, tgt_path, tgt_base, src_path, src_base)
684 buf, change, tgt_path, tgt_base, src_path, src_base)
609
685
610 def _generate_node_diff(
686 def _generate_node_diff(
611 self, buf, change, tgt_path, tgt_base, src_path, src_base):
687 self, buf, change, tgt_path, tgt_base, src_path, src_base):
612
688
613 if self.src_rev == self.tgt_rev and tgt_base == src_base:
689 if self.src_rev == self.tgt_rev and tgt_base == src_base:
614 # for consistent behaviour with git/hg, return an empty diff when
690 # for consistent behaviour with git/hg, return an empty diff when
615 # comparing the same revision
691 # comparing the same revision
616 return
692 return
617
693
618 tgt_full_path = vcspath.join(tgt_base, tgt_path)
694 tgt_full_path = vcspath.join(tgt_base, tgt_path)
619 src_full_path = vcspath.join(src_base, src_path)
695 src_full_path = vcspath.join(src_base, src_path)
620
696
621 self.binary_content = False
697 self.binary_content = False
622 mime_type = self._get_mime_type(tgt_full_path)
698 mime_type = self._get_mime_type(tgt_full_path)
623
699
624 if mime_type and not mime_type.startswith('text'):
700 if mime_type and not mime_type.startswith('text'):
625 self.binary_content = True
701 self.binary_content = True
626 buf.write("=" * 67 + '\n')
702 buf.write("=" * 67 + '\n')
627 buf.write("Cannot display: file marked as a binary type.\n")
703 buf.write("Cannot display: file marked as a binary type.\n")
628 buf.write("svn:mime-type = %s\n" % mime_type)
704 buf.write("svn:mime-type = %s\n" % mime_type)
629 buf.write("Index: %s\n" % (tgt_path, ))
705 buf.write("Index: %s\n" % (tgt_path, ))
630 buf.write("=" * 67 + '\n')
706 buf.write("=" * 67 + '\n')
631 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
707 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
632 'tgt_path': tgt_path})
708 'tgt_path': tgt_path})
633
709
634 if change == 'add':
710 if change == 'add':
635 # TODO: johbo: SVN is missing a zero here compared to git
711 # TODO: johbo: SVN is missing a zero here compared to git
636 buf.write("new file mode 10644\n")
712 buf.write("new file mode 10644\n")
637
713
638 #TODO(marcink): intro to binary detection of svn patches
714 #TODO(marcink): intro to binary detection of svn patches
639 # if self.binary_content:
715 # if self.binary_content:
640 # buf.write('GIT binary patch\n')
716 # buf.write('GIT binary patch\n')
641
717
642 buf.write("--- /dev/null\t(revision 0)\n")
718 buf.write("--- /dev/null\t(revision 0)\n")
643 src_lines = []
719 src_lines = []
644 else:
720 else:
645 if change == 'delete':
721 if change == 'delete':
646 buf.write("deleted file mode 10644\n")
722 buf.write("deleted file mode 10644\n")
647
723
648 #TODO(marcink): intro to binary detection of svn patches
724 #TODO(marcink): intro to binary detection of svn patches
649 # if self.binary_content:
725 # if self.binary_content:
650 # buf.write('GIT binary patch\n')
726 # buf.write('GIT binary patch\n')
651
727
652 buf.write("--- a/%s\t(revision %s)\n" % (
728 buf.write("--- a/%s\t(revision %s)\n" % (
653 src_path, self.src_rev))
729 src_path, self.src_rev))
654 src_lines = self._svn_readlines(self.src_root, src_full_path)
730 src_lines = self._svn_readlines(self.src_root, src_full_path)
655
731
656 if change == 'delete':
732 if change == 'delete':
657 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
733 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
658 tgt_lines = []
734 tgt_lines = []
659 else:
735 else:
660 buf.write("+++ b/%s\t(revision %s)\n" % (
736 buf.write("+++ b/%s\t(revision %s)\n" % (
661 tgt_path, self.tgt_rev))
737 tgt_path, self.tgt_rev))
662 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
738 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
663
739
664 if not self.binary_content:
740 if not self.binary_content:
665 udiff = svn_diff.unified_diff(
741 udiff = svn_diff.unified_diff(
666 src_lines, tgt_lines, context=self.context,
742 src_lines, tgt_lines, context=self.context,
667 ignore_blank_lines=self.ignore_whitespace,
743 ignore_blank_lines=self.ignore_whitespace,
668 ignore_case=False,
744 ignore_case=False,
669 ignore_space_changes=self.ignore_whitespace)
745 ignore_space_changes=self.ignore_whitespace)
670 buf.writelines(udiff)
746 buf.writelines(udiff)
671
747
672 def _get_mime_type(self, path):
748 def _get_mime_type(self, path):
673 try:
749 try:
674 mime_type = svn.fs.node_prop(
750 mime_type = svn.fs.node_prop(
675 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
751 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
676 except svn.core.SubversionException:
752 except svn.core.SubversionException:
677 mime_type = svn.fs.node_prop(
753 mime_type = svn.fs.node_prop(
678 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
754 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
679 return mime_type
755 return mime_type
680
756
681 def _svn_readlines(self, fs_root, node_path):
757 def _svn_readlines(self, fs_root, node_path):
682 if self.binary_content:
758 if self.binary_content:
683 return []
759 return []
684 node_kind = svn.fs.check_path(fs_root, node_path)
760 node_kind = svn.fs.check_path(fs_root, node_path)
685 if node_kind not in (
761 if node_kind not in (
686 svn.core.svn_node_file, svn.core.svn_node_symlink):
762 svn.core.svn_node_file, svn.core.svn_node_symlink):
687 return []
763 return []
688 content = svn.core.Stream(
764 content = svn.core.Stream(
689 svn.fs.file_contents(fs_root, node_path)).read()
765 svn.fs.file_contents(fs_root, node_path)).read()
690 return content.splitlines(True)
766 return content.splitlines(True)
691
767
692
768
693 class DiffChangeEditor(svn.delta.Editor):
769 class DiffChangeEditor(svn.delta.Editor):
694 """
770 """
695 Records changes between two given revisions
771 Records changes between two given revisions
696 """
772 """
697
773
698 def __init__(self):
774 def __init__(self):
699 self.changes = []
775 self.changes = []
700
776
701 def delete_entry(self, path, revision, parent_baton, pool=None):
777 def delete_entry(self, path, revision, parent_baton, pool=None):
702 self.changes.append((path, None, 'delete'))
778 self.changes.append((path, None, 'delete'))
703
779
704 def add_file(
780 def add_file(
705 self, path, parent_baton, copyfrom_path, copyfrom_revision,
781 self, path, parent_baton, copyfrom_path, copyfrom_revision,
706 file_pool=None):
782 file_pool=None):
707 self.changes.append((path, 'file', 'add'))
783 self.changes.append((path, 'file', 'add'))
708
784
709 def open_file(self, path, parent_baton, base_revision, file_pool=None):
785 def open_file(self, path, parent_baton, base_revision, file_pool=None):
710 self.changes.append((path, 'file', 'change'))
786 self.changes.append((path, 'file', 'change'))
711
787
712
788
713 def authorization_callback_allow_all(root, path, pool):
789 def authorization_callback_allow_all(root, path, pool):
714 return True
790 return True
715
791
716
792
717 class TxnNodeProcessor(object):
793 class TxnNodeProcessor(object):
718 """
794 """
719 Utility to process the change of one node within a transaction root.
795 Utility to process the change of one node within a transaction root.
720
796
721 It encapsulates the knowledge of how to add, update or remove
797 It encapsulates the knowledge of how to add, update or remove
722 a node for a given transaction root. The purpose is to support the method
798 a node for a given transaction root. The purpose is to support the method
723 `SvnRemote.commit`.
799 `SvnRemote.commit`.
724 """
800 """
725
801
726 def __init__(self, node, txn_root):
802 def __init__(self, node, txn_root):
727 assert isinstance(node['path'], str)
803 assert isinstance(node['path'], str)
728
804
729 self.node = node
805 self.node = node
730 self.txn_root = txn_root
806 self.txn_root = txn_root
731
807
732 def update(self):
808 def update(self):
733 self._ensure_parent_dirs()
809 self._ensure_parent_dirs()
734 self._add_file_if_node_does_not_exist()
810 self._add_file_if_node_does_not_exist()
735 self._update_file_content()
811 self._update_file_content()
736 self._update_file_properties()
812 self._update_file_properties()
737
813
738 def remove(self):
814 def remove(self):
739 svn.fs.delete(self.txn_root, self.node['path'])
815 svn.fs.delete(self.txn_root, self.node['path'])
740 # TODO: Clean up directory if empty
816 # TODO: Clean up directory if empty
741
817
742 def _ensure_parent_dirs(self):
818 def _ensure_parent_dirs(self):
743 curdir = vcspath.dirname(self.node['path'])
819 curdir = vcspath.dirname(self.node['path'])
744 dirs_to_create = []
820 dirs_to_create = []
745 while not self._svn_path_exists(curdir):
821 while not self._svn_path_exists(curdir):
746 dirs_to_create.append(curdir)
822 dirs_to_create.append(curdir)
747 curdir = vcspath.dirname(curdir)
823 curdir = vcspath.dirname(curdir)
748
824
749 for curdir in reversed(dirs_to_create):
825 for curdir in reversed(dirs_to_create):
750 log.debug('Creating missing directory "%s"', curdir)
826 log.debug('Creating missing directory "%s"', curdir)
751 svn.fs.make_dir(self.txn_root, curdir)
827 svn.fs.make_dir(self.txn_root, curdir)
752
828
753 def _svn_path_exists(self, path):
829 def _svn_path_exists(self, path):
754 path_status = svn.fs.check_path(self.txn_root, path)
830 path_status = svn.fs.check_path(self.txn_root, path)
755 return path_status != svn.core.svn_node_none
831 return path_status != svn.core.svn_node_none
756
832
757 def _add_file_if_node_does_not_exist(self):
833 def _add_file_if_node_does_not_exist(self):
758 kind = svn.fs.check_path(self.txn_root, self.node['path'])
834 kind = svn.fs.check_path(self.txn_root, self.node['path'])
759 if kind == svn.core.svn_node_none:
835 if kind == svn.core.svn_node_none:
760 svn.fs.make_file(self.txn_root, self.node['path'])
836 svn.fs.make_file(self.txn_root, self.node['path'])
761
837
762 def _update_file_content(self):
838 def _update_file_content(self):
763 assert isinstance(self.node['content'], str)
839 assert isinstance(self.node['content'], str)
764 handler, baton = svn.fs.apply_textdelta(
840 handler, baton = svn.fs.apply_textdelta(
765 self.txn_root, self.node['path'], None, None)
841 self.txn_root, self.node['path'], None, None)
766 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
842 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
767
843
768 def _update_file_properties(self):
844 def _update_file_properties(self):
769 properties = self.node.get('properties', {})
845 properties = self.node.get('properties', {})
770 for key, value in properties.items():
846 for key, value in properties.items():
771 svn.fs.change_node_prop(
847 svn.fs.change_node_prop(
772 self.txn_root, self.node['path'], key, value)
848 self.txn_root, self.node['path'], key, value)
773
849
774
850
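A hedged sketch of how `TxnNodeProcessor` is meant to be driven: `SvnRemote.commit` opens a Subversion transaction, obtains its root, and hands every changed node to a processor instance. The transaction setup is assumed here and is not part of this hunk.

# Illustrative only: `txn_root` is assumed to be an open transaction
# root obtained through the svn.fs transaction API.
node = {'path': 'docs/readme.txt', 'content': 'hello\n'}
processor = TxnNodeProcessor(node, txn_root)
processor.update()  # ensures parent dirs, creates the file, writes content and properties
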
def apr_time_t(timestamp):
    """
    Convert a Python timestamp into APR timestamp type apr_time_t
    """
    return timestamp * 1E6


def svn_opt_revision_value_t(num):
    """
    Put `num` into a `svn_opt_revision_value_t` structure.
    """
    value = svn.core.svn_opt_revision_value_t()
    value.number = num
    revision = svn.core.svn_opt_revision_t()
    revision.kind = svn.core.svn_opt_revision_number
    revision.value = value
    return revision
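Both helpers exist because the SWIG bindings expect APR/SVN structures rather than plain Python numbers. A small usage sketch, with hypothetical call sites consistent with the definitions above:

import time

# apr_time_t counts microseconds, hence the 1E6 scaling above.
commit_time = apr_time_t(time.time())

# Wrap a revision number so APIs expecting svn_opt_revision_t accept it.
rev = svn_opt_revision_value_t(42)
assert rev.kind == svn.core.svn_opt_revision_number
assert rev.value.number == 42
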
@@ -1,57 +1,56 @@
 # RhodeCode VCSServer provides access to different vcs backends via network.
 # Copyright (C) 2014-2020 RhodeCode GmbH
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation; either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

 import socket
-
 import pytest


 def pytest_addoption(parser):
     parser.addoption(
-        '--repeat', type=int, default=100,
+        '--perf-repeat-vcs', type=int, default=100,
         help="Number of repetitions in performance tests.")


 @pytest.fixture(scope='session')
 def repeat(request):
     """
     The number of repetitions is based on this fixture.

     Slower calls may divide it by 10 or 100. It is chosen in a way so that the
     tests are not too slow in our default test suite.
     """
-    return request.config.getoption('--repeat')
+    return request.config.getoption('--perf-repeat-vcs')


 @pytest.fixture(scope='session')
 def vcsserver_port(request):
     port = get_available_port()
     print('Using vcsserver port %s' % (port, ))
     return port


 def get_available_port():
     family = socket.AF_INET
     socktype = socket.SOCK_STREAM
     host = '127.0.0.1'

     mysocket = socket.socket(family, socktype)
     mysocket.bind((host, 0))
     port = mysocket.getsockname()[1]
     mysocket.close()
     del mysocket
     return port
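The option rename stays invisible to test code because tests consume the value through the `repeat` fixture rather than reading the option directly. A minimal hypothetical consumer:

# Hypothetical performance test; run with e.g. pytest --perf-repeat-vcs=1000
def test_lookup_performance(repeat):
    for _ in range(repeat):
        pass  # exercise the measured code path here
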
@@ -1,160 +1,160 @@
 # RhodeCode VCSServer provides access to different vcs backends via network.
 # Copyright (C) 2014-2020 RhodeCode GmbH
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation; either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

 import inspect

 import pytest
 import dulwich.errors
 from mock import Mock, patch

 from vcsserver import git


 SAMPLE_REFS = {
     'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
     'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
     'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
     'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
     'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
 }


 @pytest.fixture
 def git_remote():
     """
     A GitRemote instance with a mock factory.
     """
     factory = Mock()
     remote = git.GitRemote(factory)
     return remote


 def test_discover_git_version(git_remote):
     version = git_remote.discover_git_version()
     assert version


 class TestGitFetch(object):
     def setup(self):
         self.mock_repo = Mock()
         factory = Mock()
         factory.repo = Mock(return_value=self.mock_repo)
         self.remote_git = git.GitRemote(factory)

     def test_fetches_all_when_no_commit_ids_specified(self):
         def side_effect(determine_wants, *args, **kwargs):
             determine_wants(SAMPLE_REFS)

         with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
             mock_fetch.side_effect = side_effect
             self.remote_git.pull(wire={}, url='/tmp/', apply_refs=False)
             determine_wants = self.mock_repo.object_store.determine_wants_all
             determine_wants.assert_called_once_with(SAMPLE_REFS)

     def test_fetches_specified_commits(self):
         selected_refs = {
             'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
             'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
         }

         def side_effect(determine_wants, *args, **kwargs):
             result = determine_wants(SAMPLE_REFS)
             assert sorted(result) == sorted(selected_refs.values())
             return result

         with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
             mock_fetch.side_effect = side_effect
             self.remote_git.pull(
                 wire={}, url='/tmp/', apply_refs=False,
                 refs=selected_refs.keys())
             determine_wants = self.mock_repo.object_store.determine_wants_all
             assert determine_wants.call_count == 0

     def test_get_remote_refs(self):
         factory = Mock()
         remote_git = git.GitRemote(factory)
         url = 'http://example.com/test/test.git'
         sample_refs = {
             'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
             'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
         }

         with patch('vcsserver.git.Repo', create=False) as mock_repo:
             mock_repo().get_refs.return_value = sample_refs
             remote_refs = remote_git.get_remote_refs(wire={}, url=url)
             mock_repo().get_refs.assert_called_once_with()
             assert remote_refs == sample_refs


 class TestReraiseSafeExceptions(object):

     def test_method_decorated_with_reraise_safe_exceptions(self):
         factory = Mock()
         git_remote = git.GitRemote(factory)

         def fake_function():
             return None

         decorator = git.reraise_safe_exceptions(fake_function)

         methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
         for method_name, method in methods:
-            if not method_name.startswith('_'):
+            if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
                 assert method.__func__.__code__ == decorator.__code__

     @pytest.mark.parametrize('side_effect, expected_type', [
         (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
         (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
         (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
         (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
         (dulwich.errors.HangupException(), 'error'),
         (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
     ])
     def test_safe_exceptions_reraised(self, side_effect, expected_type):
         @git.reraise_safe_exceptions
         def fake_method():
             raise side_effect

         with pytest.raises(Exception) as exc_info:
             fake_method()
         assert type(exc_info.value) == Exception
         assert exc_info.value._vcs_kind == expected_type


 class TestDulwichRepoWrapper(object):
     def test_calls_close_on_delete(self):
         isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
         with isdir_patcher:
             repo = git.Repo('/tmp/abcde')
         with patch.object(git.DulwichRepo, 'close') as close_mock:
             del repo
             close_mock.assert_called_once_with()


 class TestGitFactory(object):
     def test_create_repo_returns_dulwich_wrapper(self):

         with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
             mock.side_effect = {'repo_objects': ''}
             factory = git.GitFactory()
             wire = {
                 'path': '/tmp/abcde'
             }
             isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
             with isdir_patcher:
                 result = factory._create_repo(wire, True)
                 assert isinstance(result, git.Repo)
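The loop in `test_method_decorated_with_reraise_safe_exceptions` asserts that every public `GitRemote` method shares the code object of the `reraise_safe_exceptions` wrapper, which is exactly what happens when each method is decorated by it. A minimal sketch of that pattern (an illustration, not the vcsserver implementation):

import functools

def reraise_safe_exceptions_sketch(func):
    # Translate backend errors into plain, wire-safe Exceptions
    # tagged with `_vcs_kind`, as the parametrized test expects.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except KeyError:  # stand-in for the dulwich error classes
            exc = Exception('lookup failed')
            exc._vcs_kind = 'lookup'
            raise exc
    return wrapper

Every function decorated this way ends up with the same `wrapper.__code__`, which is why comparing code objects detects an undecorated method.
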
@@ -1,108 +1,108 @@
 # RhodeCode VCSServer provides access to different vcs backends via network.
 # Copyright (C) 2014-2020 RhodeCode GmbH
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation; either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

 import inspect
 import sys
 import traceback

 import pytest
 from mercurial.error import LookupError
 from mock import Mock, MagicMock, patch

 from vcsserver import exceptions, hg, hgcompat


 class TestDiff(object):
     def test_raising_safe_exception_when_lookup_failed(self):

         factory = Mock()
         hg_remote = hg.HgRemote(factory)
         with patch('mercurial.patch.diff') as diff_mock:
             diff_mock.side_effect = LookupError(
                 'deadbeef', 'index', 'message')
             with pytest.raises(Exception) as exc_info:
                 hg_remote.diff(
                     wire={}, commit_id_1='deadbeef', commit_id_2='deadbee1',
                     file_filter=None, opt_git=True, opt_ignorews=True,
                     context=3)
             assert type(exc_info.value) == Exception
             assert exc_info.value._vcs_kind == 'lookup'


 class TestReraiseSafeExceptions(object):
     def test_method_decorated_with_reraise_safe_exceptions(self):
         factory = Mock()
         hg_remote = hg.HgRemote(factory)
         methods = inspect.getmembers(hg_remote, predicate=inspect.ismethod)
         decorator = hg.reraise_safe_exceptions(None)
         for method_name, method in methods:
-            if not method_name.startswith('_'):
+            if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
                 assert method.__func__.__code__ == decorator.__code__

     @pytest.mark.parametrize('side_effect, expected_type', [
         (hgcompat.Abort(), 'abort'),
         (hgcompat.InterventionRequired(), 'abort'),
         (hgcompat.RepoLookupError(), 'lookup'),
         (hgcompat.LookupError('deadbeef', 'index', 'message'), 'lookup'),
         (hgcompat.RepoError(), 'error'),
         (hgcompat.RequirementError(), 'requirement'),
     ])
     def test_safe_exceptions_reraised(self, side_effect, expected_type):
         @hg.reraise_safe_exceptions
         def fake_method():
             raise side_effect

         with pytest.raises(Exception) as exc_info:
             fake_method()
         assert type(exc_info.value) == Exception
         assert exc_info.value._vcs_kind == expected_type

     def test_keeps_original_traceback(self):
         @hg.reraise_safe_exceptions
         def fake_method():
             try:
                 raise hgcompat.Abort()
             except:
                 self.original_traceback = traceback.format_tb(
                     sys.exc_info()[2])
                 raise

         try:
             fake_method()
         except Exception:
             new_traceback = traceback.format_tb(sys.exc_info()[2])

         new_traceback_tail = new_traceback[-len(self.original_traceback):]
         assert new_traceback_tail == self.original_traceback

     def test_maps_unknow_exceptions_to_unhandled(self):
         @hg.reraise_safe_exceptions
         def stub_method():
             raise ValueError('stub')

         with pytest.raises(Exception) as exc_info:
             stub_method()
         assert exc_info.value._vcs_kind == 'unhandled'

     def test_does_not_map_known_exceptions(self):
         @hg.reraise_safe_exceptions
         def stub_method():
             raise exceptions.LookupException()('stub')

         with pytest.raises(Exception) as exc_info:
             stub_method()
         assert exc_info.value._vcs_kind == 'lookup'
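`test_keeps_original_traceback` pins down a subtle requirement: when the decorator replaces an exception, the frames of the original failure must survive, otherwise the backend error location is lost. The re-raise pattern it implies looks roughly like this (a sketch under that assumption, not the hg module's code):

import sys

def translating_wrapper(func):
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as exc:
            new_exc = Exception('wrapped: {}'.format(exc))
            new_exc._vcs_kind = 'unhandled'
            # keep the original frames visible to callers
            raise new_exc.with_traceback(sys.exc_info()[2])
    return wrapper
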
@@ -1,39 +1,42 @@
 """
 Tests used to profile the HTTP based implementation.
 """

 import pytest
 import webtest

 from vcsserver.http_main import main


 @pytest.fixture
 def vcs_app():
     stub_settings = {
         'dev.use_echo_app': 'true',
         'locale': 'en_US.UTF-8',
     }
-    vcs_app = main({}, **stub_settings)
+    stub_global_conf = {
+        '__file__': ''
+    }
+    vcs_app = main(stub_global_conf, **stub_settings)
     app = webtest.TestApp(vcs_app)
     return app


 @pytest.fixture(scope='module')
 def data():
     one_kb = 'x' * 1024
     return one_kb * 1024 * 10


 def test_http_app_streaming_with_data(data, repeat, vcs_app):
     app = vcs_app
     for x in range(repeat // 10):
         response = app.post('/stream/git/', params=data)
         assert response.status_code == 200


 def test_http_app_streaming_no_data(repeat, vcs_app):
     app = vcs_app
     for x in range(repeat // 10):
         response = app.post('/stream/git/')
         assert response.status_code == 200
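Because `dev.use_echo_app` replaces the real VCS handlers with a trivial echo application, these loops measure only the WSGI and webtest plumbing, not any backend work. A single equivalent request, assuming the `vcs_app` fixture above, would be:

def test_single_stream_post(vcs_app):
    # one iteration of the loop the profiling tests repeat
    response = vcs_app.post('/stream/git/', params='x' * 1024)
    assert response.status_code == 200
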
@@ -1,57 +1,57 @@
 # RhodeCode VCSServer provides access to different vcs backends via network.
 # Copyright (C) 2014-2020 RhodeCode GmbH
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation; either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

 import mock
 import pytest

 from vcsserver import http_main
 from vcsserver.base import obfuscate_qs


 @mock.patch('vcsserver.http_main.VCS', mock.Mock())
 @mock.patch('vcsserver.hgpatches.patch_largefiles_capabilities')
 def test_applies_largefiles_patch(patch_largefiles_capabilities):
-    http_main.main({})
+    http_main.main({'__file__': ''})
     patch_largefiles_capabilities.assert_called_once_with()


 @mock.patch('vcsserver.http_main.VCS', mock.Mock())
 @mock.patch('vcsserver.http_main.MercurialFactory', None)
 @mock.patch(
     'vcsserver.hgpatches.patch_largefiles_capabilities',
     mock.Mock(side_effect=Exception("Must not be called")))
 def test_applies_largefiles_patch_only_if_mercurial_is_available():
-    http_main.main({})
+    http_main.main({'__file__': ''})


 @pytest.mark.parametrize('given, expected', [
     ('bad', 'bad'),
     ('query&foo=bar', 'query&foo=bar'),
     ('equery&auth_token=bar', 'equery&auth_token=*****'),
     ('a;b;c;query&foo=bar&auth_token=secret',
      'a&b&c&query&foo=bar&auth_token=*****'),
     ('', ''),
     (None, None),
     ('foo=bar', 'foo=bar'),
     ('auth_token=secret', 'auth_token=*****'),
     ('auth_token=secret&api_key=secret2',
      'auth_token=*****&api_key=*****'),
     ('auth_token=secret&api_key=secret2&param=value',
      'auth_token=*****&api_key=*****&param=value'),
 ])
 def test_obfuscate_qs(given, expected):
     assert expected == obfuscate_qs(given)
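The cases above pin down the contract of `obfuscate_qs`: values of secret-bearing keys are masked and `;` separators are normalized to `&`. A rough re-implementation that satisfies every listed case (a sketch, not the `vcsserver.base` source):

def obfuscate_qs_sketch(query_string, hidden_keys=('auth_token', 'api_key')):
    if not query_string:
        return query_string  # covers both '' and None
    parts = []
    for pair in query_string.replace(';', '&').split('&'):
        key, sep, _value = pair.partition('=')
        if sep and key in hidden_keys:
            parts.append(key + '=*****')
        else:
            parts.append(pair)
    return '&'.join(parts)
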
@@ -1,64 +1,107 @@
 # RhodeCode VCSServer provides access to different vcs backends via network.
 # Copyright (C) 2014-2020 RhodeCode GmbH
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation; either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

 import time
 import logging

 import vcsserver
 from vcsserver.utils import safe_str


 log = logging.getLogger(__name__)


-def get_access_path(request):
-    environ = request.environ
-    return environ.get('PATH_INFO')
+def get_access_path(environ):
+    path = environ.get('PATH_INFO')
+    return path


 def get_user_agent(environ):
     return environ.get('HTTP_USER_AGENT')


+def get_vcs_method(environ):
+    return environ.get('HTTP_X_RC_METHOD')
+
+
+def get_vcs_repo(environ):
+    return environ.get('HTTP_X_RC_REPO_NAME')
+
+
 class RequestWrapperTween(object):
     def __init__(self, handler, registry):
         self.handler = handler
         self.registry = registry

         # one-time configuration code goes here

     def __call__(self, request):
         start = time.time()
+        log.debug('Starting request time measurement')
+        response = None
+
+        ua = get_user_agent(request.environ)
+        vcs_method = get_vcs_method(request.environ)
+        repo_name = get_vcs_repo(request.environ)
+
         try:
             response = self.handler(request)
         finally:
-            end = time.time()
-            total = end - start
             count = request.request_count()
             _ver_ = vcsserver.__version__
+            _path = safe_str(get_access_path(request.environ))
+            ip = '127.0.0.1'
+            match_route = request.matched_route.name if request.matched_route else "NOT_FOUND"
+            resp_code = getattr(response, 'status_code', 'UNDEFINED')
+
+            total = time.time() - start
+
+            _view_path = "{}/{}@{}".format(_path, vcs_method, repo_name)
             log.info(
                 'Req[%4s] IP: %s %s Request to %s time: %.4fs [%s], VCSServer %s',
-                count, '127.0.0.1', request.environ.get('REQUEST_METHOD'),
-                safe_str(get_access_path(request)), total, get_user_agent(request.environ), _ver_)
+                count, ip, request.environ.get('REQUEST_METHOD'),
+                _view_path, total, ua, _ver_,
+                extra={"time": total, "ver": _ver_, "code": resp_code,
+                       "path": _path, "view_name": match_route, "user_agent": ua,
+                       "vcs_method": vcs_method, "repo_name": repo_name}
+            )
+
+            statsd = request.registry.statsd
+            if statsd:
+                match_route = request.matched_route.name if request.matched_route else _path
+                elapsed_time_ms = round(1000.0 * total)  # use ms only
+                statsd.timing(
+                    "vcsserver_req_timing.histogram", elapsed_time_ms,
+                    tags=[
+                        "view_name:{}".format(match_route),
+                        "code:{}".format(resp_code)
+                    ],
+                    use_decimals=False
+                )
+                statsd.incr(
+                    "vcsserver_req_total", tags=[
+                        "view_name:{}".format(match_route),
+                        "code:{}".format(resp_code)
+                    ])

         return response


 def includeme(config):
     config.add_tween(
         'vcsserver.tweens.request_wrapper.RequestWrapperTween',
     )
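Pyramid resolves the dotted name handed to `config.add_tween`, so the wrapper runs around every request once the module is included. Standard wiring would look like this (a sketch; the statsd client on `request.registry` is assumed to be configured elsewhere in the application setup):

from pyramid.config import Configurator

def make_app():
    config = Configurator()
    # executes includeme() above and registers RequestWrapperTween
    config.include('vcsserver.tweens.request_wrapper')
    return config.make_wsgi_app()
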
@@ -1,32 +1,46 @@
 # RhodeCode VCSServer provides access to different vcs backends via network.
 # Copyright (C) 2014-2020 RhodeCode GmbH
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation; either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

-
+from vcsserver.lib import rc_cache
 class RemoteBase(object):
     EMPTY_COMMIT = '0' * 40

-    @property
-    def region(self):
-        return self._factory._cache_region
+
+    def _region(self, wire):
+        cache_repo_id = wire.get('cache_repo_id', '')
+        cache_namespace_uid = 'cache_repo.{}'.format(cache_repo_id)
+        return rc_cache.get_or_create_region('repo_object', cache_namespace_uid)

     def _cache_on(self, wire):
         context = wire.get('context', '')
         context_uid = '{}'.format(context)
         repo_id = wire.get('repo_id', '')
         cache = wire.get('cache', True)
         cache_on = context and cache
         return cache_on, context_uid, repo_id
+
+    def vcsserver_invalidate_cache(self, wire, delete):
+        from vcsserver.lib import rc_cache
+        repo_id = wire.get('repo_id', '')
+        cache_repo_id = wire.get('cache_repo_id', '')
+        cache_namespace_uid = 'cache_repo.{}'.format(cache_repo_id)
+
+        if delete:
+            rc_cache.clear_cache_namespace(
+                'repo_object', cache_namespace_uid, invalidate=True)
+
+        return {'invalidated': {'repo_id': repo_id, 'delete': delete}}
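`_region` resolves a per-repository dogpile.cache region keyed by `cache_repo_id`, and `vcsserver_invalidate_cache` drops that namespace on demand. A hedged sketch of how a remote method might combine the two helpers (illustrative; the `conditional_cache_on_arguments` decorator and the `_load_commit_ids` helper are assumptions, not part of this hunk):

# Illustrative method on a RemoteBase subclass.
def commit_ids(self, wire):
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _commit_ids(_context_uid, _repo_id):
        return self._load_commit_ids(wire)  # hypothetical backend call

    return _commit_ids(context_uid, repo_id)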