##// END OF EJS Templates
python3: another 2to3 pass
super-admin -
r1054:545da466 python3
parent child Browse files
Show More
@@ -1,243 +1,243 b''
1 '''
1 '''
2 This library is provided to allow standard python logging
2 This library is provided to allow standard python logging
3 to output log data as JSON formatted strings
3 to output log data as JSON formatted strings
4 '''
4 '''
5 import logging
5 import logging
6 import json
6 import json
7 import re
7 import re
8 from datetime import date, datetime, time, tzinfo, timedelta
8 from datetime import date, datetime, time, tzinfo, timedelta
9 import traceback
9 import traceback
10 import importlib
10 import importlib
11
11
12 from inspect import istraceback
12 from inspect import istraceback
13
13
14 from collections import OrderedDict
14 from collections import OrderedDict
15
15
16
16
17 def _inject_req_id(record, *args, **kwargs):
17 def _inject_req_id(record, *args, **kwargs):
18 return record
18 return record
19
19
20
20
# The dedicated exception-aware formatter was dropped; keep the name as an
# alias of the stdlib Formatter so existing references continue to work.
ExceptionAwareFormatter = logging.Formatter


# Fixed offsets used by the UTC tzinfo implementation below.
ZERO = timedelta(0)
HOUR = timedelta(hours=1)
26
26
27
27
class UTC(tzinfo):
    """Concrete :class:`tzinfo` for UTC: zero offset, no DST."""

    def utcoffset(self, dt):
        return timedelta(0)

    def tzname(self, dt):
        return "UTC"

    def dst(self, dt):
        return timedelta(0)


# Module-level singleton used when stamping log records.
utc = UTC()
41
41
42
42
# Natural LogRecord attributes that must not be copied into the JSON payload
# as "extra" fields; see
# http://docs.python.org/library/logging.html#logrecord-attributes
RESERVED_ATTRS = (
    'args', 'asctime', 'created', 'exc_info', 'exc_text', 'filename',
    'funcName', 'levelname', 'levelno', 'lineno', 'module',
    'msecs', 'message', 'msg', 'name', 'pathname', 'process',
    'processName', 'relativeCreated', 'stack_info', 'thread', 'threadName')
50
50
51
51
def merge_record_extra(record, target, reserved):
    """
    Merges extra attributes from LogRecord object into target dictionary

    :param record: logging.LogRecord
    :param target: dict to update
    :param reserved: dict or list with reserved keys to skip
    :returns: the updated *target* dict
    """
    for attr, val in record.__dict__.items():
        if attr in reserved:
            continue
        # keys may be non-strings (e.g. numeric); only string-like keys
        # can be checked for the private "_" prefix
        if hasattr(attr, "startswith") and attr.startswith('_'):
            continue
        target[attr] = val
    return target
67
67
68
68
class JsonEncoder(json.JSONEncoder):
    """
    A custom encoder extending the default JSONEncoder

    Adds support for datetimes (ISO format), traceback objects,
    exceptions and classes; anything else falls back to ``str()``.
    """

    def default(self, obj):
        # date/datetime/time -> ISO-8601 string
        if isinstance(obj, (date, datetime, time)):
            return self.format_datetime_obj(obj)

        # traceback object -> formatted frames as a single string
        if istraceback(obj):
            return ''.join(traceback.format_tb(obj)).strip()

        # exceptions and plain classes -> their string representation
        if isinstance(obj, Exception) or type(obj) is type:
            return str(obj)

        try:
            return super(JsonEncoder, self).default(obj)
        except TypeError:
            # last resort: stringify, or give up with None
            try:
                return str(obj)
            except Exception:
                return None

    def format_datetime_obj(self, obj):
        """Render a date/datetime/time object; override to customize."""
        return obj.isoformat()
98
98
99
99
class JsonFormatter(ExceptionAwareFormatter):
    """
    A custom formatter to format logging records as json strings.
    Extra values will be formatted as str() if not supported by
    json default encoder
    """

    def __init__(self, *args, **kwargs):
        """
        :param json_default: a function for encoding non-standard objects
            as outlined in http://docs.python.org/2/library/json.html
        :param json_encoder: optional custom encoder
        :param json_serializer: a :meth:`json.dumps`-compatible callable
            that will be used to serialize the log record.
        :param json_indent: an optional :meth:`json.dumps`-compatible numeric value
            that will be used to customize the indent of the output json.
        :param json_ensure_ascii: ensure_ascii parameter for json.dumps
        :param prefix: an optional string prefix added at the beginning of
            the formatted string
        :param reserved_attrs: an optional list of fields that will be skipped when
            outputting json log record. Defaults to all log record attributes:
            http://docs.python.org/library/logging.html#logrecord-attributes
        :param timestamp: an optional string/boolean field to add a timestamp when
            outputting the json log record. If string is passed, timestamp will be added
            to log record using string as key. If True boolean is passed, timestamp key
            will be "timestamp". Defaults to True.
        """
        self.json_default = self._str_to_fn(kwargs.pop("json_default", None))
        self.json_encoder = self._str_to_fn(kwargs.pop("json_encoder", None))
        self.json_serializer = self._str_to_fn(kwargs.pop("json_serializer", json.dumps))
        self.json_indent = kwargs.pop("json_indent", None)
        self.json_ensure_ascii = kwargs.pop("json_ensure_ascii", True)
        self.prefix = kwargs.pop("prefix", "")
        reserved_attrs = kwargs.pop("reserved_attrs", RESERVED_ATTRS)
        # dict used (instead of list) for O(1) membership tests during merge
        self.reserved_attrs = dict(zip(reserved_attrs, reserved_attrs))
        self.timestamp = kwargs.pop("timestamp", True)

        logging.Formatter.__init__(self, *args, **kwargs)
        if not self.json_encoder and not self.json_default:
            self.json_encoder = JsonEncoder

        self._required_fields = self.parse()
        self._skip_fields = dict(zip(self._required_fields,
                                     self._required_fields))
        self._skip_fields.update(self.reserved_attrs)

    def _str_to_fn(self, fn_as_str):
        """
        If the argument is not a string, return whatever was passed in.
        Parses a string such as package.module.function, imports the module
        and returns the function.

        :param fn_as_str: The string to parse. If not a string, return it.
        """
        if not isinstance(fn_as_str, str):
            return fn_as_str

        path, _, function = fn_as_str.rpartition('.')
        module = importlib.import_module(path)
        return getattr(module, function)

    def parse(self):
        """
        Parses format string looking for substitutions

        This method is responsible for returning a list of fields (as strings)
        to include in all log messages.
        """
        standard_formatters = re.compile(r'\((.+?)\)', re.IGNORECASE)
        return standard_formatters.findall(self._fmt)

    def add_fields(self, log_record, record, message_dict):
        """
        Override this method to implement custom logic for adding fields.
        """
        for field in self._required_fields:
            log_record[field] = record.__dict__.get(field)
        log_record.update(message_dict)
        merge_record_extra(record, log_record, reserved=self._skip_fields)

        if self.timestamp:
            # a string value names the key; boolean True uses 'timestamp'
            key = self.timestamp if isinstance(self.timestamp, str) else 'timestamp'
            log_record[key] = datetime.fromtimestamp(record.created, tz=utc)

    def process_log_record(self, log_record):
        """
        Override this method to implement custom logic
        on the possibly ordered dictionary.
        """
        return log_record

    def jsonify_log_record(self, log_record):
        """Returns a json string of the log record."""
        return self.json_serializer(log_record,
                                    default=self.json_default,
                                    cls=self.json_encoder,
                                    indent=self.json_indent,
                                    ensure_ascii=self.json_ensure_ascii)

    def serialize_log_record(self, log_record):
        """Returns the final representation of the log record."""
        return "%s%s" % (self.prefix, self.jsonify_log_record(log_record))

    def format(self, record):
        """Formats a log record and serializes to json"""
        message_dict = {}
        # FIXME: logging.LogRecord.msg and logging.LogRecord.message in typeshed
        # are always type of str. We shouldn't need to override that.
        if isinstance(record.msg, dict):
            message_dict = record.msg
            record.message = None
        else:
            record.message = record.getMessage()
        # only format time if needed
        if "asctime" in self._required_fields:
            record.asctime = self.formatTime(record, self.datefmt)

        # Display formatted exception, but allow overriding it in the
        # user-supplied dict.
        if record.exc_info and not message_dict.get('exc_info'):
            message_dict['exc_info'] = self.formatException(record.exc_info)
        if not message_dict.get('exc_info') and record.exc_text:
            message_dict['exc_info'] = record.exc_text
        # Display formatted record of stack frames
        # default format is a string returned from :func:`traceback.print_stack`
        # (stack_info always exists on Python 3 LogRecord objects)
        if record.stack_info and not message_dict.get('stack_info'):
            message_dict['stack_info'] = self.formatStack(record.stack_info)

        # OrderedDict keeps the required fields first, in format order
        log_record = OrderedDict()

        _inject_req_id(record, with_prefix=False)
        self.add_fields(log_record, record, message_dict)
        log_record = self.process_log_record(log_record)

        return self.serialize_log_record(log_record)
@@ -1,65 +1,65 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2020 RhodeCode GmbH
4 # Copyright (C) 2014-2020 RhodeCode GmbH
5 #
5 #
6 # This program is free software; you can redistribute it and/or modify
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
9 # (at your option) any later version.
10 #
10 #
11 # This program is distributed in the hope that it will be useful,
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
14 # GNU General Public License for more details.
15 #
15 #
16 # You should have received a copy of the GNU General Public License
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
19
20
20
21 import logging
21 import logging
22
22
23 from repoze.lru import LRUCache
23 from repoze.lru import LRUCache
24
24
25 from vcsserver.utils import safe_str
25 from vcsserver.utils import safe_str
26
26
27 log = logging.getLogger(__name__)
27 log = logging.getLogger(__name__)
28
28
29
29
class LRUDict(LRUCache):
    """
    Wrapper to provide partial dict access
    """

    def __setitem__(self, key, value):
        return self.put(key, value)

    def __getitem__(self, key):
        return self.get(key)

    def __contains__(self, key):
        # NOTE(review): a stored falsy value (0, '', None) makes the key
        # appear absent; presumably acceptable for this cache — confirm
        # before relying on membership tests with falsy payloads.
        return bool(self.get(key))

    def __delitem__(self, key):
        del self.data[key]

    def keys(self):
        # materialized list (not a view) to keep py2-style semantics
        return list(self.data.keys())
49
49
50
50
class LRUDictDebug(LRUDict):
    """
    Wrapper to provide some debug options
    """

    def _report_keys(self):
        """Log the cache fill level (count/size) and every stored key."""
        current_keys = list(self.keys())
        elems_cnt = '%s/%s' % (len(current_keys), self.size)
        # trick for pformat print it more nicely
        fmt = '\n'
        for cnt, elem in enumerate(current_keys):
            fmt += '%s - %s\n' % (cnt + 1, safe_str(elem))
        log.debug('current LRU keys (%s):%s', elems_cnt, fmt)

    def __getitem__(self, key):
        # report state on every read access; debug backend only
        self._report_keys()
        return self.get(key)
@@ -1,330 +1,330 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import time
18 import time
19 import errno
19 import errno
20 import logging
20 import logging
21
21
22 import msgpack
22 import msgpack
23 import redis
23 import redis
24 import pickle
24 import pickle
25
25
26 from dogpile.cache.api import CachedValue
26 from dogpile.cache.api import CachedValue
27 from dogpile.cache.backends import memory as memory_backend
27 from dogpile.cache.backends import memory as memory_backend
28 from dogpile.cache.backends import file as file_backend
28 from dogpile.cache.backends import file as file_backend
29 from dogpile.cache.backends import redis as redis_backend
29 from dogpile.cache.backends import redis as redis_backend
30 from dogpile.cache.backends.file import NO_VALUE, FileLock
30 from dogpile.cache.backends.file import NO_VALUE, FileLock
31 from dogpile.cache.util import memoized_property
31 from dogpile.cache.util import memoized_property
32
32
33 from pyramid.settings import asbool
33 from pyramid.settings import asbool
34
34
35 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
35 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
36 from vcsserver.utils import safe_str
36 from vcsserver.utils import safe_str
37
37
38
38
39 _default_max_size = 1024
39 _default_max_size = 1024
40
40
41 log = logging.getLogger(__name__)
41 log = logging.getLogger(__name__)
42
42
43
43
class LRUMemoryBackend(memory_backend.MemoryBackend):
    """In-memory dogpile backend backed by a bounded LRU dictionary."""

    key_prefix = 'lru_mem_backend'
    pickle_values = False

    def __init__(self, arguments):
        max_size = arguments.pop('max_size', _default_max_size)

        # 'log_key_count' switches to the debug dict that logs every access
        dict_cls = LRUDictDebug if arguments.pop('log_key_count', None) else LRUDict
        arguments['cache_dict'] = dict_cls(max_size)
        super(LRUMemoryBackend, self).__init__(arguments)

    def delete(self, key):
        """Remove *key*; missing keys are silently ignored."""
        try:
            del self._cache[key]
        except KeyError:
            # we don't care if key isn't there at deletion
            pass

    def delete_multi(self, keys):
        """Remove every key in *keys* (missing keys ignored)."""
        for key in keys:
            self.delete(key)
68
68
69
69
class PickleSerializer(object):
    """Mixin that (de)serializes cache values with :mod:`pickle`."""

    def _dumps(self, value, safe=False):
        """Pickle *value*; with ``safe=True`` return NO_VALUE on failure
        instead of raising."""
        try:
            return pickle.dumps(value)
        except Exception:
            if not safe:
                raise
            return NO_VALUE

    def _loads(self, value, safe=True):
        """Unpickle *value*; with ``safe=True`` (default) return NO_VALUE
        on failure instead of raising.

        NOTE(review): pickle.loads is unsafe on untrusted bytes — the cache
        store is presumably trusted/local; confirm before exposing it.
        """
        try:
            return pickle.loads(value)
        except Exception:
            if not safe:
                raise
            return NO_VALUE
89
89
90
90
class MsgPackSerializer(object):
    """Mixin that (de)serializes cache values with msgpack."""

    def _dumps(self, value, safe=False):
        """Pack *value*; with ``safe=True`` return NO_VALUE on failure
        instead of raising."""
        try:
            return msgpack.packb(value)
        except Exception:
            if not safe:
                raise
            return NO_VALUE

    def _loads(self, value, safe=True):
        """
        pickle maintained the `CachedValue` wrapper of the tuple
        msgpack does not, so it must be added back in.
        """
        try:
            unpacked = msgpack.unpackb(value, use_list=False)
            return CachedValue(*unpacked)
        except Exception:
            if not safe:
                raise
            return NO_VALUE
115
115
116
116
import fcntl

# keep a reference to the original flock so it survives any monkey-patching
flock_org = fcntl.flock
119
119
120
120
class CustomLockFactory(FileLock):
    """File-based lock factory; currently identical to dogpile's FileLock,
    kept as an extension point for custom locking behavior."""

    pass
124
124
125
125
class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
    """DBM-file dogpile backend that stores pickled values under a
    namespace prefix (``file_backend:<key>``)."""

    key_prefix = 'file_backend'

    def __init__(self, arguments):
        arguments['lock_factory'] = CustomLockFactory
        db_file = arguments.get('filename')

        # fixed typo in operator-facing log line ('initialing' -> 'initializing')
        log.debug('initializing %s DB in %s', self.__class__.__name__, db_file)
        try:
            super(FileNamespaceBackend, self).__init__(arguments)
        except Exception:
            log.exception('Failed to initialize db at: %s', db_file)
            raise

    def __repr__(self):
        return '{} `{}`'.format(self.__class__, self.filename)

    def list_keys(self, prefix=''):
        """Return all stored keys starting with ``key_prefix:prefix``."""
        prefix = '{}:{}'.format(self.key_prefix, prefix)

        def cond(v):
            if not prefix:
                return True

            if v.startswith(prefix):
                return True
            return False

        with self._dbm_file(True) as dbm:
            try:
                return list(filter(cond, list(dbm.keys())))
            except Exception:
                log.error('Failed to fetch DBM keys from DB: %s', self.get_store())
                raise

    def get_store(self):
        """Return the backing DBM filename (used in log messages)."""
        return self.filename

    def _dbm_get(self, key):
        """Fetch and unpickle *key* from the DBM file; NO_VALUE if absent."""
        with self._dbm_file(False) as dbm:
            if hasattr(dbm, 'get'):
                value = dbm.get(key, NO_VALUE)
            else:
                # gdbm objects lack a .get method
                try:
                    value = dbm[key]
                except KeyError:
                    value = NO_VALUE
            if value is not NO_VALUE:
                value = self._loads(value)
            return value

    def get(self, key):
        try:
            return self._dbm_get(key)
        except Exception:
            log.error('Failed to fetch DBM key %s from DB: %s', key, self.get_store())
            raise

    def set(self, key, value):
        with self._dbm_file(True) as dbm:
            dbm[key] = self._dumps(value)

    def set_multi(self, mapping):
        with self._dbm_file(True) as dbm:
            for key, value in mapping.items():
                dbm[key] = self._dumps(value)
193
193
194
194
class BaseRedisBackend(redis_backend.RedisBackend):
    """Shared Redis-backed dogpile backend.

    Adds distributed locking (via ``get_mutex_lock``) with optional
    auto-renewal on top of dogpile's stock RedisBackend.
    """
    key_prefix = ''

    def __init__(self, arguments):
        super(BaseRedisBackend, self).__init__(arguments)
        self._lock_timeout = self.lock_timeout
        self._lock_auto_renewal = asbool(arguments.pop("lock_auto_renewal", True))

        if self._lock_auto_renewal and not self._lock_timeout:
            # auto-renewal needs a finite expiry it can keep extending
            self._lock_timeout = 30

    def _create_client(self):
        # Prefer a full connection URL when configured; otherwise fall
        # back to the discrete host/port/db/password settings.
        if self.url is not None:
            pool_args = {'url': self.url}
        else:
            pool_args = {
                'host': self.host,
                'password': self.password,
                'port': self.port,
                'db': self.db,
            }
        pool = redis.ConnectionPool(**pool_args)
        return redis.StrictRedis(connection_pool=pool)

    def list_keys(self, prefix=''):
        # keys are namespaced as "<key_prefix>:<prefix>..."
        return self.client.keys(f'{self.key_prefix}:{prefix}*')

    def get_store(self):
        return self.client.connection_pool

    def get(self, key):
        raw = self.client.get(key)
        return NO_VALUE if raw is None else self._loads(raw)

    def get_multi(self, keys):
        if not keys:
            return []
        loads = self._loads
        return [
            NO_VALUE if raw is None else loads(raw)
            for raw in self.client.mget(keys)
        ]

    def set(self, key, value):
        payload = self._dumps(value)
        if self.redis_expiration_time:
            self.client.setex(key, self.redis_expiration_time, payload)
        else:
            self.client.set(key, payload)

    def set_multi(self, mapping):
        serialized = {k: self._dumps(v) for k, v in mapping.items()}

        if not self.redis_expiration_time:
            self.client.mset(serialized)
        else:
            # MSET has no TTL support; pipeline per-key SETEX instead
            pipe = self.client.pipeline()
            for cache_key, payload in serialized.items():
                pipe.setex(cache_key, self.redis_expiration_time, payload)
            pipe.execute()

    def get_mutex(self, key):
        # returns None when distributed locking is disabled, which makes
        # dogpile fall back to its default (thread-local) mutex
        if not self.distributed_lock:
            return None
        lock_key = f'_lock_{safe_str(key)}'
        return get_mutex_lock(self.client, lock_key, self._lock_timeout,
                              auto_renewal=self._lock_auto_renewal)
274
274
275
275
class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
    """Redis backend serializing cached values with pickle."""
    key_prefix = 'redis_pickle_backend'
279
279
280
280
class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
    """Redis backend serializing cached values with msgpack."""
    key_prefix = 'redis_msgpack_backend'
284
284
285
285
def get_mutex_lock(client, lock_key, lock_timeout, auto_renewal=False):
    """Build a dogpile-compatible mutex backed by ``redis_lock.Lock``.

    The returned object exposes acquire()/release() as dogpile expects.
    """
    import redis_lock

    class _RedisLockWrapper(object):
        """LockWrapper for redis_lock"""

        @classmethod
        def get_lock(cls):
            return redis_lock.Lock(
                redis_client=client,
                name=lock_key,
                expire=lock_timeout,
                auto_renewal=auto_renewal,
                strict=True,
            )

        def __init__(self):
            self.lock = self.get_lock()
            self.lock_key = lock_key

        def __repr__(self):
            return f'{self.__class__.__name__}:{lock_key}'

        def __str__(self):
            return f'{self.__class__.__name__}:{lock_key}'

        def acquire(self, wait=True):
            log.debug('Trying to acquire Redis lock for key %s', self.lock_key)
            try:
                acquired = self.lock.acquire(wait)
                log.debug('Got lock for key %s, %s', self.lock_key, acquired)
                return acquired
            except redis_lock.AlreadyAcquired:
                # this process already holds the lock
                return False
            except redis_lock.AlreadyStarted:
                # refresh thread exists, but it also means we acquired the lock
                return True

        def release(self):
            try:
                self.lock.release()
            except redis_lock.NotAcquired:
                # releasing a lock we never got is a no-op
                pass

    return _RedisLockWrapper()
@@ -1,207 +1,207 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import time
19 import time
20 import logging
20 import logging
21 import functools
21 import functools
22 import decorator
22 import decorator
23
23
24 from dogpile.cache import CacheRegion
24 from dogpile.cache import CacheRegion
25
25
26 from vcsserver.utils import safe_bytes, sha1
26 from vcsserver.utils import safe_bytes, sha1
27 from vcsserver.lib.rc_cache import region_meta
27 from vcsserver.lib.rc_cache import region_meta
28
28
29 log = logging.getLogger(__name__)
29 log = logging.getLogger(__name__)
30
30
31
31
class RhodeCodeCacheRegion(CacheRegion):
    """dogpile CacheRegion extended with a conditionally-applied cache decorator."""

    def conditional_cache_on_arguments(
            self, namespace=None,
            expiration_time=None,
            should_cache_fn=None,
            to_str=str,
            function_key_generator=None,
            condition=True):
        """
        Custom conditional decorator, that will not touch any dogpile internals if
        condition isn't meet. This works a bit different than should_cache_fn
        And it's faster in cases we don't ever want to compute cached values
        """
        # expiration_time may be a callable evaluated per-call (see below)
        expiration_time_is_callable = callable(expiration_time)

        if function_key_generator is None:
            function_key_generator = self.function_key_generator

        def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):

            # condition=False bypasses dogpile entirely: call through,
            # timing the un-cached invocation for the debug log
            if not condition:
                log.debug('Calling un-cached method:%s', user_func.__name__)
                start = time.time()
                result = user_func(*arg, **kw)
                total = time.time() - start
                log.debug('un-cached method:%s took %.4fs', user_func.__name__, total)
                return result

            key = key_generator(*arg, **kw)

            # re-evaluate callable expiration on every cached call
            timeout = expiration_time() if expiration_time_is_callable \
                else expiration_time

            log.debug('Calling cached method:`%s`', user_func.__name__)
            return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))

        def cache_decorator(user_func):
            if to_str is str:
                # backwards compatible
                key_generator = function_key_generator(namespace, user_func)
            else:
                key_generator = function_key_generator(namespace, user_func, to_str=to_str)

            def refresh(*arg, **kw):
                """
                Like invalidate, but regenerates the value instead
                """
                key = key_generator(*arg, **kw)
                value = user_func(*arg, **kw)
                self.set(key, value)
                return value

            def invalidate(*arg, **kw):
                key = key_generator(*arg, **kw)
                self.delete(key)

            def set_(value, *arg, **kw):
                key = key_generator(*arg, **kw)
                self.set(key, value)

            def get(*arg, **kw):
                key = key_generator(*arg, **kw)
                return self.get(key)

            # expose cache-management helpers directly on the decorated
            # function (mirrors dogpile's cache_on_arguments API)
            user_func.set = set_
            user_func.invalidate = invalidate
            user_func.get = get
            user_func.refresh = refresh
            user_func.key_generator = key_generator
            user_func.original = user_func

            # Use `decorate` to preserve the signature of :param:`user_func`.
            return decorator.decorate(user_func, functools.partial(
                get_or_create_for_user_func, key_generator))

        return cache_decorator
109
109
110
110
def make_region(*arg, **kw):
    """Drop-in replacement for dogpile's make_region returning RhodeCodeCacheRegion."""
    return RhodeCodeCacheRegion(*arg, **kw)
113
113
114
114
def get_default_cache_settings(settings, prefixes=None):
    """Collect settings whose keys start with any of *prefixes*.

    Returns a dict keyed by the setting name with the prefix stripped;
    string values are whitespace-trimmed.
    """
    prefixes = prefixes or []
    cache_settings = {}
    for full_key in settings:
        for prefix in prefixes:
            if not full_key.startswith(prefix):
                continue
            stripped_name = full_key.split(prefix)[1].strip()
            value = settings[full_key]
            if isinstance(value, str):
                value = value.strip()
            cache_settings[stripped_name] = value
    return cache_settings
127
127
128
128
def compute_key_from_params(*args):
    """
    Helper to compute key from given params to be used in cache manager
    """
    joined = "_".join(str(arg) for arg in args)
    return sha1(safe_bytes(joined))
134
134
135
135
def backend_key_generator(backend):
    """
    Special wrapper that also sends over the backend to the key generator
    """
    def _with_backend(namespace, fn):
        # forward the captured backend to the regular key generator
        return key_generator(backend, namespace, fn)
    return _with_backend
143
143
144
144
def key_generator(backend, namespace, fn):
    """Build a key function producing '<backend_prefix>:<namespace>:<fname>_<args_sha>'."""
    fname = fn.__name__

    def generate_key(*args):
        # fall back to generic labels when backend/namespace give none
        prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
        namespace_pref = namespace or 'default_namespace'
        arg_key = compute_key_from_params(*args)
        return f'{prefix}:{namespace_pref}:{fname}_{arg_key}'

    return generate_key
157
157
158
158
def get_or_create_region(region_name, region_namespace=None):
    """Fetch a configured dogpile region by name.

    For file-namespace backends a dedicated region (one dbm file per
    *region_namespace*) is lazily created, configured and memoized in
    ``region_meta.dogpile_cache_regions``.

    :raises EnvironmentError: when *region_name* was never configured.
    """
    from vcsserver.lib.rc_cache.backends import FileNamespaceBackend
    region_obj = region_meta.dogpile_cache_regions.get(region_name)
    if not region_obj:
        raise EnvironmentError(
            'Region `{}` not in configured: {}.'.format(
                region_name, list(region_meta.dogpile_cache_regions.keys())))

    region_uid_name = '{}:{}'.format(region_name, region_namespace)
    if isinstance(region_obj.actual_backend, FileNamespaceBackend):
        # NOTE(review): per-namespace regions are memoized under the
        # *namespace* key, alongside region names — confirm no namespace
        # can collide with a configured region name.
        region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
        if region_exist:
            log.debug('Using already configured region: %s', region_namespace)
            return region_exist
        cache_dir = region_meta.dogpile_config_defaults['cache_dir']
        expiration_time = region_obj.expiration_time

        if not os.path.isdir(cache_dir):
            os.makedirs(cache_dir)
        new_region = make_region(
            name=region_uid_name,
            function_key_generator=backend_key_generator(region_obj.actual_backend)
        )
        namespace_filename = os.path.join(
            cache_dir, "{}.cache.dbm".format(region_namespace))
        # special type that allows 1db per namespace
        new_region.configure(
            backend='dogpile.cache.rc.file_namespace',
            expiration_time=expiration_time,
            arguments={"filename": namespace_filename}
        )

        # create and save in region caches
        log.debug('configuring new region: %s', region_uid_name)
        region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region

    return region_obj
196
196
197
197
def clear_cache_namespace(cache_region, cache_namespace_uid, invalidate=False):
    """Remove all keys under a namespace; returns how many keys were found.

    With invalidate=True the region is soft-invalidated instead of the
    keys being physically deleted.
    """
    region = get_or_create_region(cache_region, cache_namespace_uid)
    cache_keys = region.backend.list_keys(prefix=cache_namespace_uid)
    num_delete_keys = len(cache_keys)
    if invalidate:
        region.invalidate(hard=False)
    elif num_delete_keys:
        region.delete_multi(cache_keys)
    return num_delete_keys
@@ -1,864 +1,864 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19 import os
19 import os
20 import subprocess
20 import subprocess
21 from urllib.error import URLError
21 from urllib.error import URLError
22 import urllib.parse
22 import urllib.parse
23 import logging
23 import logging
24 import posixpath as vcspath
24 import posixpath as vcspath
25 import io
25 import io
26 import urllib.request
26 import urllib.request
27 import urllib.parse
27 import urllib.parse
28 import urllib.error
28 import urllib.error
29 import traceback
29 import traceback
30
30
31 import svn.client
31 import svn.client
32 import svn.core
32 import svn.core
33 import svn.delta
33 import svn.delta
34 import svn.diff
34 import svn.diff
35 import svn.fs
35 import svn.fs
36 import svn.repos
36 import svn.repos
37
37
38 from vcsserver import svn_diff, exceptions, subprocessio, settings
38 from vcsserver import svn_diff, exceptions, subprocessio, settings
39 from vcsserver.base import RepoFactory, raise_from_original, ArchiveNode, archive_repo
39 from vcsserver.base import RepoFactory, raise_from_original, ArchiveNode, archive_repo
40 from vcsserver.exceptions import NoContentException
40 from vcsserver.exceptions import NoContentException
41 from vcsserver.utils import safe_str
41 from vcsserver.utils import safe_str
42 from vcsserver.vcs_base import RemoteBase
42 from vcsserver.vcs_base import RemoteBase
43 from vcsserver.lib.svnremoterepo import svnremoterepo
43 from vcsserver.lib.svnremoterepo import svnremoterepo
44 log = logging.getLogger(__name__)
44 log = logging.getLogger(__name__)
45
45
46
46
# Maps the legacy 'pre-1.X-compatible' repository creation flags to the
# concrete SVN version string they correspond to.
svn_compatible_versions_map = {
    'pre-1.4-compatible': '1.3',
    'pre-1.5-compatible': '1.4',
    'pre-1.6-compatible': '1.5',
    'pre-1.8-compatible': '1.7',
    'pre-1.9-compatible': '1.8',
}

# Default 'compatible-version' used when creating new repositories.
current_compatible_version = '1.14'
56
56
57
57
def reraise_safe_exceptions(func):
    """Decorator for converting svn exceptions to something neutral.

    Unexpected exceptions (anything without a ``_vcs_kind`` marker) are
    logged and re-raised as exceptions.UnhandledException so they can
    cross the wire safely; marked exceptions propagate unchanged.
    """
    # local import: this module does not otherwise use functools
    import functools

    @functools.wraps(func)  # preserve __name__/__doc__ of the wrapped call
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            if not hasattr(e, '_vcs_kind'):
                log.exception("Unhandled exception in svn remote call")
                raise_from_original(exceptions.UnhandledException(e))
            raise
    return wrapper
69
69
70
70
class SubversionFactory(RepoFactory):
    """Factory opening (or creating) libsvn repository objects."""
    repo_type = 'svn'

    def _create_repo(self, wire, create, compatible_version):
        path = svn.core.svn_path_canonicalize(wire['path'])
        if not create:
            repo = svn.repos.open(path)
        else:
            fs_config = {'compatible-version': current_compatible_version}
            if compatible_version:
                # accept either a legacy 'pre-1.X-compatible' flag or a
                # plain version string
                fs_config['compatible-version'] = (
                    svn_compatible_versions_map.get(compatible_version)
                    or compatible_version)

            log.debug('Create SVN repo with config "%s"', fs_config)
            repo = svn.repos.create(path, "", "", None, fs_config)

        log.debug('Got SVN object: %s', repo)
        return repo

    def repo(self, wire, create=False, compatible_version=None):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create, compatible_version)
98
98
99
99
# Translate libsvn node-kind constants into the plain strings used by
# the remote API responses.
NODE_TYPE_MAPPING = {
    svn.core.svn_node_file: 'file',
    svn.core.svn_node_dir: 'dir',
}
104
104
105
105
106 class SvnRemote(RemoteBase):
106 class SvnRemote(RemoteBase):
107
107
def __init__(self, factory, hg_factory=None):
    # `hg_factory` is accepted for signature parity with the other
    # remote implementations but is not used by the SVN remote.
    self._factory = factory
110
110
@reraise_safe_exceptions
def discover_svn_version(self):
    """Return the SVN_VERSION string of the bindings, or None when unavailable."""
    try:
        import svn.core
    except ImportError:
        return None
    return svn.core.SVN_VERSION
119
119
@reraise_safe_exceptions
def is_empty(self, wire):
    """Return True when the repository has no revisions (HEAD == 0).

    Any failure while resolving HEAD is logged and reported as
    "not empty" (False) rather than raised.
    """

    try:
        return self.lookup(wire, -1) == 0
    except Exception:
        log.exception("failed to read object_store")
        return False
128
128
def check_url(self, url):
    """Validate that *url* points at a reachable Subversion repository.

    :raises URLError: when the repository uuid lookup fails.
    :returns: True on success.
    """
    # the uuid lookup only succeeds against a proper repo, else it raises
    username, password, src_url = self.get_url_and_credentials(url)
    try:
        svnremoterepo(username, password, src_url).svn().uuid
    except Exception:
        log.debug("Invalid Subversion url: `%s`, tb: %s",
                  url, traceback.format_exc())
        raise URLError(
            '"%s" is not a valid Subversion source url.' % (url, ))
    return True
142
142
def is_path_valid_repository(self, wire, path):
    """Return True when *path* holds an openable Subversion repository."""

    # NOTE(marcink): short circuit the check for SVN repo
    # the repos.open might be expensive to check, but we have one cheap
    # pre condition that we can use, to check for 'format' file
    if not os.path.isfile(os.path.join(path, 'format')):
        return False

    try:
        svn.repos.open(path)
    except svn.core.SubversionException:
        log.debug("Invalid Subversion path `%s`, tb: %s",
                  path, traceback.format_exc())
        return False
    return True
159
159
@reraise_safe_exceptions
def verify(self, wire):
    """Run `svnadmin info` against the repo; raises when the path is invalid."""
    repo_path = wire['path']
    if not self.is_path_valid_repository(wire, repo_path):
        raise Exception(
            "Path %s is not a valid Subversion repository." % repo_path)

    stdout, stderr = subprocessio.run_command(
        ['svnadmin', 'info', repo_path])
    return stdout
170
170
def lookup(self, wire, revision):
    """Resolve *revision* to a revision number; only HEAD is supported."""
    if revision not in [-1, None, 'HEAD']:
        raise NotImplementedError
    repo = self._factory.repo(wire)
    fs_ptr = svn.repos.fs(repo)
    return svn.fs.youngest_rev(fs_ptr)
178
178
def lookup_interval(self, wire, start_ts, end_ts):
    """Map a [start_ts, end_ts] timestamp window onto (start_rev, end_rev)."""
    repo = self._factory.repo(wire)
    fsobj = svn.repos.fs(repo)

    if start_ts:
        # +1: dated_revision yields the last revision at/before the
        # timestamp, so the interval starts just after it
        start_rev = svn.repos.dated_revision(repo, apr_time_t(start_ts)) + 1
    else:
        start_rev = 1

    if end_ts:
        end_rev = svn.repos.dated_revision(repo, apr_time_t(end_ts))
    else:
        end_rev = svn.fs.youngest_rev(fsobj)

    return start_rev, end_rev
195
195
def revision_properties(self, wire, revision):
    """Return the revision property list for *revision*, via the cache region."""

    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)
    @region.conditional_cache_on_arguments(condition=cache_on)
    def _revision_properties(_repo_id, _revision):
        # _repo_id/_revision only shape the cache key; the actual values
        # come from the closed-over `wire`/`revision`
        repo = self._factory.repo(wire)
        fs_ptr = svn.repos.fs(repo)
        return svn.fs.revision_proplist(fs_ptr, revision)
    return _revision_properties(repo_id, revision)
206
206
207 def revision_changes(self, wire, revision):
207 def revision_changes(self, wire, revision):
208
208
209 repo = self._factory.repo(wire)
209 repo = self._factory.repo(wire)
210 fsobj = svn.repos.fs(repo)
210 fsobj = svn.repos.fs(repo)
211 rev_root = svn.fs.revision_root(fsobj, revision)
211 rev_root = svn.fs.revision_root(fsobj, revision)
212
212
213 editor = svn.repos.ChangeCollector(fsobj, rev_root)
213 editor = svn.repos.ChangeCollector(fsobj, rev_root)
214 editor_ptr, editor_baton = svn.delta.make_editor(editor)
214 editor_ptr, editor_baton = svn.delta.make_editor(editor)
215 base_dir = ""
215 base_dir = ""
216 send_deltas = False
216 send_deltas = False
217 svn.repos.replay2(
217 svn.repos.replay2(
218 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
218 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
219 editor_ptr, editor_baton, None)
219 editor_ptr, editor_baton, None)
220
220
221 added = []
221 added = []
222 changed = []
222 changed = []
223 removed = []
223 removed = []
224
224
225 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
225 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
226 for path, change in editor.changes.items():
226 for path, change in editor.changes.items():
227 # TODO: Decide what to do with directory nodes. Subversion can add
227 # TODO: Decide what to do with directory nodes. Subversion can add
228 # empty directories.
228 # empty directories.
229
229
230 if change.item_kind == svn.core.svn_node_dir:
230 if change.item_kind == svn.core.svn_node_dir:
231 continue
231 continue
232 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
232 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
233 added.append(path)
233 added.append(path)
234 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
234 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
235 svn.repos.CHANGE_ACTION_REPLACE]:
235 svn.repos.CHANGE_ACTION_REPLACE]:
236 changed.append(path)
236 changed.append(path)
237 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
237 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
238 removed.append(path)
238 removed.append(path)
239 else:
239 else:
240 raise NotImplementedError(
240 raise NotImplementedError(
241 "Action %s not supported on path %s" % (
241 "Action %s not supported on path %s" % (
242 change.action, path))
242 change.action, path))
243
243
244 changes = {
244 changes = {
245 'added': added,
245 'added': added,
246 'changed': changed,
246 'changed': changed,
247 'removed': removed,
247 'removed': removed,
248 }
248 }
249 return changes
249 return changes
250
250
251 @reraise_safe_exceptions
251 @reraise_safe_exceptions
252 def node_history(self, wire, path, revision, limit):
252 def node_history(self, wire, path, revision, limit):
253 cache_on, context_uid, repo_id = self._cache_on(wire)
253 cache_on, context_uid, repo_id = self._cache_on(wire)
254 region = self._region(wire)
254 region = self._region(wire)
255 @region.conditional_cache_on_arguments(condition=cache_on)
255 @region.conditional_cache_on_arguments(condition=cache_on)
256 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
256 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
257 cross_copies = False
257 cross_copies = False
258 repo = self._factory.repo(wire)
258 repo = self._factory.repo(wire)
259 fsobj = svn.repos.fs(repo)
259 fsobj = svn.repos.fs(repo)
260 rev_root = svn.fs.revision_root(fsobj, revision)
260 rev_root = svn.fs.revision_root(fsobj, revision)
261
261
262 history_revisions = []
262 history_revisions = []
263 history = svn.fs.node_history(rev_root, path)
263 history = svn.fs.node_history(rev_root, path)
264 history = svn.fs.history_prev(history, cross_copies)
264 history = svn.fs.history_prev(history, cross_copies)
265 while history:
265 while history:
266 __, node_revision = svn.fs.history_location(history)
266 __, node_revision = svn.fs.history_location(history)
267 history_revisions.append(node_revision)
267 history_revisions.append(node_revision)
268 if limit and len(history_revisions) >= limit:
268 if limit and len(history_revisions) >= limit:
269 break
269 break
270 history = svn.fs.history_prev(history, cross_copies)
270 history = svn.fs.history_prev(history, cross_copies)
271 return history_revisions
271 return history_revisions
272 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
272 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
273
273
274 def node_properties(self, wire, path, revision):
274 def node_properties(self, wire, path, revision):
275 cache_on, context_uid, repo_id = self._cache_on(wire)
275 cache_on, context_uid, repo_id = self._cache_on(wire)
276 region = self._region(wire)
276 region = self._region(wire)
277 @region.conditional_cache_on_arguments(condition=cache_on)
277 @region.conditional_cache_on_arguments(condition=cache_on)
278 def _node_properties(_repo_id, _path, _revision):
278 def _node_properties(_repo_id, _path, _revision):
279 repo = self._factory.repo(wire)
279 repo = self._factory.repo(wire)
280 fsobj = svn.repos.fs(repo)
280 fsobj = svn.repos.fs(repo)
281 rev_root = svn.fs.revision_root(fsobj, revision)
281 rev_root = svn.fs.revision_root(fsobj, revision)
282 return svn.fs.node_proplist(rev_root, path)
282 return svn.fs.node_proplist(rev_root, path)
283 return _node_properties(repo_id, path, revision)
283 return _node_properties(repo_id, path, revision)
284
284
285 def file_annotate(self, wire, path, revision):
285 def file_annotate(self, wire, path, revision):
286 abs_path = 'file://' + urllib.pathname2url(
286 abs_path = 'file://' + urllib.request.pathname2url(
287 vcspath.join(wire['path'], path))
287 vcspath.join(wire['path'], path))
288 file_uri = svn.core.svn_path_canonicalize(abs_path)
288 file_uri = svn.core.svn_path_canonicalize(abs_path)
289
289
290 start_rev = svn_opt_revision_value_t(0)
290 start_rev = svn_opt_revision_value_t(0)
291 peg_rev = svn_opt_revision_value_t(revision)
291 peg_rev = svn_opt_revision_value_t(revision)
292 end_rev = peg_rev
292 end_rev = peg_rev
293
293
294 annotations = []
294 annotations = []
295
295
296 def receiver(line_no, revision, author, date, line, pool):
296 def receiver(line_no, revision, author, date, line, pool):
297 annotations.append((line_no, revision, line))
297 annotations.append((line_no, revision, line))
298
298
299 # TODO: Cannot use blame5, missing typemap function in the swig code
299 # TODO: Cannot use blame5, missing typemap function in the swig code
300 try:
300 try:
301 svn.client.blame2(
301 svn.client.blame2(
302 file_uri, peg_rev, start_rev, end_rev,
302 file_uri, peg_rev, start_rev, end_rev,
303 receiver, svn.client.create_context())
303 receiver, svn.client.create_context())
304 except svn.core.SubversionException as exc:
304 except svn.core.SubversionException as exc:
305 log.exception("Error during blame operation.")
305 log.exception("Error during blame operation.")
306 raise Exception(
306 raise Exception(
307 "Blame not supported or file does not exist at path %s. "
307 "Blame not supported or file does not exist at path %s. "
308 "Error %s." % (path, exc))
308 "Error %s." % (path, exc))
309
309
310 return annotations
310 return annotations
311
311
312 def get_node_type(self, wire, path, revision=None):
312 def get_node_type(self, wire, path, revision=None):
313
313
314 cache_on, context_uid, repo_id = self._cache_on(wire)
314 cache_on, context_uid, repo_id = self._cache_on(wire)
315 region = self._region(wire)
315 region = self._region(wire)
316 @region.conditional_cache_on_arguments(condition=cache_on)
316 @region.conditional_cache_on_arguments(condition=cache_on)
317 def _get_node_type(_repo_id, _path, _revision):
317 def _get_node_type(_repo_id, _path, _revision):
318 repo = self._factory.repo(wire)
318 repo = self._factory.repo(wire)
319 fs_ptr = svn.repos.fs(repo)
319 fs_ptr = svn.repos.fs(repo)
320 if _revision is None:
320 if _revision is None:
321 _revision = svn.fs.youngest_rev(fs_ptr)
321 _revision = svn.fs.youngest_rev(fs_ptr)
322 root = svn.fs.revision_root(fs_ptr, _revision)
322 root = svn.fs.revision_root(fs_ptr, _revision)
323 node = svn.fs.check_path(root, path)
323 node = svn.fs.check_path(root, path)
324 return NODE_TYPE_MAPPING.get(node, None)
324 return NODE_TYPE_MAPPING.get(node, None)
325 return _get_node_type(repo_id, path, revision)
325 return _get_node_type(repo_id, path, revision)
326
326
327 def get_nodes(self, wire, path, revision=None):
327 def get_nodes(self, wire, path, revision=None):
328
328
329 cache_on, context_uid, repo_id = self._cache_on(wire)
329 cache_on, context_uid, repo_id = self._cache_on(wire)
330 region = self._region(wire)
330 region = self._region(wire)
331 @region.conditional_cache_on_arguments(condition=cache_on)
331 @region.conditional_cache_on_arguments(condition=cache_on)
332 def _get_nodes(_repo_id, _path, _revision):
332 def _get_nodes(_repo_id, _path, _revision):
333 repo = self._factory.repo(wire)
333 repo = self._factory.repo(wire)
334 fsobj = svn.repos.fs(repo)
334 fsobj = svn.repos.fs(repo)
335 if _revision is None:
335 if _revision is None:
336 _revision = svn.fs.youngest_rev(fsobj)
336 _revision = svn.fs.youngest_rev(fsobj)
337 root = svn.fs.revision_root(fsobj, _revision)
337 root = svn.fs.revision_root(fsobj, _revision)
338 entries = svn.fs.dir_entries(root, path)
338 entries = svn.fs.dir_entries(root, path)
339 result = []
339 result = []
340 for entry_path, entry_info in entries.items():
340 for entry_path, entry_info in entries.items():
341 result.append(
341 result.append(
342 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
342 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
343 return result
343 return result
344 return _get_nodes(repo_id, path, revision)
344 return _get_nodes(repo_id, path, revision)
345
345
346 def get_file_content(self, wire, path, rev=None):
346 def get_file_content(self, wire, path, rev=None):
347 repo = self._factory.repo(wire)
347 repo = self._factory.repo(wire)
348 fsobj = svn.repos.fs(repo)
348 fsobj = svn.repos.fs(repo)
349 if rev is None:
349 if rev is None:
350 rev = svn.fs.youngest_revision(fsobj)
350 rev = svn.fs.youngest_revision(fsobj)
351 root = svn.fs.revision_root(fsobj, rev)
351 root = svn.fs.revision_root(fsobj, rev)
352 content = svn.core.Stream(svn.fs.file_contents(root, path))
352 content = svn.core.Stream(svn.fs.file_contents(root, path))
353 return content.read()
353 return content.read()
354
354
355 def get_file_size(self, wire, path, revision=None):
355 def get_file_size(self, wire, path, revision=None):
356
356
357 cache_on, context_uid, repo_id = self._cache_on(wire)
357 cache_on, context_uid, repo_id = self._cache_on(wire)
358 region = self._region(wire)
358 region = self._region(wire)
359
359
360 @region.conditional_cache_on_arguments(condition=cache_on)
360 @region.conditional_cache_on_arguments(condition=cache_on)
361 def _get_file_size(_repo_id, _path, _revision):
361 def _get_file_size(_repo_id, _path, _revision):
362 repo = self._factory.repo(wire)
362 repo = self._factory.repo(wire)
363 fsobj = svn.repos.fs(repo)
363 fsobj = svn.repos.fs(repo)
364 if _revision is None:
364 if _revision is None:
365 _revision = svn.fs.youngest_revision(fsobj)
365 _revision = svn.fs.youngest_revision(fsobj)
366 root = svn.fs.revision_root(fsobj, _revision)
366 root = svn.fs.revision_root(fsobj, _revision)
367 size = svn.fs.file_length(root, path)
367 size = svn.fs.file_length(root, path)
368 return size
368 return size
369 return _get_file_size(repo_id, path, revision)
369 return _get_file_size(repo_id, path, revision)
370
370
371 def create_repository(self, wire, compatible_version=None):
371 def create_repository(self, wire, compatible_version=None):
372 log.info('Creating Subversion repository in path "%s"', wire['path'])
372 log.info('Creating Subversion repository in path "%s"', wire['path'])
373 self._factory.repo(wire, create=True,
373 self._factory.repo(wire, create=True,
374 compatible_version=compatible_version)
374 compatible_version=compatible_version)
375
375
376 def get_url_and_credentials(self, src_url):
376 def get_url_and_credentials(self, src_url):
377 obj = urllib.parse.urlparse(src_url)
377 obj = urllib.parse.urlparse(src_url)
378 username = obj.username or None
378 username = obj.username or None
379 password = obj.password or None
379 password = obj.password or None
380 return username, password, src_url
380 return username, password, src_url
381
381
382 def import_remote_repository(self, wire, src_url):
382 def import_remote_repository(self, wire, src_url):
383 repo_path = wire['path']
383 repo_path = wire['path']
384 if not self.is_path_valid_repository(wire, repo_path):
384 if not self.is_path_valid_repository(wire, repo_path):
385 raise Exception(
385 raise Exception(
386 "Path %s is not a valid Subversion repository." % repo_path)
386 "Path %s is not a valid Subversion repository." % repo_path)
387
387
388 username, password, src_url = self.get_url_and_credentials(src_url)
388 username, password, src_url = self.get_url_and_credentials(src_url)
389 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
389 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
390 '--trust-server-cert-failures=unknown-ca']
390 '--trust-server-cert-failures=unknown-ca']
391 if username and password:
391 if username and password:
392 rdump_cmd += ['--username', username, '--password', password]
392 rdump_cmd += ['--username', username, '--password', password]
393 rdump_cmd += [src_url]
393 rdump_cmd += [src_url]
394
394
395 rdump = subprocess.Popen(
395 rdump = subprocess.Popen(
396 rdump_cmd,
396 rdump_cmd,
397 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
397 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
398 load = subprocess.Popen(
398 load = subprocess.Popen(
399 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
399 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
400
400
401 # TODO: johbo: This can be a very long operation, might be better
401 # TODO: johbo: This can be a very long operation, might be better
402 # to track some kind of status and provide an api to check if the
402 # to track some kind of status and provide an api to check if the
403 # import is done.
403 # import is done.
404 rdump.wait()
404 rdump.wait()
405 load.wait()
405 load.wait()
406
406
407 log.debug('Return process ended with code: %s', rdump.returncode)
407 log.debug('Return process ended with code: %s', rdump.returncode)
408 if rdump.returncode != 0:
408 if rdump.returncode != 0:
409 errors = rdump.stderr.read()
409 errors = rdump.stderr.read()
410 log.error('svnrdump dump failed: statuscode %s: message: %s', rdump.returncode, errors)
410 log.error('svnrdump dump failed: statuscode %s: message: %s', rdump.returncode, errors)
411
411
412 reason = 'UNKNOWN'
412 reason = 'UNKNOWN'
413 if b'svnrdump: E230001:' in errors:
413 if b'svnrdump: E230001:' in errors:
414 reason = 'INVALID_CERTIFICATE'
414 reason = 'INVALID_CERTIFICATE'
415
415
416 if reason == 'UNKNOWN':
416 if reason == 'UNKNOWN':
417 reason = 'UNKNOWN:{}'.format(safe_str(errors))
417 reason = 'UNKNOWN:{}'.format(safe_str(errors))
418
418
419 raise Exception(
419 raise Exception(
420 'Failed to dump the remote repository from %s. Reason:%s' % (
420 'Failed to dump the remote repository from %s. Reason:%s' % (
421 src_url, reason))
421 src_url, reason))
422 if load.returncode != 0:
422 if load.returncode != 0:
423 raise Exception(
423 raise Exception(
424 'Failed to load the dump of remote repository from %s.' %
424 'Failed to load the dump of remote repository from %s.' %
425 (src_url, ))
425 (src_url, ))
426
426
427 def commit(self, wire, message, author, timestamp, updated, removed):
427 def commit(self, wire, message, author, timestamp, updated, removed):
428 assert isinstance(message, str)
428 assert isinstance(message, str)
429 assert isinstance(author, str)
429 assert isinstance(author, str)
430
430
431 repo = self._factory.repo(wire)
431 repo = self._factory.repo(wire)
432 fsobj = svn.repos.fs(repo)
432 fsobj = svn.repos.fs(repo)
433
433
434 rev = svn.fs.youngest_rev(fsobj)
434 rev = svn.fs.youngest_rev(fsobj)
435 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
435 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
436 txn_root = svn.fs.txn_root(txn)
436 txn_root = svn.fs.txn_root(txn)
437
437
438 for node in updated:
438 for node in updated:
439 TxnNodeProcessor(node, txn_root).update()
439 TxnNodeProcessor(node, txn_root).update()
440 for node in removed:
440 for node in removed:
441 TxnNodeProcessor(node, txn_root).remove()
441 TxnNodeProcessor(node, txn_root).remove()
442
442
443 commit_id = svn.repos.fs_commit_txn(repo, txn)
443 commit_id = svn.repos.fs_commit_txn(repo, txn)
444
444
445 if timestamp:
445 if timestamp:
446 apr_time = apr_time_t(timestamp)
446 apr_time = apr_time_t(timestamp)
447 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
447 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
448 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
448 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
449
449
450 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
450 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
451 return commit_id
451 return commit_id
452
452
453 def diff(self, wire, rev1, rev2, path1=None, path2=None,
453 def diff(self, wire, rev1, rev2, path1=None, path2=None,
454 ignore_whitespace=False, context=3):
454 ignore_whitespace=False, context=3):
455
455
456 wire.update(cache=False)
456 wire.update(cache=False)
457 repo = self._factory.repo(wire)
457 repo = self._factory.repo(wire)
458 diff_creator = SvnDiffer(
458 diff_creator = SvnDiffer(
459 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
459 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
460 try:
460 try:
461 return diff_creator.generate_diff()
461 return diff_creator.generate_diff()
462 except svn.core.SubversionException as e:
462 except svn.core.SubversionException as e:
463 log.exception(
463 log.exception(
464 "Error during diff operation operation. "
464 "Error during diff operation operation. "
465 "Path might not exist %s, %s" % (path1, path2))
465 "Path might not exist %s, %s" % (path1, path2))
466 return ""
466 return ""
467
467
468 @reraise_safe_exceptions
468 @reraise_safe_exceptions
469 def is_large_file(self, wire, path):
469 def is_large_file(self, wire, path):
470 return False
470 return False
471
471
472 @reraise_safe_exceptions
472 @reraise_safe_exceptions
473 def is_binary(self, wire, rev, path):
473 def is_binary(self, wire, rev, path):
474 cache_on, context_uid, repo_id = self._cache_on(wire)
474 cache_on, context_uid, repo_id = self._cache_on(wire)
475
475
476 region = self._region(wire)
476 region = self._region(wire)
477 @region.conditional_cache_on_arguments(condition=cache_on)
477 @region.conditional_cache_on_arguments(condition=cache_on)
478 def _is_binary(_repo_id, _rev, _path):
478 def _is_binary(_repo_id, _rev, _path):
479 raw_bytes = self.get_file_content(wire, path, rev)
479 raw_bytes = self.get_file_content(wire, path, rev)
480 return raw_bytes and '\0' in raw_bytes
480 return raw_bytes and '\0' in raw_bytes
481
481
482 return _is_binary(repo_id, rev, path)
482 return _is_binary(repo_id, rev, path)
483
483
484 @reraise_safe_exceptions
484 @reraise_safe_exceptions
485 def run_svn_command(self, wire, cmd, **opts):
485 def run_svn_command(self, wire, cmd, **opts):
486 path = wire.get('path', None)
486 path = wire.get('path', None)
487
487
488 if path and os.path.isdir(path):
488 if path and os.path.isdir(path):
489 opts['cwd'] = path
489 opts['cwd'] = path
490
490
491 safe_call = opts.pop('_safe', False)
491 safe_call = opts.pop('_safe', False)
492
492
493 svnenv = os.environ.copy()
493 svnenv = os.environ.copy()
494 svnenv.update(opts.pop('extra_env', {}))
494 svnenv.update(opts.pop('extra_env', {}))
495
495
496 _opts = {'env': svnenv, 'shell': False}
496 _opts = {'env': svnenv, 'shell': False}
497
497
498 try:
498 try:
499 _opts.update(opts)
499 _opts.update(opts)
500 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
500 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
501
501
502 return b''.join(proc), b''.join(proc.stderr)
502 return b''.join(proc), b''.join(proc.stderr)
503 except OSError as err:
503 except OSError as err:
504 if safe_call:
504 if safe_call:
505 return '', safe_str(err).strip()
505 return '', safe_str(err).strip()
506 else:
506 else:
507 cmd = ' '.join(cmd) # human friendly CMD
507 cmd = ' '.join(cmd) # human friendly CMD
508 tb_err = ("Couldn't run svn command (%s).\n"
508 tb_err = ("Couldn't run svn command (%s).\n"
509 "Original error was:%s\n"
509 "Original error was:%s\n"
510 "Call options:%s\n"
510 "Call options:%s\n"
511 % (cmd, err, _opts))
511 % (cmd, err, _opts))
512 log.exception(tb_err)
512 log.exception(tb_err)
513 raise exceptions.VcsException()(tb_err)
513 raise exceptions.VcsException()(tb_err)
514
514
515 @reraise_safe_exceptions
515 @reraise_safe_exceptions
516 def install_hooks(self, wire, force=False):
516 def install_hooks(self, wire, force=False):
517 from vcsserver.hook_utils import install_svn_hooks
517 from vcsserver.hook_utils import install_svn_hooks
518 repo_path = wire['path']
518 repo_path = wire['path']
519 binary_dir = settings.BINARY_DIR
519 binary_dir = settings.BINARY_DIR
520 executable = None
520 executable = None
521 if binary_dir:
521 if binary_dir:
522 executable = os.path.join(binary_dir, 'python')
522 executable = os.path.join(binary_dir, 'python')
523 return install_svn_hooks(
523 return install_svn_hooks(
524 repo_path, executable=executable, force_create=force)
524 repo_path, executable=executable, force_create=force)
525
525
526 @reraise_safe_exceptions
526 @reraise_safe_exceptions
527 def get_hooks_info(self, wire):
527 def get_hooks_info(self, wire):
528 from vcsserver.hook_utils import (
528 from vcsserver.hook_utils import (
529 get_svn_pre_hook_version, get_svn_post_hook_version)
529 get_svn_pre_hook_version, get_svn_post_hook_version)
530 repo_path = wire['path']
530 repo_path = wire['path']
531 return {
531 return {
532 'pre_version': get_svn_pre_hook_version(repo_path),
532 'pre_version': get_svn_pre_hook_version(repo_path),
533 'post_version': get_svn_post_hook_version(repo_path),
533 'post_version': get_svn_post_hook_version(repo_path),
534 }
534 }
535
535
536 @reraise_safe_exceptions
536 @reraise_safe_exceptions
537 def set_head_ref(self, wire, head_name):
537 def set_head_ref(self, wire, head_name):
538 pass
538 pass
539
539
540 @reraise_safe_exceptions
540 @reraise_safe_exceptions
541 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
541 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
542 archive_dir_name, commit_id):
542 archive_dir_name, commit_id):
543
543
544 def walk_tree(root, root_dir, _commit_id):
544 def walk_tree(root, root_dir, _commit_id):
545 """
545 """
546 Special recursive svn repo walker
546 Special recursive svn repo walker
547 """
547 """
548
548
549 filemode_default = 0o100644
549 filemode_default = 0o100644
550 filemode_executable = 0o100755
550 filemode_executable = 0o100755
551
551
552 file_iter = svn.fs.dir_entries(root, root_dir)
552 file_iter = svn.fs.dir_entries(root, root_dir)
553 for f_name in file_iter:
553 for f_name in file_iter:
554 f_type = NODE_TYPE_MAPPING.get(file_iter[f_name].kind, None)
554 f_type = NODE_TYPE_MAPPING.get(file_iter[f_name].kind, None)
555
555
556 if f_type == 'dir':
556 if f_type == 'dir':
557 # return only DIR, and then all entries in that dir
557 # return only DIR, and then all entries in that dir
558 yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type
558 yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type
559 new_root = os.path.join(root_dir, f_name)
559 new_root = os.path.join(root_dir, f_name)
560 for _f_name, _f_data, _f_type in walk_tree(root, new_root, _commit_id):
560 for _f_name, _f_data, _f_type in walk_tree(root, new_root, _commit_id):
561 yield _f_name, _f_data, _f_type
561 yield _f_name, _f_data, _f_type
562 else:
562 else:
563 f_path = os.path.join(root_dir, f_name).rstrip('/')
563 f_path = os.path.join(root_dir, f_name).rstrip('/')
564 prop_list = svn.fs.node_proplist(root, f_path)
564 prop_list = svn.fs.node_proplist(root, f_path)
565
565
566 f_mode = filemode_default
566 f_mode = filemode_default
567 if prop_list.get('svn:executable'):
567 if prop_list.get('svn:executable'):
568 f_mode = filemode_executable
568 f_mode = filemode_executable
569
569
570 f_is_link = False
570 f_is_link = False
571 if prop_list.get('svn:special'):
571 if prop_list.get('svn:special'):
572 f_is_link = True
572 f_is_link = True
573
573
574 data = {
574 data = {
575 'is_link': f_is_link,
575 'is_link': f_is_link,
576 'mode': f_mode,
576 'mode': f_mode,
577 'content_stream': svn.core.Stream(svn.fs.file_contents(root, f_path)).read
577 'content_stream': svn.core.Stream(svn.fs.file_contents(root, f_path)).read
578 }
578 }
579
579
580 yield f_path, data, f_type
580 yield f_path, data, f_type
581
581
582 def file_walker(_commit_id, path):
582 def file_walker(_commit_id, path):
583 repo = self._factory.repo(wire)
583 repo = self._factory.repo(wire)
584 root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id))
584 root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id))
585
585
586 def no_content():
586 def no_content():
587 raise NoContentException()
587 raise NoContentException()
588
588
589 for f_name, f_data, f_type in walk_tree(root, path, _commit_id):
589 for f_name, f_data, f_type in walk_tree(root, path, _commit_id):
590 file_path = f_name
590 file_path = f_name
591
591
592 if f_type == 'dir':
592 if f_type == 'dir':
593 mode = f_data['mode']
593 mode = f_data['mode']
594 yield ArchiveNode(file_path, mode, False, no_content)
594 yield ArchiveNode(file_path, mode, False, no_content)
595 else:
595 else:
596 mode = f_data['mode']
596 mode = f_data['mode']
597 is_link = f_data['is_link']
597 is_link = f_data['is_link']
598 data_stream = f_data['content_stream']
598 data_stream = f_data['content_stream']
599 yield ArchiveNode(file_path, mode, is_link, data_stream)
599 yield ArchiveNode(file_path, mode, is_link, data_stream)
600
600
601 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
601 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
602 archive_dir_name, commit_id)
602 archive_dir_name, commit_id)
603
603
604
604
605 class SvnDiffer(object):
605 class SvnDiffer(object):
606 """
606 """
607 Utility to create diffs based on difflib and the Subversion api
607 Utility to create diffs based on difflib and the Subversion api
608 """
608 """
609
609
610 binary_content = False
610 binary_content = False
611
611
612 def __init__(
612 def __init__(
613 self, repo, src_rev, src_path, tgt_rev, tgt_path,
613 self, repo, src_rev, src_path, tgt_rev, tgt_path,
614 ignore_whitespace, context):
614 ignore_whitespace, context):
615 self.repo = repo
615 self.repo = repo
616 self.ignore_whitespace = ignore_whitespace
616 self.ignore_whitespace = ignore_whitespace
617 self.context = context
617 self.context = context
618
618
619 fsobj = svn.repos.fs(repo)
619 fsobj = svn.repos.fs(repo)
620
620
621 self.tgt_rev = tgt_rev
621 self.tgt_rev = tgt_rev
622 self.tgt_path = tgt_path or ''
622 self.tgt_path = tgt_path or ''
623 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
623 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
624 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
624 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
625
625
626 self.src_rev = src_rev
626 self.src_rev = src_rev
627 self.src_path = src_path or self.tgt_path
627 self.src_path = src_path or self.tgt_path
628 self.src_root = svn.fs.revision_root(fsobj, src_rev)
628 self.src_root = svn.fs.revision_root(fsobj, src_rev)
629 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
629 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
630
630
631 self._validate()
631 self._validate()
632
632
633 def _validate(self):
633 def _validate(self):
634 if (self.tgt_kind != svn.core.svn_node_none and
634 if (self.tgt_kind != svn.core.svn_node_none and
635 self.src_kind != svn.core.svn_node_none and
635 self.src_kind != svn.core.svn_node_none and
636 self.src_kind != self.tgt_kind):
636 self.src_kind != self.tgt_kind):
637 # TODO: johbo: proper error handling
637 # TODO: johbo: proper error handling
638 raise Exception(
638 raise Exception(
639 "Source and target are not compatible for diff generation. "
639 "Source and target are not compatible for diff generation. "
640 "Source type: %s, target type: %s" %
640 "Source type: %s, target type: %s" %
641 (self.src_kind, self.tgt_kind))
641 (self.src_kind, self.tgt_kind))
642
642
643 def generate_diff(self):
643 def generate_diff(self):
644 buf = io.StringIO()
644 buf = io.StringIO()
645 if self.tgt_kind == svn.core.svn_node_dir:
645 if self.tgt_kind == svn.core.svn_node_dir:
646 self._generate_dir_diff(buf)
646 self._generate_dir_diff(buf)
647 else:
647 else:
648 self._generate_file_diff(buf)
648 self._generate_file_diff(buf)
649 return buf.getvalue()
649 return buf.getvalue()
650
650
651 def _generate_dir_diff(self, buf):
651 def _generate_dir_diff(self, buf):
652 editor = DiffChangeEditor()
652 editor = DiffChangeEditor()
653 editor_ptr, editor_baton = svn.delta.make_editor(editor)
653 editor_ptr, editor_baton = svn.delta.make_editor(editor)
654 svn.repos.dir_delta2(
654 svn.repos.dir_delta2(
655 self.src_root,
655 self.src_root,
656 self.src_path,
656 self.src_path,
657 '', # src_entry
657 '', # src_entry
658 self.tgt_root,
658 self.tgt_root,
659 self.tgt_path,
659 self.tgt_path,
660 editor_ptr, editor_baton,
660 editor_ptr, editor_baton,
661 authorization_callback_allow_all,
661 authorization_callback_allow_all,
662 False, # text_deltas
662 False, # text_deltas
663 svn.core.svn_depth_infinity, # depth
663 svn.core.svn_depth_infinity, # depth
664 False, # entry_props
664 False, # entry_props
665 False, # ignore_ancestry
665 False, # ignore_ancestry
666 )
666 )
667
667
668 for path, __, change in sorted(editor.changes):
668 for path, __, change in sorted(editor.changes):
669 self._generate_node_diff(
669 self._generate_node_diff(
670 buf, change, path, self.tgt_path, path, self.src_path)
670 buf, change, path, self.tgt_path, path, self.src_path)
671
671
672 def _generate_file_diff(self, buf):
672 def _generate_file_diff(self, buf):
673 change = None
673 change = None
674 if self.src_kind == svn.core.svn_node_none:
674 if self.src_kind == svn.core.svn_node_none:
675 change = "add"
675 change = "add"
676 elif self.tgt_kind == svn.core.svn_node_none:
676 elif self.tgt_kind == svn.core.svn_node_none:
677 change = "delete"
677 change = "delete"
678 tgt_base, tgt_path = vcspath.split(self.tgt_path)
678 tgt_base, tgt_path = vcspath.split(self.tgt_path)
679 src_base, src_path = vcspath.split(self.src_path)
679 src_base, src_path = vcspath.split(self.src_path)
680 self._generate_node_diff(
680 self._generate_node_diff(
681 buf, change, tgt_path, tgt_base, src_path, src_base)
681 buf, change, tgt_path, tgt_base, src_path, src_base)
682
682
683 def _generate_node_diff(
683 def _generate_node_diff(
684 self, buf, change, tgt_path, tgt_base, src_path, src_base):
684 self, buf, change, tgt_path, tgt_base, src_path, src_base):
685
685
686 if self.src_rev == self.tgt_rev and tgt_base == src_base:
686 if self.src_rev == self.tgt_rev and tgt_base == src_base:
687 # makes consistent behaviour with git/hg to return empty diff if
687 # makes consistent behaviour with git/hg to return empty diff if
688 # we compare same revisions
688 # we compare same revisions
689 return
689 return
690
690
691 tgt_full_path = vcspath.join(tgt_base, tgt_path)
691 tgt_full_path = vcspath.join(tgt_base, tgt_path)
692 src_full_path = vcspath.join(src_base, src_path)
692 src_full_path = vcspath.join(src_base, src_path)
693
693
694 self.binary_content = False
694 self.binary_content = False
695 mime_type = self._get_mime_type(tgt_full_path)
695 mime_type = self._get_mime_type(tgt_full_path)
696
696
697 if mime_type and not mime_type.startswith('text'):
697 if mime_type and not mime_type.startswith('text'):
698 self.binary_content = True
698 self.binary_content = True
699 buf.write("=" * 67 + '\n')
699 buf.write("=" * 67 + '\n')
700 buf.write("Cannot display: file marked as a binary type.\n")
700 buf.write("Cannot display: file marked as a binary type.\n")
701 buf.write("svn:mime-type = %s\n" % mime_type)
701 buf.write("svn:mime-type = %s\n" % mime_type)
702 buf.write("Index: %s\n" % (tgt_path, ))
702 buf.write("Index: %s\n" % (tgt_path, ))
703 buf.write("=" * 67 + '\n')
703 buf.write("=" * 67 + '\n')
704 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
704 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
705 'tgt_path': tgt_path})
705 'tgt_path': tgt_path})
706
706
707 if change == 'add':
707 if change == 'add':
708 # TODO: johbo: SVN is missing a zero here compared to git
708 # TODO: johbo: SVN is missing a zero here compared to git
709 buf.write("new file mode 10644\n")
709 buf.write("new file mode 10644\n")
710
710
711 #TODO(marcink): intro to binary detection of svn patches
711 #TODO(marcink): intro to binary detection of svn patches
712 # if self.binary_content:
712 # if self.binary_content:
713 # buf.write('GIT binary patch\n')
713 # buf.write('GIT binary patch\n')
714
714
715 buf.write("--- /dev/null\t(revision 0)\n")
715 buf.write("--- /dev/null\t(revision 0)\n")
716 src_lines = []
716 src_lines = []
717 else:
717 else:
718 if change == 'delete':
718 if change == 'delete':
719 buf.write("deleted file mode 10644\n")
719 buf.write("deleted file mode 10644\n")
720
720
721 #TODO(marcink): intro to binary detection of svn patches
721 #TODO(marcink): intro to binary detection of svn patches
722 # if self.binary_content:
722 # if self.binary_content:
723 # buf.write('GIT binary patch\n')
723 # buf.write('GIT binary patch\n')
724
724
725 buf.write("--- a/%s\t(revision %s)\n" % (
725 buf.write("--- a/%s\t(revision %s)\n" % (
726 src_path, self.src_rev))
726 src_path, self.src_rev))
727 src_lines = self._svn_readlines(self.src_root, src_full_path)
727 src_lines = self._svn_readlines(self.src_root, src_full_path)
728
728
729 if change == 'delete':
729 if change == 'delete':
730 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
730 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
731 tgt_lines = []
731 tgt_lines = []
732 else:
732 else:
733 buf.write("+++ b/%s\t(revision %s)\n" % (
733 buf.write("+++ b/%s\t(revision %s)\n" % (
734 tgt_path, self.tgt_rev))
734 tgt_path, self.tgt_rev))
735 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
735 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
736
736
737 if not self.binary_content:
737 if not self.binary_content:
738 udiff = svn_diff.unified_diff(
738 udiff = svn_diff.unified_diff(
739 src_lines, tgt_lines, context=self.context,
739 src_lines, tgt_lines, context=self.context,
740 ignore_blank_lines=self.ignore_whitespace,
740 ignore_blank_lines=self.ignore_whitespace,
741 ignore_case=False,
741 ignore_case=False,
742 ignore_space_changes=self.ignore_whitespace)
742 ignore_space_changes=self.ignore_whitespace)
743 buf.writelines(udiff)
743 buf.writelines(udiff)
744
744
745 def _get_mime_type(self, path):
745 def _get_mime_type(self, path):
746 try:
746 try:
747 mime_type = svn.fs.node_prop(
747 mime_type = svn.fs.node_prop(
748 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
748 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
749 except svn.core.SubversionException:
749 except svn.core.SubversionException:
750 mime_type = svn.fs.node_prop(
750 mime_type = svn.fs.node_prop(
751 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
751 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
752 return mime_type
752 return mime_type
753
753
754 def _svn_readlines(self, fs_root, node_path):
754 def _svn_readlines(self, fs_root, node_path):
755 if self.binary_content:
755 if self.binary_content:
756 return []
756 return []
757 node_kind = svn.fs.check_path(fs_root, node_path)
757 node_kind = svn.fs.check_path(fs_root, node_path)
758 if node_kind not in (
758 if node_kind not in (
759 svn.core.svn_node_file, svn.core.svn_node_symlink):
759 svn.core.svn_node_file, svn.core.svn_node_symlink):
760 return []
760 return []
761 content = svn.core.Stream(
761 content = svn.core.Stream(
762 svn.fs.file_contents(fs_root, node_path)).read()
762 svn.fs.file_contents(fs_root, node_path)).read()
763 return content.splitlines(True)
763 return content.splitlines(True)
764
764
765
765
766 class DiffChangeEditor(svn.delta.Editor):
766 class DiffChangeEditor(svn.delta.Editor):
767 """
767 """
768 Records changes between two given revisions
768 Records changes between two given revisions
769 """
769 """
770
770
771 def __init__(self):
771 def __init__(self):
772 self.changes = []
772 self.changes = []
773
773
774 def delete_entry(self, path, revision, parent_baton, pool=None):
774 def delete_entry(self, path, revision, parent_baton, pool=None):
775 self.changes.append((path, None, 'delete'))
775 self.changes.append((path, None, 'delete'))
776
776
777 def add_file(
777 def add_file(
778 self, path, parent_baton, copyfrom_path, copyfrom_revision,
778 self, path, parent_baton, copyfrom_path, copyfrom_revision,
779 file_pool=None):
779 file_pool=None):
780 self.changes.append((path, 'file', 'add'))
780 self.changes.append((path, 'file', 'add'))
781
781
782 def open_file(self, path, parent_baton, base_revision, file_pool=None):
782 def open_file(self, path, parent_baton, base_revision, file_pool=None):
783 self.changes.append((path, 'file', 'change'))
783 self.changes.append((path, 'file', 'change'))
784
784
785
785
786 def authorization_callback_allow_all(root, path, pool):
786 def authorization_callback_allow_all(root, path, pool):
787 return True
787 return True
788
788
789
789
790 class TxnNodeProcessor(object):
790 class TxnNodeProcessor(object):
791 """
791 """
792 Utility to process the change of one node within a transaction root.
792 Utility to process the change of one node within a transaction root.
793
793
794 It encapsulates the knowledge of how to add, update or remove
794 It encapsulates the knowledge of how to add, update or remove
795 a node for a given transaction root. The purpose is to support the method
795 a node for a given transaction root. The purpose is to support the method
796 `SvnRemote.commit`.
796 `SvnRemote.commit`.
797 """
797 """
798
798
799 def __init__(self, node, txn_root):
799 def __init__(self, node, txn_root):
800 assert isinstance(node['path'], str)
800 assert isinstance(node['path'], str)
801
801
802 self.node = node
802 self.node = node
803 self.txn_root = txn_root
803 self.txn_root = txn_root
804
804
805 def update(self):
805 def update(self):
806 self._ensure_parent_dirs()
806 self._ensure_parent_dirs()
807 self._add_file_if_node_does_not_exist()
807 self._add_file_if_node_does_not_exist()
808 self._update_file_content()
808 self._update_file_content()
809 self._update_file_properties()
809 self._update_file_properties()
810
810
811 def remove(self):
811 def remove(self):
812 svn.fs.delete(self.txn_root, self.node['path'])
812 svn.fs.delete(self.txn_root, self.node['path'])
813 # TODO: Clean up directory if empty
813 # TODO: Clean up directory if empty
814
814
815 def _ensure_parent_dirs(self):
815 def _ensure_parent_dirs(self):
816 curdir = vcspath.dirname(self.node['path'])
816 curdir = vcspath.dirname(self.node['path'])
817 dirs_to_create = []
817 dirs_to_create = []
818 while not self._svn_path_exists(curdir):
818 while not self._svn_path_exists(curdir):
819 dirs_to_create.append(curdir)
819 dirs_to_create.append(curdir)
820 curdir = vcspath.dirname(curdir)
820 curdir = vcspath.dirname(curdir)
821
821
822 for curdir in reversed(dirs_to_create):
822 for curdir in reversed(dirs_to_create):
823 log.debug('Creating missing directory "%s"', curdir)
823 log.debug('Creating missing directory "%s"', curdir)
824 svn.fs.make_dir(self.txn_root, curdir)
824 svn.fs.make_dir(self.txn_root, curdir)
825
825
826 def _svn_path_exists(self, path):
826 def _svn_path_exists(self, path):
827 path_status = svn.fs.check_path(self.txn_root, path)
827 path_status = svn.fs.check_path(self.txn_root, path)
828 return path_status != svn.core.svn_node_none
828 return path_status != svn.core.svn_node_none
829
829
830 def _add_file_if_node_does_not_exist(self):
830 def _add_file_if_node_does_not_exist(self):
831 kind = svn.fs.check_path(self.txn_root, self.node['path'])
831 kind = svn.fs.check_path(self.txn_root, self.node['path'])
832 if kind == svn.core.svn_node_none:
832 if kind == svn.core.svn_node_none:
833 svn.fs.make_file(self.txn_root, self.node['path'])
833 svn.fs.make_file(self.txn_root, self.node['path'])
834
834
835 def _update_file_content(self):
835 def _update_file_content(self):
836 assert isinstance(self.node['content'], str)
836 assert isinstance(self.node['content'], str)
837 handler, baton = svn.fs.apply_textdelta(
837 handler, baton = svn.fs.apply_textdelta(
838 self.txn_root, self.node['path'], None, None)
838 self.txn_root, self.node['path'], None, None)
839 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
839 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
840
840
841 def _update_file_properties(self):
841 def _update_file_properties(self):
842 properties = self.node.get('properties', {})
842 properties = self.node.get('properties', {})
843 for key, value in properties.items():
843 for key, value in properties.items():
844 svn.fs.change_node_prop(
844 svn.fs.change_node_prop(
845 self.txn_root, self.node['path'], key, value)
845 self.txn_root, self.node['path'], key, value)
846
846
847
847
848 def apr_time_t(timestamp):
848 def apr_time_t(timestamp):
849 """
849 """
850 Convert a Python timestamp into APR timestamp type apr_time_t
850 Convert a Python timestamp into APR timestamp type apr_time_t
851 """
851 """
852 return timestamp * 1E6
852 return timestamp * 1E6
853
853
854
854
855 def svn_opt_revision_value_t(num):
855 def svn_opt_revision_value_t(num):
856 """
856 """
857 Put `num` into a `svn_opt_revision_value_t` structure.
857 Put `num` into a `svn_opt_revision_value_t` structure.
858 """
858 """
859 value = svn.core.svn_opt_revision_value_t()
859 value = svn.core.svn_opt_revision_value_t()
860 value.number = num
860 value.number = num
861 revision = svn.core.svn_opt_revision_t()
861 revision = svn.core.svn_opt_revision_t()
862 revision.kind = svn.core.svn_opt_revision_number
862 revision.kind = svn.core.svn_opt_revision_number
863 revision.value = value
863 revision.value = value
864 return revision
864 return revision
@@ -1,86 +1,86 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import shutil
19 import shutil
20 import tempfile
20 import tempfile
21
21
22 import configobj
22 import configobj
23
23
24
24
25 class ContextINI(object):
25 class ContextINI(object):
26 """
26 """
27 Allows to create a new test.ini file as a copy of existing one with edited
27 Allows to create a new test.ini file as a copy of existing one with edited
28 data. If existing file is not present, it creates a new one. Example usage::
28 data. If existing file is not present, it creates a new one. Example usage::
29
29
30 with TestINI('test.ini', [{'section': {'key': 'val'}}]) as new_test_ini_path:
30 with TestINI('test.ini', [{'section': {'key': 'val'}}]) as new_test_ini_path:
31 print 'vcsserver --config=%s' % new_test_ini
31 print 'vcsserver --config=%s' % new_test_ini
32 """
32 """
33
33
34 def __init__(self, ini_file_path, ini_params, new_file_prefix=None,
34 def __init__(self, ini_file_path, ini_params, new_file_prefix=None,
35 destroy=True):
35 destroy=True):
36 self.ini_file_path = ini_file_path
36 self.ini_file_path = ini_file_path
37 self.ini_params = ini_params
37 self.ini_params = ini_params
38 self.new_path = None
38 self.new_path = None
39 self.new_path_prefix = new_file_prefix or 'test'
39 self.new_path_prefix = new_file_prefix or 'test'
40 self.destroy = destroy
40 self.destroy = destroy
41
41
42 def __enter__(self):
42 def __enter__(self):
43 _, pref = tempfile.mkstemp()
43 _, pref = tempfile.mkstemp()
44 loc = tempfile.gettempdir()
44 loc = tempfile.gettempdir()
45 self.new_path = os.path.join(loc, '{}_{}_{}'.format(
45 self.new_path = os.path.join(loc, '{}_{}_{}'.format(
46 pref, self.new_path_prefix, self.ini_file_path))
46 pref, self.new_path_prefix, self.ini_file_path))
47
47
48 # copy ini file and modify according to the params, if we re-use a file
48 # copy ini file and modify according to the params, if we re-use a file
49 if os.path.isfile(self.ini_file_path):
49 if os.path.isfile(self.ini_file_path):
50 shutil.copy(self.ini_file_path, self.new_path)
50 shutil.copy(self.ini_file_path, self.new_path)
51 else:
51 else:
52 # create new dump file for configObj to write to.
52 # create new dump file for configObj to write to.
53 with open(self.new_path, 'wb'):
53 with open(self.new_path, 'wb'):
54 pass
54 pass
55
55
56 config = configobj.ConfigObj(
56 config = configobj.ConfigObj(
57 self.new_path, file_error=True, write_empty_values=True)
57 self.new_path, file_error=True, write_empty_values=True)
58
58
59 for data in self.ini_params:
59 for data in self.ini_params:
60 section, ini_params = data.items()[0]
60 section, ini_params = list(data.items())[0]
61 key, val = ini_params.items()[0]
61 key, val = list(ini_params.items())[0]
62 if section not in config:
62 if section not in config:
63 config[section] = {}
63 config[section] = {}
64 config[section][key] = val
64 config[section][key] = val
65
65
66 config.write()
66 config.write()
67 return self.new_path
67 return self.new_path
68
68
69 def __exit__(self, exc_type, exc_val, exc_tb):
69 def __exit__(self, exc_type, exc_val, exc_tb):
70 if self.destroy:
70 if self.destroy:
71 os.remove(self.new_path)
71 os.remove(self.new_path)
72
72
73
73
74 def no_newline_id_generator(test_name):
74 def no_newline_id_generator(test_name):
75 """
75 """
76 Generates a test name without spaces or newlines characters. Used for
76 Generates a test name without spaces or newlines characters. Used for
77 nicer output of progress of test
77 nicer output of progress of test
78 """
78 """
79 org_name = test_name
79 org_name = test_name
80 test_name = str(test_name)\
80 test_name = str(test_name)\
81 .replace('\n', '_N') \
81 .replace('\n', '_N') \
82 .replace('\r', '_N') \
82 .replace('\r', '_N') \
83 .replace('\t', '_T') \
83 .replace('\t', '_T') \
84 .replace(' ', '_S')
84 .replace(' ', '_S')
85
85
86 return test_name or 'test-with-empty-name'
86 return test_name or 'test-with-empty-name'
@@ -1,162 +1,162 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import inspect
18 import inspect
19
19
20 import pytest
20 import pytest
21 import dulwich.errors
21 import dulwich.errors
22 from mock import Mock, patch
22 from mock import Mock, patch
23
23
24 from vcsserver.remote import git
24 from vcsserver.remote import git
25
25
26 SAMPLE_REFS = {
26 SAMPLE_REFS = {
27 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
27 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
28 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
28 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
29 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
29 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
30 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
30 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
31 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
31 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
32 }
32 }
33
33
34
34
35 @pytest.fixture
35 @pytest.fixture
36 def git_remote():
36 def git_remote():
37 """
37 """
38 A GitRemote instance with a mock factory.
38 A GitRemote instance with a mock factory.
39 """
39 """
40 factory = Mock()
40 factory = Mock()
41 remote = git.GitRemote(factory)
41 remote = git.GitRemote(factory)
42 return remote
42 return remote
43
43
44
44
45 def test_discover_git_version(git_remote):
45 def test_discover_git_version(git_remote):
46 version = git_remote.discover_git_version()
46 version = git_remote.discover_git_version()
47 assert version
47 assert version
48
48
49
49
50 class TestGitFetch(object):
50 class TestGitFetch(object):
51 def setup_method(self):
51 def setup_method(self):
52 self.mock_repo = Mock()
52 self.mock_repo = Mock()
53 factory = Mock()
53 factory = Mock()
54 factory.repo = Mock(return_value=self.mock_repo)
54 factory.repo = Mock(return_value=self.mock_repo)
55 self.remote_git = git.GitRemote(factory)
55 self.remote_git = git.GitRemote(factory)
56
56
57 def test_fetches_all_when_no_commit_ids_specified(self):
57 def test_fetches_all_when_no_commit_ids_specified(self):
58 def side_effect(determine_wants, *args, **kwargs):
58 def side_effect(determine_wants, *args, **kwargs):
59 determine_wants(SAMPLE_REFS)
59 determine_wants(SAMPLE_REFS)
60
60
61 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
61 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
62 mock_fetch.side_effect = side_effect
62 mock_fetch.side_effect = side_effect
63 self.remote_git.pull(wire={}, url='/tmp/', apply_refs=False)
63 self.remote_git.pull(wire={}, url='/tmp/', apply_refs=False)
64 determine_wants = self.mock_repo.object_store.determine_wants_all
64 determine_wants = self.mock_repo.object_store.determine_wants_all
65 determine_wants.assert_called_once_with(SAMPLE_REFS)
65 determine_wants.assert_called_once_with(SAMPLE_REFS)
66
66
67 def test_fetches_specified_commits(self):
67 def test_fetches_specified_commits(self):
68 selected_refs = {
68 selected_refs = {
69 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
69 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
70 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
70 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
71 }
71 }
72
72
73 def side_effect(determine_wants, *args, **kwargs):
73 def side_effect(determine_wants, *args, **kwargs):
74 result = determine_wants(SAMPLE_REFS)
74 result = determine_wants(SAMPLE_REFS)
75 assert sorted(result) == sorted(selected_refs.values())
75 assert sorted(result) == sorted(selected_refs.values())
76 return result
76 return result
77
77
78 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
78 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
79 mock_fetch.side_effect = side_effect
79 mock_fetch.side_effect = side_effect
80 self.remote_git.pull(
80 self.remote_git.pull(
81 wire={}, url='/tmp/', apply_refs=False,
81 wire={}, url='/tmp/', apply_refs=False,
82 refs=selected_refs.keys())
82 refs=list(selected_refs.keys()))
83 determine_wants = self.mock_repo.object_store.determine_wants_all
83 determine_wants = self.mock_repo.object_store.determine_wants_all
84 assert determine_wants.call_count == 0
84 assert determine_wants.call_count == 0
85
85
86 def test_get_remote_refs(self):
86 def test_get_remote_refs(self):
87 factory = Mock()
87 factory = Mock()
88 remote_git = git.GitRemote(factory)
88 remote_git = git.GitRemote(factory)
89 url = 'http://example.com/test/test.git'
89 url = 'http://example.com/test/test.git'
90 sample_refs = {
90 sample_refs = {
91 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
91 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
92 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
92 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
93 }
93 }
94
94
95 with patch('vcsserver.remote.git.Repo', create=False) as mock_repo:
95 with patch('vcsserver.remote.git.Repo', create=False) as mock_repo:
96 mock_repo().get_refs.return_value = sample_refs
96 mock_repo().get_refs.return_value = sample_refs
97 remote_refs = remote_git.get_remote_refs(wire={}, url=url)
97 remote_refs = remote_git.get_remote_refs(wire={}, url=url)
98 mock_repo().get_refs.assert_called_once_with()
98 mock_repo().get_refs.assert_called_once_with()
99 assert remote_refs == sample_refs
99 assert remote_refs == sample_refs
100
100
101
101
102 class TestReraiseSafeExceptions(object):
102 class TestReraiseSafeExceptions(object):
103
103
104 def test_method_decorated_with_reraise_safe_exceptions(self):
104 def test_method_decorated_with_reraise_safe_exceptions(self):
105 factory = Mock()
105 factory = Mock()
106 git_remote = git.GitRemote(factory)
106 git_remote = git.GitRemote(factory)
107
107
108 def fake_function():
108 def fake_function():
109 return None
109 return None
110
110
111 decorator = git.reraise_safe_exceptions(fake_function)
111 decorator = git.reraise_safe_exceptions(fake_function)
112
112
113 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
113 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
114 for method_name, method in methods:
114 for method_name, method in methods:
115 if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
115 if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
116 assert method.__func__.__code__ == decorator.__code__
116 assert method.__func__.__code__ == decorator.__code__
117
117
118 @pytest.mark.parametrize('side_effect, expected_type', [
118 @pytest.mark.parametrize('side_effect, expected_type', [
119 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
119 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
120 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
120 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
121 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
121 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
122 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
122 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
123 (dulwich.errors.HangupException(), 'error'),
123 (dulwich.errors.HangupException(), 'error'),
124 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
124 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
125 ])
125 ])
126 def test_safe_exceptions_reraised(self, side_effect, expected_type):
126 def test_safe_exceptions_reraised(self, side_effect, expected_type):
127 @git.reraise_safe_exceptions
127 @git.reraise_safe_exceptions
128 def fake_method():
128 def fake_method():
129 raise side_effect
129 raise side_effect
130
130
131 with pytest.raises(Exception) as exc_info:
131 with pytest.raises(Exception) as exc_info:
132 fake_method()
132 fake_method()
133 assert type(exc_info.value) == Exception
133 assert type(exc_info.value) == Exception
134 assert exc_info.value._vcs_kind == expected_type
134 assert exc_info.value._vcs_kind == expected_type
135
135
136
136
137 class TestDulwichRepoWrapper(object):
137 class TestDulwichRepoWrapper(object):
138 def test_calls_close_on_delete(self):
138 def test_calls_close_on_delete(self):
139 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
139 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
140 with patch.object(git.Repo, 'close') as close_mock:
140 with patch.object(git.Repo, 'close') as close_mock:
141 with isdir_patcher:
141 with isdir_patcher:
142 repo = git.Repo('/tmp/abcde')
142 repo = git.Repo('/tmp/abcde')
143 assert repo is not None
143 assert repo is not None
144 repo.__del__()
144 repo.__del__()
145 # can't use del repo as in python3 this isn't always calling .__del__()
145 # can't use del repo as in python3 this isn't always calling .__del__()
146
146
147 close_mock.assert_called_once_with()
147 close_mock.assert_called_once_with()
148
148
149
149
150 class TestGitFactory(object):
150 class TestGitFactory(object):
151 def test_create_repo_returns_dulwich_wrapper(self):
151 def test_create_repo_returns_dulwich_wrapper(self):
152
152
153 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
153 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
154 mock.side_effect = {'repo_objects': ''}
154 mock.side_effect = {'repo_objects': ''}
155 factory = git.GitFactory()
155 factory = git.GitFactory()
156 wire = {
156 wire = {
157 'path': '/tmp/abcde'
157 'path': '/tmp/abcde'
158 }
158 }
159 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
159 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
160 with isdir_patcher:
160 with isdir_patcher:
161 result = factory._create_repo(wire, True)
161 result = factory._create_repo(wire, True)
162 assert isinstance(result, git.Repo)
162 assert isinstance(result, git.Repo)
@@ -1,53 +1,53 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import pytest
18 import pytest
19 from vcsserver.utils import ascii_bytes, ascii_str
19 from vcsserver.utils import ascii_bytes, ascii_str
20
20
21
21
22 @pytest.mark.parametrize('given, expected', [
22 @pytest.mark.parametrize('given, expected', [
23 ('a', b'a'),
23 ('a', b'a'),
24 (u'a', b'a'),
24 ('a', b'a'),
25 ])
25 ])
26 def test_ascii_bytes(given, expected):
26 def test_ascii_bytes(given, expected):
27 assert ascii_bytes(given) == expected
27 assert ascii_bytes(given) == expected
28
28
29
29
30 @pytest.mark.parametrize('given', [
30 @pytest.mark.parametrize('given', [
31 'å',
31 'å',
32 'å'.encode('utf8')
32 'å'.encode('utf8')
33 ])
33 ])
34 def test_ascii_bytes_raises(given):
34 def test_ascii_bytes_raises(given):
35 with pytest.raises(ValueError):
35 with pytest.raises(ValueError):
36 ascii_bytes(given)
36 ascii_bytes(given)
37
37
38
38
39 @pytest.mark.parametrize('given, expected', [
39 @pytest.mark.parametrize('given, expected', [
40 (b'a', 'a'),
40 (b'a', 'a'),
41 ])
41 ])
42 def test_ascii_str(given, expected):
42 def test_ascii_str(given, expected):
43 assert ascii_str(given) == expected
43 assert ascii_str(given) == expected
44
44
45
45
46 @pytest.mark.parametrize('given', [
46 @pytest.mark.parametrize('given', [
47 u'a',
47 'a',
48 'å'.encode('utf8'),
48 'å'.encode('utf8'),
49 u'å'
49 'å'
50 ])
50 ])
51 def test_ascii_str_raises(given):
51 def test_ascii_str_raises(given):
52 with pytest.raises(ValueError):
52 with pytest.raises(ValueError):
53 ascii_str(given)
53 ascii_str(given)
General Comments 0
You need to be logged in to leave comments. Login now