python3: another 2to3 pass
super-admin
r1054:545da466 python3
@@ -1,243 +1,243 b''
1 1 '''
2 2 This library is provided to allow standard python logging
3 3 to output log data as JSON formatted strings
4 4 '''
5 5 import logging
6 6 import json
7 7 import re
8 8 from datetime import date, datetime, time, tzinfo, timedelta
9 9 import traceback
10 10 import importlib
11 11
12 12 from inspect import istraceback
13 13
14 14 from collections import OrderedDict
15 15
16 16
17 17 def _inject_req_id(record, *args, **kwargs):
18 18 return record
19 19
20 20
21 21 ExceptionAwareFormatter = logging.Formatter
22 22
23 23
24 24 ZERO = timedelta(0)
25 25 HOUR = timedelta(hours=1)
26 26
27 27
28 28 class UTC(tzinfo):
29 29 """UTC"""
30 30
31 31 def utcoffset(self, dt):
32 32 return ZERO
33 33
34 34 def tzname(self, dt):
35 35 return "UTC"
36 36
37 37 def dst(self, dt):
38 38 return ZERO
39 39
40 40 utc = UTC()
41 41
42 42
43 43 # skip natural LogRecord attributes
44 44 # http://docs.python.org/library/logging.html#logrecord-attributes
45 45 RESERVED_ATTRS = (
46 46 'args', 'asctime', 'created', 'exc_info', 'exc_text', 'filename',
47 47 'funcName', 'levelname', 'levelno', 'lineno', 'module',
48 48 'msecs', 'message', 'msg', 'name', 'pathname', 'process',
49 49 'processName', 'relativeCreated', 'stack_info', 'thread', 'threadName')
50 50
51 51
52 52 def merge_record_extra(record, target, reserved):
53 53 """
54 54 Merges extra attributes from LogRecord object into target dictionary
55 55
56 56 :param record: logging.LogRecord
57 57 :param target: dict to update
58 58 :param reserved: dict or list with reserved keys to skip
59 59 """
60 60 for key, value in record.__dict__.items():
61 61 # this allows having numeric keys
62 62 if (key not in reserved
63 63 and not (hasattr(key, "startswith")
64 64 and key.startswith('_'))):
65 65 target[key] = value
66 66 return target
67 67
68 68
69 69 class JsonEncoder(json.JSONEncoder):
70 70 """
71 71 A custom encoder extending the default JSONEncoder
72 72 """
73 73
74 74 def default(self, obj):
75 75 if isinstance(obj, (date, datetime, time)):
76 76 return self.format_datetime_obj(obj)
77 77
78 78 elif istraceback(obj):
79 79 return ''.join(traceback.format_tb(obj)).strip()
80 80
81 81 elif type(obj) == Exception \
82 82 or isinstance(obj, Exception) \
83 83 or type(obj) == type:
84 84 return str(obj)
85 85
86 86 try:
87 87 return super(JsonEncoder, self).default(obj)
88 88
89 89 except TypeError:
90 90 try:
91 91 return str(obj)
92 92
93 93 except Exception:
94 94 return None
95 95
96 96 def format_datetime_obj(self, obj):
97 97 return obj.isoformat()
98 98
99 99
100 100 class JsonFormatter(ExceptionAwareFormatter):
101 101 """
102 102 A custom formatter to format logging records as json strings.
103 103 Extra values will be formatted with str() if not supported by
104 104 the json default encoder
105 105 """
106 106
107 107 def __init__(self, *args, **kwargs):
108 108 """
109 109 :param json_default: a function for encoding non-standard objects
110 110 as outlined in http://docs.python.org/2/library/json.html
111 111 :param json_encoder: optional custom encoder
112 112 :param json_serializer: a :meth:`json.dumps`-compatible callable
113 113 that will be used to serialize the log record.
114 114 :param json_indent: an optional :meth:`json.dumps`-compatible numeric value
115 115 that will be used to customize the indent of the output json.
116 116 :param prefix: an optional string prefix added at the beginning of
117 117 the formatted string
118 118 :param json_indent: indent parameter for json.dumps
119 119 :param json_ensure_ascii: ensure_ascii parameter for json.dumps
120 120 :param reserved_attrs: an optional list of fields that will be skipped when
121 121 outputting json log record. Defaults to all log record attributes:
122 122 http://docs.python.org/library/logging.html#logrecord-attributes
123 123 :param timestamp: an optional string/boolean field to add a timestamp when
124 124 outputting the json log record. If string is passed, timestamp will be added
125 125 to log record using string as key. If True boolean is passed, timestamp key
126 126 will be "timestamp". Defaults to True in this formatter.
127 127 """
128 128 self.json_default = self._str_to_fn(kwargs.pop("json_default", None))
129 129 self.json_encoder = self._str_to_fn(kwargs.pop("json_encoder", None))
130 130 self.json_serializer = self._str_to_fn(kwargs.pop("json_serializer", json.dumps))
131 131 self.json_indent = kwargs.pop("json_indent", None)
132 132 self.json_ensure_ascii = kwargs.pop("json_ensure_ascii", True)
133 133 self.prefix = kwargs.pop("prefix", "")
134 134 reserved_attrs = kwargs.pop("reserved_attrs", RESERVED_ATTRS)
135 self.reserved_attrs = dict(zip(reserved_attrs, reserved_attrs))
135 self.reserved_attrs = dict(list(zip(reserved_attrs, reserved_attrs)))
136 136 self.timestamp = kwargs.pop("timestamp", True)
137 137
138 138 # super(JsonFormatter, self).__init__(*args, **kwargs)
139 139 logging.Formatter.__init__(self, *args, **kwargs)
140 140 if not self.json_encoder and not self.json_default:
141 141 self.json_encoder = JsonEncoder
142 142
143 143 self._required_fields = self.parse()
144 self._skip_fields = dict(zip(self._required_fields,
145 self._required_fields))
144 self._skip_fields = dict(list(zip(self._required_fields,
145 self._required_fields)))
146 146 self._skip_fields.update(self.reserved_attrs)
147 147
148 148 def _str_to_fn(self, fn_as_str):
149 149 """
150 150 If the argument is not a string, return whatever was passed in.
151 151 Parses a string such as package.module.function, imports the module
152 152 and returns the function.
153 153
154 154 :param fn_as_str: The string to parse. If not a string, return it.
155 155 """
156 156 if not isinstance(fn_as_str, str):
157 157 return fn_as_str
158 158
159 159 path, _, function = fn_as_str.rpartition('.')
160 160 module = importlib.import_module(path)
161 161 return getattr(module, function)
162 162
163 163 def parse(self):
164 164 """
165 165 Parses format string looking for substitutions
166 166
167 167 This method is responsible for returning a list of fields (as strings)
168 168 to include in all log messages.
169 169 """
170 170 standard_formatters = re.compile(r'\((.+?)\)', re.IGNORECASE)
171 171 return standard_formatters.findall(self._fmt)
172 172
173 173 def add_fields(self, log_record, record, message_dict):
174 174 """
175 175 Override this method to implement custom logic for adding fields.
176 176 """
177 177 for field in self._required_fields:
178 178 log_record[field] = record.__dict__.get(field)
179 179 log_record.update(message_dict)
180 180 merge_record_extra(record, log_record, reserved=self._skip_fields)
181 181
182 182 if self.timestamp:
183 183 key = self.timestamp if type(self.timestamp) == str else 'timestamp'
184 184 log_record[key] = datetime.fromtimestamp(record.created, tz=utc)
185 185
186 186 def process_log_record(self, log_record):
187 187 """
188 188 Override this method to implement custom logic
189 189 on the possibly ordered dictionary.
190 190 """
191 191 return log_record
192 192
193 193 def jsonify_log_record(self, log_record):
194 194 """Returns a json string of the log record."""
195 195 return self.json_serializer(log_record,
196 196 default=self.json_default,
197 197 cls=self.json_encoder,
198 198 indent=self.json_indent,
199 199 ensure_ascii=self.json_ensure_ascii)
200 200
201 201 def serialize_log_record(self, log_record):
202 202 """Returns the final representation of the log record."""
203 203 return "%s%s" % (self.prefix, self.jsonify_log_record(log_record))
204 204
205 205 def format(self, record):
206 206 """Formats a log record and serializes to json"""
207 207 message_dict = {}
208 208 # FIXME: logging.LogRecord.msg and logging.LogRecord.message in typeshed
209 209 # are always of type str. We shouldn't need to override that.
210 210 if isinstance(record.msg, dict):
211 211 message_dict = record.msg
212 212 record.message = None
213 213 else:
214 214 record.message = record.getMessage()
215 215 # only format time if needed
216 216 if "asctime" in self._required_fields:
217 217 record.asctime = self.formatTime(record, self.datefmt)
218 218
219 219 # Display formatted exception, but allow overriding it in the
220 220 # user-supplied dict.
221 221 if record.exc_info and not message_dict.get('exc_info'):
222 222 message_dict['exc_info'] = self.formatException(record.exc_info)
223 223 if not message_dict.get('exc_info') and record.exc_text:
224 224 message_dict['exc_info'] = record.exc_text
225 225 # Display formatted record of stack frames
226 226 # default format is a string returned from :func:`traceback.print_stack`
227 227 try:
228 228 if record.stack_info and not message_dict.get('stack_info'):
229 229 message_dict['stack_info'] = self.formatStack(record.stack_info)
230 230 except AttributeError:
231 231 # Python2.7 doesn't have stack_info.
232 232 pass
233 233
234 234 try:
235 235 log_record = OrderedDict()
236 236 except NameError:
237 237 log_record = {}
238 238
239 239 _inject_req_id(record, with_prefix=False)
240 240 self.add_fields(log_record, record, message_dict)
241 241 log_record = self.process_log_record(log_record)
242 242
243 243 return self.serialize_log_record(log_record)
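Note: the dict(zip(...)) -> dict(list(zip(...))) rewrites in JsonFormatter.__init__ above are the mechanical 2to3 fix for Python 3's lazy zip(). A minimal illustrative sketch of the behavioral difference (not part of this changeset):

reserved_attrs = ('args', 'asctime', 'created')
pairs = zip(reserved_attrs, reserved_attrs)   # one-shot iterator in Python 3
skip = dict(pairs)                            # dict() consumes the iterator
assert skip == {'args': 'args', 'asctime': 'asctime', 'created': 'created'}
assert dict(pairs) == {}                      # a second pass yields nothing
# dict() accepts the iterator directly, so the list(...) wrapper that 2to3
# inserts is redundant here; it is a conservative, defensive rewrite.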
@@ -1,65 +1,65 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 4 # Copyright (C) 2014-2020 RhodeCode GmbH
5 5 #
6 6 # This program is free software; you can redistribute it and/or modify
7 7 # it under the terms of the GNU General Public License as published by
8 8 # the Free Software Foundation; either version 3 of the License, or
9 9 # (at your option) any later version.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software Foundation,
18 18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 19
20 20
21 21 import logging
22 22
23 23 from repoze.lru import LRUCache
24 24
25 25 from vcsserver.utils import safe_str
26 26
27 27 log = logging.getLogger(__name__)
28 28
29 29
30 30 class LRUDict(LRUCache):
31 31 """
32 32 Wrapper to provide partial dict access
33 33 """
34 34
35 35 def __setitem__(self, key, value):
36 36 return self.put(key, value)
37 37
38 38 def __getitem__(self, key):
39 39 return self.get(key)
40 40
41 41 def __contains__(self, key):
42 42 return bool(self.get(key))
43 43
44 44 def __delitem__(self, key):
45 45 del self.data[key]
46 46
47 47 def keys(self):
48 return self.data.keys()
48 return list(self.data.keys())
49 49
50 50
51 51 class LRUDictDebug(LRUDict):
52 52 """
53 53 Wrapper to provide some debug options
54 54 """
55 55 def _report_keys(self):
56 elems_cnt = '%s/%s' % (len(self.keys()), self.size)
56 elems_cnt = '%s/%s' % (len(list(self.keys())), self.size)
57 57 # trick for pformat to print it more nicely
58 58 fmt = '\n'
59 59 for cnt, elem in enumerate(self.keys()):
60 60 fmt += '%s - %s\n' % (cnt+1, safe_str(elem))
61 61 log.debug('current LRU keys (%s):%s', elems_cnt, fmt)
62 62
63 63 def __getitem__(self, key):
64 64 self._report_keys()
65 65 return self.get(key)
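Note: LRUDict.keys() now returns list(self.data.keys()) because Python 3's dict.keys() yields a live view object rather than a list. An illustrative sketch of why callers that mutate while iterating need the copy (not part of this changeset):

d = {'a': 1, 'b': 2}
view = d.keys()            # dict_keys view: live, sized, iterable
d['c'] = 3
assert len(view) == 3      # the view reflects the mutation
for k in list(d.keys()):   # copy first when deleting during iteration;
    del d[k]               # iterating the live view here would raise
                           # RuntimeError: dictionary changed size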
@@ -1,330 +1,330 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import time
19 19 import errno
20 20 import logging
21 21
22 22 import msgpack
23 23 import redis
24 24 import pickle
25 25
26 26 from dogpile.cache.api import CachedValue
27 27 from dogpile.cache.backends import memory as memory_backend
28 28 from dogpile.cache.backends import file as file_backend
29 29 from dogpile.cache.backends import redis as redis_backend
30 30 from dogpile.cache.backends.file import NO_VALUE, FileLock
31 31 from dogpile.cache.util import memoized_property
32 32
33 33 from pyramid.settings import asbool
34 34
35 35 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
36 36 from vcsserver.utils import safe_str
37 37
38 38
39 39 _default_max_size = 1024
40 40
41 41 log = logging.getLogger(__name__)
42 42
43 43
44 44 class LRUMemoryBackend(memory_backend.MemoryBackend):
45 45 key_prefix = 'lru_mem_backend'
46 46 pickle_values = False
47 47
48 48 def __init__(self, arguments):
49 49 max_size = arguments.pop('max_size', _default_max_size)
50 50
51 51 LRUDictClass = LRUDict
52 52 if arguments.pop('log_key_count', None):
53 53 LRUDictClass = LRUDictDebug
54 54
55 55 arguments['cache_dict'] = LRUDictClass(max_size)
56 56 super(LRUMemoryBackend, self).__init__(arguments)
57 57
58 58 def delete(self, key):
59 59 try:
60 60 del self._cache[key]
61 61 except KeyError:
62 62 # we don't care if key isn't there at deletion
63 63 pass
64 64
65 65 def delete_multi(self, keys):
66 66 for key in keys:
67 67 self.delete(key)
68 68
69 69
70 70 class PickleSerializer(object):
71 71
72 72 def _dumps(self, value, safe=False):
73 73 try:
74 74 return pickle.dumps(value)
75 75 except Exception:
76 76 if safe:
77 77 return NO_VALUE
78 78 else:
79 79 raise
80 80
81 81 def _loads(self, value, safe=True):
82 82 try:
83 83 return pickle.loads(value)
84 84 except Exception:
85 85 if safe:
86 86 return NO_VALUE
87 87 else:
88 88 raise
89 89
90 90
91 91 class MsgPackSerializer(object):
92 92
93 93 def _dumps(self, value, safe=False):
94 94 try:
95 95 return msgpack.packb(value)
96 96 except Exception:
97 97 if safe:
98 98 return NO_VALUE
99 99 else:
100 100 raise
101 101
102 102 def _loads(self, value, safe=True):
103 103 """
104 104 pickle maintains the `CachedValue` wrapper around the tuple;
105 105 msgpack does not, so it must be added back in.
106 106 """
107 107 try:
108 108 value = msgpack.unpackb(value, use_list=False)
109 109 return CachedValue(*value)
110 110 except Exception:
111 111 if safe:
112 112 return NO_VALUE
113 113 else:
114 114 raise
115 115
116 116
117 117 import fcntl
118 118 flock_org = fcntl.flock
119 119
120 120
121 121 class CustomLockFactory(FileLock):
122 122
123 123 pass
124 124
125 125
126 126 class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
127 127 key_prefix = 'file_backend'
128 128
129 129 def __init__(self, arguments):
130 130 arguments['lock_factory'] = CustomLockFactory
131 131 db_file = arguments.get('filename')
132 132
133 133 log.debug('initializing %s DB in %s', self.__class__.__name__, db_file)
134 134 try:
135 135 super(FileNamespaceBackend, self).__init__(arguments)
136 136 except Exception:
137 137 log.exception('Failed to initialize db at: %s', db_file)
138 138 raise
139 139
140 140 def __repr__(self):
141 141 return '{} `{}`'.format(self.__class__, self.filename)
142 142
143 143 def list_keys(self, prefix=''):
144 144 prefix = '{}:{}'.format(self.key_prefix, prefix)
145 145
146 146 def cond(v):
147 147 if not prefix:
148 148 return True
149 149
150 150 if v.startswith(prefix):
151 151 return True
152 152 return False
153 153
154 154 with self._dbm_file(True) as dbm:
155 155 try:
156 return filter(cond, dbm.keys())
156 return list(filter(cond, list(dbm.keys())))
157 157 except Exception:
158 158 log.error('Failed to fetch DBM keys from DB: %s', self.get_store())
159 159 raise
160 160
161 161 def get_store(self):
162 162 return self.filename
163 163
164 164 def _dbm_get(self, key):
165 165 with self._dbm_file(False) as dbm:
166 166 if hasattr(dbm, 'get'):
167 167 value = dbm.get(key, NO_VALUE)
168 168 else:
169 169 # gdbm objects lack a .get method
170 170 try:
171 171 value = dbm[key]
172 172 except KeyError:
173 173 value = NO_VALUE
174 174 if value is not NO_VALUE:
175 175 value = self._loads(value)
176 176 return value
177 177
178 178 def get(self, key):
179 179 try:
180 180 return self._dbm_get(key)
181 181 except Exception:
182 182 log.error('Failed to fetch DBM key %s from DB: %s', key, self.get_store())
183 183 raise
184 184
185 185 def set(self, key, value):
186 186 with self._dbm_file(True) as dbm:
187 187 dbm[key] = self._dumps(value)
188 188
189 189 def set_multi(self, mapping):
190 190 with self._dbm_file(True) as dbm:
191 191 for key, value in mapping.items():
192 192 dbm[key] = self._dumps(value)
193 193
194 194
195 195 class BaseRedisBackend(redis_backend.RedisBackend):
196 196 key_prefix = ''
197 197
198 198 def __init__(self, arguments):
199 199 super(BaseRedisBackend, self).__init__(arguments)
200 200 self._lock_timeout = self.lock_timeout
201 201 self._lock_auto_renewal = asbool(arguments.pop("lock_auto_renewal", True))
202 202
203 203 if self._lock_auto_renewal and not self._lock_timeout:
204 204 # set default timeout for auto_renewal
205 205 self._lock_timeout = 30
206 206
207 207 def _create_client(self):
208 208 args = {}
209 209
210 210 if self.url is not None:
211 211 args.update(url=self.url)
212 212
213 213 else:
214 214 args.update(
215 215 host=self.host, password=self.password,
216 216 port=self.port, db=self.db
217 217 )
218 218
219 219 connection_pool = redis.ConnectionPool(**args)
220 220
221 221 return redis.StrictRedis(connection_pool=connection_pool)
222 222
223 223 def list_keys(self, prefix=''):
224 224 prefix = '{}:{}*'.format(self.key_prefix, prefix)
225 225 return self.client.keys(prefix)
226 226
227 227 def get_store(self):
228 228 return self.client.connection_pool
229 229
230 230 def get(self, key):
231 231 value = self.client.get(key)
232 232 if value is None:
233 233 return NO_VALUE
234 234 return self._loads(value)
235 235
236 236 def get_multi(self, keys):
237 237 if not keys:
238 238 return []
239 239 values = self.client.mget(keys)
240 240 loads = self._loads
241 241 return [
242 242 loads(v) if v is not None else NO_VALUE
243 243 for v in values]
244 244
245 245 def set(self, key, value):
246 246 if self.redis_expiration_time:
247 247 self.client.setex(key, self.redis_expiration_time,
248 248 self._dumps(value))
249 249 else:
250 250 self.client.set(key, self._dumps(value))
251 251
252 252 def set_multi(self, mapping):
253 253 dumps = self._dumps
254 254 mapping = dict(
255 255 (k, dumps(v))
256 256 for k, v in mapping.items()
257 257 )
258 258
259 259 if not self.redis_expiration_time:
260 260 self.client.mset(mapping)
261 261 else:
262 262 pipe = self.client.pipeline()
263 263 for key, value in mapping.items():
264 264 pipe.setex(key, self.redis_expiration_time, value)
265 265 pipe.execute()
266 266
267 267 def get_mutex(self, key):
268 268 if self.distributed_lock:
269 269 lock_key = '_lock_{0}'.format(safe_str(key))
270 270 return get_mutex_lock(self.client, lock_key, self._lock_timeout,
271 271 auto_renewal=self._lock_auto_renewal)
272 272 else:
273 273 return None
274 274
275 275
276 276 class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
277 277 key_prefix = 'redis_pickle_backend'
278 278 pass
279 279
280 280
281 281 class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
282 282 key_prefix = 'redis_msgpack_backend'
283 283 pass
284 284
285 285
286 286 def get_mutex_lock(client, lock_key, lock_timeout, auto_renewal=False):
287 287 import redis_lock
288 288
289 289 class _RedisLockWrapper(object):
290 290 """LockWrapper for redis_lock"""
291 291
292 292 @classmethod
293 293 def get_lock(cls):
294 294 return redis_lock.Lock(
295 295 redis_client=client,
296 296 name=lock_key,
297 297 expire=lock_timeout,
298 298 auto_renewal=auto_renewal,
299 299 strict=True,
300 300 )
301 301
302 302 def __repr__(self):
303 303 return "{}:{}".format(self.__class__.__name__, lock_key)
304 304
305 305 def __str__(self):
306 306 return "{}:{}".format(self.__class__.__name__, lock_key)
307 307
308 308 def __init__(self):
309 309 self.lock = self.get_lock()
310 310 self.lock_key = lock_key
311 311
312 312 def acquire(self, wait=True):
313 313 log.debug('Trying to acquire Redis lock for key %s', self.lock_key)
314 314 try:
315 315 acquired = self.lock.acquire(wait)
316 316 log.debug('Got lock for key %s, %s', self.lock_key, acquired)
317 317 return acquired
318 318 except redis_lock.AlreadyAcquired:
319 319 return False
320 320 except redis_lock.AlreadyStarted:
321 321 # refresh thread exists, but it also means we acquired the lock
322 322 return True
323 323
324 324 def release(self):
325 325 try:
326 326 self.lock.release()
327 327 except redis_lock.NotAcquired:
328 328 pass
329 329
330 330 return _RedisLockWrapper()
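Note: in FileNamespaceBackend.list_keys above, filter(cond, dbm.keys()) became list(filter(...)) because Python 3's filter() returns a lazy iterator. An illustrative sketch of the difference (the key data below is made up, not part of this changeset):

keys = [b'file_backend:abc', b'file_backend:def', b'other:xyz']
matching = filter(lambda k: k.startswith(b'file_backend:'), keys)
# `matching` is a lazy filter object: it has no len(), cannot be indexed,
# and is exhausted after one pass. Callers expect a real list:
matching = list(matching)
assert matching == [b'file_backend:abc', b'file_backend:def']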
@@ -1,207 +1,207 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import time
20 20 import logging
21 21 import functools
22 22 import decorator
23 23
24 24 from dogpile.cache import CacheRegion
25 25
26 26 from vcsserver.utils import safe_bytes, sha1
27 27 from vcsserver.lib.rc_cache import region_meta
28 28
29 29 log = logging.getLogger(__name__)
30 30
31 31
32 32 class RhodeCodeCacheRegion(CacheRegion):
33 33
34 34 def conditional_cache_on_arguments(
35 35 self, namespace=None,
36 36 expiration_time=None,
37 37 should_cache_fn=None,
38 38 to_str=str,
39 39 function_key_generator=None,
40 40 condition=True):
41 41 """
42 42 Custom conditional decorator that will not touch any dogpile internals if
43 43 the condition isn't met. This works a bit differently than should_cache_fn,
44 44 and it's faster in cases where we never want to compute cached values.
45 45 """
46 46 expiration_time_is_callable = callable(expiration_time)
47 47
48 48 if function_key_generator is None:
49 49 function_key_generator = self.function_key_generator
50 50
51 51 def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):
52 52
53 53 if not condition:
54 54 log.debug('Calling un-cached method:%s', user_func.__name__)
55 55 start = time.time()
56 56 result = user_func(*arg, **kw)
57 57 total = time.time() - start
58 58 log.debug('un-cached method:%s took %.4fs', user_func.__name__, total)
59 59 return result
60 60
61 61 key = key_generator(*arg, **kw)
62 62
63 63 timeout = expiration_time() if expiration_time_is_callable \
64 64 else expiration_time
65 65
66 66 log.debug('Calling cached method:`%s`', user_func.__name__)
67 67 return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))
68 68
69 69 def cache_decorator(user_func):
70 70 if to_str is str:
71 71 # backwards compatible
72 72 key_generator = function_key_generator(namespace, user_func)
73 73 else:
74 74 key_generator = function_key_generator(namespace, user_func, to_str=to_str)
75 75
76 76 def refresh(*arg, **kw):
77 77 """
78 78 Like invalidate, but regenerates the value instead
79 79 """
80 80 key = key_generator(*arg, **kw)
81 81 value = user_func(*arg, **kw)
82 82 self.set(key, value)
83 83 return value
84 84
85 85 def invalidate(*arg, **kw):
86 86 key = key_generator(*arg, **kw)
87 87 self.delete(key)
88 88
89 89 def set_(value, *arg, **kw):
90 90 key = key_generator(*arg, **kw)
91 91 self.set(key, value)
92 92
93 93 def get(*arg, **kw):
94 94 key = key_generator(*arg, **kw)
95 95 return self.get(key)
96 96
97 97 user_func.set = set_
98 98 user_func.invalidate = invalidate
99 99 user_func.get = get
100 100 user_func.refresh = refresh
101 101 user_func.key_generator = key_generator
102 102 user_func.original = user_func
103 103
104 104 # Use `decorate` to preserve the signature of :param:`user_func`.
105 105 return decorator.decorate(user_func, functools.partial(
106 106 get_or_create_for_user_func, key_generator))
107 107
108 108 return cache_decorator
109 109
110 110
111 111 def make_region(*arg, **kw):
112 112 return RhodeCodeCacheRegion(*arg, **kw)
113 113
114 114
115 115 def get_default_cache_settings(settings, prefixes=None):
116 116 prefixes = prefixes or []
117 117 cache_settings = {}
118 118 for key in settings.keys():
119 119 for prefix in prefixes:
120 120 if key.startswith(prefix):
121 121 name = key.split(prefix)[1].strip()
122 122 val = settings[key]
123 123 if isinstance(val, str):
124 124 val = val.strip()
125 125 cache_settings[name] = val
126 126 return cache_settings
127 127
128 128
129 129 def compute_key_from_params(*args):
130 130 """
131 131 Helper to compute key from given params to be used in cache manager
132 132 """
133 133 return sha1(safe_bytes("_".join(map(str, args))))
134 134
135 135
136 136 def backend_key_generator(backend):
137 137 """
138 138 Special wrapper that also sends over the backend to the key generator
139 139 """
140 140 def wrapper(namespace, fn):
141 141 return key_generator(backend, namespace, fn)
142 142 return wrapper
143 143
144 144
145 145 def key_generator(backend, namespace, fn):
146 146 fname = fn.__name__
147 147
148 148 def generate_key(*args):
149 149 backend_prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
150 150 namespace_pref = namespace or 'default_namespace'
151 151 arg_key = compute_key_from_params(*args)
152 152 final_key = "{}:{}:{}_{}".format(backend_prefix, namespace_pref, fname, arg_key)
153 153
154 154 return final_key
155 155
156 156 return generate_key
157 157
158 158
159 159 def get_or_create_region(region_name, region_namespace=None):
160 160 from vcsserver.lib.rc_cache.backends import FileNamespaceBackend
161 161 region_obj = region_meta.dogpile_cache_regions.get(region_name)
162 162 if not region_obj:
163 163 raise EnvironmentError(
164 164 'Region `{}` not found in configured regions: {}.'.format(
165 region_name, region_meta.dogpile_cache_regions.keys()))
165 region_name, list(region_meta.dogpile_cache_regions.keys())))
166 166
167 167 region_uid_name = '{}:{}'.format(region_name, region_namespace)
168 168 if isinstance(region_obj.actual_backend, FileNamespaceBackend):
169 169 region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
170 170 if region_exist:
171 171 log.debug('Using already configured region: %s', region_namespace)
172 172 return region_exist
173 173 cache_dir = region_meta.dogpile_config_defaults['cache_dir']
174 174 expiration_time = region_obj.expiration_time
175 175
176 176 if not os.path.isdir(cache_dir):
177 177 os.makedirs(cache_dir)
178 178 new_region = make_region(
179 179 name=region_uid_name,
180 180 function_key_generator=backend_key_generator(region_obj.actual_backend)
181 181 )
182 182 namespace_filename = os.path.join(
183 183 cache_dir, "{}.cache.dbm".format(region_namespace))
184 184 # special type that allows one DB file per namespace
185 185 new_region.configure(
186 186 backend='dogpile.cache.rc.file_namespace',
187 187 expiration_time=expiration_time,
188 188 arguments={"filename": namespace_filename}
189 189 )
190 190
191 191 # create and save in region caches
192 192 log.debug('configuring new region: %s', region_uid_name)
193 193 region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region
194 194
195 195 return region_obj
196 196
197 197
198 198 def clear_cache_namespace(cache_region, cache_namespace_uid, invalidate=False):
199 199 region = get_or_create_region(cache_region, cache_namespace_uid)
200 200 cache_keys = region.backend.list_keys(prefix=cache_namespace_uid)
201 201 num_delete_keys = len(cache_keys)
202 202 if invalidate:
203 203 region.invalidate(hard=False)
204 204 else:
205 205 if num_delete_keys:
206 206 region.delete_multi(cache_keys)
207 207 return num_delete_keys
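Note: the list(region_meta.dogpile_cache_regions.keys()) change in get_or_create_region keeps the error message readable, since interpolating a raw view prints its repr. An illustrative sketch (region names are made up, not part of this changeset):

regions = {'repo_object': None, 'sql_cache_short': None}
'configured regions: {}'.format(regions.keys())
# "configured regions: dict_keys(['repo_object', 'sql_cache_short'])"
'configured regions: {}'.format(list(regions.keys()))
# "configured regions: ['repo_object', 'sql_cache_short']"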
@@ -1,864 +1,864 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18
19 19 import os
20 20 import subprocess
21 21 from urllib.error import URLError
22 22 import urllib.parse
23 23 import logging
24 24 import posixpath as vcspath
25 25 import io
26 26 import urllib.request
27 27 import urllib.parse
28 28 import urllib.error
29 29 import traceback
30 30
31 31 import svn.client
32 32 import svn.core
33 33 import svn.delta
34 34 import svn.diff
35 35 import svn.fs
36 36 import svn.repos
37 37
38 38 from vcsserver import svn_diff, exceptions, subprocessio, settings
39 39 from vcsserver.base import RepoFactory, raise_from_original, ArchiveNode, archive_repo
40 40 from vcsserver.exceptions import NoContentException
41 41 from vcsserver.utils import safe_str
42 42 from vcsserver.vcs_base import RemoteBase
43 43 from vcsserver.lib.svnremoterepo import svnremoterepo
44 44 log = logging.getLogger(__name__)
45 45
46 46
47 47 svn_compatible_versions_map = {
48 48 'pre-1.4-compatible': '1.3',
49 49 'pre-1.5-compatible': '1.4',
50 50 'pre-1.6-compatible': '1.5',
51 51 'pre-1.8-compatible': '1.7',
52 52 'pre-1.9-compatible': '1.8',
53 53 }
54 54
55 55 current_compatible_version = '1.14'
56 56
57 57
58 58 def reraise_safe_exceptions(func):
59 59 """Decorator for converting svn exceptions to something neutral."""
60 60 def wrapper(*args, **kwargs):
61 61 try:
62 62 return func(*args, **kwargs)
63 63 except Exception as e:
64 64 if not hasattr(e, '_vcs_kind'):
65 65 log.exception("Unhandled exception in svn remote call")
66 66 raise_from_original(exceptions.UnhandledException(e))
67 67 raise
68 68 return wrapper
69 69
70 70
71 71 class SubversionFactory(RepoFactory):
72 72 repo_type = 'svn'
73 73
74 74 def _create_repo(self, wire, create, compatible_version):
75 75 path = svn.core.svn_path_canonicalize(wire['path'])
76 76 if create:
77 77 fs_config = {'compatible-version': current_compatible_version}
78 78 if compatible_version:
79 79
80 80 compatible_version_string = \
81 81 svn_compatible_versions_map.get(compatible_version) \
82 82 or compatible_version
83 83 fs_config['compatible-version'] = compatible_version_string
84 84
85 85 log.debug('Create SVN repo with config "%s"', fs_config)
86 86 repo = svn.repos.create(path, "", "", None, fs_config)
87 87 else:
88 88 repo = svn.repos.open(path)
89 89
90 90 log.debug('Got SVN object: %s', repo)
91 91 return repo
92 92
93 93 def repo(self, wire, create=False, compatible_version=None):
94 94 """
95 95 Get a repository instance for the given path.
96 96 """
97 97 return self._create_repo(wire, create, compatible_version)
98 98
99 99
100 100 NODE_TYPE_MAPPING = {
101 101 svn.core.svn_node_file: 'file',
102 102 svn.core.svn_node_dir: 'dir',
103 103 }
104 104
105 105
106 106 class SvnRemote(RemoteBase):
107 107
108 108 def __init__(self, factory, hg_factory=None):
109 109 self._factory = factory
110 110
111 111 @reraise_safe_exceptions
112 112 def discover_svn_version(self):
113 113 try:
114 114 import svn.core
115 115 svn_ver = svn.core.SVN_VERSION
116 116 except ImportError:
117 117 svn_ver = None
118 118 return svn_ver
119 119
120 120 @reraise_safe_exceptions
121 121 def is_empty(self, wire):
122 122
123 123 try:
124 124 return self.lookup(wire, -1) == 0
125 125 except Exception:
126 126 log.exception("failed to read object_store")
127 127 return False
128 128
129 129 def check_url(self, url):
130 130
131 131 # the uuid function returns a valid UUID only for a proper repo,
132 132 # otherwise it throws an exception
133 133 username, password, src_url = self.get_url_and_credentials(url)
134 134 try:
135 135 svnremoterepo(username, password, src_url).svn().uuid
136 136 except Exception:
137 137 tb = traceback.format_exc()
138 138 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
139 139 raise URLError(
140 140 '"%s" is not a valid Subversion source url.' % (url, ))
141 141 return True
142 142
143 143 def is_path_valid_repository(self, wire, path):
144 144
145 145 # NOTE(marcink): short circuit the check for SVN repo
146 146 # the repos.open check might be expensive, but we have one cheap
147 147 # precondition that we can use: checking for the 'format' file
148 148
149 149 if not os.path.isfile(os.path.join(path, 'format')):
150 150 return False
151 151
152 152 try:
153 153 svn.repos.open(path)
154 154 except svn.core.SubversionException:
155 155 tb = traceback.format_exc()
156 156 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
157 157 return False
158 158 return True
159 159
160 160 @reraise_safe_exceptions
161 161 def verify(self, wire,):
162 162 repo_path = wire['path']
163 163 if not self.is_path_valid_repository(wire, repo_path):
164 164 raise Exception(
165 165 "Path %s is not a valid Subversion repository." % repo_path)
166 166
167 167 cmd = ['svnadmin', 'info', repo_path]
168 168 stdout, stderr = subprocessio.run_command(cmd)
169 169 return stdout
170 170
171 171 def lookup(self, wire, revision):
172 172 if revision not in [-1, None, 'HEAD']:
173 173 raise NotImplementedError
174 174 repo = self._factory.repo(wire)
175 175 fs_ptr = svn.repos.fs(repo)
176 176 head = svn.fs.youngest_rev(fs_ptr)
177 177 return head
178 178
179 179 def lookup_interval(self, wire, start_ts, end_ts):
180 180 repo = self._factory.repo(wire)
181 181 fsobj = svn.repos.fs(repo)
182 182 start_rev = None
183 183 end_rev = None
184 184 if start_ts:
185 185 start_ts_svn = apr_time_t(start_ts)
186 186 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
187 187 else:
188 188 start_rev = 1
189 189 if end_ts:
190 190 end_ts_svn = apr_time_t(end_ts)
191 191 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
192 192 else:
193 193 end_rev = svn.fs.youngest_rev(fsobj)
194 194 return start_rev, end_rev
195 195
196 196 def revision_properties(self, wire, revision):
197 197
198 198 cache_on, context_uid, repo_id = self._cache_on(wire)
199 199 region = self._region(wire)
200 200 @region.conditional_cache_on_arguments(condition=cache_on)
201 201 def _revision_properties(_repo_id, _revision):
202 202 repo = self._factory.repo(wire)
203 203 fs_ptr = svn.repos.fs(repo)
204 204 return svn.fs.revision_proplist(fs_ptr, revision)
205 205 return _revision_properties(repo_id, revision)
206 206
207 207 def revision_changes(self, wire, revision):
208 208
209 209 repo = self._factory.repo(wire)
210 210 fsobj = svn.repos.fs(repo)
211 211 rev_root = svn.fs.revision_root(fsobj, revision)
212 212
213 213 editor = svn.repos.ChangeCollector(fsobj, rev_root)
214 214 editor_ptr, editor_baton = svn.delta.make_editor(editor)
215 215 base_dir = ""
216 216 send_deltas = False
217 217 svn.repos.replay2(
218 218 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
219 219 editor_ptr, editor_baton, None)
220 220
221 221 added = []
222 222 changed = []
223 223 removed = []
224 224
225 225 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
226 226 for path, change in editor.changes.items():
227 227 # TODO: Decide what to do with directory nodes. Subversion can add
228 228 # empty directories.
229 229
230 230 if change.item_kind == svn.core.svn_node_dir:
231 231 continue
232 232 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
233 233 added.append(path)
234 234 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
235 235 svn.repos.CHANGE_ACTION_REPLACE]:
236 236 changed.append(path)
237 237 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
238 238 removed.append(path)
239 239 else:
240 240 raise NotImplementedError(
241 241 "Action %s not supported on path %s" % (
242 242 change.action, path))
243 243
244 244 changes = {
245 245 'added': added,
246 246 'changed': changed,
247 247 'removed': removed,
248 248 }
249 249 return changes
250 250
251 251 @reraise_safe_exceptions
252 252 def node_history(self, wire, path, revision, limit):
253 253 cache_on, context_uid, repo_id = self._cache_on(wire)
254 254 region = self._region(wire)
255 255 @region.conditional_cache_on_arguments(condition=cache_on)
256 256 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
257 257 cross_copies = False
258 258 repo = self._factory.repo(wire)
259 259 fsobj = svn.repos.fs(repo)
260 260 rev_root = svn.fs.revision_root(fsobj, revision)
261 261
262 262 history_revisions = []
263 263 history = svn.fs.node_history(rev_root, path)
264 264 history = svn.fs.history_prev(history, cross_copies)
265 265 while history:
266 266 __, node_revision = svn.fs.history_location(history)
267 267 history_revisions.append(node_revision)
268 268 if limit and len(history_revisions) >= limit:
269 269 break
270 270 history = svn.fs.history_prev(history, cross_copies)
271 271 return history_revisions
272 272 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
273 273
274 274 def node_properties(self, wire, path, revision):
275 275 cache_on, context_uid, repo_id = self._cache_on(wire)
276 276 region = self._region(wire)
277 277 @region.conditional_cache_on_arguments(condition=cache_on)
278 278 def _node_properties(_repo_id, _path, _revision):
279 279 repo = self._factory.repo(wire)
280 280 fsobj = svn.repos.fs(repo)
281 281 rev_root = svn.fs.revision_root(fsobj, revision)
282 282 return svn.fs.node_proplist(rev_root, path)
283 283 return _node_properties(repo_id, path, revision)
284 284
285 285 def file_annotate(self, wire, path, revision):
286 abs_path = 'file://' + urllib.pathname2url(
286 abs_path = 'file://' + urllib.request.pathname2url(
287 287 vcspath.join(wire['path'], path))
288 288 file_uri = svn.core.svn_path_canonicalize(abs_path)
289 289
290 290 start_rev = svn_opt_revision_value_t(0)
291 291 peg_rev = svn_opt_revision_value_t(revision)
292 292 end_rev = peg_rev
293 293
294 294 annotations = []
295 295
296 296 def receiver(line_no, revision, author, date, line, pool):
297 297 annotations.append((line_no, revision, line))
298 298
299 299 # TODO: Cannot use blame5, missing typemap function in the swig code
300 300 try:
301 301 svn.client.blame2(
302 302 file_uri, peg_rev, start_rev, end_rev,
303 303 receiver, svn.client.create_context())
304 304 except svn.core.SubversionException as exc:
305 305 log.exception("Error during blame operation.")
306 306 raise Exception(
307 307 "Blame not supported or file does not exist at path %s. "
308 308 "Error %s." % (path, exc))
309 309
310 310 return annotations
311 311
312 312 def get_node_type(self, wire, path, revision=None):
313 313
314 314 cache_on, context_uid, repo_id = self._cache_on(wire)
315 315 region = self._region(wire)
316 316 @region.conditional_cache_on_arguments(condition=cache_on)
317 317 def _get_node_type(_repo_id, _path, _revision):
318 318 repo = self._factory.repo(wire)
319 319 fs_ptr = svn.repos.fs(repo)
320 320 if _revision is None:
321 321 _revision = svn.fs.youngest_rev(fs_ptr)
322 322 root = svn.fs.revision_root(fs_ptr, _revision)
323 323 node = svn.fs.check_path(root, path)
324 324 return NODE_TYPE_MAPPING.get(node, None)
325 325 return _get_node_type(repo_id, path, revision)
326 326
327 327 def get_nodes(self, wire, path, revision=None):
328 328
329 329 cache_on, context_uid, repo_id = self._cache_on(wire)
330 330 region = self._region(wire)
331 331 @region.conditional_cache_on_arguments(condition=cache_on)
332 332 def _get_nodes(_repo_id, _path, _revision):
333 333 repo = self._factory.repo(wire)
334 334 fsobj = svn.repos.fs(repo)
335 335 if _revision is None:
336 336 _revision = svn.fs.youngest_rev(fsobj)
337 337 root = svn.fs.revision_root(fsobj, _revision)
338 338 entries = svn.fs.dir_entries(root, path)
339 339 result = []
340 340 for entry_path, entry_info in entries.items():
341 341 result.append(
342 342 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
343 343 return result
344 344 return _get_nodes(repo_id, path, revision)
345 345
346 346 def get_file_content(self, wire, path, rev=None):
347 347 repo = self._factory.repo(wire)
348 348 fsobj = svn.repos.fs(repo)
349 349 if rev is None:
350 350 rev = svn.fs.youngest_revision(fsobj)
351 351 root = svn.fs.revision_root(fsobj, rev)
352 352 content = svn.core.Stream(svn.fs.file_contents(root, path))
353 353 return content.read()
354 354
355 355 def get_file_size(self, wire, path, revision=None):
356 356
357 357 cache_on, context_uid, repo_id = self._cache_on(wire)
358 358 region = self._region(wire)
359 359
360 360 @region.conditional_cache_on_arguments(condition=cache_on)
361 361 def _get_file_size(_repo_id, _path, _revision):
362 362 repo = self._factory.repo(wire)
363 363 fsobj = svn.repos.fs(repo)
364 364 if _revision is None:
365 365 _revision = svn.fs.youngest_revision(fsobj)
366 366 root = svn.fs.revision_root(fsobj, _revision)
367 367 size = svn.fs.file_length(root, path)
368 368 return size
369 369 return _get_file_size(repo_id, path, revision)
370 370
371 371 def create_repository(self, wire, compatible_version=None):
372 372 log.info('Creating Subversion repository in path "%s"', wire['path'])
373 373 self._factory.repo(wire, create=True,
374 374 compatible_version=compatible_version)
375 375
376 376 def get_url_and_credentials(self, src_url):
377 377 obj = urllib.parse.urlparse(src_url)
378 378 username = obj.username or None
379 379 password = obj.password or None
380 380 return username, password, src_url
381 381
382 382 def import_remote_repository(self, wire, src_url):
383 383 repo_path = wire['path']
384 384 if not self.is_path_valid_repository(wire, repo_path):
385 385 raise Exception(
386 386 "Path %s is not a valid Subversion repository." % repo_path)
387 387
388 388 username, password, src_url = self.get_url_and_credentials(src_url)
389 389 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
390 390 '--trust-server-cert-failures=unknown-ca']
391 391 if username and password:
392 392 rdump_cmd += ['--username', username, '--password', password]
393 393 rdump_cmd += [src_url]
394 394
395 395 rdump = subprocess.Popen(
396 396 rdump_cmd,
397 397 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
398 398 load = subprocess.Popen(
399 399 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
400 400
401 401 # TODO: johbo: This can be a very long operation, might be better
402 402 # to track some kind of status and provide an api to check if the
403 403 # import is done.
404 404 rdump.wait()
405 405 load.wait()
406 406
407 407 log.debug('Return process ended with code: %s', rdump.returncode)
408 408 if rdump.returncode != 0:
409 409 errors = rdump.stderr.read()
410 410 log.error('svnrdump dump failed: statuscode %s: message: %s', rdump.returncode, errors)
411 411
412 412 reason = 'UNKNOWN'
413 413 if b'svnrdump: E230001:' in errors:
414 414 reason = 'INVALID_CERTIFICATE'
415 415
416 416 if reason == 'UNKNOWN':
417 417 reason = 'UNKNOWN:{}'.format(safe_str(errors))
418 418
419 419 raise Exception(
420 420 'Failed to dump the remote repository from %s. Reason:%s' % (
421 421 src_url, reason))
422 422 if load.returncode != 0:
423 423 raise Exception(
424 424 'Failed to load the dump of remote repository from %s.' %
425 425 (src_url, ))
426 426
427 427 def commit(self, wire, message, author, timestamp, updated, removed):
428 428 assert isinstance(message, str)
429 429 assert isinstance(author, str)
430 430
431 431 repo = self._factory.repo(wire)
432 432 fsobj = svn.repos.fs(repo)
433 433
434 434 rev = svn.fs.youngest_rev(fsobj)
435 435 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
436 436 txn_root = svn.fs.txn_root(txn)
437 437
438 438 for node in updated:
439 439 TxnNodeProcessor(node, txn_root).update()
440 440 for node in removed:
441 441 TxnNodeProcessor(node, txn_root).remove()
442 442
443 443 commit_id = svn.repos.fs_commit_txn(repo, txn)
444 444
445 445 if timestamp:
446 446 apr_time = apr_time_t(timestamp)
447 447 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
448 448 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
449 449
450 450 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
451 451 return commit_id
452 452
453 453 def diff(self, wire, rev1, rev2, path1=None, path2=None,
454 454 ignore_whitespace=False, context=3):
455 455
456 456 wire.update(cache=False)
457 457 repo = self._factory.repo(wire)
458 458 diff_creator = SvnDiffer(
459 459 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
460 460 try:
461 461 return diff_creator.generate_diff()
462 462 except svn.core.SubversionException as e:
463 463 log.exception(
464 464 "Error during diff operation operation. "
465 465 "Path might not exist %s, %s" % (path1, path2))
466 466 return ""
467 467
468 468 @reraise_safe_exceptions
469 469 def is_large_file(self, wire, path):
470 470 return False
471 471
472 472 @reraise_safe_exceptions
473 473 def is_binary(self, wire, rev, path):
474 474 cache_on, context_uid, repo_id = self._cache_on(wire)
475 475
476 476 region = self._region(wire)
477 477 @region.conditional_cache_on_arguments(condition=cache_on)
478 478 def _is_binary(_repo_id, _rev, _path):
479 479 raw_bytes = self.get_file_content(wire, path, rev)
480 480 return raw_bytes and b'\0' in raw_bytes  # file content is bytes in py3
481 481
482 482 return _is_binary(repo_id, rev, path)
483 483
484 484 @reraise_safe_exceptions
485 485 def run_svn_command(self, wire, cmd, **opts):
486 486 path = wire.get('path', None)
487 487
488 488 if path and os.path.isdir(path):
489 489 opts['cwd'] = path
490 490
491 491 safe_call = opts.pop('_safe', False)
492 492
493 493 svnenv = os.environ.copy()
494 494 svnenv.update(opts.pop('extra_env', {}))
495 495
496 496 _opts = {'env': svnenv, 'shell': False}
497 497
498 498 try:
499 499 _opts.update(opts)
500 500 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
501 501
502 502 return b''.join(proc), b''.join(proc.stderr)
503 503 except OSError as err:
504 504 if safe_call:
505 505 return '', safe_str(err).strip()
506 506 else:
507 507 cmd = ' '.join(cmd)  # human-friendly CMD
508 508 tb_err = ("Couldn't run svn command (%s).\n"
509 509 "Original error was:%s\n"
510 510 "Call options:%s\n"
511 511 % (cmd, err, _opts))
512 512 log.exception(tb_err)
513 513 raise exceptions.VcsException()(tb_err)
514 514
515 515 @reraise_safe_exceptions
516 516 def install_hooks(self, wire, force=False):
517 517 from vcsserver.hook_utils import install_svn_hooks
518 518 repo_path = wire['path']
519 519 binary_dir = settings.BINARY_DIR
520 520 executable = None
521 521 if binary_dir:
522 522 executable = os.path.join(binary_dir, 'python')
523 523 return install_svn_hooks(
524 524 repo_path, executable=executable, force_create=force)
525 525
526 526 @reraise_safe_exceptions
527 527 def get_hooks_info(self, wire):
528 528 from vcsserver.hook_utils import (
529 529 get_svn_pre_hook_version, get_svn_post_hook_version)
530 530 repo_path = wire['path']
531 531 return {
532 532 'pre_version': get_svn_pre_hook_version(repo_path),
533 533 'post_version': get_svn_post_hook_version(repo_path),
534 534 }
535 535
536 536 @reraise_safe_exceptions
537 537 def set_head_ref(self, wire, head_name):
538 538 pass
539 539
540 540 @reraise_safe_exceptions
541 541 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
542 542 archive_dir_name, commit_id):
543 543
544 544 def walk_tree(root, root_dir, _commit_id):
545 545 """
546 546 Special recursive svn repo walker
547 547 """
548 548
549 549 filemode_default = 0o100644
550 550 filemode_executable = 0o100755
551 551
552 552 file_iter = svn.fs.dir_entries(root, root_dir)
553 553 for f_name in file_iter:
554 554 f_type = NODE_TYPE_MAPPING.get(file_iter[f_name].kind, None)
555 555
556 556 if f_type == 'dir':
557 557 # yield the dir entry first, then all entries in that dir
558 558 yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type
559 559 new_root = os.path.join(root_dir, f_name)
560 560 for _f_name, _f_data, _f_type in walk_tree(root, new_root, _commit_id):
561 561 yield _f_name, _f_data, _f_type
562 562 else:
563 563 f_path = os.path.join(root_dir, f_name).rstrip('/')
564 564 prop_list = svn.fs.node_proplist(root, f_path)
565 565
566 566 f_mode = filemode_default
567 567 if prop_list.get('svn:executable'):
568 568 f_mode = filemode_executable
569 569
570 570 f_is_link = False
571 571 if prop_list.get('svn:special'):
572 572 f_is_link = True
573 573
574 574 data = {
575 575 'is_link': f_is_link,
576 576 'mode': f_mode,
577 577 'content_stream': svn.core.Stream(svn.fs.file_contents(root, f_path)).read
578 578 }
579 579
580 580 yield f_path, data, f_type
581 581
582 582 def file_walker(_commit_id, path):
583 583 repo = self._factory.repo(wire)
584 584 root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id))
585 585
586 586 def no_content():
587 587 raise NoContentException()
588 588
589 589 for f_name, f_data, f_type in walk_tree(root, path, _commit_id):
590 590 file_path = f_name
591 591
592 592 if f_type == 'dir':
593 593 mode = f_data['mode']
594 594 yield ArchiveNode(file_path, mode, False, no_content)
595 595 else:
596 596 mode = f_data['mode']
597 597 is_link = f_data['is_link']
598 598 data_stream = f_data['content_stream']
599 599 yield ArchiveNode(file_path, mode, is_link, data_stream)
600 600
601 601 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
602 602 archive_dir_name, commit_id)
603 603
604 604
605 605 class SvnDiffer(object):
606 606 """
607 607 Utility to create diffs based on difflib and the Subversion api
608 608 """
609 609
610 610 binary_content = False
611 611
612 612 def __init__(
613 613 self, repo, src_rev, src_path, tgt_rev, tgt_path,
614 614 ignore_whitespace, context):
615 615 self.repo = repo
616 616 self.ignore_whitespace = ignore_whitespace
617 617 self.context = context
618 618
619 619 fsobj = svn.repos.fs(repo)
620 620
621 621 self.tgt_rev = tgt_rev
622 622 self.tgt_path = tgt_path or ''
623 623 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
624 624 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
625 625
626 626 self.src_rev = src_rev
627 627 self.src_path = src_path or self.tgt_path
628 628 self.src_root = svn.fs.revision_root(fsobj, src_rev)
629 629 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
630 630
631 631 self._validate()
632 632
633 633 def _validate(self):
634 634 if (self.tgt_kind != svn.core.svn_node_none and
635 635 self.src_kind != svn.core.svn_node_none and
636 636 self.src_kind != self.tgt_kind):
637 637 # TODO: johbo: proper error handling
638 638 raise Exception(
639 639 "Source and target are not compatible for diff generation. "
640 640 "Source type: %s, target type: %s" %
641 641 (self.src_kind, self.tgt_kind))
642 642
643 643 def generate_diff(self):
644 644 buf = io.StringIO()
645 645 if self.tgt_kind == svn.core.svn_node_dir:
646 646 self._generate_dir_diff(buf)
647 647 else:
648 648 self._generate_file_diff(buf)
649 649 return buf.getvalue()
650 650
651 651 def _generate_dir_diff(self, buf):
652 652 editor = DiffChangeEditor()
653 653 editor_ptr, editor_baton = svn.delta.make_editor(editor)
654 654 svn.repos.dir_delta2(
655 655 self.src_root,
656 656 self.src_path,
657 657 '', # src_entry
658 658 self.tgt_root,
659 659 self.tgt_path,
660 660 editor_ptr, editor_baton,
661 661 authorization_callback_allow_all,
662 662 False, # text_deltas
663 663 svn.core.svn_depth_infinity, # depth
664 664 False, # entry_props
665 665 False, # ignore_ancestry
666 666 )
667 667
668 668 for path, __, change in sorted(editor.changes):
669 669 self._generate_node_diff(
670 670 buf, change, path, self.tgt_path, path, self.src_path)
671 671
672 672 def _generate_file_diff(self, buf):
673 673 change = None
674 674 if self.src_kind == svn.core.svn_node_none:
675 675 change = "add"
676 676 elif self.tgt_kind == svn.core.svn_node_none:
677 677 change = "delete"
678 678 tgt_base, tgt_path = vcspath.split(self.tgt_path)
679 679 src_base, src_path = vcspath.split(self.src_path)
680 680 self._generate_node_diff(
681 681 buf, change, tgt_path, tgt_base, src_path, src_base)
682 682
683 683 def _generate_node_diff(
684 684 self, buf, change, tgt_path, tgt_base, src_path, src_base):
685 685
686 686 if self.src_rev == self.tgt_rev and tgt_base == src_base:
687 687 # makes consistent behaviour with git/hg to return empty diff if
688 688 # we compare same revisions
689 689 return
690 690
691 691 tgt_full_path = vcspath.join(tgt_base, tgt_path)
692 692 src_full_path = vcspath.join(src_base, src_path)
693 693
694 694 self.binary_content = False
695 695 mime_type = self._get_mime_type(tgt_full_path)
696 696
697 697 if mime_type and not mime_type.startswith('text'):
698 698 self.binary_content = True
699 699 buf.write("=" * 67 + '\n')
700 700 buf.write("Cannot display: file marked as a binary type.\n")
701 701 buf.write("svn:mime-type = %s\n" % mime_type)
702 702 buf.write("Index: %s\n" % (tgt_path, ))
703 703 buf.write("=" * 67 + '\n')
704 704 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
705 705 'tgt_path': tgt_path})
706 706
707 707 if change == 'add':
708 708 # TODO: johbo: SVN is missing a zero here compared to git
709 709 buf.write("new file mode 10644\n")
710 710
711 711 # TODO(marcink): introduce binary detection of svn patches
712 712 # if self.binary_content:
713 713 # buf.write('GIT binary patch\n')
714 714
715 715 buf.write("--- /dev/null\t(revision 0)\n")
716 716 src_lines = []
717 717 else:
718 718 if change == 'delete':
719 719 buf.write("deleted file mode 10644\n")
720 720
721 721 # TODO(marcink): introduce binary detection of svn patches
722 722 # if self.binary_content:
723 723 # buf.write('GIT binary patch\n')
724 724
725 725 buf.write("--- a/%s\t(revision %s)\n" % (
726 726 src_path, self.src_rev))
727 727 src_lines = self._svn_readlines(self.src_root, src_full_path)
728 728
729 729 if change == 'delete':
730 730 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
731 731 tgt_lines = []
732 732 else:
733 733 buf.write("+++ b/%s\t(revision %s)\n" % (
734 734 tgt_path, self.tgt_rev))
735 735 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
736 736
737 737 if not self.binary_content:
738 738 udiff = svn_diff.unified_diff(
739 739 src_lines, tgt_lines, context=self.context,
740 740 ignore_blank_lines=self.ignore_whitespace,
741 741 ignore_case=False,
742 742 ignore_space_changes=self.ignore_whitespace)
743 743 buf.writelines(udiff)
744 744
745 745 def _get_mime_type(self, path):
746 746 try:
747 747 mime_type = svn.fs.node_prop(
748 748 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
749 749 except svn.core.SubversionException:
750 750 mime_type = svn.fs.node_prop(
751 751 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
752 752 return mime_type
753 753
754 754 def _svn_readlines(self, fs_root, node_path):
755 755 if self.binary_content:
756 756 return []
757 757 node_kind = svn.fs.check_path(fs_root, node_path)
758 758 if node_kind not in (
759 759 svn.core.svn_node_file, svn.core.svn_node_symlink):
760 760 return []
761 761 content = svn.core.Stream(
762 762 svn.fs.file_contents(fs_root, node_path)).read()
763 763 return content.splitlines(True)
764 764
765 765
766 766 class DiffChangeEditor(svn.delta.Editor):
767 767 """
768 768 Records changes between two given revisions
769 769 """
770 770
771 771 def __init__(self):
772 772 self.changes = []
773 773
774 774 def delete_entry(self, path, revision, parent_baton, pool=None):
775 775 self.changes.append((path, None, 'delete'))
776 776
777 777 def add_file(
778 778 self, path, parent_baton, copyfrom_path, copyfrom_revision,
779 779 file_pool=None):
780 780 self.changes.append((path, 'file', 'add'))
781 781
782 782 def open_file(self, path, parent_baton, base_revision, file_pool=None):
783 783 self.changes.append((path, 'file', 'change'))
784 784
785 785
786 786 def authorization_callback_allow_all(root, path, pool):
787 787 return True
788 788
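A plausible way to drive `DiffChangeEditor` over a single revision, sketched under the assumption that `fsobj` is an open filesystem handle (e.g. from `svn.repos.fs(...)`) and `rev` is a revision number; the exact call site lives elsewhere in this module:

    # sketch: collect the paths touched by `rev`
    rev_root = svn.fs.revision_root(fsobj, rev)
    editor = DiffChangeEditor()
    editor_ptr, editor_baton = svn.delta.make_editor(editor)
    svn.repos.replay2(
        rev_root, "", svn.core.SVN_INVALID_REVNUM, False,
        editor_ptr, editor_baton,
        authorization_callback_allow_all, None)
    changes = editor.changes  # list of (path, kind, action) tuples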
789 789
790 790 class TxnNodeProcessor(object):
791 791 """
792 792 Utility to process the change of one node within a transaction root.
793 793
794 794 It encapsulates the knowledge of how to add, update or remove
795 795 a node for a given transaction root. The purpose is to support the method
796 796 `SvnRemote.commit`.
797 797 """
798 798
799 799 def __init__(self, node, txn_root):
800 800 assert isinstance(node['path'], str)
801 801
802 802 self.node = node
803 803 self.txn_root = txn_root
804 804
805 805 def update(self):
806 806 self._ensure_parent_dirs()
807 807 self._add_file_if_node_does_not_exist()
808 808 self._update_file_content()
809 809 self._update_file_properties()
810 810
811 811 def remove(self):
812 812 svn.fs.delete(self.txn_root, self.node['path'])
813 813 # TODO: Clean up directory if empty
814 814
815 815 def _ensure_parent_dirs(self):
816 816 curdir = vcspath.dirname(self.node['path'])
817 817 dirs_to_create = []
818 818 while not self._svn_path_exists(curdir):
819 819 dirs_to_create.append(curdir)
820 820 curdir = vcspath.dirname(curdir)
821 821
822 822 for curdir in reversed(dirs_to_create):
823 823 log.debug('Creating missing directory "%s"', curdir)
824 824 svn.fs.make_dir(self.txn_root, curdir)
825 825
826 826 def _svn_path_exists(self, path):
827 827 path_status = svn.fs.check_path(self.txn_root, path)
828 828 return path_status != svn.core.svn_node_none
829 829
830 830 def _add_file_if_node_does_not_exist(self):
831 831 kind = svn.fs.check_path(self.txn_root, self.node['path'])
832 832 if kind == svn.core.svn_node_none:
833 833 svn.fs.make_file(self.txn_root, self.node['path'])
834 834
835 835 def _update_file_content(self):
836 836 assert isinstance(self.node['content'], str)
837 837 handler, baton = svn.fs.apply_textdelta(
838 838 self.txn_root, self.node['path'], None, None)
839 839 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
840 840
841 841 def _update_file_properties(self):
842 842 properties = self.node.get('properties', {})
843 843 for key, value in properties.items():
844 844 svn.fs.change_node_prop(
845 845 self.txn_root, self.node['path'], key, value)
846 846
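A sketch of how `SvnRemote.commit` presumably strings these pieces together; `repo` is assumed to be an open repository handle, and the author, message and node values are placeholders:

    fsobj = svn.repos.fs(repo)
    rev = svn.fs.youngest_rev(fsobj)
    txn = svn.repos.fs_begin_txn_for_commit(repo, rev, 'author', 'commit message')
    txn_root = svn.fs.txn_root(txn)
    for node in [{'path': 'docs/a.txt', 'content': 'new content'}]:
        TxnNodeProcessor(node, txn_root).update()
    new_rev = svn.repos.fs_commit_txn(repo, txn)  # commits, yielding the new revision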
847 847
848 848 def apr_time_t(timestamp):
849 849 """
850 850 Convert a Python timestamp into the APR timestamp type apr_time_t (microseconds)
851 851 """
852 852 return int(timestamp * 1E6)
853 853
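For example, the POSIX timestamp for 2021-01-01T00:00:00Z converts as:

    apr_time_t(1609459200)  # -> 1609459200000000 microseconds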
854 854
855 855 def svn_opt_revision_value_t(num):
856 856 """
857 857 Put `num` into a `svn_opt_revision_value_t` structure.
858 858 """
859 859 value = svn.core.svn_opt_revision_value_t()
860 860 value.number = num
861 861 revision = svn.core.svn_opt_revision_t()
862 862 revision.kind = svn.core.svn_opt_revision_number
863 863 revision.value = value
864 864 return revision
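A hypothetical pairing for describing a revision range; both values are `svn_opt_revision_t` structs with `kind = svn_opt_revision_number`, ready for `svn.client` calls that expect revision pegs:

    rev_start = svn_opt_revision_value_t(10)
    rev_end = svn_opt_revision_value_t(20)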
@@ -1,86 +1,86 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import shutil
20 20 import tempfile
21 21
22 22 import configobj
23 23
24 24
25 25 class ContextINI(object):
26 26 """
27 27 Creates a new test.ini file as a copy of an existing one, with edited
28 28 data. If the source file is not present, a new one is created. Example usage::
29 29
30 30 with ContextINI('test.ini', [{'section': {'key': 'val'}}]) as new_test_ini_path:
31 31 print('vcsserver --config=%s' % new_test_ini_path)
32 32 """
33 33
34 34 def __init__(self, ini_file_path, ini_params, new_file_prefix=None,
35 35 destroy=True):
36 36 self.ini_file_path = ini_file_path
37 37 self.ini_params = ini_params
38 38 self.new_path = None
39 39 self.new_path_prefix = new_file_prefix or 'test'
40 40 self.destroy = destroy
41 41
42 42 def __enter__(self):
43 43 _, pref = tempfile.mkstemp()
44 44 loc = tempfile.gettempdir()
45 45 self.new_path = os.path.join(loc, '{}_{}_{}'.format(
46 46 pref, self.new_path_prefix, self.ini_file_path))
47 47
48 48 # copy ini file and modify according to the params, if we re-use a file
49 49 if os.path.isfile(self.ini_file_path):
50 50 shutil.copy(self.ini_file_path, self.new_path)
51 51 else:
52 52 # create a new empty file for ConfigObj to write to.
53 53 with open(self.new_path, 'wb'):
54 54 pass
55 55
56 56 config = configobj.ConfigObj(
57 57 self.new_path, file_error=True, write_empty_values=True)
58 58
59 59 for data in self.ini_params:
60 section, ini_params = data.items()[0]
61 key, val = ini_params.items()[0]
60 section, ini_params = list(data.items())[0]
61 key, val = list(ini_params.items())[0]
62 62 if section not in config:
63 63 config[section] = {}
64 64 config[section][key] = val
65 65
66 66 config.write()
67 67 return self.new_path
68 68
69 69 def __exit__(self, exc_type, exc_val, exc_tb):
70 70 if self.destroy:
71 71 os.remove(self.new_path)
72 72
73 73
74 74 def no_newline_id_generator(test_name):
75 75 """
76 76 Generates a test name without spaces or newline characters. Used for
77 77 nicer output of test progress.
78 78 """
79 79 org_name = test_name
80 80 test_name = str(test_name)\
81 81 .replace('\n', '_N') \
82 82 .replace('\r', '_N') \
83 83 .replace('\t', '_T') \
84 84 .replace(' ', '_S')
85 85
86 86 return test_name or 'test-with-empty-name'
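Presumably this helper is plugged into parametrized tests as an id generator, e.g.:

    @pytest.mark.parametrize('value', ['a b', 'a\nb'],
                             ids=no_newline_id_generator)
    def test_example(value):  # ids render as 'a_Sb' and 'a_Nb'
        assert value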
@@ -1,162 +1,162 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import inspect
19 19
20 20 import pytest
21 21 import dulwich.errors
22 22 from mock import Mock, patch
23 23
24 24 from vcsserver.remote import git
25 25
26 26 SAMPLE_REFS = {
27 27 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
28 28 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
29 29 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
30 30 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
31 31 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
32 32 }
33 33
34 34
35 35 @pytest.fixture
36 36 def git_remote():
37 37 """
38 38 A GitRemote instance with a mock factory.
39 39 """
40 40 factory = Mock()
41 41 remote = git.GitRemote(factory)
42 42 return remote
43 43
44 44
45 45 def test_discover_git_version(git_remote):
46 46 version = git_remote.discover_git_version()
47 47 assert version
48 48
49 49
50 50 class TestGitFetch(object):
51 51 def setup_method(self):
52 52 self.mock_repo = Mock()
53 53 factory = Mock()
54 54 factory.repo = Mock(return_value=self.mock_repo)
55 55 self.remote_git = git.GitRemote(factory)
56 56
57 57 def test_fetches_all_when_no_commit_ids_specified(self):
58 58 def side_effect(determine_wants, *args, **kwargs):
59 59 determine_wants(SAMPLE_REFS)
60 60
61 61 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
62 62 mock_fetch.side_effect = side_effect
63 63 self.remote_git.pull(wire={}, url='/tmp/', apply_refs=False)
64 64 determine_wants = self.mock_repo.object_store.determine_wants_all
65 65 determine_wants.assert_called_once_with(SAMPLE_REFS)
66 66
67 67 def test_fetches_specified_commits(self):
68 68 selected_refs = {
69 69 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
70 70 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
71 71 }
72 72
73 73 def side_effect(determine_wants, *args, **kwargs):
74 74 result = determine_wants(SAMPLE_REFS)
75 75 assert sorted(result) == sorted(selected_refs.values())
76 76 return result
77 77
78 78 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
79 79 mock_fetch.side_effect = side_effect
80 80 self.remote_git.pull(
81 81 wire={}, url='/tmp/', apply_refs=False,
82 refs=selected_refs.keys())
82 refs=list(selected_refs.keys()))
83 83 determine_wants = self.mock_repo.object_store.determine_wants_all
84 84 assert determine_wants.call_count == 0
85 85
86 86 def test_get_remote_refs(self):
87 87 factory = Mock()
88 88 remote_git = git.GitRemote(factory)
89 89 url = 'http://example.com/test/test.git'
90 90 sample_refs = {
91 91 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
92 92 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
93 93 }
94 94
95 95 with patch('vcsserver.remote.git.Repo', create=False) as mock_repo:
96 96 mock_repo().get_refs.return_value = sample_refs
97 97 remote_refs = remote_git.get_remote_refs(wire={}, url=url)
98 98 mock_repo().get_refs.assert_called_once_with()
99 99 assert remote_refs == sample_refs
100 100
101 101
102 102 class TestReraiseSafeExceptions(object):
103 103
104 104 def test_method_decorated_with_reraise_safe_exceptions(self):
105 105 factory = Mock()
106 106 git_remote = git.GitRemote(factory)
107 107
108 108 def fake_function():
109 109 return None
110 110
111 111 decorator = git.reraise_safe_exceptions(fake_function)
112 112
113 113 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
114 114 for method_name, method in methods:
115 115 if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
116 116 assert method.__func__.__code__ == decorator.__code__
117 117
118 118 @pytest.mark.parametrize('side_effect, expected_type', [
119 119 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
120 120 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
121 121 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
122 122 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
123 123 (dulwich.errors.HangupException(), 'error'),
124 124 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
125 125 ])
126 126 def test_safe_exceptions_reraised(self, side_effect, expected_type):
127 127 @git.reraise_safe_exceptions
128 128 def fake_method():
129 129 raise side_effect
130 130
131 131 with pytest.raises(Exception) as exc_info:
132 132 fake_method()
133 133 assert type(exc_info.value) == Exception
134 134 assert exc_info.value._vcs_kind == expected_type
135 135
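The parametrization above pins down the decorator's contract: dulwich lookup errors surface as plain `Exception` instances tagged `_vcs_kind == 'lookup'`, protocol errors as `'error'`. A minimal sketch consistent with that contract (not the actual `vcsserver.remote.git` implementation):

    import functools
    import dulwich.errors

    LOOKUP_ERRORS = (
        dulwich.errors.ChecksumMismatch,
        dulwich.errors.NotCommitError,
        dulwich.errors.MissingCommitError,
        dulwich.errors.ObjectMissing,
    )

    def reraise_safe_exceptions_sketch(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except LOOKUP_ERRORS as e:
                exc = Exception(e)
                exc._vcs_kind = 'lookup'
                raise exc from e
            except (dulwich.errors.HangupException,
                    dulwich.errors.UnexpectedCommandError) as e:
                exc = Exception(e)
                exc._vcs_kind = 'error'
                raise exc from e
        return wrapper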
136 136
137 137 class TestDulwichRepoWrapper(object):
138 138 def test_calls_close_on_delete(self):
139 139 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
140 140 with patch.object(git.Repo, 'close') as close_mock:
141 141 with isdir_patcher:
142 142 repo = git.Repo('/tmp/abcde')
143 143 assert repo is not None
144 144 repo.__del__()
145 145 # we can't use 'del repo' since in python3 that doesn't reliably call .__del__()
146 146
147 147 close_mock.assert_called_once_with()
148 148
149 149
150 150 class TestGitFactory(object):
151 151 def test_create_repo_returns_dulwich_wrapper(self):
152 152
153 153 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
154 154 mock.side_effect = {'repo_objects': ''}
155 155 factory = git.GitFactory()
156 156 wire = {
157 157 'path': '/tmp/abcde'
158 158 }
159 159 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
160 160 with isdir_patcher:
161 161 result = factory._create_repo(wire, True)
162 162 assert isinstance(result, git.Repo)
@@ -1,53 +1,53 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import pytest
19 19 from vcsserver.utils import ascii_bytes, ascii_str
20 20
21 21
22 22 @pytest.mark.parametrize('given, expected', [
23 23 ('a', b'a'),
24 (u'a', b'a'),
24 ('a', b'a'),
25 25 ])
26 26 def test_ascii_bytes(given, expected):
27 27 assert ascii_bytes(given) == expected
28 28
29 29
30 30 @pytest.mark.parametrize('given', [
31 31 'å',
32 32 'å'.encode('utf8')
33 33 ])
34 34 def test_ascii_bytes_raises(given):
35 35 with pytest.raises(ValueError):
36 36 ascii_bytes(given)
37 37
38 38
39 39 @pytest.mark.parametrize('given, expected', [
40 40 (b'a', 'a'),
41 41 ])
42 42 def test_ascii_str(given, expected):
43 43 assert ascii_str(given) == expected
44 44
45 45
46 46 @pytest.mark.parametrize('given', [
47 u'a',
47 'a',
48 48 'å'.encode('utf8'),
49 u'å'
49 'å'
50 50 ])
51 51 def test_ascii_str_raises(given):
52 52 with pytest.raises(ValueError):
53 53 ascii_str(given)
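The behaviour pinned down by these tests suggests implementations along the following lines (a sketch, not the actual `vcsserver.utils` code):

    def ascii_bytes(str_input):
        # str in, ascii-encoded bytes out; anything else raises
        if not isinstance(str_input, str):
            raise ValueError('str required, got %s' % type(str_input))
        return str_input.encode('ascii')  # UnicodeEncodeError is a ValueError

    def ascii_str(bytes_input):
        # bytes in, ascii-decoded str out; anything else raises
        if not isinstance(bytes_input, bytes):
            raise ValueError('bytes required, got %s' % type(bytes_input))
        return bytes_input.decode('ascii')  # UnicodeDecodeError is a ValueError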