caches: make sure we init caches on repo names without '/' to not create a new cache subpath
super-admin
r4766:77c2f985 default
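For context on the fix: the file-backed cache derives its namespace from the repo id, and nested repositories have ids containing '/', which a file backend turns into extra subdirectories. A minimal sketch of the problem and of the '__' replacement this commit introduces (the repo id and cache directory below are illustrative assumptions, not values taken from a real install):

    import os

    def _repo_id_sanitizer(repo_id):
        # mirror of the helper added in this commit
        return repo_id.replace('/', '__')

    # illustrative cache directory; the real location comes from RhodeCode config
    cache_dir = '/tmp/rc_cache/cache_repo'

    raw_uid = 'cache_repo.{}'.format('templates/example-repo')
    safe_uid = 'cache_repo.{}'.format(_repo_id_sanitizer('templates/example-repo'))

    # a file-based backend keyed on these namespaces would create:
    print(os.path.join(cache_dir, raw_uid))   # .../cache_repo.templates/example-repo -> nested dir
    print(os.path.join(cache_dir, safe_uid))  # .../cache_repo.templates__example-repo -> flat file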
@@ -1,364 +1,364 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2015-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import time
import errno
import logging

import msgpack
import gevent
import redis

from dogpile.cache.api import CachedValue
from dogpile.cache.backends import memory as memory_backend
from dogpile.cache.backends import file as file_backend
from dogpile.cache.backends import redis as redis_backend
from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
from dogpile.cache.util import memoized_property

from pyramid.settings import asbool

from rhodecode.lib.memory_lru_dict import LRUDict, LRUDictDebug
from rhodecode.lib.utils import safe_str


_default_max_size = 1024

log = logging.getLogger(__name__)


class LRUMemoryBackend(memory_backend.MemoryBackend):
    key_prefix = 'lru_mem_backend'
    pickle_values = False

    def __init__(self, arguments):
        max_size = arguments.pop('max_size', _default_max_size)

        LRUDictClass = LRUDict
        if arguments.pop('log_key_count', None):
            LRUDictClass = LRUDictDebug

        arguments['cache_dict'] = LRUDictClass(max_size)
        super(LRUMemoryBackend, self).__init__(arguments)

    def delete(self, key):
        try:
            del self._cache[key]
        except KeyError:
            # we don't care if key isn't there at deletion
            pass

    def delete_multi(self, keys):
        for key in keys:
            self.delete(key)


class PickleSerializer(object):

    def _dumps(self, value, safe=False):
        try:
            return compat.pickle.dumps(value)
        except Exception:
            if safe:
                return NO_VALUE
            else:
                raise

    def _loads(self, value, safe=True):
        try:
            return compat.pickle.loads(value)
        except Exception:
            if safe:
                return NO_VALUE
            else:
                raise


class MsgPackSerializer(object):

    def _dumps(self, value, safe=False):
        try:
            return msgpack.packb(value)
        except Exception:
            if safe:
                return NO_VALUE
            else:
                raise

    def _loads(self, value, safe=True):
        """
        pickle maintained the `CachedValue` wrapper of the tuple
        msgpack does not, so it must be added back in.
        """
        try:
            value = msgpack.unpackb(value, use_list=False)
            return CachedValue(*value)
        except Exception:
            if safe:
                return NO_VALUE
            else:
                raise


import fcntl
flock_org = fcntl.flock


class CustomLockFactory(FileLock):

    @memoized_property
    def _module(self):

        def gevent_flock(fd, operation):
            """
            Gevent compatible flock
            """
            # set non-blocking, this will cause an exception if we cannot acquire a lock
            operation |= fcntl.LOCK_NB
            start_lock_time = time.time()
            timeout = 60 * 15  # 15min
            while True:
                try:
                    flock_org(fd, operation)
                    # lock has been acquired
                    break
                except (OSError, IOError) as e:
                    # raise on errors other than "Resource temporarily unavailable"
                    if e.errno != errno.EAGAIN:
                        raise
                    elif (time.time() - start_lock_time) > timeout:
                        # waited too much time on a lock, better fail than loop forever
                        log.error('Failed to acquire lock on `%s` after waiting %ss',
                                  self.filename, timeout)
                        raise
                    wait_timeout = 0.03
                    log.debug('Failed to acquire lock on `%s`, retry in %ss',
                              self.filename, wait_timeout)
                    gevent.sleep(wait_timeout)

        fcntl.flock = gevent_flock
        return fcntl


class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
    key_prefix = 'file_backend'

    def __init__(self, arguments):
        arguments['lock_factory'] = CustomLockFactory
        db_file = arguments.get('filename')

        log.debug('initializing %s DB in %s', self.__class__.__name__, db_file)
        try:
            super(FileNamespaceBackend, self).__init__(arguments)
        except Exception:
-            log.error('Failed to initialize db at: %s', db_file)
+            log.exception('Failed to initialize db at: %s', db_file)
            raise

    def __repr__(self):
        return '{} `{}`'.format(self.__class__, self.filename)

    def list_keys(self, prefix=''):
        prefix = '{}:{}'.format(self.key_prefix, prefix)

        def cond(v):
            if not prefix:
                return True

            if v.startswith(prefix):
                return True
            return False

        with self._dbm_file(True) as dbm:
            try:
                return filter(cond, dbm.keys())
            except Exception:
                log.error('Failed to fetch DBM keys from DB: %s', self.get_store())
                raise

    def get_store(self):
        return self.filename

    def _dbm_get(self, key):
        with self._dbm_file(False) as dbm:
            if hasattr(dbm, 'get'):
                value = dbm.get(key, NO_VALUE)
            else:
                # gdbm objects lack a .get method
                try:
                    value = dbm[key]
                except KeyError:
                    value = NO_VALUE
            if value is not NO_VALUE:
                value = self._loads(value)
            return value

    def get(self, key):
        try:
            return self._dbm_get(key)
        except Exception:
            log.error('Failed to fetch DBM key %s from DB: %s', key, self.get_store())
            raise

    def set(self, key, value):
        with self._dbm_file(True) as dbm:
            dbm[key] = self._dumps(value)

    def set_multi(self, mapping):
        with self._dbm_file(True) as dbm:
            for key, value in mapping.items():
                dbm[key] = self._dumps(value)


class BaseRedisBackend(redis_backend.RedisBackend):
    key_prefix = ''

    def __init__(self, arguments):
        super(BaseRedisBackend, self).__init__(arguments)
        self._lock_timeout = self.lock_timeout
        self._lock_auto_renewal = asbool(arguments.pop("lock_auto_renewal", True))

        if self._lock_auto_renewal and not self._lock_timeout:
            # set default timeout for auto_renewal
            self._lock_timeout = 30

    def _create_client(self):
        args = {}

        if self.url is not None:
            args.update(url=self.url)

        else:
            args.update(
                host=self.host, password=self.password,
                port=self.port, db=self.db
            )

        connection_pool = redis.ConnectionPool(**args)

        return redis.StrictRedis(connection_pool=connection_pool)

    def list_keys(self, prefix=''):
        prefix = '{}:{}*'.format(self.key_prefix, prefix)
        return self.client.keys(prefix)

    def get_store(self):
        return self.client.connection_pool

    def get(self, key):
        value = self.client.get(key)
        if value is None:
            return NO_VALUE
        return self._loads(value)

    def get_multi(self, keys):
        if not keys:
            return []
        values = self.client.mget(keys)
        loads = self._loads
        return [
            loads(v) if v is not None else NO_VALUE
            for v in values]

    def set(self, key, value):
        if self.redis_expiration_time:
            self.client.setex(key, self.redis_expiration_time,
                              self._dumps(value))
        else:
            self.client.set(key, self._dumps(value))

    def set_multi(self, mapping):
        dumps = self._dumps
        mapping = dict(
            (k, dumps(v))
            for k, v in mapping.items()
        )

        if not self.redis_expiration_time:
            self.client.mset(mapping)
        else:
            pipe = self.client.pipeline()
            for key, value in mapping.items():
                pipe.setex(key, self.redis_expiration_time, value)
            pipe.execute()

    def get_mutex(self, key):
        if self.distributed_lock:
            lock_key = redis_backend.u('_lock_{0}').format(safe_str(key))
            return get_mutex_lock(self.client, lock_key, self._lock_timeout,
                                  auto_renewal=self._lock_auto_renewal)
        else:
            return None


class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
    key_prefix = 'redis_pickle_backend'
    pass


class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
    key_prefix = 'redis_msgpack_backend'
    pass


def get_mutex_lock(client, lock_key, lock_timeout, auto_renewal=False):
    import redis_lock

    class _RedisLockWrapper(object):
        """LockWrapper for redis_lock"""

        @classmethod
        def get_lock(cls):
            return redis_lock.Lock(
                redis_client=client,
                name=lock_key,
                expire=lock_timeout,
                auto_renewal=auto_renewal,
                strict=True,
            )

        def __repr__(self):
            return "{}:{}".format(self.__class__.__name__, lock_key)

        def __str__(self):
            return "{}:{}".format(self.__class__.__name__, lock_key)

        def __init__(self):
            self.lock = self.get_lock()
            self.lock_key = lock_key

        def acquire(self, wait=True):
            log.debug('Trying to acquire Redis lock for key %s', self.lock_key)
            try:
                acquired = self.lock.acquire(wait)
                log.debug('Got lock for key %s, %s', self.lock_key, acquired)
                return acquired
            except redis_lock.AlreadyAcquired:
                return False
            except redis_lock.AlreadyStarted:
                # refresh thread exists, but it also means we acquired the lock
                return True

        def release(self):
            try:
                self.lock.release()
            except redis_lock.NotAcquired:
                pass

    return _RedisLockWrapper()
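The only change in the hunk above swaps log.error for log.exception inside FileNamespaceBackend.__init__, so the traceback of the original failure lands in the log. A small self-contained sketch of the difference, using only the standard logging module (the db path is a made-up example):

    import logging

    logging.basicConfig(level=logging.DEBUG)
    log = logging.getLogger('demo')

    db_file = '/tmp/cache.db'  # illustrative path only
    try:
        raise IOError('db handle could not be opened')
    except Exception:
        # log.error records the message only
        log.error('Failed to initialize db at: %s', db_file)
        # log.exception records the same message plus the active traceback
        log.exception('Failed to initialize db at: %s', db_file)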
@@ -1,378 +1,381 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2016-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
Client for the VCSServer implemented based on HTTP.
"""

import copy
import logging
import threading
import time
import urllib2
import urlparse
import uuid
import traceback

import pycurl
import msgpack
import requests
from requests.packages.urllib3.util.retry import Retry

import rhodecode
from rhodecode.lib import rc_cache
from rhodecode.lib.rc_cache.utils import compute_key_from_params
from rhodecode.lib.system_info import get_cert_path
from rhodecode.lib.vcs import exceptions, CurlSession

log = logging.getLogger(__name__)


# TODO: mikhail: Keep it in sync with vcsserver's
# HTTPApplication.ALLOWED_EXCEPTIONS
EXCEPTIONS_MAP = {
    'KeyError': KeyError,
    'URLError': urllib2.URLError,
}


def _remote_call(url, payload, exceptions_map, session):
    try:
        response = session.post(url, data=msgpack.packb(payload))
    except pycurl.error as e:
        msg = '{}. \npycurl traceback: {}'.format(e, traceback.format_exc())
        raise exceptions.HttpVCSCommunicationError(msg)
    except Exception as e:
        message = getattr(e, 'message', '')
        if 'Failed to connect' in message:
            # gevent doesn't return proper pycurl errors
            raise exceptions.HttpVCSCommunicationError(e)
        else:
            raise

    if response.status_code >= 400:
        log.error('Call to %s returned non 200 HTTP code: %s',
                  url, response.status_code)
        raise exceptions.HttpVCSCommunicationError(repr(response.content))

    try:
        response = msgpack.unpackb(response.content)
    except Exception:
        log.exception('Failed to decode response %r', response.content)
        raise

    error = response.get('error')
    if error:
        type_ = error.get('type', 'Exception')
        exc = exceptions_map.get(type_, Exception)
        exc = exc(error.get('message'))
        try:
            exc._vcs_kind = error['_vcs_kind']
        except KeyError:
            pass

        try:
            exc._vcs_server_traceback = error['traceback']
            exc._vcs_server_org_exc_name = error['org_exc']
            exc._vcs_server_org_exc_tb = error['org_exc_tb']
        except KeyError:
            pass

        raise exc
    return response.get('result')


def _streaming_remote_call(url, payload, exceptions_map, session, chunk_size):
    try:
        response = session.post(url, data=msgpack.packb(payload))
    except pycurl.error as e:
        msg = '{}. \npycurl traceback: {}'.format(e, traceback.format_exc())
        raise exceptions.HttpVCSCommunicationError(msg)
    except Exception as e:
        message = getattr(e, 'message', '')
        if 'Failed to connect' in message:
            # gevent doesn't return proper pycurl errors
            raise exceptions.HttpVCSCommunicationError(e)
        else:
            raise

    if response.status_code >= 400:
        log.error('Call to %s returned non 200 HTTP code: %s',
                  url, response.status_code)
        raise exceptions.HttpVCSCommunicationError(repr(response.content))

    return response.iter_content(chunk_size=chunk_size)


class ServiceConnection(object):
    def __init__(self, server_and_port, backend_endpoint, session_factory):
        self.url = urlparse.urljoin('http://%s' % server_and_port, backend_endpoint)
        self._session_factory = session_factory

    def __getattr__(self, name):
        def f(*args, **kwargs):
            return self._call(name, *args, **kwargs)
        return f

    @exceptions.map_vcs_exceptions
    def _call(self, name, *args, **kwargs):
        payload = {
            'id': str(uuid.uuid4()),
            'method': name,
            'params': {'args': args, 'kwargs': kwargs}
        }
        return _remote_call(
            self.url, payload, EXCEPTIONS_MAP, self._session_factory())


class RemoteVCSMaker(object):

    def __init__(self, server_and_port, backend_endpoint, backend_type, session_factory):
        self.url = urlparse.urljoin('http://%s' % server_and_port, backend_endpoint)
        self.stream_url = urlparse.urljoin('http://%s' % server_and_port, backend_endpoint+'/stream')

        self._session_factory = session_factory
        self.backend_type = backend_type

    @classmethod
    def init_cache_region(cls, repo_id):
        cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
        region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
        return region, cache_namespace_uid

    def __call__(self, path, repo_id, config, with_wire=None):
        log.debug('%s RepoMaker call on %s', self.backend_type.upper(), path)
        return RemoteRepo(path, repo_id, config, self, with_wire=with_wire)

    def __getattr__(self, name):
        def remote_attr(*args, **kwargs):
            return self._call(name, *args, **kwargs)
        return remote_attr

    @exceptions.map_vcs_exceptions
    def _call(self, func_name, *args, **kwargs):
        payload = {
            'id': str(uuid.uuid4()),
            'method': func_name,
            'backend': self.backend_type,
            'params': {'args': args, 'kwargs': kwargs}
        }
        url = self.url
        return _remote_call(url, payload, EXCEPTIONS_MAP, self._session_factory())


class RemoteRepo(object):
    CHUNK_SIZE = 16384

    def __init__(self, path, repo_id, config, remote_maker, with_wire=None):
        self.url = remote_maker.url
        self.stream_url = remote_maker.stream_url
        self._session = remote_maker._session_factory()
        self._cache_region, self._cache_namespace = \
-            remote_maker.init_cache_region(repo_id)
+            remote_maker.init_cache_region(self._repo_id_sanitizer(repo_id))

        with_wire = with_wire or {}

        repo_state_uid = with_wire.get('repo_state_uid') or 'state'
        self._wire = {
            "path": path,  # repo path
            "repo_id": repo_id,
            "config": config,
            "repo_state_uid": repo_state_uid,
            "context": self._create_vcs_cache_context(path, repo_state_uid)
        }

        if with_wire:
            self._wire.update(with_wire)

        # NOTE(johbo): Trading complexity for performance. Avoiding the call to
        # log.debug brings a few percent gain even if it is not active.
        if log.isEnabledFor(logging.DEBUG):
            self._call_with_logging = True

        self.cert_dir = get_cert_path(rhodecode.CONFIG.get('__file__'))

+    def _repo_id_sanitizer(self, repo_id):
+        return repo_id.replace('/', '__')
+
    def __getattr__(self, name):

        if name.startswith('stream:'):
            def repo_remote_attr(*args, **kwargs):
                return self._call_stream(name, *args, **kwargs)
        else:
            def repo_remote_attr(*args, **kwargs):
                return self._call(name, *args, **kwargs)

        return repo_remote_attr

    def _base_call(self, name, *args, **kwargs):
        # TODO: oliver: This is currently necessary pre-call since the
        # config object is being changed for hooking scenarios
        wire = copy.deepcopy(self._wire)
        wire["config"] = wire["config"].serialize()
        wire["config"].append(('vcs', 'ssl_dir', self.cert_dir))

        payload = {
            'id': str(uuid.uuid4()),
            'method': name,
            'params': {'wire': wire, 'args': args, 'kwargs': kwargs}
        }

        context_uid = wire.get('context')
        return context_uid, payload

    @exceptions.map_vcs_exceptions
    def _call(self, name, *args, **kwargs):
        context_uid, payload = self._base_call(name, *args, **kwargs)
        url = self.url

        start = time.time()

        cache_on = False
        cache_key = ''
        local_cache_on = rhodecode.CONFIG.get('vcs.methods.cache')

        cache_methods = [
            'branches', 'tags', 'bookmarks',
            'is_large_file', 'is_binary', 'fctx_size', 'node_history', 'blob_raw_length',
            'revision', 'tree_items',
            'ctx_list',
            'bulk_request',
        ]

        if local_cache_on and name in cache_methods:
            cache_on = True
            repo_state_uid = self._wire['repo_state_uid']
            call_args = [a for a in args]
            cache_key = compute_key_from_params(repo_state_uid, name, *call_args)

        @self._cache_region.conditional_cache_on_arguments(
            namespace=self._cache_namespace, condition=cache_on and cache_key)
        def remote_call(_cache_key):
            if self._call_with_logging:
                log.debug('Calling %s@%s with args:%.10240r. wire_context: %s cache_on: %s',
                          url, name, args, context_uid, cache_on)
            return _remote_call(url, payload, EXCEPTIONS_MAP, self._session)

        result = remote_call(cache_key)
        if self._call_with_logging:
            log.debug('Call %s@%s took: %.4fs. wire_context: %s',
                      url, name, time.time()-start, context_uid)
        return result

    @exceptions.map_vcs_exceptions
    def _call_stream(self, name, *args, **kwargs):
        context_uid, payload = self._base_call(name, *args, **kwargs)
        payload['chunk_size'] = self.CHUNK_SIZE
        url = self.stream_url

        start = time.time()
        if self._call_with_logging:
            log.debug('Calling %s@%s with args:%.10240r. wire_context: %s',
                      url, name, args, context_uid)

        result = _streaming_remote_call(url, payload, EXCEPTIONS_MAP, self._session,
                                        self.CHUNK_SIZE)

        if self._call_with_logging:
            log.debug('Call %s@%s took: %.4fs. wire_context: %s',
                      url, name, time.time()-start, context_uid)
        return result

    def __getitem__(self, key):
        return self.revision(key)

    def _create_vcs_cache_context(self, *args):
        """
        Creates a unique string which is passed to the VCSServer on every
        remote call. It is used as cache key in the VCSServer.
        """
        hash_key = '-'.join(map(str, args))
        return str(uuid.uuid5(uuid.NAMESPACE_URL, hash_key))

    def invalidate_vcs_cache(self):
        """
        This invalidates the context which is sent to the VCSServer on every
        call to a remote method. It forces the VCSServer to create a fresh
        repository instance on the next call to a remote method.
        """
        self._wire['context'] = str(uuid.uuid4())


class VcsHttpProxy(object):

    CHUNK_SIZE = 16384

    def __init__(self, server_and_port, backend_endpoint):
        retries = Retry(total=5, connect=None, read=None, redirect=None)

        adapter = requests.adapters.HTTPAdapter(max_retries=retries)
        self.base_url = urlparse.urljoin('http://%s' % server_and_port, backend_endpoint)
        self.session = requests.Session()
        self.session.mount('http://', adapter)

    def handle(self, environment, input_data, *args, **kwargs):
        data = {
            'environment': environment,
            'input_data': input_data,
            'args': args,
            'kwargs': kwargs
        }
        result = self.session.post(
            self.base_url, msgpack.packb(data), stream=True)
        return self._get_result(result)

    def _deserialize_and_raise(self, error):
        exception = Exception(error['message'])
        try:
            exception._vcs_kind = error['_vcs_kind']
        except KeyError:
            pass
        raise exception

    def _iterate(self, result):
        unpacker = msgpack.Unpacker()
        for line in result.iter_content(chunk_size=self.CHUNK_SIZE):
            unpacker.feed(line)
            for chunk in unpacker:
                yield chunk

    def _get_result(self, result):
        iterator = self._iterate(result)
        error = iterator.next()
        if error:
            self._deserialize_and_raise(error)

        status = iterator.next()
        headers = iterator.next()

        return iterator, status, headers


class ThreadlocalSessionFactory(object):
    """
    Creates one CurlSession per thread on demand.
    """

    def __init__(self):
        self._thread_local = threading.local()

    def __call__(self):
        if not hasattr(self._thread_local, 'curl_session'):
            self._thread_local.curl_session = CurlSession()
        return self._thread_local.curl_session
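ThreadlocalSessionFactory, which closes the hunk above, is a small reusable pattern: one lazily created session per thread. A generic sketch of the same idea, with a placeholder DummySession standing in for CurlSession (which is not imported here):

    import threading

    class DummySession(object):
        """Stand-in for an expensive per-thread resource such as CurlSession."""

    class ThreadlocalFactory(object):
        def __init__(self):
            self._thread_local = threading.local()

        def __call__(self):
            # create the session on first use in each thread, then reuse it
            if not hasattr(self._thread_local, 'session'):
                self._thread_local.session = DummySession()
            return self._thread_local.session

    factory = ThreadlocalFactory()
    assert factory() is factory()  # same instance within one thread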
@@ -1,1194 +1,1195 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import re
22 import re
23 import shutil
23 import shutil
24 import time
24 import time
25 import logging
25 import logging
26 import traceback
26 import traceback
27 import datetime
27 import datetime
28
28
29 from pyramid.threadlocal import get_current_request
29 from pyramid.threadlocal import get_current_request
30 from zope.cachedescriptors.property import Lazy as LazyProperty
30 from zope.cachedescriptors.property import Lazy as LazyProperty
31
31
32 from rhodecode import events
32 from rhodecode import events
33 from rhodecode.lib.auth import HasUserGroupPermissionAny
33 from rhodecode.lib.auth import HasUserGroupPermissionAny
34 from rhodecode.lib.caching_query import FromCache
34 from rhodecode.lib.caching_query import FromCache
35 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
35 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
36 from rhodecode.lib import hooks_base
36 from rhodecode.lib import hooks_base
37 from rhodecode.lib.user_log_filter import user_log_filter
37 from rhodecode.lib.user_log_filter import user_log_filter
38 from rhodecode.lib.utils import make_db_config
38 from rhodecode.lib.utils import make_db_config
39 from rhodecode.lib.utils2 import (
39 from rhodecode.lib.utils2 import (
40 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
40 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
41 get_current_rhodecode_user, safe_int, action_logger_generic)
41 get_current_rhodecode_user, safe_int, action_logger_generic)
42 from rhodecode.lib.vcs.backends import get_backend
42 from rhodecode.lib.vcs.backends import get_backend
43 from rhodecode.model import BaseModel
43 from rhodecode.model import BaseModel
44 from rhodecode.model.db import (
44 from rhodecode.model.db import (
45 _hash_key, func, case, joinedload, or_, in_filter_generator,
45 _hash_key, func, case, joinedload, or_, in_filter_generator,
46 Session, Repository, UserRepoToPerm, UserGroupRepoToPerm,
46 Session, Repository, UserRepoToPerm, UserGroupRepoToPerm,
47 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
47 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
48 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
48 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
49 from rhodecode.model.permission import PermissionModel
49 from rhodecode.model.permission import PermissionModel
50 from rhodecode.model.settings import VcsSettingsModel
50 from rhodecode.model.settings import VcsSettingsModel
51
51
52 log = logging.getLogger(__name__)
52 log = logging.getLogger(__name__)
53
53
54
54
55 class RepoModel(BaseModel):
55 class RepoModel(BaseModel):
56
56
57 cls = Repository
57 cls = Repository
58
58
59 def _get_user_group(self, users_group):
59 def _get_user_group(self, users_group):
60 return self._get_instance(UserGroup, users_group,
60 return self._get_instance(UserGroup, users_group,
61 callback=UserGroup.get_by_group_name)
61 callback=UserGroup.get_by_group_name)
62
62
63 def _get_repo_group(self, repo_group):
63 def _get_repo_group(self, repo_group):
64 return self._get_instance(RepoGroup, repo_group,
64 return self._get_instance(RepoGroup, repo_group,
65 callback=RepoGroup.get_by_group_name)
65 callback=RepoGroup.get_by_group_name)
66
66
67 def _create_default_perms(self, repository, private):
67 def _create_default_perms(self, repository, private):
68 # create default permission
68 # create default permission
69 default = 'repository.read'
69 default = 'repository.read'
70 def_user = User.get_default_user()
70 def_user = User.get_default_user()
71 for p in def_user.user_perms:
71 for p in def_user.user_perms:
72 if p.permission.permission_name.startswith('repository.'):
72 if p.permission.permission_name.startswith('repository.'):
73 default = p.permission.permission_name
73 default = p.permission.permission_name
74 break
74 break
75
75
76 default_perm = 'repository.none' if private else default
76 default_perm = 'repository.none' if private else default
77
77
78 repo_to_perm = UserRepoToPerm()
78 repo_to_perm = UserRepoToPerm()
79 repo_to_perm.permission = Permission.get_by_key(default_perm)
79 repo_to_perm.permission = Permission.get_by_key(default_perm)
80
80
81 repo_to_perm.repository = repository
81 repo_to_perm.repository = repository
82 repo_to_perm.user_id = def_user.user_id
82 repo_to_perm.user_id = def_user.user_id
83
83
84 return repo_to_perm
84 return repo_to_perm
85
85
86 @LazyProperty
86 @LazyProperty
87 def repos_path(self):
87 def repos_path(self):
88 """
88 """
89 Gets the repositories root path from database
89 Gets the repositories root path from database
90 """
90 """
91 settings_model = VcsSettingsModel(sa=self.sa)
91 settings_model = VcsSettingsModel(sa=self.sa)
92 return settings_model.get_repos_location()
92 return settings_model.get_repos_location()
93
93
94 def get(self, repo_id):
94 def get(self, repo_id):
95 repo = self.sa.query(Repository) \
95 repo = self.sa.query(Repository) \
96 .filter(Repository.repo_id == repo_id)
96 .filter(Repository.repo_id == repo_id)
97
97
98 return repo.scalar()
98 return repo.scalar()
99
99
100 def get_repo(self, repository):
100 def get_repo(self, repository):
101 return self._get_repo(repository)
101 return self._get_repo(repository)
102
102
103 def get_by_repo_name(self, repo_name, cache=False):
103 def get_by_repo_name(self, repo_name, cache=False):
104 repo = self.sa.query(Repository) \
104 repo = self.sa.query(Repository) \
105 .filter(Repository.repo_name == repo_name)
105 .filter(Repository.repo_name == repo_name)
106
106
107 if cache:
107 if cache:
108 name_key = _hash_key(repo_name)
108 name_key = _hash_key(repo_name)
109 repo = repo.options(
109 repo = repo.options(
110 FromCache("sql_cache_short", "get_repo_%s" % name_key))
110 FromCache("sql_cache_short", "get_repo_%s" % name_key))
111 return repo.scalar()
111 return repo.scalar()
112
112
113 def _extract_id_from_repo_name(self, repo_name):
113 def _extract_id_from_repo_name(self, repo_name):
114 if repo_name.startswith('/'):
114 if repo_name.startswith('/'):
115 repo_name = repo_name.lstrip('/')
115 repo_name = repo_name.lstrip('/')
116 by_id_match = re.match(r'^_(\d{1,})', repo_name)
116 by_id_match = re.match(r'^_(\d{1,})', repo_name)
117 if by_id_match:
117 if by_id_match:
118 return by_id_match.groups()[0]
118 return by_id_match.groups()[0]
119
119
120 def get_repo_by_id(self, repo_name):
120 def get_repo_by_id(self, repo_name):
121 """
121 """
122 Extracts repo_name by id from special urls.
122 Extracts repo_name by id from special urls.
123 Example url is _11/repo_name
123 Example url is _11/repo_name
124
124
125 :param repo_name:
125 :param repo_name:
126 :return: repo object if matched else None
126 :return: repo object if matched else None
127 """
127 """
128 _repo_id = None
128 _repo_id = None
129 try:
129 try:
130 _repo_id = self._extract_id_from_repo_name(repo_name)
130 _repo_id = self._extract_id_from_repo_name(repo_name)
131 if _repo_id:
131 if _repo_id:
132 return self.get(_repo_id)
132 return self.get(_repo_id)
133 except Exception:
133 except Exception:
134 log.exception('Failed to extract repo_name from URL')
134 log.exception('Failed to extract repo_name from URL')
135 if _repo_id:
135 if _repo_id:
136 Session().rollback()
136 Session().rollback()
137
137
138 return None
138 return None
139
139
140 def get_repos_for_root(self, root, traverse=False):
140 def get_repos_for_root(self, root, traverse=False):
141 if traverse:
141 if traverse:
142 like_expression = u'{}%'.format(safe_unicode(root))
142 like_expression = u'{}%'.format(safe_unicode(root))
143 repos = Repository.query().filter(
143 repos = Repository.query().filter(
144 Repository.repo_name.like(like_expression)).all()
144 Repository.repo_name.like(like_expression)).all()
145 else:
145 else:
146 if root and not isinstance(root, RepoGroup):
146 if root and not isinstance(root, RepoGroup):
147 raise ValueError(
147 raise ValueError(
148 'Root must be an instance '
148 'Root must be an instance '
149 'of RepoGroup, got:{} instead'.format(type(root)))
149 'of RepoGroup, got:{} instead'.format(type(root)))
150 repos = Repository.query().filter(Repository.group == root).all()
150 repos = Repository.query().filter(Repository.group == root).all()
151 return repos
151 return repos
152
152
153 def get_url(self, repo, request=None, permalink=False):
153 def get_url(self, repo, request=None, permalink=False):
154 if not request:
154 if not request:
155 request = get_current_request()
155 request = get_current_request()
156
156
157 if not request:
157 if not request:
158 return
158 return
159
159
160 if permalink:
160 if permalink:
161 return request.route_url(
161 return request.route_url(
162 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
162 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
163 else:
163 else:
164 return request.route_url(
164 return request.route_url(
165 'repo_summary', repo_name=safe_str(repo.repo_name))
165 'repo_summary', repo_name=safe_str(repo.repo_name))
166
166
167 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
167 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
168 if not request:
168 if not request:
169 request = get_current_request()
169 request = get_current_request()
170
170
171 if not request:
171 if not request:
172 return
172 return
173
173
174 if permalink:
174 if permalink:
175 return request.route_url(
175 return request.route_url(
176 'repo_commit', repo_name=safe_str(repo.repo_id),
176 'repo_commit', repo_name=safe_str(repo.repo_id),
177 commit_id=commit_id)
177 commit_id=commit_id)
178
178
179 else:
179 else:
180 return request.route_url(
180 return request.route_url(
181 'repo_commit', repo_name=safe_str(repo.repo_name),
181 'repo_commit', repo_name=safe_str(repo.repo_name),
182 commit_id=commit_id)
182 commit_id=commit_id)
183
183
184 def get_repo_log(self, repo, filter_term):
184 def get_repo_log(self, repo, filter_term):
185 repo_log = UserLog.query()\
185 repo_log = UserLog.query()\
186 .filter(or_(UserLog.repository_id == repo.repo_id,
186 .filter(or_(UserLog.repository_id == repo.repo_id,
187 UserLog.repository_name == repo.repo_name))\
187 UserLog.repository_name == repo.repo_name))\
188 .options(joinedload(UserLog.user))\
188 .options(joinedload(UserLog.user))\
189 .options(joinedload(UserLog.repository))\
189 .options(joinedload(UserLog.repository))\
190 .order_by(UserLog.action_date.desc())
190 .order_by(UserLog.action_date.desc())
191
191
192 repo_log = user_log_filter(repo_log, filter_term)
192 repo_log = user_log_filter(repo_log, filter_term)
193 return repo_log
193 return repo_log

    @classmethod
    def update_commit_cache(cls, repositories=None):
        if not repositories:
            repositories = Repository.getAll()
        for repo in repositories:
            repo.update_commit_cache()

    def get_repos_as_dict(self, repo_list=None, admin=False,
                          super_user_actions=False, short_name=None):

        _render = get_current_request().get_partial_renderer(
            'rhodecode:templates/data_table/_dt_elements.mako')
        c = _render.get_call_context()
        h = _render.get_helpers()

        def quick_menu(repo_name):
            return _render('quick_menu', repo_name)

        def repo_lnk(name, rtype, rstate, private, archived, fork_of):
            if short_name is not None:
                short_name_var = short_name
            else:
                short_name_var = not admin
            return _render('repo_name', name, rtype, rstate, private, archived, fork_of,
                           short_name=short_name_var, admin=False)

        def last_change(last_change):
            if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
                ts = time.time()
                utc_offset = (datetime.datetime.fromtimestamp(ts)
                              - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
                last_change = last_change + datetime.timedelta(seconds=utc_offset)

            return _render("last_change", last_change)

        def rss_lnk(repo_name):
            return _render("rss", repo_name)

        def atom_lnk(repo_name):
            return _render("atom", repo_name)

        def last_rev(repo_name, cs_cache):
            return _render('revision', repo_name, cs_cache.get('revision'),
                           cs_cache.get('raw_id'), cs_cache.get('author'),
                           cs_cache.get('message'), cs_cache.get('date'))

        def desc(desc):
            return _render('repo_desc', desc, c.visual.stylify_metatags)

        def state(repo_state):
            return _render("repo_state", repo_state)

        def repo_actions(repo_name):
            return _render('repo_actions', repo_name, super_user_actions)

        def user_profile(username):
            return _render('user_profile', username)

        repos_data = []
        for repo in repo_list:
            # NOTE(marcink): because we use only raw column we need to load it like that
            changeset_cache = Repository._load_changeset_cache(
                repo.repo_id, repo._changeset_cache)

            row = {
                "menu": quick_menu(repo.repo_name),

                "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
                                 repo.private, repo.archived, repo.fork),

                "desc": desc(h.escape(repo.description)),

                "last_change": last_change(repo.updated_on),

                "last_changeset": last_rev(repo.repo_name, changeset_cache),
                "last_changeset_raw": changeset_cache.get('revision'),

                "owner": user_profile(repo.User.username),

                "state": state(repo.repo_state),
                "rss": rss_lnk(repo.repo_name),
                "atom": atom_lnk(repo.repo_name),
            }
            if admin:
                row.update({
                    "action": repo_actions(repo.repo_name),
                })
            repos_data.append(row)

        return repos_data
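
    # Shape of one returned row (sketch; the cell values are HTML fragments
    # rendered by the _dt_elements.mako partials, shortened here):
    #
    #   {"menu": "<div>...</div>", "name": "<a>...</a>", "desc": "...",
    #    "last_change": "...", "last_changeset": "...",
    #    "last_changeset_raw": 42, "owner": "...", "state": "...",
    #    "rss": "...", "atom": "..."}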

    def get_repos_data_table(
            self, draw, start, limit,
            search_q, order_by, order_dir,
            auth_user, repo_group_id):
        from rhodecode.model.scm import RepoList

        _perms = ['repository.read', 'repository.write', 'repository.admin']

        repos = Repository.query() \
            .filter(Repository.group_id == repo_group_id) \
            .all()
        auth_repo_list = RepoList(
            repos, perm_set=_perms,
            extra_kwargs=dict(user=auth_user))

        allowed_ids = [-1]
        for repo in auth_repo_list:
            allowed_ids.append(repo.repo_id)

        repos_data_total_count = Repository.query() \
            .filter(Repository.group_id == repo_group_id) \
            .filter(or_(
                # generate multiple IN to fix limitation problems
                *in_filter_generator(Repository.repo_id, allowed_ids))
            ) \
            .count()

        base_q = Session.query(
            Repository.repo_id,
            Repository.repo_name,
            Repository.description,
            Repository.repo_type,
            Repository.repo_state,
            Repository.private,
            Repository.archived,
            Repository.fork,
            Repository.updated_on,
            Repository._changeset_cache,
            User,
        ) \
            .filter(Repository.group_id == repo_group_id) \
            .filter(or_(
                # generate multiple IN to fix limitation problems
                *in_filter_generator(Repository.repo_id, allowed_ids))
            ) \
            .join(User, User.user_id == Repository.user_id) \
            .group_by(Repository, User)

        repos_data_total_filtered_count = base_q.count()

        sort_defined = False
        if order_by == 'repo_name':
            sort_col = func.lower(Repository.repo_name)
            sort_defined = True
        elif order_by == 'user_username':
            sort_col = User.username
        else:
            sort_col = getattr(Repository, order_by, None)

        if sort_defined or sort_col:
            if order_dir == 'asc':
                sort_col = sort_col.asc()
            else:
                sort_col = sort_col.desc()

            base_q = base_q.order_by(sort_col)
        base_q = base_q.offset(start).limit(limit)

        repos_list = base_q.all()

        repos_data = RepoModel().get_repos_as_dict(
            repo_list=repos_list, admin=False)

        data = ({
            'draw': draw,
            'data': repos_data,
            'recordsTotal': repos_data_total_count,
            'recordsFiltered': repos_data_total_filtered_count,
        })
        return data
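
    # The returned payload follows the DataTables server-side protocol; a
    # hypothetical response for draw=1 over a group with 25 visible repos
    # could look like:
    #
    #   {'draw': 1, 'data': [...], 'recordsTotal': 25, 'recordsFiltered': 25}
    #
    # 'recordsTotal' counts all repos the user may see in the group, while
    # 'recordsFiltered' counts the rows left after the base query filtering.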

    def _get_defaults(self, repo_name):
        """
        Gets information about a repository and returns a dict for
        usage in forms

        :param repo_name:
        """

        repo_info = Repository.get_by_repo_name(repo_name)

        if repo_info is None:
            return None

        defaults = repo_info.get_dict()
        defaults['repo_name'] = repo_info.just_name

        groups = repo_info.groups_with_parents
        parent_group = groups[-1] if groups else None

        # we use -1 as this is how we mark an empty group in HTML
        defaults['repo_group'] = getattr(parent_group, 'group_id', -1)

        keys_to_process = (
            {'k': 'repo_type', 'strip': False},
            {'k': 'repo_enable_downloads', 'strip': True},
            {'k': 'repo_description', 'strip': True},
            {'k': 'repo_enable_locking', 'strip': True},
            {'k': 'repo_landing_rev', 'strip': True},
            {'k': 'clone_uri', 'strip': False},
            {'k': 'push_uri', 'strip': False},
            {'k': 'repo_private', 'strip': True},
            {'k': 'repo_enable_statistics', 'strip': True}
        )

        for item in keys_to_process:
            attr = item['k']
            if item['strip']:
                attr = remove_prefix(item['k'], 'repo_')

            val = defaults[attr]
            if item['k'] == 'repo_landing_rev':
                val = ':'.join(defaults[attr])
            defaults[item['k']] = val
            if item['k'] == 'clone_uri':
                defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
            if item['k'] == 'push_uri':
                defaults['push_uri_hidden'] = repo_info.push_uri_hidden

        # fill owner
        if repo_info.user:
            defaults.update({'user': repo_info.user.username})
        else:
            replacement_user = User.get_first_super_admin().username
            defaults.update({'user': replacement_user})

        return defaults
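
    # Key handling sketch: for a stripped key like 'repo_description' the
    # value is read from the model attribute 'description' and written back
    # under the form-field name as well, so both keys end up in the dict:
    #
    #   defaults['description']       # model value
    #   defaults['repo_description']  # same value, form-field name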

    def update(self, repo, **kwargs):
        try:
            cur_repo = self._get_repo(repo)
            source_repo_name = cur_repo.repo_name

            affected_user_ids = []
            if 'user' in kwargs:
                old_owner_id = cur_repo.user.user_id
                new_owner = User.get_by_username(kwargs['user'])
                cur_repo.user = new_owner

                if old_owner_id != new_owner.user_id:
                    affected_user_ids = [new_owner.user_id, old_owner_id]

            if 'repo_group' in kwargs:
                cur_repo.group = RepoGroup.get(kwargs['repo_group'])
            log.debug('Updating repo %s with params:%s', cur_repo, kwargs)

            update_keys = [
                (1, 'repo_description'),
                (1, 'repo_landing_rev'),
                (1, 'repo_private'),
                (1, 'repo_enable_downloads'),
                (1, 'repo_enable_locking'),
                (1, 'repo_enable_statistics'),
                (0, 'clone_uri'),
                (0, 'push_uri'),
                (0, 'fork_id')
            ]
            for strip, k in update_keys:
                if k in kwargs:
                    val = kwargs[k]
                    if strip:
                        k = remove_prefix(k, 'repo_')

                    setattr(cur_repo, k, val)

            new_name = cur_repo.get_new_name(kwargs['repo_name'])
            cur_repo.repo_name = new_name

            # if private flag is set, reset default permission to NONE
            if kwargs.get('repo_private'):
                EMPTY_PERM = 'repository.none'
                RepoModel().grant_user_permission(
                    repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
                )
            if kwargs.get('repo_landing_rev'):
                landing_rev_val = kwargs['repo_landing_rev']
                RepoModel().set_landing_rev(cur_repo, landing_rev_val)

            # handle extra fields
            for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs):
                k = RepositoryField.un_prefix_key(field)
                ex_field = RepositoryField.get_by_key_name(
                    key=k, repo=cur_repo)
                if ex_field:
                    ex_field.field_value = kwargs[field]
                    self.sa.add(ex_field)

            self.sa.add(cur_repo)

            if source_repo_name != new_name:
                # rename repository
                self._rename_filesystem_repo(
                    old=source_repo_name, new=new_name)

            if affected_user_ids:
                PermissionModel().trigger_permission_flush(affected_user_ids)

            return cur_repo
        except Exception:
            log.error(traceback.format_exc())
            raise
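
    # Hypothetical call sketch: note that kwargs['repo_name'] is always read
    # for rename handling, so pass it even when the name does not change:
    #
    #   RepoModel().update(
    #       'docs/manual', repo_name='docs/manual',
    #       repo_description='updated text', repo_private=True)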

    def _create_repo(self, repo_name, repo_type, description, owner,
                     private=False, clone_uri=None, repo_group=None,
                     landing_rev='rev:tip', fork_of=None,
                     copy_fork_permissions=False, enable_statistics=False,
                     enable_locking=False, enable_downloads=False,
                     copy_group_permissions=False,
                     state=Repository.STATE_PENDING):
        """
        Create repository inside database with PENDING state. This should
        only be executed by the create() call, with the exception of
        importing existing repos.
        """
        from rhodecode.model.scm import ScmModel

        owner = self._get_user(owner)
        fork_of = self._get_repo(fork_of)
        repo_group = self._get_repo_group(safe_int(repo_group))

        try:
            repo_name = safe_unicode(repo_name)
            description = safe_unicode(description)
            # repo_name is just the name of the repository, while
            # repo_name_full is the fully qualified name that combines the
            # name with the path of its group
            repo_name_full = repo_name
            repo_name = repo_name.split(Repository.NAME_SEP)[-1]

            new_repo = Repository()
            new_repo.repo_state = state
            new_repo.enable_statistics = False
            new_repo.repo_name = repo_name_full
            new_repo.repo_type = repo_type
            new_repo.user = owner
            new_repo.group = repo_group
            new_repo.description = description or repo_name
            new_repo.private = private
            new_repo.archived = False
            new_repo.clone_uri = clone_uri
            new_repo.landing_rev = landing_rev

            new_repo.enable_statistics = enable_statistics
            new_repo.enable_locking = enable_locking
            new_repo.enable_downloads = enable_downloads

            if repo_group:
                new_repo.enable_locking = repo_group.enable_locking

            if fork_of:
                parent_repo = fork_of
                new_repo.fork = parent_repo

            events.trigger(events.RepoPreCreateEvent(new_repo))

            self.sa.add(new_repo)

            EMPTY_PERM = 'repository.none'
            if fork_of and copy_fork_permissions:
                repo = fork_of
                user_perms = UserRepoToPerm.query() \
                    .filter(UserRepoToPerm.repository == repo).all()
                group_perms = UserGroupRepoToPerm.query() \
                    .filter(UserGroupRepoToPerm.repository == repo).all()

                for perm in user_perms:
                    UserRepoToPerm.create(
                        perm.user, new_repo, perm.permission)

                for perm in group_perms:
                    UserGroupRepoToPerm.create(
                        perm.users_group, new_repo, perm.permission)
                # in case we copy permissions and also set this repo to private,
                # override the default user permission to make it a private repo
                if private:
                    RepoModel(self.sa).grant_user_permission(
                        repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)

            elif repo_group and copy_group_permissions:
                user_perms = UserRepoGroupToPerm.query() \
                    .filter(UserRepoGroupToPerm.group == repo_group).all()

                group_perms = UserGroupRepoGroupToPerm.query() \
                    .filter(UserGroupRepoGroupToPerm.group == repo_group).all()

                for perm in user_perms:
                    perm_name = perm.permission.permission_name.replace(
                        'group.', 'repository.')
                    perm_obj = Permission.get_by_key(perm_name)
                    UserRepoToPerm.create(perm.user, new_repo, perm_obj)

                for perm in group_perms:
                    perm_name = perm.permission.permission_name.replace(
                        'group.', 'repository.')
                    perm_obj = Permission.get_by_key(perm_name)
                    UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)

                if private:
                    RepoModel(self.sa).grant_user_permission(
                        repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)

            else:
                perm_obj = self._create_default_perms(new_repo, private)
                self.sa.add(perm_obj)

            # now automatically start following this repository as owner
            ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, owner.user_id)

            # we need to flush here, in order to check if database won't
            # throw any exceptions, create filesystem dirs at the very end
            self.sa.flush()
            events.trigger(events.RepoCreateEvent(new_repo))
            return new_repo

        except Exception:
            log.error(traceback.format_exc())
            raise
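
    # Name handling sketch (assuming the default NAME_SEP of '/'; the name
    # below is hypothetical): the short name is the last path segment of the
    # fully qualified name:
    #
    #   repo_name_full = 'company/tools/builder'
    #   repo_name      = 'builder'   # also the fallback description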

    def create(self, form_data, cur_user):
        """
        Create repository using celery tasks

        :param form_data:
        :param cur_user:
        """
        from rhodecode.lib.celerylib import tasks, run_task
        return run_task(tasks.create_repo, form_data, cur_user)

    def update_permissions(self, repo, perm_additions=None, perm_updates=None,
                           perm_deletions=None, check_perms=True,
                           cur_user=None):
        if not perm_additions:
            perm_additions = []
        if not perm_updates:
            perm_updates = []
        if not perm_deletions:
            perm_deletions = []

        req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')

        changes = {
            'added': [],
            'updated': [],
            'deleted': [],
            'default_user_changed': None
        }

        repo = self._get_repo(repo)

        # update permissions
        for member_id, perm, member_type in perm_updates:
            member_id = int(member_id)
            if member_type == 'user':
                member_name = User.get(member_id).username
                if member_name == User.DEFAULT_USER:
                    # NOTE(dan): detect if we changed permissions for default user
                    perm_obj = self.sa.query(UserRepoToPerm) \
                        .filter(UserRepoToPerm.user_id == member_id) \
                        .filter(UserRepoToPerm.repository == repo) \
                        .scalar()
                    if perm_obj and perm_obj.permission.permission_name != perm:
                        changes['default_user_changed'] = True

                # this updates also current one if found
                self.grant_user_permission(
                    repo=repo, user=member_id, perm=perm)
            elif member_type == 'user_group':
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.grant_user_group_permission(
                        repo=repo, group_name=member_id, perm=perm)
            else:
                raise ValueError("member_type must be 'user' or 'user_group' "
                                 "got {} instead".format(member_type))
            changes['updated'].append({'type': member_type, 'id': member_id,
                                       'name': member_name, 'new_perm': perm})

        # set new permissions
        for member_id, perm, member_type in perm_additions:
            member_id = int(member_id)
            if member_type == 'user':
                member_name = User.get(member_id).username
                self.grant_user_permission(
                    repo=repo, user=member_id, perm=perm)
            elif member_type == 'user_group':
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.grant_user_group_permission(
                        repo=repo, group_name=member_id, perm=perm)
            else:
                raise ValueError("member_type must be 'user' or 'user_group' "
                                 "got {} instead".format(member_type))

            changes['added'].append({'type': member_type, 'id': member_id,
                                     'name': member_name, 'new_perm': perm})
        # delete permissions
        for member_id, perm, member_type in perm_deletions:
            member_id = int(member_id)
            if member_type == 'user':
                member_name = User.get(member_id).username
                self.revoke_user_permission(repo=repo, user=member_id)
            elif member_type == 'user_group':
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.revoke_user_group_permission(
                        repo=repo, group_name=member_id)
            else:
                raise ValueError("member_type must be 'user' or 'user_group' "
                                 "got {} instead".format(member_type))

            changes['deleted'].append({'type': member_type, 'id': member_id,
                                       'name': member_name, 'new_perm': perm})
        return changes
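
    # Input/output sketch (ids and names are hypothetical): each entry is a
    # (member_id, permission_name, member_type) triple:
    #
    #   perm_updates = [('5', 'repository.write', 'user')]
    #
    # which is reflected back in the returned changes dict:
    #
    #   {'added': [], 'deleted': [], 'default_user_changed': None,
    #    'updated': [{'type': 'user', 'id': 5, 'name': 'dev1',
    #                 'new_perm': 'repository.write'}]}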

    def create_fork(self, form_data, cur_user):
        """
        Simple wrapper that executes the celery task for fork creation

        :param form_data:
        :param cur_user:
        """
        from rhodecode.lib.celerylib import tasks, run_task
        return run_task(tasks.create_repo_fork, form_data, cur_user)

    def archive(self, repo):
        """
        Archive given repository. Set archive flag.

        :param repo:
        """
        repo = self._get_repo(repo)
        if repo:
            try:
                repo.archived = True
                self.sa.add(repo)
                self.sa.commit()
            except Exception:
                log.error(traceback.format_exc())
                raise

    def delete(self, repo, forks=None, pull_requests=None, fs_remove=True, cur_user=None):
        """
        Delete given repository. The forks parameter defines what to do with
        attached forks. Throws AttachedForksError if the deleted repo has
        attached forks, and AttachedPullRequestsError if it has attached
        pull requests that are not being deleted.

        :param repo:
        :param forks: str 'delete' or 'detach'
        :param pull_requests: str 'delete' or None
        :param fs_remove: remove(archive) repo from filesystem
        """
        if not cur_user:
            cur_user = getattr(get_current_rhodecode_user(), 'username', None)
        repo = self._get_repo(repo)
        if repo:
            if forks == 'detach':
                for r in repo.forks:
                    r.fork = None
                    self.sa.add(r)
            elif forks == 'delete':
                for r in repo.forks:
                    self.delete(r, forks='delete')
            elif [f for f in repo.forks]:
                raise AttachedForksError()

            # check for pull requests
            pr_sources = repo.pull_requests_source
            pr_targets = repo.pull_requests_target
            if pull_requests != 'delete' and (pr_sources or pr_targets):
                raise AttachedPullRequestsError()

            old_repo_dict = repo.get_dict()
            events.trigger(events.RepoPreDeleteEvent(repo))
            try:
                self.sa.delete(repo)
                if fs_remove:
                    self._delete_filesystem_repo(repo)
                else:
                    log.debug('skipping removal from filesystem')
                old_repo_dict.update({
                    'deleted_by': cur_user,
                    'deleted_on': time.time(),
                })
                hooks_base.delete_repository(**old_repo_dict)
                events.trigger(events.RepoDeleteEvent(repo))
            except Exception:
                log.error(traceback.format_exc())
                raise
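
    # Usage sketch (hypothetical repo name): detach forks so they survive as
    # standalone repositories instead of raising AttachedForksError:
    #
    #   RepoModel().delete('docs/manual', forks='detach')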

    def grant_user_permission(self, repo, user, perm):
        """
        Grant permission for user on given repository, or update existing one
        if found

        :param repo: Instance of Repository, repository_id, or repository name
        :param user: Instance of User, user_id or username
        :param perm: Instance of Permission, or permission_name
        """
        user = self._get_user(user)
        repo = self._get_repo(repo)
        permission = self._get_perm(perm)

        # check if we have that permission already
        obj = self.sa.query(UserRepoToPerm) \
            .filter(UserRepoToPerm.user == user) \
            .filter(UserRepoToPerm.repository == repo) \
            .scalar()
        if obj is None:
            # create new!
            obj = UserRepoToPerm()
        obj.repository = repo
        obj.user = user
        obj.permission = permission
        self.sa.add(obj)
        log.debug('Granted perm %s to %s on %s', perm, user, repo)
        action_logger_generic(
            'granted permission: {} to user: {} on repo: {}'.format(
                perm, user, repo), namespace='security.repo')
        return obj

    def revoke_user_permission(self, repo, user):
        """
        Revoke permission for user on given repository

        :param repo: Instance of Repository, repository_id, or repository name
        :param user: Instance of User, user_id or username
        """

        user = self._get_user(user)
        repo = self._get_repo(repo)

        obj = self.sa.query(UserRepoToPerm) \
            .filter(UserRepoToPerm.repository == repo) \
            .filter(UserRepoToPerm.user == user) \
            .scalar()
        if obj:
            self.sa.delete(obj)
            log.debug('Revoked perm on %s from user %s', repo, user)
            action_logger_generic(
                'revoked permission from user: {} on repo: {}'.format(
                    user, repo), namespace='security.repo')

    def grant_user_group_permission(self, repo, group_name, perm):
        """
        Grant permission for user group on given repository, or update
        existing one if found

        :param repo: Instance of Repository, repository_id, or repository name
        :param group_name: Instance of UserGroup, users_group_id,
            or user group name
        :param perm: Instance of Permission, or permission_name
        """
        repo = self._get_repo(repo)
        group_name = self._get_user_group(group_name)
        permission = self._get_perm(perm)

        # check if we have that permission already
        obj = self.sa.query(UserGroupRepoToPerm) \
            .filter(UserGroupRepoToPerm.users_group == group_name) \
            .filter(UserGroupRepoToPerm.repository == repo) \
            .scalar()

        if obj is None:
            # create new
            obj = UserGroupRepoToPerm()

        obj.repository = repo
        obj.users_group = group_name
        obj.permission = permission
        self.sa.add(obj)
        log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
        action_logger_generic(
            'granted permission: {} to usergroup: {} on repo: {}'.format(
                perm, group_name, repo), namespace='security.repo')

        return obj

    def revoke_user_group_permission(self, repo, group_name):
        """
        Revoke permission for user group on given repository

        :param repo: Instance of Repository, repository_id, or repository name
        :param group_name: Instance of UserGroup, users_group_id,
            or user group name
        """
        repo = self._get_repo(repo)
        group_name = self._get_user_group(group_name)

        obj = self.sa.query(UserGroupRepoToPerm) \
            .filter(UserGroupRepoToPerm.repository == repo) \
            .filter(UserGroupRepoToPerm.users_group == group_name) \
            .scalar()
        if obj:
            self.sa.delete(obj)
            log.debug('Revoked perm on %s from usergroup %s', repo, group_name)
            action_logger_generic(
                'revoked permission from usergroup: {} on repo: {}'.format(
                    group_name, repo), namespace='security.repo')

    def delete_stats(self, repo_name):
        """
        removes stats for given repo

        :param repo_name:
        """
        repo = self._get_repo(repo_name)
        try:
            obj = self.sa.query(Statistics) \
                .filter(Statistics.repository == repo).scalar()
            if obj:
                self.sa.delete(obj)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def add_repo_field(self, repo_name, field_key, field_label, field_value='',
                       field_type='str', field_desc=''):

        repo = self._get_repo(repo_name)

        new_field = RepositoryField()
        new_field.repository = repo
        new_field.field_key = field_key
        new_field.field_type = field_type  # python type
        new_field.field_value = field_value
        new_field.field_desc = field_desc
        new_field.field_label = field_label
        self.sa.add(new_field)
        return new_field

    def delete_repo_field(self, repo_name, field_key):
        repo = self._get_repo(repo_name)
        field = RepositoryField.get_by_key_name(field_key, repo)
        if field:
            self.sa.delete(field)

    def set_landing_rev(self, repo, landing_rev_name):
        if landing_rev_name.startswith('branch:'):
            landing_rev_name = landing_rev_name.split('branch:')[-1]
        scm_instance = repo.scm_instance()
        if scm_instance:
            return scm_instance._remote.set_head_ref(landing_rev_name)
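
    # Sketch: a landing rev stored as 'branch:stable' has the 'branch:'
    # prefix stripped before being handed down to the VCS server:
    #
    #   RepoModel().set_landing_rev(repo, 'branch:stable')
    #   # -> scm_instance._remote.set_head_ref('stable')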

    def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
                                clone_uri=None, repo_store_location=None,
                                use_global_config=False, install_hooks=True):
        """
        Makes a repository on the filesystem. It is group aware, meaning it
        will create a repository within a group, and alter the paths
        according to the group location.

        :param repo_name:
        :param repo_type:
        :param repo_group:
        :param clone_uri:
        :param repo_store_location:
        """
        from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
        from rhodecode.model.scm import ScmModel

        if Repository.NAME_SEP in repo_name:
            raise ValueError(
                'repo_name must not contain groups, got `%s`' % repo_name)

        if isinstance(repo_group, RepoGroup):
            new_parent_path = os.sep.join(repo_group.full_path_splitted)
        else:
            new_parent_path = repo_group or ''

        if repo_store_location:
            _paths = [repo_store_location]
        else:
            _paths = [self.repos_path, new_parent_path, repo_name]
        # we need to make it str for mercurial
        repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))

        # check if this path is not a repository
        if is_valid_repo(repo_path, self.repos_path):
            raise Exception('This path %s is a valid repository' % repo_path)

        # check if this path is a group
        if is_valid_repo_group(repo_path, self.repos_path):
            raise Exception('This path %s is a valid group' % repo_path)

        log.info('creating repo %s in %s from url: `%s`',
                 repo_name, safe_unicode(repo_path),
                 obfuscate_url_pw(clone_uri))

        backend = get_backend(repo_type)

        config_repo = None if use_global_config else repo_name
        if config_repo and new_parent_path:
            config_repo = Repository.NAME_SEP.join(
                (new_parent_path, config_repo))
        config = make_db_config(clear_session=False, repo=config_repo)
        config.set('extensions', 'largefiles', '')

        # patch and reset hooks section of UI config to not run any
        # hooks on creating remote repo
        config.clear_section('hooks')

        # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
        if repo_type == 'git':
            repo = backend(
                repo_path, config=config, create=True, src_url=clone_uri, bare=True,
                with_wire={"cache": False})
        else:
            repo = backend(
                repo_path, config=config, create=True, src_url=clone_uri,
                with_wire={"cache": False})

        if install_hooks:
            repo.install_hooks()

        log.debug('Created repo %s with %s backend',
                  safe_unicode(repo_name), safe_unicode(repo_type))
        return repo

    def _rename_filesystem_repo(self, old, new):
        """
        renames repository on filesystem

        :param old: old name
        :param new: new name
        """
        log.info('renaming repo from %s to %s', old, new)

        old_path = os.path.join(self.repos_path, old)
        new_path = os.path.join(self.repos_path, new)
        if os.path.isdir(new_path):
            raise Exception(
                'Was trying to rename to already existing dir %s' % new_path
            )
        shutil.move(old_path, new_path)
1036
1037
1037 def _delete_filesystem_repo(self, repo):
1038 def _delete_filesystem_repo(self, repo):
1038 """
1039 """
1039 removes repo from filesystem, the removal is acctually made by
1040 removes repo from filesystem, the removal is acctually made by
1040 added rm__ prefix into dir, and rename internat .hg/.git dirs so this
1041 added rm__ prefix into dir, and rename internat .hg/.git dirs so this
1041 repository is no longer valid for rhodecode, can be undeleted later on
1042 repository is no longer valid for rhodecode, can be undeleted later on
1042 by reverting the renames on this repository
1043 by reverting the renames on this repository
1043
1044
1044 :param repo: repo object
1045 :param repo: repo object
1045 """
1046 """
1046 rm_path = os.path.join(self.repos_path, repo.repo_name)
1047 rm_path = os.path.join(self.repos_path, repo.repo_name)
1047 repo_group = repo.group
1048 repo_group = repo.group
1048 log.info("Removing repository %s", rm_path)
1049 log.info("Removing repository %s", rm_path)
1049 # disable hg/git internal that it doesn't get detected as repo
1050 # disable hg/git internal that it doesn't get detected as repo
1050 alias = repo.repo_type
1051 alias = repo.repo_type
1051
1052
1052 config = make_db_config(clear_session=False)
1053 config = make_db_config(clear_session=False)
1053 config.set('extensions', 'largefiles', '')
1054 config.set('extensions', 'largefiles', '')
1054 bare = getattr(repo.scm_instance(config=config), 'bare', False)
1055 bare = getattr(repo.scm_instance(config=config), 'bare', False)
1055
1056
1056 # skip this for bare git repos
1057 # skip this for bare git repos
1057 if not bare:
1058 if not bare:
1058 # disable VCS repo
1059 # disable VCS repo
1059 vcs_path = os.path.join(rm_path, '.%s' % alias)
1060 vcs_path = os.path.join(rm_path, '.%s' % alias)
1060 if os.path.exists(vcs_path):
1061 if os.path.exists(vcs_path):
1061 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
1062 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
1062
1063
1063 _now = datetime.datetime.now()
1064 _now = datetime.datetime.now()
1064 _ms = str(_now.microsecond).rjust(6, '0')
1065 _ms = str(_now.microsecond).rjust(6, '0')
1065 _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
1066 _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
1066 repo.just_name)
1067 repo.just_name)
1067 if repo_group:
1068 if repo_group:
1068 # if repository is in group, prefix the removal path with the group
1069 # if repository is in group, prefix the removal path with the group
1069 args = repo_group.full_path_splitted + [_d]
1070 args = repo_group.full_path_splitted + [_d]
1070 _d = os.path.join(*args)
1071 _d = os.path.join(*args)
1071
1072
1072 if os.path.isdir(rm_path):
1073 if os.path.isdir(rm_path):
1073 shutil.move(rm_path, os.path.join(self.repos_path, _d))
1074 shutil.move(rm_path, os.path.join(self.repos_path, _d))
1074
1075
1075 # finally cleanup diff-cache if it exists
1076 # finally cleanup diff-cache if it exists
1076 cached_diffs_dir = repo.cached_diffs_dir
1077 cached_diffs_dir = repo.cached_diffs_dir
1077 if os.path.isdir(cached_diffs_dir):
1078 if os.path.isdir(cached_diffs_dir):
1078 shutil.rmtree(cached_diffs_dir)
1079 shutil.rmtree(cached_diffs_dir)


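The rm__ naming above is what makes this a soft delete: nothing is removed, the directory is merely renamed into a form RhodeCode no longer treats as a repository. A minimal sketch of how such a removal could be reverted, assuming only the naming scheme visible in _delete_filesystem_repo and ignoring, for brevity, the group-prefix handling shown above (the helper name and its arguments are hypothetical, not part of RhodeCode's API):

import os
import shutil


def undelete_filesystem_repo(repos_path, rm_dir):
    # hypothetical helper; ``rm_dir`` looks like
    # ``rm__20200131_101530_000123__myrepo`` per the scheme above
    repo_name = rm_dir.split('__', 2)[-1]  # drop the ``rm__<timestamp>__`` prefix
    old_path = os.path.join(repos_path, rm_dir)
    new_path = os.path.join(repos_path, repo_name)
    # restore the repo dir itself
    shutil.move(old_path, new_path)
    # re-enable the VCS internals that were renamed to ``rm__.hg``/``rm__.git``
    for alias in ('hg', 'git'):
        disabled = os.path.join(new_path, 'rm__.%s' % alias)
        if os.path.exists(disabled):
            shutil.move(disabled, os.path.join(new_path, '.%s' % alias))
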
class ReadmeFinder:
    """
    Utility which knows how to find a readme for a specific commit.

    The main idea is that this is a configurable algorithm. When creating an
    instance you can define parameters, currently only the `default_renderer`.
    Based on this configuration the method :meth:`search` behaves slightly
    differently.
    """

    readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
    path_re = re.compile(r'^docs?', re.IGNORECASE)

    # extension priorities; lower values sort first, i.e. win
    default_priorities = {
        None: 0,
        '.text': 2,
        '.txt': 3,
        '.rst': 1,
        '.rest': 2,
        '.md': 1,
        '.mkdn': 2,
        '.mdown': 3,
        '.markdown': 4,
    }

    path_priority = {
        'doc': 0,
        'docs': 1,
    }

    FALLBACK_PRIORITY = 99

    RENDERER_TO_EXTENSION = {
        'rst': ['.rst', '.rest'],
        'markdown': ['.md', '.mkdn', '.mdown', '.markdown'],
    }

    def __init__(self, default_renderer=None):
        self._default_renderer = default_renderer
        self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
            default_renderer, [])

    def search(self, commit, path=u'/'):
        """
        Find a readme in the given `commit`.
        """
        nodes = commit.get_nodes(path)
        matches = self._match_readmes(nodes)
        matches = self._sort_according_to_priority(matches)
        if matches:
            return matches[0].node

        # no readme at this level, recurse into doc/docs style directories
        paths = self._match_paths(nodes)
        paths = self._sort_paths_according_to_priority(paths)
        for path in paths:
            match = self.search(commit, path=path)
            if match:
                return match

        return None

    def _match_readmes(self, nodes):
        for node in nodes:
            if not node.is_file():
                continue
            path = node.path.rsplit('/', 1)[-1]
            match = self.readme_re.match(path)
            if match:
                extension = match.group(1)
                yield ReadmeMatch(node, match, self._priority(extension))

    def _match_paths(self, nodes):
        for node in nodes:
            if not node.is_dir():
                continue
            match = self.path_re.match(node.path)
            if match:
                yield node.path

    def _priority(self, extension):
        # extensions of the configured default renderer always win the first
        # tie-break; the static extension table only decides after that
        renderer_priority = (
            0 if extension in self._renderer_extensions else 1)
        extension_priority = self.default_priorities.get(
            extension, self.FALLBACK_PRIORITY)
        return (renderer_priority, extension_priority)

    def _sort_according_to_priority(self, matches):

        def priority_and_path(match):
            return (match.priority, match.path)

        return sorted(matches, key=priority_and_path)

    def _sort_paths_according_to_priority(self, paths):

        def priority_and_path(path):
            return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)

        return sorted(paths, key=priority_and_path)


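To make the priority tuples concrete: with `default_renderer='markdown'` the renderer tie-break puts markdown extensions first, and only then does the static extension table apply, so `README.md` outranks a bare `README`, which in turn outranks `README.rst`. A short illustration (the values follow directly from _priority and the tables above; the assertions are for demonstration only, not part of this module):

finder = ReadmeFinder(default_renderer='markdown')
assert finder._priority('.md') == (0, 1)    # matches the default renderer
assert finder._priority(None) == (1, 0)     # bare ``README`` without extension
assert finder._priority('.rst') == (1, 1)   # known, but not the default renderer
assert finder._priority('.xyz') == (1, 99)  # unknown extension -> FALLBACK_PRIORITY
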
class ReadmeMatch:

    def __init__(self, node, match, priority):
        self.node = node
        self._match = match
        self.priority = priority

    @property
    def path(self):
        return self.node.path

    def __repr__(self):
        return '<ReadmeMatch {} priority={}>'.format(self.path, self.priority)
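
Putting the two classes together: ReadmeFinder.search walks the root of a commit first and only then recurses into doc/docs directories, returning the winning node or None. A minimal usage sketch (obtaining the commit via repo.scm_instance().get_commit() is an assumption based on the scm_instance call used earlier in this module):

finder = ReadmeFinder(default_renderer='markdown')
commit = repo.scm_instance().get_commit()  # assumed commit lookup
readme = finder.search(commit)             # searches '/', then doc/docs dirs
if readme is not None:
    log.debug('readme found at %s', readme.path)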