py3: trivial renaming of unicode to str
Mads Kiilerich
r8081:620c13a3 default
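This changeset mechanically renames uses of the Python 2 builtin `unicode` to `str`, the sole text type on Python 3. A minimal illustration of the pattern being applied, with made-up values (not part of the changeset itself):

    # Python 2 spelling, removed by this changeset:
    #     message = unicode(exc)
    # Python 3 spelling: str() performs the same coercion to text
    exc = ValueError("boom")
    message = str(exc)                                    # 'boom'
    key_part = '-'.join(str(x) for x in (1, 'a', None))   # '1-a-None'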
@@ -1,269 +1,269 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 # This program is free software: you can redistribute it and/or modify
2 # This program is free software: you can redistribute it and/or modify
3 # it under the terms of the GNU General Public License as published by
3 # it under the terms of the GNU General Public License as published by
4 # the Free Software Foundation, either version 3 of the License, or
4 # the Free Software Foundation, either version 3 of the License, or
5 # (at your option) any later version.
5 # (at your option) any later version.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU General Public License
12 # You should have received a copy of the GNU General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 """
14 """
15 kallithea.controllers.api
15 kallithea.controllers.api
16 ~~~~~~~~~~~~~~~~~~~~~~~~~
16 ~~~~~~~~~~~~~~~~~~~~~~~~~
17
17
18 JSON RPC controller
18 JSON RPC controller
19
19
20 This file was forked by the Kallithea project in July 2014.
20 This file was forked by the Kallithea project in July 2014.
21 Original author and date, and relevant copyright and licensing information is below:
21 Original author and date, and relevant copyright and licensing information is below:
22 :created_on: Aug 20, 2011
22 :created_on: Aug 20, 2011
23 :author: marcink
23 :author: marcink
24 :copyright: (c) 2013 RhodeCode GmbH, and others.
24 :copyright: (c) 2013 RhodeCode GmbH, and others.
25 :license: GPLv3, see LICENSE.md for more details.
25 :license: GPLv3, see LICENSE.md for more details.
26 """
26 """
27
27
28 import inspect
28 import inspect
29 import itertools
29 import itertools
30 import logging
30 import logging
31 import time
31 import time
32 import traceback
32 import traceback
33 import types
33 import types
34
34
35 from tg import Response, TGController, request, response
35 from tg import Response, TGController, request, response
36 from webob.exc import HTTPError, HTTPException
36 from webob.exc import HTTPError, HTTPException
37
37
38 from kallithea.lib import ext_json
38 from kallithea.lib import ext_json
39 from kallithea.lib.auth import AuthUser
39 from kallithea.lib.auth import AuthUser
40 from kallithea.lib.base import _get_ip_addr as _get_ip
40 from kallithea.lib.base import _get_ip_addr as _get_ip
41 from kallithea.lib.base import get_path_info
41 from kallithea.lib.base import get_path_info
42 from kallithea.lib.utils2 import ascii_bytes
42 from kallithea.lib.utils2 import ascii_bytes
43 from kallithea.model.db import User
43 from kallithea.model.db import User
44
44
45
45
46 log = logging.getLogger('JSONRPC')
46 log = logging.getLogger('JSONRPC')
47
47
48
48
49 class JSONRPCError(BaseException):
49 class JSONRPCError(BaseException):
50
50
51 def __init__(self, message):
51 def __init__(self, message):
52 self.message = message
52 self.message = message
53 super(JSONRPCError, self).__init__()
53 super(JSONRPCError, self).__init__()
54
54
55 def __str__(self):
55 def __str__(self):
56 return self.message
56 return self.message
57
57
58
58
59 class JSONRPCErrorResponse(Response, HTTPException):
59 class JSONRPCErrorResponse(Response, HTTPException):
60 """
60 """
61 Generate a Response object with a JSON-RPC error body
61 Generate a Response object with a JSON-RPC error body
62 """
62 """
63
63
64 def __init__(self, message=None, retid=None, code=None):
64 def __init__(self, message=None, retid=None, code=None):
65 HTTPException.__init__(self, message, self)
65 HTTPException.__init__(self, message, self)
66 Response.__init__(self,
66 Response.__init__(self,
67 json_body=dict(id=retid, result=None, error=message),
67 json_body=dict(id=retid, result=None, error=message),
68 status=code,
68 status=code,
69 content_type='application/json')
69 content_type='application/json')
70
70
71
71
72 class JSONRPCController(TGController):
72 class JSONRPCController(TGController):
73 """
73 """
74 A WSGI-speaking JSON-RPC controller class
74 A WSGI-speaking JSON-RPC controller class
75
75
76 See the specification:
76 See the specification:
77 <http://json-rpc.org/wiki/specification>`.
77 <http://json-rpc.org/wiki/specification>`.
78
78
79 Valid controller return values should be json-serializable objects.
79 Valid controller return values should be json-serializable objects.
80
80
81 Sub-classes should catch their exceptions and raise JSONRPCError
81 Sub-classes should catch their exceptions and raise JSONRPCError
82 if they want to pass meaningful errors to the client.
82 if they want to pass meaningful errors to the client.
83
83
84 """
84 """
85
85
86 def _get_ip_addr(self, environ):
86 def _get_ip_addr(self, environ):
87 return _get_ip(environ)
87 return _get_ip(environ)
88
88
89 def _get_method_args(self):
89 def _get_method_args(self):
90 """
90 """
91 Return `self._rpc_args` to dispatched controller method
91 Return `self._rpc_args` to dispatched controller method
92 chosen by __call__
92 chosen by __call__
93 """
93 """
94 return self._rpc_args
94 return self._rpc_args
95
95
96 def _dispatch(self, state, remainder=None):
96 def _dispatch(self, state, remainder=None):
97 """
97 """
98 Parse the request body as JSON, look up the method on the
98 Parse the request body as JSON, look up the method on the
99 controller and if it exists, dispatch to it.
99 controller and if it exists, dispatch to it.
100 """
100 """
101 # Since we are here we should respond as JSON
101 # Since we are here we should respond as JSON
102 response.content_type = 'application/json'
102 response.content_type = 'application/json'
103
103
104 environ = state.request.environ
104 environ = state.request.environ
105 start = time.time()
105 start = time.time()
106 ip_addr = self._get_ip_addr(environ)
106 ip_addr = self._get_ip_addr(environ)
107 self._req_id = None
107 self._req_id = None
108 if 'CONTENT_LENGTH' not in environ:
108 if 'CONTENT_LENGTH' not in environ:
109 log.debug("No Content-Length")
109 log.debug("No Content-Length")
110 raise JSONRPCErrorResponse(retid=self._req_id,
110 raise JSONRPCErrorResponse(retid=self._req_id,
111 message="No Content-Length in request")
111 message="No Content-Length in request")
112 else:
112 else:
113 length = environ['CONTENT_LENGTH'] or 0
113 length = environ['CONTENT_LENGTH'] or 0
114 length = int(environ['CONTENT_LENGTH'])
114 length = int(environ['CONTENT_LENGTH'])
115 log.debug('Content-Length: %s', length)
115 log.debug('Content-Length: %s', length)
116
116
117 if length == 0:
117 if length == 0:
118 raise JSONRPCErrorResponse(retid=self._req_id,
118 raise JSONRPCErrorResponse(retid=self._req_id,
119 message="Content-Length is 0")
119 message="Content-Length is 0")
120
120
121 raw_body = environ['wsgi.input'].read(length)
121 raw_body = environ['wsgi.input'].read(length)
122
122
123 try:
123 try:
124 json_body = ext_json.loads(raw_body)
124 json_body = ext_json.loads(raw_body)
125 except ValueError as e:
125 except ValueError as e:
126 # catch JSON errors Here
126 # catch JSON errors Here
127 raise JSONRPCErrorResponse(retid=self._req_id,
127 raise JSONRPCErrorResponse(retid=self._req_id,
128 message="JSON parse error ERR:%s RAW:%r"
128 message="JSON parse error ERR:%s RAW:%r"
129 % (e, raw_body))
129 % (e, raw_body))
130
130
131 # check AUTH based on API key
131 # check AUTH based on API key
132 try:
132 try:
133 self._req_api_key = json_body['api_key']
133 self._req_api_key = json_body['api_key']
134 self._req_id = json_body['id']
134 self._req_id = json_body['id']
135 self._req_method = json_body['method']
135 self._req_method = json_body['method']
136 self._request_params = json_body['args']
136 self._request_params = json_body['args']
137 if not isinstance(self._request_params, dict):
137 if not isinstance(self._request_params, dict):
138 self._request_params = {}
138 self._request_params = {}
139
139
140 log.debug('method: %s, params: %s',
140 log.debug('method: %s, params: %s',
141 self._req_method, self._request_params)
141 self._req_method, self._request_params)
142 except KeyError as e:
142 except KeyError as e:
143 raise JSONRPCErrorResponse(retid=self._req_id,
143 raise JSONRPCErrorResponse(retid=self._req_id,
144 message='Incorrect JSON query missing %s' % e)
144 message='Incorrect JSON query missing %s' % e)
145
145
146 # check if we can find this session using api_key
146 # check if we can find this session using api_key
147 try:
147 try:
148 u = User.get_by_api_key(self._req_api_key)
148 u = User.get_by_api_key(self._req_api_key)
149 auth_user = AuthUser.make(dbuser=u, ip_addr=ip_addr)
149 auth_user = AuthUser.make(dbuser=u, ip_addr=ip_addr)
150 if auth_user is None:
150 if auth_user is None:
151 raise JSONRPCErrorResponse(retid=self._req_id,
151 raise JSONRPCErrorResponse(retid=self._req_id,
152 message='Invalid API key')
152 message='Invalid API key')
153 except Exception as e:
153 except Exception as e:
154 raise JSONRPCErrorResponse(retid=self._req_id,
154 raise JSONRPCErrorResponse(retid=self._req_id,
155 message='Invalid API key')
155 message='Invalid API key')
156
156
157 request.authuser = auth_user
157 request.authuser = auth_user
158 request.ip_addr = ip_addr
158 request.ip_addr = ip_addr
159
159
160 self._error = None
160 self._error = None
161 try:
161 try:
162 self._func = self._find_method()
162 self._func = self._find_method()
163 except AttributeError as e:
163 except AttributeError as e:
164 raise JSONRPCErrorResponse(retid=self._req_id,
164 raise JSONRPCErrorResponse(retid=self._req_id,
165 message=str(e))
165 message=str(e))
166
166
167 # now that we have a method, add self._req_params to
167 # now that we have a method, add self._req_params to
168 # self.kargs and dispatch control to WGIController
168 # self.kargs and dispatch control to WGIController
169 argspec = inspect.getfullargspec(self._func)
169 argspec = inspect.getfullargspec(self._func)
170 arglist = argspec.args[1:]
170 arglist = argspec.args[1:]
171 argtypes = [type(arg) for arg in argspec.defaults or []]
171 argtypes = [type(arg) for arg in argspec.defaults or []]
172 default_empty = type(NotImplemented)
172 default_empty = type(NotImplemented)
173
173
174 # kw arguments required by this method
174 # kw arguments required by this method
175 func_kwargs = dict(itertools.zip_longest(reversed(arglist), reversed(argtypes),
175 func_kwargs = dict(itertools.zip_longest(reversed(arglist), reversed(argtypes),
176 fillvalue=default_empty))
176 fillvalue=default_empty))
177
177
178 # This attribute will need to be first param of a method that uses
178 # This attribute will need to be first param of a method that uses
179 # api_key, which is translated to instance of user at that name
179 # api_key, which is translated to instance of user at that name
180 USER_SESSION_ATTR = 'apiuser'
180 USER_SESSION_ATTR = 'apiuser'
181
181
182 # get our arglist and check if we provided them as args
182 # get our arglist and check if we provided them as args
183 for arg, default in func_kwargs.items():
183 for arg, default in func_kwargs.items():
184 if arg == USER_SESSION_ATTR:
184 if arg == USER_SESSION_ATTR:
185 # USER_SESSION_ATTR is something translated from API key and
185 # USER_SESSION_ATTR is something translated from API key and
186 # this is checked before so we don't need validate it
186 # this is checked before so we don't need validate it
187 continue
187 continue
188
188
189 # skip the required param check if it's default value is
189 # skip the required param check if it's default value is
190 # NotImplementedType (default_empty)
190 # NotImplementedType (default_empty)
191 if default == default_empty and arg not in self._request_params:
191 if default == default_empty and arg not in self._request_params:
192 raise JSONRPCErrorResponse(
192 raise JSONRPCErrorResponse(
193 retid=self._req_id,
193 retid=self._req_id,
194 message='Missing non optional `%s` arg in JSON DATA' % arg,
194 message='Missing non optional `%s` arg in JSON DATA' % arg,
195 )
195 )
196
196
197 extra = set(self._request_params).difference(func_kwargs)
197 extra = set(self._request_params).difference(func_kwargs)
198 if extra:
198 if extra:
199 raise JSONRPCErrorResponse(
199 raise JSONRPCErrorResponse(
200 retid=self._req_id,
200 retid=self._req_id,
201 message='Unknown %s arg in JSON DATA' %
201 message='Unknown %s arg in JSON DATA' %
202 ', '.join('`%s`' % arg for arg in extra),
202 ', '.join('`%s`' % arg for arg in extra),
203 )
203 )
204
204
205 self._rpc_args = {}
205 self._rpc_args = {}
206 self._rpc_args.update(self._request_params)
206 self._rpc_args.update(self._request_params)
207 self._rpc_args['action'] = self._req_method
207 self._rpc_args['action'] = self._req_method
208 self._rpc_args['environ'] = environ
208 self._rpc_args['environ'] = environ
209
209
210 log.info('IP: %s Request to %s time: %.3fs' % (
210 log.info('IP: %s Request to %s time: %.3fs' % (
211 self._get_ip_addr(environ),
211 self._get_ip_addr(environ),
212 get_path_info(environ), time.time() - start)
212 get_path_info(environ), time.time() - start)
213 )
213 )
214
214
215 state.set_action(self._rpc_call, [])
215 state.set_action(self._rpc_call, [])
216 state.set_params(self._rpc_args)
216 state.set_params(self._rpc_args)
217 return state
217 return state
218
218
219 def _rpc_call(self, action, environ, **rpc_args):
219 def _rpc_call(self, action, environ, **rpc_args):
220 """
220 """
221 Call the specified RPC Method
221 Call the specified RPC Method
222 """
222 """
223 raw_response = ''
223 raw_response = ''
224 try:
224 try:
225 raw_response = getattr(self, action)(**rpc_args)
225 raw_response = getattr(self, action)(**rpc_args)
226 if isinstance(raw_response, HTTPError):
226 if isinstance(raw_response, HTTPError):
227 self._error = str(raw_response)
227 self._error = str(raw_response)
228 except JSONRPCError as e:
228 except JSONRPCError as e:
229 - self._error = unicode(e)
229 + self._error = str(e)
230 except Exception as e:
230 except Exception as e:
231 log.error('Encountered unhandled exception: %s',
231 log.error('Encountered unhandled exception: %s',
232 traceback.format_exc(),)
232 traceback.format_exc(),)
233 json_exc = JSONRPCError('Internal server error')
233 json_exc = JSONRPCError('Internal server error')
234 - self._error = unicode(json_exc)
234 + self._error = str(json_exc)
235
235
236 if self._error is not None:
236 if self._error is not None:
237 raw_response = None
237 raw_response = None
238
238
239 response = dict(id=self._req_id, result=raw_response, error=self._error)
239 response = dict(id=self._req_id, result=raw_response, error=self._error)
240 try:
240 try:
241 return ascii_bytes(ext_json.dumps(response))
241 return ascii_bytes(ext_json.dumps(response))
242 except TypeError as e:
242 except TypeError as e:
243 log.error('API FAILED. Error encoding response for %s %s: %s\n%s', action, rpc_args, e, traceback.format_exc())
243 log.error('API FAILED. Error encoding response for %s %s: %s\n%s', action, rpc_args, e, traceback.format_exc())
244 return ascii_bytes(ext_json.dumps(
244 return ascii_bytes(ext_json.dumps(
245 dict(
245 dict(
246 id=self._req_id,
246 id=self._req_id,
247 result=None,
247 result=None,
248 error="Error encoding response",
248 error="Error encoding response",
249 )
249 )
250 ))
250 ))
251
251
252 def _find_method(self):
252 def _find_method(self):
253 """
253 """
254 Return method named by `self._req_method` in controller if able
254 Return method named by `self._req_method` in controller if able
255 """
255 """
256 log.debug('Trying to find JSON-RPC method: %s', self._req_method)
256 log.debug('Trying to find JSON-RPC method: %s', self._req_method)
257 if self._req_method.startswith('_'):
257 if self._req_method.startswith('_'):
258 raise AttributeError("Method not allowed")
258 raise AttributeError("Method not allowed")
259
259
260 try:
260 try:
261 func = getattr(self, self._req_method, None)
261 func = getattr(self, self._req_method, None)
262 except UnicodeEncodeError:
262 except UnicodeEncodeError:
263 raise AttributeError("Problem decoding unicode in requested "
263 raise AttributeError("Problem decoding unicode in requested "
264 "method name.")
264 "method name.")
265
265
266 if isinstance(func, types.MethodType):
266 if isinstance(func, types.MethodType):
267 return func
267 return func
268 else:
268 else:
269 raise AttributeError("No such method: %s" % (self._req_method,))
269 raise AttributeError("No such method: %s" % (self._req_method,))
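In the JSON-RPC controller above, the two changed lines (`unicode(e)` becoming `str(e)` and `unicode(json_exc)` becoming `str(json_exc)`) rely on `JSONRPCError.__str__` returning the stored message, so the text placed in the JSON `error` field is unchanged. A small self-contained sketch of that interaction, using only the class as defined above and an invented request id:

    class JSONRPCError(BaseException):
        def __init__(self, message):
            self.message = message
            super(JSONRPCError, self).__init__()

        def __str__(self):
            return self.message

    e = JSONRPCError('Internal server error')
    error_text = str(e)   # 'Internal server error' on Python 3, as unicode(e) gave on Python 2
    body = dict(id=1, result=None, error=error_text)   # shape of the failure response built in _rpc_call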
@@ -1,134 +1,134 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 # This program is free software: you can redistribute it and/or modify
2 # This program is free software: you can redistribute it and/or modify
3 # it under the terms of the GNU General Public License as published by
3 # it under the terms of the GNU General Public License as published by
4 # the Free Software Foundation, either version 3 of the License, or
4 # the Free Software Foundation, either version 3 of the License, or
5 # (at your option) any later version.
5 # (at your option) any later version.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU General Public License
12 # You should have received a copy of the GNU General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 """
14 """
15 kallithea.lib.celerylib
15 kallithea.lib.celerylib
16 ~~~~~~~~~~~~~~~~~~~~~~~
16 ~~~~~~~~~~~~~~~~~~~~~~~
17
17
18 celery libs for Kallithea
18 celery libs for Kallithea
19
19
20 This file was forked by the Kallithea project in July 2014.
20 This file was forked by the Kallithea project in July 2014.
21 Original author and date, and relevant copyright and licensing information is below:
21 Original author and date, and relevant copyright and licensing information is below:
22 :created_on: Nov 27, 2010
22 :created_on: Nov 27, 2010
23 :author: marcink
23 :author: marcink
24 :copyright: (c) 2013 RhodeCode GmbH, and others.
24 :copyright: (c) 2013 RhodeCode GmbH, and others.
25 :license: GPLv3, see LICENSE.md for more details.
25 :license: GPLv3, see LICENSE.md for more details.
26 """
26 """
27
27
28
28
29 import logging
29 import logging
30 import os
30 import os
31 from hashlib import md5
31 from hashlib import md5
32
32
33 from decorator import decorator
33 from decorator import decorator
34 from tg import config
34 from tg import config
35
35
36 from kallithea import CELERY_EAGER, CELERY_ON
36 from kallithea import CELERY_EAGER, CELERY_ON
37 from kallithea.lib.pidlock import DaemonLock, LockHeld
37 from kallithea.lib.pidlock import DaemonLock, LockHeld
38 from kallithea.lib.utils2 import safe_bytes
38 from kallithea.lib.utils2 import safe_bytes
39 from kallithea.model import meta
39 from kallithea.model import meta
40
40
41
41
42 log = logging.getLogger(__name__)
42 log = logging.getLogger(__name__)
43
43
44
44
45 class FakeTask(object):
45 class FakeTask(object):
46 """Fake a sync result to make it look like a finished task"""
46 """Fake a sync result to make it look like a finished task"""
47
47
48 def __init__(self, result):
48 def __init__(self, result):
49 self.result = result
49 self.result = result
50
50
51 def failed(self):
51 def failed(self):
52 return False
52 return False
53
53
54 traceback = None # if failed
54 traceback = None # if failed
55
55
56 task_id = None
56 task_id = None
57
57
58
58
59 def task(f_org):
59 def task(f_org):
60 """Wrapper of celery.task.task, running async if CELERY_ON
60 """Wrapper of celery.task.task, running async if CELERY_ON
61 """
61 """
62
62
63 if CELERY_ON:
63 if CELERY_ON:
64 def f_async(*args, **kwargs):
64 def f_async(*args, **kwargs):
65 log.info('executing %s task', f_org.__name__)
65 log.info('executing %s task', f_org.__name__)
66 try:
66 try:
67 f_org(*args, **kwargs)
67 f_org(*args, **kwargs)
68 finally:
68 finally:
69 log.info('executed %s task', f_org.__name__)
69 log.info('executed %s task', f_org.__name__)
70 f_async.__name__ = f_org.__name__
70 f_async.__name__ = f_org.__name__
71 from kallithea.lib import celerypylons
71 from kallithea.lib import celerypylons
72 runner = celerypylons.task(ignore_result=True)(f_async)
72 runner = celerypylons.task(ignore_result=True)(f_async)
73
73
74 def f_wrapped(*args, **kwargs):
74 def f_wrapped(*args, **kwargs):
75 t = runner.apply_async(args=args, kwargs=kwargs)
75 t = runner.apply_async(args=args, kwargs=kwargs)
76 log.info('executing task %s in async mode - id %s', f_org, t.task_id)
76 log.info('executing task %s in async mode - id %s', f_org, t.task_id)
77 return t
77 return t
78 else:
78 else:
79 def f_wrapped(*args, **kwargs):
79 def f_wrapped(*args, **kwargs):
80 log.info('executing task %s in sync', f_org.__name__)
80 log.info('executing task %s in sync', f_org.__name__)
81 try:
81 try:
82 result = f_org(*args, **kwargs)
82 result = f_org(*args, **kwargs)
83 except Exception as e:
83 except Exception as e:
84 log.error('exception executing sync task %s in sync: %r', f_org.__name__, e)
84 log.error('exception executing sync task %s in sync: %r', f_org.__name__, e)
85 raise # TODO: return this in FakeTask as with async tasks?
85 raise # TODO: return this in FakeTask as with async tasks?
86 return FakeTask(result)
86 return FakeTask(result)
87
87
88 return f_wrapped
88 return f_wrapped
89
89
90
90
91 def __get_lockkey(func, *fargs, **fkwargs):
91 def __get_lockkey(func, *fargs, **fkwargs):
92 params = list(fargs)
92 params = list(fargs)
93 params.extend(['%s-%s' % ar for ar in fkwargs.items()])
93 params.extend(['%s-%s' % ar for ar in fkwargs.items()])
94
94
95 func_name = str(func.__name__) if hasattr(func, '__name__') else str(func)
95 func_name = str(func.__name__) if hasattr(func, '__name__') else str(func)
96
96
97 lockkey = 'task_%s.lock' % \
97 lockkey = 'task_%s.lock' % \
98 - md5(safe_bytes(func_name + '-' + '-'.join(unicode(x) for x in params))).hexdigest()
98 + md5(safe_bytes(func_name + '-' + '-'.join(str(x) for x in params))).hexdigest()
99 return lockkey
99 return lockkey
100
100
101
101
102 def locked_task(func):
102 def locked_task(func):
103 def __wrapper(func, *fargs, **fkwargs):
103 def __wrapper(func, *fargs, **fkwargs):
104 lockkey = __get_lockkey(func, *fargs, **fkwargs)
104 lockkey = __get_lockkey(func, *fargs, **fkwargs)
105 lockkey_path = config.get('cache_dir') or config['app_conf']['cache_dir'] # Backward compatibility for TurboGears < 2.4
105 lockkey_path = config.get('cache_dir') or config['app_conf']['cache_dir'] # Backward compatibility for TurboGears < 2.4
106
106
107 log.info('running task with lockkey %s', lockkey)
107 log.info('running task with lockkey %s', lockkey)
108 try:
108 try:
109 l = DaemonLock(os.path.join(lockkey_path, lockkey))
109 l = DaemonLock(os.path.join(lockkey_path, lockkey))
110 ret = func(*fargs, **fkwargs)
110 ret = func(*fargs, **fkwargs)
111 l.release()
111 l.release()
112 return ret
112 return ret
113 except LockHeld:
113 except LockHeld:
114 log.info('LockHeld')
114 log.info('LockHeld')
115 return 'Task with key %s already running' % lockkey
115 return 'Task with key %s already running' % lockkey
116
116
117 return decorator(__wrapper, func)
117 return decorator(__wrapper, func)
118
118
119
119
120 def get_session():
120 def get_session():
121 sa = meta.Session()
121 sa = meta.Session()
122 return sa
122 return sa
123
123
124
124
125 def dbsession(func):
125 def dbsession(func):
126 def __wrapper(func, *fargs, **fkwargs):
126 def __wrapper(func, *fargs, **fkwargs):
127 try:
127 try:
128 ret = func(*fargs, **fkwargs)
128 ret = func(*fargs, **fkwargs)
129 return ret
129 return ret
130 finally:
130 finally:
131 if CELERY_ON and not CELERY_EAGER:
131 if CELERY_ON and not CELERY_EAGER:
132 meta.Session.remove()
132 meta.Session.remove()
133
133
134 return decorator(__wrapper, func)
134 return decorator(__wrapper, func)
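The only functional change in the celery helpers is inside `__get_lockkey`, where task arguments are now stringified with `str()` instead of `unicode()` before being hashed into the lock file name. A rough standalone equivalent of that computation, assuming `safe_bytes` simply UTF-8 encodes a str (the real helper lives in kallithea.lib.utils2) and using invented example arguments:

    from hashlib import md5

    def get_lockkey(func_name, *fargs, **fkwargs):
        params = list(fargs)
        params.extend('%s-%s' % item for item in fkwargs.items())
        joined = func_name + '-' + '-'.join(str(x) for x in params)
        return 'task_%s.lock' % md5(joined.encode('utf-8')).hexdigest()

    # get_lockkey('send_email', 42, user='alice')
    # -> 'task_<32 hex chars>.lock', stable for the same arguments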
@@ -1,1320 +1,1320 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 # This program is free software: you can redistribute it and/or modify
2 # This program is free software: you can redistribute it and/or modify
3 # it under the terms of the GNU General Public License as published by
3 # it under the terms of the GNU General Public License as published by
4 # the Free Software Foundation, either version 3 of the License, or
4 # the Free Software Foundation, either version 3 of the License, or
5 # (at your option) any later version.
5 # (at your option) any later version.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU General Public License
12 # You should have received a copy of the GNU General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 """
14 """
15 Helper functions
15 Helper functions
16
16
17 Consists of functions to typically be used within templates, but also
17 Consists of functions to typically be used within templates, but also
18 available to Controllers. This module is available to both as 'h'.
18 available to Controllers. This module is available to both as 'h'.
19 """
19 """
20 import hashlib
20 import hashlib
21 import json
21 import json
22 import logging
22 import logging
23 import random
23 import random
24 import re
24 import re
25 import textwrap
25 import textwrap
26 import urllib.parse
26 import urllib.parse
27
27
28 from beaker.cache import cache_region
28 from beaker.cache import cache_region
29 from pygments import highlight as code_highlight
29 from pygments import highlight as code_highlight
30 from pygments.formatters.html import HtmlFormatter
30 from pygments.formatters.html import HtmlFormatter
31 from tg.i18n import ugettext as _
31 from tg.i18n import ugettext as _
32 from webhelpers2.html import HTML, escape, literal
32 from webhelpers2.html import HTML, escape, literal
33 from webhelpers2.html.tags import NotGiven, Option, Options, _input, _make_safe_id_component, checkbox, end_form
33 from webhelpers2.html.tags import NotGiven, Option, Options, _input, _make_safe_id_component, checkbox, end_form
34 from webhelpers2.html.tags import form as insecure_form
34 from webhelpers2.html.tags import form as insecure_form
35 from webhelpers2.html.tags import hidden, link_to, password, radio
35 from webhelpers2.html.tags import hidden, link_to, password, radio
36 from webhelpers2.html.tags import select as webhelpers2_select
36 from webhelpers2.html.tags import select as webhelpers2_select
37 from webhelpers2.html.tags import submit, text, textarea
37 from webhelpers2.html.tags import submit, text, textarea
38 from webhelpers2.number import format_byte_size
38 from webhelpers2.number import format_byte_size
39 from webhelpers2.text import chop_at, truncate, wrap_paragraphs
39 from webhelpers2.text import chop_at, truncate, wrap_paragraphs
40
40
41 from kallithea.config.routing import url
41 from kallithea.config.routing import url
42 from kallithea.lib.annotate import annotate_highlight
42 from kallithea.lib.annotate import annotate_highlight
43 #==============================================================================
43 #==============================================================================
44 # PERMS
44 # PERMS
45 #==============================================================================
45 #==============================================================================
46 from kallithea.lib.auth import HasPermissionAny, HasRepoGroupPermissionLevel, HasRepoPermissionLevel
46 from kallithea.lib.auth import HasPermissionAny, HasRepoGroupPermissionLevel, HasRepoPermissionLevel
47 from kallithea.lib.markup_renderer import url_re
47 from kallithea.lib.markup_renderer import url_re
48 from kallithea.lib.pygmentsutils import get_custom_lexer
48 from kallithea.lib.pygmentsutils import get_custom_lexer
49 from kallithea.lib.utils2 import MENTIONS_REGEX, AttributeDict
49 from kallithea.lib.utils2 import MENTIONS_REGEX, AttributeDict
50 from kallithea.lib.utils2 import age as _age
50 from kallithea.lib.utils2 import age as _age
51 from kallithea.lib.utils2 import credentials_filter, safe_bytes, safe_int, safe_str, str2bool, time_to_datetime
51 from kallithea.lib.utils2 import credentials_filter, safe_bytes, safe_int, safe_str, str2bool, time_to_datetime
52 from kallithea.lib.vcs.backends.base import BaseChangeset, EmptyChangeset
52 from kallithea.lib.vcs.backends.base import BaseChangeset, EmptyChangeset
53 from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError
53 from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError
54 #==============================================================================
54 #==============================================================================
55 # SCM FILTERS available via h.
55 # SCM FILTERS available via h.
56 #==============================================================================
56 #==============================================================================
57 from kallithea.lib.vcs.utils import author_email, author_name
57 from kallithea.lib.vcs.utils import author_email, author_name
58
58
59
59
60 log = logging.getLogger(__name__)
60 log = logging.getLogger(__name__)
61
61
62
62
63 def canonical_url(*args, **kargs):
63 def canonical_url(*args, **kargs):
64 '''Like url(x, qualified=True), but returns url that not only is qualified
64 '''Like url(x, qualified=True), but returns url that not only is qualified
65 but also canonical, as configured in canonical_url'''
65 but also canonical, as configured in canonical_url'''
66 from kallithea import CONFIG
66 from kallithea import CONFIG
67 try:
67 try:
68 parts = CONFIG.get('canonical_url', '').split('://', 1)
68 parts = CONFIG.get('canonical_url', '').split('://', 1)
69 kargs['host'] = parts[1]
69 kargs['host'] = parts[1]
70 kargs['protocol'] = parts[0]
70 kargs['protocol'] = parts[0]
71 except IndexError:
71 except IndexError:
72 kargs['qualified'] = True
72 kargs['qualified'] = True
73 return url(*args, **kargs)
73 return url(*args, **kargs)
74
74
75
75
76 def canonical_hostname():
76 def canonical_hostname():
77 '''Return canonical hostname of system'''
77 '''Return canonical hostname of system'''
78 from kallithea import CONFIG
78 from kallithea import CONFIG
79 try:
79 try:
80 parts = CONFIG.get('canonical_url', '').split('://', 1)
80 parts = CONFIG.get('canonical_url', '').split('://', 1)
81 return parts[1].split('/', 1)[0]
81 return parts[1].split('/', 1)[0]
82 except IndexError:
82 except IndexError:
83 parts = url('home', qualified=True).split('://', 1)
83 parts = url('home', qualified=True).split('://', 1)
84 return parts[1].split('/', 1)[0]
84 return parts[1].split('/', 1)[0]
85
85
86
86
87 def html_escape(s):
87 def html_escape(s):
88 """Return string with all html escaped.
88 """Return string with all html escaped.
89 This is also safe for javascript in html but not necessarily correct.
89 This is also safe for javascript in html but not necessarily correct.
90 """
90 """
91 return (s
91 return (s
92 .replace('&', '&amp;')
92 .replace('&', '&amp;')
93 .replace(">", "&gt;")
93 .replace(">", "&gt;")
94 .replace("<", "&lt;")
94 .replace("<", "&lt;")
95 .replace('"', "&quot;")
95 .replace('"', "&quot;")
96 .replace("'", "&apos;") # Note: this is HTML5 not HTML4 and might not work in mails
96 .replace("'", "&apos;") # Note: this is HTML5 not HTML4 and might not work in mails
97 )
97 )
98
98
99 def js(value):
99 def js(value):
100 """Convert Python value to the corresponding JavaScript representation.
100 """Convert Python value to the corresponding JavaScript representation.
101
101
102 This is necessary to safely insert arbitrary values into HTML <script>
102 This is necessary to safely insert arbitrary values into HTML <script>
103 sections e.g. using Mako template expression substitution.
103 sections e.g. using Mako template expression substitution.
104
104
105 Note: Rather than using this function, it's preferable to avoid the
105 Note: Rather than using this function, it's preferable to avoid the
106 insertion of values into HTML <script> sections altogether. Instead,
106 insertion of values into HTML <script> sections altogether. Instead,
107 data should (to the extent possible) be passed to JavaScript using
107 data should (to the extent possible) be passed to JavaScript using
108 data attributes or AJAX calls, eliminating the need for JS specific
108 data attributes or AJAX calls, eliminating the need for JS specific
109 escaping.
109 escaping.
110
110
111 Note: This is not safe for use in attributes (e.g. onclick), because
111 Note: This is not safe for use in attributes (e.g. onclick), because
112 quotes are not escaped.
112 quotes are not escaped.
113
113
114 Because the rules for parsing <script> varies between XHTML (where
114 Because the rules for parsing <script> varies between XHTML (where
115 normal rules apply for any special characters) and HTML (where
115 normal rules apply for any special characters) and HTML (where
116 entities are not interpreted, but the literal string "</script>"
116 entities are not interpreted, but the literal string "</script>"
117 is forbidden), the function ensures that the result never contains
117 is forbidden), the function ensures that the result never contains
118 '&', '<' and '>', thus making it safe in both those contexts (but
118 '&', '<' and '>', thus making it safe in both those contexts (but
119 not in attributes).
119 not in attributes).
120 """
120 """
121 return literal(
121 return literal(
122 ('(' + json.dumps(value) + ')')
122 ('(' + json.dumps(value) + ')')
123 # In JSON, the following can only appear in string literals.
123 # In JSON, the following can only appear in string literals.
124 .replace('&', r'\x26')
124 .replace('&', r'\x26')
125 .replace('<', r'\x3c')
125 .replace('<', r'\x3c')
126 .replace('>', r'\x3e')
126 .replace('>', r'\x3e')
127 )
127 )
128
128
129
129
130 def jshtml(val):
130 def jshtml(val):
131 """HTML escapes a string value, then converts the resulting string
131 """HTML escapes a string value, then converts the resulting string
132 to its corresponding JavaScript representation (see `js`).
132 to its corresponding JavaScript representation (see `js`).
133
133
134 This is used when a plain-text string (possibly containing special
134 This is used when a plain-text string (possibly containing special
135 HTML characters) will be used by a script in an HTML context (e.g.
135 HTML characters) will be used by a script in an HTML context (e.g.
136 element.innerHTML or jQuery's 'html' method).
136 element.innerHTML or jQuery's 'html' method).
137
137
138 If in doubt, err on the side of using `jshtml` over `js`, since it's
138 If in doubt, err on the side of using `jshtml` over `js`, since it's
139 better to escape too much than too little.
139 better to escape too much than too little.
140 """
140 """
141 return js(escape(val))
141 return js(escape(val))
142
142
143
143
144 def shorter(s, size=20, firstline=False, postfix='...'):
144 def shorter(s, size=20, firstline=False, postfix='...'):
145 """Truncate s to size, including the postfix string if truncating.
145 """Truncate s to size, including the postfix string if truncating.
146 If firstline, truncate at newline.
146 If firstline, truncate at newline.
147 """
147 """
148 if firstline:
148 if firstline:
149 s = s.split('\n', 1)[0].rstrip()
149 s = s.split('\n', 1)[0].rstrip()
150 if len(s) > size:
150 if len(s) > size:
151 return s[:size - len(postfix)] + postfix
151 return s[:size - len(postfix)] + postfix
152 return s
152 return s
153
153
154
154
155 def reset(name, value, id=NotGiven, **attrs):
155 def reset(name, value, id=NotGiven, **attrs):
156 """Create a reset button, similar to webhelpers2.html.tags.submit ."""
156 """Create a reset button, similar to webhelpers2.html.tags.submit ."""
157 return _input("reset", name, value, id, attrs)
157 return _input("reset", name, value, id, attrs)
158
158
159
159
160 def select(name, selected_values, options, id=NotGiven, **attrs):
160 def select(name, selected_values, options, id=NotGiven, **attrs):
161 """Convenient wrapper of webhelpers2 to let it accept options as a tuple list"""
161 """Convenient wrapper of webhelpers2 to let it accept options as a tuple list"""
162 if isinstance(options, list):
162 if isinstance(options, list):
163 option_list = options
163 option_list = options
164 # Handle old value,label lists ... where value also can be value,label lists
164 # Handle old value,label lists ... where value also can be value,label lists
165 options = Options()
165 options = Options()
166 for x in option_list:
166 for x in option_list:
167 if isinstance(x, tuple) and len(x) == 2:
167 if isinstance(x, tuple) and len(x) == 2:
168 value, label = x
168 value, label = x
169 elif isinstance(x, str):
169 elif isinstance(x, str):
170 value = label = x
170 value = label = x
171 else:
171 else:
172 log.error('invalid select option %r', x)
172 log.error('invalid select option %r', x)
173 raise
173 raise
174 if isinstance(value, list):
174 if isinstance(value, list):
175 og = options.add_optgroup(label)
175 og = options.add_optgroup(label)
176 for x in value:
176 for x in value:
177 if isinstance(x, tuple) and len(x) == 2:
177 if isinstance(x, tuple) and len(x) == 2:
178 group_value, group_label = x
178 group_value, group_label = x
179 elif isinstance(x, str):
179 elif isinstance(x, str):
180 group_value = group_label = x
180 group_value = group_label = x
181 else:
181 else:
182 log.error('invalid select option %r', x)
182 log.error('invalid select option %r', x)
183 raise
183 raise
184 og.add_option(group_label, group_value)
184 og.add_option(group_label, group_value)
185 else:
185 else:
186 options.add_option(label, value)
186 options.add_option(label, value)
187 return webhelpers2_select(name, selected_values, options, id=id, **attrs)
187 return webhelpers2_select(name, selected_values, options, id=id, **attrs)
188
188
189
189
190 safeid = _make_safe_id_component
190 safeid = _make_safe_id_component
191
191
192
192
193 def FID(raw_id, path):
193 def FID(raw_id, path):
194 """
194 """
195 Creates a unique ID for filenode based on it's hash of path and revision
195 Creates a unique ID for filenode based on it's hash of path and revision
196 it's safe to use in urls
196 it's safe to use in urls
197
197
198 :param raw_id:
198 :param raw_id:
199 :param path:
199 :param path:
200 """
200 """
201
201
202 return 'C-%s-%s' % (short_id(raw_id), hashlib.md5(safe_bytes(path)).hexdigest()[:12])
202 return 'C-%s-%s' % (short_id(raw_id), hashlib.md5(safe_bytes(path)).hexdigest()[:12])
203
203
204
204
205 class _FilesBreadCrumbs(object):
205 class _FilesBreadCrumbs(object):
206
206
207 def __call__(self, repo_name, rev, paths):
207 def __call__(self, repo_name, rev, paths):
208 url_l = [link_to(repo_name, url('files_home',
208 url_l = [link_to(repo_name, url('files_home',
209 repo_name=repo_name,
209 repo_name=repo_name,
210 revision=rev, f_path=''),
210 revision=rev, f_path=''),
211 class_='ypjax-link')]
211 class_='ypjax-link')]
212 paths_l = paths.split('/')
212 paths_l = paths.split('/')
213 for cnt, p in enumerate(paths_l):
213 for cnt, p in enumerate(paths_l):
214 if p != '':
214 if p != '':
215 url_l.append(link_to(p,
215 url_l.append(link_to(p,
216 url('files_home',
216 url('files_home',
217 repo_name=repo_name,
217 repo_name=repo_name,
218 revision=rev,
218 revision=rev,
219 f_path='/'.join(paths_l[:cnt + 1])
219 f_path='/'.join(paths_l[:cnt + 1])
220 ),
220 ),
221 class_='ypjax-link'
221 class_='ypjax-link'
222 )
222 )
223 )
223 )
224
224
225 return literal('/'.join(url_l))
225 return literal('/'.join(url_l))
226
226
227
227
228 files_breadcrumbs = _FilesBreadCrumbs()
228 files_breadcrumbs = _FilesBreadCrumbs()
229
229
230
230
231 class CodeHtmlFormatter(HtmlFormatter):
231 class CodeHtmlFormatter(HtmlFormatter):
232 """
232 """
233 My code Html Formatter for source codes
233 My code Html Formatter for source codes
234 """
234 """
235
235
236 def wrap(self, source, outfile):
236 def wrap(self, source, outfile):
237 return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
237 return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
238
238
239 def _wrap_code(self, source):
239 def _wrap_code(self, source):
240 for cnt, it in enumerate(source):
240 for cnt, it in enumerate(source):
241 i, t = it
241 i, t = it
242 t = '<span id="L%s">%s</span>' % (cnt + 1, t)
242 t = '<span id="L%s">%s</span>' % (cnt + 1, t)
243 yield i, t
243 yield i, t
244
244
245 def _wrap_tablelinenos(self, inner):
245 def _wrap_tablelinenos(self, inner):
246 inner_lines = []
246 inner_lines = []
247 lncount = 0
247 lncount = 0
248 for t, line in inner:
248 for t, line in inner:
249 if t:
249 if t:
250 lncount += 1
250 lncount += 1
251 inner_lines.append(line)
251 inner_lines.append(line)
252
252
253 fl = self.linenostart
253 fl = self.linenostart
254 mw = len(str(lncount + fl - 1))
254 mw = len(str(lncount + fl - 1))
255 sp = self.linenospecial
255 sp = self.linenospecial
256 st = self.linenostep
256 st = self.linenostep
257 la = self.lineanchors
257 la = self.lineanchors
258 aln = self.anchorlinenos
258 aln = self.anchorlinenos
259 nocls = self.noclasses
259 nocls = self.noclasses
260 if sp:
260 if sp:
261 lines = []
261 lines = []
262
262
263 for i in range(fl, fl + lncount):
263 for i in range(fl, fl + lncount):
264 if i % st == 0:
264 if i % st == 0:
265 if i % sp == 0:
265 if i % sp == 0:
266 if aln:
266 if aln:
267 lines.append('<a href="#%s%d" class="special">%*d</a>' %
267 lines.append('<a href="#%s%d" class="special">%*d</a>' %
268 (la, i, mw, i))
268 (la, i, mw, i))
269 else:
269 else:
270 lines.append('<span class="special">%*d</span>' % (mw, i))
270 lines.append('<span class="special">%*d</span>' % (mw, i))
271 else:
271 else:
272 if aln:
272 if aln:
273 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
273 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
274 else:
274 else:
275 lines.append('%*d' % (mw, i))
275 lines.append('%*d' % (mw, i))
276 else:
276 else:
277 lines.append('')
277 lines.append('')
278 ls = '\n'.join(lines)
278 ls = '\n'.join(lines)
279 else:
279 else:
280 lines = []
280 lines = []
281 for i in range(fl, fl + lncount):
281 for i in range(fl, fl + lncount):
282 if i % st == 0:
282 if i % st == 0:
283 if aln:
283 if aln:
284 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
284 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
285 else:
285 else:
286 lines.append('%*d' % (mw, i))
286 lines.append('%*d' % (mw, i))
287 else:
287 else:
288 lines.append('')
288 lines.append('')
289 ls = '\n'.join(lines)
289 ls = '\n'.join(lines)
290
290
291 # in case you wonder about the seemingly redundant <div> here: since the
291 # in case you wonder about the seemingly redundant <div> here: since the
292 # content in the other cell also is wrapped in a div, some browsers in
292 # content in the other cell also is wrapped in a div, some browsers in
293 # some configurations seem to mess up the formatting...
293 # some configurations seem to mess up the formatting...
294 if nocls:
294 if nocls:
295 yield 0, ('<table class="%stable">' % self.cssclass +
295 yield 0, ('<table class="%stable">' % self.cssclass +
296 '<tr><td><div class="linenodiv">'
296 '<tr><td><div class="linenodiv">'
297 '<pre>' + ls + '</pre></div></td>'
297 '<pre>' + ls + '</pre></div></td>'
298 '<td id="hlcode" class="code">')
298 '<td id="hlcode" class="code">')
299 else:
299 else:
300 yield 0, ('<table class="%stable">' % self.cssclass +
300 yield 0, ('<table class="%stable">' % self.cssclass +
301 '<tr><td class="linenos"><div class="linenodiv">'
301 '<tr><td class="linenos"><div class="linenodiv">'
302 '<pre>' + ls + '</pre></div></td>'
302 '<pre>' + ls + '</pre></div></td>'
303 '<td id="hlcode" class="code">')
303 '<td id="hlcode" class="code">')
304 yield 0, ''.join(inner_lines)
304 yield 0, ''.join(inner_lines)
305 yield 0, '</td></tr></table>'
305 yield 0, '</td></tr></table>'
306
306
307
307
308 _whitespace_re = re.compile(r'(\t)|( )(?=\n|</div>)')
308 _whitespace_re = re.compile(r'(\t)|( )(?=\n|</div>)')
309
309
310
310
311 def _markup_whitespace(m):
311 def _markup_whitespace(m):
312 groups = m.groups()
312 groups = m.groups()
313 if groups[0]:
313 if groups[0]:
314 return '<u>\t</u>'
314 return '<u>\t</u>'
315 if groups[1]:
315 if groups[1]:
316 return ' <i></i>'
316 return ' <i></i>'
317
317
318
318
319 def markup_whitespace(s):
319 def markup_whitespace(s):
320 return _whitespace_re.sub(_markup_whitespace, s)
320 return _whitespace_re.sub(_markup_whitespace, s)
321
321
322
322
323 def pygmentize(filenode, **kwargs):
323 def pygmentize(filenode, **kwargs):
324 """
324 """
325 pygmentize function using pygments
325 pygmentize function using pygments
326
326
327 :param filenode:
327 :param filenode:
328 """
328 """
329 lexer = get_custom_lexer(filenode.extension) or filenode.lexer
329 lexer = get_custom_lexer(filenode.extension) or filenode.lexer
330 return literal(markup_whitespace(
330 return literal(markup_whitespace(
331 code_highlight(safe_str(filenode.content), lexer, CodeHtmlFormatter(**kwargs))))
331 code_highlight(safe_str(filenode.content), lexer, CodeHtmlFormatter(**kwargs))))
332
332
333
333
334 def hsv_to_rgb(h, s, v):
334 def hsv_to_rgb(h, s, v):
335 if s == 0.0:
335 if s == 0.0:
336 return v, v, v
336 return v, v, v
337 i = int(h * 6.0) # XXX assume int() truncates!
337 i = int(h * 6.0) # XXX assume int() truncates!
338 f = (h * 6.0) - i
338 f = (h * 6.0) - i
339 p = v * (1.0 - s)
339 p = v * (1.0 - s)
340 q = v * (1.0 - s * f)
340 q = v * (1.0 - s * f)
341 t = v * (1.0 - s * (1.0 - f))
341 t = v * (1.0 - s * (1.0 - f))
342 i = i % 6
342 i = i % 6
343 if i == 0:
343 if i == 0:
344 return v, t, p
344 return v, t, p
345 if i == 1:
345 if i == 1:
346 return q, v, p
346 return q, v, p
347 if i == 2:
347 if i == 2:
348 return p, v, t
348 return p, v, t
349 if i == 3:
349 if i == 3:
350 return p, q, v
350 return p, q, v
351 if i == 4:
351 if i == 4:
352 return t, p, v
352 return t, p, v
353 if i == 5:
353 if i == 5:
354 return v, p, q
354 return v, p, q
355
355
356
356
357 def gen_color(n=10000):
357 def gen_color(n=10000):
358 """generator for getting n of evenly distributed colors using
358 """generator for getting n of evenly distributed colors using
359 hsv color and golden ratio. It always return same order of colors
359 hsv color and golden ratio. It always return same order of colors
360
360
361 :returns: RGB tuple
361 :returns: RGB tuple
362 """
362 """
363
363
364 golden_ratio = 0.618033988749895
364 golden_ratio = 0.618033988749895
365 h = 0.22717784590367374
365 h = 0.22717784590367374
366
366
367 for _unused in range(n):
367 for _unused in range(n):
368 h += golden_ratio
368 h += golden_ratio
369 h %= 1
369 h %= 1
370 HSV_tuple = [h, 0.95, 0.95]
370 HSV_tuple = [h, 0.95, 0.95]
371 RGB_tuple = hsv_to_rgb(*HSV_tuple)
371 RGB_tuple = hsv_to_rgb(*HSV_tuple)
372 yield [str(int(x * 256)) for x in RGB_tuple]
372 yield [str(int(x * 256)) for x in RGB_tuple]
373
373
374
374
375 def pygmentize_annotation(repo_name, filenode, **kwargs):
375 def pygmentize_annotation(repo_name, filenode, **kwargs):
376 """
376 """
377 pygmentize function for annotation
377 pygmentize function for annotation
378
378
379 :param filenode:
379 :param filenode:
380 """
380 """
381 cgenerator = gen_color()
381 cgenerator = gen_color()
382 color_dict = {}
382 color_dict = {}
383
383
384 def get_color_string(cs):
384 def get_color_string(cs):
385 if cs in color_dict:
385 if cs in color_dict:
386 col = color_dict[cs]
386 col = color_dict[cs]
387 else:
387 else:
388 col = color_dict[cs] = next(cgenerator)
388 col = color_dict[cs] = next(cgenerator)
389 return "color: rgb(%s)! important;" % (', '.join(col))
389 return "color: rgb(%s)! important;" % (', '.join(col))
390
390
391 def url_func(changeset):
391 def url_func(changeset):
392 author = escape(changeset.author)
392 author = escape(changeset.author)
393 date = changeset.date
393 date = changeset.date
394 message = escape(changeset.message)
394 message = escape(changeset.message)
395 tooltip_html = ("<b>Author:</b> %s<br/>"
395 tooltip_html = ("<b>Author:</b> %s<br/>"
396 "<b>Date:</b> %s</b><br/>"
396 "<b>Date:</b> %s</b><br/>"
397 "<b>Message:</b> %s") % (author, date, message)
397 "<b>Message:</b> %s") % (author, date, message)
398
398
399 lnk_format = show_id(changeset)
399 lnk_format = show_id(changeset)
400 uri = link_to(
400 uri = link_to(
401 lnk_format,
401 lnk_format,
402 url('changeset_home', repo_name=repo_name,
402 url('changeset_home', repo_name=repo_name,
403 revision=changeset.raw_id),
403 revision=changeset.raw_id),
404 style=get_color_string(changeset.raw_id),
404 style=get_color_string(changeset.raw_id),
405 **{'data-toggle': 'popover',
405 **{'data-toggle': 'popover',
406 'data-content': tooltip_html}
406 'data-content': tooltip_html}
407 )
407 )
408
408
409 uri += '\n'
409 uri += '\n'
410 return uri
410 return uri
411
411
412 return literal(markup_whitespace(annotate_highlight(filenode, url_func, **kwargs)))
412 return literal(markup_whitespace(annotate_highlight(filenode, url_func, **kwargs)))
413
413
414
414
415 class _Message(object):
415 class _Message(object):
416 """A message returned by ``pop_flash_messages()``.
416 """A message returned by ``pop_flash_messages()``.
417
417
418 Converting the message to a string returns the message text. Instances
418 Converting the message to a string returns the message text. Instances
419 also have the following attributes:
419 also have the following attributes:
420
420
421 * ``category``: the category specified when the message was created.
421 * ``category``: the category specified when the message was created.
422 * ``message``: the html-safe message text.
422 * ``message``: the html-safe message text.
423 """
423 """
424
424
425 def __init__(self, category, message):
425 def __init__(self, category, message):
426 self.category = category
426 self.category = category
427 self.message = message
427 self.message = message
428
428
429
429
430 def _session_flash_messages(append=None, clear=False):
430 def _session_flash_messages(append=None, clear=False):
431 """Manage a message queue in tg.session: return the current message queue
431 """Manage a message queue in tg.session: return the current message queue
432 after appending the given message, and possibly clearing the queue."""
432 after appending the given message, and possibly clearing the queue."""
433 key = 'flash'
433 key = 'flash'
434 from tg import session
434 from tg import session
435 if key in session:
435 if key in session:
436 flash_messages = session[key]
436 flash_messages = session[key]
437 else:
437 else:
438 if append is None: # common fast path - also used for clearing empty queue
438 if append is None: # common fast path - also used for clearing empty queue
439 return [] # don't bother saving
439 return [] # don't bother saving
440 flash_messages = []
440 flash_messages = []
441 session[key] = flash_messages
441 session[key] = flash_messages
442 if append is not None and append not in flash_messages:
442 if append is not None and append not in flash_messages:
443 flash_messages.append(append)
443 flash_messages.append(append)
444 if clear:
444 if clear:
445 session.pop(key, None)
445 session.pop(key, None)
446 session.save()
446 session.save()
447 return flash_messages
447 return flash_messages
448
448
449
449
450 def flash(message, category, logf=None):
450 def flash(message, category, logf=None):
451 """
451 """
452 Show a message to the user _and_ log it through the specified function
452 Show a message to the user _and_ log it through the specified function
453
453
454 category: notice (default), warning, error, success
454 category: notice (default), warning, error, success
455 logf: a custom log function - such as log.debug
455 logf: a custom log function - such as log.debug
456
456
457 logf defaults to log.info, unless category equals 'success', in which
457 logf defaults to log.info, unless category equals 'success', in which
458 case logf defaults to log.debug.
458 case logf defaults to log.debug.
459 """
459 """
460 assert category in ('error', 'success', 'warning'), category
460 assert category in ('error', 'success', 'warning'), category
461 if hasattr(message, '__html__'):
461 if hasattr(message, '__html__'):
462 # render to HTML for storing in cookie
462 # render to HTML for storing in cookie
463 - safe_message = unicode(message)
463 + safe_message = str(message)
464 else:
464 else:
465 # Apply str - the message might be an exception with __str__
465 # Apply str - the message might be an exception with __str__
466 # Escape, so we can trust the result without further escaping, without any risk of injection
466 # Escape, so we can trust the result without further escaping, without any risk of injection
467 - safe_message = html_escape(unicode(message))
467 + safe_message = html_escape(str(message))
468 if logf is None:
468 if logf is None:
469 logf = log.info
469 logf = log.info
470 if category == 'success':
470 if category == 'success':
471 logf = log.debug
471 logf = log.debug
472
472
473 logf('Flash %s: %s', category, safe_message)
473 logf('Flash %s: %s', category, safe_message)
474
474
475 _session_flash_messages(append=(category, safe_message))
475 _session_flash_messages(append=(category, safe_message))
476
476
477
477
478 def pop_flash_messages():
478 def pop_flash_messages():
479 """Return all accumulated messages and delete them from the session.
479 """Return all accumulated messages and delete them from the session.
480
480
481 The return value is a list of ``Message`` objects.
481 The return value is a list of ``Message`` objects.
482 """
482 """
483 return [_Message(category, message) for category, message in _session_flash_messages(clear=True)]
483 return [_Message(category, message) for category, message in _session_flash_messages(clear=True)]
484
484
485
485
486 age = lambda x, y=False: _age(x, y)
486 age = lambda x, y=False: _age(x, y)
487 capitalize = lambda x: x.capitalize()
487 capitalize = lambda x: x.capitalize()
488 email = author_email
488 email = author_email
489 short_id = lambda x: x[:12]
489 short_id = lambda x: x[:12]
490 hide_credentials = lambda x: ''.join(credentials_filter(x))
490 hide_credentials = lambda x: ''.join(credentials_filter(x))
491
491
492
492
493 def show_id(cs):
493 def show_id(cs):
494 """
494 """
495 Configurable function that shows ID
495 Configurable function that shows ID
496 by default it's r123:fffeeefffeee
496 by default it's r123:fffeeefffeee
497
497
498 :param cs: changeset instance
498 :param cs: changeset instance
499 """
499 """
500 from kallithea import CONFIG
500 from kallithea import CONFIG
501 def_len = safe_int(CONFIG.get('show_sha_length', 12))
501 def_len = safe_int(CONFIG.get('show_sha_length', 12))
502 show_rev = str2bool(CONFIG.get('show_revision_number', False))
502 show_rev = str2bool(CONFIG.get('show_revision_number', False))
503
503
504 raw_id = cs.raw_id[:def_len]
504 raw_id = cs.raw_id[:def_len]
505 if show_rev:
505 if show_rev:
506 return 'r%s:%s' % (cs.revision, raw_id)
506 return 'r%s:%s' % (cs.revision, raw_id)
507 else:
507 else:
508 return raw_id
508 return raw_id
509
509
510
510
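A sketch of how the two CONFIG settings read by show_id() combine (illustrative only;
the revision number and hash are invented):

    # Assume a changeset with revision 123 and raw_id 'fffeeefffeee0123456789ab...'.
    # show_sha_length = 12, show_revision_number = True   ->  'r123:fffeeefffeee'
    # show_sha_length = 12, show_revision_number = False  ->  'fffeeefffeee'
    # show_sha_length = 6,  show_revision_number = False  ->  'fffeee'
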
511 def fmt_date(date):
511 def fmt_date(date):
512 if date:
512 if date:
513 return date.strftime("%Y-%m-%d %H:%M:%S")
513 return date.strftime("%Y-%m-%d %H:%M:%S")
514 return ""
514 return ""
515
515
516
516
517 def is_git(repository):
517 def is_git(repository):
518 if hasattr(repository, 'alias'):
518 if hasattr(repository, 'alias'):
519 _type = repository.alias
519 _type = repository.alias
520 elif hasattr(repository, 'repo_type'):
520 elif hasattr(repository, 'repo_type'):
521 _type = repository.repo_type
521 _type = repository.repo_type
522 else:
522 else:
523 _type = repository
523 _type = repository
524 return _type == 'git'
524 return _type == 'git'
525
525
526
526
527 def is_hg(repository):
527 def is_hg(repository):
528 if hasattr(repository, 'alias'):
528 if hasattr(repository, 'alias'):
529 _type = repository.alias
529 _type = repository.alias
530 elif hasattr(repository, 'repo_type'):
530 elif hasattr(repository, 'repo_type'):
531 _type = repository.repo_type
531 _type = repository.repo_type
532 else:
532 else:
533 _type = repository
533 _type = repository
534 return _type == 'hg'
534 return _type == 'hg'
535
535
536
536
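is_git() and is_hg() accept a scm instance (via .alias), a db Repository (via
.repo_type) or a plain string, so callers never need isinstance checks. A quick
sketch (illustrative only):

    assert is_git('git') and not is_git('hg')
    assert is_hg('hg')
    # is_git(repo) and is_hg(repo) work the same way for repository objects.
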
537 @cache_region('long_term', 'user_attr_or_none')
537 @cache_region('long_term', 'user_attr_or_none')
538 def user_attr_or_none(author, show_attr):
538 def user_attr_or_none(author, show_attr):
539 """Try to match email part of VCS committer string with a local user and return show_attr
539 """Try to match email part of VCS committer string with a local user and return show_attr
540 - or return None if user not found"""
540 - or return None if user not found"""
541 email = author_email(author)
541 email = author_email(author)
542 if email:
542 if email:
543 from kallithea.model.db import User
543 from kallithea.model.db import User
544 user = User.get_by_email(email, cache=True) # cache will only use sql_cache_short
544 user = User.get_by_email(email, cache=True) # cache will only use sql_cache_short
545 if user is not None:
545 if user is not None:
546 return getattr(user, show_attr)
546 return getattr(user, show_attr)
547 return None
547 return None
548
548
549
549
550 def email_or_none(author):
550 def email_or_none(author):
551 """Try to match email part of VCS committer string with a local user.
551 """Try to match email part of VCS committer string with a local user.
552 Return primary email of user, email part of the specified author name, or None."""
552 Return primary email of user, email part of the specified author name, or None."""
553 if not author:
553 if not author:
554 return None
554 return None
555 email = user_attr_or_none(author, 'email')
555 email = user_attr_or_none(author, 'email')
556 if email is not None:
556 if email is not None:
557 return email # always use user's main email address - not necessarily the one used to find user
557 return email # always use user's main email address - not necessarily the one used to find user
558
558
559 # extract email from the commit string
559 # extract email from the commit string
560 email = author_email(author)
560 email = author_email(author)
561 if email:
561 if email:
562 return email
562 return email
563
563
564 # No valid email, not a valid user in the system, none!
564 # No valid email, not a valid user in the system, none!
565 return None
565 return None
566
566
567
567
568 def person(author, show_attr="username"):
568 def person(author, show_attr="username"):
569 """Find the user identified by 'author', return one of the users attributes,
569 """Find the user identified by 'author', return one of the users attributes,
570 default to the username attribute, None if there is no user"""
570 default to the username attribute, None if there is no user"""
571 from kallithea.model.db import User
571 from kallithea.model.db import User
572 # if author is already an instance use it for extraction
572 # if author is already an instance use it for extraction
573 if isinstance(author, User):
573 if isinstance(author, User):
574 return getattr(author, show_attr)
574 return getattr(author, show_attr)
575
575
576 value = user_attr_or_none(author, show_attr)
576 value = user_attr_or_none(author, show_attr)
577 if value is not None:
577 if value is not None:
578 return value
578 return value
579
579
580 # Still nothing? Just pass back the author name if any, else the email
580 # Still nothing? Just pass back the author name if any, else the email
581 return author_name(author) or email(author)
581 return author_name(author) or email(author)
582
582
583
583
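The lookup order implemented by person() - User instance, then e-mail match via
user_attr_or_none(), then the raw author string - can be illustrated with a made-up
committer string (not taken from the changeset):

    # If a local user has jd@example.com registered:
    person('John Doe <jd@example.com>')           # -> that user's username
    person('John Doe <jd@example.com>', 'email')  # -> that user's primary e-mail
    # Otherwise the raw author name is passed back:
    person('John Doe <jd@example.com>')           # -> 'John Doe'
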
584 def person_by_id(id_, show_attr="username"):
584 def person_by_id(id_, show_attr="username"):
585 from kallithea.model.db import User
585 from kallithea.model.db import User
586 # attr to return from fetched user
586 # attr to return from fetched user
587 person_getter = lambda usr: getattr(usr, show_attr)
587 person_getter = lambda usr: getattr(usr, show_attr)
588
588
589 # maybe it's an ID ?
589 # maybe it's an ID ?
590 if str(id_).isdigit() or isinstance(id_, int):
590 if str(id_).isdigit() or isinstance(id_, int):
591 id_ = int(id_)
591 id_ = int(id_)
592 user = User.get(id_)
592 user = User.get(id_)
593 if user is not None:
593 if user is not None:
594 return person_getter(user)
594 return person_getter(user)
595 return id_
595 return id_
596
596
597
597
598 def boolicon(value):
598 def boolicon(value):
599 """Returns boolean value of a value, represented as small html image of true/false
599 """Returns boolean value of a value, represented as small html image of true/false
600 icons
600 icons
601
601
602 :param value: value
602 :param value: value
603 """
603 """
604
604
605 if value:
605 if value:
606 return HTML.tag('i', class_="icon-ok")
606 return HTML.tag('i', class_="icon-ok")
607 else:
607 else:
608 return HTML.tag('i', class_="icon-minus-circled")
608 return HTML.tag('i', class_="icon-minus-circled")
609
609
610
610
611 def action_parser(user_log, feed=False, parse_cs=False):
611 def action_parser(user_log, feed=False, parse_cs=False):
612 """
612 """
613 This helper will action_map the specified string action into translated
613 This helper will action_map the specified string action into translated
614 fancy names with icons and links
614 fancy names with icons and links
615
615
616 :param user_log: user log instance
616 :param user_log: user log instance
617 :param feed: use output for feeds (no html and fancy icons)
617 :param feed: use output for feeds (no html and fancy icons)
618 :param parse_cs: parse Changesets into VCS instances
618 :param parse_cs: parse Changesets into VCS instances
619 """
619 """
620
620
621 action = user_log.action
621 action = user_log.action
622 action_params = ' '
622 action_params = ' '
623
623
624 x = action.split(':')
624 x = action.split(':')
625
625
626 if len(x) > 1:
626 if len(x) > 1:
627 action, action_params = x
627 action, action_params = x
628
628
629 def get_cs_links():
629 def get_cs_links():
630 revs_limit = 3 # display this amount always
630 revs_limit = 3 # display this amount always
631 revs_top_limit = 50 # show up to this many changesets hidden
631 revs_top_limit = 50 # show up to this many changesets hidden
632 revs_ids = action_params.split(',')
632 revs_ids = action_params.split(',')
633 deleted = user_log.repository is None
633 deleted = user_log.repository is None
634 if deleted:
634 if deleted:
635 return ','.join(revs_ids)
635 return ','.join(revs_ids)
636
636
637 repo_name = user_log.repository.repo_name
637 repo_name = user_log.repository.repo_name
638
638
639 def lnk(rev, repo_name):
639 def lnk(rev, repo_name):
640 lazy_cs = False
640 lazy_cs = False
641 title_ = None
641 title_ = None
642 url_ = '#'
642 url_ = '#'
643 if isinstance(rev, BaseChangeset) or isinstance(rev, AttributeDict):
643 if isinstance(rev, BaseChangeset) or isinstance(rev, AttributeDict):
644 if rev.op and rev.ref_name:
644 if rev.op and rev.ref_name:
645 if rev.op == 'delete_branch':
645 if rev.op == 'delete_branch':
646 lbl = _('Deleted branch: %s') % rev.ref_name
646 lbl = _('Deleted branch: %s') % rev.ref_name
647 elif rev.op == 'tag':
647 elif rev.op == 'tag':
648 lbl = _('Created tag: %s') % rev.ref_name
648 lbl = _('Created tag: %s') % rev.ref_name
649 else:
649 else:
650 lbl = 'Unknown operation %s' % rev.op
650 lbl = 'Unknown operation %s' % rev.op
651 else:
651 else:
652 lazy_cs = True
652 lazy_cs = True
653 lbl = rev.short_id[:8]
653 lbl = rev.short_id[:8]
654 url_ = url('changeset_home', repo_name=repo_name,
654 url_ = url('changeset_home', repo_name=repo_name,
655 revision=rev.raw_id)
655 revision=rev.raw_id)
656 else:
656 else:
657 # changeset cannot be found - it might have been stripped or removed
657 # changeset cannot be found - it might have been stripped or removed
658 lbl = rev[:12]
658 lbl = rev[:12]
659 title_ = _('Changeset %s not found') % lbl
659 title_ = _('Changeset %s not found') % lbl
660 if parse_cs:
660 if parse_cs:
661 return link_to(lbl, url_, title=title_, **{'data-toggle': 'tooltip'})
661 return link_to(lbl, url_, title=title_, **{'data-toggle': 'tooltip'})
662 return link_to(lbl, url_, class_='lazy-cs' if lazy_cs else '',
662 return link_to(lbl, url_, class_='lazy-cs' if lazy_cs else '',
663 **{'data-raw_id': rev.raw_id, 'data-repo_name': repo_name})
663 **{'data-raw_id': rev.raw_id, 'data-repo_name': repo_name})
664
664
665 def _get_op(rev_txt):
665 def _get_op(rev_txt):
666 _op = None
666 _op = None
667 _name = rev_txt
667 _name = rev_txt
668 if len(rev_txt.split('=>')) == 2:
668 if len(rev_txt.split('=>')) == 2:
669 _op, _name = rev_txt.split('=>')
669 _op, _name = rev_txt.split('=>')
670 return _op, _name
670 return _op, _name
671
671
672 revs = []
672 revs = []
673 if len([v for v in revs_ids if v != '']) > 0:
673 if len([v for v in revs_ids if v != '']) > 0:
674 repo = None
674 repo = None
675 for rev in revs_ids[:revs_top_limit]:
675 for rev in revs_ids[:revs_top_limit]:
676 _op, _name = _get_op(rev)
676 _op, _name = _get_op(rev)
677
677
678 # we want parsed changesets, or new log store format is bad
678 # we want parsed changesets, or new log store format is bad
679 if parse_cs:
679 if parse_cs:
680 try:
680 try:
681 if repo is None:
681 if repo is None:
682 repo = user_log.repository.scm_instance
682 repo = user_log.repository.scm_instance
683 _rev = repo.get_changeset(rev)
683 _rev = repo.get_changeset(rev)
684 revs.append(_rev)
684 revs.append(_rev)
685 except ChangesetDoesNotExistError:
685 except ChangesetDoesNotExistError:
686 log.error('cannot find revision %s in this repo', rev)
686 log.error('cannot find revision %s in this repo', rev)
687 revs.append(rev)
687 revs.append(rev)
688 else:
688 else:
689 _rev = AttributeDict({
689 _rev = AttributeDict({
690 'short_id': rev[:12],
690 'short_id': rev[:12],
691 'raw_id': rev,
691 'raw_id': rev,
692 'message': '',
692 'message': '',
693 'op': _op,
693 'op': _op,
694 'ref_name': _name
694 'ref_name': _name
695 })
695 })
696 revs.append(_rev)
696 revs.append(_rev)
697 cs_links = [" " + ', '.join(
697 cs_links = [" " + ', '.join(
698 [lnk(rev, repo_name) for rev in revs[:revs_limit]]
698 [lnk(rev, repo_name) for rev in revs[:revs_limit]]
699 )]
699 )]
700 _op1, _name1 = _get_op(revs_ids[0])
700 _op1, _name1 = _get_op(revs_ids[0])
701 _op2, _name2 = _get_op(revs_ids[-1])
701 _op2, _name2 = _get_op(revs_ids[-1])
702
702
703 _rev = '%s...%s' % (_name1, _name2)
703 _rev = '%s...%s' % (_name1, _name2)
704
704
705 compare_view = (
705 compare_view = (
706 ' <div class="compare_view" data-toggle="tooltip" title="%s">'
706 ' <div class="compare_view" data-toggle="tooltip" title="%s">'
707 '<a href="%s">%s</a> </div>' % (
707 '<a href="%s">%s</a> </div>' % (
708 _('Show all combined changesets %s->%s') % (
708 _('Show all combined changesets %s->%s') % (
709 revs_ids[0][:12], revs_ids[-1][:12]
709 revs_ids[0][:12], revs_ids[-1][:12]
710 ),
710 ),
711 url('changeset_home', repo_name=repo_name,
711 url('changeset_home', repo_name=repo_name,
712 revision=_rev
712 revision=_rev
713 ),
713 ),
714 _('Compare view')
714 _('Compare view')
715 )
715 )
716 )
716 )
717
717
718 # if we have exactly one more than normally displayed
718 # if we have exactly one more than normally displayed
719 # just display it, takes less space than displaying
719 # just display it, takes less space than displaying
720 # "and 1 more revisions"
720 # "and 1 more revisions"
721 if len(revs_ids) == revs_limit + 1:
721 if len(revs_ids) == revs_limit + 1:
722 cs_links.append(", " + lnk(revs[revs_limit], repo_name))
722 cs_links.append(", " + lnk(revs[revs_limit], repo_name))
723
723
724 # hidden-by-default ones
724 # hidden-by-default ones
725 if len(revs_ids) > revs_limit + 1:
725 if len(revs_ids) > revs_limit + 1:
726 uniq_id = revs_ids[0]
726 uniq_id = revs_ids[0]
727 html_tmpl = (
727 html_tmpl = (
728 '<span> %s <a class="show_more" id="_%s" '
728 '<span> %s <a class="show_more" id="_%s" '
729 'href="#more">%s</a> %s</span>'
729 'href="#more">%s</a> %s</span>'
730 )
730 )
731 if not feed:
731 if not feed:
732 cs_links.append(html_tmpl % (
732 cs_links.append(html_tmpl % (
733 _('and'),
733 _('and'),
734 uniq_id, _('%s more') % (len(revs_ids) - revs_limit),
734 uniq_id, _('%s more') % (len(revs_ids) - revs_limit),
735 _('revisions')
735 _('revisions')
736 )
736 )
737 )
737 )
738
738
739 if not feed:
739 if not feed:
740 html_tmpl = '<span id="%s" style="display:none">, %s </span>'
740 html_tmpl = '<span id="%s" style="display:none">, %s </span>'
741 else:
741 else:
742 html_tmpl = '<span id="%s"> %s </span>'
742 html_tmpl = '<span id="%s"> %s </span>'
743
743
744 morelinks = ', '.join(
744 morelinks = ', '.join(
745 [lnk(rev, repo_name) for rev in revs[revs_limit:]]
745 [lnk(rev, repo_name) for rev in revs[revs_limit:]]
746 )
746 )
747
747
748 if len(revs_ids) > revs_top_limit:
748 if len(revs_ids) > revs_top_limit:
749 morelinks += ', ...'
749 morelinks += ', ...'
750
750
751 cs_links.append(html_tmpl % (uniq_id, morelinks))
751 cs_links.append(html_tmpl % (uniq_id, morelinks))
752 if len(revs) > 1:
752 if len(revs) > 1:
753 cs_links.append(compare_view)
753 cs_links.append(compare_view)
754 return ''.join(cs_links)
754 return ''.join(cs_links)
755
755
756 def get_fork_name():
756 def get_fork_name():
757 repo_name = action_params
757 repo_name = action_params
758 url_ = url('summary_home', repo_name=repo_name)
758 url_ = url('summary_home', repo_name=repo_name)
759 return _('Fork name %s') % link_to(action_params, url_)
759 return _('Fork name %s') % link_to(action_params, url_)
760
760
761 def get_user_name():
761 def get_user_name():
762 user_name = action_params
762 user_name = action_params
763 return user_name
763 return user_name
764
764
765 def get_users_group():
765 def get_users_group():
766 group_name = action_params
766 group_name = action_params
767 return group_name
767 return group_name
768
768
769 def get_pull_request():
769 def get_pull_request():
770 from kallithea.model.db import PullRequest
770 from kallithea.model.db import PullRequest
771 pull_request_id = action_params
771 pull_request_id = action_params
772 nice_id = PullRequest.make_nice_id(pull_request_id)
772 nice_id = PullRequest.make_nice_id(pull_request_id)
773
773
774 deleted = user_log.repository is None
774 deleted = user_log.repository is None
775 if deleted:
775 if deleted:
776 repo_name = user_log.repository_name
776 repo_name = user_log.repository_name
777 else:
777 else:
778 repo_name = user_log.repository.repo_name
778 repo_name = user_log.repository.repo_name
779
779
780 return link_to(_('Pull request %s') % nice_id,
780 return link_to(_('Pull request %s') % nice_id,
781 url('pullrequest_show', repo_name=repo_name,
781 url('pullrequest_show', repo_name=repo_name,
782 pull_request_id=pull_request_id))
782 pull_request_id=pull_request_id))
783
783
784 def get_archive_name():
784 def get_archive_name():
785 archive_name = action_params
785 archive_name = action_params
786 return archive_name
786 return archive_name
787
787
788 # action : translated str, callback(extractor), icon
788 # action : translated str, callback(extractor), icon
789 action_map = {
789 action_map = {
790 'user_deleted_repo': (_('[deleted] repository'),
790 'user_deleted_repo': (_('[deleted] repository'),
791 None, 'icon-trashcan'),
791 None, 'icon-trashcan'),
792 'user_created_repo': (_('[created] repository'),
792 'user_created_repo': (_('[created] repository'),
793 None, 'icon-plus'),
793 None, 'icon-plus'),
794 'user_created_fork': (_('[created] repository as fork'),
794 'user_created_fork': (_('[created] repository as fork'),
795 None, 'icon-fork'),
795 None, 'icon-fork'),
796 'user_forked_repo': (_('[forked] repository'),
796 'user_forked_repo': (_('[forked] repository'),
797 get_fork_name, 'icon-fork'),
797 get_fork_name, 'icon-fork'),
798 'user_updated_repo': (_('[updated] repository'),
798 'user_updated_repo': (_('[updated] repository'),
799 None, 'icon-pencil'),
799 None, 'icon-pencil'),
800 'user_downloaded_archive': (_('[downloaded] archive from repository'),
800 'user_downloaded_archive': (_('[downloaded] archive from repository'),
801 get_archive_name, 'icon-download-cloud'),
801 get_archive_name, 'icon-download-cloud'),
802 'admin_deleted_repo': (_('[delete] repository'),
802 'admin_deleted_repo': (_('[delete] repository'),
803 None, 'icon-trashcan'),
803 None, 'icon-trashcan'),
804 'admin_created_repo': (_('[created] repository'),
804 'admin_created_repo': (_('[created] repository'),
805 None, 'icon-plus'),
805 None, 'icon-plus'),
806 'admin_forked_repo': (_('[forked] repository'),
806 'admin_forked_repo': (_('[forked] repository'),
807 None, 'icon-fork'),
807 None, 'icon-fork'),
808 'admin_updated_repo': (_('[updated] repository'),
808 'admin_updated_repo': (_('[updated] repository'),
809 None, 'icon-pencil'),
809 None, 'icon-pencil'),
810 'admin_created_user': (_('[created] user'),
810 'admin_created_user': (_('[created] user'),
811 get_user_name, 'icon-user'),
811 get_user_name, 'icon-user'),
812 'admin_updated_user': (_('[updated] user'),
812 'admin_updated_user': (_('[updated] user'),
813 get_user_name, 'icon-user'),
813 get_user_name, 'icon-user'),
814 'admin_created_users_group': (_('[created] user group'),
814 'admin_created_users_group': (_('[created] user group'),
815 get_users_group, 'icon-pencil'),
815 get_users_group, 'icon-pencil'),
816 'admin_updated_users_group': (_('[updated] user group'),
816 'admin_updated_users_group': (_('[updated] user group'),
817 get_users_group, 'icon-pencil'),
817 get_users_group, 'icon-pencil'),
818 'user_commented_revision': (_('[commented] on revision in repository'),
818 'user_commented_revision': (_('[commented] on revision in repository'),
819 get_cs_links, 'icon-comment'),
819 get_cs_links, 'icon-comment'),
820 'user_commented_pull_request': (_('[commented] on pull request for'),
820 'user_commented_pull_request': (_('[commented] on pull request for'),
821 get_pull_request, 'icon-comment'),
821 get_pull_request, 'icon-comment'),
822 'user_closed_pull_request': (_('[closed] pull request for'),
822 'user_closed_pull_request': (_('[closed] pull request for'),
823 get_pull_request, 'icon-ok'),
823 get_pull_request, 'icon-ok'),
824 'push': (_('[pushed] into'),
824 'push': (_('[pushed] into'),
825 get_cs_links, 'icon-move-up'),
825 get_cs_links, 'icon-move-up'),
826 'push_local': (_('[committed via Kallithea] into repository'),
826 'push_local': (_('[committed via Kallithea] into repository'),
827 get_cs_links, 'icon-pencil'),
827 get_cs_links, 'icon-pencil'),
828 'push_remote': (_('[pulled from remote] into repository'),
828 'push_remote': (_('[pulled from remote] into repository'),
829 get_cs_links, 'icon-move-up'),
829 get_cs_links, 'icon-move-up'),
830 'pull': (_('[pulled] from'),
830 'pull': (_('[pulled] from'),
831 None, 'icon-move-down'),
831 None, 'icon-move-down'),
832 'started_following_repo': (_('[started following] repository'),
832 'started_following_repo': (_('[started following] repository'),
833 None, 'icon-heart'),
833 None, 'icon-heart'),
834 'stopped_following_repo': (_('[stopped following] repository'),
834 'stopped_following_repo': (_('[stopped following] repository'),
835 None, 'icon-heart-empty'),
835 None, 'icon-heart-empty'),
836 }
836 }
837
837
838 action_str = action_map.get(action, action)
838 action_str = action_map.get(action, action)
839 if feed:
839 if feed:
840 action = action_str[0].replace('[', '').replace(']', '')
840 action = action_str[0].replace('[', '').replace(']', '')
841 else:
841 else:
842 action = action_str[0] \
842 action = action_str[0] \
843 .replace('[', '<b>') \
843 .replace('[', '<b>') \
844 .replace(']', '</b>')
844 .replace(']', '</b>')
845
845
846 action_params_func = lambda: ""
846 action_params_func = lambda: ""
847
847
848 if callable(action_str[1]):
848 if callable(action_str[1]):
849 action_params_func = action_str[1]
849 action_params_func = action_str[1]
850
850
851 def action_parser_icon():
851 def action_parser_icon():
852 action = user_log.action
852 action = user_log.action
853 action_params = None
853 action_params = None
854 x = action.split(':')
854 x = action.split(':')
855
855
856 if len(x) > 1:
856 if len(x) > 1:
857 action, action_params = x
857 action, action_params = x
858
858
859 ico = action_map.get(action, ['', '', ''])[2]
859 ico = action_map.get(action, ['', '', ''])[2]
860 html = """<i class="%s"></i>""" % ico
860 html = """<i class="%s"></i>""" % ico
861 return literal(html)
861 return literal(html)
862
862
863 # returned callbacks we need to call to get
863 # returned callbacks we need to call to get
864 return [lambda: literal(action), action_params_func, action_parser_icon]
864 return [lambda: literal(action), action_params_func, action_parser_icon]
865
865
866
866
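action_parser() returns callbacks instead of finished strings so that the potentially
expensive changeset parsing in get_cs_links() only runs when the journal entry is
actually rendered. A hedged sketch of a caller (user_log is assumed to be a journal
UserLog row; not part of the changeset):

    action_cb, params_cb, icon_cb = action_parser(user_log, feed=False, parse_cs=False)
    html = icon_cb() + ' ' + action_cb() + ' ' + params_cb()   # evaluated lazily, only here
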
867 #==============================================================================
867 #==============================================================================
868 # GRAVATAR URL
868 # GRAVATAR URL
869 #==============================================================================
869 #==============================================================================
870 def gravatar_div(email_address, cls='', size=30, **div_attributes):
870 def gravatar_div(email_address, cls='', size=30, **div_attributes):
871 """Return an html literal with a span around a gravatar if they are enabled.
871 """Return an html literal with a span around a gravatar if they are enabled.
872 Extra keyword parameters starting with 'div_' will get the prefix removed
872 Extra keyword parameters starting with 'div_' will get the prefix removed
873 and '_' changed to '-' and be used as attributes on the div. The default
873 and '_' changed to '-' and be used as attributes on the div. The default
874 class is 'gravatar'.
874 class is 'gravatar'.
875 """
875 """
876 from tg import tmpl_context as c
876 from tg import tmpl_context as c
877 if not c.visual.use_gravatar:
877 if not c.visual.use_gravatar:
878 return ''
878 return ''
879 if 'div_class' not in div_attributes:
879 if 'div_class' not in div_attributes:
880 div_attributes['div_class'] = "gravatar"
880 div_attributes['div_class'] = "gravatar"
881 attributes = []
881 attributes = []
882 for k, v in sorted(div_attributes.items()):
882 for k, v in sorted(div_attributes.items()):
883 assert k.startswith('div_'), k
883 assert k.startswith('div_'), k
884 attributes.append(' %s="%s"' % (k[4:].replace('_', '-'), escape(v)))
884 attributes.append(' %s="%s"' % (k[4:].replace('_', '-'), escape(v)))
885 return literal("""<span%s>%s</span>""" %
885 return literal("""<span%s>%s</span>""" %
886 (''.join(attributes),
886 (''.join(attributes),
887 gravatar(email_address, cls=cls, size=size)))
887 gravatar(email_address, cls=cls, size=size)))
888
888
889
889
890 def gravatar(email_address, cls='', size=30):
890 def gravatar(email_address, cls='', size=30):
891 """return html element of the gravatar
891 """return html element of the gravatar
892
892
893 This method will return an icon element whose background image has double the
893 This method will return an icon element whose background image has double the
894 requested resolution (for retina screens). If the url returned from gravatar_url
894 requested resolution (for retina screens). If the url returned from gravatar_url
895 is empty then we fall back to using a plain font icon.
895 is empty then we fall back to using a plain font icon.
896
896
897 """
897 """
898 from tg import tmpl_context as c
898 from tg import tmpl_context as c
899 if not c.visual.use_gravatar:
899 if not c.visual.use_gravatar:
900 return ''
900 return ''
901
901
902 src = gravatar_url(email_address, size * 2)
902 src = gravatar_url(email_address, size * 2)
903
903
904 if src:
904 if src:
905 # here it makes sense to use style="width: ..." (instead of, say, a
905 # here it makes sense to use style="width: ..." (instead of, say, a
906 # stylesheet) because we are using this to generate a high-res (retina) size
906 # stylesheet) because we are using this to generate a high-res (retina) size
907 html = ('<i class="icon-gravatar {cls}"'
907 html = ('<i class="icon-gravatar {cls}"'
908 ' style="font-size: {size}px;background-size: {size}px;background-image: url(\'{src}\')"'
908 ' style="font-size: {size}px;background-size: {size}px;background-image: url(\'{src}\')"'
909 '></i>').format(cls=cls, size=size, src=src)
909 '></i>').format(cls=cls, size=size, src=src)
910
910
911 else:
911 else:
912 # if src is empty then there was no gravatar, so we use a font icon
912 # if src is empty then there was no gravatar, so we use a font icon
913 html = ("""<i class="icon-user {cls}" style="font-size: {size}px;"></i>"""
913 html = ("""<i class="icon-user {cls}" style="font-size: {size}px;"></i>"""
914 .format(cls=cls, size=size, src=src))
914 .format(cls=cls, size=size, src=src))
915
915
916 return literal(html)
916 return literal(html)
917
917
918
918
919 def gravatar_url(email_address, size=30, default=''):
919 def gravatar_url(email_address, size=30, default=''):
920 # doh, we need to re-import those to mock it later
920 # doh, we need to re-import those to mock it later
921 from kallithea.config.routing import url
921 from kallithea.config.routing import url
922 from kallithea.model.db import User
922 from kallithea.model.db import User
923 from tg import tmpl_context as c
923 from tg import tmpl_context as c
924 if not c.visual.use_gravatar:
924 if not c.visual.use_gravatar:
925 return ""
925 return ""
926
926
927 _def = 'anonymous@kallithea-scm.org' # default gravatar
927 _def = 'anonymous@kallithea-scm.org' # default gravatar
928 email_address = email_address or _def
928 email_address = email_address or _def
929
929
930 if email_address == _def:
930 if email_address == _def:
931 return default
931 return default
932
932
933 parsed_url = urllib.parse.urlparse(url.current(qualified=True))
933 parsed_url = urllib.parse.urlparse(url.current(qualified=True))
934 url = (c.visual.gravatar_url or User.DEFAULT_GRAVATAR_URL) \
934 url = (c.visual.gravatar_url or User.DEFAULT_GRAVATAR_URL) \
935 .replace('{email}', email_address) \
935 .replace('{email}', email_address) \
936 .replace('{md5email}', hashlib.md5(safe_bytes(email_address).lower()).hexdigest()) \
936 .replace('{md5email}', hashlib.md5(safe_bytes(email_address).lower()).hexdigest()) \
937 .replace('{netloc}', parsed_url.netloc) \
937 .replace('{netloc}', parsed_url.netloc) \
938 .replace('{scheme}', parsed_url.scheme) \
938 .replace('{scheme}', parsed_url.scheme) \
939 .replace('{size}', str(size))
939 .replace('{size}', str(size))
940 return url
940 return url
941
941
942
942
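gravatar_url() expands its template purely by string replacement. A sketch of the
expansion with invented values (the real default template is User.DEFAULT_GRAVATAR_URL
and may differ from the one shown):

    import hashlib

    template = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
    email = 'jd@example.com'
    expanded = (template
                .replace('{md5email}', hashlib.md5(email.lower().encode('utf-8')).hexdigest())
                .replace('{size}', str(60)))   # gravatar() asks for size * 2 for retina displays
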
943 def changed_tooltip(nodes):
943 def changed_tooltip(nodes):
944 """
944 """
945 Generates an html string for the changed nodes in the changeset page.
945 Generates an html string for the changed nodes in the changeset page.
946 It limits the output to 30 entries
946 It limits the output to 30 entries
947
947
948 :param nodes: LazyNodesGenerator
948 :param nodes: LazyNodesGenerator
949 """
949 """
950 if nodes:
950 if nodes:
951 pref = ': <br/> '
951 pref = ': <br/> '
952 suf = ''
952 suf = ''
953 if len(nodes) > 30:
953 if len(nodes) > 30:
954 suf = '<br/>' + _(' and %s more') % (len(nodes) - 30)
954 suf = '<br/>' + _(' and %s more') % (len(nodes) - 30)
955 return literal(pref + '<br/> '.join([x.path
955 return literal(pref + '<br/> '.join([x.path
956 for x in nodes[:30]]) + suf)
956 for x in nodes[:30]]) + suf)
957 else:
957 else:
958 return ': ' + _('No files')
958 return ': ' + _('No files')
959
959
960
960
961 def fancy_file_stats(stats):
961 def fancy_file_stats(stats):
962 """
962 """
963 Displays a fancy two-colored bar for the number of added/deleted
963 Displays a fancy two-colored bar for the number of added/deleted
964 lines of code in a file
964 lines of code in a file
965
965
966 :param stats: two element list of added/deleted lines of code
966 :param stats: two element list of added/deleted lines of code
967 """
967 """
968 from kallithea.lib.diffs import NEW_FILENODE, DEL_FILENODE, \
968 from kallithea.lib.diffs import NEW_FILENODE, DEL_FILENODE, \
969 MOD_FILENODE, RENAMED_FILENODE, CHMOD_FILENODE, BIN_FILENODE
969 MOD_FILENODE, RENAMED_FILENODE, CHMOD_FILENODE, BIN_FILENODE
970
970
971 a, d = stats['added'], stats['deleted']
971 a, d = stats['added'], stats['deleted']
972 width = 100
972 width = 100
973
973
974 if stats['binary']:
974 if stats['binary']:
975 # binary mode
975 # binary mode
976 lbl = ''
976 lbl = ''
977 bin_op = 1
977 bin_op = 1
978
978
979 if BIN_FILENODE in stats['ops']:
979 if BIN_FILENODE in stats['ops']:
980 lbl = 'bin+'
980 lbl = 'bin+'
981
981
982 if NEW_FILENODE in stats['ops']:
982 if NEW_FILENODE in stats['ops']:
983 lbl += _('new file')
983 lbl += _('new file')
984 bin_op = NEW_FILENODE
984 bin_op = NEW_FILENODE
985 elif MOD_FILENODE in stats['ops']:
985 elif MOD_FILENODE in stats['ops']:
986 lbl += _('mod')
986 lbl += _('mod')
987 bin_op = MOD_FILENODE
987 bin_op = MOD_FILENODE
988 elif DEL_FILENODE in stats['ops']:
988 elif DEL_FILENODE in stats['ops']:
989 lbl += _('del')
989 lbl += _('del')
990 bin_op = DEL_FILENODE
990 bin_op = DEL_FILENODE
991 elif RENAMED_FILENODE in stats['ops']:
991 elif RENAMED_FILENODE in stats['ops']:
992 lbl += _('rename')
992 lbl += _('rename')
993 bin_op = RENAMED_FILENODE
993 bin_op = RENAMED_FILENODE
994
994
995 # chmod can go with other operations
995 # chmod can go with other operations
996 if CHMOD_FILENODE in stats['ops']:
996 if CHMOD_FILENODE in stats['ops']:
997 _org_lbl = _('chmod')
997 _org_lbl = _('chmod')
998 lbl += _org_lbl if lbl.endswith('+') else '+%s' % _org_lbl
998 lbl += _org_lbl if lbl.endswith('+') else '+%s' % _org_lbl
999
999
1000 #import ipdb;ipdb.set_trace()
1000 #import ipdb;ipdb.set_trace()
1001 b_d = '<div class="bin bin%s progress-bar" style="width:100%%">%s</div>' % (bin_op, lbl)
1001 b_d = '<div class="bin bin%s progress-bar" style="width:100%%">%s</div>' % (bin_op, lbl)
1002 b_a = '<div class="bin bin1" style="width:0%"></div>'
1002 b_a = '<div class="bin bin1" style="width:0%"></div>'
1003 return literal('<div style="width:%spx" class="progress">%s%s</div>' % (width, b_a, b_d))
1003 return literal('<div style="width:%spx" class="progress">%s%s</div>' % (width, b_a, b_d))
1004
1004
1005 t = stats['added'] + stats['deleted']
1005 t = stats['added'] + stats['deleted']
1006 unit = float(width) / (t or 1)
1006 unit = float(width) / (t or 1)
1007
1007
1008 # needs > 9% of width to be visible or 0 to be hidden
1008 # needs > 9% of width to be visible or 0 to be hidden
1009 a_p = max(9, unit * a) if a > 0 else 0
1009 a_p = max(9, unit * a) if a > 0 else 0
1010 d_p = max(9, unit * d) if d > 0 else 0
1010 d_p = max(9, unit * d) if d > 0 else 0
1011 p_sum = a_p + d_p
1011 p_sum = a_p + d_p
1012
1012
1013 if p_sum > width:
1013 if p_sum > width:
1014 # adjust the percentage to be == 100% since we adjusted to 9
1014 # adjust the percentage to be == 100% since we adjusted to 9
1015 if a_p > d_p:
1015 if a_p > d_p:
1016 a_p = a_p - (p_sum - width)
1016 a_p = a_p - (p_sum - width)
1017 else:
1017 else:
1018 d_p = d_p - (p_sum - width)
1018 d_p = d_p - (p_sum - width)
1019
1019
1020 a_v = a if a > 0 else ''
1020 a_v = a if a > 0 else ''
1021 d_v = d if d > 0 else ''
1021 d_v = d if d > 0 else ''
1022
1022
1023 d_a = '<div class="added progress-bar" style="width:%s%%">%s</div>' % (
1023 d_a = '<div class="added progress-bar" style="width:%s%%">%s</div>' % (
1024 a_p, a_v
1024 a_p, a_v
1025 )
1025 )
1026 d_d = '<div class="deleted progress-bar" style="width:%s%%">%s</div>' % (
1026 d_d = '<div class="deleted progress-bar" style="width:%s%%">%s</div>' % (
1027 d_p, d_v
1027 d_p, d_v
1028 )
1028 )
1029 return literal('<div class="progress" style="width:%spx">%s%s</div>' % (width, d_a, d_d))
1029 return literal('<div class="progress" style="width:%spx">%s%s</div>' % (width, d_a, d_d))
1030
1030
1031
1031
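The width arithmetic in fancy_file_stats() maps the added/deleted counts onto a
100 px bar and clamps every non-zero part to at least 9 px so it stays visible.
Worked numbers (illustrative only):

    # 30 added, 10 deleted: unit = 100 / 40 = 2.5 -> a_p = max(9, 75) = 75, d_p = max(9, 25) = 25
    # 1 added, 99 deleted:  unit = 1.0 -> a_p = max(9, 1) = 9, d_p = 99, p_sum = 108 > 100,
    #                       so d_p is reduced by 8 to 91 and the bar stays 100 px wide
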
1032 _URLIFY_RE = re.compile(r'''
1032 _URLIFY_RE = re.compile(r'''
1033 # URL markup
1033 # URL markup
1034 (?P<url>%s) |
1034 (?P<url>%s) |
1035 # @mention markup
1035 # @mention markup
1036 (?P<mention>%s) |
1036 (?P<mention>%s) |
1037 # Changeset hash markup
1037 # Changeset hash markup
1038 (?<!\w|[-_])
1038 (?<!\w|[-_])
1039 (?P<hash>[0-9a-f]{12,40})
1039 (?P<hash>[0-9a-f]{12,40})
1040 (?!\w|[-_]) |
1040 (?!\w|[-_]) |
1041 # Markup of *bold text*
1041 # Markup of *bold text*
1042 (?:
1042 (?:
1043 (?:^|(?<=\s))
1043 (?:^|(?<=\s))
1044 (?P<bold> [*] (?!\s) [^*\n]* (?<!\s) [*] )
1044 (?P<bold> [*] (?!\s) [^*\n]* (?<!\s) [*] )
1045 (?![*\w])
1045 (?![*\w])
1046 ) |
1046 ) |
1047 # "Stylize" markup
1047 # "Stylize" markup
1048 \[see\ \=&gt;\ *(?P<seen>[a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\] |
1048 \[see\ \=&gt;\ *(?P<seen>[a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\] |
1049 \[license\ \=&gt;\ *(?P<license>[a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\] |
1049 \[license\ \=&gt;\ *(?P<license>[a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\] |
1050 \[(?P<tagtype>requires|recommends|conflicts|base)\ \=&gt;\ *(?P<tagvalue>[a-zA-Z0-9\-\/]*)\] |
1050 \[(?P<tagtype>requires|recommends|conflicts|base)\ \=&gt;\ *(?P<tagvalue>[a-zA-Z0-9\-\/]*)\] |
1051 \[(?:lang|language)\ \=&gt;\ *(?P<lang>[a-zA-Z\-\/\#\+]*)\] |
1051 \[(?:lang|language)\ \=&gt;\ *(?P<lang>[a-zA-Z\-\/\#\+]*)\] |
1052 \[(?P<tag>[a-z]+)\]
1052 \[(?P<tag>[a-z]+)\]
1053 ''' % (url_re.pattern, MENTIONS_REGEX.pattern),
1053 ''' % (url_re.pattern, MENTIONS_REGEX.pattern),
1054 re.VERBOSE | re.MULTILINE | re.IGNORECASE)
1054 re.VERBOSE | re.MULTILINE | re.IGNORECASE)
1055
1055
1056
1056
1057 def urlify_text(s, repo_name=None, link_=None, truncate=None, stylize=False, truncatef=truncate):
1057 def urlify_text(s, repo_name=None, link_=None, truncate=None, stylize=False, truncatef=truncate):
1058 """
1058 """
1059 Parses the given text message and makes literal html with markup.
1059 Parses the given text message and makes literal html with markup.
1060 The text will be truncated to the specified length.
1060 The text will be truncated to the specified length.
1061 Hashes are turned into changeset links to the specified repository.
1061 Hashes are turned into changeset links to the specified repository.
1062 URLs link to what they say.
1062 URLs link to what they say.
1063 Issue references are linked to the configured issue server.
1063 Issue references are linked to the configured issue server.
1064 If link_ is provided, all text not already linking somewhere will link there.
1064 If link_ is provided, all text not already linking somewhere will link there.
1065 """
1065 """
1066
1066
1067 def _replace(match_obj):
1067 def _replace(match_obj):
1068 url = match_obj.group('url')
1068 url = match_obj.group('url')
1069 if url is not None:
1069 if url is not None:
1070 return '<a href="%(url)s">%(url)s</a>' % {'url': url}
1070 return '<a href="%(url)s">%(url)s</a>' % {'url': url}
1071 mention = match_obj.group('mention')
1071 mention = match_obj.group('mention')
1072 if mention is not None:
1072 if mention is not None:
1073 return '<b>%s</b>' % mention
1073 return '<b>%s</b>' % mention
1074 hash_ = match_obj.group('hash')
1074 hash_ = match_obj.group('hash')
1075 if hash_ is not None and repo_name is not None:
1075 if hash_ is not None and repo_name is not None:
1076 from kallithea.config.routing import url # doh, we need to re-import url to mock it later
1076 from kallithea.config.routing import url # doh, we need to re-import url to mock it later
1077 return '<a class="changeset_hash" href="%(url)s">%(hash)s</a>' % {
1077 return '<a class="changeset_hash" href="%(url)s">%(hash)s</a>' % {
1078 'url': url('changeset_home', repo_name=repo_name, revision=hash_),
1078 'url': url('changeset_home', repo_name=repo_name, revision=hash_),
1079 'hash': hash_,
1079 'hash': hash_,
1080 }
1080 }
1081 bold = match_obj.group('bold')
1081 bold = match_obj.group('bold')
1082 if bold is not None:
1082 if bold is not None:
1083 return '<b>*%s*</b>' % _urlify(bold[1:-1])
1083 return '<b>*%s*</b>' % _urlify(bold[1:-1])
1084 if stylize:
1084 if stylize:
1085 seen = match_obj.group('seen')
1085 seen = match_obj.group('seen')
1086 if seen:
1086 if seen:
1087 return '<div class="label label-meta" data-tag="see">see =&gt; %s</div>' % seen
1087 return '<div class="label label-meta" data-tag="see">see =&gt; %s</div>' % seen
1088 license = match_obj.group('license')
1088 license = match_obj.group('license')
1089 if license:
1089 if license:
1090 return '<div class="label label-meta" data-tag="license"><a href="http://www.opensource.org/licenses/%s">%s</a></div>' % (license, license)
1090 return '<div class="label label-meta" data-tag="license"><a href="http://www.opensource.org/licenses/%s">%s</a></div>' % (license, license)
1091 tagtype = match_obj.group('tagtype')
1091 tagtype = match_obj.group('tagtype')
1092 if tagtype:
1092 if tagtype:
1093 tagvalue = match_obj.group('tagvalue')
1093 tagvalue = match_obj.group('tagvalue')
1094 return '<div class="label label-meta" data-tag="%s">%s =&gt; <a href="/%s">%s</a></div>' % (tagtype, tagtype, tagvalue, tagvalue)
1094 return '<div class="label label-meta" data-tag="%s">%s =&gt; <a href="/%s">%s</a></div>' % (tagtype, tagtype, tagvalue, tagvalue)
1095 lang = match_obj.group('lang')
1095 lang = match_obj.group('lang')
1096 if lang:
1096 if lang:
1097 return '<div class="label label-meta" data-tag="lang">%s</div>' % lang
1097 return '<div class="label label-meta" data-tag="lang">%s</div>' % lang
1098 tag = match_obj.group('tag')
1098 tag = match_obj.group('tag')
1099 if tag:
1099 if tag:
1100 return '<div class="label label-meta" data-tag="%s">%s</div>' % (tag, tag)
1100 return '<div class="label label-meta" data-tag="%s">%s</div>' % (tag, tag)
1101 return match_obj.group(0)
1101 return match_obj.group(0)
1102
1102
1103 def _urlify(s):
1103 def _urlify(s):
1104 """
1104 """
1105 Extract urls from text and make html links out of them
1105 Extract urls from text and make html links out of them
1106 """
1106 """
1107 return _URLIFY_RE.sub(_replace, s)
1107 return _URLIFY_RE.sub(_replace, s)
1108
1108
1109 if truncate is None:
1109 if truncate is None:
1110 s = s.rstrip()
1110 s = s.rstrip()
1111 else:
1111 else:
1112 s = truncatef(s, truncate, whole_word=True)
1112 s = truncatef(s, truncate, whole_word=True)
1113 s = html_escape(s)
1113 s = html_escape(s)
1114 s = _urlify(s)
1114 s = _urlify(s)
1115 if repo_name is not None:
1115 if repo_name is not None:
1116 s = urlify_issues(s, repo_name)
1116 s = urlify_issues(s, repo_name)
1117 if link_ is not None:
1117 if link_ is not None:
1118 # make href around everything that isn't a href already
1118 # make href around everything that isn't a href already
1119 s = linkify_others(s, link_)
1119 s = linkify_others(s, link_)
1120 s = s.replace('\r\n', '<br/>').replace('\n', '<br/>')
1120 s = s.replace('\r\n', '<br/>').replace('\n', '<br/>')
1121 # Turn HTML5 into more valid HTML4 as required by some mail readers.
1121 # Turn HTML5 into more valid HTML4 as required by some mail readers.
1122 # (This is not done in one step in html_escape, because character codes like
1122 # (This is not done in one step in html_escape, because character codes like
1123 # &#123; risk to be seen as an issue reference due to the presence of '#'.)
1123 # &#123; risk to be seen as an issue reference due to the presence of '#'.)
1124 s = s.replace("&apos;", "&#39;")
1124 s = s.replace("&apos;", "&#39;")
1125 return literal(s)
1125 return literal(s)
1126
1126
1127
1127
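urlify_text() escapes first and only then runs a single _URLIFY_RE pass, so the
substitutions never see raw input. A hedged example of typical output (the repository
name and hash are invented, and the changeset URL is abbreviated):

    urlify_text('fix *login* bug, see deadbeefcafe', repo_name='foo/bar')
    # -> literal('fix <b>*login*</b> bug, see '
    #            '<a class="changeset_hash" href=".../foo/bar/changeset/deadbeefcafe">deadbeefcafe</a>')
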
1128 def linkify_others(t, l):
1128 def linkify_others(t, l):
1129 """Add a default link to html with links.
1129 """Add a default link to html with links.
1130 HTML doesn't allow nesting of links, so the outer link must be broken up
1130 HTML doesn't allow nesting of links, so the outer link must be broken up
1131 into pieces to give space for the other links.
1131 into pieces to give space for the other links.
1132 """
1132 """
1133 urls = re.compile(r'(\<a.*?\<\/a\>)',)
1133 urls = re.compile(r'(\<a.*?\<\/a\>)',)
1134 links = []
1134 links = []
1135 for e in urls.split(t):
1135 for e in urls.split(t):
1136 if e.strip() and not urls.match(e):
1136 if e.strip() and not urls.match(e):
1137 links.append('<a class="message-link" href="%s">%s</a>' % (l, e))
1137 links.append('<a class="message-link" href="%s">%s</a>' % (l, e))
1138 else:
1138 else:
1139 links.append(e)
1139 links.append(e)
1140
1140
1141 return ''.join(links)
1141 return ''.join(links)
1142
1142
1143
1143
1144 # Global variable that will hold the actual urlify_issues function body.
1144 # Global variable that will hold the actual urlify_issues function body.
1145 # Will be set on first use when the global configuration has been read.
1145 # Will be set on first use when the global configuration has been read.
1146 _urlify_issues_f = None
1146 _urlify_issues_f = None
1147
1147
1148
1148
1149 def urlify_issues(newtext, repo_name):
1149 def urlify_issues(newtext, repo_name):
1150 """Urlify issue references according to .ini configuration"""
1150 """Urlify issue references according to .ini configuration"""
1151 global _urlify_issues_f
1151 global _urlify_issues_f
1152 if _urlify_issues_f is None:
1152 if _urlify_issues_f is None:
1153 from kallithea import CONFIG
1153 from kallithea import CONFIG
1154 from kallithea.model.db import URL_SEP
1154 from kallithea.model.db import URL_SEP
1155 assert CONFIG['sqlalchemy.url'] # make sure config has been loaded
1155 assert CONFIG['sqlalchemy.url'] # make sure config has been loaded
1156
1156
1157 # Build chain of urlify functions, starting with not doing any transformation
1157 # Build chain of urlify functions, starting with not doing any transformation
1158 tmp_urlify_issues_f = lambda s: s
1158 tmp_urlify_issues_f = lambda s: s
1159
1159
1160 issue_pat_re = re.compile(r'issue_pat(.*)')
1160 issue_pat_re = re.compile(r'issue_pat(.*)')
1161 for k in CONFIG:
1161 for k in CONFIG:
1162 # Find all issue_pat* settings that also have corresponding server_link and prefix configuration
1162 # Find all issue_pat* settings that also have corresponding server_link and prefix configuration
1163 m = issue_pat_re.match(k)
1163 m = issue_pat_re.match(k)
1164 if m is None:
1164 if m is None:
1165 continue
1165 continue
1166 suffix = m.group(1)
1166 suffix = m.group(1)
1167 issue_pat = CONFIG.get(k)
1167 issue_pat = CONFIG.get(k)
1168 issue_server_link = CONFIG.get('issue_server_link%s' % suffix)
1168 issue_server_link = CONFIG.get('issue_server_link%s' % suffix)
1169 issue_sub = CONFIG.get('issue_sub%s' % suffix)
1169 issue_sub = CONFIG.get('issue_sub%s' % suffix)
1170 if not issue_pat or not issue_server_link or issue_sub is None: # issue_sub can be empty but should be present
1170 if not issue_pat or not issue_server_link or issue_sub is None: # issue_sub can be empty but should be present
1171 log.error('skipping incomplete issue pattern %r: %r -> %r %r', suffix, issue_pat, issue_server_link, issue_sub)
1171 log.error('skipping incomplete issue pattern %r: %r -> %r %r', suffix, issue_pat, issue_server_link, issue_sub)
1172 continue
1172 continue
1173
1173
1174 # Wrap tmp_urlify_issues_f with substitution of this pattern, while making sure all loop variables (and compiled regexes) are bound
1174 # Wrap tmp_urlify_issues_f with substitution of this pattern, while making sure all loop variables (and compiled regexes) are bound
1175 try:
1175 try:
1176 issue_re = re.compile(issue_pat)
1176 issue_re = re.compile(issue_pat)
1177 except re.error as e:
1177 except re.error as e:
1178 log.error('skipping invalid issue pattern %r: %r -> %r %r. Error: %s', suffix, issue_pat, issue_server_link, issue_sub, str(e))
1178 log.error('skipping invalid issue pattern %r: %r -> %r %r. Error: %s', suffix, issue_pat, issue_server_link, issue_sub, str(e))
1179 continue
1179 continue
1180
1180
1181 log.debug('issue pattern %r: %r -> %r %r', suffix, issue_pat, issue_server_link, issue_sub)
1181 log.debug('issue pattern %r: %r -> %r %r', suffix, issue_pat, issue_server_link, issue_sub)
1182
1182
1183 def issues_replace(match_obj,
1183 def issues_replace(match_obj,
1184 issue_server_link=issue_server_link, issue_sub=issue_sub):
1184 issue_server_link=issue_server_link, issue_sub=issue_sub):
1185 try:
1185 try:
1186 issue_url = match_obj.expand(issue_server_link)
1186 issue_url = match_obj.expand(issue_server_link)
1187 except (IndexError, re.error) as e:
1187 except (IndexError, re.error) as e:
1188 log.error('invalid issue_url setting %r -> %r %r. Error: %s', issue_pat, issue_server_link, issue_sub, str(e))
1188 log.error('invalid issue_url setting %r -> %r %r. Error: %s', issue_pat, issue_server_link, issue_sub, str(e))
1189 issue_url = issue_server_link
1189 issue_url = issue_server_link
1190 issue_url = issue_url.replace('{repo}', repo_name)
1190 issue_url = issue_url.replace('{repo}', repo_name)
1191 issue_url = issue_url.replace('{repo_name}', repo_name.split(URL_SEP)[-1])
1191 issue_url = issue_url.replace('{repo_name}', repo_name.split(URL_SEP)[-1])
1192 # if issue_sub is empty use the matched issue reference verbatim
1192 # if issue_sub is empty use the matched issue reference verbatim
1193 if not issue_sub:
1193 if not issue_sub:
1194 issue_text = match_obj.group()
1194 issue_text = match_obj.group()
1195 else:
1195 else:
1196 try:
1196 try:
1197 issue_text = match_obj.expand(issue_sub)
1197 issue_text = match_obj.expand(issue_sub)
1198 except (IndexError, re.error) as e:
1198 except (IndexError, re.error) as e:
1199 log.error('invalid issue_sub setting %r -> %r %r. Error: %s', issue_pat, issue_server_link, issue_sub, str(e))
1199 log.error('invalid issue_sub setting %r -> %r %r. Error: %s', issue_pat, issue_server_link, issue_sub, str(e))
1200 issue_text = match_obj.group()
1200 issue_text = match_obj.group()
1201
1201
1202 return (
1202 return (
1203 '<a class="issue-tracker-link" href="%(url)s">'
1203 '<a class="issue-tracker-link" href="%(url)s">'
1204 '%(text)s'
1204 '%(text)s'
1205 '</a>'
1205 '</a>'
1206 ) % {
1206 ) % {
1207 'url': issue_url,
1207 'url': issue_url,
1208 'text': issue_text,
1208 'text': issue_text,
1209 }
1209 }
1210 tmp_urlify_issues_f = (lambda s,
1210 tmp_urlify_issues_f = (lambda s,
1211 issue_re=issue_re, issues_replace=issues_replace, chain_f=tmp_urlify_issues_f:
1211 issue_re=issue_re, issues_replace=issues_replace, chain_f=tmp_urlify_issues_f:
1212 issue_re.sub(issues_replace, chain_f(s)))
1212 issue_re.sub(issues_replace, chain_f(s)))
1213
1213
1214 # Set tmp function globally - atomically
1214 # Set tmp function globally - atomically
1215 _urlify_issues_f = tmp_urlify_issues_f
1215 _urlify_issues_f = tmp_urlify_issues_f
1216
1216
1217 return _urlify_issues_f(newtext)
1217 return _urlify_issues_f(newtext)
1218
1218
1219
1219
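urlify_issues() builds its substitution chain lazily from issue_pat*,
issue_server_link* and issue_sub* entries in the .ini file. A sketch with hypothetical
settings (the key names follow the convention read above, the values are invented):

    # issue_pat         = #(\d+)
    # issue_server_link = https://issues.example.com/{repo}/issue/\1
    # issue_sub         = #\1
    #
    # With these, '#42' in a message for repository 'foo/bar' is rendered as
    # <a class="issue-tracker-link" href="https://issues.example.com/foo/bar/issue/42">#42</a>
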
1220 def render_w_mentions(source, repo_name=None):
1220 def render_w_mentions(source, repo_name=None):
1221 """
1221 """
1222 Render plain text with revision hashes and issue references urlified
1222 Render plain text with revision hashes and issue references urlified
1223 and with @mention highlighting.
1223 and with @mention highlighting.
1224 """
1224 """
1225 s = safe_str(source)
1225 s = safe_str(source)
1226 s = urlify_text(s, repo_name=repo_name)
1226 s = urlify_text(s, repo_name=repo_name)
1227 return literal('<div class="formatted-fixed">%s</div>' % s)
1227 return literal('<div class="formatted-fixed">%s</div>' % s)
1228
1228
1229
1229
1230 def short_ref(ref_type, ref_name):
1230 def short_ref(ref_type, ref_name):
1231 if ref_type == 'rev':
1231 if ref_type == 'rev':
1232 return short_id(ref_name)
1232 return short_id(ref_name)
1233 return ref_name
1233 return ref_name
1234
1234
1235
1235
1236 def link_to_ref(repo_name, ref_type, ref_name, rev=None):
1236 def link_to_ref(repo_name, ref_type, ref_name, rev=None):
1237 """
1237 """
1238 Return full markup for a href to changeset_home for a changeset.
1238 Return full markup for a href to changeset_home for a changeset.
1239 If ref_type is branch it will link to changelog.
1239 If ref_type is branch it will link to changelog.
1240 ref_name is shortened if ref_type is 'rev'.
1240 ref_name is shortened if ref_type is 'rev'.
1241 if rev is specified show it too, explicitly linking to that revision.
1241 if rev is specified show it too, explicitly linking to that revision.
1242 """
1242 """
1243 txt = short_ref(ref_type, ref_name)
1243 txt = short_ref(ref_type, ref_name)
1244 if ref_type == 'branch':
1244 if ref_type == 'branch':
1245 u = url('changelog_home', repo_name=repo_name, branch=ref_name)
1245 u = url('changelog_home', repo_name=repo_name, branch=ref_name)
1246 else:
1246 else:
1247 u = url('changeset_home', repo_name=repo_name, revision=ref_name)
1247 u = url('changeset_home', repo_name=repo_name, revision=ref_name)
1248 l = link_to(repo_name + '#' + txt, u)
1248 l = link_to(repo_name + '#' + txt, u)
1249 if rev and ref_type != 'rev':
1249 if rev and ref_type != 'rev':
1250 l = literal('%s (%s)' % (l, link_to(short_id(rev), url('changeset_home', repo_name=repo_name, revision=rev))))
1250 l = literal('%s (%s)' % (l, link_to(short_id(rev), url('changeset_home', repo_name=repo_name, revision=rev))))
1251 return l
1251 return l
1252
1252
1253
1253
1254 def changeset_status(repo, revision):
1254 def changeset_status(repo, revision):
1255 from kallithea.model.changeset_status import ChangesetStatusModel
1255 from kallithea.model.changeset_status import ChangesetStatusModel
1256 return ChangesetStatusModel().get_status(repo, revision)
1256 return ChangesetStatusModel().get_status(repo, revision)
1257
1257
1258
1258
1259 def changeset_status_lbl(changeset_status):
1259 def changeset_status_lbl(changeset_status):
1260 from kallithea.model.db import ChangesetStatus
1260 from kallithea.model.db import ChangesetStatus
1261 return ChangesetStatus.get_status_lbl(changeset_status)
1261 return ChangesetStatus.get_status_lbl(changeset_status)
1262
1262
1263
1263
1264 def get_permission_name(key):
1264 def get_permission_name(key):
1265 from kallithea.model.db import Permission
1265 from kallithea.model.db import Permission
1266 return dict(Permission.PERMS).get(key)
1266 return dict(Permission.PERMS).get(key)
1267
1267
1268
1268
1269 def journal_filter_help():
1269 def journal_filter_help():
1270 return _(textwrap.dedent('''
1270 return _(textwrap.dedent('''
1271 Example filter terms:
1271 Example filter terms:
1272 repository:vcs
1272 repository:vcs
1273 username:developer
1273 username:developer
1274 action:*push*
1274 action:*push*
1275 ip:127.0.0.1
1275 ip:127.0.0.1
1276 date:20120101
1276 date:20120101
1277 date:[20120101100000 TO 20120102]
1277 date:[20120101100000 TO 20120102]
1278
1278
1279 Generate wildcards using '*' character:
1279 Generate wildcards using '*' character:
1280 "repository:vcs*" - search everything starting with 'vcs'
1280 "repository:vcs*" - search everything starting with 'vcs'
1281 "repository:*vcs*" - search for repository containing 'vcs'
1281 "repository:*vcs*" - search for repository containing 'vcs'
1282
1282
1283 Optional AND / OR operators in queries
1283 Optional AND / OR operators in queries
1284 "repository:vcs OR repository:test"
1284 "repository:vcs OR repository:test"
1285 "username:test AND repository:test*"
1285 "username:test AND repository:test*"
1286 '''))
1286 '''))
1287
1287
1288
1288
1289 def not_mapped_error(repo_name):
1289 def not_mapped_error(repo_name):
1290 flash(_('%s repository is not mapped to db, perhaps'
1290 flash(_('%s repository is not mapped to db, perhaps'
1291 ' it was created or renamed from the filesystem;'
1291 ' it was created or renamed from the filesystem;'
1292 ' please run the application again'
1292 ' please run the application again'
1293 ' in order to rescan repositories') % repo_name, category='error')
1293 ' in order to rescan repositories') % repo_name, category='error')
1294
1294
1295
1295
1296 def ip_range(ip_addr):
1296 def ip_range(ip_addr):
1297 from kallithea.model.db import UserIpMap
1297 from kallithea.model.db import UserIpMap
1298 s, e = UserIpMap._get_ip_range(ip_addr)
1298 s, e = UserIpMap._get_ip_range(ip_addr)
1299 return '%s - %s' % (s, e)
1299 return '%s - %s' % (s, e)
1300
1300
1301
1301
1302 session_csrf_secret_name = "_session_csrf_secret_token"
1302 session_csrf_secret_name = "_session_csrf_secret_token"
1303
1303
1304 def session_csrf_secret_token():
1304 def session_csrf_secret_token():
1305 """Return (and create) the current session's CSRF protection token."""
1305 """Return (and create) the current session's CSRF protection token."""
1306 from tg import session
1306 from tg import session
1307 if not session_csrf_secret_name in session:
1307 if not session_csrf_secret_name in session:
1308 session[session_csrf_secret_name] = str(random.getrandbits(128))
1308 session[session_csrf_secret_name] = str(random.getrandbits(128))
1309 session.save()
1309 session.save()
1310 return session[session_csrf_secret_name]
1310 return session[session_csrf_secret_name]
1311
1311
1312 def form(url, method="post", **attrs):
1312 def form(url, method="post", **attrs):
1313 """Like webhelpers.html.tags.form , but automatically adding
1313 """Like webhelpers.html.tags.form , but automatically adding
1314 session_csrf_secret_token for POST. The secret is thus never leaked in GET
1314 session_csrf_secret_token for POST. The secret is thus never leaked in GET
1315 URLs.
1315 URLs.
1316 """
1316 """
1317 form = insecure_form(url, method, **attrs)
1317 form = insecure_form(url, method, **attrs)
1318 if method.lower() == 'get':
1318 if method.lower() == 'get':
1319 return form
1319 return form
1320 return form + HTML.div(hidden(session_csrf_secret_name, session_csrf_secret_token()), style="display: none;")
1320 return form + HTML.div(hidden(session_csrf_secret_name, session_csrf_secret_token()), style="display: none;")
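form() wraps insecure_form() and, for anything but GET, appends a hidden field
carrying the per-session CSRF token, so POST forms are protected without extra work
in the templates. A usage sketch (illustrative only; the route name is invented):

    markup = form(url('admin_settings'), method='post')
    # the markup ends with a hidden div containing
    # <input type="hidden" name="_session_csrf_secret_token" value="...">
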
@@ -1,59 +1,59 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 import json
2 import json
3 import urllib.parse
3 import urllib.parse
4 import urllib.request
4 import urllib.request
5
5
6
6
7 class RecaptchaResponse(object):
7 class RecaptchaResponse(object):
8 def __init__(self, is_valid, error_code=None):
8 def __init__(self, is_valid, error_code=None):
9 self.is_valid = is_valid
9 self.is_valid = is_valid
10 self.error_code = error_code
10 self.error_code = error_code
11
11
12 def __repr__(self):
12 def __repr__(self):
13 return '<RecaptchaResponse:%s>' % (self.is_valid)
13 return '<RecaptchaResponse:%s>' % (self.is_valid)
14
14
15
15
16 def submit(g_recaptcha_response, private_key, remoteip):
16 def submit(g_recaptcha_response, private_key, remoteip):
17 """
17 """
18 Submits a reCAPTCHA request for verification. Returns RecaptchaResponse for the request
18 Submits a reCAPTCHA request for verification. Returns RecaptchaResponse for the request
19
19
20 g_recaptcha_response -- The value of g_recaptcha_response from the form
20 g_recaptcha_response -- The value of g_recaptcha_response from the form
21 private_key -- your reCAPTCHA private key
21 private_key -- your reCAPTCHA private key
22 remoteip -- the user's IP address
22 remoteip -- the user's IP address
23 """
23 """
24
24
25 if not (g_recaptcha_response and len(g_recaptcha_response)):
25 if not (g_recaptcha_response and len(g_recaptcha_response)):
26 return RecaptchaResponse(is_valid=False, error_code='incorrect-captcha-sol')
26 return RecaptchaResponse(is_valid=False, error_code='incorrect-captcha-sol')
27
27
28 def encode_if_necessary(s):
28 def encode_if_necessary(s):
29 if isinstance(s, unicode):
29 if isinstance(s, str):
30 return s.encode('utf-8')
30 return s.encode('utf-8')
31 return s
31 return s
32
32
33 params = urllib.parse.urlencode({
33 params = urllib.parse.urlencode({
34 'secret': encode_if_necessary(private_key),
34 'secret': encode_if_necessary(private_key),
35 'remoteip': encode_if_necessary(remoteip),
35 'remoteip': encode_if_necessary(remoteip),
36 'response': encode_if_necessary(g_recaptcha_response),
36 'response': encode_if_necessary(g_recaptcha_response),
37 }).encode('ascii')
37 }).encode('ascii')
38
38
39 req = urllib.request.Request(
39 req = urllib.request.Request(
40 url="https://www.google.com/recaptcha/api/siteverify",
40 url="https://www.google.com/recaptcha/api/siteverify",
41 data=params,
41 data=params,
42 headers={
42 headers={
43 "Content-type": "application/x-www-form-urlencoded",
43 "Content-type": "application/x-www-form-urlencoded",
44 "User-agent": "reCAPTCHA Python"
44 "User-agent": "reCAPTCHA Python"
45 }
45 }
46 )
46 )
47
47
48 httpresp = urllib.request.urlopen(req)
48 httpresp = urllib.request.urlopen(req)
49 return_values = json.loads(httpresp.read())
49 return_values = json.loads(httpresp.read())
50 httpresp.close()
50 httpresp.close()
51
51
52 if not (isinstance(return_values, dict)):
52 if not (isinstance(return_values, dict)):
53 return RecaptchaResponse(is_valid=False, error_code='incorrect-captcha-sol')
53 return RecaptchaResponse(is_valid=False, error_code='incorrect-captcha-sol')
54 elif (("success" in return_values) and ((return_values["success"] is True) or (return_values["success"] == "true"))):
54 elif (("success" in return_values) and ((return_values["success"] is True) or (return_values["success"] == "true"))):
55 return RecaptchaResponse(is_valid=True)
55 return RecaptchaResponse(is_valid=True)
56 elif (("error-codes" in return_values) and isinstance(return_values["error-codes"], list) and (len(return_values["error-codes"]) > 0)):
56 elif (("error-codes" in return_values) and isinstance(return_values["error-codes"], list) and (len(return_values["error-codes"]) > 0)):
57 return RecaptchaResponse(is_valid=False, error_code=return_values["error-codes"][0])
57 return RecaptchaResponse(is_valid=False, error_code=return_values["error-codes"][0])
58 else:
58 else:
59 return RecaptchaResponse(is_valid=False, error_code='incorrect-captcha-sol')
59 return RecaptchaResponse(is_valid=False, error_code='incorrect-captcha-sol')
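A short usage sketch of the verifier above. The import path is an assumption (the module is not named in this hunk), and the call performs a real HTTPS request to Google's siteverify endpoint, so real keys and a real client response are needed for a positive result.

    from kallithea.lib.recaptcha import submit  # assumed module path

    resp = submit(g_recaptcha_response='value of the g-recaptcha-response form field',
                  private_key='your reCAPTCHA secret key',
                  remoteip='203.0.113.7')
    if resp.is_valid:
        print('captcha ok')
    else:
        print('captcha failed:', resp.error_code)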
@@ -1,613 +1,613 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 # This program is free software: you can redistribute it and/or modify
2 # This program is free software: you can redistribute it and/or modify
3 # it under the terms of the GNU General Public License as published by
3 # it under the terms of the GNU General Public License as published by
4 # the Free Software Foundation, either version 3 of the License, or
4 # the Free Software Foundation, either version 3 of the License, or
5 # (at your option) any later version.
5 # (at your option) any later version.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU General Public License
12 # You should have received a copy of the GNU General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 """
14 """
15 kallithea.lib.utils2
15 kallithea.lib.utils2
16 ~~~~~~~~~~~~~~~~~~~~
16 ~~~~~~~~~~~~~~~~~~~~
17
17
18 Some simple helper functions.
18 Some simple helper functions.
19 Note: all these functions should be independent of Kallithea classes, i.e.
19 Note: all these functions should be independent of Kallithea classes, i.e.
20 models, controllers, etc. to prevent import cycles.
20 models, controllers, etc. to prevent import cycles.
21
21
22 This file was forked by the Kallithea project in July 2014.
22 This file was forked by the Kallithea project in July 2014.
23 Original author and date, and relevant copyright and licensing information is below:
23 Original author and date, and relevant copyright and licensing information is below:
24 :created_on: Jan 5, 2011
24 :created_on: Jan 5, 2011
25 :author: marcink
25 :author: marcink
26 :copyright: (c) 2013 RhodeCode GmbH, and others.
26 :copyright: (c) 2013 RhodeCode GmbH, and others.
27 :license: GPLv3, see LICENSE.md for more details.
27 :license: GPLv3, see LICENSE.md for more details.
28 """
28 """
29
29
30 from __future__ import print_function
30 from __future__ import print_function
31
31
32 import binascii
32 import binascii
33 import datetime
33 import datetime
34 import json
34 import json
35 import os
35 import os
36 import pwd
36 import pwd
37 import re
37 import re
38 import time
38 import time
39 import urllib.parse
39 import urllib.parse
40
40
41 import urlobject
41 import urlobject
42 from tg.i18n import ugettext as _
42 from tg.i18n import ugettext as _
43 from tg.i18n import ungettext
43 from tg.i18n import ungettext
44 from webhelpers2.text import collapse, remove_formatting, strip_tags
44 from webhelpers2.text import collapse, remove_formatting, strip_tags
45
45
46 from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, safe_bytes, safe_str # re-export
46 from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, safe_bytes, safe_str # re-export
47 from kallithea.lib.vcs.utils.lazy import LazyProperty
47 from kallithea.lib.vcs.utils.lazy import LazyProperty
48
48
49
49
50 def str2bool(_str):
50 def str2bool(_str):
51 """
51 """
52 Returns a True/False value from the given string; it tries to translate the
52 Returns a True/False value from the given string; it tries to translate the
53 string into a boolean
53 string into a boolean
54
54
55 :param _str: string value to translate into boolean
55 :param _str: string value to translate into boolean
56 :rtype: boolean
56 :rtype: boolean
57 :returns: boolean from given string
57 :returns: boolean from given string
58 """
58 """
59 if _str is None:
59 if _str is None:
60 return False
60 return False
61 if _str in (True, False):
61 if _str in (True, False):
62 return _str
62 return _str
63 _str = str(_str).strip().lower()
63 _str = str(_str).strip().lower()
64 return _str in ('t', 'true', 'y', 'yes', 'on', '1')
64 return _str in ('t', 'true', 'y', 'yes', 'on', '1')
65
65
66
66
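For illustration, a few calls to str2bool as defined above (the import path follows the module docstring):

    from kallithea.lib.utils2 import str2bool

    assert str2bool('Yes') is True      # case-insensitive match against the truthy set
    assert str2bool('0') is False       # anything else is False
    assert str2bool(None) is False      # None short-circuits to False
    assert str2bool(True) is True       # real booleans pass straight through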
67 def aslist(obj, sep=None, strip=True):
67 def aslist(obj, sep=None, strip=True):
68 """
68 """
69 Returns the given string, split on sep, as a list
69 Returns the given string, split on sep, as a list
70
70
71 :param obj:
71 :param obj:
72 :param sep:
72 :param sep:
73 :param strip:
73 :param strip:
74 """
74 """
75 if isinstance(obj, (str)):
75 if isinstance(obj, (str)):
76 lst = obj.split(sep)
76 lst = obj.split(sep)
77 if strip:
77 if strip:
78 lst = [v.strip() for v in lst]
78 lst = [v.strip() for v in lst]
79 return lst
79 return lst
80 elif isinstance(obj, (list, tuple)):
80 elif isinstance(obj, (list, tuple)):
81 return obj
81 return obj
82 elif obj is None:
82 elif obj is None:
83 return []
83 return []
84 else:
84 else:
85 return [obj]
85 return [obj]
86
86
87
87
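A few examples of how aslist normalizes its input:

    from kallithea.lib.utils2 import aslist

    assert aslist('hg, git, svn', sep=',') == ['hg', 'git', 'svn']  # stripped by default
    assert aslist(['a', 'b']) == ['a', 'b']                         # lists pass through
    assert aslist(None) == []                                       # None becomes []
    assert aslist(42) == [42]                                       # scalars get wrapped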
88 def convert_line_endings(line, mode):
88 def convert_line_endings(line, mode):
89 """
89 """
90 Converts the line endings of the given line according to the given mode
90 Converts the line endings of the given line according to the given mode
91
91
92 Available modes are::
92 Available modes are::
93 0 - Unix
93 0 - Unix
94 1 - Mac
94 1 - Mac
95 2 - DOS
95 2 - DOS
96
96
97 :param line: given line to convert
97 :param line: given line to convert
98 :param mode: mode to convert to
98 :param mode: mode to convert to
99 :rtype: str
99 :rtype: str
100 :return: converted line according to mode
100 :return: converted line according to mode
101 """
101 """
102 if mode == 0:
102 if mode == 0:
103 line = line.replace('\r\n', '\n')
103 line = line.replace('\r\n', '\n')
104 line = line.replace('\r', '\n')
104 line = line.replace('\r', '\n')
105 elif mode == 1:
105 elif mode == 1:
106 line = line.replace('\r\n', '\r')
106 line = line.replace('\r\n', '\r')
107 line = line.replace('\n', '\r')
107 line = line.replace('\n', '\r')
108 elif mode == 2:
108 elif mode == 2:
109 line = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", line)
109 line = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", line)
110 return line
110 return line
111
111
112
112
113 def detect_mode(line, default):
113 def detect_mode(line, default):
114 """
114 """
115 Detects the line break style of the given line; if the line break couldn't
115 Detects the line break style of the given line; if the line break couldn't
116 be found, the given default value is returned
116 be found, the given default value is returned
117
117
118 :param line: str line
118 :param line: str line
119 :param default: default
119 :param default: default
120 :rtype: int
120 :rtype: int
121 :return: value of line end, one of 0 - Unix, 1 - Mac, 2 - DOS
121 :return: value of line end, one of 0 - Unix, 1 - Mac, 2 - DOS
122 """
122 """
123 if line.endswith('\r\n'):
123 if line.endswith('\r\n'):
124 return 2
124 return 2
125 elif line.endswith('\n'):
125 elif line.endswith('\n'):
126 return 0
126 return 0
127 elif line.endswith('\r'):
127 elif line.endswith('\r'):
128 return 1
128 return 1
129 else:
129 else:
130 return default
130 return default
131
131
132
132
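The two helpers above are typically used together: detect the current style, then normalize. A small self-contained example:

    from kallithea.lib.utils2 import convert_line_endings, detect_mode

    line = 'print("hi")\r\n'
    assert detect_mode(line, 0) == 2                           # DOS ending detected
    assert convert_line_endings(line, 0) == 'print("hi")\n'    # normalized to Unix
    assert detect_mode('no newline here', 0) == 0              # falls back to the default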
133 def generate_api_key():
133 def generate_api_key():
134 """
134 """
135 Generates a random (presumably unique) API key.
135 Generates a random (presumably unique) API key.
136
136
137 This value is used in URLs and "Bearer" HTTP Authorization headers,
137 This value is used in URLs and "Bearer" HTTP Authorization headers,
138 which in practice means it should only contain URL-safe characters
138 which in practice means it should only contain URL-safe characters
139 (RFC 3986):
139 (RFC 3986):
140
140
141 unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
141 unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
142 """
142 """
143 # Hexadecimal certainly qualifies as URL-safe.
143 # Hexadecimal certainly qualifies as URL-safe.
144 return ascii_str(binascii.hexlify(os.urandom(20)))
144 return ascii_str(binascii.hexlify(os.urandom(20)))
145
145
146
146
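Quick check of the properties promised by the docstring above:

    from kallithea.lib.utils2 import generate_api_key

    key = generate_api_key()
    assert len(key) == 40                                 # 20 random bytes, hex encoded
    assert all(c in '0123456789abcdef' for c in key)      # hexadecimal, hence URL-safe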
147 def safe_int(val, default=None):
147 def safe_int(val, default=None):
148 """
148 """
149 Returns int() of val; if val is not convertible to int, the default is
149 Returns int() of val; if val is not convertible to int, the default is
150 returned instead
150 returned instead
151
151
152 :param val:
152 :param val:
153 :param default:
153 :param default:
154 """
154 """
155 try:
155 try:
156 val = int(val)
156 val = int(val)
157 except (ValueError, TypeError):
157 except (ValueError, TypeError):
158 val = default
158 val = default
159 return val
159 return val
160
160
161
161
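Example behaviour of safe_int:

    from kallithea.lib.utils2 import safe_int

    assert safe_int('42') == 42
    assert safe_int('not-a-number') is None       # default default is None
    assert safe_int(None, default=0) == 0         # TypeError is swallowed too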
162 def remove_suffix(s, suffix):
162 def remove_suffix(s, suffix):
163 if s.endswith(suffix):
163 if s.endswith(suffix):
164 s = s[:-1 * len(suffix)]
164 s = s[:-1 * len(suffix)]
165 return s
165 return s
166
166
167
167
168 def remove_prefix(s, prefix):
168 def remove_prefix(s, prefix):
169 if s.startswith(prefix):
169 if s.startswith(prefix):
170 s = s[len(prefix):]
170 s = s[len(prefix):]
171 return s
171 return s
172
172
173
173
174 def age(prevdate, show_short_version=False, now=None):
174 def age(prevdate, show_short_version=False, now=None):
175 """
175 """
176 turns a datetime into an age string.
176 turns a datetime into an age string.
177 If show_short_version is True, then it will generate a less accurate but shorter string,
177 If show_short_version is True, then it will generate a less accurate but shorter string,
178 for example: 2 days ago, instead of 2 days and 23 hours ago.
178 for example: 2 days ago, instead of 2 days and 23 hours ago.
179
179
180 :param prevdate: datetime object
180 :param prevdate: datetime object
181 :param show_short_version: if it should approximate the date and return a shorter string
181 :param show_short_version: if it should approximate the date and return a shorter string
182 :rtype: unicode
182 :rtype: str
183 :returns: unicode words describing age
183 :returns: str words describing age
184 """
184 """
185 now = now or datetime.datetime.now()
185 now = now or datetime.datetime.now()
186 order = ['year', 'month', 'day', 'hour', 'minute', 'second']
186 order = ['year', 'month', 'day', 'hour', 'minute', 'second']
187 deltas = {}
187 deltas = {}
188 future = False
188 future = False
189
189
190 if prevdate > now:
190 if prevdate > now:
191 now, prevdate = prevdate, now
191 now, prevdate = prevdate, now
192 future = True
192 future = True
193 if future:
193 if future:
194 prevdate = prevdate.replace(microsecond=0)
194 prevdate = prevdate.replace(microsecond=0)
195 # Get date parts deltas
195 # Get date parts deltas
196 from dateutil import relativedelta
196 from dateutil import relativedelta
197 for part in order:
197 for part in order:
198 d = relativedelta.relativedelta(now, prevdate)
198 d = relativedelta.relativedelta(now, prevdate)
199 deltas[part] = getattr(d, part + 's')
199 deltas[part] = getattr(d, part + 's')
200
200
201 # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
201 # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
202 # not 1 hour, -59 minutes and -59 seconds)
202 # not 1 hour, -59 minutes and -59 seconds)
203 for num, length in [(5, 60), (4, 60), (3, 24)]: # seconds, minutes, hours
203 for num, length in [(5, 60), (4, 60), (3, 24)]: # seconds, minutes, hours
204 part = order[num]
204 part = order[num]
205 carry_part = order[num - 1]
205 carry_part = order[num - 1]
206
206
207 if deltas[part] < 0:
207 if deltas[part] < 0:
208 deltas[part] += length
208 deltas[part] += length
209 deltas[carry_part] -= 1
209 deltas[carry_part] -= 1
210
210
211 # Same thing for days except that the increment depends on the (variable)
211 # Same thing for days except that the increment depends on the (variable)
212 # number of days in the month
212 # number of days in the month
213 month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
213 month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
214 if deltas['day'] < 0:
214 if deltas['day'] < 0:
215 if prevdate.month == 2 and (prevdate.year % 4 == 0 and
215 if prevdate.month == 2 and (prevdate.year % 4 == 0 and
216 (prevdate.year % 100 != 0 or prevdate.year % 400 == 0)
216 (prevdate.year % 100 != 0 or prevdate.year % 400 == 0)
217 ):
217 ):
218 deltas['day'] += 29
218 deltas['day'] += 29
219 else:
219 else:
220 deltas['day'] += month_lengths[prevdate.month - 1]
220 deltas['day'] += month_lengths[prevdate.month - 1]
221
221
222 deltas['month'] -= 1
222 deltas['month'] -= 1
223
223
224 if deltas['month'] < 0:
224 if deltas['month'] < 0:
225 deltas['month'] += 12
225 deltas['month'] += 12
226 deltas['year'] -= 1
226 deltas['year'] -= 1
227
227
228 # In short version, we want nicer handling of ages of more than a year
228 # In short version, we want nicer handling of ages of more than a year
229 if show_short_version:
229 if show_short_version:
230 if deltas['year'] == 1:
230 if deltas['year'] == 1:
231 # ages between 1 and 2 years: show as months
231 # ages between 1 and 2 years: show as months
232 deltas['month'] += 12
232 deltas['month'] += 12
233 deltas['year'] = 0
233 deltas['year'] = 0
234 if deltas['year'] >= 2:
234 if deltas['year'] >= 2:
235 # ages 2+ years: round
235 # ages 2+ years: round
236 if deltas['month'] > 6:
236 if deltas['month'] > 6:
237 deltas['year'] += 1
237 deltas['year'] += 1
238 deltas['month'] = 0
238 deltas['month'] = 0
239
239
240 # Format the result
240 # Format the result
241 fmt_funcs = {
241 fmt_funcs = {
242 'year': lambda d: ungettext(u'%d year', '%d years', d) % d,
242 'year': lambda d: ungettext(u'%d year', '%d years', d) % d,
243 'month': lambda d: ungettext(u'%d month', '%d months', d) % d,
243 'month': lambda d: ungettext(u'%d month', '%d months', d) % d,
244 'day': lambda d: ungettext(u'%d day', '%d days', d) % d,
244 'day': lambda d: ungettext(u'%d day', '%d days', d) % d,
245 'hour': lambda d: ungettext(u'%d hour', '%d hours', d) % d,
245 'hour': lambda d: ungettext(u'%d hour', '%d hours', d) % d,
246 'minute': lambda d: ungettext(u'%d minute', '%d minutes', d) % d,
246 'minute': lambda d: ungettext(u'%d minute', '%d minutes', d) % d,
247 'second': lambda d: ungettext(u'%d second', '%d seconds', d) % d,
247 'second': lambda d: ungettext(u'%d second', '%d seconds', d) % d,
248 }
248 }
249
249
250 for i, part in enumerate(order):
250 for i, part in enumerate(order):
251 value = deltas[part]
251 value = deltas[part]
252 if value == 0:
252 if value == 0:
253 continue
253 continue
254
254
255 if i < 5:
255 if i < 5:
256 sub_part = order[i + 1]
256 sub_part = order[i + 1]
257 sub_value = deltas[sub_part]
257 sub_value = deltas[sub_part]
258 else:
258 else:
259 sub_value = 0
259 sub_value = 0
260
260
261 if sub_value == 0 or show_short_version:
261 if sub_value == 0 or show_short_version:
262 if future:
262 if future:
263 return _('in %s') % fmt_funcs[part](value)
263 return _('in %s') % fmt_funcs[part](value)
264 else:
264 else:
265 return _('%s ago') % fmt_funcs[part](value)
265 return _('%s ago') % fmt_funcs[part](value)
266 if future:
266 if future:
267 return _('in %s and %s') % (fmt_funcs[part](value),
267 return _('in %s and %s') % (fmt_funcs[part](value),
268 fmt_funcs[sub_part](sub_value))
268 fmt_funcs[sub_part](sub_value))
269 else:
269 else:
270 return _('%s and %s ago') % (fmt_funcs[part](value),
270 return _('%s and %s ago') % (fmt_funcs[part](value),
271 fmt_funcs[sub_part](sub_value))
271 fmt_funcs[sub_part](sub_value))
272
272
273 return _('just now')
273 return _('just now')
274
274
275
275
276 def uri_filter(uri):
276 def uri_filter(uri):
277 """
277 """
278 Removes user:password from given url string
278 Removes user:password from given url string
279
279
280 :param uri:
280 :param uri:
281 :rtype: unicode
281 :rtype: str
282 :returns: filtered list of strings
282 :returns: filtered list of strings
283 """
283 """
284 if not uri:
284 if not uri:
285 return []
285 return []
286
286
287 proto = ''
287 proto = ''
288
288
289 for pat in ('https://', 'http://', 'git://'):
289 for pat in ('https://', 'http://', 'git://'):
290 if uri.startswith(pat):
290 if uri.startswith(pat):
291 uri = uri[len(pat):]
291 uri = uri[len(pat):]
292 proto = pat
292 proto = pat
293 break
293 break
294
294
295 # remove passwords and username
295 # remove passwords and username
296 uri = uri[uri.find('@') + 1:]
296 uri = uri[uri.find('@') + 1:]
297
297
298 # get the port
298 # get the port
299 cred_pos = uri.find(':')
299 cred_pos = uri.find(':')
300 if cred_pos == -1:
300 if cred_pos == -1:
301 host, port = uri, None
301 host, port = uri, None
302 else:
302 else:
303 host, port = uri[:cred_pos], uri[cred_pos + 1:]
303 host, port = uri[:cred_pos], uri[cred_pos + 1:]
304
304
305 return [_f for _f in [proto, host, port] if _f]
305 return [_f for _f in [proto, host, port] if _f]
306
306
307
307
308 def credentials_filter(uri):
308 def credentials_filter(uri):
309 """
309 """
310 Returns a url with removed credentials
310 Returns a url with removed credentials
311
311
312 :param uri:
312 :param uri:
313 """
313 """
314
314
315 uri = uri_filter(uri)
315 uri = uri_filter(uri)
316 # check if we have port
316 # check if we have port
317 if len(uri) > 2 and uri[2]:
317 if len(uri) > 2 and uri[2]:
318 uri[2] = ':' + uri[2]
318 uri[2] = ':' + uri[2]
319
319
320 return ''.join(uri)
320 return ''.join(uri)
321
321
322
322
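Example of stripping credentials from a clone URL with the two helpers above:

    from kallithea.lib.utils2 import credentials_filter, uri_filter

    url = 'https://user:secret@example.com:8080/repo'
    assert uri_filter(url) == ['https://', 'example.com', '8080/repo']
    assert credentials_filter(url) == 'https://example.com:8080/repo'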
323 def get_clone_url(clone_uri_tmpl, prefix_url, repo_name, repo_id, username=None):
323 def get_clone_url(clone_uri_tmpl, prefix_url, repo_name, repo_id, username=None):
324 parsed_url = urlobject.URLObject(prefix_url)
324 parsed_url = urlobject.URLObject(prefix_url)
325 prefix = urllib.parse.unquote(parsed_url.path.rstrip('/'))
325 prefix = urllib.parse.unquote(parsed_url.path.rstrip('/'))
326 try:
326 try:
327 system_user = pwd.getpwuid(os.getuid()).pw_name
327 system_user = pwd.getpwuid(os.getuid()).pw_name
328 except Exception: # TODO: support all systems - especially Windows
328 except Exception: # TODO: support all systems - especially Windows
329 system_user = 'kallithea' # hardcoded default value ...
329 system_user = 'kallithea' # hardcoded default value ...
330 args = {
330 args = {
331 'scheme': parsed_url.scheme,
331 'scheme': parsed_url.scheme,
332 'user': urllib.parse.quote(username or ''),
332 'user': urllib.parse.quote(username or ''),
333 'netloc': parsed_url.netloc + prefix, # like "hostname:port/prefix" (with optional ":port" and "/prefix")
333 'netloc': parsed_url.netloc + prefix, # like "hostname:port/prefix" (with optional ":port" and "/prefix")
334 'prefix': prefix, # undocumented, empty or starting with /
334 'prefix': prefix, # undocumented, empty or starting with /
335 'repo': repo_name,
335 'repo': repo_name,
336 'repoid': str(repo_id),
336 'repoid': str(repo_id),
337 'system_user': system_user,
337 'system_user': system_user,
338 'hostname': parsed_url.hostname,
338 'hostname': parsed_url.hostname,
339 }
339 }
340 url = re.sub('{([^{}]+)}', lambda m: args.get(m.group(1), m.group(0)), clone_uri_tmpl)
340 url = re.sub('{([^{}]+)}', lambda m: args.get(m.group(1), m.group(0)), clone_uri_tmpl)
341
341
342 # remove leading @ sign if it's present. Case of empty user
342 # remove leading @ sign if it's present. Case of empty user
343 url_obj = urlobject.URLObject(url)
343 url_obj = urlobject.URLObject(url)
344 if not url_obj.username:
344 if not url_obj.username:
345 url_obj = url_obj.with_username(None)
345 url_obj = url_obj.with_username(None)
346
346
347 return str(url_obj)
347 return str(url_obj)
348
348
349
349
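A rough illustration of how the template variables above expand; the template string, host name and user are made up for the example, and urlobject must be installed (it is a Kallithea dependency):

    from kallithea.lib.utils2 import get_clone_url

    url = get_clone_url(clone_uri_tmpl='{scheme}://{user}@{netloc}/{repo}',
                        prefix_url='https://kallithea.example.com/code',
                        repo_name='group/myrepo',
                        repo_id=7,
                        username='john')
    # expected expansion: 'https://john@kallithea.example.com/code/group/myrepo'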
350 def get_changeset_safe(repo, rev):
350 def get_changeset_safe(repo, rev):
351 """
351 """
352 Safe version of get_changeset: if this changeset doesn't exist for a
352 Safe version of get_changeset: if this changeset doesn't exist for a
353 repo, it returns a Dummy one instead
353 repo, it returns a Dummy one instead
354
354
355 :param repo:
355 :param repo:
356 :param rev:
356 :param rev:
357 """
357 """
358 from kallithea.lib.vcs.backends.base import BaseRepository
358 from kallithea.lib.vcs.backends.base import BaseRepository
359 from kallithea.lib.vcs.exceptions import RepositoryError
359 from kallithea.lib.vcs.exceptions import RepositoryError
360 from kallithea.lib.vcs.backends.base import EmptyChangeset
360 from kallithea.lib.vcs.backends.base import EmptyChangeset
361 if not isinstance(repo, BaseRepository):
361 if not isinstance(repo, BaseRepository):
362 raise Exception('You must pass a Repository '
362 raise Exception('You must pass a Repository '
363 'object as first argument, got %s' % type(repo))
363 'object as first argument, got %s' % type(repo))
364
364
365 try:
365 try:
366 cs = repo.get_changeset(rev)
366 cs = repo.get_changeset(rev)
367 except (RepositoryError, LookupError):
367 except (RepositoryError, LookupError):
368 cs = EmptyChangeset(requested_revision=rev)
368 cs = EmptyChangeset(requested_revision=rev)
369 return cs
369 return cs
370
370
371
371
372 def datetime_to_time(dt):
372 def datetime_to_time(dt):
373 if dt:
373 if dt:
374 return time.mktime(dt.timetuple())
374 return time.mktime(dt.timetuple())
375
375
376
376
377 def time_to_datetime(tm):
377 def time_to_datetime(tm):
378 if tm:
378 if tm:
379 if isinstance(tm, str):
379 if isinstance(tm, str):
380 try:
380 try:
381 tm = float(tm)
381 tm = float(tm)
382 except ValueError:
382 except ValueError:
383 return
383 return
384 return datetime.datetime.fromtimestamp(tm)
384 return datetime.datetime.fromtimestamp(tm)
385
385
386
386
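The two time helpers above round-trip naive datetimes via POSIX timestamps (local time, sub-second precision is dropped):

    import datetime
    from kallithea.lib.utils2 import datetime_to_time, time_to_datetime

    dt = datetime.datetime(2020, 5, 17, 12, 30, 0)
    ts = datetime_to_time(dt)                        # seconds since the epoch
    assert time_to_datetime(ts) == dt
    assert time_to_datetime('not a number') is None  # unparsable strings give None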
387 # Must match regexp in kallithea/public/js/base.js MentionsAutoComplete()
387 # Must match regexp in kallithea/public/js/base.js MentionsAutoComplete()
388 # Check char before @ - it must not look like we are in an email address.
388 # Check char before @ - it must not look like we are in an email address.
389 # Matching is greedy so we don't have to look beyond the end.
389 # Matching is greedy so we don't have to look beyond the end.
390 MENTIONS_REGEX = re.compile(r'(?:^|(?<=[^a-zA-Z0-9]))@([a-zA-Z0-9][-_.a-zA-Z0-9]*[a-zA-Z0-9])')
390 MENTIONS_REGEX = re.compile(r'(?:^|(?<=[^a-zA-Z0-9]))@([a-zA-Z0-9][-_.a-zA-Z0-9]*[a-zA-Z0-9])')
391
391
392
392
393 def extract_mentioned_usernames(text):
393 def extract_mentioned_usernames(text):
394 r"""
394 r"""
395 Returns list of (possible) usernames @mentioned in given text.
395 Returns list of (possible) usernames @mentioned in given text.
396
396
397 >>> extract_mentioned_usernames('@1-2.a_X,@1234 not@not @ddd@not @n @ee @ff @gg, @gg;@hh @n\n@zz,')
397 >>> extract_mentioned_usernames('@1-2.a_X,@1234 not@not @ddd@not @n @ee @ff @gg, @gg;@hh @n\n@zz,')
398 ['1-2.a_X', '1234', 'ddd', 'ee', 'ff', 'gg', 'gg', 'hh', 'zz']
398 ['1-2.a_X', '1234', 'ddd', 'ee', 'ff', 'gg', 'gg', 'hh', 'zz']
399 """
399 """
400 return MENTIONS_REGEX.findall(text)
400 return MENTIONS_REGEX.findall(text)
401
401
402
402
403 def extract_mentioned_users(text):
403 def extract_mentioned_users(text):
404 """ Returns set of actual database Users @mentioned in given text. """
404 """ Returns set of actual database Users @mentioned in given text. """
405 from kallithea.model.db import User
405 from kallithea.model.db import User
406 result = set()
406 result = set()
407 for name in extract_mentioned_usernames(text):
407 for name in extract_mentioned_usernames(text):
408 user = User.get_by_username(name, case_insensitive=True)
408 user = User.get_by_username(name, case_insensitive=True)
409 if user is not None and not user.is_default_user:
409 if user is not None and not user.is_default_user:
410 result.add(user)
410 result.add(user)
411 return result
411 return result
412
412
413
413
414 class AttributeDict(dict):
414 class AttributeDict(dict):
415 def __getattr__(self, attr):
415 def __getattr__(self, attr):
416 return self.get(attr, None)
416 return self.get(attr, None)
417 __setattr__ = dict.__setitem__
417 __setattr__ = dict.__setitem__
418 __delattr__ = dict.__delitem__
418 __delattr__ = dict.__delitem__
419
419
420
420
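AttributeDict simply maps attribute access onto dict keys, with missing keys reading as None:

    from kallithea.lib.utils2 import AttributeDict

    d = AttributeDict(name='stable', head='abc123')
    assert d.name == 'stable'      # attribute access reads dict keys
    assert d.missing is None       # unknown attributes silently give None
    d.extra = True                 # attribute assignment writes a key
    assert d['extra'] is True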
421 def obfuscate_url_pw(engine):
421 def obfuscate_url_pw(engine):
422 from sqlalchemy.engine import url as sa_url
422 from sqlalchemy.engine import url as sa_url
423 from sqlalchemy.exc import ArgumentError
423 from sqlalchemy.exc import ArgumentError
424 try:
424 try:
425 _url = sa_url.make_url(engine or '')
425 _url = sa_url.make_url(engine or '')
426 except ArgumentError:
426 except ArgumentError:
427 return engine
427 return engine
428 if _url.password:
428 if _url.password:
429 _url.password = 'XXXXX'
429 _url.password = 'XXXXX'
430 return str(_url)
430 return str(_url)
431
431
432
432
433 class HookEnvironmentError(Exception): pass
433 class HookEnvironmentError(Exception): pass
434
434
435
435
436 def get_hook_environment():
436 def get_hook_environment():
437 """
437 """
438 Get hook context by deserializing the global KALLITHEA_EXTRAS environment
438 Get hook context by deserializing the global KALLITHEA_EXTRAS environment
439 variable.
439 variable.
440
440
441 Called early in Git out-of-process hooks to get .ini config path so the
441 Called early in Git out-of-process hooks to get .ini config path so the
442 basic environment can be configured properly. Also used in all hooks to get
442 basic environment can be configured properly. Also used in all hooks to get
443 information about the action that triggered it.
443 information about the action that triggered it.
444 """
444 """
445
445
446 try:
446 try:
447 kallithea_extras = os.environ['KALLITHEA_EXTRAS']
447 kallithea_extras = os.environ['KALLITHEA_EXTRAS']
448 except KeyError:
448 except KeyError:
449 raise HookEnvironmentError("Environment variable KALLITHEA_EXTRAS not found")
449 raise HookEnvironmentError("Environment variable KALLITHEA_EXTRAS not found")
450
450
451 extras = json.loads(kallithea_extras)
451 extras = json.loads(kallithea_extras)
452 for k in ['username', 'repository', 'scm', 'action', 'ip', 'config']:
452 for k in ['username', 'repository', 'scm', 'action', 'ip', 'config']:
453 try:
453 try:
454 extras[k]
454 extras[k]
455 except KeyError:
455 except KeyError:
456 raise HookEnvironmentError('Missing key %s in KALLITHEA_EXTRAS %s' % (k, extras))
456 raise HookEnvironmentError('Missing key %s in KALLITHEA_EXTRAS %s' % (k, extras))
457
457
458 return AttributeDict(extras)
458 return AttributeDict(extras)
459
459
460
460
461 def set_hook_environment(username, ip_addr, repo_name, repo_alias, action=None):
461 def set_hook_environment(username, ip_addr, repo_name, repo_alias, action=None):
462 """Prepare global context for running hooks by serializing data in the
462 """Prepare global context for running hooks by serializing data in the
463 global KALLITHEA_EXTRAS environment variable.
463 global KALLITHEA_EXTRAS environment variable.
464
464
465 Most importantly, this allows Git hooks to do proper logging and updating of
465 Most importantly, this allows Git hooks to do proper logging and updating of
466 caches after pushes.
466 caches after pushes.
467
467
468 Must always be called before anything with hooks is invoked.
468 Must always be called before anything with hooks is invoked.
469 """
469 """
470 from kallithea import CONFIG
470 from kallithea import CONFIG
471 extras = {
471 extras = {
472 'ip': ip_addr, # used in log_push/pull_action action_logger
472 'ip': ip_addr, # used in log_push/pull_action action_logger
473 'username': username,
473 'username': username,
474 'action': action or 'push_local', # used in log_push_action_raw_ids action_logger
474 'action': action or 'push_local', # used in log_push_action_raw_ids action_logger
475 'repository': repo_name,
475 'repository': repo_name,
476 'scm': repo_alias, # used to pick hack in log_push_action_raw_ids
476 'scm': repo_alias, # used to pick hack in log_push_action_raw_ids
477 'config': CONFIG['__file__'], # used by git hook to read config
477 'config': CONFIG['__file__'], # used by git hook to read config
478 }
478 }
479 os.environ['KALLITHEA_EXTRAS'] = json.dumps(extras)
479 os.environ['KALLITHEA_EXTRAS'] = json.dumps(extras)
480
480
481
481
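set_hook_environment needs the application CONFIG, but the deserializing side can be exercised on its own by populating the environment variable by hand with the required keys (the values below are made up):

    import json
    import os

    from kallithea.lib.utils2 import get_hook_environment

    os.environ['KALLITHEA_EXTRAS'] = json.dumps({
        'username': 'john', 'repository': 'group/myrepo', 'scm': 'hg',
        'action': 'push', 'ip': '203.0.113.7', 'config': '/srv/kallithea/my.ini',
    })
    extras = get_hook_environment()
    assert extras.repository == 'group/myrepo'   # AttributeDict gives attribute access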
482 def get_current_authuser():
482 def get_current_authuser():
483 """
483 """
484 Gets kallithea user from threadlocal tmpl_context variable if it's
484 Gets kallithea user from threadlocal tmpl_context variable if it's
485 defined, else returns None.
485 defined, else returns None.
486 """
486 """
487 from tg import tmpl_context
487 from tg import tmpl_context
488 try:
488 try:
489 return getattr(tmpl_context, 'authuser', None)
489 return getattr(tmpl_context, 'authuser', None)
490 except TypeError: # No object (name: context) has been registered for this thread
490 except TypeError: # No object (name: context) has been registered for this thread
491 return None
491 return None
492
492
493
493
494 class OptionalAttr(object):
494 class OptionalAttr(object):
495 """
495 """
496 Special Optional that refers to another attribute. Example::
496 Special Optional that refers to another attribute. Example::
497
497
498 def test(apiuser, userid=Optional(OAttr('apiuser'))):
498 def test(apiuser, userid=Optional(OAttr('apiuser'))):
499 user = Optional.extract(userid)
499 user = Optional.extract(userid)
500 # calls
500 # calls
501
501
502 """
502 """
503
503
504 def __init__(self, attr_name):
504 def __init__(self, attr_name):
505 self.attr_name = attr_name
505 self.attr_name = attr_name
506
506
507 def __repr__(self):
507 def __repr__(self):
508 return '<OptionalAttr:%s>' % self.attr_name
508 return '<OptionalAttr:%s>' % self.attr_name
509
509
510 def __call__(self):
510 def __call__(self):
511 return self
511 return self
512
512
513
513
514 # alias
514 # alias
515 OAttr = OptionalAttr
515 OAttr = OptionalAttr
516
516
517
517
518 class Optional(object):
518 class Optional(object):
519 """
519 """
520 Defines an optional parameter::
520 Defines an optional parameter::
521
521
522 param = param.getval() if isinstance(param, Optional) else param
522 param = param.getval() if isinstance(param, Optional) else param
523 param = param() if isinstance(param, Optional) else param
523 param = param() if isinstance(param, Optional) else param
524
524
525 is equivalent of::
525 is equivalent of::
526
526
527 param = Optional.extract(param)
527 param = Optional.extract(param)
528
528
529 """
529 """
530
530
531 def __init__(self, type_):
531 def __init__(self, type_):
532 self.type_ = type_
532 self.type_ = type_
533
533
534 def __repr__(self):
534 def __repr__(self):
535 return '<Optional:%s>' % self.type_.__repr__()
535 return '<Optional:%s>' % self.type_.__repr__()
536
536
537 def __call__(self):
537 def __call__(self):
538 return self.getval()
538 return self.getval()
539
539
540 def getval(self):
540 def getval(self):
541 """
541 """
542 returns value from this Optional instance
542 returns value from this Optional instance
543 """
543 """
544 if isinstance(self.type_, OAttr):
544 if isinstance(self.type_, OAttr):
545 # use params name
545 # use params name
546 return self.type_.attr_name
546 return self.type_.attr_name
547 return self.type_
547 return self.type_
548
548
549 @classmethod
549 @classmethod
550 def extract(cls, val):
550 def extract(cls, val):
551 """
551 """
552 Extracts value from Optional() instance
552 Extracts value from Optional() instance
553
553
554 :param val:
554 :param val:
555 :return: original value if it's not Optional instance else
555 :return: original value if it's not Optional instance else
556 value of instance
556 value of instance
557 """
557 """
558 if isinstance(val, cls):
558 if isinstance(val, cls):
559 return val.getval()
559 return val.getval()
560 return val
560 return val
561
561
562
562
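How Optional.extract behaves for the three kinds of values it can see:

    from kallithea.lib.utils2 import OAttr, Optional

    assert Optional.extract(Optional('default')) == 'default'          # unwraps the default
    assert Optional.extract('explicit') == 'explicit'                  # plain values pass through
    assert Optional.extract(Optional(OAttr('apiuser'))) == 'apiuser'   # OAttr yields its attribute name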
563 def urlreadable(s, _cleanstringsub=re.compile('[^-a-zA-Z0-9./]+').sub):
563 def urlreadable(s, _cleanstringsub=re.compile('[^-a-zA-Z0-9./]+').sub):
564 return _cleanstringsub('_', s).rstrip('_')
564 return _cleanstringsub('_', s).rstrip('_')
565
565
566
566
567 def recursive_replace(str_, replace=' '):
567 def recursive_replace(str_, replace=' '):
568 """
568 """
569 Recursively replaces multiple instances of the given character with just one
569 Recursively replaces multiple instances of the given character with just one
570
570
571 :param str_: given string
571 :param str_: given string
572 :param replace: char to find and replace multiple instances
572 :param replace: char to find and replace multiple instances
573
573
574 Examples::
574 Examples::
575 >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
575 >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
576 'Mighty-Mighty-Bo-sstones'
576 'Mighty-Mighty-Bo-sstones'
577 """
577 """
578
578
579 if str_.find(replace * 2) == -1:
579 if str_.find(replace * 2) == -1:
580 return str_
580 return str_
581 else:
581 else:
582 str_ = str_.replace(replace * 2, replace)
582 str_ = str_.replace(replace * 2, replace)
583 return recursive_replace(str_, replace)
583 return recursive_replace(str_, replace)
584
584
585
585
586 def repo_name_slug(value):
586 def repo_name_slug(value):
587 """
587 """
588 Return a slug of the repository name.
588 Return a slug of the repository name.
589 This function is called on each creation/modification
589 This function is called on each creation/modification
590 of a repository to prevent bad names in the repo
590 of a repository to prevent bad names in the repo
591 """
591 """
592
592
593 slug = remove_formatting(value)
593 slug = remove_formatting(value)
594 slug = strip_tags(slug)
594 slug = strip_tags(slug)
595
595
596 for c in r"""`?=[]\;'"<>,/~!@#$%^&*()+{}|: """:
596 for c in r"""`?=[]\;'"<>,/~!@#$%^&*()+{}|: """:
597 slug = slug.replace(c, '-')
597 slug = slug.replace(c, '-')
598 slug = recursive_replace(slug, '-')
598 slug = recursive_replace(slug, '-')
599 slug = collapse(slug, '-')
599 slug = collapse(slug, '-')
600 return slug
600 return slug
601
601
602
602
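Illustration of the slug generation above; the exact output depends on the webhelpers2 text helpers, so it is shown as an approximate expectation rather than a guarantee:

    from kallithea.lib.utils2 import repo_name_slug

    slug = repo_name_slug('My Repo! #1')
    assert ' ' not in slug and '!' not in slug and '#' not in slug  # punctuation collapsed to '-'
    print(slug)  # expected to be roughly 'My-Repo-1'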
603 def ask_ok(prompt, retries=4, complaint='Yes or no please!'):
603 def ask_ok(prompt, retries=4, complaint='Yes or no please!'):
604 while True:
604 while True:
605 ok = input(prompt)
605 ok = input(prompt)
606 if ok in ('y', 'ye', 'yes'):
606 if ok in ('y', 'ye', 'yes'):
607 return True
607 return True
608 if ok in ('n', 'no', 'nop', 'nope'):
608 if ok in ('n', 'no', 'nop', 'nope'):
609 return False
609 return False
610 retries = retries - 1
610 retries = retries - 1
611 if retries < 0:
611 if retries < 0:
612 raise IOError
612 raise IOError
613 print(complaint)
613 print(complaint)
@@ -1,1057 +1,1057 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """
2 """
3 vcs.backends.base
3 vcs.backends.base
4 ~~~~~~~~~~~~~~~~~
4 ~~~~~~~~~~~~~~~~~
5
5
6 Base for all available scm backends
6 Base for all available scm backends
7
7
8 :created_on: Apr 8, 2010
8 :created_on: Apr 8, 2010
9 :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
9 :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
10 """
10 """
11
11
12 import datetime
12 import datetime
13 import itertools
13 import itertools
14
14
15 from kallithea.lib.vcs.conf import settings
15 from kallithea.lib.vcs.conf import settings
16 from kallithea.lib.vcs.exceptions import (
16 from kallithea.lib.vcs.exceptions import (
17 ChangesetError, EmptyRepositoryError, NodeAlreadyAddedError, NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError, NodeDoesNotExistError, NodeNotChangedError, RepositoryError)
17 ChangesetError, EmptyRepositoryError, NodeAlreadyAddedError, NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError, NodeDoesNotExistError, NodeNotChangedError, RepositoryError)
18 from kallithea.lib.vcs.utils import author_email, author_name
18 from kallithea.lib.vcs.utils import author_email, author_name
19 from kallithea.lib.vcs.utils.helpers import get_dict_for_attrs
19 from kallithea.lib.vcs.utils.helpers import get_dict_for_attrs
20 from kallithea.lib.vcs.utils.lazy import LazyProperty
20 from kallithea.lib.vcs.utils.lazy import LazyProperty
21
21
22
22
23 class BaseRepository(object):
23 class BaseRepository(object):
24 """
24 """
25 Base Repository for final backends
25 Base Repository for final backends
26
26
27 **Attributes**
27 **Attributes**
28
28
29 ``DEFAULT_BRANCH_NAME``
29 ``DEFAULT_BRANCH_NAME``
30 name of default branch (e.g. "trunk" for svn, "master" for git, etc.)
30 name of default branch (e.g. "trunk" for svn, "master" for git, etc.)
31
31
32 ``scm``
32 ``scm``
33 alias of scm, i.e. *git* or *hg*
33 alias of scm, i.e. *git* or *hg*
34
34
35 ``repo``
35 ``repo``
36 object from external api
36 object from external api
37
37
38 ``revisions``
38 ``revisions``
39 list of all available revisions' ids, in ascending order
39 list of all available revisions' ids, in ascending order
40
40
41 ``changesets``
41 ``changesets``
42 storage dict caching returned changesets
42 storage dict caching returned changesets
43
43
44 ``path``
44 ``path``
45 absolute path to the repository
45 absolute path to the repository
46
46
47 ``branches``
47 ``branches``
48 branches as list of changesets
48 branches as list of changesets
49
49
50 ``tags``
50 ``tags``
51 tags as list of changesets
51 tags as list of changesets
52 """
52 """
53 scm = None
53 scm = None
54 DEFAULT_BRANCH_NAME = None
54 DEFAULT_BRANCH_NAME = None
55 EMPTY_CHANGESET = '0' * 40
55 EMPTY_CHANGESET = '0' * 40
56
56
57 def __init__(self, repo_path, create=False, **kwargs):
57 def __init__(self, repo_path, create=False, **kwargs):
58 """
58 """
59 Initializes repository. Raises RepositoryError if repository could
59 Initializes repository. Raises RepositoryError if repository could
60 not be found at the given ``repo_path``, or if the directory at ``repo_path``
60 not be found at the given ``repo_path``, or if the directory at ``repo_path``
61 exists and ``create`` is set to True.
61 exists and ``create`` is set to True.
62
62
63 :param repo_path: local path of the repository
63 :param repo_path: local path of the repository
64 :param create=False: if set to True, would try to create repository.
64 :param create=False: if set to True, would try to create repository.
65 :param src_url=None: if set, should be proper url from which repository
65 :param src_url=None: if set, should be proper url from which repository
66 would be cloned; requires ``create`` parameter to be set to True -
66 would be cloned; requires ``create`` parameter to be set to True -
67 raises RepositoryError if src_url is set and create evaluates to
67 raises RepositoryError if src_url is set and create evaluates to
68 False
68 False
69 """
69 """
70 raise NotImplementedError
70 raise NotImplementedError
71
71
72 def __str__(self):
72 def __str__(self):
73 return '<%s at %s>' % (self.__class__.__name__, self.path)
73 return '<%s at %s>' % (self.__class__.__name__, self.path)
74
74
75 def __repr__(self):
75 def __repr__(self):
76 return self.__str__()
76 return self.__str__()
77
77
78 def __len__(self):
78 def __len__(self):
79 return self.count()
79 return self.count()
80
80
81 def __eq__(self, other):
81 def __eq__(self, other):
82 same_instance = isinstance(other, self.__class__)
82 same_instance = isinstance(other, self.__class__)
83 return same_instance and getattr(other, 'path', None) == self.path
83 return same_instance and getattr(other, 'path', None) == self.path
84
84
85 def __ne__(self, other):
85 def __ne__(self, other):
86 return not self.__eq__(other)
86 return not self.__eq__(other)
87
87
88 @LazyProperty
88 @LazyProperty
89 def alias(self):
89 def alias(self):
90 for k, v in settings.BACKENDS.items():
90 for k, v in settings.BACKENDS.items():
91 if v.split('.')[-1] == str(self.__class__.__name__):
91 if v.split('.')[-1] == str(self.__class__.__name__):
92 return k
92 return k
93
93
94 @LazyProperty
94 @LazyProperty
95 def name(self):
95 def name(self):
96 """
96 """
97 Return repository name (without group name)
97 Return repository name (without group name)
98 """
98 """
99 raise NotImplementedError
99 raise NotImplementedError
100
100
101 @LazyProperty
101 @LazyProperty
102 def owner(self):
102 def owner(self):
103 raise NotImplementedError
103 raise NotImplementedError
104
104
105 @LazyProperty
105 @LazyProperty
106 def description(self):
106 def description(self):
107 raise NotImplementedError
107 raise NotImplementedError
108
108
109 @LazyProperty
109 @LazyProperty
110 def size(self):
110 def size(self):
111 """
111 """
112 Returns combined size in bytes for all repository files
112 Returns combined size in bytes for all repository files
113 """
113 """
114
114
115 size = 0
115 size = 0
116 try:
116 try:
117 tip = self.get_changeset()
117 tip = self.get_changeset()
118 for topnode, dirs, files in tip.walk('/'):
118 for topnode, dirs, files in tip.walk('/'):
119 for f in files:
119 for f in files:
120 size += tip.get_file_size(f.path)
120 size += tip.get_file_size(f.path)
121
121
122 except RepositoryError as e:
122 except RepositoryError as e:
123 pass
123 pass
124 return size
124 return size
125
125
126 def is_valid(self):
126 def is_valid(self):
127 """
127 """
128 Validates repository.
128 Validates repository.
129 """
129 """
130 raise NotImplementedError
130 raise NotImplementedError
131
131
132 def is_empty(self):
132 def is_empty(self):
133 return self._empty
133 return self._empty
134
134
135 #==========================================================================
135 #==========================================================================
136 # CHANGESETS
136 # CHANGESETS
137 #==========================================================================
137 #==========================================================================
138
138
139 def get_changeset(self, revision=None):
139 def get_changeset(self, revision=None):
140 """
140 """
141 Returns instance of ``Changeset`` class. If ``revision`` is None, most
141 Returns instance of ``Changeset`` class. If ``revision`` is None, most
142 recent changeset is returned.
142 recent changeset is returned.
143
143
144 :raises ``EmptyRepositoryError``: if there are no revisions
144 :raises ``EmptyRepositoryError``: if there are no revisions
145 """
145 """
146 raise NotImplementedError
146 raise NotImplementedError
147
147
148 def __iter__(self):
148 def __iter__(self):
149 """
149 """
150 Allows Repository objects to be iterated.
150 Allows Repository objects to be iterated.
151
151
152 *Requires* implementation of ``__getitem__`` method.
152 *Requires* implementation of ``__getitem__`` method.
153 """
153 """
154 for revision in self.revisions:
154 for revision in self.revisions:
155 yield self.get_changeset(revision)
155 yield self.get_changeset(revision)
156
156
157 def get_changesets(self, start=None, end=None, start_date=None,
157 def get_changesets(self, start=None, end=None, start_date=None,
158 end_date=None, branch_name=None, reverse=False, max_revisions=None):
158 end_date=None, branch_name=None, reverse=False, max_revisions=None):
159 """
159 """
160 Returns iterator of ``BaseChangeset`` objects from start to end,
160 Returns iterator of ``BaseChangeset`` objects from start to end,
161 both inclusive.
161 both inclusive.
162
162
163 :param start: None or str
163 :param start: None or str
164 :param end: None or str
164 :param end: None or str
165 :param start_date:
165 :param start_date:
166 :param end_date:
166 :param end_date:
167 :param branch_name:
167 :param branch_name:
168 :param reversed:
168 :param reverse:
168 :param reverse:
169 """
170 raise NotImplementedError
170 raise NotImplementedError
171
171
172 def __getitem__(self, key):
172 def __getitem__(self, key):
173 if isinstance(key, slice):
173 if isinstance(key, slice):
174 return (self.get_changeset(rev) for rev in self.revisions[key])
174 return (self.get_changeset(rev) for rev in self.revisions[key])
175 return self.get_changeset(key)
175 return self.get_changeset(key)
176
176
177 def count(self):
177 def count(self):
178 return len(self.revisions)
178 return len(self.revisions)
179
179
180 def tag(self, name, user, revision=None, message=None, date=None, **opts):
180 def tag(self, name, user, revision=None, message=None, date=None, **opts):
181 """
181 """
182 Creates and returns a tag for the given ``revision``.
182 Creates and returns a tag for the given ``revision``.
183
183
184 :param name: name for new tag
184 :param name: name for new tag
185 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
185 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
186 :param revision: changeset id for which new tag would be created
186 :param revision: changeset id for which new tag would be created
187 :param message: message of the tag's commit
187 :param message: message of the tag's commit
188 :param date: date of tag's commit
188 :param date: date of tag's commit
189
189
190 :raises TagAlreadyExistError: if tag with same name already exists
190 :raises TagAlreadyExistError: if tag with same name already exists
191 """
191 """
192 raise NotImplementedError
192 raise NotImplementedError
193
193
194 def remove_tag(self, name, user, message=None, date=None):
194 def remove_tag(self, name, user, message=None, date=None):
195 """
195 """
196 Removes tag with the given ``name``.
196 Removes tag with the given ``name``.
197
197
198 :param name: name of the tag to be removed
198 :param name: name of the tag to be removed
199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
200 :param message: message of the tag's removal commit
200 :param message: message of the tag's removal commit
201 :param date: date of tag's removal commit
201 :param date: date of tag's removal commit
202
202
203 :raises TagDoesNotExistError: if tag with given name does not exist
203 :raises TagDoesNotExistError: if tag with given name does not exist
204 """
204 """
205 raise NotImplementedError
205 raise NotImplementedError
206
206
207 def get_diff(self, rev1, rev2, path=None, ignore_whitespace=False,
207 def get_diff(self, rev1, rev2, path=None, ignore_whitespace=False,
208 context=3):
208 context=3):
209 """
209 """
210 Returns (git like) *diff*, as plain text. Shows changes introduced by
210 Returns (git like) *diff*, as plain text. Shows changes introduced by
211 ``rev2`` since ``rev1``.
211 ``rev2`` since ``rev1``.
212
212
213 :param rev1: Entry point from which diff is shown. Can be
213 :param rev1: Entry point from which diff is shown. Can be
214 ``self.EMPTY_CHANGESET`` - in this case, patch showing all
214 ``self.EMPTY_CHANGESET`` - in this case, patch showing all
215 the changes since empty state of the repository until ``rev2``
215 the changes since empty state of the repository until ``rev2``
216 :param rev2: Until which revision changes should be shown.
216 :param rev2: Until which revision changes should be shown.
217 :param ignore_whitespace: If set to ``True``, would not show whitespace
217 :param ignore_whitespace: If set to ``True``, would not show whitespace
218 changes. Defaults to ``False``.
218 changes. Defaults to ``False``.
219 :param context: How many lines before/after changed lines should be
219 :param context: How many lines before/after changed lines should be
220 shown. Defaults to ``3``.
220 shown. Defaults to ``3``.
221 """
221 """
222 raise NotImplementedError
222 raise NotImplementedError
223
223
224 # ========== #
224 # ========== #
225 # COMMIT API #
225 # COMMIT API #
226 # ========== #
226 # ========== #
227
227
228 @LazyProperty
228 @LazyProperty
229 def in_memory_changeset(self):
229 def in_memory_changeset(self):
230 """
230 """
231 Returns ``InMemoryChangeset`` object for this repository.
231 Returns ``InMemoryChangeset`` object for this repository.
232 """
232 """
233 raise NotImplementedError
233 raise NotImplementedError
234
234
235 def add(self, filenode, **kwargs):
235 def add(self, filenode, **kwargs):
236 """
236 """
237 Commit api function that will add given ``FileNode`` into this
237 Commit api function that will add given ``FileNode`` into this
238 repository.
238 repository.
239
239
240 :raises ``NodeAlreadyExistsError``: if there is a file with same path
240 :raises ``NodeAlreadyExistsError``: if there is a file with same path
241 already in repository
241 already in repository
242 :raises ``NodeAlreadyAddedError``: if given node is already marked as
242 :raises ``NodeAlreadyAddedError``: if given node is already marked as
243 *added*
243 *added*
244 """
244 """
245 raise NotImplementedError
245 raise NotImplementedError
246
246
247 def remove(self, filenode, **kwargs):
247 def remove(self, filenode, **kwargs):
248 """
248 """
249 Commit api function that will remove given ``FileNode`` from this
249 Commit api function that will remove given ``FileNode`` from this
250 repository.
250 repository.
251
251
252 :raises ``EmptyRepositoryError``: if there are no changesets yet
252 :raises ``EmptyRepositoryError``: if there are no changesets yet
253 :raises ``NodeDoesNotExistError``: if there is no file with given path
253 :raises ``NodeDoesNotExistError``: if there is no file with given path
254 """
254 """
255 raise NotImplementedError
255 raise NotImplementedError
256
256
257 def commit(self, message, **kwargs):
257 def commit(self, message, **kwargs):
258 """
258 """
259 Persists current changes made on this repository and returns newly
259 Persists current changes made on this repository and returns newly
260 created changeset.
260 created changeset.
261
261
262 :raises ``NothingChangedError``: if no changes have been made
262 :raises ``NothingChangedError``: if no changes have been made
263 """
263 """
264 raise NotImplementedError
264 raise NotImplementedError
265
265
266 def get_state(self):
266 def get_state(self):
267 """
267 """
268 Returns dictionary with ``added``, ``changed`` and ``removed`` lists
268 Returns dictionary with ``added``, ``changed`` and ``removed`` lists
269 containing ``FileNode`` objects.
269 containing ``FileNode`` objects.
270 """
270 """
271 raise NotImplementedError
271 raise NotImplementedError
272
272
273 def get_config_value(self, section, name, config_file=None):
273 def get_config_value(self, section, name, config_file=None):
274 """
274 """
275 Returns configuration value for a given [``section``] and ``name``.
275 Returns configuration value for a given [``section``] and ``name``.
276
276
277 :param section: Section we want to retrieve value from
277 :param section: Section we want to retrieve value from
278 :param name: Name of configuration we want to retrieve
278 :param name: Name of configuration we want to retrieve
279 :param config_file: A path to file which should be used to retrieve
279 :param config_file: A path to file which should be used to retrieve
280 configuration from (might also be a list of file paths)
280 configuration from (might also be a list of file paths)
281 """
281 """
282 raise NotImplementedError
282 raise NotImplementedError
283
283
284 def get_user_name(self, config_file=None):
284 def get_user_name(self, config_file=None):
285 """
285 """
286 Returns user's name from global configuration file.
286 Returns user's name from global configuration file.
287
287
288 :param config_file: A path to file which should be used to retrieve
288 :param config_file: A path to file which should be used to retrieve
289 configuration from (might also be a list of file paths)
289 configuration from (might also be a list of file paths)
290 """
290 """
291 raise NotImplementedError
291 raise NotImplementedError
292
292
293 def get_user_email(self, config_file=None):
293 def get_user_email(self, config_file=None):
294 """
294 """
295 Returns user's email from global configuration file.
295 Returns user's email from global configuration file.
296
296
297 :param config_file: A path to file which should be used to retrieve
297 :param config_file: A path to file which should be used to retrieve
298 configuration from (might also be a list of file paths)
298 configuration from (might also be a list of file paths)
299 """
299 """
300 raise NotImplementedError
300 raise NotImplementedError
301
301
302 # =========== #
302 # =========== #
303 # WORKDIR API #
303 # WORKDIR API #
304 # =========== #
304 # =========== #
305
305
306 @LazyProperty
306 @LazyProperty
307 def workdir(self):
307 def workdir(self):
308 """
308 """
309 Returns ``Workdir`` instance for this repository.
309 Returns ``Workdir`` instance for this repository.
310 """
310 """
311 raise NotImplementedError
311 raise NotImplementedError
312
312
313
313
314 class BaseChangeset(object):
314 class BaseChangeset(object):
315 """
315 """
316 Each backend should implement its changeset representation.
316 Each backend should implement its changeset representation.
317
317
318 **Attributes**
318 **Attributes**
319
319
320 ``repository``
320 ``repository``
321 repository object within which changeset exists
321 repository object within which changeset exists
322
322
323 ``raw_id``
323 ``raw_id``
324 raw changeset representation (i.e. full 40 length sha for git
324 raw changeset representation (i.e. full 40 length sha for git
325 backend)
325 backend)
326
326
327 ``short_id``
327 ``short_id``
328 shortened (if applicable) version of ``raw_id``; it would be a simple
328 shortened (if applicable) version of ``raw_id``; it would be a simple
329 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
329 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
330 as ``raw_id`` for subversion
330 as ``raw_id`` for subversion
331
331
332 ``revision``
332 ``revision``
333 revision number as integer
333 revision number as integer
334
334
335 ``files``
335 ``files``
336 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
336 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
337
337
338 ``dirs``
338 ``dirs``
339 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
339 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
340
340
341 ``nodes``
341 ``nodes``
342 combined list of ``Node`` objects
342 combined list of ``Node`` objects
343
343
344 ``author``
344 ``author``
345 author of the changeset, as unicode
345 author of the changeset, as str
346
346
347 ``message``
347 ``message``
348 message of the changeset, as unicode
348 message of the changeset, as str
349
349
350 ``parents``
350 ``parents``
351 list of parent changesets
351 list of parent changesets
352
352
353 ``last``
353 ``last``
354 ``True`` if this is last changeset in repository, ``False``
354 ``True`` if this is last changeset in repository, ``False``
355 otherwise; trying to access this attribute while there is no
355 otherwise; trying to access this attribute while there is no
356 changesets would raise ``EmptyRepositoryError``
356 changesets would raise ``EmptyRepositoryError``
357 """
357 """
358 def __str__(self):
358 def __str__(self):
359 return '<%s at %s:%s>' % (self.__class__.__name__, self.revision,
359 return '<%s at %s:%s>' % (self.__class__.__name__, self.revision,
360 self.short_id)
360 self.short_id)
361
361
362 def __repr__(self):
362 def __repr__(self):
363 return self.__str__()
363 return self.__str__()
364
364
365 def __eq__(self, other):
365 def __eq__(self, other):
366 if type(self) is not type(other):
366 if type(self) is not type(other):
367 return False
367 return False
368 return self.raw_id == other.raw_id
368 return self.raw_id == other.raw_id
369
369
370 def __json__(self, with_file_list=False):
370 def __json__(self, with_file_list=False):
371 if with_file_list:
371 if with_file_list:
372 return dict(
372 return dict(
373 short_id=self.short_id,
373 short_id=self.short_id,
374 raw_id=self.raw_id,
374 raw_id=self.raw_id,
375 revision=self.revision,
375 revision=self.revision,
376 message=self.message,
376 message=self.message,
377 date=self.date,
377 date=self.date,
378 author=self.author,
378 author=self.author,
379 added=[el.path for el in self.added],
379 added=[el.path for el in self.added],
380 changed=[el.path for el in self.changed],
380 changed=[el.path for el in self.changed],
381 removed=[el.path for el in self.removed],
381 removed=[el.path for el in self.removed],
382 )
382 )
383 else:
383 else:
384 return dict(
384 return dict(
385 short_id=self.short_id,
385 short_id=self.short_id,
386 raw_id=self.raw_id,
386 raw_id=self.raw_id,
387 revision=self.revision,
387 revision=self.revision,
388 message=self.message,
388 message=self.message,
389 date=self.date,
389 date=self.date,
390 author=self.author,
390 author=self.author,
391 )
391 )
392
392
393 @LazyProperty
393 @LazyProperty
394 def last(self):
394 def last(self):
395 if self.repository is None:
395 if self.repository is None:
396 raise ChangesetError("Cannot check if it's most recent revision")
396 raise ChangesetError("Cannot check if it's most recent revision")
397 return self.raw_id == self.repository.revisions[-1]
397 return self.raw_id == self.repository.revisions[-1]
398
398
399 @LazyProperty
399 @LazyProperty
400 def parents(self):
400 def parents(self):
401 """
401 """
402 Returns list of parent changesets.
402 Returns list of parent changesets.
403 """
403 """
404 raise NotImplementedError
404 raise NotImplementedError
405
405
406 @LazyProperty
406 @LazyProperty
407 def children(self):
407 def children(self):
408 """
408 """
409 Returns list of children changesets.
409 Returns list of children changesets.
410 """
410 """
411 raise NotImplementedError
411 raise NotImplementedError
412
412
413 @LazyProperty
413 @LazyProperty
414 def raw_id(self):
414 def raw_id(self):
415 """
415 """
416 Returns raw string identifying this changeset.
416 Returns raw string identifying this changeset.
417 """
417 """
418 raise NotImplementedError
418 raise NotImplementedError
419
419
420 @LazyProperty
420 @LazyProperty
421 def short_id(self):
421 def short_id(self):
422 """
422 """
423 Returns shortened version of ``raw_id`` attribute, as string,
423 Returns shortened version of ``raw_id`` attribute, as string,
424 identifying this changeset, useful for web representation.
424 identifying this changeset, useful for web representation.
425 """
425 """
426 raise NotImplementedError
426 raise NotImplementedError
427
427
428 @LazyProperty
428 @LazyProperty
429 def revision(self):
429 def revision(self):
430 """
430 """
431 Returns integer identifying this changeset.
431 Returns integer identifying this changeset.
432
432
433 """
433 """
434 raise NotImplementedError
434 raise NotImplementedError
435
435
436 @LazyProperty
436 @LazyProperty
437 def committer(self):
437 def committer(self):
438 """
438 """
439 Returns Committer for given commit
439 Returns Committer for given commit
440 """
440 """
441
441
442 raise NotImplementedError
442 raise NotImplementedError
443
443
444 @LazyProperty
444 @LazyProperty
445 def committer_name(self):
445 def committer_name(self):
446 """
446 """
447 Returns Author name for given commit
447 Returns Author name for given commit
448 """
448 """
449
449
450 return author_name(self.committer)
450 return author_name(self.committer)
451
451
452 @LazyProperty
452 @LazyProperty
453 def committer_email(self):
453 def committer_email(self):
454 """
454 """
455 Returns Author email address for given commit
455 Returns Author email address for given commit
456 """
456 """
457
457
458 return author_email(self.committer)
458 return author_email(self.committer)
459
459
460 @LazyProperty
460 @LazyProperty
461 def author(self):
461 def author(self):
462 """
462 """
463 Returns Author for given commit
463 Returns Author for given commit
464 """
464 """
465
465
466 raise NotImplementedError
466 raise NotImplementedError
467
467
468 @LazyProperty
468 @LazyProperty
469 def author_name(self):
469 def author_name(self):
470 """
470 """
471 Returns Author name for given commit
471 Returns Author name for given commit
472 """
472 """
473
473
474 return author_name(self.author)
474 return author_name(self.author)
475
475
476 @LazyProperty
476 @LazyProperty
477 def author_email(self):
477 def author_email(self):
478 """
478 """
479 Returns Author email address for given commit
479 Returns Author email address for given commit
480 """
480 """
481
481
482 return author_email(self.author)
482 return author_email(self.author)
483
483
484 def get_file_mode(self, path):
484 def get_file_mode(self, path):
485 """
485 """
486 Returns stat mode of the file at the given ``path``.
486 Returns stat mode of the file at the given ``path``.
487 """
487 """
488 raise NotImplementedError
488 raise NotImplementedError
489
489
490 def get_file_content(self, path):
490 def get_file_content(self, path):
491 """
491 """
492 Returns content of the file at the given ``path``.
492 Returns content of the file at the given ``path``.
493 """
493 """
494 raise NotImplementedError
494 raise NotImplementedError
495
495
496 def get_file_size(self, path):
496 def get_file_size(self, path):
497 """
497 """
498 Returns size of the file at the given ``path``.
498 Returns size of the file at the given ``path``.
499 """
499 """
500 raise NotImplementedError
500 raise NotImplementedError
501
501
502 def get_file_changeset(self, path):
502 def get_file_changeset(self, path):
503 """
503 """
504 Returns last commit of the file at the given ``path``.
504 Returns last commit of the file at the given ``path``.
505 """
505 """
506 raise NotImplementedError
506 raise NotImplementedError
507
507
508 def get_file_history(self, path):
508 def get_file_history(self, path):
509 """
509 """
510 Returns history of file as reversed list of ``Changeset`` objects for
510 Returns history of file as reversed list of ``Changeset`` objects for
511 which file at given ``path`` has been modified.
511 which file at given ``path`` has been modified.
512 """
512 """
513 raise NotImplementedError
513 raise NotImplementedError
514
514
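# Hedged sketch combining the file-level accessors documented above on a single
# path; `cs` stands for a changeset obtained from a concrete backend, and the
# path 'setup.py' is purely illustrative.
content = cs.get_file_content('setup.py')        # file content at this changeset
size = cs.get_file_size('setup.py')              # size in bytes
last_change = cs.get_file_changeset('setup.py')  # last commit touching the file
history = cs.get_file_history('setup.py')        # reversed list of changesets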
515 def get_nodes(self, path):
515 def get_nodes(self, path):
516 """
516 """
517 Returns combined ``DirNode`` and ``FileNode`` objects list representing
517 Returns combined ``DirNode`` and ``FileNode`` objects list representing
518 state of changeset at the given ``path``.
518 state of changeset at the given ``path``.
519
519
520 :raises ``ChangesetError``: if node at the given ``path`` is not
520 :raises ``ChangesetError``: if node at the given ``path`` is not
521 instance of ``DirNode``
521 instance of ``DirNode``
522 """
522 """
523 raise NotImplementedError
523 raise NotImplementedError
524
524
525 def get_node(self, path):
525 def get_node(self, path):
526 """
526 """
527 Returns ``Node`` object from the given ``path``.
527 Returns ``Node`` object from the given ``path``.
528
528
529 :raises ``NodeDoesNotExistError``: if there is no node at the given
529 :raises ``NodeDoesNotExistError``: if there is no node at the given
530 ``path``
530 ``path``
531 """
531 """
532 raise NotImplementedError
532 raise NotImplementedError
533
533
534 def fill_archive(self, stream=None, kind='tgz', prefix=None):
534 def fill_archive(self, stream=None, kind='tgz', prefix=None):
535 """
535 """
536 Fills up given stream.
536 Fills up given stream.
537
537
538 :param stream: file-like object.
538 :param stream: file-like object.
539 :param kind: one of following: ``zip``, ``tar``, ``tgz``
539 :param kind: one of following: ``zip``, ``tar``, ``tgz``
540 or ``tbz2``. Default: ``tgz``.
540 or ``tbz2``. Default: ``tgz``.
541 :param prefix: name of root directory in archive.
541 :param prefix: name of root directory in archive.
542 Default is repository name and changeset's raw_id joined with dash.
542 Default is repository name and changeset's raw_id joined with dash.
543
543
544 repo-tip.<kind>
544 repo-tip.<kind>
545 """
545 """
546
546
547 raise NotImplementedError
547 raise NotImplementedError
548
548
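# A minimal usage sketch of fill_archive(), assuming `cs` is a changeset from a
# concrete backend (e.g. repo.get_changeset()); the output filename is illustrative.
with open('repo-tip.zip', 'wb') as out:
    cs.fill_archive(stream=out, kind='zip', prefix='repo-tip')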
549 def get_chunked_archive(self, **kwargs):
549 def get_chunked_archive(self, **kwargs):
550 """
550 """
551 Returns iterable archive. Tiny wrapper around ``fill_archive`` method.
551 Returns iterable archive. Tiny wrapper around ``fill_archive`` method.
552
552
553 :param chunk_size: extra parameter which controls size of returned
553 :param chunk_size: extra parameter which controls size of returned
554 chunks. Default: 8k.
554 chunks. Default: 8k.
555 """
555 """
556
556
557 chunk_size = kwargs.pop('chunk_size', 8192)
557 chunk_size = kwargs.pop('chunk_size', 8192)
558 stream = kwargs.get('stream')
558 stream = kwargs.get('stream')
559 self.fill_archive(**kwargs)
559 self.fill_archive(**kwargs)
560 while True:
560 while True:
561 data = stream.read(chunk_size)
561 data = stream.read(chunk_size)
562 if not data:
562 if not data:
563 break
563 break
564 yield data
564 yield data
565
565
566 @LazyProperty
566 @LazyProperty
567 def root(self):
567 def root(self):
568 """
568 """
569 Returns ``RootNode`` object for this changeset.
569 Returns ``RootNode`` object for this changeset.
570 """
570 """
571 return self.get_node('')
571 return self.get_node('')
572
572
573 def next(self, branch=None):
573 def next(self, branch=None):
574 """
574 """
575 Returns the next changeset after the current one; if ``branch`` is given, it will
575 Returns the next changeset after the current one; if ``branch`` is given, it will
576 return the next changeset belonging to that branch
576 return the next changeset belonging to that branch
577
577
578 :param branch: show changesets within the given named branch
578 :param branch: show changesets within the given named branch
579 """
579 """
580 raise NotImplementedError
580 raise NotImplementedError
581
581
582 def prev(self, branch=None):
582 def prev(self, branch=None):
583 """
583 """
584 Returns the previous changeset before the current one; if ``branch`` is given,
584 Returns the previous changeset before the current one; if ``branch`` is given,
585 it will return the previous changeset belonging to that branch
585 it will return the previous changeset belonging to that branch
586
586
587 :param branch: show changesets within the given named branch
587 :param branch: show changesets within the given named branch
588 """
588 """
589 raise NotImplementedError
589 raise NotImplementedError
590
590
591 @LazyProperty
591 @LazyProperty
592 def added(self):
592 def added(self):
593 """
593 """
594 Returns list of added ``FileNode`` objects.
594 Returns list of added ``FileNode`` objects.
595 """
595 """
596 raise NotImplementedError
596 raise NotImplementedError
597
597
598 @LazyProperty
598 @LazyProperty
599 def changed(self):
599 def changed(self):
600 """
600 """
601 Returns list of modified ``FileNode`` objects.
601 Returns list of modified ``FileNode`` objects.
602 """
602 """
603 raise NotImplementedError
603 raise NotImplementedError
604
604
605 @LazyProperty
605 @LazyProperty
606 def removed(self):
606 def removed(self):
607 """
607 """
608 Returns list of removed ``FileNode`` objects.
608 Returns list of removed ``FileNode`` objects.
609 """
609 """
610 raise NotImplementedError
610 raise NotImplementedError
611
611
612 @LazyProperty
612 @LazyProperty
613 def size(self):
613 def size(self):
614 """
614 """
615 Returns total number of bytes from contents of all filenodes.
615 Returns total number of bytes from contents of all filenodes.
616 """
616 """
617 return sum((node.size for node in self.get_filenodes_generator()))
617 return sum((node.size for node in self.get_filenodes_generator()))
618
618
619 def walk(self, topurl=''):
619 def walk(self, topurl=''):
620 """
620 """
621 Similar to the os.walk method. Instead of the filesystem, it walks through
621 Similar to the os.walk method. Instead of the filesystem, it walks through
622 the changeset starting at the given ``topurl``. Returns a generator of tuples
622 the changeset starting at the given ``topurl``. Returns a generator of tuples
623 (topnode, dirnodes, filenodes).
623 (topnode, dirnodes, filenodes).
624 """
624 """
625 topnode = self.get_node(topurl)
625 topnode = self.get_node(topurl)
626 yield (topnode, topnode.dirs, topnode.files)
626 yield (topnode, topnode.dirs, topnode.files)
627 for dirnode in topnode.dirs:
627 for dirnode in topnode.dirs:
628 for tup in self.walk(dirnode.path):
628 for tup in self.walk(dirnode.path):
629 yield tup
629 yield tup
630
630
631 def get_filenodes_generator(self):
631 def get_filenodes_generator(self):
632 """
632 """
633 Returns generator that yields *all* file nodes.
633 Returns generator that yields *all* file nodes.
634 """
634 """
635 for topnode, dirs, files in self.walk():
635 for topnode, dirs, files in self.walk():
636 for node in files:
636 for node in files:
637 yield node
637 yield node
638
638
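# Hedged sketch of walking a changeset tree, mirroring os.walk usage as
# documented above; `cs` is assumed to be a changeset from a concrete backend.
for topnode, dirnodes, filenodes in cs.walk(''):
    for filenode in filenodes:
        print(filenode.path, cs.get_file_size(filenode.path))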
639 def as_dict(self):
639 def as_dict(self):
640 """
640 """
641 Returns dictionary with changeset's attributes and their values.
641 Returns dictionary with changeset's attributes and their values.
642 """
642 """
643 data = get_dict_for_attrs(self, ['raw_id', 'short_id',
643 data = get_dict_for_attrs(self, ['raw_id', 'short_id',
644 'revision', 'date', 'message'])
644 'revision', 'date', 'message'])
645 data['author'] = {'name': self.author_name, 'email': self.author_email}
645 data['author'] = {'name': self.author_name, 'email': self.author_email}
646 data['added'] = [node.path for node in self.added]
646 data['added'] = [node.path for node in self.added]
647 data['changed'] = [node.path for node in self.changed]
647 data['changed'] = [node.path for node in self.changed]
648 data['removed'] = [node.path for node in self.removed]
648 data['removed'] = [node.path for node in self.removed]
649 return data
649 return data
650
650
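# Illustrative shape of the dictionary returned by as_dict(); only the keys
# follow the code above, the values shown here are made up.
example = {
    'raw_id': '9' * 40,
    'short_id': '9' * 12,
    'revision': 7,
    'date': None,  # in practice a datetime instance
    'message': 'fix typo in docs',
    'author': {'name': 'Joe Doe', 'email': 'joe.doe@example.com'},
    'added': ['docs/index.rst'],
    'changed': ['setup.py'],
    'removed': [],
}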
651 @LazyProperty
651 @LazyProperty
652 def closesbranch(self):
652 def closesbranch(self):
653 return False
653 return False
654
654
655 @LazyProperty
655 @LazyProperty
656 def obsolete(self):
656 def obsolete(self):
657 return False
657 return False
658
658
659 @LazyProperty
659 @LazyProperty
660 def bumped(self):
660 def bumped(self):
661 return False
661 return False
662
662
663 @LazyProperty
663 @LazyProperty
664 def divergent(self):
664 def divergent(self):
665 return False
665 return False
666
666
667 @LazyProperty
667 @LazyProperty
668 def extinct(self):
668 def extinct(self):
669 return False
669 return False
670
670
671 @LazyProperty
671 @LazyProperty
672 def unstable(self):
672 def unstable(self):
673 return False
673 return False
674
674
675 @LazyProperty
675 @LazyProperty
676 def phase(self):
676 def phase(self):
677 return ''
677 return ''
678
678
679
679
680 class BaseWorkdir(object):
680 class BaseWorkdir(object):
681 """
681 """
682 Working directory representation of single repository.
682 Working directory representation of single repository.
683
683
684 :attribute: repository: repository object of working directory
684 :attribute: repository: repository object of working directory
685 """
685 """
686
686
687 def __init__(self, repository):
687 def __init__(self, repository):
688 self.repository = repository
688 self.repository = repository
689
689
690 def get_branch(self):
690 def get_branch(self):
691 """
691 """
692 Returns name of current branch.
692 Returns name of current branch.
693 """
693 """
694 raise NotImplementedError
694 raise NotImplementedError
695
695
696 def get_changeset(self):
696 def get_changeset(self):
697 """
697 """
698 Returns current changeset.
698 Returns current changeset.
699 """
699 """
700 raise NotImplementedError
700 raise NotImplementedError
701
701
702 def get_added(self):
702 def get_added(self):
703 """
703 """
704 Returns list of ``FileNode`` objects marked as *new* in working
704 Returns list of ``FileNode`` objects marked as *new* in working
705 directory.
705 directory.
706 """
706 """
707 raise NotImplementedError
707 raise NotImplementedError
708
708
709 def get_changed(self):
709 def get_changed(self):
710 """
710 """
711 Returns list of ``FileNode`` objects *changed* in working directory.
711 Returns list of ``FileNode`` objects *changed* in working directory.
712 """
712 """
713 raise NotImplementedError
713 raise NotImplementedError
714
714
715 def get_removed(self):
715 def get_removed(self):
716 """
716 """
717 Returns list of ``RemovedFileNode`` objects marked as *removed* in
717 Returns list of ``RemovedFileNode`` objects marked as *removed* in
718 working directory.
718 working directory.
719 """
719 """
720 raise NotImplementedError
720 raise NotImplementedError
721
721
722 def get_untracked(self):
722 def get_untracked(self):
723 """
723 """
724 Returns list of ``FileNode`` objects which are present in the working
724 Returns list of ``FileNode`` objects which are present in the working
725 directory but are not tracked by the repository.
725 directory but are not tracked by the repository.
726 """
726 """
727 raise NotImplementedError
727 raise NotImplementedError
728
728
729 def get_status(self):
729 def get_status(self):
730 """
730 """
731 Returns dict with ``added``, ``changed``, ``removed`` and ``untracked``
731 Returns dict with ``added``, ``changed``, ``removed`` and ``untracked``
732 lists.
732 lists.
733 """
733 """
734 raise NotImplementedError
734 raise NotImplementedError
735
735
736 def commit(self, message, **kwargs):
736 def commit(self, message, **kwargs):
737 """
737 """
738 Commits local (from working directory) changes and returns newly
738 Commits local (from working directory) changes and returns newly
739 created
739 created
740 ``Changeset``. Updates repository's ``revisions`` list.
740 ``Changeset``. Updates repository's ``revisions`` list.
741
741
742 :raises ``CommitError``: if any error occurs while committing
742 :raises ``CommitError``: if any error occurs while committing
743 """
743 """
744 raise NotImplementedError
744 raise NotImplementedError
745
745
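# Hedged sketch of the working-directory API described above; `repo.workdir` is
# the ``Workdir`` instance exposed by the WORKDIR API section of this module.
wd = repo.workdir
status = wd.get_status()   # dict with 'added', 'changed', 'removed', 'untracked'
if status['added'] or status['changed'] or status['removed']:
    wd.commit(message='commit local working directory changes')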
746 def update(self, revision=None):
746 def update(self, revision=None):
747 """
747 """
748 Fetches content of the given revision and populates it within working
748 Fetches content of the given revision and populates it within working
749 directory.
749 directory.
750 """
750 """
751 raise NotImplementedError
751 raise NotImplementedError
752
752
753 def checkout_branch(self, branch=None):
753 def checkout_branch(self, branch=None):
754 """
754 """
755 Checks out ``branch`` or the backend's default branch.
755 Checks out ``branch`` or the backend's default branch.
756
756
757 Raises ``BranchDoesNotExistError`` if the branch does not exist.
757 Raises ``BranchDoesNotExistError`` if the branch does not exist.
758 """
758 """
759 raise NotImplementedError
759 raise NotImplementedError
760
760
761
761
762 class BaseInMemoryChangeset(object):
762 class BaseInMemoryChangeset(object):
763 """
763 """
764 Represents differences between repository's state (most recent head) and
764 Represents differences between repository's state (most recent head) and
765 changes made *in place*.
765 changes made *in place*.
766
766
767 **Attributes**
767 **Attributes**
768
768
769 ``repository``
769 ``repository``
770 repository object for this in-memory-changeset
770 repository object for this in-memory-changeset
771
771
772 ``added``
772 ``added``
773 list of ``FileNode`` objects marked as *added*
773 list of ``FileNode`` objects marked as *added*
774
774
775 ``changed``
775 ``changed``
776 list of ``FileNode`` objects marked as *changed*
776 list of ``FileNode`` objects marked as *changed*
777
777
778 ``removed``
778 ``removed``
779 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
779 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
780 *removed*
780 *removed*
781
781
782 ``parents``
782 ``parents``
783 list of ``Changeset`` representing parents of in-memory changeset.
783 list of ``Changeset`` representing parents of in-memory changeset.
784 Should always be 2-element sequence.
784 Should always be 2-element sequence.
785
785
786 """
786 """
787
787
788 def __init__(self, repository):
788 def __init__(self, repository):
789 self.repository = repository
789 self.repository = repository
790 self.added = []
790 self.added = []
791 self.changed = []
791 self.changed = []
792 self.removed = []
792 self.removed = []
793 self.parents = []
793 self.parents = []
794
794
795 def add(self, *filenodes):
795 def add(self, *filenodes):
796 """
796 """
797 Marks given ``FileNode`` objects as *to be committed*.
797 Marks given ``FileNode`` objects as *to be committed*.
798
798
799 :raises ``NodeAlreadyExistsError``: if node with same path exists at
799 :raises ``NodeAlreadyExistsError``: if node with same path exists at
800 latest changeset
800 latest changeset
801 :raises ``NodeAlreadyAddedError``: if node with same path is already
801 :raises ``NodeAlreadyAddedError``: if node with same path is already
802 marked as *added*
802 marked as *added*
803 """
803 """
804 # Check if not already marked as *added* first
804 # Check if not already marked as *added* first
805 for node in filenodes:
805 for node in filenodes:
806 if node.path in (n.path for n in self.added):
806 if node.path in (n.path for n in self.added):
807 raise NodeAlreadyAddedError("Such FileNode %s is already "
807 raise NodeAlreadyAddedError("Such FileNode %s is already "
808 "marked for addition" % node.path)
808 "marked for addition" % node.path)
809 for node in filenodes:
809 for node in filenodes:
810 self.added.append(node)
810 self.added.append(node)
811
811
812 def change(self, *filenodes):
812 def change(self, *filenodes):
813 """
813 """
814 Marks given ``FileNode`` objects to be *changed* in next commit.
814 Marks given ``FileNode`` objects to be *changed* in next commit.
815
815
816 :raises ``EmptyRepositoryError``: if there are no changesets yet
816 :raises ``EmptyRepositoryError``: if there are no changesets yet
817 :raises ``NodeAlreadyExistsError``: if node with same path is already
817 :raises ``NodeAlreadyExistsError``: if node with same path is already
818 marked to be *changed*
818 marked to be *changed*
819 :raises ``NodeAlreadyRemovedError``: if node with same path is already
819 :raises ``NodeAlreadyRemovedError``: if node with same path is already
820 marked to be *removed*
820 marked to be *removed*
821 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
821 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
822 changeset
822 changeset
823 :raises ``NodeNotChangedError``: if node hasn't really been changed
823 :raises ``NodeNotChangedError``: if node hasn't really been changed
824 """
824 """
825 for node in filenodes:
825 for node in filenodes:
826 if node.path in (n.path for n in self.removed):
826 if node.path in (n.path for n in self.removed):
827 raise NodeAlreadyRemovedError("Node at %s is already marked "
827 raise NodeAlreadyRemovedError("Node at %s is already marked "
828 "as removed" % node.path)
828 "as removed" % node.path)
829 try:
829 try:
830 self.repository.get_changeset()
830 self.repository.get_changeset()
831 except EmptyRepositoryError:
831 except EmptyRepositoryError:
832 raise EmptyRepositoryError("Nothing to change - try to *add* new "
832 raise EmptyRepositoryError("Nothing to change - try to *add* new "
833 "nodes rather than changing them")
833 "nodes rather than changing them")
834 for node in filenodes:
834 for node in filenodes:
835 if node.path in (n.path for n in self.changed):
835 if node.path in (n.path for n in self.changed):
836 raise NodeAlreadyChangedError("Node at '%s' is already "
836 raise NodeAlreadyChangedError("Node at '%s' is already "
837 "marked as changed" % node.path)
837 "marked as changed" % node.path)
838 self.changed.append(node)
838 self.changed.append(node)
839
839
840 def remove(self, *filenodes):
840 def remove(self, *filenodes):
841 """
841 """
842 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
842 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
843 *removed* in next commit.
843 *removed* in next commit.
844
844
845 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
845 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
846 be *removed*
846 be *removed*
847 :raises ``NodeAlreadyChangedError``: if node has been already marked to
847 :raises ``NodeAlreadyChangedError``: if node has been already marked to
848 be *changed*
848 be *changed*
849 """
849 """
850 for node in filenodes:
850 for node in filenodes:
851 if node.path in (n.path for n in self.removed):
851 if node.path in (n.path for n in self.removed):
852 raise NodeAlreadyRemovedError("Node is already marked "
852 raise NodeAlreadyRemovedError("Node is already marked "
853 "for removal at %s" % node.path)
853 "for removal at %s" % node.path)
854 if node.path in (n.path for n in self.changed):
854 if node.path in (n.path for n in self.changed):
855 raise NodeAlreadyChangedError("Node is already marked to "
855 raise NodeAlreadyChangedError("Node is already marked to "
856 "be changed at %s" % node.path)
856 "be changed at %s" % node.path)
857 # We only mark node as *removed* - real removal is done by
857 # We only mark node as *removed* - real removal is done by
858 # commit method
858 # commit method
859 self.removed.append(node)
859 self.removed.append(node)
860
860
861 def reset(self):
861 def reset(self):
862 """
862 """
863 Resets this instance to initial state (cleans ``added``, ``changed``
863 Resets this instance to initial state (cleans ``added``, ``changed``
864 and ``removed`` lists).
864 and ``removed`` lists).
865 """
865 """
866 self.added = []
866 self.added = []
867 self.changed = []
867 self.changed = []
868 self.removed = []
868 self.removed = []
869 self.parents = []
869 self.parents = []
870
870
871 def get_ipaths(self):
871 def get_ipaths(self):
872 """
872 """
873 Returns generator of paths from nodes marked as added, changed or
873 Returns generator of paths from nodes marked as added, changed or
874 removed.
874 removed.
875 """
875 """
876 for node in itertools.chain(self.added, self.changed, self.removed):
876 for node in itertools.chain(self.added, self.changed, self.removed):
877 yield node.path
877 yield node.path
878
878
879 def get_paths(self):
879 def get_paths(self):
880 """
880 """
881 Returns list of paths from nodes marked as added, changed or removed.
881 Returns list of paths from nodes marked as added, changed or removed.
882 """
882 """
883 return list(self.get_ipaths())
883 return list(self.get_ipaths())
884
884
885 def check_integrity(self, parents=None):
885 def check_integrity(self, parents=None):
886 """
886 """
887 Checks in-memory changeset's integrity. Also, sets parents if not
887 Checks in-memory changeset's integrity. Also, sets parents if not
888 already set.
888 already set.
889
889
890 :raises CommitError: if any error occurs (i.e.
890 :raises CommitError: if any error occurs (i.e.
891 ``NodeDoesNotExistError``).
891 ``NodeDoesNotExistError``).
892 """
892 """
893 if not self.parents:
893 if not self.parents:
894 parents = parents or []
894 parents = parents or []
895 if len(parents) == 0:
895 if len(parents) == 0:
896 try:
896 try:
897 parents = [self.repository.get_changeset(), None]
897 parents = [self.repository.get_changeset(), None]
898 except EmptyRepositoryError:
898 except EmptyRepositoryError:
899 parents = [None, None]
899 parents = [None, None]
900 elif len(parents) == 1:
900 elif len(parents) == 1:
901 parents += [None]
901 parents += [None]
902 self.parents = parents
902 self.parents = parents
903
903
904 # Local parents, only if not None
904 # Local parents, only if not None
905 parents = [p for p in self.parents if p]
905 parents = [p for p in self.parents if p]
906
906
907 # Check nodes marked as added
907 # Check nodes marked as added
908 for p in parents:
908 for p in parents:
909 for node in self.added:
909 for node in self.added:
910 try:
910 try:
911 p.get_node(node.path)
911 p.get_node(node.path)
912 except NodeDoesNotExistError:
912 except NodeDoesNotExistError:
913 pass
913 pass
914 else:
914 else:
915 raise NodeAlreadyExistsError("Node at %s already exists "
915 raise NodeAlreadyExistsError("Node at %s already exists "
916 "at %s" % (node.path, p))
916 "at %s" % (node.path, p))
917
917
918 # Check nodes marked as changed
918 # Check nodes marked as changed
919 missing = set(node.path for node in self.changed)
919 missing = set(node.path for node in self.changed)
920 not_changed = set(node.path for node in self.changed)
920 not_changed = set(node.path for node in self.changed)
921 if self.changed and not parents:
921 if self.changed and not parents:
922 raise NodeDoesNotExistError(self.changed[0].path)
922 raise NodeDoesNotExistError(self.changed[0].path)
923 for p in parents:
923 for p in parents:
924 for node in self.changed:
924 for node in self.changed:
925 try:
925 try:
926 old = p.get_node(node.path)
926 old = p.get_node(node.path)
927 missing.remove(node.path)
927 missing.remove(node.path)
928 # if content actually changed, remove node from unchanged
928 # if content actually changed, remove node from unchanged
929 if old.content != node.content:
929 if old.content != node.content:
930 not_changed.remove(node.path)
930 not_changed.remove(node.path)
931 except NodeDoesNotExistError:
931 except NodeDoesNotExistError:
932 pass
932 pass
933 if self.changed and missing:
933 if self.changed and missing:
934 raise NodeDoesNotExistError("Node at %s is missing "
934 raise NodeDoesNotExistError("Node at %s is missing "
935 "(parents: %s)" % (missing.pop(), parents))
935 "(parents: %s)" % (missing.pop(), parents))
936
936
937 if self.changed and not_changed:
937 if self.changed and not_changed:
938 raise NodeNotChangedError("Node at %s wasn't actually changed "
938 raise NodeNotChangedError("Node at %s wasn't actually changed "
939 "since parents' changesets: %s" % (not_changed.pop(),
939 "since parents' changesets: %s" % (not_changed.pop(),
940 parents)
940 parents)
941 )
941 )
942
942
943 # Check nodes marked as removed
943 # Check nodes marked as removed
944 if self.removed and not parents:
944 if self.removed and not parents:
945 raise NodeDoesNotExistError("Cannot remove node at %s as there "
945 raise NodeDoesNotExistError("Cannot remove node at %s as there "
946 "were no parents specified" % self.removed[0].path)
946 "were no parents specified" % self.removed[0].path)
947 really_removed = set()
947 really_removed = set()
948 for p in parents:
948 for p in parents:
949 for node in self.removed:
949 for node in self.removed:
950 try:
950 try:
951 p.get_node(node.path)
951 p.get_node(node.path)
952 really_removed.add(node.path)
952 really_removed.add(node.path)
953 except ChangesetError:
953 except ChangesetError:
954 pass
954 pass
955 not_removed = list(set(node.path for node in self.removed) - really_removed)
955 not_removed = list(set(node.path for node in self.removed) - really_removed)
956 if not_removed:
956 if not_removed:
957 raise NodeDoesNotExistError("Cannot remove node at %s from "
957 raise NodeDoesNotExistError("Cannot remove node at %s from "
958 "following parents: %s" % (not_removed[0], parents))
958 "following parents: %s" % (not_removed[0], parents))
959
959
960 def commit(self, message, author, parents=None, branch=None, date=None,
960 def commit(self, message, author, parents=None, branch=None, date=None,
961 **kwargs):
961 **kwargs):
962 """
962 """
963 Performs in-memory commit (doesn't check workdir in any way) and
963 Performs in-memory commit (doesn't check workdir in any way) and
964 returns newly created ``Changeset``. Updates repository's
964 returns newly created ``Changeset``. Updates repository's
965 ``revisions``.
965 ``revisions``.
966
966
967 .. note::
967 .. note::
968 When overriding this method, each backend should call
968 When overriding this method, each backend should call
969 ``self.check_integrity(parents)`` first.
969 ``self.check_integrity(parents)`` first.
970
970
971 :param message: message of the commit
971 :param message: message of the commit
972 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
972 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
973 :param parents: single parent or sequence of parents from which commit
973 :param parents: single parent or sequence of parents from which commit
974 would be derived
974 would be derived
975 :param date: ``datetime.datetime`` instance. Defaults to
975 :param date: ``datetime.datetime`` instance. Defaults to
976 ``datetime.datetime.now()``.
976 ``datetime.datetime.now()``.
977 :param branch: branch name, as string. If none given, default backend's
977 :param branch: branch name, as string. If none given, default backend's
978 branch would be used.
978 branch would be used.
979
979
980 :raises ``CommitError``: if any error occurs while committing
980 :raises ``CommitError``: if any error occurs while committing
981 """
981 """
982 raise NotImplementedError
982 raise NotImplementedError
983
983
984
984
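# Hedged usage sketch of the in-memory commit flow described above, using a
# concrete subclass (MercurialInMemoryChangeset, imported by the Mercurial
# backend further down). The FileNode import path and constructor arguments
# are assumptions, not confirmed by this file.
from kallithea.lib.vcs.nodes import FileNode  # assumed import path

imc = MercurialInMemoryChangeset(repo)
imc.add(FileNode('docs/readme.rst', content=b'hello'))
new_cs = imc.commit(message='add readme',
                    author='Joe Doe <joe.doe@example.com>')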
985 class EmptyChangeset(BaseChangeset):
985 class EmptyChangeset(BaseChangeset):
986 """
986 """
987 A dummy empty changeset. It's possible to pass a hash when creating
987 A dummy empty changeset. It's possible to pass a hash when creating
988 an EmptyChangeset
988 an EmptyChangeset
989 """
989 """
990
990
991 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
991 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
992 alias=None, revision=-1, message='', author='', date=None):
992 alias=None, revision=-1, message='', author='', date=None):
993 self._empty_cs = cs
993 self._empty_cs = cs
994 self.revision = revision
994 self.revision = revision
995 self.message = message
995 self.message = message
996 self.author = author
996 self.author = author
997 self.date = date or datetime.datetime.fromtimestamp(0)
997 self.date = date or datetime.datetime.fromtimestamp(0)
998 self.repository = repo
998 self.repository = repo
999 self.requested_revision = requested_revision
999 self.requested_revision = requested_revision
1000 self.alias = alias
1000 self.alias = alias
1001
1001
1002 @LazyProperty
1002 @LazyProperty
1003 def raw_id(self):
1003 def raw_id(self):
1004 """
1004 """
1005 Returns raw string identifying this changeset, useful for web
1005 Returns raw string identifying this changeset, useful for web
1006 representation.
1006 representation.
1007 """
1007 """
1008
1008
1009 return self._empty_cs
1009 return self._empty_cs
1010
1010
1011 @LazyProperty
1011 @LazyProperty
1012 def branch(self):
1012 def branch(self):
1013 from kallithea.lib.vcs.backends import get_backend
1013 from kallithea.lib.vcs.backends import get_backend
1014 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1014 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1015
1015
1016 @LazyProperty
1016 @LazyProperty
1017 def branches(self):
1017 def branches(self):
1018 from kallithea.lib.vcs.backends import get_backend
1018 from kallithea.lib.vcs.backends import get_backend
1019 return [get_backend(self.alias).DEFAULT_BRANCH_NAME]
1019 return [get_backend(self.alias).DEFAULT_BRANCH_NAME]
1020
1020
1021 @LazyProperty
1021 @LazyProperty
1022 def short_id(self):
1022 def short_id(self):
1023 return self.raw_id[:12]
1023 return self.raw_id[:12]
1024
1024
1025 def get_file_changeset(self, path):
1025 def get_file_changeset(self, path):
1026 return self
1026 return self
1027
1027
1028 def get_file_content(self, path):
1028 def get_file_content(self, path):
1029 return u''
1029 return u''
1030
1030
1031 def get_file_size(self, path):
1031 def get_file_size(self, path):
1032 return 0
1032 return 0
1033
1033
1034
1034
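# Hedged sketch: EmptyChangeset acts as a null object, e.g. when comparing
# against the empty state of a repository; the alias value is illustrative.
empty = EmptyChangeset(repo=repo, alias='hg')
empty.raw_id                         # forty zeros by default
empty.branch                         # backend's DEFAULT_BRANCH_NAME, e.g. 'default'
empty.get_file_content('any/path')   # always an empty string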
1035 class CollectionGenerator(object):
1035 class CollectionGenerator(object):
1036
1036
1037 def __init__(self, repo, revs):
1037 def __init__(self, repo, revs):
1038 self.repo = repo
1038 self.repo = repo
1039 self.revs = revs
1039 self.revs = revs
1040
1040
1041 def __len__(self):
1041 def __len__(self):
1042 return len(self.revs)
1042 return len(self.revs)
1043
1043
1044 def __iter__(self):
1044 def __iter__(self):
1045 for rev in self.revs:
1045 for rev in self.revs:
1046 yield self.repo.get_changeset(rev)
1046 yield self.repo.get_changeset(rev)
1047
1047
1048 def __getitem__(self, what):
1048 def __getitem__(self, what):
1049 """Return either a single element by index, or a sliced collection."""
1049 """Return either a single element by index, or a sliced collection."""
1050 if isinstance(what, slice):
1050 if isinstance(what, slice):
1051 return CollectionGenerator(self.repo, self.revs[what])
1051 return CollectionGenerator(self.repo, self.revs[what])
1052 else:
1052 else:
1053 # single item
1053 # single item
1054 return self.repo.get_changeset(self.revs[what])
1054 return self.repo.get_changeset(self.revs[what])
1055
1055
1056 def __repr__(self):
1056 def __repr__(self):
1057 return '<CollectionGenerator[len:%s]>' % (len(self))
1057 return '<CollectionGenerator[len:%s]>' % (len(self))
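# Hedged sketch of how CollectionGenerator is meant to be consumed: slicing
# returns another lazy collection and iteration materializes changesets on
# demand; `repo` is assumed to be a concrete repository with a revisions list.
latest = CollectionGenerator(repo, repo.revisions)[-10:]
print(len(latest))                # at most 10
for cs in latest:
    print(cs.short_id, cs.message)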
@@ -1,617 +1,617 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """
2 """
3 vcs.backends.hg.repository
3 vcs.backends.hg.repository
4 ~~~~~~~~~~~~~~~~~~~~~~~~~~
4 ~~~~~~~~~~~~~~~~~~~~~~~~~~
5
5
6 Mercurial repository implementation.
6 Mercurial repository implementation.
7
7
8 :created_on: Apr 8, 2010
8 :created_on: Apr 8, 2010
9 :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
9 :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
10 """
10 """
11
11
12 import datetime
12 import datetime
13 import logging
13 import logging
14 import os
14 import os
15 import time
15 import time
16 import urllib.error
16 import urllib.error
17 import urllib.parse
17 import urllib.parse
18 import urllib.request
18 import urllib.request
19 from collections import OrderedDict
19 from collections import OrderedDict
20
20
21 import mercurial.commands
21 import mercurial.commands
22 import mercurial.error
22 import mercurial.error
23 import mercurial.exchange
23 import mercurial.exchange
24 import mercurial.hg
24 import mercurial.hg
25 import mercurial.hgweb
25 import mercurial.hgweb
26 import mercurial.httppeer
26 import mercurial.httppeer
27 import mercurial.localrepo
27 import mercurial.localrepo
28 import mercurial.match
28 import mercurial.match
29 import mercurial.mdiff
29 import mercurial.mdiff
30 import mercurial.node
30 import mercurial.node
31 import mercurial.patch
31 import mercurial.patch
32 import mercurial.scmutil
32 import mercurial.scmutil
33 import mercurial.sshpeer
33 import mercurial.sshpeer
34 import mercurial.tags
34 import mercurial.tags
35 import mercurial.ui
35 import mercurial.ui
36 import mercurial.url
36 import mercurial.url
37 import mercurial.util
37 import mercurial.util
38
38
39 from kallithea.lib.vcs.backends.base import BaseRepository, CollectionGenerator
39 from kallithea.lib.vcs.backends.base import BaseRepository, CollectionGenerator
40 from kallithea.lib.vcs.exceptions import (
40 from kallithea.lib.vcs.exceptions import (
41 BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
41 BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
42 from kallithea.lib.vcs.utils import ascii_str, author_email, author_name, date_fromtimestamp, makedate, safe_bytes, safe_str
42 from kallithea.lib.vcs.utils import ascii_str, author_email, author_name, date_fromtimestamp, makedate, safe_bytes, safe_str
43 from kallithea.lib.vcs.utils.lazy import LazyProperty
43 from kallithea.lib.vcs.utils.lazy import LazyProperty
44 from kallithea.lib.vcs.utils.paths import abspath
44 from kallithea.lib.vcs.utils.paths import abspath
45
45
46 from .changeset import MercurialChangeset
46 from .changeset import MercurialChangeset
47 from .inmemory import MercurialInMemoryChangeset
47 from .inmemory import MercurialInMemoryChangeset
48 from .workdir import MercurialWorkdir
48 from .workdir import MercurialWorkdir
49
49
50
50
51 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
52
52
53
53
54 class MercurialRepository(BaseRepository):
54 class MercurialRepository(BaseRepository):
55 """
55 """
56 Mercurial repository backend
56 Mercurial repository backend
57 """
57 """
58 DEFAULT_BRANCH_NAME = 'default'
58 DEFAULT_BRANCH_NAME = 'default'
59 scm = 'hg'
59 scm = 'hg'
60
60
61 def __init__(self, repo_path, create=False, baseui=None, src_url=None,
61 def __init__(self, repo_path, create=False, baseui=None, src_url=None,
62 update_after_clone=False):
62 update_after_clone=False):
63 """
63 """
64 Raises RepositoryError if repository could not be find at the given
64 Raises RepositoryError if repository could not be find at the given
65 ``repo_path``.
65 ``repo_path``.
66
66
67 :param repo_path: local path of the repository
67 :param repo_path: local path of the repository
68 :param create=False: if set to True, would try to create repository if
68 :param create=False: if set to True, would try to create repository if
69 it does not exist rather than raising exception
69 it does not exist rather than raising exception
70 :param baseui=None: user data
70 :param baseui=None: user data
71 :param src_url=None: would try to clone repository from given location
71 :param src_url=None: would try to clone repository from given location
72 :param update_after_clone=False: sets update of working copy after
72 :param update_after_clone=False: sets update of working copy after
73 making a clone
73 making a clone
74 """
74 """
75
75
76 if not isinstance(repo_path, str):
76 if not isinstance(repo_path, str):
77 raise VCSError('Mercurial backend requires repository path to '
77 raise VCSError('Mercurial backend requires repository path to '
78 'be instance of <str> got %s instead' %
78 'be instance of <str> got %s instead' %
79 type(repo_path))
79 type(repo_path))
80 self.path = abspath(repo_path)
80 self.path = abspath(repo_path)
81 self.baseui = baseui or mercurial.ui.ui()
81 self.baseui = baseui or mercurial.ui.ui()
82 # We've set path and ui, now we can set _repo itself
82 # We've set path and ui, now we can set _repo itself
83 self._repo = self._get_repo(create, src_url, update_after_clone)
83 self._repo = self._get_repo(create, src_url, update_after_clone)
84
84
85 @property
85 @property
86 def _empty(self):
86 def _empty(self):
87 """
87 """
88 Checks if repository is empty ie. without any changesets
88 Checks if repository is empty, i.e. without any changesets
88 Checks if repository is empty, i.e. without any changesets
89 """
90 # TODO: Following raises errors when using InMemoryChangeset...
90 # TODO: Following raises errors when using InMemoryChangeset...
91 # return len(self._repo.changelog) == 0
91 # return len(self._repo.changelog) == 0
92 return len(self.revisions) == 0
92 return len(self.revisions) == 0
93
93
94 @LazyProperty
94 @LazyProperty
95 def revisions(self):
95 def revisions(self):
96 """
96 """
97 Returns list of revisions' ids, in ascending order. Being lazy
97 Returns list of revisions' ids, in ascending order. Being lazy
98 attribute allows external tools to inject shas from cache.
98 attribute allows external tools to inject shas from cache.
99 """
99 """
100 return self._get_all_revisions()
100 return self._get_all_revisions()
101
101
102 @LazyProperty
102 @LazyProperty
103 def name(self):
103 def name(self):
104 return os.path.basename(self.path)
104 return os.path.basename(self.path)
105
105
106 @LazyProperty
106 @LazyProperty
107 def branches(self):
107 def branches(self):
108 return self._get_branches()
108 return self._get_branches()
109
109
110 @LazyProperty
110 @LazyProperty
111 def closed_branches(self):
111 def closed_branches(self):
112 return self._get_branches(normal=False, closed=True)
112 return self._get_branches(normal=False, closed=True)
113
113
114 @LazyProperty
114 @LazyProperty
115 def allbranches(self):
115 def allbranches(self):
116 """
116 """
117 List all branches, including closed branches.
117 List all branches, including closed branches.
118 """
118 """
119 return self._get_branches(closed=True)
119 return self._get_branches(closed=True)
120
120
121 def _get_branches(self, normal=True, closed=False):
121 def _get_branches(self, normal=True, closed=False):
122 """
122 """
123 Gets branches for this repository
123 Gets branches for this repository
124 Returns only not closed branches by default
124 Returns only non-closed branches by default
124 Returns only non-closed branches by default
125
126 :param closed: return also closed branches for mercurial
126 :param closed: return also closed branches for mercurial
127 :param normal: return also normal branches
127 :param normal: return also normal branches
128 """
128 """
129
129
130 if self._empty:
130 if self._empty:
131 return {}
131 return {}
132
132
133 bt = OrderedDict()
133 bt = OrderedDict()
134 for bn, _heads, node, isclosed in sorted(self._repo.branchmap().iterbranches()):
134 for bn, _heads, node, isclosed in sorted(self._repo.branchmap().iterbranches()):
135 if isclosed:
135 if isclosed:
136 if closed:
136 if closed:
137 bt[safe_str(bn)] = ascii_str(mercurial.node.hex(node))
137 bt[safe_str(bn)] = ascii_str(mercurial.node.hex(node))
138 else:
138 else:
139 if normal:
139 if normal:
140 bt[safe_str(bn)] = ascii_str(mercurial.node.hex(node))
140 bt[safe_str(bn)] = ascii_str(mercurial.node.hex(node))
141 return bt
141 return bt
142
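# Hedged sketch of the mapping built above: branch name (str) mapped to the
# 40-character hex id of the branch head; the path and values are illustrative.
repo = MercurialRepository('/srv/repos/example')
repo.branches       # e.g. {'default': '1e3f...', 'stable': '9ab2...'}
repo.allbranches    # same mapping, but including closed branches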
142
143 @LazyProperty
143 @LazyProperty
144 def tags(self):
144 def tags(self):
145 """
145 """
146 Gets tags for this repository
146 Gets tags for this repository
147 """
147 """
148 return self._get_tags()
148 return self._get_tags()
149
149
150 def _get_tags(self):
150 def _get_tags(self):
151 if self._empty:
151 if self._empty:
152 return {}
152 return {}
153
153
154 return OrderedDict(sorted(
154 return OrderedDict(sorted(
155 ((safe_str(n), ascii_str(mercurial.node.hex(h))) for n, h in self._repo.tags().items()),
155 ((safe_str(n), ascii_str(mercurial.node.hex(h))) for n, h in self._repo.tags().items()),
156 reverse=True,
156 reverse=True,
157 key=lambda x: x[0], # sort by name
157 key=lambda x: x[0], # sort by name
158 ))
158 ))
159
159
160 def tag(self, name, user, revision=None, message=None, date=None,
160 def tag(self, name, user, revision=None, message=None, date=None,
161 **kwargs):
161 **kwargs):
162 """
162 """
163 Creates and returns a tag for the given ``revision``.
163 Creates and returns a tag for the given ``revision``.
164
164
165 :param name: name for new tag
165 :param name: name for new tag
166 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
166 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
167 :param revision: changeset id for which new tag would be created
167 :param revision: changeset id for which new tag would be created
168 :param message: message of the tag's commit
168 :param message: message of the tag's commit
169 :param date: date of tag's commit
169 :param date: date of tag's commit
170
170
171 :raises TagAlreadyExistError: if tag with same name already exists
171 :raises TagAlreadyExistError: if tag with same name already exists
172 """
172 """
173 if name in self.tags:
173 if name in self.tags:
174 raise TagAlreadyExistError("Tag %s already exists" % name)
174 raise TagAlreadyExistError("Tag %s already exists" % name)
175 changeset = self.get_changeset(revision)
175 changeset = self.get_changeset(revision)
176 local = kwargs.setdefault('local', False)
176 local = kwargs.setdefault('local', False)
177
177
178 if message is None:
178 if message is None:
179 message = "Added tag %s for changeset %s" % (name,
179 message = "Added tag %s for changeset %s" % (name,
180 changeset.short_id)
180 changeset.short_id)
181
181
182 if date is None:
182 if date is None:
183 date = safe_bytes(datetime.datetime.now().strftime('%a, %d %b %Y %H:%M:%S'))
183 date = safe_bytes(datetime.datetime.now().strftime('%a, %d %b %Y %H:%M:%S'))
184
184
185 try:
185 try:
186 mercurial.tags.tag(self._repo, safe_bytes(name), changeset._ctx.node(), safe_bytes(message), local, safe_bytes(user), date)
186 mercurial.tags.tag(self._repo, safe_bytes(name), changeset._ctx.node(), safe_bytes(message), local, safe_bytes(user), date)
187 except mercurial.error.Abort as e:
187 except mercurial.error.Abort as e:
188 raise RepositoryError(e.args[0])
188 raise RepositoryError(e.args[0])
189
189
190 # Reinitialize tags
190 # Reinitialize tags
191 self.tags = self._get_tags()
191 self.tags = self._get_tags()
192 tag_id = self.tags[name]
192 tag_id = self.tags[name]
193
193
194 return self.get_changeset(revision=tag_id)
194 return self.get_changeset(revision=tag_id)
195
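# Hedged usage sketch of tag(), following the signature above; the tag name,
# user string and chosen revision are illustrative.
tagged_cs = repo.tag(name='v1.0',
                     user='Joe Doe <joe.doe@example.com>',
                     revision=repo.revisions[-1],
                     message='Release 1.0')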
195
196 def remove_tag(self, name, user, message=None, date=None):
196 def remove_tag(self, name, user, message=None, date=None):
197 """
197 """
198 Removes tag with the given ``name``.
198 Removes tag with the given ``name``.
199
199
200 :param name: name of the tag to be removed
200 :param name: name of the tag to be removed
201 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
201 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
202 :param message: message of the tag's removal commit
202 :param message: message of the tag's removal commit
203 :param date: date of tag's removal commit
203 :param date: date of tag's removal commit
204
204
205 :raises TagDoesNotExistError: if tag with given name does not exists
205 :raises TagDoesNotExistError: if tag with given name does not exists
206 """
206 """
207 if name not in self.tags:
207 if name not in self.tags:
208 raise TagDoesNotExistError("Tag %s does not exist" % name)
208 raise TagDoesNotExistError("Tag %s does not exist" % name)
209 if message is None:
209 if message is None:
210 message = "Removed tag %s" % name
210 message = "Removed tag %s" % name
211 if date is None:
211 if date is None:
212 date = safe_bytes(datetime.datetime.now().strftime('%a, %d %b %Y %H:%M:%S'))
212 date = safe_bytes(datetime.datetime.now().strftime('%a, %d %b %Y %H:%M:%S'))
213 local = False
213 local = False
214
214
215 try:
215 try:
216 mercurial.tags.tag(self._repo, safe_bytes(name), mercurial.commands.nullid, safe_bytes(message), local, safe_bytes(user), date)
216 mercurial.tags.tag(self._repo, safe_bytes(name), mercurial.commands.nullid, safe_bytes(message), local, safe_bytes(user), date)
217 self.tags = self._get_tags()
217 self.tags = self._get_tags()
218 except mercurial.error.Abort as e:
218 except mercurial.error.Abort as e:
219 raise RepositoryError(e.args[0])
219 raise RepositoryError(e.args[0])
220
220
221 @LazyProperty
221 @LazyProperty
222 def bookmarks(self):
222 def bookmarks(self):
223 """
223 """
224 Gets bookmarks for this repository
224 Gets bookmarks for this repository
225 """
225 """
226 return self._get_bookmarks()
226 return self._get_bookmarks()
227
227
228 def _get_bookmarks(self):
228 def _get_bookmarks(self):
229 if self._empty:
229 if self._empty:
230 return {}
230 return {}
231
231
232 return OrderedDict(sorted(
232 return OrderedDict(sorted(
233 ((safe_str(n), ascii_str(h)) for n, h in self._repo._bookmarks.items()),
233 ((safe_str(n), ascii_str(h)) for n, h in self._repo._bookmarks.items()),
234 reverse=True,
234 reverse=True,
235 key=lambda x: x[0], # sort by name
235 key=lambda x: x[0], # sort by name
236 ))
236 ))
237
237
238 def _get_all_revisions(self):
238 def _get_all_revisions(self):
239 return [ascii_str(self._repo[x].hex()) for x in self._repo.filtered(b'visible').changelog.revs()]
239 return [ascii_str(self._repo[x].hex()) for x in self._repo.filtered(b'visible').changelog.revs()]
240
240
241 def get_diff(self, rev1, rev2, path='', ignore_whitespace=False,
241 def get_diff(self, rev1, rev2, path='', ignore_whitespace=False,
242 context=3):
242 context=3):
243 """
243 """
244 Returns (git like) *diff*, as plain text. Shows changes introduced by
244 Returns (git like) *diff*, as plain text. Shows changes introduced by
245 ``rev2`` since ``rev1``.
245 ``rev2`` since ``rev1``.
246
246
247 :param rev1: Entry point from which diff is shown. Can be
247 :param rev1: Entry point from which diff is shown. Can be
248 ``self.EMPTY_CHANGESET`` - in this case, patch showing all
248 ``self.EMPTY_CHANGESET`` - in this case, patch showing all
249 the changes since empty state of the repository until ``rev2``
249 the changes since empty state of the repository until ``rev2``
250 :param rev2: Until which revision changes should be shown.
250 :param rev2: Until which revision changes should be shown.
251 :param ignore_whitespace: If set to ``True``, would not show whitespace
251 :param ignore_whitespace: If set to ``True``, would not show whitespace
252 changes. Defaults to ``False``.
252 changes. Defaults to ``False``.
253 :param context: How many lines before/after changed lines should be
253 :param context: How many lines before/after changed lines should be
254 shown. Defaults to ``3``. If negative value is passed-in, it will be
254 shown. Defaults to ``3``. If negative value is passed-in, it will be
255 set to ``0`` instead.
255 set to ``0`` instead.
256 """
256 """
257
257
258 # Negative context values make no sense, and will result in
258 # Negative context values make no sense, and will result in
259 # errors. Ensure this does not happen.
259 # errors. Ensure this does not happen.
260 if context < 0:
260 if context < 0:
261 context = 0
261 context = 0
262
262
263 if hasattr(rev1, 'raw_id'):
263 if hasattr(rev1, 'raw_id'):
264 rev1 = getattr(rev1, 'raw_id')
264 rev1 = getattr(rev1, 'raw_id')
265
265
266 if hasattr(rev2, 'raw_id'):
266 if hasattr(rev2, 'raw_id'):
267 rev2 = getattr(rev2, 'raw_id')
267 rev2 = getattr(rev2, 'raw_id')
268
268
269 # Check if given revisions are present at repository (may raise
269 # Check if given revisions are present at repository (may raise
270 # ChangesetDoesNotExistError)
270 # ChangesetDoesNotExistError)
271 if rev1 != self.EMPTY_CHANGESET:
271 if rev1 != self.EMPTY_CHANGESET:
272 self.get_changeset(rev1)
272 self.get_changeset(rev1)
273 self.get_changeset(rev2)
273 self.get_changeset(rev2)
274 if path:
274 if path:
275 file_filter = mercurial.match.exact(path)
275 file_filter = mercurial.match.exact(path)
276 else:
276 else:
277 file_filter = None
277 file_filter = None
278
278
279 return b''.join(mercurial.patch.diff(self._repo, rev1, rev2, match=file_filter,
279 return b''.join(mercurial.patch.diff(self._repo, rev1, rev2, match=file_filter,
280 opts=mercurial.mdiff.diffopts(git=True,
280 opts=mercurial.mdiff.diffopts(git=True,
281 showfunc=True,
281 showfunc=True,
282 ignorews=ignore_whitespace,
282 ignorews=ignore_whitespace,
283 context=context)))
283 context=context)))
284
284
285 @classmethod
285 @classmethod
286 def _check_url(cls, url, repoui=None):
286 def _check_url(cls, url, repoui=None):
287 """
287 """
288 Function will check given url and try to verify if it's a valid
288 Function will check given url and try to verify if it's a valid
289 link. Sometimes it may happened that mercurial will issue basic
289 link. Sometimes it may happened that mercurial will issue basic
290 auth request that can cause whole API to hang when used from python
290 auth request that can cause whole API to hang when used from python
291 or other external calls.
291 or other external calls.
292
292
293 On failures it'll raise urllib2.HTTPError, exception is also thrown
293 On failures it'll raise urllib2.HTTPError, exception is also thrown
294 when the return code is non 200
294 when the return code is non 200
295 """
295 """
296 # check first if it's not an local url
296 # check first if it's not a local url
296 # check first if it's not a local url
297 if os.path.isdir(url) or url.startswith(b'file:'):
298 return True
298 return True
299
299
300 if url.startswith(b'ssh:'):
300 if url.startswith(b'ssh:'):
301 # in case of invalid uri or authentication issues, sshpeer will
301 # in case of invalid uri or authentication issues, sshpeer will
302 # throw an exception.
302 # throw an exception.
303 mercurial.sshpeer.instance(repoui or mercurial.ui.ui(), url, False).lookup(b'tip')
303 mercurial.sshpeer.instance(repoui or mercurial.ui.ui(), url, False).lookup(b'tip')
304 return True
304 return True
305
305
306 url_prefix = None
306 url_prefix = None
307 if b'+' in url[:url.find(b'://')]:
307 if b'+' in url[:url.find(b'://')]:
308 url_prefix, url = url.split(b'+', 1)
308 url_prefix, url = url.split(b'+', 1)
309
309
310 handlers = []
310 handlers = []
311 url_obj = mercurial.util.url(url)
311 url_obj = mercurial.util.url(url)
312 test_uri, authinfo = url_obj.authinfo()
312 test_uri, authinfo = url_obj.authinfo()
313 url_obj.passwd = b'*****'
313 url_obj.passwd = b'*****'
314 cleaned_uri = str(url_obj)
314 cleaned_uri = str(url_obj)
315
315
316 if authinfo:
316 if authinfo:
317 # create a password manager
317 # create a password manager
318 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
318 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
319 passmgr.add_password(*authinfo)
319 passmgr.add_password(*authinfo)
320
320
321 handlers.extend((mercurial.url.httpbasicauthhandler(passmgr),
321 handlers.extend((mercurial.url.httpbasicauthhandler(passmgr),
322 mercurial.url.httpdigestauthhandler(passmgr)))
322 mercurial.url.httpdigestauthhandler(passmgr)))
323
323
324 o = urllib.request.build_opener(*handlers)
324 o = urllib.request.build_opener(*handlers)
325 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
325 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
326 ('Accept', 'application/mercurial-0.1')]
326 ('Accept', 'application/mercurial-0.1')]
327
327
328 req = urllib.request.Request(
328 req = urllib.request.Request(
329 "%s?%s" % (
329 "%s?%s" % (
330 test_uri,
330 test_uri,
331 urllib.parse.urlencode({
331 urllib.parse.urlencode({
332 'cmd': 'between',
332 'cmd': 'between',
333 'pairs': "%s-%s" % ('0' * 40, '0' * 40),
333 'pairs': "%s-%s" % ('0' * 40, '0' * 40),
334 })
334 })
335 ))
335 ))
336
336
337 try:
337 try:
338 resp = o.open(req)
338 resp = o.open(req)
339 if resp.code != 200:
339 if resp.code != 200:
340 raise Exception('Return Code is not 200')
340 raise Exception('Return Code is not 200')
341 except Exception as e:
341 except Exception as e:
342 # means it cannot be cloned
342 # means it cannot be cloned
343 raise urllib.error.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
343 raise urllib.error.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
344
344
345 if not url_prefix: # skip svn+http://... (and git+... too)
345 if not url_prefix: # skip svn+http://... (and git+... too)
346 # now check if it's a proper hg repo
346 # now check if it's a proper hg repo
347 try:
347 try:
348 mercurial.httppeer.instance(repoui or mercurial.ui.ui(), url, False).lookup(b'tip')
348 mercurial.httppeer.instance(repoui or mercurial.ui.ui(), url, False).lookup(b'tip')
349 except Exception as e:
349 except Exception as e:
350 raise urllib.error.URLError(
350 raise urllib.error.URLError(
351 "url [%s] does not look like an hg repo org_exc: %s"
351 "url [%s] does not look like an hg repo org_exc: %s"
352 % (cleaned_uri, e))
352 % (cleaned_uri, e))
353
353
354 return True
354 return True
355
355
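# Editor's sketch (not part of the commit): how the check above is typically
# used before cloning; the URL is hypothetical.
#   try:
#       MercurialRepository._check_url(b'https://hg.example.com/repo', repoui)
#   except urllib.error.URLError as e:
#       raise RepositoryError('remote is not usable as a clone source: %s' % e)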
356 def _get_repo(self, create, src_url=None, update_after_clone=False):
356 def _get_repo(self, create, src_url=None, update_after_clone=False):
357 """
357 """
358 Function will check for a mercurial repository in the given path and return
358 Function will check for a mercurial repository in the given path and return
359 a localrepo object. If there is no repository in that path it will
359 a localrepo object. If there is no repository in that path it will
360 raise an exception, unless the ``create`` parameter is set to True - in
360 raise an exception, unless the ``create`` parameter is set to True - in
361 that case the repository will be created and returned.
361 that case the repository will be created and returned.
362 If ``src_url`` is given, it will try to clone the repository from that
362 If ``src_url`` is given, it will try to clone the repository from that
363 location. Additionally, the working copy will be updated according to
363 location. Additionally, the working copy will be updated according to
364 the ``update_after_clone`` flag.
364 the ``update_after_clone`` flag.
365 """
365 """
366 try:
366 try:
367 if src_url:
367 if src_url:
368 url = safe_bytes(self._get_url(src_url))
368 url = safe_bytes(self._get_url(src_url))
369 opts = {}
369 opts = {}
370 if not update_after_clone:
370 if not update_after_clone:
371 opts.update({'noupdate': True})
371 opts.update({'noupdate': True})
372 MercurialRepository._check_url(url, self.baseui)
372 MercurialRepository._check_url(url, self.baseui)
373 mercurial.commands.clone(self.baseui, url, safe_bytes(self.path), **opts)
373 mercurial.commands.clone(self.baseui, url, safe_bytes(self.path), **opts)
374
374
375 # Don't try to create if we've already cloned repo
375 # Don't try to create if we've already cloned repo
376 create = False
376 create = False
377 return mercurial.localrepo.instance(self.baseui, safe_bytes(self.path), create=create)
377 return mercurial.localrepo.instance(self.baseui, safe_bytes(self.path), create=create)
378 except (mercurial.error.Abort, mercurial.error.RepoError) as err:
378 except (mercurial.error.Abort, mercurial.error.RepoError) as err:
379 if create:
379 if create:
380 msg = "Cannot create repository at %s. Original error was %s" \
380 msg = "Cannot create repository at %s. Original error was %s" \
381 % (self.name, err)
381 % (self.name, err)
382 else:
382 else:
383 msg = "Not valid repository at %s. Original error was %s" \
383 msg = "Not valid repository at %s. Original error was %s" \
384 % (self.name, err)
384 % (self.name, err)
385 raise RepositoryError(msg)
385 raise RepositoryError(msg)
386
386
387 @LazyProperty
387 @LazyProperty
388 def in_memory_changeset(self):
388 def in_memory_changeset(self):
389 return MercurialInMemoryChangeset(self)
389 return MercurialInMemoryChangeset(self)
390
390
391 @LazyProperty
391 @LazyProperty
392 def description(self):
392 def description(self):
393 _desc = self._repo.ui.config(b'web', b'description', None, untrusted=True)
393 _desc = self._repo.ui.config(b'web', b'description', None, untrusted=True)
394 return safe_str(_desc or b'unknown')
394 return safe_str(_desc or b'unknown')
395
395
396 @LazyProperty
396 @LazyProperty
397 def contact(self):
397 def contact(self):
398 return safe_str(mercurial.hgweb.common.get_contact(self._repo.ui.config)
398 return safe_str(mercurial.hgweb.common.get_contact(self._repo.ui.config)
399 or b'Unknown')
399 or b'Unknown')
400
400
401 @LazyProperty
401 @LazyProperty
402 def last_change(self):
402 def last_change(self):
403 """
403 """
404 Returns the last change made on this repository as a datetime object
404 Returns the last change made on this repository as a datetime object
405 """
405 """
406 return date_fromtimestamp(self._get_mtime(), makedate()[1])
406 return date_fromtimestamp(self._get_mtime(), makedate()[1])
407
407
408 def _get_mtime(self):
408 def _get_mtime(self):
409 try:
409 try:
410 return time.mktime(self.get_changeset().date.timetuple())
410 return time.mktime(self.get_changeset().date.timetuple())
411 except RepositoryError:
411 except RepositoryError:
412 # fallback to filesystem
412 # fallback to filesystem
413 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
413 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
414 st_path = os.path.join(self.path, '.hg', "store")
414 st_path = os.path.join(self.path, '.hg', "store")
415 if os.path.exists(cl_path):
415 if os.path.exists(cl_path):
416 return os.stat(cl_path).st_mtime
416 return os.stat(cl_path).st_mtime
417 else:
417 else:
418 return os.stat(st_path).st_mtime
418 return os.stat(st_path).st_mtime
419
419
420 def _get_revision(self, revision):
420 def _get_revision(self, revision):
421 """
421 """
422 Given any revision identifier, returns a 40 char string with revision hash.
422 Given any revision identifier, returns a 40 char string with revision hash.
423
423
424 :param revision: str or int or None
424 :param revision: str or int or None
425 """
425 """
426 if self._empty:
426 if self._empty:
427 raise EmptyRepositoryError("There are no changesets yet")
427 raise EmptyRepositoryError("There are no changesets yet")
428
428
429 if revision in [-1, None]:
429 if revision in [-1, None]:
430 revision = b'tip'
430 revision = b'tip'
431 elif isinstance(revision, unicode):
431 elif isinstance(revision, str):
432 revision = safe_bytes(revision)
432 revision = safe_bytes(revision)
433
433
434 try:
434 try:
435 if isinstance(revision, int):
435 if isinstance(revision, int):
436 return ascii_str(self._repo[revision].hex())
436 return ascii_str(self._repo[revision].hex())
437 return ascii_str(mercurial.scmutil.revsymbol(self._repo, revision).hex())
437 return ascii_str(mercurial.scmutil.revsymbol(self._repo, revision).hex())
438 except (IndexError, ValueError, mercurial.error.RepoLookupError, TypeError):
438 except (IndexError, ValueError, mercurial.error.RepoLookupError, TypeError):
439 msg = "Revision %r does not exist for %s" % (safe_str(revision), self.name)
439 msg = "Revision %r does not exist for %s" % (safe_str(revision), self.name)
440 raise ChangesetDoesNotExistError(msg)
440 raise ChangesetDoesNotExistError(msg)
441 except (LookupError, ):
441 except (LookupError, ):
442 msg = "Ambiguous identifier `%s` for %s" % (safe_str(revision), self.name)
442 msg = "Ambiguous identifier `%s` for %s" % (safe_str(revision), self.name)
443 raise ChangesetDoesNotExistError(msg)
443 raise ChangesetDoesNotExistError(msg)
444
444
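# Editor's sketch (values hypothetical): the method above normalizes several
# identifier styles to a 40-character hex hash.
#   repo._get_revision(None)       # hash of 'tip'
#   repo._get_revision(0)          # hash of the first revision
#   repo._get_revision('default')  # hash the 'default' symbol resolves to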
445 def get_ref_revision(self, ref_type, ref_name):
445 def get_ref_revision(self, ref_type, ref_name):
446 """
446 """
447 Returns revision number for the given reference.
447 Returns revision number for the given reference.
448 """
448 """
449 if ref_type == 'rev' and not ref_name.strip('0'):
449 if ref_type == 'rev' and not ref_name.strip('0'):
450 return self.EMPTY_CHANGESET
450 return self.EMPTY_CHANGESET
451 # look up the exact node id
451 # look up the exact node id
452 _revset_predicates = {
452 _revset_predicates = {
453 'branch': 'branch',
453 'branch': 'branch',
454 'book': 'bookmark',
454 'book': 'bookmark',
455 'tag': 'tag',
455 'tag': 'tag',
456 'rev': 'id',
456 'rev': 'id',
457 }
457 }
458 # avoid expensive branch(x) iteration over whole repo
458 # avoid expensive branch(x) iteration over whole repo
459 rev_spec = "%%s & %s(%%s)" % _revset_predicates[ref_type]
459 rev_spec = "%%s & %s(%%s)" % _revset_predicates[ref_type]
460 try:
460 try:
461 revs = self._repo.revs(rev_spec, ref_name, ref_name)
461 revs = self._repo.revs(rev_spec, ref_name, ref_name)
462 except LookupError:
462 except LookupError:
463 msg = "Ambiguous identifier %s:%s for %s" % (ref_type, ref_name, self.name)
463 msg = "Ambiguous identifier %s:%s for %s" % (ref_type, ref_name, self.name)
464 raise ChangesetDoesNotExistError(msg)
464 raise ChangesetDoesNotExistError(msg)
465 except mercurial.error.RepoLookupError:
465 except mercurial.error.RepoLookupError:
466 msg = "Revision %s:%s does not exist for %s" % (ref_type, ref_name, self.name)
466 msg = "Revision %s:%s does not exist for %s" % (ref_type, ref_name, self.name)
467 raise ChangesetDoesNotExistError(msg)
467 raise ChangesetDoesNotExistError(msg)
468 if revs:
468 if revs:
469 revision = revs.last()
469 revision = revs.last()
470 else:
470 else:
471 # TODO: just report 'not found'?
471 # TODO: just report 'not found'?
472 revision = ref_name
472 revision = ref_name
473
473
474 return self._get_revision(revision)
474 return self._get_revision(revision)
475
475
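# Editor's sketch: for ref_type='book' and ref_name='feature-x', the revset
# evaluated above is "feature-x & bookmark(feature-x)"; anchoring on the name
# first avoids iterating bookmark()/branch() over the whole repository.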
476 def _get_archives(self, archive_name='tip'):
476 def _get_archives(self, archive_name='tip'):
477 allowed = self.baseui.configlist(b"web", b"allow_archive",
477 allowed = self.baseui.configlist(b"web", b"allow_archive",
478 untrusted=True)
478 untrusted=True)
479 for name, ext in [(b'zip', '.zip'), (b'gz', '.tar.gz'), (b'bz2', '.tar.bz2')]:
479 for name, ext in [(b'zip', '.zip'), (b'gz', '.tar.gz'), (b'bz2', '.tar.bz2')]:
480 if name in allowed or self._repo.ui.configbool(b"web",
480 if name in allowed or self._repo.ui.configbool(b"web",
481 b"allow" + name,
481 b"allow" + name,
482 untrusted=True):
482 untrusted=True):
483 yield {"type": safe_str(name), "extension": ext, "node": archive_name}
483 yield {"type": safe_str(name), "extension": ext, "node": archive_name}
484
484
485 def _get_url(self, url):
485 def _get_url(self, url):
486 """
486 """
487 Returns normalized url. If no scheme is given, fall back to the
487 Returns normalized url. If no scheme is given, fall back to the
488 filesystem (``file:///``) scheme.
488 filesystem (``file:///``) scheme.
489 """
489 """
490 if url != 'default' and '://' not in url:
490 if url != 'default' and '://' not in url:
491 url = "file:" + urllib.request.pathname2url(url)
491 url = "file:" + urllib.request.pathname2url(url)
492 return url
492 return url
493
493
494 def get_changeset(self, revision=None):
494 def get_changeset(self, revision=None):
495 """
495 """
496 Returns ``MercurialChangeset`` object representing repository's
496 Returns ``MercurialChangeset`` object representing repository's
497 changeset at the given ``revision``.
497 changeset at the given ``revision``.
498 """
498 """
499 return MercurialChangeset(repository=self, revision=self._get_revision(revision))
499 return MercurialChangeset(repository=self, revision=self._get_revision(revision))
500
500
501 def get_changesets(self, start=None, end=None, start_date=None,
501 def get_changesets(self, start=None, end=None, start_date=None,
502 end_date=None, branch_name=None, reverse=False, max_revisions=None):
502 end_date=None, branch_name=None, reverse=False, max_revisions=None):
503 """
503 """
504 Returns iterator of ``MercurialChangeset`` objects from start to end
504 Returns iterator of ``MercurialChangeset`` objects from start to end
505 (both are inclusive)
505 (both are inclusive)
506
506
507 :param start: None, str, int or mercurial lookup format
507 :param start: None, str, int or mercurial lookup format
508 :param end: None, str, int or mercurial lookup format
508 :param end: None, str, int or mercurial lookup format
509 :param start_date:
509 :param start_date:
510 :param end_date:
510 :param end_date:
511 :param branch_name:
511 :param branch_name:
512 :param reverse: return changesets in reversed order
512 :param reverse: return changesets in reversed order
513 """
513 """
514 start_raw_id = self._get_revision(start)
514 start_raw_id = self._get_revision(start)
515 start_pos = None if start is None else self.revisions.index(start_raw_id)
515 start_pos = None if start is None else self.revisions.index(start_raw_id)
516 end_raw_id = self._get_revision(end)
516 end_raw_id = self._get_revision(end)
517 end_pos = None if end is None else self.revisions.index(end_raw_id)
517 end_pos = None if end is None else self.revisions.index(end_raw_id)
518
518
519 if start_pos is not None and end_pos is not None and start_pos > end_pos:
519 if start_pos is not None and end_pos is not None and start_pos > end_pos:
520 raise RepositoryError("Start revision '%s' cannot be "
520 raise RepositoryError("Start revision '%s' cannot be "
521 "after end revision '%s'" % (start, end))
521 "after end revision '%s'" % (start, end))
522
522
523 if branch_name and branch_name not in self.allbranches:
523 if branch_name and branch_name not in self.allbranches:
524 msg = "Branch %r not found in %s" % (branch_name, self.name)
524 msg = "Branch %r not found in %s" % (branch_name, self.name)
525 raise BranchDoesNotExistError(msg)
525 raise BranchDoesNotExistError(msg)
526 if end_pos is not None:
526 if end_pos is not None:
527 end_pos += 1
527 end_pos += 1
528 # filter branches
528 # filter branches
529 filter_ = []
529 filter_ = []
530 if branch_name:
530 if branch_name:
531 filter_.append(b'branch("%s")' % safe_bytes(branch_name))
531 filter_.append(b'branch("%s")' % safe_bytes(branch_name))
532 if start_date:
532 if start_date:
533 filter_.append(b'date(">%s")' % safe_bytes(str(start_date)))
533 filter_.append(b'date(">%s")' % safe_bytes(str(start_date)))
534 if end_date:
534 if end_date:
535 filter_.append(b'date("<%s")' % safe_bytes(str(end_date)))
535 filter_.append(b'date("<%s")' % safe_bytes(str(end_date)))
536 if filter_ or max_revisions:
536 if filter_ or max_revisions:
537 if filter_:
537 if filter_:
538 revspec = b' and '.join(filter_)
538 revspec = b' and '.join(filter_)
539 else:
539 else:
540 revspec = b'all()'
540 revspec = b'all()'
541 if max_revisions:
541 if max_revisions:
542 revspec = b'limit(%s, %d)' % (revspec, max_revisions)
542 revspec = b'limit(%s, %d)' % (revspec, max_revisions)
543 revisions = mercurial.scmutil.revrange(self._repo, [revspec])
543 revisions = mercurial.scmutil.revrange(self._repo, [revspec])
544 else:
544 else:
545 revisions = self.revisions
545 revisions = self.revisions
546
546
547 # this is very much a hack to turn this into a list; a better solution
547 # this is very much a hack to turn this into a list; a better solution
548 # would be to get rid of this function entirely and use revsets
548 # would be to get rid of this function entirely and use revsets
549 revs = list(revisions)[start_pos:end_pos]
549 revs = list(revisions)[start_pos:end_pos]
550 if reverse:
550 if reverse:
551 revs.reverse()
551 revs.reverse()
552
552
553 return CollectionGenerator(self, revs)
553 return CollectionGenerator(self, revs)
554
554
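# Editor's sketch: with branch_name='stable', a start_date and max_revisions=10,
# the code above assembles a revspec roughly like
#   limit(branch("stable") and date(">2020-01-01 00:00:00"), 10)
# and resolves it via mercurial.scmutil.revrange() before slicing and
# optionally reversing the result.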
555 def pull(self, url):
555 def pull(self, url):
556 """
556 """
557 Tries to pull changes from external location.
557 Tries to pull changes from external location.
558 """
558 """
559 other = mercurial.hg.peer(self._repo, {}, safe_bytes(self._get_url(url)))
559 other = mercurial.hg.peer(self._repo, {}, safe_bytes(self._get_url(url)))
560 try:
560 try:
561 mercurial.exchange.pull(self._repo, other, heads=None, force=None)
561 mercurial.exchange.pull(self._repo, other, heads=None, force=None)
562 except mercurial.error.Abort as err:
562 except mercurial.error.Abort as err:
563 # Propagate error but with vcs's type
563 # Propagate error but with vcs's type
564 raise RepositoryError(str(err))
564 raise RepositoryError(str(err))
565
565
566 @LazyProperty
566 @LazyProperty
567 def workdir(self):
567 def workdir(self):
568 """
568 """
569 Returns ``Workdir`` instance for this repository.
569 Returns ``Workdir`` instance for this repository.
570 """
570 """
571 return MercurialWorkdir(self)
571 return MercurialWorkdir(self)
572
572
573 def get_config_value(self, section, name=None, config_file=None):
573 def get_config_value(self, section, name=None, config_file=None):
574 """
574 """
575 Returns configuration value for a given [``section``] and ``name``.
575 Returns configuration value for a given [``section``] and ``name``.
576
576
577 :param section: Section we want to retrieve value from
577 :param section: Section we want to retrieve value from
578 :param name: Name of configuration we want to retrieve
578 :param name: Name of configuration we want to retrieve
579 :param config_file: A path to a file which should be used to retrieve
579 :param config_file: A path to a file which should be used to retrieve
580 configuration from (might also be a list of file paths)
580 configuration from (might also be a list of file paths)
581 """
581 """
582 if config_file is None:
582 if config_file is None:
583 config_file = []
583 config_file = []
584 elif isinstance(config_file, str):
584 elif isinstance(config_file, str):
585 config_file = [config_file]
585 config_file = [config_file]
586
586
587 config = self._repo.ui
587 config = self._repo.ui
588 if config_file:
588 if config_file:
589 config = mercurial.ui.ui()
589 config = mercurial.ui.ui()
590 for path in config_file:
590 for path in config_file:
591 config.readconfig(safe_bytes(path))
591 config.readconfig(safe_bytes(path))
592 value = config.config(safe_bytes(section), safe_bytes(name))
592 value = config.config(safe_bytes(section), safe_bytes(name))
593 return value if value is None else safe_str(value)
593 return value if value is None else safe_str(value)
594
594
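# Editor's sketch (hypothetical values): reading from the repository ui, or
# from an explicit hgrc-style file when one is given.
#   repo.get_config_value('ui', 'username')
#   repo.get_config_value('web', 'description', config_file='/etc/kallithea/hgrc')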
595 def get_user_name(self, config_file=None):
595 def get_user_name(self, config_file=None):
596 """
596 """
597 Returns the user's name from the global configuration file.
597 Returns the user's name from the global configuration file.
598
598
599 :param config_file: A path to a file which should be used to retrieve
599 :param config_file: A path to a file which should be used to retrieve
600 configuration from (might also be a list of file paths)
600 configuration from (might also be a list of file paths)
601 """
601 """
602 username = self.get_config_value('ui', 'username', config_file=config_file)
602 username = self.get_config_value('ui', 'username', config_file=config_file)
603 if username:
603 if username:
604 return author_name(username)
604 return author_name(username)
605 return None
605 return None
606
606
607 def get_user_email(self, config_file=None):
607 def get_user_email(self, config_file=None):
608 """
608 """
609 Returns the user's email from the global configuration file.
609 Returns the user's email from the global configuration file.
610
610
611 :param config_file: A path to a file which should be used to retrieve
611 :param config_file: A path to a file which should be used to retrieve
612 configuration from (might also be a list of file paths)
612 configuration from (might also be a list of file paths)
613 """
613 """
614 username = self.get_config_value('ui', 'username', config_file=config_file)
614 username = self.get_config_value('ui', 'username', config_file=config_file)
615 if username:
615 if username:
616 return author_email(username)
616 return author_email(username)
617 return None
617 return None
@@ -1,424 +1,424 b''
1 """
1 """
2 Module provides a class that wraps communication over subprocess.Popen
2 Module provides a class that wraps communication over subprocess.Popen
3 input, output, and error streams into a meaningful, non-blocking, concurrent
3 input, output, and error streams into a meaningful, non-blocking, concurrent
4 stream processor exposing the output data as an iterator fit to be a
4 stream processor exposing the output data as an iterator fit to be a
5 return value passed by a WSGI application to a WSGI server per PEP 3333.
5 return value passed by a WSGI application to a WSGI server per PEP 3333.
6
6
7 Copyright (c) 2011 Daniel Dotsenko <dotsa[at]hotmail.com>
7 Copyright (c) 2011 Daniel Dotsenko <dotsa[at]hotmail.com>
8
8
9 This file is part of git_http_backend.py Project.
9 This file is part of git_http_backend.py Project.
10
10
11 git_http_backend.py Project is free software: you can redistribute it and/or
11 git_http_backend.py Project is free software: you can redistribute it and/or
12 modify it under the terms of the GNU Lesser General Public License as
12 modify it under the terms of the GNU Lesser General Public License as
13 published by the Free Software Foundation, either version 2.1 of the License,
13 published by the Free Software Foundation, either version 2.1 of the License,
14 or (at your option) any later version.
14 or (at your option) any later version.
15
15
16 git_http_backend.py Project is distributed in the hope that it will be useful,
16 git_http_backend.py Project is distributed in the hope that it will be useful,
17 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 but WITHOUT ANY WARRANTY; without even the implied warranty of
18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 GNU Lesser General Public License for more details.
19 GNU Lesser General Public License for more details.
20
20
21 You should have received a copy of the GNU Lesser General Public License
21 You should have received a copy of the GNU Lesser General Public License
22 along with git_http_backend.py Project.
22 along with git_http_backend.py Project.
23 If not, see <http://www.gnu.org/licenses/>.
23 If not, see <http://www.gnu.org/licenses/>.
24 """
24 """
25 import collections
25 import collections
26 import os
26 import os
27 import subprocess
27 import subprocess
28 import threading
28 import threading
29
29
30
30
31 class StreamFeeder(threading.Thread):
31 class StreamFeeder(threading.Thread):
32 """
32 """
33 Normal writing into a pipe-like object blocks once the buffer is filled.
33 Normal writing into a pipe-like object blocks once the buffer is filled.
34 This thread allows feeding data from a file-like object into a pipe
34 This thread allows feeding data from a file-like object into a pipe
35 without blocking the main thread.
35 without blocking the main thread.
36 We close the input pipe once the end of the source stream is reached.
36 We close the input pipe once the end of the source stream is reached.
37 """
37 """
38
38
39 def __init__(self, source):
39 def __init__(self, source):
40 super(StreamFeeder, self).__init__()
40 super(StreamFeeder, self).__init__()
41 self.daemon = True
41 self.daemon = True
42 filelike = False
42 filelike = False
43 self.bytes = bytes()
43 self.bytes = bytes()
44 if type(source) in (type(''), bytes, bytearray): # string-like
44 if type(source) in (type(''), bytes, bytearray): # string-like
45 self.bytes = bytes(source)
45 self.bytes = bytes(source)
46 else: # can be either file pointer or file-like
46 else: # can be either file pointer or file-like
47 if isinstance(source, int): # file pointer it is
47 if isinstance(source, int): # file pointer it is
48 # converting file descriptor (int) stdin into file-like
48 # converting file descriptor (int) stdin into file-like
49 source = os.fdopen(source, 'rb', 16384)
49 source = os.fdopen(source, 'rb', 16384)
50 # let's see if source is file-like by now
50 # let's see if source is file-like by now
51 filelike = hasattr(source, 'read')
51 filelike = hasattr(source, 'read')
52 if not filelike and not self.bytes:
52 if not filelike and not self.bytes:
53 raise TypeError("StreamFeeder's source object must be a readable "
53 raise TypeError("StreamFeeder's source object must be a readable "
54 "file-like, a file descriptor, or a string-like.")
54 "file-like, a file descriptor, or a string-like.")
55 self.source = source
55 self.source = source
56 self.readiface, self.writeiface = os.pipe()
56 self.readiface, self.writeiface = os.pipe()
57
57
58 def run(self):
58 def run(self):
59 t = self.writeiface
59 t = self.writeiface
60 if self.bytes:
60 if self.bytes:
61 os.write(t, self.bytes)
61 os.write(t, self.bytes)
62 else:
62 else:
63 s = self.source
63 s = self.source
64 b = s.read(4096)
64 b = s.read(4096)
65 while b:
65 while b:
66 os.write(t, b)
66 os.write(t, b)
67 b = s.read(4096)
67 b = s.read(4096)
68 os.close(t)
68 os.close(t)
69
69
70 @property
70 @property
71 def output(self):
71 def output(self):
72 return self.readiface
72 return self.readiface
73
73
74
74
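# Editor's sketch: feeding request input to a subprocess without blocking the
# caller; this mirrors how SubprocessIOChunker below wires things up (cmd is
# hypothetical).
#   feeder = StreamFeeder(wsgi_input)  # file-like, file descriptor, or bytes
#   feeder.start()
#   p = subprocess.Popen(cmd, stdin=feeder.output, stdout=subprocess.PIPE)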
75 class InputStreamChunker(threading.Thread):
75 class InputStreamChunker(threading.Thread):
76 def __init__(self, source, target, buffer_size, chunk_size):
76 def __init__(self, source, target, buffer_size, chunk_size):
77
77
78 super(InputStreamChunker, self).__init__()
78 super(InputStreamChunker, self).__init__()
79
79
80 self.daemon = True # die die die.
80 self.daemon = True # die die die.
81
81
82 self.source = source
82 self.source = source
83 self.target = target
83 self.target = target
84 self.chunk_count_max = int(buffer_size / chunk_size) + 1
84 self.chunk_count_max = int(buffer_size / chunk_size) + 1
85 self.chunk_size = chunk_size
85 self.chunk_size = chunk_size
86
86
87 self.data_added = threading.Event()
87 self.data_added = threading.Event()
88 self.data_added.clear()
88 self.data_added.clear()
89
89
90 self.keep_reading = threading.Event()
90 self.keep_reading = threading.Event()
91 self.keep_reading.set()
91 self.keep_reading.set()
92
92
93 self.EOF = threading.Event()
93 self.EOF = threading.Event()
94 self.EOF.clear()
94 self.EOF.clear()
95
95
96 self.go = threading.Event()
96 self.go = threading.Event()
97 self.go.set()
97 self.go.set()
98
98
99 def stop(self):
99 def stop(self):
100 self.go.clear()
100 self.go.clear()
101 self.EOF.set()
101 self.EOF.set()
102 try:
102 try:
103 # this is not proper, but is done to force the reader thread to let
103 # this is not proper, but is done to force the reader thread to let
104 # go of the input because, if successful, .close() will send EOF
104 # go of the input because, if successful, .close() will send EOF
105 # down the pipe.
105 # down the pipe.
106 self.source.close()
106 self.source.close()
107 except:
107 except:
108 pass
108 pass
109
109
110 def run(self):
110 def run(self):
111 s = self.source
111 s = self.source
112 t = self.target
112 t = self.target
113 cs = self.chunk_size
113 cs = self.chunk_size
114 ccm = self.chunk_count_max
114 ccm = self.chunk_count_max
115 kr = self.keep_reading
115 kr = self.keep_reading
116 da = self.data_added
116 da = self.data_added
117 go = self.go
117 go = self.go
118
118
119 try:
119 try:
120 b = s.read(cs)
120 b = s.read(cs)
121 except ValueError:
121 except ValueError:
122 b = ''
122 b = ''
123
123
124 while b and go.is_set():
124 while b and go.is_set():
125 if len(t) > ccm:
125 if len(t) > ccm:
126 kr.clear()
126 kr.clear()
127 kr.wait(2)
127 kr.wait(2)
128 # # this only works on 2.7.x and up
128 # # this only works on 2.7.x and up
129 # if not kr.wait(10):
129 # if not kr.wait(10):
130 # raise Exception("Timed out while waiting for input to be read.")
130 # raise Exception("Timed out while waiting for input to be read.")
131 # instead we'll use this
131 # instead we'll use this
132 if len(t) > ccm + 3:
132 if len(t) > ccm + 3:
133 raise IOError(
133 raise IOError(
134 "Timed out while waiting for input from subprocess.")
134 "Timed out while waiting for input from subprocess.")
135 t.append(b)
135 t.append(b)
136 da.set()
136 da.set()
137 try:
137 try:
138 b = s.read(cs)
138 b = s.read(cs)
139 except ValueError: # probably "I/O operation on closed file"
139 except ValueError: # probably "I/O operation on closed file"
140 b = ''
140 b = ''
141
141
142 self.EOF.set()
142 self.EOF.set()
143 da.set() # for cases when done but there was no input.
143 da.set() # for cases when done but there was no input.
144
144
145
145
146 class BufferedGenerator(object):
146 class BufferedGenerator(object):
147 """
147 """
148 Class behaves as a non-blocking, buffered pipe reader.
148 Class behaves as a non-blocking, buffered pipe reader.
149 Reads chunks of data (through a thread)
149 Reads chunks of data (through a thread)
150 from a blocking pipe, and appends these to an array (deque) of chunks.
150 from a blocking pipe, and appends these to an array (deque) of chunks.
151 Reading is halted in the thread when the maximum number of chunks is buffered.
151 Reading is halted in the thread when the maximum number of chunks is buffered.
152 The .next() may operate in blocking or non-blocking fashion: by yielding
152 The .next() may operate in blocking or non-blocking fashion: by yielding
153 '' if no data is ready
153 '' if no data is ready
154 to be sent, or by not returning until there is some data to send.
154 to be sent, or by not returning until there is some data to send.
155 When we get EOF from the underlying source pipe, we set a marker to raise
155 When we get EOF from the underlying source pipe, we set a marker to raise
156 StopIteration after the last chunk of data is yielded.
156 StopIteration after the last chunk of data is yielded.
157 """
157 """
158
158
159 def __init__(self, source, buffer_size=65536, chunk_size=4096,
159 def __init__(self, source, buffer_size=65536, chunk_size=4096,
160 starting_values=None, bottomless=False):
160 starting_values=None, bottomless=False):
161 starting_values = starting_values or []
161 starting_values = starting_values or []
162 if bottomless:
162 if bottomless:
163 maxlen = int(buffer_size / chunk_size)
163 maxlen = int(buffer_size / chunk_size)
164 else:
164 else:
165 maxlen = None
165 maxlen = None
166
166
167 self.data = collections.deque(starting_values, maxlen)
167 self.data = collections.deque(starting_values, maxlen)
168 self.worker = InputStreamChunker(source, self.data, buffer_size,
168 self.worker = InputStreamChunker(source, self.data, buffer_size,
169 chunk_size)
169 chunk_size)
170 if starting_values:
170 if starting_values:
171 self.worker.data_added.set()
171 self.worker.data_added.set()
172 self.worker.start()
172 self.worker.start()
173
173
174 ####################
174 ####################
175 # Generator's methods
175 # Generator's methods
176 ####################
176 ####################
177
177
178 def __iter__(self):
178 def __iter__(self):
179 return self
179 return self
180
180
181 def __next__(self):
181 def __next__(self):
182 while not len(self.data) and not self.worker.EOF.is_set():
182 while not len(self.data) and not self.worker.EOF.is_set():
183 self.worker.data_added.clear()
183 self.worker.data_added.clear()
184 self.worker.data_added.wait(0.2)
184 self.worker.data_added.wait(0.2)
185 if len(self.data):
185 if len(self.data):
186 self.worker.keep_reading.set()
186 self.worker.keep_reading.set()
187 return bytes(self.data.popleft())
187 return bytes(self.data.popleft())
188 elif self.worker.EOF.is_set():
188 elif self.worker.EOF.is_set():
189 raise StopIteration
189 raise StopIteration
190
190
191 def throw(self, type, value=None, traceback=None):
191 def throw(self, type, value=None, traceback=None):
192 if not self.worker.EOF.is_set():
192 if not self.worker.EOF.is_set():
193 raise type(value)
193 raise type(value)
194
194
195 def start(self):
195 def start(self):
196 self.worker.start()
196 self.worker.start()
197
197
198 def stop(self):
198 def stop(self):
199 self.worker.stop()
199 self.worker.stop()
200
200
201 def close(self):
201 def close(self):
202 try:
202 try:
203 self.worker.stop()
203 self.worker.stop()
204 self.throw(GeneratorExit)
204 self.throw(GeneratorExit)
205 except (GeneratorExit, StopIteration):
205 except (GeneratorExit, StopIteration):
206 pass
206 pass
207
207
208 ####################
208 ####################
209 # Threaded reader's infrastructure.
209 # Threaded reader's infrastructure.
210 ####################
210 ####################
211 @property
211 @property
212 def input(self):
212 def input(self):
213 return self.worker.w
213 return self.worker.w
214
214
215 @property
215 @property
216 def data_added_event(self):
216 def data_added_event(self):
217 return self.worker.data_added
217 return self.worker.data_added
218
218
219 @property
219 @property
220 def data_added(self):
220 def data_added(self):
221 return self.worker.data_added.is_set()
221 return self.worker.data_added.is_set()
222
222
223 @property
223 @property
224 def reading_paused(self):
224 def reading_paused(self):
225 return not self.worker.keep_reading.is_set()
225 return not self.worker.keep_reading.is_set()
226
226
227 @property
227 @property
228 def done_reading_event(self):
228 def done_reading_event(self):
229 """
229 """
230 Done_reading does not mean that the iterator's buffer is empty.
230 Done_reading does not mean that the iterator's buffer is empty.
231 The iterator might be done reading from the underlying source, but the read
231 The iterator might be done reading from the underlying source, but the read
232 chunks might still be available for serving through the .next() method.
232 chunks might still be available for serving through the .next() method.
233
233
234 :returns: A threading.Event class instance.
234 :returns: A threading.Event class instance.
235 """
235 """
236 return self.worker.EOF
236 return self.worker.EOF
237
237
238 @property
238 @property
239 def done_reading(self):
239 def done_reading(self):
240 """
240 """
241 Done_reading does not mean that the iterator's buffer is empty.
241 Done_reading does not mean that the iterator's buffer is empty.
242 Iterator might have done reading from underlying source, but the read
242 Iterator might have done reading from underlying source, but the read
243 chunks might still be available for serving through .next() method.
243 chunks might still be available for serving through .next() method.
244
244
245 :returns: An Bool value.
245 :returns: An Bool value.
246 """
246 """
247 return self.worker.EOF.is_set()
247 return self.worker.EOF.is_set()
248
248
249 @property
249 @property
250 def length(self):
250 def length(self):
251 """
251 """
252 returns int.
252 returns int.
253
253
254 This is the length of the queue of chunks, not the length of
254 This is the length of the queue of chunks, not the length of
255 the combined contents in those chunks.
255 the combined contents in those chunks.
256
256
257 __len__() cannot be meaningfully implemented because this
257 __len__() cannot be meaningfully implemented because this
258 reader is just flying through a bottomless pit of content and
258 reader is just flying through a bottomless pit of content and
259 can only know the length of what it already saw.
259 can only know the length of what it already saw.
260
260
261 If __len__() returned a value, a WSGI server per PEP 3333 would
261 If __len__() returned a value, a WSGI server per PEP 3333 would
262 set the response's length to that. In order not to
262 set the response's length to that. In order not to
263 confuse WSGI PEP 3333 servers, we will not implement __len__
263 confuse WSGI PEP 3333 servers, we will not implement __len__
264 at all.
264 at all.
265 """
265 """
266 return len(self.data)
266 return len(self.data)
267
267
268 def prepend(self, x):
268 def prepend(self, x):
269 self.data.appendleft(x)
269 self.data.appendleft(x)
270
270
271 def append(self, x):
271 def append(self, x):
272 self.data.append(x)
272 self.data.append(x)
273
273
274 def extend(self, o):
274 def extend(self, o):
275 self.data.extend(o)
275 self.data.extend(o)
276
276
277 def __getitem__(self, i):
277 def __getitem__(self, i):
278 return self.data[i]
278 return self.data[i]
279
279
280
280
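# Editor's sketch: reading a blocking pipe through a background thread; chunks
# are buffered by the worker and yielded in order as bytes.
#   bg = BufferedGenerator(p.stdout, buffer_size=65536, chunk_size=4096)
#   for chunk in bg:  # raises StopIteration after EOF once the buffer drains
#       handle(chunk)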
281 class SubprocessIOChunker(object):
281 class SubprocessIOChunker(object):
282 """
282 """
283 Processor class wrapping handling of subprocess IO.
283 Processor class wrapping handling of subprocess IO.
284
284
285 In a way, this is a "communicate()" replacement with a twist.
285 In a way, this is a "communicate()" replacement with a twist.
286
286
287 - We are multithreaded. Writing in, reading out and reading err are all separate threads.
287 - We are multithreaded. Writing in, reading out and reading err are all separate threads.
288 - We support concurrent (in and out) stream processing.
288 - We support concurrent (in and out) stream processing.
289 - The output is not a stream. It's a queue of read string (bytes, not unicode)
289 - The output is not a stream. It's a queue of read string (bytes, not str)
290 chunks. The object behaves as an iterable. You can "for chunk in obj:" us.
290 chunks. The object behaves as an iterable. You can "for chunk in obj:" us.
291 - We are non-blocking in more respects than communicate()
291 - We are non-blocking in more respects than communicate()
292 (reading from subprocess out pauses when internal buffer is full, but
292 (reading from subprocess out pauses when internal buffer is full, but
293 does not block the parent calling code. On the flip side, reading from
293 does not block the parent calling code. On the flip side, reading from
294 slow-yielding subprocess may block the iteration until data shows up. This
294 slow-yielding subprocess may block the iteration until data shows up. This
295 does not block the parallel inpipe reading occurring in a parallel thread.)
295 does not block the parallel inpipe reading occurring in a parallel thread.)
296
296
297 The purpose of the object is to allow us to wrap subprocess interactions into
297 The purpose of the object is to allow us to wrap subprocess interactions into
298 an iterable that can be passed to a WSGI server as the application's return
298 an iterable that can be passed to a WSGI server as the application's return
299 value. Because of stream-processing-ability, WSGI does not have to read ALL
299 value. Because of stream-processing-ability, WSGI does not have to read ALL
300 of the subprocess's output and buffer it, before handing it to WSGI server for
300 of the subprocess's output and buffer it, before handing it to WSGI server for
301 HTTP response. Instead, the class initializer reads just a bit of the stream
301 HTTP response. Instead, the class initializer reads just a bit of the stream
302 to figure out if an error occurred or is likely to occur and, if not, just hands the
302 to figure out if an error occurred or is likely to occur and, if not, just hands the
303 further iteration over subprocess output to the server for completion of HTTP
303 further iteration over subprocess output to the server for completion of HTTP
304 response.
304 response.
305
305
306 The real or perceived subprocess error is trapped and raised as one of
306 The real or perceived subprocess error is trapped and raised as one of
307 the EnvironmentError family of exceptions.
307 the EnvironmentError family of exceptions.
308
308
309 Example usage:
309 Example usage:
310 # try:
310 # try:
311 # answer = SubprocessIOChunker(
311 # answer = SubprocessIOChunker(
312 # cmd,
312 # cmd,
313 # input,
313 # input,
314 # buffer_size = 65536,
314 # buffer_size = 65536,
315 # chunk_size = 4096
315 # chunk_size = 4096
316 # )
316 # )
317 # except (EnvironmentError) as e:
317 # except (EnvironmentError) as e:
318 # print str(e)
318 # print str(e)
319 # raise e
319 # raise e
320 #
320 #
321 # return answer
321 # return answer
322
322
323
323
324 """
324 """
325
325
326 def __init__(self, cmd, inputstream=None, buffer_size=65536,
326 def __init__(self, cmd, inputstream=None, buffer_size=65536,
327 chunk_size=4096, starting_values=None, **kwargs):
327 chunk_size=4096, starting_values=None, **kwargs):
328 """
328 """
329 Initializes SubprocessIOChunker
329 Initializes SubprocessIOChunker
330
330
331 :param cmd: A subprocess.Popen style "cmd". Must be a list of strings.
331 :param cmd: A subprocess.Popen style "cmd". Must be a list of strings.
332 :param inputstream: (Default: None) A file-like, string, or file pointer.
332 :param inputstream: (Default: None) A file-like, string, or file pointer.
333 :param buffer_size: (Default: 65536) A size of total buffer per stream in bytes.
333 :param buffer_size: (Default: 65536) A size of total buffer per stream in bytes.
334 :param chunk_size: (Default: 4096) A max size of a chunk. Actual chunk may be smaller.
334 :param chunk_size: (Default: 4096) A max size of a chunk. Actual chunk may be smaller.
335 :param starting_values: (Default: []) An array of strings to put in front of the output queue.
335 :param starting_values: (Default: []) An array of strings to put in front of the output queue.
336 """
336 """
337 starting_values = starting_values or []
337 starting_values = starting_values or []
338 if inputstream:
338 if inputstream:
339 input_streamer = StreamFeeder(inputstream)
339 input_streamer = StreamFeeder(inputstream)
340 input_streamer.start()
340 input_streamer.start()
341 inputstream = input_streamer.output
341 inputstream = input_streamer.output
342
342
343 # Note: fragile cmd mangling has been removed for use in Kallithea
343 # Note: fragile cmd mangling has been removed for use in Kallithea
344 assert isinstance(cmd, list), cmd
344 assert isinstance(cmd, list), cmd
345
345
346 _p = subprocess.Popen(cmd, bufsize=-1,
346 _p = subprocess.Popen(cmd, bufsize=-1,
347 stdin=inputstream,
347 stdin=inputstream,
348 stdout=subprocess.PIPE,
348 stdout=subprocess.PIPE,
349 stderr=subprocess.PIPE,
349 stderr=subprocess.PIPE,
350 **kwargs)
350 **kwargs)
351
351
352 bg_out = BufferedGenerator(_p.stdout, buffer_size, chunk_size,
352 bg_out = BufferedGenerator(_p.stdout, buffer_size, chunk_size,
353 starting_values)
353 starting_values)
354 bg_err = BufferedGenerator(_p.stderr, 16000, 1, bottomless=True)
354 bg_err = BufferedGenerator(_p.stderr, 16000, 1, bottomless=True)
355
355
356 while not bg_out.done_reading and not bg_out.reading_paused:
356 while not bg_out.done_reading and not bg_out.reading_paused:
357 # doing this until we reach either end of file, or end of buffer.
357 # doing this until we reach either end of file, or end of buffer.
358 bg_out.data_added_event.wait(1)
358 bg_out.data_added_event.wait(1)
359 bg_out.data_added_event.clear()
359 bg_out.data_added_event.clear()
360
360
361 # at this point it's still ambiguous if we are done reading or just full buffer.
361 # at this point it's still ambiguous if we are done reading or just full buffer.
362 # Either way, if error (returned by ended process, or implied based on
362 # Either way, if error (returned by ended process, or implied based on
363 # presence of stuff in stderr output) we error out.
363 # presence of stuff in stderr output) we error out.
364 # Else, we are happy.
364 # Else, we are happy.
365 returncode = _p.poll()
365 returncode = _p.poll()
366 if (returncode is not None # process has terminated
366 if (returncode is not None # process has terminated
367 and returncode != 0
367 and returncode != 0
368 ): # and it failed
368 ): # and it failed
369 bg_out.stop()
369 bg_out.stop()
370 out = b''.join(bg_out)
370 out = b''.join(bg_out)
371 bg_err.stop()
371 bg_err.stop()
372 err = b''.join(bg_err)
372 err = b''.join(bg_err)
373 if (err.strip() == b'fatal: The remote end hung up unexpectedly' and
373 if (err.strip() == b'fatal: The remote end hung up unexpectedly' and
374 out.startswith(b'0034shallow ')
374 out.startswith(b'0034shallow ')
375 ):
375 ):
376 # hack inspired by https://github.com/schacon/grack/pull/7
376 # hack inspired by https://github.com/schacon/grack/pull/7
377 bg_out = iter([out])
377 bg_out = iter([out])
378 _p = None
378 _p = None
379 elif err:
379 elif err:
380 raise EnvironmentError("Subprocess exited due to an error: %s" % err)
380 raise EnvironmentError("Subprocess exited due to an error: %s" % err)
381 else:
381 else:
382 raise EnvironmentError(
382 raise EnvironmentError(
383 "Subprocess exited with non 0 ret code: %s" % returncode)
383 "Subprocess exited with non 0 ret code: %s" % returncode)
384 self.process = _p
384 self.process = _p
385 self.output = bg_out
385 self.output = bg_out
386 self.error = bg_err
386 self.error = bg_err
387 self.inputstream = inputstream
387 self.inputstream = inputstream
388
388
389 def __iter__(self):
389 def __iter__(self):
390 return self
390 return self
391
391
392 def __next__(self):
392 def __next__(self):
393 if self.process:
393 if self.process:
394 returncode = self.process.poll()
394 returncode = self.process.poll()
395 if (returncode is not None # process has terminated
395 if (returncode is not None # process has terminated
396 and returncode != 0
396 and returncode != 0
397 ): # and it failed
397 ): # and it failed
398 self.output.stop()
398 self.output.stop()
399 self.error.stop()
399 self.error.stop()
400 err = ''.join(self.error)
400 err = ''.join(self.error)
401 raise EnvironmentError("Subprocess exited due to an error:\n" + err)
401 raise EnvironmentError("Subprocess exited due to an error:\n" + err)
402 return next(self.output)
402 return next(self.output)
403
403
404 def throw(self, type, value=None, traceback=None):
404 def throw(self, type, value=None, traceback=None):
405 if self.output.length or not self.output.done_reading:
405 if self.output.length or not self.output.done_reading:
406 raise type(value)
406 raise type(value)
407
407
408 def close(self):
408 def close(self):
409 try:
409 try:
410 self.process.terminate()
410 self.process.terminate()
411 except:
411 except:
412 pass
412 pass
413 try:
413 try:
414 self.output.close()
414 self.output.close()
415 except:
415 except:
416 pass
416 pass
417 try:
417 try:
418 self.error.close()
418 self.error.close()
419 except:
419 except:
420 pass
420 pass
421 try:
421 try:
422 os.close(self.inputstream)
422 os.close(self.inputstream)
423 except:
423 except:
424 pass
424 pass
@@ -1,2550 +1,2550 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 # This program is free software: you can redistribute it and/or modify
2 # This program is free software: you can redistribute it and/or modify
3 # it under the terms of the GNU General Public License as published by
3 # it under the terms of the GNU General Public License as published by
4 # the Free Software Foundation, either version 3 of the License, or
4 # the Free Software Foundation, either version 3 of the License, or
5 # (at your option) any later version.
5 # (at your option) any later version.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU General Public License
12 # You should have received a copy of the GNU General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 """
14 """
15 kallithea.model.db
15 kallithea.model.db
16 ~~~~~~~~~~~~~~~~~~
16 ~~~~~~~~~~~~~~~~~~
17
17
18 Database Models for Kallithea
18 Database Models for Kallithea
19
19
20 This file was forked by the Kallithea project in July 2014.
20 This file was forked by the Kallithea project in July 2014.
21 Original author and date, and relevant copyright and licensing information is below:
21 Original author and date, and relevant copyright and licensing information is below:
22 :created_on: Apr 08, 2010
22 :created_on: Apr 08, 2010
23 :author: marcink
23 :author: marcink
24 :copyright: (c) 2013 RhodeCode GmbH, and others.
24 :copyright: (c) 2013 RhodeCode GmbH, and others.
25 :license: GPLv3, see LICENSE.md for more details.
25 :license: GPLv3, see LICENSE.md for more details.
26 """
26 """
27
27
28 import base64
28 import base64
29 import collections
29 import collections
30 import datetime
30 import datetime
31 import functools
31 import functools
32 import hashlib
32 import hashlib
33 import logging
33 import logging
34 import os
34 import os
35 import time
35 import time
36 import traceback
36 import traceback
37
37
38 import ipaddr
38 import ipaddr
39 import sqlalchemy
39 import sqlalchemy
40 from beaker.cache import cache_region, region_invalidate
40 from beaker.cache import cache_region, region_invalidate
41 from sqlalchemy import Boolean, Column, DateTime, Float, ForeignKey, Index, Integer, LargeBinary, String, Unicode, UnicodeText, UniqueConstraint
41 from sqlalchemy import Boolean, Column, DateTime, Float, ForeignKey, Index, Integer, LargeBinary, String, Unicode, UnicodeText, UniqueConstraint
42 from sqlalchemy.ext.hybrid import hybrid_property
42 from sqlalchemy.ext.hybrid import hybrid_property
43 from sqlalchemy.orm import class_mapper, joinedload, relationship, validates
43 from sqlalchemy.orm import class_mapper, joinedload, relationship, validates
44 from tg.i18n import lazy_ugettext as _
44 from tg.i18n import lazy_ugettext as _
45 from webob.exc import HTTPNotFound
45 from webob.exc import HTTPNotFound
46
46
47 import kallithea
47 import kallithea
48 from kallithea.lib import ext_json
48 from kallithea.lib import ext_json
49 from kallithea.lib.caching_query import FromCache
49 from kallithea.lib.caching_query import FromCache
50 from kallithea.lib.exceptions import DefaultUserException
50 from kallithea.lib.exceptions import DefaultUserException
51 from kallithea.lib.utils2 import (
51 from kallithea.lib.utils2 import (
52 Optional, ascii_bytes, aslist, get_changeset_safe, get_clone_url, remove_prefix, safe_bytes, safe_int, safe_str, str2bool, urlreadable)
52 Optional, ascii_bytes, aslist, get_changeset_safe, get_clone_url, remove_prefix, safe_bytes, safe_int, safe_str, str2bool, urlreadable)
53 from kallithea.lib.vcs import get_backend
53 from kallithea.lib.vcs import get_backend
54 from kallithea.lib.vcs.backends.base import EmptyChangeset
54 from kallithea.lib.vcs.backends.base import EmptyChangeset
55 from kallithea.lib.vcs.utils.helpers import get_scm
55 from kallithea.lib.vcs.utils.helpers import get_scm
56 from kallithea.lib.vcs.utils.lazy import LazyProperty
56 from kallithea.lib.vcs.utils.lazy import LazyProperty
57 from kallithea.model.meta import Base, Session
57 from kallithea.model.meta import Base, Session
58
58
59
59
60 URL_SEP = '/'
60 URL_SEP = '/'
61 log = logging.getLogger(__name__)
61 log = logging.getLogger(__name__)
62
62
63 #==============================================================================
63 #==============================================================================
64 # BASE CLASSES
64 # BASE CLASSES
65 #==============================================================================
65 #==============================================================================
66
66
67 def _hash_key(k):
67 def _hash_key(k):
68 return hashlib.md5(safe_bytes(k)).hexdigest()
68 return hashlib.md5(safe_bytes(k)).hexdigest()
69
69
70
70
71 class BaseDbModel(object):
71 class BaseDbModel(object):
72 """
72 """
73 Base Model for all classes
73 Base Model for all classes
74 """
74 """
75
75
76 @classmethod
76 @classmethod
77 def _get_keys(cls):
77 def _get_keys(cls):
78 """return column names for this model """
78 """return column names for this model """
79 # Note: not a normal dict - iterator gives "users.firstname", but keys gives "firstname"
79 # Note: not a normal dict - iterator gives "users.firstname", but keys gives "firstname"
80 return class_mapper(cls).c.keys()
80 return class_mapper(cls).c.keys()
81
81
82 def get_dict(self):
82 def get_dict(self):
83 """
83 """
84 return dict with keys and values corresponding
84 return dict with keys and values corresponding
85 to this model data """
85 to this model data """
86
86
87 d = {}
87 d = {}
88 for k in self._get_keys():
88 for k in self._get_keys():
89 d[k] = getattr(self, k)
89 d[k] = getattr(self, k)
90
90
91 # also use __json__() if present to get additional fields
91 # also use __json__() if present to get additional fields
92 _json_attr = getattr(self, '__json__', None)
92 _json_attr = getattr(self, '__json__', None)
93 if _json_attr:
93 if _json_attr:
94 # update with attributes from __json__
94 # update with attributes from __json__
95 if callable(_json_attr):
95 if callable(_json_attr):
96 _json_attr = _json_attr()
96 _json_attr = _json_attr()
97 for k, val in _json_attr.items():
97 for k, val in _json_attr.items():
98 d[k] = val
98 d[k] = val
99 return d
99 return d
100
100
101 def get_appstruct(self):
101 def get_appstruct(self):
102 """return list with keys and values tuples corresponding
102 """return list with keys and values tuples corresponding
103 to this model data """
103 to this model data """
104
104
105 return [
105 return [
106 (k, getattr(self, k))
106 (k, getattr(self, k))
107 for k in self._get_keys()
107 for k in self._get_keys()
108 ]
108 ]
109
109
110 def populate_obj(self, populate_dict):
110 def populate_obj(self, populate_dict):
111 """populate model with data from given populate_dict"""
111 """populate model with data from given populate_dict"""
112
112
113 for k in self._get_keys():
113 for k in self._get_keys():
114 if k in populate_dict:
114 if k in populate_dict:
115 setattr(self, k, populate_dict[k])
115 setattr(self, k, populate_dict[k])
116
116
117 @classmethod
117 @classmethod
118 def query(cls):
118 def query(cls):
119 return Session().query(cls)
119 return Session().query(cls)
120
120
121 @classmethod
121 @classmethod
122 def get(cls, id_):
122 def get(cls, id_):
123 if id_:
123 if id_:
124 return cls.query().get(id_)
124 return cls.query().get(id_)
125
125
126 @classmethod
126 @classmethod
127 def guess_instance(cls, value, callback=None):
127 def guess_instance(cls, value, callback=None):
128 """Haphazardly attempt to convert `value` to a `cls` instance.
128 """Haphazardly attempt to convert `value` to a `cls` instance.
129
129
130 If `value` is None or already a `cls` instance, return it. If `value`
130 If `value` is None or already a `cls` instance, return it. If `value`
131 is a number (or looks like one if you squint just right), assume it's
131 is a number (or looks like one if you squint just right), assume it's
132 a database primary key and let SQLAlchemy sort things out. Otherwise,
132 a database primary key and let SQLAlchemy sort things out. Otherwise,
133 fall back to resolving it using `callback` (if specified); this could
133 fall back to resolving it using `callback` (if specified); this could
134 e.g. be a function that looks up instances by name (though that won't
134 e.g. be a function that looks up instances by name (though that won't
135 work if the name begins with a digit). Otherwise, raise Exception.
135 work if the name begins with a digit). Otherwise, raise Exception.
136 """
136 """
137
137
138 if value is None:
138 if value is None:
139 return None
139 return None
140 if isinstance(value, cls):
140 if isinstance(value, cls):
141 return value
141 return value
142 if isinstance(value, int):
142 if isinstance(value, int):
143 return cls.get(value)
143 return cls.get(value)
144 if isinstance(value, str) and value.isdigit():
144 if isinstance(value, str) and value.isdigit():
145 return cls.get(int(value))
145 return cls.get(int(value))
146 if callback is not None:
146 if callback is not None:
147 return callback(value)
147 return callback(value)
148
148
149 raise Exception(
149 raise Exception(
150 'given object must be int, long or Instance of %s '
150 'given object must be int, long or Instance of %s '
151 'got %s, no callback provided' % (cls, type(value))
151 'got %s, no callback provided' % (cls, type(value))
152 )
152 )
153
153
154 @classmethod
154 @classmethod
155 def get_or_404(cls, id_):
155 def get_or_404(cls, id_):
156 try:
156 try:
157 id_ = int(id_)
157 id_ = int(id_)
158 except (TypeError, ValueError):
158 except (TypeError, ValueError):
159 raise HTTPNotFound
159 raise HTTPNotFound
160
160
161 res = cls.query().get(id_)
161 res = cls.query().get(id_)
162 if res is None:
162 if res is None:
163 raise HTTPNotFound
163 raise HTTPNotFound
164 return res
164 return res
165
165
166 @classmethod
166 @classmethod
167 def delete(cls, id_):
167 def delete(cls, id_):
168 obj = cls.query().get(id_)
168 obj = cls.query().get(id_)
169 Session().delete(obj)
169 Session().delete(obj)
170
170
171 def __repr__(self):
171 def __repr__(self):
172 return '<DB:%s>' % (self.__class__.__name__)
172 return '<DB:%s>' % (self.__class__.__name__)
173
173
174
174
175 _table_args_default_dict = {'extend_existing': True,
175 _table_args_default_dict = {'extend_existing': True,
176 'mysql_engine': 'InnoDB',
176 'mysql_engine': 'InnoDB',
177 'mysql_charset': 'utf8',
177 'mysql_charset': 'utf8',
178 'sqlite_autoincrement': True,
178 'sqlite_autoincrement': True,
179 }
179 }
180
180
181 class Setting(Base, BaseDbModel):
181 class Setting(Base, BaseDbModel):
182 __tablename__ = 'settings'
182 __tablename__ = 'settings'
183 __table_args__ = (
183 __table_args__ = (
184 _table_args_default_dict,
184 _table_args_default_dict,
185 )
185 )
186
186
187 SETTINGS_TYPES = {
187 SETTINGS_TYPES = {
188 'str': safe_bytes,
188 'str': safe_bytes,
189 'int': safe_int,
189 'int': safe_int,
190 'unicode': safe_str,
190 'unicode': safe_str,
191 'bool': str2bool,
191 'bool': str2bool,
192 'list': functools.partial(aslist, sep=',')
192 'list': functools.partial(aslist, sep=',')
193 }
193 }
194 DEFAULT_UPDATE_URL = ''
194 DEFAULT_UPDATE_URL = ''
195
195
196 app_settings_id = Column(Integer(), primary_key=True)
196 app_settings_id = Column(Integer(), primary_key=True)
197 app_settings_name = Column(String(255), nullable=False, unique=True)
197 app_settings_name = Column(String(255), nullable=False, unique=True)
198 _app_settings_value = Column("app_settings_value", Unicode(4096), nullable=False)
198 _app_settings_value = Column("app_settings_value", Unicode(4096), nullable=False)
199 _app_settings_type = Column("app_settings_type", String(255), nullable=True) # FIXME: not nullable?
199 _app_settings_type = Column("app_settings_type", String(255), nullable=True) # FIXME: not nullable?
200
200
201 def __init__(self, key='', val='', type='unicode'):
201 def __init__(self, key='', val='', type='unicode'):
202 self.app_settings_name = key
202 self.app_settings_name = key
203 self.app_settings_value = val
203 self.app_settings_value = val
204 self.app_settings_type = type
204 self.app_settings_type = type
205
205
206 @validates('_app_settings_value')
206 @validates('_app_settings_value')
207 def validate_settings_value(self, key, val):
207 def validate_settings_value(self, key, val):
208 assert isinstance(val, unicode)
208 assert isinstance(val, str)
209 return val
209 return val
210
210
211 @hybrid_property
211 @hybrid_property
212 def app_settings_value(self):
212 def app_settings_value(self):
213 v = self._app_settings_value
213 v = self._app_settings_value
214 _type = self.app_settings_type
214 _type = self.app_settings_type
215 converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
215 converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
216 return converter(v)
216 return converter(v)
217
217
218 @app_settings_value.setter
218 @app_settings_value.setter
219 def app_settings_value(self, val):
219 def app_settings_value(self, val):
220 """
220 """
221 Setter that will always make sure we use str in app_settings_value
221 Setter that will always make sure we use str in app_settings_value
222 """
222 """
223 self._app_settings_value = safe_str(val)
223 self._app_settings_value = safe_str(val)
224
224
225 @hybrid_property
225 @hybrid_property
226 def app_settings_type(self):
226 def app_settings_type(self):
227 return self._app_settings_type
227 return self._app_settings_type
228
228
229 @app_settings_type.setter
229 @app_settings_type.setter
230 def app_settings_type(self, val):
230 def app_settings_type(self, val):
231 if val not in self.SETTINGS_TYPES:
231 if val not in self.SETTINGS_TYPES:
232 raise Exception('type must be one of %s got %s'
232 raise Exception('type must be one of %s got %s'
233 % (list(self.SETTINGS_TYPES), val))
233 % (list(self.SETTINGS_TYPES), val))
234 self._app_settings_type = val
234 self._app_settings_type = val
235
235
236 def __repr__(self):
236 def __repr__(self):
237 return "<%s %s.%s=%r>" % (
237 return "<%s %s.%s=%r>" % (
238 self.__class__.__name__,
238 self.__class__.__name__,
239 self.app_settings_name, self.app_settings_type, self.app_settings_value
239 self.app_settings_name, self.app_settings_type, self.app_settings_value
240 )
240 )
241
241
242 @classmethod
242 @classmethod
243 def get_by_name(cls, key):
243 def get_by_name(cls, key):
244 return cls.query() \
244 return cls.query() \
245 .filter(cls.app_settings_name == key).scalar()
245 .filter(cls.app_settings_name == key).scalar()
246
246
247 @classmethod
247 @classmethod
248 def get_by_name_or_create(cls, key, val='', type='unicode'):
248 def get_by_name_or_create(cls, key, val='', type='unicode'):
249 res = cls.get_by_name(key)
249 res = cls.get_by_name(key)
250 if res is None:
250 if res is None:
251 res = cls(key, val, type)
251 res = cls(key, val, type)
252 return res
252 return res
253
253
254 @classmethod
254 @classmethod
255 def create_or_update(cls, key, val=Optional(''), type=Optional('unicode')):
255 def create_or_update(cls, key, val=Optional(''), type=Optional('unicode')):
256 """
256 """
257 Creates or updates Kallithea setting. If updates are triggered, it will only
257 Creates or updates Kallithea setting. If updates are triggered, it will only
258 update parameters that are explicitly set. Optional instance will be skipped.
258 update parameters that are explicitly set. Optional instance will be skipped.
259
259
260 :param key:
260 :param key:
261 :param val:
261 :param val:
262 :param type:
262 :param type:
263 :return:
263 :return:
264 """
264 """
265 res = cls.get_by_name(key)
265 res = cls.get_by_name(key)
266 if res is None:
266 if res is None:
267 val = Optional.extract(val)
267 val = Optional.extract(val)
268 type = Optional.extract(type)
268 type = Optional.extract(type)
269 res = cls(key, val, type)
269 res = cls(key, val, type)
270 Session().add(res)
270 Session().add(res)
271 else:
271 else:
272 res.app_settings_name = key
272 res.app_settings_name = key
273 if not isinstance(val, Optional):
273 if not isinstance(val, Optional):
274 # update if set
274 # update if set
275 res.app_settings_value = val
275 res.app_settings_value = val
276 if not isinstance(type, Optional):
276 if not isinstance(type, Optional):
277 # update if set
277 # update if set
278 res.app_settings_type = type
278 res.app_settings_type = type
279 return res
279 return res
280
280
281 @classmethod
281 @classmethod
282 def get_app_settings(cls, cache=False):
282 def get_app_settings(cls, cache=False):
283
283
284 ret = cls.query()
284 ret = cls.query()
285
285
286 if cache:
286 if cache:
287 ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))
287 ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))
288
288
289 if ret is None:
289 if ret is None:
290 raise Exception('Could not get application settings !')
290 raise Exception('Could not get application settings !')
291 settings = {}
291 settings = {}
292 for each in ret:
292 for each in ret:
293 settings[each.app_settings_name] = \
293 settings[each.app_settings_name] = \
294 each.app_settings_value
294 each.app_settings_value
295
295
296 return settings
296 return settings
297
297
298 @classmethod
298 @classmethod
299 def get_auth_settings(cls, cache=False):
299 def get_auth_settings(cls, cache=False):
300 ret = cls.query() \
300 ret = cls.query() \
301 .filter(cls.app_settings_name.startswith('auth_')).all()
301 .filter(cls.app_settings_name.startswith('auth_')).all()
302 fd = {}
302 fd = {}
303 for row in ret:
303 for row in ret:
304 fd[row.app_settings_name] = row.app_settings_value
304 fd[row.app_settings_name] = row.app_settings_value
305 return fd
305 return fd
306
306
307 @classmethod
307 @classmethod
308 def get_default_repo_settings(cls, cache=False, strip_prefix=False):
308 def get_default_repo_settings(cls, cache=False, strip_prefix=False):
309 ret = cls.query() \
309 ret = cls.query() \
310 .filter(cls.app_settings_name.startswith('default_')).all()
310 .filter(cls.app_settings_name.startswith('default_')).all()
311 fd = {}
311 fd = {}
312 for row in ret:
312 for row in ret:
313 key = row.app_settings_name
313 key = row.app_settings_name
314 if strip_prefix:
314 if strip_prefix:
315 key = remove_prefix(key, prefix='default_')
315 key = remove_prefix(key, prefix='default_')
316 fd.update({key: row.app_settings_value})
316 fd.update({key: row.app_settings_value})
317
317
318 return fd
318 return fd
319
319
320 @classmethod
320 @classmethod
321 def get_server_info(cls):
321 def get_server_info(cls):
322 import pkg_resources
322 import pkg_resources
323 import platform
323 import platform
324 from kallithea.lib.utils import check_git_version
324 from kallithea.lib.utils import check_git_version
325 mods = [(p.project_name, p.version) for p in pkg_resources.working_set]
325 mods = [(p.project_name, p.version) for p in pkg_resources.working_set]
326 info = {
326 info = {
327 'modules': sorted(mods, key=lambda k: k[0].lower()),
327 'modules': sorted(mods, key=lambda k: k[0].lower()),
328 'py_version': platform.python_version(),
328 'py_version': platform.python_version(),
329 'platform': platform.platform(),
329 'platform': platform.platform(),
330 'kallithea_version': kallithea.__version__,
330 'kallithea_version': kallithea.__version__,
331 'git_version': str(check_git_version()),
331 'git_version': str(check_git_version()),
332 'git_path': kallithea.CONFIG.get('git_path')
332 'git_path': kallithea.CONFIG.get('git_path')
333 }
333 }
334 return info
334 return info
335
335
336
336
337 class Ui(Base, BaseDbModel):
337 class Ui(Base, BaseDbModel):
338 __tablename__ = 'ui'
338 __tablename__ = 'ui'
339 __table_args__ = (
339 __table_args__ = (
340 # FIXME: ui_key as key is wrong and should be removed when the corresponding
340 # FIXME: ui_key as key is wrong and should be removed when the corresponding
341 # Ui.get_by_key has been replaced by the composite key
341 # Ui.get_by_key has been replaced by the composite key
342 UniqueConstraint('ui_key'),
342 UniqueConstraint('ui_key'),
343 UniqueConstraint('ui_section', 'ui_key'),
343 UniqueConstraint('ui_section', 'ui_key'),
344 _table_args_default_dict,
344 _table_args_default_dict,
345 )
345 )
346
346
347 HOOK_UPDATE = 'changegroup.update'
347 HOOK_UPDATE = 'changegroup.update'
348 HOOK_REPO_SIZE = 'changegroup.repo_size'
348 HOOK_REPO_SIZE = 'changegroup.repo_size'
349
349
350 ui_id = Column(Integer(), primary_key=True)
350 ui_id = Column(Integer(), primary_key=True)
351 ui_section = Column(String(255), nullable=False)
351 ui_section = Column(String(255), nullable=False)
352 ui_key = Column(String(255), nullable=False)
352 ui_key = Column(String(255), nullable=False)
353 ui_value = Column(String(255), nullable=True) # FIXME: not nullable?
353 ui_value = Column(String(255), nullable=True) # FIXME: not nullable?
354 ui_active = Column(Boolean(), nullable=False, default=True)
354 ui_active = Column(Boolean(), nullable=False, default=True)
355
355
356 @classmethod
356 @classmethod
357 def get_by_key(cls, section, key):
357 def get_by_key(cls, section, key):
358 """ Return specified Ui object, or None if not found. """
358 """ Return specified Ui object, or None if not found. """
359 return cls.query().filter_by(ui_section=section, ui_key=key).scalar()
359 return cls.query().filter_by(ui_section=section, ui_key=key).scalar()
360
360
361 @classmethod
361 @classmethod
362 def get_or_create(cls, section, key):
362 def get_or_create(cls, section, key):
363 """ Return specified Ui object, creating it if necessary. """
363 """ Return specified Ui object, creating it if necessary. """
364 setting = cls.get_by_key(section, key)
364 setting = cls.get_by_key(section, key)
365 if setting is None:
365 if setting is None:
366 setting = cls(ui_section=section, ui_key=key)
366 setting = cls(ui_section=section, ui_key=key)
367 Session().add(setting)
367 Session().add(setting)
368 return setting
368 return setting
369
369
370 @classmethod
370 @classmethod
371 def get_builtin_hooks(cls):
371 def get_builtin_hooks(cls):
372 q = cls.query()
372 q = cls.query()
373 q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE]))
373 q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE]))
374 q = q.filter(cls.ui_section == 'hooks')
374 q = q.filter(cls.ui_section == 'hooks')
375 return q.all()
375 return q.all()
376
376
377 @classmethod
377 @classmethod
378 def get_custom_hooks(cls):
378 def get_custom_hooks(cls):
379 q = cls.query()
379 q = cls.query()
380 q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE]))
380 q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE]))
381 q = q.filter(cls.ui_section == 'hooks')
381 q = q.filter(cls.ui_section == 'hooks')
382 return q.all()
382 return q.all()
383
383
384 @classmethod
384 @classmethod
385 def get_repos_location(cls):
385 def get_repos_location(cls):
386 return cls.get_by_key('paths', '/').ui_value
386 return cls.get_by_key('paths', '/').ui_value
387
387
388 @classmethod
388 @classmethod
389 def create_or_update_hook(cls, key, val):
389 def create_or_update_hook(cls, key, val):
390 new_ui = cls.get_or_create('hooks', key)
390 new_ui = cls.get_or_create('hooks', key)
391 new_ui.ui_active = True
391 new_ui.ui_active = True
392 new_ui.ui_value = val
392 new_ui.ui_value = val
393
393
394 def __repr__(self):
394 def __repr__(self):
395 return '<%s %s.%s=%r>' % (
395 return '<%s %s.%s=%r>' % (
396 self.__class__.__name__,
396 self.__class__.__name__,
397 self.ui_section, self.ui_key, self.ui_value)
397 self.ui_section, self.ui_key, self.ui_value)
398
398
399
399
400 class User(Base, BaseDbModel):
400 class User(Base, BaseDbModel):
401 __tablename__ = 'users'
401 __tablename__ = 'users'
402 __table_args__ = (
402 __table_args__ = (
403 Index('u_username_idx', 'username'),
403 Index('u_username_idx', 'username'),
404 Index('u_email_idx', 'email'),
404 Index('u_email_idx', 'email'),
405 _table_args_default_dict,
405 _table_args_default_dict,
406 )
406 )
407
407
408 DEFAULT_USER = 'default'
408 DEFAULT_USER = 'default'
409 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
409 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
410 # The name of the default auth type in extern_type, 'internal' lives in auth_internal.py
410 # The name of the default auth type in extern_type, 'internal' lives in auth_internal.py
411 DEFAULT_AUTH_TYPE = 'internal'
411 DEFAULT_AUTH_TYPE = 'internal'
412
412
413 user_id = Column(Integer(), primary_key=True)
413 user_id = Column(Integer(), primary_key=True)
414 username = Column(String(255), nullable=False, unique=True)
414 username = Column(String(255), nullable=False, unique=True)
415 password = Column(String(255), nullable=False)
415 password = Column(String(255), nullable=False)
416 active = Column(Boolean(), nullable=False, default=True)
416 active = Column(Boolean(), nullable=False, default=True)
417 admin = Column(Boolean(), nullable=False, default=False)
417 admin = Column(Boolean(), nullable=False, default=False)
418 name = Column("firstname", Unicode(255), nullable=False)
418 name = Column("firstname", Unicode(255), nullable=False)
419 lastname = Column(Unicode(255), nullable=False)
419 lastname = Column(Unicode(255), nullable=False)
420 _email = Column("email", String(255), nullable=True, unique=True) # FIXME: not nullable?
420 _email = Column("email", String(255), nullable=True, unique=True) # FIXME: not nullable?
421 last_login = Column(DateTime(timezone=False), nullable=True)
421 last_login = Column(DateTime(timezone=False), nullable=True)
422 extern_type = Column(String(255), nullable=True) # FIXME: not nullable?
422 extern_type = Column(String(255), nullable=True) # FIXME: not nullable?
423 extern_name = Column(String(255), nullable=True) # FIXME: not nullable?
423 extern_name = Column(String(255), nullable=True) # FIXME: not nullable?
424 api_key = Column(String(255), nullable=False)
424 api_key = Column(String(255), nullable=False)
425 created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
425 created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
426 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data # FIXME: not nullable?
426 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data # FIXME: not nullable?
427
427
428 user_log = relationship('UserLog')
428 user_log = relationship('UserLog')
429 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
429 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
430
430
431 repositories = relationship('Repository')
431 repositories = relationship('Repository')
432 repo_groups = relationship('RepoGroup')
432 repo_groups = relationship('RepoGroup')
433 user_groups = relationship('UserGroup')
433 user_groups = relationship('UserGroup')
434 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
434 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
435 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
435 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
436
436
437 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
437 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
438 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
438 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
439
439
440 group_member = relationship('UserGroupMember', cascade='all')
440 group_member = relationship('UserGroupMember', cascade='all')
441
441
442 # comments created by this user
442 # comments created by this user
443 user_comments = relationship('ChangesetComment', cascade='all')
443 user_comments = relationship('ChangesetComment', cascade='all')
444 # extra emails for this user
444 # extra emails for this user
445 user_emails = relationship('UserEmailMap', cascade='all')
445 user_emails = relationship('UserEmailMap', cascade='all')
446 # extra API keys
446 # extra API keys
447 user_api_keys = relationship('UserApiKeys', cascade='all')
447 user_api_keys = relationship('UserApiKeys', cascade='all')
448 ssh_keys = relationship('UserSshKeys', cascade='all')
448 ssh_keys = relationship('UserSshKeys', cascade='all')
449
449
450 @hybrid_property
450 @hybrid_property
451 def email(self):
451 def email(self):
452 return self._email
452 return self._email
453
453
454 @email.setter
454 @email.setter
455 def email(self, val):
455 def email(self, val):
456 self._email = val.lower() if val else None
456 self._email = val.lower() if val else None
457
457
458 @property
458 @property
459 def firstname(self):
459 def firstname(self):
460 # alias for future
460 # alias for future
461 return self.name
461 return self.name
462
462
463 @property
463 @property
464 def emails(self):
464 def emails(self):
465 other = UserEmailMap.query().filter(UserEmailMap.user == self).all()
465 other = UserEmailMap.query().filter(UserEmailMap.user == self).all()
466 return [self.email] + [x.email for x in other]
466 return [self.email] + [x.email for x in other]
467
467
468 @property
468 @property
469 def api_keys(self):
469 def api_keys(self):
470 other = UserApiKeys.query().filter(UserApiKeys.user == self).all()
470 other = UserApiKeys.query().filter(UserApiKeys.user == self).all()
471 return [self.api_key] + [x.api_key for x in other]
471 return [self.api_key] + [x.api_key for x in other]
472
472
473 @property
473 @property
474 def ip_addresses(self):
474 def ip_addresses(self):
475 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
475 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
476 return [x.ip_addr for x in ret]
476 return [x.ip_addr for x in ret]
477
477
478 @property
478 @property
479 def full_name(self):
479 def full_name(self):
480 return '%s %s' % (self.firstname, self.lastname)
480 return '%s %s' % (self.firstname, self.lastname)
481
481
482 @property
482 @property
483 def full_name_or_username(self):
483 def full_name_or_username(self):
484 """
484 """
485 Show full name.
485 Show full name.
486 If full name is not set, fall back to username.
486 If full name is not set, fall back to username.
487 """
487 """
488 return ('%s %s' % (self.firstname, self.lastname)
488 return ('%s %s' % (self.firstname, self.lastname)
489 if (self.firstname and self.lastname) else self.username)
489 if (self.firstname and self.lastname) else self.username)
490
490
491 @property
491 @property
492 def full_name_and_username(self):
492 def full_name_and_username(self):
493 """
493 """
494 Show full name and username as 'Firstname Lastname (username)'.
494 Show full name and username as 'Firstname Lastname (username)'.
495 If full name is not set, fall back to username.
495 If full name is not set, fall back to username.
496 """
496 """
497 return ('%s %s (%s)' % (self.firstname, self.lastname, self.username)
497 return ('%s %s (%s)' % (self.firstname, self.lastname, self.username)
498 if (self.firstname and self.lastname) else self.username)
498 if (self.firstname and self.lastname) else self.username)
499
499
500 @property
500 @property
501 def full_contact(self):
501 def full_contact(self):
502 return '%s %s <%s>' % (self.firstname, self.lastname, self.email)
502 return '%s %s <%s>' % (self.firstname, self.lastname, self.email)
503
503
504 @property
504 @property
505 def short_contact(self):
505 def short_contact(self):
506 return '%s %s' % (self.firstname, self.lastname)
506 return '%s %s' % (self.firstname, self.lastname)
507
507
508 @property
508 @property
509 def is_admin(self):
509 def is_admin(self):
510 return self.admin
510 return self.admin
511
511
512 @hybrid_property
512 @hybrid_property
513 def is_default_user(self):
513 def is_default_user(self):
514 return self.username == User.DEFAULT_USER
514 return self.username == User.DEFAULT_USER
515
515
516 @hybrid_property
516 @hybrid_property
517 def user_data(self):
517 def user_data(self):
518 if not self._user_data:
518 if not self._user_data:
519 return {}
519 return {}
520
520
521 try:
521 try:
522 return ext_json.loads(self._user_data)
522 return ext_json.loads(self._user_data)
523 except TypeError:
523 except TypeError:
524 return {}
524 return {}
525
525
526 @user_data.setter
526 @user_data.setter
527 def user_data(self, val):
527 def user_data(self, val):
528 try:
528 try:
529 self._user_data = ascii_bytes(ext_json.dumps(val))
529 self._user_data = ascii_bytes(ext_json.dumps(val))
530 except Exception:
530 except Exception:
531 log.error(traceback.format_exc())
531 log.error(traceback.format_exc())
532
532
533 def __repr__(self):
533 def __repr__(self):
534 return "<%s %s: %r')>" % (self.__class__.__name__, self.user_id, self.username)
534 return "<%s %s: %r')>" % (self.__class__.__name__, self.user_id, self.username)
535
535
536 @classmethod
536 @classmethod
537 def guess_instance(cls, value):
537 def guess_instance(cls, value):
538 return super(User, cls).guess_instance(value, User.get_by_username)
538 return super(User, cls).guess_instance(value, User.get_by_username)
539
539
540 @classmethod
540 @classmethod
541 def get_or_404(cls, id_, allow_default=True):
541 def get_or_404(cls, id_, allow_default=True):
542 '''
542 '''
543 Overridden version of BaseDbModel.get_or_404, with an extra check on
543 Overridden version of BaseDbModel.get_or_404, with an extra check on
544 the default user.
544 the default user.
545 '''
545 '''
546 user = super(User, cls).get_or_404(id_)
546 user = super(User, cls).get_or_404(id_)
547 if not allow_default and user.is_default_user:
547 if not allow_default and user.is_default_user:
548 raise DefaultUserException()
548 raise DefaultUserException()
549 return user
549 return user
550
550
551 @classmethod
551 @classmethod
552 def get_by_username_or_email(cls, username_or_email, case_insensitive=False, cache=False):
552 def get_by_username_or_email(cls, username_or_email, case_insensitive=False, cache=False):
553 """
553 """
554 For anything that looks like an email address, look up by the email address (matching
554 For anything that looks like an email address, look up by the email address (matching
555 case insensitively).
555 case insensitively).
556 For anything else, try to look up by the user name.
556 For anything else, try to look up by the user name.
557
557
558 This assumes no normal username can have '@' symbol.
558 This assumes no normal username can have '@' symbol.
559 """
559 """
560 if '@' in username_or_email:
560 if '@' in username_or_email:
561 return User.get_by_email(username_or_email, cache=cache)
561 return User.get_by_email(username_or_email, cache=cache)
562 else:
562 else:
563 return User.get_by_username(username_or_email, case_insensitive=case_insensitive, cache=cache)
563 return User.get_by_username(username_or_email, case_insensitive=case_insensitive, cache=cache)
564
564
565 @classmethod
565 @classmethod
566 def get_by_username(cls, username, case_insensitive=False, cache=False):
566 def get_by_username(cls, username, case_insensitive=False, cache=False):
567 if case_insensitive:
567 if case_insensitive:
568 q = cls.query().filter(sqlalchemy.func.lower(cls.username) == sqlalchemy.func.lower(username))
568 q = cls.query().filter(sqlalchemy.func.lower(cls.username) == sqlalchemy.func.lower(username))
569 else:
569 else:
570 q = cls.query().filter(cls.username == username)
570 q = cls.query().filter(cls.username == username)
571
571
572 if cache:
572 if cache:
573 q = q.options(FromCache(
573 q = q.options(FromCache(
574 "sql_cache_short",
574 "sql_cache_short",
575 "get_user_%s" % _hash_key(username)
575 "get_user_%s" % _hash_key(username)
576 )
576 )
577 )
577 )
578 return q.scalar()
578 return q.scalar()
579
579
580 @classmethod
580 @classmethod
581 def get_by_api_key(cls, api_key, cache=False, fallback=True):
581 def get_by_api_key(cls, api_key, cache=False, fallback=True):
582 if len(api_key) != 40 or not api_key.isalnum():
582 if len(api_key) != 40 or not api_key.isalnum():
583 return None
583 return None
584
584
585 q = cls.query().filter(cls.api_key == api_key)
585 q = cls.query().filter(cls.api_key == api_key)
586
586
587 if cache:
587 if cache:
588 q = q.options(FromCache("sql_cache_short",
588 q = q.options(FromCache("sql_cache_short",
589 "get_api_key_%s" % api_key))
589 "get_api_key_%s" % api_key))
590 res = q.scalar()
590 res = q.scalar()
591
591
592 if fallback and not res:
592 if fallback and not res:
593 # fallback to additional keys
593 # fallback to additional keys
594 _res = UserApiKeys.query().filter_by(api_key=api_key, is_expired=False).first()
594 _res = UserApiKeys.query().filter_by(api_key=api_key, is_expired=False).first()
595 if _res:
595 if _res:
596 res = _res.user
596 res = _res.user
597 if res is None or not res.active or res.is_default_user:
597 if res is None or not res.active or res.is_default_user:
598 return None
598 return None
599 return res
599 return res
600
600
601 @classmethod
601 @classmethod
602 def get_by_email(cls, email, cache=False):
602 def get_by_email(cls, email, cache=False):
603 q = cls.query().filter(sqlalchemy.func.lower(cls.email) == sqlalchemy.func.lower(email))
603 q = cls.query().filter(sqlalchemy.func.lower(cls.email) == sqlalchemy.func.lower(email))
604
604
605 if cache:
605 if cache:
606 q = q.options(FromCache("sql_cache_short",
606 q = q.options(FromCache("sql_cache_short",
607 "get_email_key_%s" % email))
607 "get_email_key_%s" % email))
608
608
609 ret = q.scalar()
609 ret = q.scalar()
610 if ret is None:
610 if ret is None:
611 q = UserEmailMap.query()
611 q = UserEmailMap.query()
612 # try fetching in alternate email map
612 # try fetching in alternate email map
613 q = q.filter(sqlalchemy.func.lower(UserEmailMap.email) == sqlalchemy.func.lower(email))
613 q = q.filter(sqlalchemy.func.lower(UserEmailMap.email) == sqlalchemy.func.lower(email))
614 q = q.options(joinedload(UserEmailMap.user))
614 q = q.options(joinedload(UserEmailMap.user))
615 if cache:
615 if cache:
616 q = q.options(FromCache("sql_cache_short",
616 q = q.options(FromCache("sql_cache_short",
617 "get_email_map_key_%s" % email))
617 "get_email_map_key_%s" % email))
618 ret = getattr(q.scalar(), 'user', None)
618 ret = getattr(q.scalar(), 'user', None)
619
619
620 return ret
620 return ret
621
621
622 @classmethod
622 @classmethod
623 def get_from_cs_author(cls, author):
623 def get_from_cs_author(cls, author):
624 """
624 """
625 Tries to get User objects out of commit author string
625 Tries to get User objects out of commit author string
626
626
627 :param author:
627 :param author:
628 """
628 """
629 from kallithea.lib.helpers import email, author_name
629 from kallithea.lib.helpers import email, author_name
630 # Valid email in the attribute passed, see if they're in the system
630 # Valid email in the attribute passed, see if they're in the system
631 _email = email(author)
631 _email = email(author)
632 if _email:
632 if _email:
633 user = cls.get_by_email(_email)
633 user = cls.get_by_email(_email)
634 if user is not None:
634 if user is not None:
635 return user
635 return user
636 # Maybe we can match by username?
636 # Maybe we can match by username?
637 _author = author_name(author)
637 _author = author_name(author)
638 user = cls.get_by_username(_author, case_insensitive=True)
638 user = cls.get_by_username(_author, case_insensitive=True)
639 if user is not None:
639 if user is not None:
640 return user
640 return user
641
641
642 def update_lastlogin(self):
642 def update_lastlogin(self):
643 """Update user lastlogin"""
643 """Update user lastlogin"""
644 self.last_login = datetime.datetime.now()
644 self.last_login = datetime.datetime.now()
645 log.debug('updated user %s lastlogin', self.username)
645 log.debug('updated user %s lastlogin', self.username)
646
646
647 @classmethod
647 @classmethod
648 def get_first_admin(cls):
648 def get_first_admin(cls):
649 user = User.query().filter(User.admin == True).first()
649 user = User.query().filter(User.admin == True).first()
650 if user is None:
650 if user is None:
651 raise Exception('Missing administrative account!')
651 raise Exception('Missing administrative account!')
652 return user
652 return user
653
653
654 @classmethod
654 @classmethod
655 def get_default_user(cls, cache=False):
655 def get_default_user(cls, cache=False):
656 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
656 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
657 if user is None:
657 if user is None:
658 raise Exception('Missing default account!')
658 raise Exception('Missing default account!')
659 return user
659 return user
660
660
661 def get_api_data(self, details=False):
661 def get_api_data(self, details=False):
662 """
662 """
663 Common function for generating user related data for API
663 Common function for generating user related data for API
664 """
664 """
665 user = self
665 user = self
666 data = dict(
666 data = dict(
667 user_id=user.user_id,
667 user_id=user.user_id,
668 username=user.username,
668 username=user.username,
669 firstname=user.name,
669 firstname=user.name,
670 lastname=user.lastname,
670 lastname=user.lastname,
671 email=user.email,
671 email=user.email,
672 emails=user.emails,
672 emails=user.emails,
673 active=user.active,
673 active=user.active,
674 admin=user.admin,
674 admin=user.admin,
675 )
675 )
676 if details:
676 if details:
677 data.update(dict(
677 data.update(dict(
678 extern_type=user.extern_type,
678 extern_type=user.extern_type,
679 extern_name=user.extern_name,
679 extern_name=user.extern_name,
680 api_key=user.api_key,
680 api_key=user.api_key,
681 api_keys=user.api_keys,
681 api_keys=user.api_keys,
682 last_login=user.last_login,
682 last_login=user.last_login,
683 ip_addresses=user.ip_addresses
683 ip_addresses=user.ip_addresses
684 ))
684 ))
685 return data
685 return data
686
686
687 def __json__(self):
687 def __json__(self):
688 data = dict(
688 data = dict(
689 full_name=self.full_name,
689 full_name=self.full_name,
690 full_name_or_username=self.full_name_or_username,
690 full_name_or_username=self.full_name_or_username,
691 short_contact=self.short_contact,
691 short_contact=self.short_contact,
692 full_contact=self.full_contact
692 full_contact=self.full_contact
693 )
693 )
694 data.update(self.get_api_data())
694 data.update(self.get_api_data())
695 return data
695 return data
696
696
697
697
698 class UserApiKeys(Base, BaseDbModel):
698 class UserApiKeys(Base, BaseDbModel):
699 __tablename__ = 'user_api_keys'
699 __tablename__ = 'user_api_keys'
700 __table_args__ = (
700 __table_args__ = (
701 Index('uak_api_key_idx', 'api_key'),
701 Index('uak_api_key_idx', 'api_key'),
702 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
702 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
703 _table_args_default_dict,
703 _table_args_default_dict,
704 )
704 )
705
705
706 user_api_key_id = Column(Integer(), primary_key=True)
706 user_api_key_id = Column(Integer(), primary_key=True)
707 user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False)
707 user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False)
708 api_key = Column(String(255), nullable=False, unique=True)
708 api_key = Column(String(255), nullable=False, unique=True)
709 description = Column(UnicodeText(), nullable=False)
709 description = Column(UnicodeText(), nullable=False)
710 expires = Column(Float(53), nullable=False)
710 expires = Column(Float(53), nullable=False)
711 created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
711 created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
712
712
713 user = relationship('User')
713 user = relationship('User')
714
714
715 @hybrid_property
715 @hybrid_property
716 def is_expired(self):
716 def is_expired(self):
717 return (self.expires != -1) & (time.time() > self.expires)
717 return (self.expires != -1) & (time.time() > self.expires)
718
718
719
719
720 class UserEmailMap(Base, BaseDbModel):
720 class UserEmailMap(Base, BaseDbModel):
721 __tablename__ = 'user_email_map'
721 __tablename__ = 'user_email_map'
722 __table_args__ = (
722 __table_args__ = (
723 Index('uem_email_idx', 'email'),
723 Index('uem_email_idx', 'email'),
724 _table_args_default_dict,
724 _table_args_default_dict,
725 )
725 )
726
726
727 email_id = Column(Integer(), primary_key=True)
727 email_id = Column(Integer(), primary_key=True)
728 user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False)
728 user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False)
729 _email = Column("email", String(255), nullable=False, unique=True)
729 _email = Column("email", String(255), nullable=False, unique=True)
730 user = relationship('User')
730 user = relationship('User')
731
731
732 @validates('_email')
732 @validates('_email')
733 def validate_email(self, key, email):
733 def validate_email(self, key, email):
734 # check if this email is not main one
734 # check if this email is not main one
735 main_email = Session().query(User).filter(User.email == email).scalar()
735 main_email = Session().query(User).filter(User.email == email).scalar()
736 if main_email is not None:
736 if main_email is not None:
737 raise AttributeError('email %s is present is user table' % email)
737 raise AttributeError('email %s is present is user table' % email)
738 return email
738 return email
739
739
740 @hybrid_property
740 @hybrid_property
741 def email(self):
741 def email(self):
742 return self._email
742 return self._email
743
743
744 @email.setter
744 @email.setter
745 def email(self, val):
745 def email(self, val):
746 self._email = val.lower() if val else None
746 self._email = val.lower() if val else None
747
747
748
748
749 class UserIpMap(Base, BaseDbModel):
749 class UserIpMap(Base, BaseDbModel):
750 __tablename__ = 'user_ip_map'
750 __tablename__ = 'user_ip_map'
751 __table_args__ = (
751 __table_args__ = (
752 UniqueConstraint('user_id', 'ip_addr'),
752 UniqueConstraint('user_id', 'ip_addr'),
753 _table_args_default_dict,
753 _table_args_default_dict,
754 )
754 )
755
755
756 ip_id = Column(Integer(), primary_key=True)
756 ip_id = Column(Integer(), primary_key=True)
757 user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False)
757 user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False)
758 ip_addr = Column(String(255), nullable=False)
758 ip_addr = Column(String(255), nullable=False)
759 active = Column(Boolean(), nullable=False, default=True)
759 active = Column(Boolean(), nullable=False, default=True)
760 user = relationship('User')
760 user = relationship('User')
761
761
762 @classmethod
762 @classmethod
763 def _get_ip_range(cls, ip_addr):
763 def _get_ip_range(cls, ip_addr):
764 net = ipaddr.IPNetwork(address=ip_addr)
764 net = ipaddr.IPNetwork(address=ip_addr)
765 return [str(net.network), str(net.broadcast)]
765 return [str(net.network), str(net.broadcast)]
766
766
767 def __json__(self):
767 def __json__(self):
768 return dict(
768 return dict(
769 ip_addr=self.ip_addr,
769 ip_addr=self.ip_addr,
770 ip_range=self._get_ip_range(self.ip_addr)
770 ip_range=self._get_ip_range(self.ip_addr)
771 )
771 )
772
772
773 def __repr__(self):
773 def __repr__(self):
774 return "<%s %s: %s>" % (self.__class__.__name__, self.user_id, self.ip_addr)
774 return "<%s %s: %s>" % (self.__class__.__name__, self.user_id, self.ip_addr)
775
775
776
776
777 class UserLog(Base, BaseDbModel):
777 class UserLog(Base, BaseDbModel):
778 __tablename__ = 'user_logs'
778 __tablename__ = 'user_logs'
779 __table_args__ = (
779 __table_args__ = (
780 _table_args_default_dict,
780 _table_args_default_dict,
781 )
781 )
782
782
783 user_log_id = Column(Integer(), primary_key=True)
783 user_log_id = Column(Integer(), primary_key=True)
784 user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=True)
784 user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=True)
785 username = Column(String(255), nullable=False)
785 username = Column(String(255), nullable=False)
786 repository_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=True)
786 repository_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=True)
787 repository_name = Column(Unicode(255), nullable=False)
787 repository_name = Column(Unicode(255), nullable=False)
788 user_ip = Column(String(255), nullable=True)
788 user_ip = Column(String(255), nullable=True)
789 action = Column(UnicodeText(), nullable=False)
789 action = Column(UnicodeText(), nullable=False)
790 action_date = Column(DateTime(timezone=False), nullable=False)
790 action_date = Column(DateTime(timezone=False), nullable=False)
791
791
792 def __repr__(self):
792 def __repr__(self):
793 return "<%s %r: %r')>" % (self.__class__.__name__,
793 return "<%s %r: %r')>" % (self.__class__.__name__,
794 self.repository_name,
794 self.repository_name,
795 self.action)
795 self.action)
796
796
797 @property
797 @property
798 def action_as_day(self):
798 def action_as_day(self):
799 return datetime.date(*self.action_date.timetuple()[:3])
799 return datetime.date(*self.action_date.timetuple()[:3])
800
800
801 user = relationship('User')
801 user = relationship('User')
802 repository = relationship('Repository', cascade='')
802 repository = relationship('Repository', cascade='')
803
803
804
804
805 class UserGroup(Base, BaseDbModel):
805 class UserGroup(Base, BaseDbModel):
806 __tablename__ = 'users_groups'
806 __tablename__ = 'users_groups'
807 __table_args__ = (
807 __table_args__ = (
808 _table_args_default_dict,
808 _table_args_default_dict,
809 )
809 )
810
810
811 users_group_id = Column(Integer(), primary_key=True)
811 users_group_id = Column(Integer(), primary_key=True)
812 users_group_name = Column(Unicode(255), nullable=False, unique=True)
812 users_group_name = Column(Unicode(255), nullable=False, unique=True)
813 user_group_description = Column(Unicode(10000), nullable=True) # FIXME: not nullable?
813 user_group_description = Column(Unicode(10000), nullable=True) # FIXME: not nullable?
814 users_group_active = Column(Boolean(), nullable=False)
814 users_group_active = Column(Boolean(), nullable=False)
815 owner_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
815 owner_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
816 created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
816 created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
817 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data # FIXME: not nullable?
817 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data # FIXME: not nullable?
818
818
819 members = relationship('UserGroupMember', cascade="all, delete-orphan")
819 members = relationship('UserGroupMember', cascade="all, delete-orphan")
820 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
820 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
821 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
821 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
822 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
822 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
823 user_user_group_to_perm = relationship('UserUserGroupToPerm ', cascade='all')
823 user_user_group_to_perm = relationship('UserUserGroupToPerm ', cascade='all')
824 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
824 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
825
825
826 owner = relationship('User')
826 owner = relationship('User')
827
827
828 @hybrid_property
828 @hybrid_property
829 def group_data(self):
829 def group_data(self):
830 if not self._group_data:
830 if not self._group_data:
831 return {}
831 return {}
832
832
833 try:
833 try:
834 return ext_json.loads(self._group_data)
834 return ext_json.loads(self._group_data)
835 except TypeError:
835 except TypeError:
836 return {}
836 return {}
837
837
838 @group_data.setter
838 @group_data.setter
839 def group_data(self, val):
839 def group_data(self, val):
840 try:
840 try:
841 self._group_data = ascii_bytes(ext_json.dumps(val))
841 self._group_data = ascii_bytes(ext_json.dumps(val))
842 except Exception:
842 except Exception:
843 log.error(traceback.format_exc())
843 log.error(traceback.format_exc())
844
844
845 def __repr__(self):
845 def __repr__(self):
846 return "<%s %s: %r')>" % (self.__class__.__name__,
846 return "<%s %s: %r')>" % (self.__class__.__name__,
847 self.users_group_id,
847 self.users_group_id,
848 self.users_group_name)
848 self.users_group_name)
849
849
850 @classmethod
850 @classmethod
851 def guess_instance(cls, value):
851 def guess_instance(cls, value):
852 return super(UserGroup, cls).guess_instance(value, UserGroup.get_by_group_name)
852 return super(UserGroup, cls).guess_instance(value, UserGroup.get_by_group_name)
853
853
854 @classmethod
854 @classmethod
855 def get_by_group_name(cls, group_name, cache=False,
855 def get_by_group_name(cls, group_name, cache=False,
856 case_insensitive=False):
856 case_insensitive=False):
857 if case_insensitive:
857 if case_insensitive:
858 q = cls.query().filter(sqlalchemy.func.lower(cls.users_group_name) == sqlalchemy.func.lower(group_name))
858 q = cls.query().filter(sqlalchemy.func.lower(cls.users_group_name) == sqlalchemy.func.lower(group_name))
859 else:
859 else:
860 q = cls.query().filter(cls.users_group_name == group_name)
860 q = cls.query().filter(cls.users_group_name == group_name)
861 if cache:
861 if cache:
862 q = q.options(FromCache(
862 q = q.options(FromCache(
863 "sql_cache_short",
863 "sql_cache_short",
864 "get_group_%s" % _hash_key(group_name)
864 "get_group_%s" % _hash_key(group_name)
865 )
865 )
866 )
866 )
867 return q.scalar()
867 return q.scalar()
868
868
869 @classmethod
869 @classmethod
870 def get(cls, user_group_id, cache=False):
870 def get(cls, user_group_id, cache=False):
871 user_group = cls.query()
871 user_group = cls.query()
872 if cache:
872 if cache:
873 user_group = user_group.options(FromCache("sql_cache_short",
873 user_group = user_group.options(FromCache("sql_cache_short",
874 "get_users_group_%s" % user_group_id))
874 "get_users_group_%s" % user_group_id))
875 return user_group.get(user_group_id)
875 return user_group.get(user_group_id)
876
876
877 def get_api_data(self, with_members=True):
877 def get_api_data(self, with_members=True):
878 user_group = self
878 user_group = self
879
879
880 data = dict(
880 data = dict(
881 users_group_id=user_group.users_group_id,
881 users_group_id=user_group.users_group_id,
882 group_name=user_group.users_group_name,
882 group_name=user_group.users_group_name,
883 group_description=user_group.user_group_description,
883 group_description=user_group.user_group_description,
884 active=user_group.users_group_active,
884 active=user_group.users_group_active,
885 owner=user_group.owner.username,
885 owner=user_group.owner.username,
886 )
886 )
887 if with_members:
887 if with_members:
888 data['members'] = [
888 data['members'] = [
889 ugm.user.get_api_data()
889 ugm.user.get_api_data()
890 for ugm in user_group.members
890 for ugm in user_group.members
891 ]
891 ]
892
892
893 return data
893 return data
894
894
895
895
896 class UserGroupMember(Base, BaseDbModel):
896 class UserGroupMember(Base, BaseDbModel):
897 __tablename__ = 'users_groups_members'
897 __tablename__ = 'users_groups_members'
898 __table_args__ = (
898 __table_args__ = (
899 _table_args_default_dict,
899 _table_args_default_dict,
900 )
900 )
901
901
902 users_group_member_id = Column(Integer(), primary_key=True)
902 users_group_member_id = Column(Integer(), primary_key=True)
903 users_group_id = Column(Integer(), ForeignKey('users_groups.users_group_id'), nullable=False)
903 users_group_id = Column(Integer(), ForeignKey('users_groups.users_group_id'), nullable=False)
904 user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False)
904 user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False)
905
905
906 user = relationship('User')
906 user = relationship('User')
907 users_group = relationship('UserGroup')
907 users_group = relationship('UserGroup')
908
908
909 def __init__(self, gr_id='', u_id=''):
909 def __init__(self, gr_id='', u_id=''):
910 self.users_group_id = gr_id
910 self.users_group_id = gr_id
911 self.user_id = u_id
911 self.user_id = u_id
912
912
913
913
914 class RepositoryField(Base, BaseDbModel):
914 class RepositoryField(Base, BaseDbModel):
915 __tablename__ = 'repositories_fields'
915 __tablename__ = 'repositories_fields'
916 __table_args__ = (
916 __table_args__ = (
917 UniqueConstraint('repository_id', 'field_key'), # no-multi field
917 UniqueConstraint('repository_id', 'field_key'), # no-multi field
918 _table_args_default_dict,
918 _table_args_default_dict,
919 )
919 )
920
920
921 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
921 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
922
922
923 repo_field_id = Column(Integer(), primary_key=True)
923 repo_field_id = Column(Integer(), primary_key=True)
924 repository_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=False)
924 repository_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=False)
925 field_key = Column(String(250), nullable=False)
925 field_key = Column(String(250), nullable=False)
926 field_label = Column(String(1024), nullable=False)
926 field_label = Column(String(1024), nullable=False)
927 field_value = Column(String(10000), nullable=False)
927 field_value = Column(String(10000), nullable=False)
928 field_desc = Column(String(1024), nullable=False)
928 field_desc = Column(String(1024), nullable=False)
929 field_type = Column(String(255), nullable=False)
929 field_type = Column(String(255), nullable=False)
930 created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
930 created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
931
931
932 repository = relationship('Repository')
932 repository = relationship('Repository')
933
933
934 @property
934 @property
935 def field_key_prefixed(self):
935 def field_key_prefixed(self):
936 return 'ex_%s' % self.field_key
936 return 'ex_%s' % self.field_key
937
937
938 @classmethod
938 @classmethod
939 def un_prefix_key(cls, key):
939 def un_prefix_key(cls, key):
940 if key.startswith(cls.PREFIX):
940 if key.startswith(cls.PREFIX):
941 return key[len(cls.PREFIX):]
941 return key[len(cls.PREFIX):]
942 return key
942 return key
943
943
944 @classmethod
944 @classmethod
945 def get_by_key_name(cls, key, repo):
945 def get_by_key_name(cls, key, repo):
946 row = cls.query() \
946 row = cls.query() \
947 .filter(cls.repository == repo) \
947 .filter(cls.repository == repo) \
948 .filter(cls.field_key == key).scalar()
948 .filter(cls.field_key == key).scalar()
949 return row
949 return row
950
950
951
951
952 class Repository(Base, BaseDbModel):
952 class Repository(Base, BaseDbModel):
953 __tablename__ = 'repositories'
953 __tablename__ = 'repositories'
954 __table_args__ = (
954 __table_args__ = (
955 Index('r_repo_name_idx', 'repo_name'),
955 Index('r_repo_name_idx', 'repo_name'),
956 _table_args_default_dict,
956 _table_args_default_dict,
957 )
957 )
958
958
959 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
959 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
960 DEFAULT_CLONE_SSH = 'ssh://{system_user}@{hostname}/{repo}'
960 DEFAULT_CLONE_SSH = 'ssh://{system_user}@{hostname}/{repo}'
961
961
962 STATE_CREATED = u'repo_state_created'
962 STATE_CREATED = u'repo_state_created'
963 STATE_PENDING = u'repo_state_pending'
963 STATE_PENDING = u'repo_state_pending'
964 STATE_ERROR = u'repo_state_error'
964 STATE_ERROR = u'repo_state_error'
965
965
966 repo_id = Column(Integer(), primary_key=True)
966 repo_id = Column(Integer(), primary_key=True)
967 repo_name = Column(Unicode(255), nullable=False, unique=True)
967 repo_name = Column(Unicode(255), nullable=False, unique=True)
968 repo_state = Column(String(255), nullable=False)
968 repo_state = Column(String(255), nullable=False)
969
969
970 clone_uri = Column(String(255), nullable=True) # FIXME: not nullable?
970 clone_uri = Column(String(255), nullable=True) # FIXME: not nullable?
971 repo_type = Column(String(255), nullable=False) # 'hg' or 'git'
971 repo_type = Column(String(255), nullable=False) # 'hg' or 'git'
972 owner_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
972 owner_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
973 private = Column(Boolean(), nullable=False)
973 private = Column(Boolean(), nullable=False)
974 enable_statistics = Column("statistics", Boolean(), nullable=False, default=True)
    enable_statistics = Column("statistics", Boolean(), nullable=False, default=True)
    enable_downloads = Column("downloads", Boolean(), nullable=False, default=True)
    description = Column(Unicode(10000), nullable=False)
    created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _landing_revision = Column("landing_revision", String(255), nullable=False)
    _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True)  # JSON data # FIXME: not nullable?

    fork_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=True)
    group_id = Column(Integer(), ForeignKey('groups.group_id'), nullable=True)

    owner = relationship('User')
    fork = relationship('Repository', remote_side=repo_id)
    group = relationship('RepoGroup')
    repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    stats = relationship('Statistics', cascade='all', uselist=False)

    followers = relationship('UserFollowing',
                             primaryjoin='UserFollowing.follows_repository_id==Repository.repo_id',
                             cascade='all')
    extra_fields = relationship('RepositoryField',
                                cascade="all, delete-orphan")

    logs = relationship('UserLog')
    comments = relationship('ChangesetComment', cascade="all, delete-orphan")

    pull_requests_org = relationship('PullRequest',
                                     primaryjoin='PullRequest.org_repo_id==Repository.repo_id',
                                     cascade="all, delete-orphan")

    pull_requests_other = relationship('PullRequest',
                                       primaryjoin='PullRequest.other_repo_id==Repository.repo_id',
                                       cascade="all, delete-orphan")

    def __repr__(self):
        return "<%s %s: %r>" % (self.__class__.__name__,
                                self.repo_id, self.repo_name)

    @hybrid_property
    def landing_rev(self):
        # always should return [rev_type, rev]
        if self._landing_revision:
            _rev_info = self._landing_revision.split(':')
            if len(_rev_info) < 2:
                _rev_info.insert(0, 'rev')
            return [_rev_info[0], _rev_info[1]]
        return [None, None]

    @landing_rev.setter
    def landing_rev(self, val):
        if ':' not in val:
            raise ValueError('value must be delimited with `:` and consist '
                             'of <rev_type>:<rev>, got %s instead' % val)
        self._landing_revision = val
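
    # Illustrative sketch (not part of the original module): the setter only
    # accepts the '<rev_type>:<rev>' form, while the getter also tolerates
    # legacy stored values without a type prefix by assuming 'rev':
    #
    #     repo.landing_rev = 'branch:default'    # stored as 'branch:default'
    #     repo.landing_rev                       # -> ['branch', 'default']
    #     repo._landing_revision = 'tip'         # legacy value, no prefix
    #     repo.landing_rev                       # -> ['rev', 'tip']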

    @hybrid_property
    def changeset_cache(self):
        try:
            cs_cache = ext_json.loads(self._changeset_cache)  # might raise on bad data
            cs_cache['raw_id']  # verify data, raise exception on error
            return cs_cache
        except (TypeError, KeyError, ValueError):
            return EmptyChangeset().__json__()

    @changeset_cache.setter
    def changeset_cache(self, val):
        try:
            self._changeset_cache = ascii_bytes(ext_json.dumps(val))
        except Exception:
            log.error(traceback.format_exc())
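
    # Illustrative sketch (not part of the original module): the cache is a
    # JSON-serialized dict kept in the _changeset_cache LargeBinary column;
    # unreadable or incomplete data falls back to an empty changeset:
    #
    #     repo.changeset_cache = {'raw_id': '0' * 40, 'short_id': '0' * 12,
    #                             'revision': -1, 'message': '', 'date': None,
    #                             'author': ''}
    #     repo.changeset_cache['raw_id']    # -> '000...0', round-tripped via JSON
    #     repo._changeset_cache = b'not json'
    #     repo.changeset_cache              # -> EmptyChangeset().__json__()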

    @classmethod
    def query(cls, sorted=False):
        """Add Repository-specific helpers for common query constructs.

        sorted: if True, apply the default ordering (name, case insensitive).
        """
        q = super(Repository, cls).query()

        if sorted:
            q = q.order_by(sqlalchemy.func.lower(Repository.repo_name))

        return q

    @classmethod
    def url_sep(cls):
        return URL_SEP

    @classmethod
    def normalize_repo_name(cls, repo_name):
        """
        Normalizes an OS-specific repo_name to the format stored internally in
        the database, using URL_SEP

        :param cls:
        :param repo_name:
        """
        return cls.url_sep().join(repo_name.split(os.sep))
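
    # Illustrative sketch (not part of the original module): path components
    # are rejoined with the URL separator, so an OS-specific path becomes the
    # canonical database form (assuming URL_SEP is '/'), e.g. on Windows:
    #
    #     Repository.normalize_repo_name(r'mygroup\myrepo')  # -> 'mygroup/myrepo'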

    @classmethod
    def guess_instance(cls, value):
        return super(Repository, cls).guess_instance(value, Repository.get_by_repo_name)

    @classmethod
    def get_by_repo_name(cls, repo_name, case_insensitive=False):
        """Get the repo, defaulting to database case sensitivity.
        case_insensitive will be slower and should only be specified if necessary."""
        if case_insensitive:
            q = Session().query(cls).filter(sqlalchemy.func.lower(cls.repo_name) == sqlalchemy.func.lower(repo_name))
        else:
            q = Session().query(cls).filter(cls.repo_name == repo_name)
        q = q.options(joinedload(Repository.fork)) \
                .options(joinedload(Repository.owner)) \
                .options(joinedload(Repository.group))
        return q.scalar()

    @classmethod
    def get_by_full_path(cls, repo_full_path):
        base_full_path = os.path.realpath(cls.base_path())
        repo_full_path = os.path.realpath(repo_full_path)
        assert repo_full_path.startswith(base_full_path + os.path.sep)
        repo_name = repo_full_path[len(base_full_path) + 1:]
        repo_name = cls.normalize_repo_name(repo_name)
        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
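
    # Illustrative sketch (not part of the original module): a filesystem path
    # below base_path() is translated back into the stored repo name, e.g.
    # assuming base_path() returns '/srv/repos':
    #
    #     Repository.get_by_full_path('/srv/repos/mygroup/myrepo')
    #     # looks up the repository stored under the name 'mygroup/myrepo'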

    @classmethod
    def get_repo_forks(cls, repo_id):
        return cls.query().filter(Repository.fork_id == repo_id)

    @classmethod
    def base_path(cls):
        """
        Returns base path where all repos are stored

        :param cls:
        """
        q = Session().query(Ui) \
            .filter(Ui.ui_key == cls.url_sep())
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return q.one().ui_value

    @property
    def forks(self):
        """
        Return forks of this repo
        """
        return Repository.get_repo_forks(self.repo_id)

    @property
    def parent(self):
        """
        Returns fork parent
        """
        return self.fork

    @property
    def just_name(self):
        return self.repo_name.split(Repository.url_sep())[-1]

    @property
    def groups_with_parents(self):
        groups = []
        group = self.group
        while group is not None:
            groups.append(group)
            group = group.parent_group
            assert group not in groups, group  # avoid recursion on bad db content
        groups.reverse()
        return groups

    @LazyProperty
    def repo_path(self):
        """
        Returns the base full path for this repository, i.e. where it actually
        exists on the filesystem
        """
        q = Session().query(Ui).filter(Ui.ui_key ==
                                       Repository.url_sep())
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return q.one().ui_value

    @property
    def repo_full_path(self):
        p = [self.repo_path]
        # we need to split the name by / since this is how we store the
        # names in the database, but that eventually needs to be converted
        # into a valid system path
        p += self.repo_name.split(Repository.url_sep())
        return os.path.join(*p)
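
    # Illustrative sketch (not part of the original module): the stored name
    # uses '/' separators and is turned back into a native filesystem path, so
    # with repo_path == '/srv/repos' and repo_name == 'mygroup/myrepo':
    #
    #     repo.repo_full_path  # -> os.path.join('/srv/repos', 'mygroup', 'myrepo')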

    @property
    def cache_keys(self):
        """
        Returns associated cache keys for that repo
        """
        return CacheInvalidation.query() \
            .filter(CacheInvalidation.cache_args == self.repo_name) \
            .order_by(CacheInvalidation.cache_key) \
            .all()

    def get_new_name(self, repo_name):
        """
        returns the new full repository name based on the assigned group and the new name

        :param repo_name:
        """
        path_prefix = self.group.full_path_splitted if self.group else []
        return Repository.url_sep().join(path_prefix + [repo_name])
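
    # Illustrative sketch (not part of the original module): the new name is
    # prefixed with the repository's group path, e.g. for a repo living in the
    # group 'mygroup/sub':
    #
    #     repo.get_new_name('renamed')   # -> 'mygroup/sub/renamed'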

    @property
    def _ui(self):
        """
        Creates a db based ui object for this repository
        """
        from kallithea.lib.utils import make_ui
        return make_ui()

    @classmethod
    def is_valid(cls, repo_name):
        """
        returns True if given repo name is a valid filesystem repository

        :param cls:
        :param repo_name:
        """
        from kallithea.lib.utils import is_valid_repo

        return is_valid_repo(repo_name, cls.base_path())

    def get_api_data(self, with_revision_names=False,
                     with_pullrequests=False):
        """
        Common function for generating repo api data.
        Optionally, also return tags, branches, bookmarks and PRs.
        """
        repo = self
        data = dict(
            repo_id=repo.repo_id,
            repo_name=repo.repo_name,
            repo_type=repo.repo_type,
            clone_uri=repo.clone_uri,
            private=repo.private,
            created_on=repo.created_on,
            description=repo.description,
            landing_rev=repo.landing_rev,
            owner=repo.owner.username,
            fork_of=repo.fork.repo_name if repo.fork else None,
            enable_statistics=repo.enable_statistics,
            enable_downloads=repo.enable_downloads,
            last_changeset=repo.changeset_cache,
        )
        if with_revision_names:
            scm_repo = repo.scm_instance_no_cache()
            data.update(dict(
                tags=scm_repo.tags,
                branches=scm_repo.branches,
                bookmarks=scm_repo.bookmarks,
            ))
        if with_pullrequests:
            data['pull_requests'] = repo.pull_requests_other
        rc_config = Setting.get_app_settings()
        repository_fields = str2bool(rc_config.get('repository_fields'))
        if repository_fields:
            for f in self.extra_fields:
                data[f.field_key_prefixed] = f.field_value

        return data

    @property
    def last_db_change(self):
        return self.updated_on

    @property
    def clone_uri_hidden(self):
        clone_uri = self.clone_uri
        if clone_uri:
            import urlobject
            url_obj = urlobject.URLObject(self.clone_uri)
            if url_obj.password:
                clone_uri = url_obj.with_password('*****')
        return clone_uri

    def clone_url(self, clone_uri_tmpl, with_id=False, username=None):
        if '{repo}' not in clone_uri_tmpl and '_{repoid}' not in clone_uri_tmpl:
            log.error("Configured clone_uri_tmpl %r has no '{repo}' or '_{repoid}' and cannot toggle to use repo id URLs", clone_uri_tmpl)
        elif with_id:
            clone_uri_tmpl = clone_uri_tmpl.replace('{repo}', '_{repoid}')
        else:
            clone_uri_tmpl = clone_uri_tmpl.replace('_{repoid}', '{repo}')

        import kallithea.lib.helpers as h
        prefix_url = h.canonical_url('home')

        return get_clone_url(clone_uri_tmpl=clone_uri_tmpl,
                             prefix_url=prefix_url,
                             repo_name=self.repo_name,
                             repo_id=self.repo_id,
                             username=username)
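
    # Illustrative sketch (not part of the original module, template variables
    # assumed): a clone URL template containing '{repo}' is expanded with this
    # repository's data; with_id=True switches the '{repo}' part to the
    # '_{repoid}' form so the clone URL survives renames:
    #
    #     repo.clone_url('https://example.com/{repo}', with_id=True)
    #     # -> e.g. 'https://example.com/_42' for repo_id == 42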

    def set_state(self, state):
        self.repo_state = state

    #==========================================================================
    # SCM PROPERTIES
    #==========================================================================

    def get_changeset(self, rev=None):
        return get_changeset_safe(self.scm_instance, rev)

    def get_landing_changeset(self):
        """
        Returns landing changeset, or if that doesn't exist returns the tip
        """
        _rev_type, _rev = self.landing_rev
        cs = self.get_changeset(_rev)
        if isinstance(cs, EmptyChangeset):
            return self.get_changeset()
        return cs

    def update_changeset_cache(self, cs_cache=None):
        """
        Update cache of last changeset for repository, keys should be::

            short_id
            raw_id
            revision
            message
            date
            author

        :param cs_cache:
        """
        from kallithea.lib.vcs.backends.base import BaseChangeset
        if cs_cache is None:
            cs_cache = EmptyChangeset()
            # use no-cache version here
            scm_repo = self.scm_instance_no_cache()
            if scm_repo:
                cs_cache = scm_repo.get_changeset()

        if isinstance(cs_cache, BaseChangeset):
            cs_cache = cs_cache.__json__()

        if (not self.changeset_cache or cs_cache['raw_id'] != self.changeset_cache['raw_id']):
            _default = datetime.datetime.fromtimestamp(0)
            last_change = cs_cache.get('date') or _default
            log.debug('updated repo %s with new cs cache %s',
                      self.repo_name, cs_cache)
            self.updated_on = last_change
            self.changeset_cache = cs_cache
            Session().commit()
        else:
            log.debug('changeset_cache for %s already up to date with %s',
                      self.repo_name, cs_cache['raw_id'])
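
    # Illustrative sketch (not part of the original module): called without an
    # argument, the method reads the current tip from the (uncached) scm
    # instance and only writes to the database when raw_id actually changed:
    #
    #     repo.update_changeset_cache()     # refresh from the scm backend
    #     repo.changeset_cache['raw_id']    # raw_id of the cached tip
    #     repo.updated_on                   # bumped to the cached changeset date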

    @property
    def tip(self):
        return self.get_changeset('tip')

    @property
    def author(self):
        return self.tip.author

    @property
    def last_change(self):
        return self.scm_instance.last_change

    def get_comments(self, revisions=None):
        """
        Returns comments for this repository grouped by revisions

        :param revisions: filter query by revisions only
        """
        cmts = ChangesetComment.query() \
            .filter(ChangesetComment.repo == self)
        if revisions is not None:
            if not revisions:
                return {}  # don't use sql 'in' on empty set
            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
        grouped = collections.defaultdict(list)
        for cmt in cmts.all():
            grouped[cmt.revision].append(cmt)
        return grouped

    def statuses(self, revisions):
        """
        Returns statuses for this repository.
        PRs without any votes do _not_ show up as unreviewed.

        :param revisions: list of revisions to get statuses for
        """
        if not revisions:
            return {}

        statuses = ChangesetStatus.query() \
            .filter(ChangesetStatus.repo == self) \
            .filter(ChangesetStatus.version == 0) \
            .filter(ChangesetStatus.revision.in_(revisions))

        grouped = {}
        for stat in statuses.all():
            pr_id = pr_nice_id = pr_repo = None
            if stat.pull_request:
                pr_id = stat.pull_request.pull_request_id
                pr_nice_id = PullRequest.make_nice_id(pr_id)
                pr_repo = stat.pull_request.other_repo.repo_name
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
                                      pr_id, pr_repo, pr_nice_id,
                                      stat.author]
        return grouped
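
    # Illustrative sketch (not part of the original module): the result maps
    # each revision to a list describing its latest (version 0) status and the
    # pull request it came from, if any:
    #
    #     repo.statuses(['deadbeef...'])
    #     # -> {'deadbeef...': [status, status_lbl, pr_id, pr_repo, pr_nice_id, author]}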

    def _repo_size(self):
        from kallithea.lib import helpers as h
        log.debug('calculating repository size...')
        return h.format_byte_size(self.scm_instance.size)

    #==========================================================================
    # SCM CACHE INSTANCE
    #==========================================================================

    def set_invalidate(self):
        """
        Mark caches of this repo as invalid.
        """
        CacheInvalidation.set_invalidate(self.repo_name)

    _scm_instance = None

    @property
    def scm_instance(self):
        if self._scm_instance is None:
            self._scm_instance = self.scm_instance_cached()
        return self._scm_instance

    def scm_instance_cached(self, valid_cache_keys=None):
        @cache_region('long_term', 'scm_instance_cached')
        def _c(repo_name):  # repo_name is just for the cache key
            log.debug('Creating new %s scm_instance and populating cache', repo_name)
            return self.scm_instance_no_cache()
        rn = self.repo_name

        valid = CacheInvalidation.test_and_set_valid(rn, None, valid_cache_keys=valid_cache_keys)
        if not valid:
            log.debug('Cache for %s invalidated, getting new object', rn)
            region_invalidate(_c, None, 'scm_instance_cached', rn)
        else:
            log.debug('Trying to get scm_instance of %s from cache', rn)
        return _c(rn)

    def scm_instance_no_cache(self):
        repo_full_path = self.repo_full_path
        alias = get_scm(repo_full_path)[0]
        log.debug('Creating instance of %s repository from %s',
                  alias, self.repo_full_path)
        backend = get_backend(alias)

        if alias == 'hg':
            repo = backend(repo_full_path, create=False,
                           baseui=self._ui)
        else:
            repo = backend(repo_full_path, create=False)

        return repo

    def __json__(self):
        return dict(
            repo_id=self.repo_id,
            repo_name=self.repo_name,
            landing_rev=self.landing_rev,
        )


class RepoGroup(Base, BaseDbModel):
    __tablename__ = 'groups'
    __table_args__ = (
        _table_args_default_dict,
    )

    SEP = ' &raquo; '

    group_id = Column(Integer(), primary_key=True)
    group_name = Column(Unicode(255), nullable=False, unique=True)  # full path
    parent_group_id = Column('group_parent_id', Integer(), ForeignKey('groups.group_id'), nullable=True)
    group_description = Column(Unicode(10000), nullable=False)
    owner_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    parent_group = relationship('RepoGroup', remote_side=group_id)
    owner = relationship('User')

    @classmethod
    def query(cls, sorted=False):
        """Add RepoGroup-specific helpers for common query constructs.

        sorted: if True, apply the default ordering (name, case insensitive).
        """
        q = super(RepoGroup, cls).query()

        if sorted:
            q = q.order_by(sqlalchemy.func.lower(RepoGroup.group_name))

        return q

    def __init__(self, group_name='', parent_group=None):
        self.group_name = group_name
        self.parent_group = parent_group

    def __repr__(self):
        return "<%s %s: %s>" % (self.__class__.__name__,
                                self.group_id, self.group_name)

    @classmethod
    def _generate_choice(cls, repo_group):
        """Return tuple with group_id and name as html literal"""
        from webhelpers2.html import literal
        if repo_group is None:
            return (-1, u'-- %s --' % _('top level'))
        return repo_group.group_id, literal(cls.SEP.join(repo_group.full_path_splitted))

    @classmethod
    def groups_choices(cls, groups):
        """Return tuples with group_id and name as html literal."""
        return sorted((cls._generate_choice(g) for g in groups),
                      key=lambda c: c[1].split(cls.SEP))

    @classmethod
    def url_sep(cls):
        return URL_SEP

    @classmethod
    def guess_instance(cls, value):
        return super(RepoGroup, cls).guess_instance(value, RepoGroup.get_by_group_name)

    @classmethod
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
        group_name = group_name.rstrip('/')
        if case_insensitive:
            gr = cls.query() \
                .filter(sqlalchemy.func.lower(cls.group_name) == sqlalchemy.func.lower(group_name))
        else:
            gr = cls.query() \
                .filter(cls.group_name == group_name)
        if cache:
            gr = gr.options(FromCache(
                "sql_cache_short",
                "get_group_%s" % _hash_key(group_name)
            ))
        return gr.scalar()

    @property
    def parents(self):
        groups = []
        group = self.parent_group
        while group is not None:
            groups.append(group)
            group = group.parent_group
            assert group not in groups, group  # avoid recursion on bad db content
        groups.reverse()
        return groups

    @property
    def children(self):
        return RepoGroup.query().filter(RepoGroup.parent_group == self)

    @property
    def name(self):
        return self.group_name.split(RepoGroup.url_sep())[-1]

    @property
    def full_path(self):
        return self.group_name

    @property
    def full_path_splitted(self):
        return self.group_name.split(RepoGroup.url_sep())

    @property
    def repositories(self):
        return Repository.query(sorted=True).filter_by(group=self)

    @property
    def repositories_recursive_count(self):
        cnt = self.repositories.count()

        def children_count(group):
            cnt = 0
            for child in group.children:
                cnt += child.repositories.count()
                cnt += children_count(child)
            return cnt

        return cnt + children_count(self)
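
    # Illustrative sketch (not part of the original module): the count covers
    # this group's own repositories plus those of all nested subgroups, e.g.
    # for 'top' containing 2 repos and a child group 'top/sub' containing 3:
    #
    #     group.repositories_recursive_count   # -> 5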

    def _recursive_objects(self, include_repos=True):
        all_ = []

        def _get_members(root_gr):
            if include_repos:
                for r in root_gr.repositories:
                    all_.append(r)
            childs = root_gr.children.all()
            if childs:
                for gr in childs:
                    all_.append(gr)
                    _get_members(gr)

        _get_members(self)
        return [self] + all_

    def recursive_groups_and_repos(self):
        """
        Recursively returns all groups, with the repositories in those groups
        """
        return self._recursive_objects()

    def recursive_groups(self):
        """
        Returns all children groups for this group including children of children
        """
        return self._recursive_objects(include_repos=False)

    def get_new_name(self, group_name):
        """
        returns the new full group name based on the parent and the new name

        :param group_name:
        """
        path_prefix = (self.parent_group.full_path_splitted if
                       self.parent_group else [])
        return RepoGroup.url_sep().join(path_prefix + [group_name])

    def get_api_data(self):
        """
        Common function for generating api data
        """
        group = self
        data = dict(
            group_id=group.group_id,
            group_name=group.group_name,
            group_description=group.group_description,
            parent_group=group.parent_group.group_name if group.parent_group else None,
            repositories=[x.repo_name for x in group.repositories],
            owner=group.owner.username
        )
        return data


class Permission(Base, BaseDbModel):
    __tablename__ = 'permissions'
    __table_args__ = (
        Index('p_perm_name_idx', 'permission_name'),
        _table_args_default_dict,
    )

    PERMS = (
        ('hg.admin', _('Kallithea Administrator')),

        ('repository.none', _('Default user has no access to new repositories')),
        ('repository.read', _('Default user has read access to new repositories')),
        ('repository.write', _('Default user has write access to new repositories')),
        ('repository.admin', _('Default user has admin access to new repositories')),

        ('group.none', _('Default user has no access to new repository groups')),
        ('group.read', _('Default user has read access to new repository groups')),
        ('group.write', _('Default user has write access to new repository groups')),
        ('group.admin', _('Default user has admin access to new repository groups')),

        ('usergroup.none', _('Default user has no access to new user groups')),
        ('usergroup.read', _('Default user has read access to new user groups')),
        ('usergroup.write', _('Default user has write access to new user groups')),
        ('usergroup.admin', _('Default user has admin access to new user groups')),

        ('hg.repogroup.create.false', _('Only admins can create repository groups')),
        ('hg.repogroup.create.true', _('Non-admins can create repository groups')),

        ('hg.usergroup.create.false', _('Only admins can create user groups')),
        ('hg.usergroup.create.true', _('Non-admins can create user groups')),

        ('hg.create.none', _('Only admins can create top level repositories')),
        ('hg.create.repository', _('Non-admins can create top level repositories')),

        ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
        ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),

        ('hg.fork.none', _('Only admins can fork repositories')),
        ('hg.fork.repository', _('Non-admins can fork repositories')),

        ('hg.register.none', _('Registration disabled')),
        ('hg.register.manual_activate', _('User registration with manual account activation')),
        ('hg.register.auto_activate', _('User registration with automatic account activation')),

        ('hg.extern_activate.manual', _('Manual activation of external account')),
        ('hg.extern_activate.auto', _('Automatic activation of external account')),
    )

    # definition of system default permissions for DEFAULT user
    DEFAULT_USER_PERMISSIONS = (
        'repository.read',
        'group.read',
        'usergroup.read',
        'hg.create.repository',
        'hg.create.write_on_repogroup.true',
        'hg.fork.repository',
        'hg.register.manual_activate',
        'hg.extern_activate.auto',
    )

    # Weight defines which permissions are more important.
    # The higher number the more important.
    PERM_WEIGHTS = {
        'repository.none': 0,
        'repository.read': 1,
        'repository.write': 3,
        'repository.admin': 4,

        'group.none': 0,
        'group.read': 1,
        'group.write': 3,
        'group.admin': 4,

        'usergroup.none': 0,
        'usergroup.read': 1,
        'usergroup.write': 3,
        'usergroup.admin': 4,

        'hg.repogroup.create.false': 0,
        'hg.repogroup.create.true': 1,

        'hg.usergroup.create.false': 0,
        'hg.usergroup.create.true': 1,

        'hg.fork.none': 0,
        'hg.fork.repository': 1,

        'hg.create.none': 0,
        'hg.create.repository': 1,

        'hg.create.write_on_repogroup.false': 0,
        'hg.create.write_on_repogroup.true': 1,

        'hg.register.none': 0,
        'hg.register.manual_activate': 1,
        'hg.register.auto_activate': 2,

        'hg.extern_activate.manual': 0,
        'hg.extern_activate.auto': 1,
    }
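
    # Illustrative sketch (not part of the original module): the weights allow
    # picking the strongest of two permissions of the same kind, e.g.:
    #
    #     max('repository.read', 'repository.write', key=PERM_WEIGHTS.get)
    #     # -> 'repository.write'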

    permission_id = Column(Integer(), primary_key=True)
    permission_name = Column(String(255), nullable=False)

    def __repr__(self):
        return "<%s %s: %r>" % (
            self.__class__.__name__, self.permission_id, self.permission_name
        )

    @classmethod
    def guess_instance(cls, value):
        return super(Permission, cls).guess_instance(value, Permission.get_by_key)

    @classmethod
    def get_by_key(cls, key):
        return cls.query().filter(cls.permission_name == key).scalar()

    @classmethod
    def get_default_perms(cls, default_user_id):
        q = Session().query(UserRepoToPerm, Repository, cls) \
            .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id)) \
            .join((cls, UserRepoToPerm.permission_id == cls.permission_id)) \
            .filter(UserRepoToPerm.user_id == default_user_id)

        return q.all()

    @classmethod
    def get_default_group_perms(cls, default_user_id):
        q = Session().query(UserRepoGroupToPerm, RepoGroup, cls) \
            .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id)) \
            .join((cls, UserRepoGroupToPerm.permission_id == cls.permission_id)) \
            .filter(UserRepoGroupToPerm.user_id == default_user_id)

        return q.all()

    @classmethod
    def get_default_user_group_perms(cls, default_user_id):
        q = Session().query(UserUserGroupToPerm, UserGroup, cls) \
            .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id)) \
            .join((cls, UserUserGroupToPerm.permission_id == cls.permission_id)) \
            .filter(UserUserGroupToPerm.user_id == default_user_id)

        return q.all()


class UserRepoToPerm(Base, BaseDbModel):
    __tablename__ = 'repo_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
        _table_args_default_dict,
    )

    repo_to_perm_id = Column(Integer(), primary_key=True)
    user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False)
    permission_id = Column(Integer(), ForeignKey('permissions.permission_id'), nullable=False)
    repository_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=False)

    user = relationship('User')
    repository = relationship('Repository')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository, permission):
        n = cls()
        n.user = user
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __repr__(self):
        return '<%s %s at %s: %s>' % (
            self.__class__.__name__, self.user, self.repository, self.permission)


class UserUserGroupToPerm(Base, BaseDbModel):
    __tablename__ = 'user_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
        _table_args_default_dict,
    )

    user_user_group_to_perm_id = Column(Integer(), primary_key=True)
    user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False)
    permission_id = Column(Integer(), ForeignKey('permissions.permission_id'), nullable=False)
    user_group_id = Column(Integer(), ForeignKey('users_groups.users_group_id'), nullable=False)

    user = relationship('User')
    user_group = relationship('UserGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, user_group, permission):
        n = cls()
        n.user = user
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __repr__(self):
        return '<%s %s at %s: %s>' % (
            self.__class__.__name__, self.user, self.user_group, self.permission)


class UserToPerm(Base, BaseDbModel):
    __tablename__ = 'user_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'permission_id'),
        _table_args_default_dict,
    )

    user_to_perm_id = Column(Integer(), primary_key=True)
    user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False)
    permission_id = Column(Integer(), ForeignKey('permissions.permission_id'), nullable=False)

    user = relationship('User')
    permission = relationship('Permission')

    def __repr__(self):
        return '<%s %s: %s>' % (
            self.__class__.__name__, self.user, self.permission)


class UserGroupRepoToPerm(Base, BaseDbModel):
    __tablename__ = 'users_group_repo_to_perm'
    __table_args__ = (
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
        _table_args_default_dict,
    )

    users_group_to_perm_id = Column(Integer(), primary_key=True)
    users_group_id = Column(Integer(), ForeignKey('users_groups.users_group_id'), nullable=False)
    permission_id = Column(Integer(), ForeignKey('permissions.permission_id'), nullable=False)
    repository_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=False)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    repository = relationship('Repository')

    @classmethod
    def create(cls, users_group, repository, permission):
        n = cls()
        n.users_group = users_group
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __repr__(self):
        return '<%s %s at %s: %s>' % (
            self.__class__.__name__, self.users_group, self.repository, self.permission)


class UserGroupUserGroupToPerm(Base, BaseDbModel):
    __tablename__ = 'user_group_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
        _table_args_default_dict,
    )

    user_group_user_group_to_perm_id = Column(Integer(), primary_key=True)
    target_user_group_id = Column(Integer(), ForeignKey('users_groups.users_group_id'), nullable=False)
    permission_id = Column(Integer(), ForeignKey('permissions.permission_id'), nullable=False)
    user_group_id = Column(Integer(), ForeignKey('users_groups.users_group_id'), nullable=False)

    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
    permission = relationship('Permission')

    @classmethod
    def create(cls, target_user_group, user_group, permission):
        n = cls()
        n.target_user_group = target_user_group
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __repr__(self):
        return '<%s %s at %s: %s>' % (
            self.__class__.__name__, self.user_group, self.target_user_group, self.permission)


class UserGroupToPerm(Base, BaseDbModel):
    __tablename__ = 'users_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'permission_id',),
        _table_args_default_dict,
    )

1918 users_group_to_perm_id = Column(Integer(), primary_key=True)
1918 users_group_to_perm_id = Column(Integer(), primary_key=True)
1919 users_group_id = Column(Integer(), ForeignKey('users_groups.users_group_id'), nullable=False)
1919 users_group_id = Column(Integer(), ForeignKey('users_groups.users_group_id'), nullable=False)
1920 permission_id = Column(Integer(), ForeignKey('permissions.permission_id'), nullable=False)
1920 permission_id = Column(Integer(), ForeignKey('permissions.permission_id'), nullable=False)
1921
1921
1922 users_group = relationship('UserGroup')
1922 users_group = relationship('UserGroup')
1923 permission = relationship('Permission')
1923 permission = relationship('Permission')
1924
1924
1925
1925
1926 class UserRepoGroupToPerm(Base, BaseDbModel):
1926 class UserRepoGroupToPerm(Base, BaseDbModel):
1927 __tablename__ = 'user_repo_group_to_perm'
1927 __tablename__ = 'user_repo_group_to_perm'
1928 __table_args__ = (
1928 __table_args__ = (
1929 UniqueConstraint('user_id', 'group_id', 'permission_id'),
1929 UniqueConstraint('user_id', 'group_id', 'permission_id'),
1930 _table_args_default_dict,
1930 _table_args_default_dict,
1931 )
1931 )
1932
1932
1933 group_to_perm_id = Column(Integer(), primary_key=True)
1933 group_to_perm_id = Column(Integer(), primary_key=True)
1934 user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False)
1934 user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False)
1935 group_id = Column(Integer(), ForeignKey('groups.group_id'), nullable=False)
1935 group_id = Column(Integer(), ForeignKey('groups.group_id'), nullable=False)
1936 permission_id = Column(Integer(), ForeignKey('permissions.permission_id'), nullable=False)
1936 permission_id = Column(Integer(), ForeignKey('permissions.permission_id'), nullable=False)
1937
1937
1938 user = relationship('User')
1938 user = relationship('User')
1939 group = relationship('RepoGroup')
1939 group = relationship('RepoGroup')
1940 permission = relationship('Permission')
1940 permission = relationship('Permission')
1941
1941
1942 @classmethod
1942 @classmethod
1943 def create(cls, user, repository_group, permission):
1943 def create(cls, user, repository_group, permission):
1944 n = cls()
1944 n = cls()
1945 n.user = user
1945 n.user = user
1946 n.group = repository_group
1946 n.group = repository_group
1947 n.permission = permission
1947 n.permission = permission
1948 Session().add(n)
1948 Session().add(n)
1949 return n
1949 return n
1950
1950
1951
1951
1952 class UserGroupRepoGroupToPerm(Base, BaseDbModel):
1952 class UserGroupRepoGroupToPerm(Base, BaseDbModel):
1953 __tablename__ = 'users_group_repo_group_to_perm'
1953 __tablename__ = 'users_group_repo_group_to_perm'
1954 __table_args__ = (
1954 __table_args__ = (
1955 UniqueConstraint('users_group_id', 'group_id'),
1955 UniqueConstraint('users_group_id', 'group_id'),
1956 _table_args_default_dict,
1956 _table_args_default_dict,
1957 )
1957 )
1958
1958
1959 users_group_repo_group_to_perm_id = Column(Integer(), primary_key=True)
1959 users_group_repo_group_to_perm_id = Column(Integer(), primary_key=True)
1960 users_group_id = Column(Integer(), ForeignKey('users_groups.users_group_id'), nullable=False)
1960 users_group_id = Column(Integer(), ForeignKey('users_groups.users_group_id'), nullable=False)
1961 group_id = Column(Integer(), ForeignKey('groups.group_id'), nullable=False)
1961 group_id = Column(Integer(), ForeignKey('groups.group_id'), nullable=False)
1962 permission_id = Column(Integer(), ForeignKey('permissions.permission_id'), nullable=False)
1962 permission_id = Column(Integer(), ForeignKey('permissions.permission_id'), nullable=False)
1963
1963
1964 users_group = relationship('UserGroup')
1964 users_group = relationship('UserGroup')
1965 permission = relationship('Permission')
1965 permission = relationship('Permission')
1966 group = relationship('RepoGroup')
1966 group = relationship('RepoGroup')
1967
1967
1968 @classmethod
1968 @classmethod
1969 def create(cls, user_group, repository_group, permission):
1969 def create(cls, user_group, repository_group, permission):
1970 n = cls()
1970 n = cls()
1971 n.users_group = user_group
1971 n.users_group = user_group
1972 n.group = repository_group
1972 n.group = repository_group
1973 n.permission = permission
1973 n.permission = permission
1974 Session().add(n)
1974 Session().add(n)
1975 return n
1975 return n
1976
1976
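The create() helpers on these permission models only build the row and add it to the current SQLAlchemy session; committing is left to the caller. A minimal usage sketch, assuming user_group, repo_group and perm objects were loaded elsewhere:

    grant = UserGroupRepoGroupToPerm.create(user_group, repo_group, perm)
    Session().commit()  # create() only calls Session().add(); the caller decides when to commit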
1977
1977
1978 class Statistics(Base, BaseDbModel):
1978 class Statistics(Base, BaseDbModel):
1979 __tablename__ = 'statistics'
1979 __tablename__ = 'statistics'
1980 __table_args__ = (
1980 __table_args__ = (
1981 _table_args_default_dict,
1981 _table_args_default_dict,
1982 )
1982 )
1983
1983
1984 stat_id = Column(Integer(), primary_key=True)
1984 stat_id = Column(Integer(), primary_key=True)
1985 repository_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True)
1985 repository_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True)
1986 stat_on_revision = Column(Integer(), nullable=False)
1986 stat_on_revision = Column(Integer(), nullable=False)
1987 commit_activity = Column(LargeBinary(1000000), nullable=False) # JSON data
1987 commit_activity = Column(LargeBinary(1000000), nullable=False) # JSON data
1988 commit_activity_combined = Column(LargeBinary(), nullable=False) # JSON data
1988 commit_activity_combined = Column(LargeBinary(), nullable=False) # JSON data
1989 languages = Column(LargeBinary(1000000), nullable=False) # JSON data
1989 languages = Column(LargeBinary(1000000), nullable=False) # JSON data
1990
1990
1991 repository = relationship('Repository', single_parent=True)
1991 repository = relationship('Repository', single_parent=True)
1992
1992
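The commit_activity, commit_activity_combined and languages columns hold JSON-encoded blobs rather than structured rows, so consumers have to decode them. A rough sketch, assuming repo is an already-loaded Repository and that the kallithea.lib.ext_json wrapper exposes loads like the standard json module:

    stats = Statistics.query().filter(Statistics.repository_id == repo.repo_id).scalar()
    if stats is not None:
        activity = ext_json.loads(stats.commit_activity)  # LargeBinary column holding JSON bytes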
1993
1993
1994 class UserFollowing(Base, BaseDbModel):
1994 class UserFollowing(Base, BaseDbModel):
1995 __tablename__ = 'user_followings'
1995 __tablename__ = 'user_followings'
1996 __table_args__ = (
1996 __table_args__ = (
1997 UniqueConstraint('user_id', 'follows_repository_id', name='uq_user_followings_user_repo'),
1997 UniqueConstraint('user_id', 'follows_repository_id', name='uq_user_followings_user_repo'),
1998 UniqueConstraint('user_id', 'follows_user_id', name='uq_user_followings_user_user'),
1998 UniqueConstraint('user_id', 'follows_user_id', name='uq_user_followings_user_user'),
1999 _table_args_default_dict,
1999 _table_args_default_dict,
2000 )
2000 )
2001
2001
2002 user_following_id = Column(Integer(), primary_key=True)
2002 user_following_id = Column(Integer(), primary_key=True)
2003 user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False)
2003 user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False)
2004 follows_repository_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=True)
2004 follows_repository_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=True)
2005 follows_user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=True)
2005 follows_user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=True)
2006 follows_from = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2006 follows_from = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2007
2007
2008 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
2008 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
2009
2009
2010 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
2010 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
2011 follows_repository = relationship('Repository', order_by=lambda: sqlalchemy.func.lower(Repository.repo_name))
2011 follows_repository = relationship('Repository', order_by=lambda: sqlalchemy.func.lower(Repository.repo_name))
2012
2012
2013 @classmethod
2013 @classmethod
2014 def get_repo_followers(cls, repo_id):
2014 def get_repo_followers(cls, repo_id):
2015 return cls.query().filter(cls.follows_repository_id == repo_id)
2015 return cls.query().filter(cls.follows_repository_id == repo_id)
2016
2016
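get_repo_followers returns an unevaluated query rather than a list, so callers can keep refining it before execution; for example (repo being an already-loaded Repository):

    followers_q = UserFollowing.get_repo_followers(repo.repo_id)
    follower_count = followers_q.count()
    follower_users = [uf.user for uf in followers_q.all()]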
2017
2017
2018 class CacheInvalidation(Base, BaseDbModel):
2018 class CacheInvalidation(Base, BaseDbModel):
2019 __tablename__ = 'cache_invalidation'
2019 __tablename__ = 'cache_invalidation'
2020 __table_args__ = (
2020 __table_args__ = (
2021 Index('key_idx', 'cache_key'),
2021 Index('key_idx', 'cache_key'),
2022 _table_args_default_dict,
2022 _table_args_default_dict,
2023 )
2023 )
2024
2024
2025 # cache_id, not used
2025 # cache_id, not used
2026 cache_id = Column(Integer(), primary_key=True)
2026 cache_id = Column(Integer(), primary_key=True)
2027 # cache_key as created by _get_cache_key
2027 # cache_key as created by _get_cache_key
2028 cache_key = Column(Unicode(255), nullable=False, unique=True)
2028 cache_key = Column(Unicode(255), nullable=False, unique=True)
2029 # cache_args is a repo_name
2029 # cache_args is a repo_name
2030 cache_args = Column(Unicode(255), nullable=False)
2030 cache_args = Column(Unicode(255), nullable=False)
2031 # instance sets cache_active True when it is caching, other instances set
2031 # instance sets cache_active True when it is caching, other instances set
2032 # cache_active to False to indicate that this cache is invalid
2032 # cache_active to False to indicate that this cache is invalid
2033 cache_active = Column(Boolean(), nullable=False, default=False)
2033 cache_active = Column(Boolean(), nullable=False, default=False)
2034
2034
2035 def __init__(self, cache_key, repo_name=''):
2035 def __init__(self, cache_key, repo_name=''):
2036 self.cache_key = cache_key
2036 self.cache_key = cache_key
2037 self.cache_args = repo_name
2037 self.cache_args = repo_name
2038 self.cache_active = False
2038 self.cache_active = False
2039
2039
2040 def __repr__(self):
2040 def __repr__(self):
2041 return "<%s %s: %s=%s" % (
2041 return "<%s %s: %s=%s" % (
2042 self.__class__.__name__,
2042 self.__class__.__name__,
2043 self.cache_id, self.cache_key, self.cache_active)
2043 self.cache_id, self.cache_key, self.cache_active)
2044
2044
2045 def _cache_key_partition(self):
2045 def _cache_key_partition(self):
2046 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
2046 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
2047 return prefix, repo_name, suffix
2047 return prefix, repo_name, suffix
2048
2048
2049 def get_prefix(self):
2049 def get_prefix(self):
2050 """
2050 """
2051 get prefix that might have been used in _get_cache_key to
2051 get prefix that might have been used in _get_cache_key to
2052 generate self.cache_key. Only used for informational purposes
2052 generate self.cache_key. Only used for informational purposes
2053 in repo_edit.html.
2053 in repo_edit.html.
2054 """
2054 """
2055 # prefix, repo_name, suffix
2055 # prefix, repo_name, suffix
2056 return self._cache_key_partition()[0]
2056 return self._cache_key_partition()[0]
2057
2057
2058 def get_suffix(self):
2058 def get_suffix(self):
2059 """
2059 """
2060 get suffix that might have been used in _get_cache_key to
2060 get suffix that might have been used in _get_cache_key to
2061 generate self.cache_key. Only used for informational purposes
2061 generate self.cache_key. Only used for informational purposes
2062 in repo_edit.html.
2062 in repo_edit.html.
2063 """
2063 """
2064 # prefix, repo_name, suffix
2064 # prefix, repo_name, suffix
2065 return self._cache_key_partition()[2]
2065 return self._cache_key_partition()[2]
2066
2066
2067 @classmethod
2067 @classmethod
2068 def clear_cache(cls):
2068 def clear_cache(cls):
2069 """
2069 """
2070 Delete all cache keys from database.
2070 Delete all cache keys from database.
2071 Should only be run when all instances are down and all entries thus stale.
2071 Should only be run when all instances are down and all entries thus stale.
2072 """
2072 """
2073 cls.query().delete()
2073 cls.query().delete()
2074 Session().commit()
2074 Session().commit()
2075
2075
2076 @classmethod
2076 @classmethod
2077 def _get_cache_key(cls, key):
2077 def _get_cache_key(cls, key):
2078 """
2078 """
2079 Wrapper for generating a unique cache key for this instance and "key".
2079 Wrapper for generating a unique cache key for this instance and "key".
2080 key must / will start with a repo_name, which will be stored in .cache_args.
2080 key must / will start with a repo_name, which will be stored in .cache_args.
2081 """
2081 """
2082 prefix = kallithea.CONFIG.get('instance_id', '')
2082 prefix = kallithea.CONFIG.get('instance_id', '')
2083 return "%s%s" % (prefix, key)
2083 return "%s%s" % (prefix, key)
2084
2084
2085 @classmethod
2085 @classmethod
2086 def set_invalidate(cls, repo_name):
2086 def set_invalidate(cls, repo_name):
2087 """
2087 """
2088 Mark all caches of a repo as invalid in the database.
2088 Mark all caches of a repo as invalid in the database.
2089 """
2089 """
2090 inv_objs = Session().query(cls).filter(cls.cache_args == repo_name).all()
2090 inv_objs = Session().query(cls).filter(cls.cache_args == repo_name).all()
2091 log.debug('for repo %s got %s invalidation objects',
2091 log.debug('for repo %s got %s invalidation objects',
2092 repo_name, inv_objs)
2092 repo_name, inv_objs)
2093
2093
2094 for inv_obj in inv_objs:
2094 for inv_obj in inv_objs:
2095 log.debug('marking %s key for invalidation based on repo_name=%s',
2095 log.debug('marking %s key for invalidation based on repo_name=%s',
2096 inv_obj, repo_name)
2096 inv_obj, repo_name)
2097 Session().delete(inv_obj)
2097 Session().delete(inv_obj)
2098 Session().commit()
2098 Session().commit()
2099
2099
2100 @classmethod
2100 @classmethod
2101 def test_and_set_valid(cls, repo_name, kind, valid_cache_keys=None):
2101 def test_and_set_valid(cls, repo_name, kind, valid_cache_keys=None):
2102 """
2102 """
2103 Mark this cache key as active and currently cached.
2103 Mark this cache key as active and currently cached.
2104 Return True if the existing cache registration still was valid.
2104 Return True if the existing cache registration still was valid.
2105 Return False to indicate that it had been invalidated and caches should be refreshed.
2105 Return False to indicate that it had been invalidated and caches should be refreshed.
2106 """
2106 """
2107
2107
2108 key = (repo_name + '_' + kind) if kind else repo_name
2108 key = (repo_name + '_' + kind) if kind else repo_name
2109 cache_key = cls._get_cache_key(key)
2109 cache_key = cls._get_cache_key(key)
2110
2110
2111 if valid_cache_keys and cache_key in valid_cache_keys:
2111 if valid_cache_keys and cache_key in valid_cache_keys:
2112 return True
2112 return True
2113
2113
2114 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
2114 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
2115 if inv_obj is None:
2115 if inv_obj is None:
2116 inv_obj = cls(cache_key, repo_name)
2116 inv_obj = cls(cache_key, repo_name)
2117 Session().add(inv_obj)
2117 Session().add(inv_obj)
2118 elif inv_obj.cache_active:
2118 elif inv_obj.cache_active:
2119 return True
2119 return True
2120 inv_obj.cache_active = True
2120 inv_obj.cache_active = True
2121 try:
2121 try:
2122 Session().commit()
2122 Session().commit()
2123 except sqlalchemy.exc.IntegrityError:
2123 except sqlalchemy.exc.IntegrityError:
2124 log.error('commit of CacheInvalidation failed - retrying')
2124 log.error('commit of CacheInvalidation failed - retrying')
2125 Session().rollback()
2125 Session().rollback()
2126 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
2126 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
2127 if inv_obj is None:
2127 if inv_obj is None:
2128 log.error('failed to create CacheInvalidation entry')
2128 log.error('failed to create CacheInvalidation entry')
2129 # TODO: fail badly?
2129 # TODO: fail badly?
2130 # else: TOCTOU - another thread added the key at the same time; no further action required
2130 # else: TOCTOU - another thread added the key at the same time; no further action required
2131 return False
2131 return False
2132
2132
2133 @classmethod
2133 @classmethod
2134 def get_valid_cache_keys(cls):
2134 def get_valid_cache_keys(cls):
2135 """
2135 """
2136 Return an opaque object with information about which caches are still valid
2136 Return an opaque object with information about which caches are still valid
2137 and can be used without checking for invalidation.
2137 and can be used without checking for invalidation.
2138 """
2138 """
2139 return set(inv_obj.cache_key for inv_obj in cls.query().filter(cls.cache_active).all())
2139 return set(inv_obj.cache_key for inv_obj in cls.query().filter(cls.cache_active).all())
2140
2140
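Taken together, the methods above form a small invalidation protocol: readers ask test_and_set_valid() whether their cached value can still be trusted, and writers call set_invalidate() after changing the repository. A rough sketch of a caller, where cache, load_repo_data and repo_name are placeholders rather than Kallithea APIs:

    valid_keys = CacheInvalidation.get_valid_cache_keys()
    if CacheInvalidation.test_and_set_valid(repo_name, 'README', valid_cache_keys=valid_keys):
        readme = cache.get(repo_name)        # registration still valid - reuse the cached value
    else:
        readme = load_repo_data(repo_name)   # invalidated (or never cached) - recompute and store
        cache.set(repo_name, readme)
    # later, e.g. from a push hook:
    CacheInvalidation.set_invalidate(repo_name)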
2141
2141
2142 class ChangesetComment(Base, BaseDbModel):
2142 class ChangesetComment(Base, BaseDbModel):
2143 __tablename__ = 'changeset_comments'
2143 __tablename__ = 'changeset_comments'
2144 __table_args__ = (
2144 __table_args__ = (
2145 Index('cc_revision_idx', 'revision'),
2145 Index('cc_revision_idx', 'revision'),
2146 Index('cc_pull_request_id_idx', 'pull_request_id'),
2146 Index('cc_pull_request_id_idx', 'pull_request_id'),
2147 _table_args_default_dict,
2147 _table_args_default_dict,
2148 )
2148 )
2149
2149
2150 comment_id = Column(Integer(), primary_key=True)
2150 comment_id = Column(Integer(), primary_key=True)
2151 repo_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=False)
2151 repo_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=False)
2152 revision = Column(String(40), nullable=True)
2152 revision = Column(String(40), nullable=True)
2153 pull_request_id = Column(Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
2153 pull_request_id = Column(Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
2154 line_no = Column(Unicode(10), nullable=True)
2154 line_no = Column(Unicode(10), nullable=True)
2155 f_path = Column(Unicode(1000), nullable=True)
2155 f_path = Column(Unicode(1000), nullable=True)
2156 author_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
2156 author_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
2157 text = Column(UnicodeText(), nullable=False)
2157 text = Column(UnicodeText(), nullable=False)
2158 created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2158 created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2159 modified_at = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2159 modified_at = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2160
2160
2161 author = relationship('User')
2161 author = relationship('User')
2162 repo = relationship('Repository')
2162 repo = relationship('Repository')
2163 # status_change is frequently used directly in templates - load it eagerly
2163 # status_change is frequently used directly in templates - load it eagerly
2164 # (lazy='joined') to avoid fetching each related ChangesetStatus on demand.
2164 # (lazy='joined') to avoid fetching each related ChangesetStatus on demand.
2165 # There will only be one ChangesetStatus referencing each comment so the join will not explode.
2165 # There will only be one ChangesetStatus referencing each comment so the join will not explode.
2166 status_change = relationship('ChangesetStatus',
2166 status_change = relationship('ChangesetStatus',
2167 cascade="all, delete-orphan", lazy='joined')
2167 cascade="all, delete-orphan", lazy='joined')
2168 pull_request = relationship('PullRequest')
2168 pull_request = relationship('PullRequest')
2169
2169
2170 def url(self):
2170 def url(self):
2171 anchor = "comment-%s" % self.comment_id
2171 anchor = "comment-%s" % self.comment_id
2172 import kallithea.lib.helpers as h
2172 import kallithea.lib.helpers as h
2173 if self.revision:
2173 if self.revision:
2174 return h.url('changeset_home', repo_name=self.repo.repo_name, revision=self.revision, anchor=anchor)
2174 return h.url('changeset_home', repo_name=self.repo.repo_name, revision=self.revision, anchor=anchor)
2175 elif self.pull_request_id is not None:
2175 elif self.pull_request_id is not None:
2176 return self.pull_request.url(anchor=anchor)
2176 return self.pull_request.url(anchor=anchor)
2177
2177
2178 def __json__(self):
2178 def __json__(self):
2179 return dict(
2179 return dict(
2180 comment_id=self.comment_id,
2180 comment_id=self.comment_id,
2181 username=self.author.username,
2181 username=self.author.username,
2182 text=self.text,
2182 text=self.text,
2183 )
2183 )
2184
2184
2185 def deletable(self):
2185 def deletable(self):
2186 return self.created_on > datetime.datetime.now() - datetime.timedelta(minutes=5)
2186 return self.created_on > datetime.datetime.now() - datetime.timedelta(minutes=5)
2187
2187
2188
2188
2189 class ChangesetStatus(Base, BaseDbModel):
2189 class ChangesetStatus(Base, BaseDbModel):
2190 __tablename__ = 'changeset_statuses'
2190 __tablename__ = 'changeset_statuses'
2191 __table_args__ = (
2191 __table_args__ = (
2192 Index('cs_revision_idx', 'revision'),
2192 Index('cs_revision_idx', 'revision'),
2193 Index('cs_version_idx', 'version'),
2193 Index('cs_version_idx', 'version'),
2194 Index('cs_pull_request_id_idx', 'pull_request_id'),
2194 Index('cs_pull_request_id_idx', 'pull_request_id'),
2195 Index('cs_changeset_comment_id_idx', 'changeset_comment_id'),
2195 Index('cs_changeset_comment_id_idx', 'changeset_comment_id'),
2196 Index('cs_pull_request_id_user_id_version_idx', 'pull_request_id', 'user_id', 'version'),
2196 Index('cs_pull_request_id_user_id_version_idx', 'pull_request_id', 'user_id', 'version'),
2197 Index('cs_repo_id_pull_request_id_idx', 'repo_id', 'pull_request_id'),
2197 Index('cs_repo_id_pull_request_id_idx', 'repo_id', 'pull_request_id'),
2198 UniqueConstraint('repo_id', 'revision', 'version'),
2198 UniqueConstraint('repo_id', 'revision', 'version'),
2199 _table_args_default_dict,
2199 _table_args_default_dict,
2200 )
2200 )
2201
2201
2202 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
2202 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
2203 STATUS_APPROVED = 'approved'
2203 STATUS_APPROVED = 'approved'
2204 STATUS_REJECTED = 'rejected' # is shown as "Not approved" - TODO: change database content / schema
2204 STATUS_REJECTED = 'rejected' # is shown as "Not approved" - TODO: change database content / schema
2205 STATUS_UNDER_REVIEW = 'under_review'
2205 STATUS_UNDER_REVIEW = 'under_review'
2206
2206
2207 STATUSES = [
2207 STATUSES = [
2208 (STATUS_NOT_REVIEWED, _("Not reviewed")), # (no icon) and default
2208 (STATUS_NOT_REVIEWED, _("Not reviewed")), # (no icon) and default
2209 (STATUS_UNDER_REVIEW, _("Under review")),
2209 (STATUS_UNDER_REVIEW, _("Under review")),
2210 (STATUS_REJECTED, _("Not approved")),
2210 (STATUS_REJECTED, _("Not approved")),
2211 (STATUS_APPROVED, _("Approved")),
2211 (STATUS_APPROVED, _("Approved")),
2212 ]
2212 ]
2213 STATUSES_DICT = dict(STATUSES)
2213 STATUSES_DICT = dict(STATUSES)
2214
2214
2215 changeset_status_id = Column(Integer(), primary_key=True)
2215 changeset_status_id = Column(Integer(), primary_key=True)
2216 repo_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=False)
2216 repo_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=False)
2217 user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False)
2217 user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False)
2218 revision = Column(String(40), nullable=True)
2218 revision = Column(String(40), nullable=True)
2219 status = Column(String(128), nullable=False, default=DEFAULT)
2219 status = Column(String(128), nullable=False, default=DEFAULT)
2220 comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=False)
2220 comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=False)
2221 modified_at = Column(DateTime(), nullable=False, default=datetime.datetime.now)
2221 modified_at = Column(DateTime(), nullable=False, default=datetime.datetime.now)
2222 version = Column(Integer(), nullable=False, default=0)
2222 version = Column(Integer(), nullable=False, default=0)
2223 pull_request_id = Column(Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
2223 pull_request_id = Column(Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
2224
2224
2225 author = relationship('User')
2225 author = relationship('User')
2226 repo = relationship('Repository')
2226 repo = relationship('Repository')
2227 comment = relationship('ChangesetComment')
2227 comment = relationship('ChangesetComment')
2228 pull_request = relationship('PullRequest')
2228 pull_request = relationship('PullRequest')
2229
2229
2230 def __repr__(self):
2230 def __repr__(self):
2231 return "<%s %r by %r>" % (
2231 return "<%s %r by %r>" % (
2232 self.__class__.__name__,
2232 self.__class__.__name__,
2233 self.status, self.author
2233 self.status, self.author
2234 )
2234 )
2235
2235
2236 @classmethod
2236 @classmethod
2237 def get_status_lbl(cls, value):
2237 def get_status_lbl(cls, value):
2238 return cls.STATUSES_DICT.get(value)
2238 return cls.STATUSES_DICT.get(value)
2239
2239
2240 @property
2240 @property
2241 def status_lbl(self):
2241 def status_lbl(self):
2242 return ChangesetStatus.get_status_lbl(self.status)
2242 return ChangesetStatus.get_status_lbl(self.status)
2243
2243
2244 def __json__(self):
2244 def __json__(self):
2245 return dict(
2245 return dict(
2246 status=self.status,
2246 status=self.status,
2247 modified_at=self.modified_at.replace(microsecond=0),
2247 modified_at=self.modified_at.replace(microsecond=0),
2248 reviewer=self.author.username,
2248 reviewer=self.author.username,
2249 )
2249 )
2250
2250
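get_status_lbl simply looks the stored status string up in STATUSES_DICT, returning None for values that are not defined; for instance:

    ChangesetStatus.get_status_lbl(ChangesetStatus.STATUS_REJECTED)  # the "Not approved" label
    ChangesetStatus.get_status_lbl('bogus')                          # None - not in STATUSES_DICT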
2251
2251
2252 class PullRequest(Base, BaseDbModel):
2252 class PullRequest(Base, BaseDbModel):
2253 __tablename__ = 'pull_requests'
2253 __tablename__ = 'pull_requests'
2254 __table_args__ = (
2254 __table_args__ = (
2255 Index('pr_org_repo_id_idx', 'org_repo_id'),
2255 Index('pr_org_repo_id_idx', 'org_repo_id'),
2256 Index('pr_other_repo_id_idx', 'other_repo_id'),
2256 Index('pr_other_repo_id_idx', 'other_repo_id'),
2257 _table_args_default_dict,
2257 _table_args_default_dict,
2258 )
2258 )
2259
2259
2260 # values for .status
2260 # values for .status
2261 STATUS_NEW = u'new'
2261 STATUS_NEW = u'new'
2262 STATUS_CLOSED = u'closed'
2262 STATUS_CLOSED = u'closed'
2263
2263
2264 pull_request_id = Column(Integer(), primary_key=True)
2264 pull_request_id = Column(Integer(), primary_key=True)
2265 title = Column(Unicode(255), nullable=False)
2265 title = Column(Unicode(255), nullable=False)
2266 description = Column(UnicodeText(), nullable=False)
2266 description = Column(UnicodeText(), nullable=False)
2267 status = Column(Unicode(255), nullable=False, default=STATUS_NEW) # only for closedness, not approve/reject/etc
2267 status = Column(Unicode(255), nullable=False, default=STATUS_NEW) # only for closedness, not approve/reject/etc
2268 created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2268 created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2269 updated_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2269 updated_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2270 owner_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
2270 owner_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
2271 _revisions = Column('revisions', UnicodeText(), nullable=False)
2271 _revisions = Column('revisions', UnicodeText(), nullable=False)
2272 org_repo_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=False)
2272 org_repo_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=False)
2273 org_ref = Column(Unicode(255), nullable=False)
2273 org_ref = Column(Unicode(255), nullable=False)
2274 other_repo_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=False)
2274 other_repo_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=False)
2275 other_ref = Column(Unicode(255), nullable=False)
2275 other_ref = Column(Unicode(255), nullable=False)
2276
2276
2277 @hybrid_property
2277 @hybrid_property
2278 def revisions(self):
2278 def revisions(self):
2279 return self._revisions.split(':')
2279 return self._revisions.split(':')
2280
2280
2281 @revisions.setter
2281 @revisions.setter
2282 def revisions(self, val):
2282 def revisions(self, val):
2283 self._revisions = ':'.join(val)
2283 self._revisions = ':'.join(val)
2284
2284
2285 @property
2285 @property
2286 def org_ref_parts(self):
2286 def org_ref_parts(self):
2287 return self.org_ref.split(':')
2287 return self.org_ref.split(':')
2288
2288
2289 @property
2289 @property
2290 def other_ref_parts(self):
2290 def other_ref_parts(self):
2291 return self.other_ref.split(':')
2291 return self.other_ref.split(':')
2292
2292
2293 owner = relationship('User')
2293 owner = relationship('User')
2294 reviewers = relationship('PullRequestReviewer',
2294 reviewers = relationship('PullRequestReviewer',
2295 cascade="all, delete-orphan")
2295 cascade="all, delete-orphan")
2296 org_repo = relationship('Repository', primaryjoin='PullRequest.org_repo_id==Repository.repo_id')
2296 org_repo = relationship('Repository', primaryjoin='PullRequest.org_repo_id==Repository.repo_id')
2297 other_repo = relationship('Repository', primaryjoin='PullRequest.other_repo_id==Repository.repo_id')
2297 other_repo = relationship('Repository', primaryjoin='PullRequest.other_repo_id==Repository.repo_id')
2298 statuses = relationship('ChangesetStatus', order_by='ChangesetStatus.changeset_status_id')
2298 statuses = relationship('ChangesetStatus', order_by='ChangesetStatus.changeset_status_id')
2299 comments = relationship('ChangesetComment', order_by='ChangesetComment.comment_id',
2299 comments = relationship('ChangesetComment', order_by='ChangesetComment.comment_id',
2300 cascade="all, delete-orphan")
2300 cascade="all, delete-orphan")
2301
2301
2302 @classmethod
2302 @classmethod
2303 def query(cls, reviewer_id=None, include_closed=True, sorted=False):
2303 def query(cls, reviewer_id=None, include_closed=True, sorted=False):
2304 """Add PullRequest-specific helpers for common query constructs.
2304 """Add PullRequest-specific helpers for common query constructs.
2305
2305
2306 reviewer_id: only PRs with the specified user added as reviewer.
2306 reviewer_id: only PRs with the specified user added as reviewer.
2307
2307
2308 include_closed: if False, do not include closed PRs.
2308 include_closed: if False, do not include closed PRs.
2309
2309
2310 sorted: if True, apply the default ordering (newest first).
2310 sorted: if True, apply the default ordering (newest first).
2311 """
2311 """
2312 q = super(PullRequest, cls).query()
2312 q = super(PullRequest, cls).query()
2313
2313
2314 if reviewer_id is not None:
2314 if reviewer_id is not None:
2315 q = q.join(PullRequestReviewer).filter(PullRequestReviewer.user_id == reviewer_id)
2315 q = q.join(PullRequestReviewer).filter(PullRequestReviewer.user_id == reviewer_id)
2316
2316
2317 if not include_closed:
2317 if not include_closed:
2318 q = q.filter(PullRequest.status != PullRequest.STATUS_CLOSED)
2318 q = q.filter(PullRequest.status != PullRequest.STATUS_CLOSED)
2319
2319
2320 if sorted:
2320 if sorted:
2321 q = q.order_by(PullRequest.created_on.desc())
2321 q = q.order_by(PullRequest.created_on.desc())
2322
2322
2323 return q
2323 return q
2324
2324
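The keyword arguments of query() can be combined freely; a typical call for a review dashboard might look like this (user being an already-loaded User):

    # open pull requests the user has been asked to review, newest first
    open_prs = PullRequest.query(reviewer_id=user.user_id,
                                 include_closed=False,
                                 sorted=True).all()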
2325 def get_reviewer_users(self):
2325 def get_reviewer_users(self):
2326 """Like .reviewers, but actually returning the users"""
2326 """Like .reviewers, but actually returning the users"""
2327 return User.query() \
2327 return User.query() \
2328 .join(PullRequestReviewer) \
2328 .join(PullRequestReviewer) \
2329 .filter(PullRequestReviewer.pull_request == self) \
2329 .filter(PullRequestReviewer.pull_request == self) \
2330 .order_by(PullRequestReviewer.pull_request_reviewers_id) \
2330 .order_by(PullRequestReviewer.pull_request_reviewers_id) \
2331 .all()
2331 .all()
2332
2332
2333 def is_closed(self):
2333 def is_closed(self):
2334 return self.status == self.STATUS_CLOSED
2334 return self.status == self.STATUS_CLOSED
2335
2335
2336 def user_review_status(self, user_id):
2336 def user_review_status(self, user_id):
2337 """Return the user's latest status votes on PR"""
2337 """Return the user's latest status votes on PR"""
2338 # note: no filtering on repo - that would be redundant
2338 # note: no filtering on repo - that would be redundant
2339 status = ChangesetStatus.query() \
2339 status = ChangesetStatus.query() \
2340 .filter(ChangesetStatus.pull_request == self) \
2340 .filter(ChangesetStatus.pull_request == self) \
2341 .filter(ChangesetStatus.user_id == user_id) \
2341 .filter(ChangesetStatus.user_id == user_id) \
2342 .order_by(ChangesetStatus.version) \
2342 .order_by(ChangesetStatus.version) \
2343 .first()
2343 .first()
2344 return str(status.status) if status else ''
2344 return str(status.status) if status else ''
2345
2345
2346 @classmethod
2346 @classmethod
2347 def make_nice_id(cls, pull_request_id):
2347 def make_nice_id(cls, pull_request_id):
2348 '''Return pull request id nicely formatted for displaying'''
2348 '''Return pull request id nicely formatted for displaying'''
2349 return '#%s' % pull_request_id
2349 return '#%s' % pull_request_id
2350
2350
2351 def nice_id(self):
2351 def nice_id(self):
2352 '''Return the id of this pull request, nicely formatted for displaying'''
2352 '''Return the id of this pull request, nicely formatted for displaying'''
2353 return self.make_nice_id(self.pull_request_id)
2353 return self.make_nice_id(self.pull_request_id)
2354
2354
2355 def get_api_data(self):
2355 def get_api_data(self):
2356 return self.__json__()
2356 return self.__json__()
2357
2357
2358 def __json__(self):
2358 def __json__(self):
2359 clone_uri_tmpl = kallithea.CONFIG.get('clone_uri_tmpl') or Repository.DEFAULT_CLONE_URI
2359 clone_uri_tmpl = kallithea.CONFIG.get('clone_uri_tmpl') or Repository.DEFAULT_CLONE_URI
2360 return dict(
2360 return dict(
2361 pull_request_id=self.pull_request_id,
2361 pull_request_id=self.pull_request_id,
2362 url=self.url(),
2362 url=self.url(),
2363 reviewers=self.reviewers,
2363 reviewers=self.reviewers,
2364 revisions=self.revisions,
2364 revisions=self.revisions,
2365 owner=self.owner.username,
2365 owner=self.owner.username,
2366 title=self.title,
2366 title=self.title,
2367 description=self.description,
2367 description=self.description,
2368 org_repo_url=self.org_repo.clone_url(clone_uri_tmpl=clone_uri_tmpl),
2368 org_repo_url=self.org_repo.clone_url(clone_uri_tmpl=clone_uri_tmpl),
2369 org_ref_parts=self.org_ref_parts,
2369 org_ref_parts=self.org_ref_parts,
2370 other_ref_parts=self.other_ref_parts,
2370 other_ref_parts=self.other_ref_parts,
2371 status=self.status,
2371 status=self.status,
2372 comments=self.comments,
2372 comments=self.comments,
2373 statuses=self.statuses,
2373 statuses=self.statuses,
2374 )
2374 )
2375
2375
2376 def url(self, **kwargs):
2376 def url(self, **kwargs):
2377 canonical = kwargs.pop('canonical', None)
2377 canonical = kwargs.pop('canonical', None)
2378 import kallithea.lib.helpers as h
2378 import kallithea.lib.helpers as h
2379 b = self.org_ref_parts[1]
2379 b = self.org_ref_parts[1]
2380 if b != self.other_ref_parts[1]:
2380 if b != self.other_ref_parts[1]:
2381 s = '/_/' + b
2381 s = '/_/' + b
2382 else:
2382 else:
2383 s = '/_/' + self.title
2383 s = '/_/' + self.title
2384 kwargs['extra'] = urlreadable(s)
2384 kwargs['extra'] = urlreadable(s)
2385 if canonical:
2385 if canonical:
2386 return h.canonical_url('pullrequest_show', repo_name=self.other_repo.repo_name,
2386 return h.canonical_url('pullrequest_show', repo_name=self.other_repo.repo_name,
2387 pull_request_id=self.pull_request_id, **kwargs)
2387 pull_request_id=self.pull_request_id, **kwargs)
2388 return h.url('pullrequest_show', repo_name=self.other_repo.repo_name,
2388 return h.url('pullrequest_show', repo_name=self.other_repo.repo_name,
2389 pull_request_id=self.pull_request_id, **kwargs)
2389 pull_request_id=self.pull_request_id, **kwargs)
2390
2390
2391
2391
2392 class PullRequestReviewer(Base, BaseDbModel):
2392 class PullRequestReviewer(Base, BaseDbModel):
2393 __tablename__ = 'pull_request_reviewers'
2393 __tablename__ = 'pull_request_reviewers'
2394 __table_args__ = (
2394 __table_args__ = (
2395 Index('pull_request_reviewers_user_id_idx', 'user_id'),
2395 Index('pull_request_reviewers_user_id_idx', 'user_id'),
2396 _table_args_default_dict,
2396 _table_args_default_dict,
2397 )
2397 )
2398
2398
2399 def __init__(self, user=None, pull_request=None):
2399 def __init__(self, user=None, pull_request=None):
2400 self.user = user
2400 self.user = user
2401 self.pull_request = pull_request
2401 self.pull_request = pull_request
2402
2402
2403 pull_request_reviewers_id = Column('pull_requests_reviewers_id', Integer(), primary_key=True)
2403 pull_request_reviewers_id = Column('pull_requests_reviewers_id', Integer(), primary_key=True)
2404 pull_request_id = Column(Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
2404 pull_request_id = Column(Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
2405 user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False)
2405 user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False)
2406
2406
2407 user = relationship('User')
2407 user = relationship('User')
2408 pull_request = relationship('PullRequest')
2408 pull_request = relationship('PullRequest')
2409
2409
2410 def __json__(self):
2410 def __json__(self):
2411 return dict(
2411 return dict(
2412 username=self.user.username if self.user else None,
2412 username=self.user.username if self.user else None,
2413 )
2413 )
2414
2414
2415
2415
2416 class Notification(object):
2416 class Notification(object):
2417 __tablename__ = 'notifications'
2417 __tablename__ = 'notifications'
2418
2418
2419 class UserNotification(object):
2419 class UserNotification(object):
2420 __tablename__ = 'user_to_notification'
2420 __tablename__ = 'user_to_notification'
2421
2421
2422
2422
2423 class Gist(Base, BaseDbModel):
2423 class Gist(Base, BaseDbModel):
2424 __tablename__ = 'gists'
2424 __tablename__ = 'gists'
2425 __table_args__ = (
2425 __table_args__ = (
2426 Index('g_gist_access_id_idx', 'gist_access_id'),
2426 Index('g_gist_access_id_idx', 'gist_access_id'),
2427 Index('g_created_on_idx', 'created_on'),
2427 Index('g_created_on_idx', 'created_on'),
2428 _table_args_default_dict,
2428 _table_args_default_dict,
2429 )
2429 )
2430
2430
2431 GIST_PUBLIC = u'public'
2431 GIST_PUBLIC = u'public'
2432 GIST_PRIVATE = u'private'
2432 GIST_PRIVATE = u'private'
2433 DEFAULT_FILENAME = u'gistfile1.txt'
2433 DEFAULT_FILENAME = u'gistfile1.txt'
2434
2434
2435 gist_id = Column(Integer(), primary_key=True)
2435 gist_id = Column(Integer(), primary_key=True)
2436 gist_access_id = Column(Unicode(250), nullable=False)
2436 gist_access_id = Column(Unicode(250), nullable=False)
2437 gist_description = Column(UnicodeText(), nullable=False)
2437 gist_description = Column(UnicodeText(), nullable=False)
2438 owner_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
2438 owner_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
2439 gist_expires = Column(Float(53), nullable=False)
2439 gist_expires = Column(Float(53), nullable=False)
2440 gist_type = Column(Unicode(128), nullable=False)
2440 gist_type = Column(Unicode(128), nullable=False)
2441 created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2441 created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2442 modified_at = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2442 modified_at = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2443
2443
2444 owner = relationship('User')
2444 owner = relationship('User')
2445
2445
2446 @hybrid_property
2446 @hybrid_property
2447 def is_expired(self):
2447 def is_expired(self):
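# bitwise & rather than 'and': as a hybrid_property this expression is also
# evaluated on the class, where it must build a SQL filter from the columns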
2448 return (self.gist_expires != -1) & (time.time() > self.gist_expires)
2448 return (self.gist_expires != -1) & (time.time() > self.gist_expires)
2449
2449
2450 def __repr__(self):
2450 def __repr__(self):
2451 return "<%s %s %s>" % (
2451 return "<%s %s %s>" % (
2452 self.__class__.__name__,
2452 self.__class__.__name__,
2453 self.gist_type, self.gist_access_id)
2453 self.gist_type, self.gist_access_id)
2454
2454
2455 @classmethod
2455 @classmethod
2456 def guess_instance(cls, value):
2456 def guess_instance(cls, value):
2457 return super(Gist, cls).guess_instance(value, Gist.get_by_access_id)
2457 return super(Gist, cls).guess_instance(value, Gist.get_by_access_id)
2458
2458
2459 @classmethod
2459 @classmethod
2460 def get_or_404(cls, id_):
2460 def get_or_404(cls, id_):
2461 res = cls.query().filter(cls.gist_access_id == id_).scalar()
2461 res = cls.query().filter(cls.gist_access_id == id_).scalar()
2462 if res is None:
2462 if res is None:
2463 raise HTTPNotFound
2463 raise HTTPNotFound
2464 return res
2464 return res
2465
2465
2466 @classmethod
2466 @classmethod
2467 def get_by_access_id(cls, gist_access_id):
2467 def get_by_access_id(cls, gist_access_id):
2468 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
2468 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
2469
2469
2470 def gist_url(self):
2470 def gist_url(self):
2471 alias_url = kallithea.CONFIG.get('gist_alias_url')
2471 alias_url = kallithea.CONFIG.get('gist_alias_url')
2472 if alias_url:
2472 if alias_url:
2473 return alias_url.replace('{gistid}', self.gist_access_id)
2473 return alias_url.replace('{gistid}', self.gist_access_id)
2474
2474
2475 import kallithea.lib.helpers as h
2475 import kallithea.lib.helpers as h
2476 return h.canonical_url('gist', gist_id=self.gist_access_id)
2476 return h.canonical_url('gist', gist_id=self.gist_access_id)
2477
2477
2478 @classmethod
2478 @classmethod
2479 def base_path(cls):
2479 def base_path(cls):
2480 """
2480 """
2481 Returns base path where all gists are stored
2481 Returns base path where all gists are stored
2482
2482
2483 :param cls:
2483 :param cls:
2484 """
2484 """
2485 from kallithea.model.gist import GIST_STORE_LOC
2485 from kallithea.model.gist import GIST_STORE_LOC
2486 q = Session().query(Ui) \
2486 q = Session().query(Ui) \
2487 .filter(Ui.ui_key == URL_SEP)
2487 .filter(Ui.ui_key == URL_SEP)
2488 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
2488 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
2489 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
2489 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
2490
2490
2491 def get_api_data(self):
2491 def get_api_data(self):
2492 """
2492 """
2493 Common function for generating gist related data for API
2493 Common function for generating gist related data for API
2494 """
2494 """
2495 gist = self
2495 gist = self
2496 data = dict(
2496 data = dict(
2497 gist_id=gist.gist_id,
2497 gist_id=gist.gist_id,
2498 type=gist.gist_type,
2498 type=gist.gist_type,
2499 access_id=gist.gist_access_id,
2499 access_id=gist.gist_access_id,
2500 description=gist.gist_description,
2500 description=gist.gist_description,
2501 url=gist.gist_url(),
2501 url=gist.gist_url(),
2502 expires=gist.gist_expires,
2502 expires=gist.gist_expires,
2503 created_on=gist.created_on,
2503 created_on=gist.created_on,
2504 )
2504 )
2505 return data
2505 return data
2506
2506
2507 def __json__(self):
2507 def __json__(self):
2508 data = dict(
2508 data = dict(
2509 )
2509 )
2510 data.update(self.get_api_data())
2510 data.update(self.get_api_data())
2511 return data
2511 return data
2512 ## SCM functions
2512 ## SCM functions
2513
2513
2514 @property
2514 @property
2515 def scm_instance(self):
2515 def scm_instance(self):
2516 from kallithea.lib.vcs import get_repo
2516 from kallithea.lib.vcs import get_repo
2517 base_path = self.base_path()
2517 base_path = self.base_path()
2518 return get_repo(os.path.join(base_path, self.gist_access_id))
2518 return get_repo(os.path.join(base_path, self.gist_access_id))
2519
2519
2520
2520
2521 class UserSshKeys(Base, BaseDbModel):
2521 class UserSshKeys(Base, BaseDbModel):
2522 __tablename__ = 'user_ssh_keys'
2522 __tablename__ = 'user_ssh_keys'
2523 __table_args__ = (
2523 __table_args__ = (
2524 Index('usk_fingerprint_idx', 'fingerprint'),
2524 Index('usk_fingerprint_idx', 'fingerprint'),
2525 UniqueConstraint('fingerprint'),
2525 UniqueConstraint('fingerprint'),
2526 _table_args_default_dict
2526 _table_args_default_dict
2527 )
2527 )
2528 __mapper_args__ = {}
2528 __mapper_args__ = {}
2529
2529
2530 user_ssh_key_id = Column(Integer(), primary_key=True)
2530 user_ssh_key_id = Column(Integer(), primary_key=True)
2531 user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False)
2531 user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False)
2532 _public_key = Column('public_key', UnicodeText(), nullable=False)
2532 _public_key = Column('public_key', UnicodeText(), nullable=False)
2533 description = Column(UnicodeText(), nullable=False)
2533 description = Column(UnicodeText(), nullable=False)
2534 fingerprint = Column(String(255), nullable=False, unique=True)
2534 fingerprint = Column(String(255), nullable=False, unique=True)
2535 created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2535 created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2536 last_seen = Column(DateTime(timezone=False), nullable=True)
2536 last_seen = Column(DateTime(timezone=False), nullable=True)
2537
2537
2538 user = relationship('User')
2538 user = relationship('User')
2539
2539
2540 @property
2540 @property
2541 def public_key(self):
2541 def public_key(self):
2542 return self._public_key
2542 return self._public_key
2543
2543
2544 @public_key.setter
2544 @public_key.setter
2545 def public_key(self, full_key):
2545 def public_key(self, full_key):
2546 # the full public key is too long to be suitable as database key - instead,
2546 # the full public key is too long to be suitable as database key - instead,
2547 # use fingerprints similar to 'ssh-keygen -E sha256 -lf ~/.ssh/id_rsa.pub'
2547 # use fingerprints similar to 'ssh-keygen -E sha256 -lf ~/.ssh/id_rsa.pub'
2548 self._public_key = full_key
2548 self._public_key = full_key
2549 enc_key = safe_bytes(full_key.split(" ")[1])
2549 enc_key = safe_bytes(full_key.split(" ")[1])
2550 self.fingerprint = base64.b64encode(hashlib.sha256(base64.b64decode(enc_key)).digest()).replace(b'\n', b'').rstrip(b'=').decode()
2550 self.fingerprint = base64.b64encode(hashlib.sha256(base64.b64decode(enc_key)).digest()).replace(b'\n', b'').rstrip(b'=').decode()
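For reference, the stored fingerprint can be reproduced outside the model with nothing but hashlib and base64; a self-contained sketch mirroring the setter above (the key line passed in is whatever an authorized_keys entry or 'ssh-keygen -y' provides):

    import base64
    import hashlib

    def ssh_key_fingerprint(public_key_line):
        # "ssh-rsa AAAAB3... comment" -> SHA256 fingerprint as base64 without padding,
        # the same form printed by 'ssh-keygen -E sha256 -lf'
        key_body = base64.b64decode(public_key_line.split(" ")[1])
        digest = hashlib.sha256(key_body).digest()
        return base64.b64encode(digest).rstrip(b'=').decode()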
@@ -1,236 +1,236 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 # This program is free software: you can redistribute it and/or modify
2 # This program is free software: you can redistribute it and/or modify
3 # it under the terms of the GNU General Public License as published by
3 # it under the terms of the GNU General Public License as published by
4 # the Free Software Foundation, either version 3 of the License, or
4 # the Free Software Foundation, either version 3 of the License, or
5 # (at your option) any later version.
5 # (at your option) any later version.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU General Public License
12 # You should have received a copy of the GNU General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 """
14 """
15 kallithea.model.gist
15 kallithea.model.gist
16 ~~~~~~~~~~~~~~~~~~~~
16 ~~~~~~~~~~~~~~~~~~~~
17
17
18 gist model for Kallithea
18 gist model for Kallithea
19
19
20 This file was forked by the Kallithea project in July 2014.
20 This file was forked by the Kallithea project in July 2014.
21 Original author and date, and relevant copyright and licensing information is below:
21 Original author and date, and relevant copyright and licensing information is below:
22 :created_on: May 9, 2013
22 :created_on: May 9, 2013
23 :author: marcink
23 :author: marcink
24 :copyright: (c) 2013 RhodeCode GmbH, and others.
24 :copyright: (c) 2013 RhodeCode GmbH, and others.
25 :license: GPLv3, see LICENSE.md for more details.
25 :license: GPLv3, see LICENSE.md for more details.
26 """
26 """
27
27
28 import logging
28 import logging
29 import os
29 import os
30 import random
30 import random
31 import shutil
31 import shutil
32 import time
32 import time
33 import traceback
33 import traceback
34
34
35 from kallithea.lib import ext_json
35 from kallithea.lib import ext_json
36 from kallithea.lib.utils2 import AttributeDict, ascii_bytes, safe_int, time_to_datetime
36 from kallithea.lib.utils2 import AttributeDict, ascii_bytes, safe_int, time_to_datetime
37 from kallithea.model.db import Gist, Session, User
37 from kallithea.model.db import Gist, Session, User
38 from kallithea.model.repo import RepoModel
38 from kallithea.model.repo import RepoModel
39 from kallithea.model.scm import ScmModel
39 from kallithea.model.scm import ScmModel
40
40
41
41
42 log = logging.getLogger(__name__)
42 log = logging.getLogger(__name__)
43
43
44 GIST_STORE_LOC = '.rc_gist_store'
44 GIST_STORE_LOC = '.rc_gist_store'
45 GIST_METADATA_FILE = '.rc_gist_metadata'
45 GIST_METADATA_FILE = '.rc_gist_metadata'
46
46
47
47
48 def make_gist_access_id():
48 def make_gist_access_id():
49 """Generate a random, URL safe, almost certainly unique gist identifier."""
49 """Generate a random, URL safe, almost certainly unique gist identifier."""
50 rnd = random.SystemRandom() # use cryptographically secure system PRNG
50 rnd = random.SystemRandom() # use cryptographically secure system PRNG
51 alphabet = '23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz'
51 alphabet = '23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz'
52 length = 20
52 length = 20
53 return u''.join(rnd.choice(alphabet) for _ in range(length))
53 return u''.join(rnd.choice(alphabet) for _ in range(length))
54
54
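The alphabet above deliberately omits easily confused characters such as 0, 1, O, I, i and o, which leaves 56 symbols; at length 20 that gives on the order of 10^35 possible identifiers, so collisions are practically impossible. An illustrative call (the value will of course differ on every invocation):

    gist_access_id = make_gist_access_id()
    # e.g. 'hK4cPq7tWm2ZxEfGbJdN' - 20 characters drawn from the 56-character alphabet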
55
55
56 class GistModel(object):
56 class GistModel(object):
57
57
58 def __delete_gist(self, gist):
58 def __delete_gist(self, gist):
59 """
59 """
60 removes gist from filesystem
60 removes gist from filesystem
61
61
62 :param gist: gist object
62 :param gist: gist object
63 """
63 """
64 root_path = RepoModel().repos_path
64 root_path = RepoModel().repos_path
65 rm_path = os.path.join(root_path, GIST_STORE_LOC, gist.gist_access_id)
65 rm_path = os.path.join(root_path, GIST_STORE_LOC, gist.gist_access_id)
66 log.info("Removing %s", rm_path)
66 log.info("Removing %s", rm_path)
67 shutil.rmtree(rm_path)
67 shutil.rmtree(rm_path)
68
68
69 def _store_metadata(self, repo, gist_id, gist_access_id, user_id, gist_type,
69 def _store_metadata(self, repo, gist_id, gist_access_id, user_id, gist_type,
70 gist_expires):
70 gist_expires):
71 """
71 """
72 store metadata inside the gist; this can later be used for imports
72 store metadata inside the gist; this can later be used for imports
73 or gist identification
73 or gist identification
74 """
74 """
75 metadata = {
75 metadata = {
76 'metadata_version': '1',
76 'metadata_version': '1',
77 'gist_db_id': gist_id,
77 'gist_db_id': gist_id,
78 'gist_access_id': gist_access_id,
78 'gist_access_id': gist_access_id,
79 'gist_owner_id': user_id,
79 'gist_owner_id': user_id,
80 'gist_type': gist_type,
80 'gist_type': gist_type,
81 'gist_expires': gist_expires,
81 'gist_expires': gist_expires,
82 'gist_updated': time.time(),
82 'gist_updated': time.time(),
83 }
83 }
84 with open(os.path.join(repo.path, '.hg', GIST_METADATA_FILE), 'wb') as f:
84 with open(os.path.join(repo.path, '.hg', GIST_METADATA_FILE), 'wb') as f:
85 f.write(ascii_bytes(ext_json.dumps(metadata)))
85 f.write(ascii_bytes(ext_json.dumps(metadata)))
86
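No counterpart for reading this metadata back appears in this hunk; if one were needed, a minimal sketch could look like the following, assuming ext_json exposes loads like the standard json module (the helper name _read_metadata is hypothetical):

    def _read_metadata(self, repo):
        # counterpart of _store_metadata: load the JSON blob written next to the repo data
        with open(os.path.join(repo.path, '.hg', GIST_METADATA_FILE), 'rb') as f:
            return ext_json.loads(f.read().decode())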
86
87 def get_gist(self, gist):
87 def get_gist(self, gist):
88 return Gist.guess_instance(gist)
88 return Gist.guess_instance(gist)
89
89
90 def get_gist_files(self, gist_access_id, revision=None):
90 def get_gist_files(self, gist_access_id, revision=None):
91 """
91 """
92 Get files for given gist
92 Get files for given gist
93
93
94 :param gist_access_id:
94 :param gist_access_id:
95 """
95 """
96 repo = Gist.get_by_access_id(gist_access_id)
96 repo = Gist.get_by_access_id(gist_access_id)
97 cs = repo.scm_instance.get_changeset(revision)
97 cs = repo.scm_instance.get_changeset(revision)
98 return cs, [n for n in cs.get_node('/')]
98 return cs, [n for n in cs.get_node('/')]
99
99
100 def create(self, description, owner, ip_addr, gist_mapping,
100 def create(self, description, owner, ip_addr, gist_mapping,
101 gist_type=Gist.GIST_PUBLIC, lifetime=-1):
101 gist_type=Gist.GIST_PUBLIC, lifetime=-1):
102 """
102 """
103
103
104 :param description: description of the gist
104 :param description: description of the gist
105 :param owner: user who created this gist
105 :param owner: user who created this gist
106 :param gist_mapping: mapping {filename:{'content':content},...}
106 :param gist_mapping: mapping {filename:{'content':content},...}
107 :param gist_type: type of gist private/public
107 :param gist_type: type of gist private/public
108 :param lifetime: in minutes, -1 == forever
108 :param lifetime: in minutes, -1 == forever
109 """
109 """
110 owner = User.guess_instance(owner)
110 owner = User.guess_instance(owner)
111 gist_access_id = make_gist_access_id()
111 gist_access_id = make_gist_access_id()
112 lifetime = safe_int(lifetime, -1)
112 lifetime = safe_int(lifetime, -1)
113 gist_expires = time.time() + (lifetime * 60) if lifetime != -1 else -1
113 gist_expires = time.time() + (lifetime * 60) if lifetime != -1 else -1
114 log.debug('set GIST expiration date to: %s',
114 log.debug('set GIST expiration date to: %s',
115 time_to_datetime(gist_expires)
115 time_to_datetime(gist_expires)
116 if gist_expires != -1 else 'forever')
116 if gist_expires != -1 else 'forever')
117 # create the Database version
117 # create the Database version
118 gist = Gist()
118 gist = Gist()
119 gist.gist_description = description
119 gist.gist_description = description
120 gist.gist_access_id = gist_access_id
120 gist.gist_access_id = gist_access_id
121 gist.owner_id = owner.user_id
121 gist.owner_id = owner.user_id
122 gist.gist_expires = gist_expires
122 gist.gist_expires = gist_expires
123 gist.gist_type = gist_type
123 gist.gist_type = gist_type
124 Session().add(gist)
124 Session().add(gist)
125 Session().flush() # make database assign gist.gist_id
125 Session().flush() # make database assign gist.gist_id
126 if gist_type == Gist.GIST_PUBLIC:
126 if gist_type == Gist.GIST_PUBLIC:
127 # use DB ID for easy to use GIST ID
127 # use DB ID for easy to use GIST ID
128 gist.gist_access_id = unicode(gist.gist_id)
128 gist.gist_access_id = str(gist.gist_id)
129
129
130 log.debug('Creating new %s GIST repo %s', gist_type, gist.gist_access_id)
130 log.debug('Creating new %s GIST repo %s', gist_type, gist.gist_access_id)
131 repo = RepoModel()._create_filesystem_repo(
131 repo = RepoModel()._create_filesystem_repo(
132 repo_name=gist.gist_access_id, repo_type='hg', repo_group=GIST_STORE_LOC)
132 repo_name=gist.gist_access_id, repo_type='hg', repo_group=GIST_STORE_LOC)
133
133
134 processed_mapping = {}
134 processed_mapping = {}
135 for filename in gist_mapping:
135 for filename in gist_mapping:
136 if filename != os.path.basename(filename):
136 if filename != os.path.basename(filename):
137 raise Exception('Filename cannot be inside a directory')
137 raise Exception('Filename cannot be inside a directory')
138
138
139 content = gist_mapping[filename]['content']
139 content = gist_mapping[filename]['content']
140 # TODO: expand support for setting explicit lexers
140 # TODO: expand support for setting explicit lexers
141 # if lexer is None:
141 # if lexer is None:
142 # try:
142 # try:
143 # guess_lexer = pygments.lexers.guess_lexer_for_filename
143 # guess_lexer = pygments.lexers.guess_lexer_for_filename
144 # lexer = guess_lexer(filename,content)
144 # lexer = guess_lexer(filename,content)
145 # except pygments.util.ClassNotFound:
145 # except pygments.util.ClassNotFound:
146 # lexer = 'text'
146 # lexer = 'text'
147 processed_mapping[filename] = {'content': content}
147 processed_mapping[filename] = {'content': content}
148
148
149 # now create single multifile commit
149 # now create single multifile commit
150 message = 'added file'
150 message = 'added file'
151 message += 's: ' if len(processed_mapping) > 1 else ': '
151 message += 's: ' if len(processed_mapping) > 1 else ': '
152 message += ', '.join([x for x in processed_mapping])
152 message += ', '.join([x for x in processed_mapping])
153
153
154 # fake Kallithea Repository object
154 # fake Kallithea Repository object
155 fake_repo = AttributeDict(dict(
155 fake_repo = AttributeDict(dict(
156 repo_name=os.path.join(GIST_STORE_LOC, gist.gist_access_id),
156 repo_name=os.path.join(GIST_STORE_LOC, gist.gist_access_id),
157 scm_instance_no_cache=lambda: repo,
157 scm_instance_no_cache=lambda: repo,
158 ))
158 ))
159 ScmModel().create_nodes(
159 ScmModel().create_nodes(
160 user=owner.user_id,
160 user=owner.user_id,
161 ip_addr=ip_addr,
161 ip_addr=ip_addr,
162 repo=fake_repo,
162 repo=fake_repo,
163 message=message,
163 message=message,
164 nodes=processed_mapping,
164 nodes=processed_mapping,
165 trigger_push_hook=False
165 trigger_push_hook=False
166 )
166 )
167
167
168 self._store_metadata(repo, gist.gist_id, gist.gist_access_id,
168 self._store_metadata(repo, gist.gist_id, gist.gist_access_id,
169 owner.user_id, gist.gist_type, gist.gist_expires)
169 owner.user_id, gist.gist_type, gist.gist_expires)
170 return gist
170 return gist
171
171
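(Editorial aside, not part of this changeset: a hedged usage sketch of the create() API documented above. The caller, 'current_user' and the final commit are assumptions about the calling code, not something this diff shows.)

    # Illustrative only: create a public gist that expires in 60 minutes.
    gist = GistModel().create(
        description='scratch snippet',
        owner=current_user,                      # int, username or User instance
        ip_addr='127.0.0.1',
        gist_mapping={'snippet.py': {'content': 'print("hello")'}},
        gist_type=Gist.GIST_PUBLIC,
        lifetime=60,                             # minutes; -1 means never expire
    )
    Session().commit()                           # assumption: the caller commits
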
172 def delete(self, gist, fs_remove=True):
172 def delete(self, gist, fs_remove=True):
173 gist = Gist.guess_instance(gist)
173 gist = Gist.guess_instance(gist)
174 try:
174 try:
175 Session().delete(gist)
175 Session().delete(gist)
176 if fs_remove:
176 if fs_remove:
177 self.__delete_gist(gist)
177 self.__delete_gist(gist)
178 else:
178 else:
179 log.debug('skipping removal from filesystem')
179 log.debug('skipping removal from filesystem')
180 except Exception:
180 except Exception:
181 log.error(traceback.format_exc())
181 log.error(traceback.format_exc())
182 raise
182 raise
183
183
184 def update(self, gist, description, owner, ip_addr, gist_mapping, gist_type,
184 def update(self, gist, description, owner, ip_addr, gist_mapping, gist_type,
185 lifetime):
185 lifetime):
186 gist = Gist.guess_instance(gist)
186 gist = Gist.guess_instance(gist)
187 gist_repo = gist.scm_instance
187 gist_repo = gist.scm_instance
188
188
189 lifetime = safe_int(lifetime, -1)
189 lifetime = safe_int(lifetime, -1)
190 if lifetime == 0: # preserve old value
190 if lifetime == 0: # preserve old value
191 gist_expires = gist.gist_expires
191 gist_expires = gist.gist_expires
192 else:
192 else:
193 gist_expires = time.time() + (lifetime * 60) if lifetime != -1 else -1
193 gist_expires = time.time() + (lifetime * 60) if lifetime != -1 else -1
194
194
195 # calculate operation type based on given data
195 # calculate operation type based on given data
196 gist_mapping_op = {}
196 gist_mapping_op = {}
197 for k, v in gist_mapping.items():
197 for k, v in gist_mapping.items():
198 # add, mod, del
198 # add, mod, del
199 if not v['org_filename'] and v['filename']:
199 if not v['org_filename'] and v['filename']:
200 op = 'add'
200 op = 'add'
201 elif v['org_filename'] and not v['filename']:
201 elif v['org_filename'] and not v['filename']:
202 op = 'del'
202 op = 'del'
203 else:
203 else:
204 op = 'mod'
204 op = 'mod'
205
205
206 v['op'] = op
206 v['op'] = op
207 gist_mapping_op[k] = v
207 gist_mapping_op[k] = v
208
208
209 gist.gist_description = description
209 gist.gist_description = description
210 gist.gist_expires = gist_expires
210 gist.gist_expires = gist_expires
211 gist.owner = owner
211 gist.owner = owner
212 gist.gist_type = gist_type
212 gist.gist_type = gist_type
213
213
214 message = 'updated file'
214 message = 'updated file'
215 message += 's: ' if len(gist_mapping) > 1 else ': '
215 message += 's: ' if len(gist_mapping) > 1 else ': '
216 message += ', '.join([x for x in gist_mapping])
216 message += ', '.join([x for x in gist_mapping])
217
217
218 # fake Kallithea Repository object
218 # fake Kallithea Repository object
219 fake_repo = AttributeDict(dict(
219 fake_repo = AttributeDict(dict(
220 repo_name=os.path.join(GIST_STORE_LOC, gist.gist_access_id),
220 repo_name=os.path.join(GIST_STORE_LOC, gist.gist_access_id),
221 scm_instance_no_cache=lambda: gist_repo,
221 scm_instance_no_cache=lambda: gist_repo,
222 ))
222 ))
223
223
224 self._store_metadata(gist_repo, gist.gist_id, gist.gist_access_id,
224 self._store_metadata(gist_repo, gist.gist_id, gist.gist_access_id,
225 owner.user_id, gist.gist_type, gist.gist_expires)
225 owner.user_id, gist.gist_type, gist.gist_expires)
226
226
227 ScmModel().update_nodes(
227 ScmModel().update_nodes(
228 user=owner.user_id,
228 user=owner.user_id,
229 ip_addr=ip_addr,
229 ip_addr=ip_addr,
230 repo=fake_repo,
230 repo=fake_repo,
231 message=message,
231 message=message,
232 nodes=gist_mapping_op,
232 nodes=gist_mapping_op,
233 trigger_push_hook=False
233 trigger_push_hook=False
234 )
234 )
235
235
236 return gist
236 return gist
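(Editorial aside, not part of this changeset: to make the add/mod/del detection in update() above easier to follow, a hedged example of the gist_mapping shape it expects. Filenames and contents are illustrative.)

    # org_filename empty + filename set   -> 'add'
    # org_filename set   + filename empty -> 'del'
    # both set                            -> 'mod'
    gist_mapping = {
        'new.py':     {'org_filename': '',           'filename': 'new.py',     'content': 'print(1)'},
        'old.py':     {'org_filename': 'old.py',     'filename': '',           'content': ''},
        'README.rst': {'org_filename': 'README.rst', 'filename': 'README.rst', 'content': 'updated'},
    }
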
@@ -1,230 +1,230 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 # This program is free software: you can redistribute it and/or modify
2 # This program is free software: you can redistribute it and/or modify
3 # it under the terms of the GNU General Public License as published by
3 # it under the terms of the GNU General Public License as published by
4 # the Free Software Foundation, either version 3 of the License, or
4 # the Free Software Foundation, either version 3 of the License, or
5 # (at your option) any later version.
5 # (at your option) any later version.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU General Public License
12 # You should have received a copy of the GNU General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 """
14 """
15 kallithea.model.notification
15 kallithea.model.notification
16 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
16 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
17
17
18 Model for notifications
18 Model for notifications
19
19
20
20
21 This file was forked by the Kallithea project in July 2014.
21 This file was forked by the Kallithea project in July 2014.
22 Original author and date, and relevant copyright and licensing information is below:
22 Original author and date, and relevant copyright and licensing information is below:
23 :created_on: Nov 20, 2011
23 :created_on: Nov 20, 2011
24 :author: marcink
24 :author: marcink
25 :copyright: (c) 2013 RhodeCode GmbH, and others.
25 :copyright: (c) 2013 RhodeCode GmbH, and others.
26 :license: GPLv3, see LICENSE.md for more details.
26 :license: GPLv3, see LICENSE.md for more details.
27 """
27 """
28
28
29 import datetime
29 import datetime
30 import logging
30 import logging
31
31
32 from tg import app_globals
32 from tg import app_globals
33 from tg import tmpl_context as c
33 from tg import tmpl_context as c
34 from tg.i18n import ugettext as _
34 from tg.i18n import ugettext as _
35
35
36 import kallithea
36 import kallithea
37 from kallithea.lib import helpers as h
37 from kallithea.lib import helpers as h
38 from kallithea.model.db import User
38 from kallithea.model.db import User
39
39
40
40
41 log = logging.getLogger(__name__)
41 log = logging.getLogger(__name__)
42
42
43
43
44 class NotificationModel(object):
44 class NotificationModel(object):
45
45
46 TYPE_CHANGESET_COMMENT = u'cs_comment'
46 TYPE_CHANGESET_COMMENT = u'cs_comment'
47 TYPE_MESSAGE = u'message'
47 TYPE_MESSAGE = u'message'
48 TYPE_MENTION = u'mention' # not used
48 TYPE_MENTION = u'mention' # not used
49 TYPE_REGISTRATION = u'registration'
49 TYPE_REGISTRATION = u'registration'
50 TYPE_PULL_REQUEST = u'pull_request'
50 TYPE_PULL_REQUEST = u'pull_request'
51 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
51 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
52
52
53 def create(self, created_by, subject, body, recipients=None,
53 def create(self, created_by, subject, body, recipients=None,
54 type_=TYPE_MESSAGE, with_email=True,
54 type_=TYPE_MESSAGE, with_email=True,
55 email_kwargs=None, repo_name=None):
55 email_kwargs=None, repo_name=None):
56 """
56 """
57
57
58 Creates notification of given type
58 Creates notification of given type
59
59
60 :param created_by: int, str or User instance. User who created this
60 :param created_by: int, str or User instance. User who created this
61 notification
61 notification
62 :param subject: subject of the notification and of the email sent out
62 :param subject: subject of the notification and of the email sent out
63 :param body: plain text body of the notification message
63 :param body: plain text body of the notification message
64 :param recipients: list of int, str or User objects, when None
64 :param recipients: list of int, str or User objects, when None
65 is given send to all admins
65 is given send to all admins
66 :param type_: type of notification
66 :param type_: type of notification
67 :param with_email: send email with this notification
67 :param with_email: send email with this notification
68 :param email_kwargs: additional dict to pass as args to email template
68 :param email_kwargs: additional dict to pass as args to email template
69 """
69 """
70 from kallithea.lib.celerylib import tasks
70 from kallithea.lib.celerylib import tasks
71 email_kwargs = email_kwargs or {}
71 email_kwargs = email_kwargs or {}
72 if recipients and not getattr(recipients, '__iter__', False):
72 if recipients and not getattr(recipients, '__iter__', False):
73 raise Exception('recipients must be a list or iterable')
73 raise Exception('recipients must be a list or iterable')
74
74
75 created_by_obj = User.guess_instance(created_by)
75 created_by_obj = User.guess_instance(created_by)
76
76
77 recipients_objs = set()
77 recipients_objs = set()
78 if recipients:
78 if recipients:
79 for u in recipients:
79 for u in recipients:
80 obj = User.guess_instance(u)
80 obj = User.guess_instance(u)
81 if obj is not None:
81 if obj is not None:
82 recipients_objs.add(obj)
82 recipients_objs.add(obj)
83 else:
83 else:
84 # TODO: inform user that requested operation couldn't be completed
84 # TODO: inform user that requested operation couldn't be completed
85 log.error('cannot email unknown user %r', u)
85 log.error('cannot email unknown user %r', u)
86 log.debug('sending notifications %s to %s',
86 log.debug('sending notifications %s to %s',
87 type_, recipients_objs
87 type_, recipients_objs
88 )
88 )
89 elif recipients is None:
89 elif recipients is None:
90 # empty recipients means to all admins
90 # empty recipients means to all admins
91 recipients_objs = User.query().filter(User.admin == True).all()
91 recipients_objs = User.query().filter(User.admin == True).all()
92 log.debug('sending notifications %s to admins: %s',
92 log.debug('sending notifications %s to admins: %s',
93 type_, recipients_objs
93 type_, recipients_objs
94 )
94 )
95 #else: silently skip notification mails?
95 #else: silently skip notification mails?
96
96
97 if not with_email:
97 if not with_email:
98 return
98 return
99
99
100 headers = {}
100 headers = {}
101 headers['X-Kallithea-Notification-Type'] = type_
101 headers['X-Kallithea-Notification-Type'] = type_
102 if 'threading' in email_kwargs:
102 if 'threading' in email_kwargs:
103 headers['References'] = ' '.join('<%s>' % x for x in email_kwargs['threading'])
103 headers['References'] = ' '.join('<%s>' % x for x in email_kwargs['threading'])
104
104
105 # this is passed into template
105 # this is passed into template
106 created_on = h.fmt_date(datetime.datetime.now())
106 created_on = h.fmt_date(datetime.datetime.now())
107 html_kwargs = {
107 html_kwargs = {
108 'subject': subject,
108 'subject': subject,
109 'body': h.render_w_mentions(body, repo_name),
109 'body': h.render_w_mentions(body, repo_name),
110 'when': created_on,
110 'when': created_on,
111 'user': created_by_obj.username,
111 'user': created_by_obj.username,
112 }
112 }
113
113
114 txt_kwargs = {
114 txt_kwargs = {
115 'subject': subject,
115 'subject': subject,
116 'body': body,
116 'body': body,
117 'when': created_on,
117 'when': created_on,
118 'user': created_by_obj.username,
118 'user': created_by_obj.username,
119 }
119 }
120
120
121 html_kwargs.update(email_kwargs)
121 html_kwargs.update(email_kwargs)
122 txt_kwargs.update(email_kwargs)
122 txt_kwargs.update(email_kwargs)
123 email_subject = EmailNotificationModel() \
123 email_subject = EmailNotificationModel() \
124 .get_email_description(type_, **txt_kwargs)
124 .get_email_description(type_, **txt_kwargs)
125 email_txt_body = EmailNotificationModel() \
125 email_txt_body = EmailNotificationModel() \
126 .get_email_tmpl(type_, 'txt', **txt_kwargs)
126 .get_email_tmpl(type_, 'txt', **txt_kwargs)
127 email_html_body = EmailNotificationModel() \
127 email_html_body = EmailNotificationModel() \
128 .get_email_tmpl(type_, 'html', **html_kwargs)
128 .get_email_tmpl(type_, 'html', **html_kwargs)
129
129
130 # don't send email to person who created this comment
130 # don't send email to person who created this comment
131 rec_objs = set(recipients_objs).difference(set([created_by_obj]))
131 rec_objs = set(recipients_objs).difference(set([created_by_obj]))
132
132
133 # send email with notification to all other participants
133 # send email with notification to all other participants
134 for rec in rec_objs:
134 for rec in rec_objs:
135 tasks.send_email([rec.email], email_subject, email_txt_body,
135 tasks.send_email([rec.email], email_subject, email_txt_body,
136 email_html_body, headers, author=created_by_obj)
136 email_html_body, headers, author=created_by_obj)
137
137
138
138
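(Editorial aside, not part of this changeset: a hedged illustration of the notification API above. 'request_user', 'user_a' and 'user_b' are placeholder names for the caller's objects.)

    # Illustrative only: send a plain message notification to two users.
    NotificationModel().create(
        created_by=request_user,             # int, str or User instance
        subject='maintenance window',
        body='the server will be down for ten minutes tonight',
        recipients=[user_a, user_b],         # None would mean "send to all admins"
        type_=NotificationModel.TYPE_MESSAGE,
    )
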
139 class EmailNotificationModel(object):
139 class EmailNotificationModel(object):
140
140
141 TYPE_CHANGESET_COMMENT = NotificationModel.TYPE_CHANGESET_COMMENT
141 TYPE_CHANGESET_COMMENT = NotificationModel.TYPE_CHANGESET_COMMENT
142 TYPE_MESSAGE = NotificationModel.TYPE_MESSAGE # only used for testing
142 TYPE_MESSAGE = NotificationModel.TYPE_MESSAGE # only used for testing
143 # NotificationModel.TYPE_MENTION is not used
143 # NotificationModel.TYPE_MENTION is not used
144 TYPE_PASSWORD_RESET = 'password_link'
144 TYPE_PASSWORD_RESET = 'password_link'
145 TYPE_REGISTRATION = NotificationModel.TYPE_REGISTRATION
145 TYPE_REGISTRATION = NotificationModel.TYPE_REGISTRATION
146 TYPE_PULL_REQUEST = NotificationModel.TYPE_PULL_REQUEST
146 TYPE_PULL_REQUEST = NotificationModel.TYPE_PULL_REQUEST
147 TYPE_PULL_REQUEST_COMMENT = NotificationModel.TYPE_PULL_REQUEST_COMMENT
147 TYPE_PULL_REQUEST_COMMENT = NotificationModel.TYPE_PULL_REQUEST_COMMENT
148 TYPE_DEFAULT = 'default'
148 TYPE_DEFAULT = 'default'
149
149
150 def __init__(self):
150 def __init__(self):
151 super(EmailNotificationModel, self).__init__()
151 super(EmailNotificationModel, self).__init__()
152 self._template_root = kallithea.CONFIG['paths']['templates'][0]
152 self._template_root = kallithea.CONFIG['paths']['templates'][0]
153 self._tmpl_lookup = app_globals.mako_lookup
153 self._tmpl_lookup = app_globals.mako_lookup
154 self.email_types = {
154 self.email_types = {
155 self.TYPE_CHANGESET_COMMENT: 'changeset_comment',
155 self.TYPE_CHANGESET_COMMENT: 'changeset_comment',
156 self.TYPE_PASSWORD_RESET: 'password_reset',
156 self.TYPE_PASSWORD_RESET: 'password_reset',
157 self.TYPE_REGISTRATION: 'registration',
157 self.TYPE_REGISTRATION: 'registration',
158 self.TYPE_DEFAULT: 'default',
158 self.TYPE_DEFAULT: 'default',
159 self.TYPE_PULL_REQUEST: 'pull_request',
159 self.TYPE_PULL_REQUEST: 'pull_request',
160 self.TYPE_PULL_REQUEST_COMMENT: 'pull_request_comment',
160 self.TYPE_PULL_REQUEST_COMMENT: 'pull_request_comment',
161 }
161 }
162 self._subj_map = {
162 self._subj_map = {
163 self.TYPE_CHANGESET_COMMENT: _('[Comment] %(repo_name)s changeset %(short_id)s "%(message_short)s" on %(branch)s'),
163 self.TYPE_CHANGESET_COMMENT: _('[Comment] %(repo_name)s changeset %(short_id)s "%(message_short)s" on %(branch)s'),
164 self.TYPE_MESSAGE: 'Test Message',
164 self.TYPE_MESSAGE: 'Test Message',
165 # self.TYPE_PASSWORD_RESET
165 # self.TYPE_PASSWORD_RESET
166 self.TYPE_REGISTRATION: _('New user %(new_username)s registered'),
166 self.TYPE_REGISTRATION: _('New user %(new_username)s registered'),
167 # self.TYPE_DEFAULT
167 # self.TYPE_DEFAULT
168 self.TYPE_PULL_REQUEST: _('[Review] %(repo_name)s PR %(pr_nice_id)s "%(pr_title_short)s" from %(pr_source_branch)s by %(pr_owner_username)s'),
168 self.TYPE_PULL_REQUEST: _('[Review] %(repo_name)s PR %(pr_nice_id)s "%(pr_title_short)s" from %(pr_source_branch)s by %(pr_owner_username)s'),
169 self.TYPE_PULL_REQUEST_COMMENT: _('[Comment] %(repo_name)s PR %(pr_nice_id)s "%(pr_title_short)s" from %(pr_source_branch)s by %(pr_owner_username)s'),
169 self.TYPE_PULL_REQUEST_COMMENT: _('[Comment] %(repo_name)s PR %(pr_nice_id)s "%(pr_title_short)s" from %(pr_source_branch)s by %(pr_owner_username)s'),
170 }
170 }
171
171
172 def get_email_description(self, type_, **kwargs):
172 def get_email_description(self, type_, **kwargs):
173 """
173 """
174 return subject for email based on given type
174 return subject for email based on given type
175 """
175 """
176 tmpl = self._subj_map[type_]
176 tmpl = self._subj_map[type_]
177 try:
177 try:
178 subj = tmpl % kwargs
178 subj = tmpl % kwargs
179 except KeyError as e:
179 except KeyError as e:
180 log.error('error generating email subject for %r from %s: %s', type_, ', '.join(self._subj_map), e)
180 log.error('error generating email subject for %r from %s: %s', type_, ', '.join(self._subj_map), e)
181 raise
181 raise
182 # gmail doesn't do proper threading but will ignore leading square
182 # gmail doesn't do proper threading but will ignore leading square
183 # bracket content ... so that is where we put status info
183 # bracket content ... so that is where we put status info
184 bracket_tags = []
184 bracket_tags = []
185 status_change = kwargs.get('status_change')
185 status_change = kwargs.get('status_change')
186 if status_change:
186 if status_change:
187 bracket_tags.append(unicode(status_change)) # apply unicode to evaluate LazyString before .join
187 bracket_tags.append(str(status_change)) # apply str to evaluate LazyString before .join
188 if kwargs.get('closing_pr'):
188 if kwargs.get('closing_pr'):
189 bracket_tags.append(_('Closing'))
189 bracket_tags.append(_('Closing'))
190 if bracket_tags:
190 if bracket_tags:
191 if subj.startswith('['):
191 if subj.startswith('['):
192 subj = '[' + ', '.join(bracket_tags) + ': ' + subj[1:]
192 subj = '[' + ', '.join(bracket_tags) + ': ' + subj[1:]
193 else:
193 else:
194 subj = '[' + ', '.join(bracket_tags) + '] ' + subj
194 subj = '[' + ', '.join(bracket_tags) + '] ' + subj
195 return subj
195 return subj
196
196
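(Editorial aside, not part of this changeset: a small worked example of the bracket-tag merging above, with illustrative values. When the rendered subject already starts with '[', the tags are folded into the existing bracket.)

    subj = '[Comment] repo/x changeset abc123 "fix typo" on default'
    bracket_tags = ['Under Review']                 # e.g. from status_change
    subj = '[' + ', '.join(bracket_tags) + ': ' + subj[1:]
    print(subj)  # [Under Review: Comment] repo/x changeset abc123 "fix typo" on default
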
197 def get_email_tmpl(self, type_, content_type, **kwargs):
197 def get_email_tmpl(self, type_, content_type, **kwargs):
198 """
198 """
199 return generated template for email based on given type
199 return generated template for email based on given type
200 """
200 """
201
201
202 base = 'email_templates/' + self.email_types.get(type_, self.email_types[self.TYPE_DEFAULT]) + '.' + content_type
202 base = 'email_templates/' + self.email_types.get(type_, self.email_types[self.TYPE_DEFAULT]) + '.' + content_type
203 email_template = self._tmpl_lookup.get_template(base)
203 email_template = self._tmpl_lookup.get_template(base)
204 # translator and helpers inject
204 # translator and helpers inject
205 _kwargs = {'_': _,
205 _kwargs = {'_': _,
206 'h': h,
206 'h': h,
207 'c': c}
207 'c': c}
208 _kwargs.update(kwargs)
208 _kwargs.update(kwargs)
209 if content_type == 'html':
209 if content_type == 'html':
210 _kwargs.update({
210 _kwargs.update({
211 "color_text": "#202020",
211 "color_text": "#202020",
212 "color_emph": "#395fa0",
212 "color_emph": "#395fa0",
213 "color_link": "#395fa0",
213 "color_link": "#395fa0",
214 "color_border": "#ddd",
214 "color_border": "#ddd",
215 "color_background_grey": "#f9f9f9",
215 "color_background_grey": "#f9f9f9",
216 "color_button": "#395fa0",
216 "color_button": "#395fa0",
217 "monospace_style": "font-family:Lucida Console,Consolas,Monaco,Inconsolata,Liberation Mono,monospace",
217 "monospace_style": "font-family:Lucida Console,Consolas,Monaco,Inconsolata,Liberation Mono,monospace",
218 "sans_style": "font-family:Helvetica,Arial,sans-serif",
218 "sans_style": "font-family:Helvetica,Arial,sans-serif",
219 })
219 })
220 _kwargs.update({
220 _kwargs.update({
221 "default_style": "%(sans_style)s;font-weight:200;font-size:14px;line-height:17px;color:%(color_text)s" % _kwargs,
221 "default_style": "%(sans_style)s;font-weight:200;font-size:14px;line-height:17px;color:%(color_text)s" % _kwargs,
222 "comment_style": "%(monospace_style)s;white-space:pre-wrap" % _kwargs,
222 "comment_style": "%(monospace_style)s;white-space:pre-wrap" % _kwargs,
223 "data_style": "border:%(color_border)s 1px solid;background:%(color_background_grey)s" % _kwargs,
223 "data_style": "border:%(color_border)s 1px solid;background:%(color_background_grey)s" % _kwargs,
224 "emph_style": "font-weight:600;color:%(color_emph)s" % _kwargs,
224 "emph_style": "font-weight:600;color:%(color_emph)s" % _kwargs,
225 "link_style": "color:%(color_link)s;text-decoration:none" % _kwargs,
225 "link_style": "color:%(color_link)s;text-decoration:none" % _kwargs,
226 "link_text_style": "color:%(color_text)s;text-decoration:none;border:%(color_border)s 1px solid;background:%(color_background_grey)s" % _kwargs,
226 "link_text_style": "color:%(color_text)s;text-decoration:none;border:%(color_border)s 1px solid;background:%(color_background_grey)s" % _kwargs,
227 })
227 })
228
228
229 log.debug('rendering tmpl %s with kwargs %s', base, _kwargs)
229 log.debug('rendering tmpl %s with kwargs %s', base, _kwargs)
230 return email_template.render_unicode(**_kwargs)
230 return email_template.render_unicode(**_kwargs)
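(Editorial aside, not part of this changeset: a hedged sketch of how the two calls above fit together for a registration notification. The kwargs are illustrative; in the real flow they come from NotificationModel.create().)

    # Illustrative only: resolves and renders 'email_templates/registration.txt'.
    model = EmailNotificationModel()
    subject = model.get_email_description(
        EmailNotificationModel.TYPE_REGISTRATION, new_username='jdoe')
    text_body = model.get_email_tmpl(
        EmailNotificationModel.TYPE_REGISTRATION, 'txt',
        subject=subject, body='', when='2020-01-01', user='admin')
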
@@ -1,846 +1,846 b''
1 import datetime
1 import datetime
2 import os
2 import os
3 import sys
3 import sys
4 import urllib.error
4 import urllib.error
5
5
6 import mock
6 import mock
7 import pytest
7 import pytest
8
8
9 from kallithea.lib.vcs.backends.git import GitChangeset, GitRepository
9 from kallithea.lib.vcs.backends.git import GitChangeset, GitRepository
10 from kallithea.lib.vcs.exceptions import NodeDoesNotExistError, RepositoryError, VCSError
10 from kallithea.lib.vcs.exceptions import NodeDoesNotExistError, RepositoryError, VCSError
11 from kallithea.lib.vcs.nodes import DirNode, FileNode, NodeKind, NodeState
11 from kallithea.lib.vcs.nodes import DirNode, FileNode, NodeKind, NodeState
12 from kallithea.model.scm import ScmModel
12 from kallithea.model.scm import ScmModel
13 from kallithea.tests.vcs.base import _BackendTestMixin
13 from kallithea.tests.vcs.base import _BackendTestMixin
14 from kallithea.tests.vcs.conf import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, TESTS_TMP_PATH, get_new_dir
14 from kallithea.tests.vcs.conf import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, TESTS_TMP_PATH, get_new_dir
15
15
16
16
17 class TestGitRepository(object):
17 class TestGitRepository(object):
18
18
19 def __check_for_existing_repo(self):
19 def __check_for_existing_repo(self):
20 if os.path.exists(TEST_GIT_REPO_CLONE):
20 if os.path.exists(TEST_GIT_REPO_CLONE):
21 pytest.fail('Cannot test git clone repo as location %s already '
21 pytest.fail('Cannot test git clone repo as location %s already '
22 'exists. You should manually remove it first.'
22 'exists. You should manually remove it first.'
23 % TEST_GIT_REPO_CLONE)
23 % TEST_GIT_REPO_CLONE)
24
24
25 def setup_method(self):
25 def setup_method(self):
26 self.repo = GitRepository(TEST_GIT_REPO)
26 self.repo = GitRepository(TEST_GIT_REPO)
27
27
28 def test_wrong_repo_path(self):
28 def test_wrong_repo_path(self):
29 wrong_repo_path = os.path.join(TESTS_TMP_PATH, 'errorrepo')
29 wrong_repo_path = os.path.join(TESTS_TMP_PATH, 'errorrepo')
30 with pytest.raises(RepositoryError):
30 with pytest.raises(RepositoryError):
31 GitRepository(wrong_repo_path)
31 GitRepository(wrong_repo_path)
32
32
33 def test_git_cmd_injection(self):
33 def test_git_cmd_injection(self):
34 repo_inject_path = TEST_GIT_REPO + '; echo "Cake";'
34 repo_inject_path = TEST_GIT_REPO + '; echo "Cake";'
35 with pytest.raises(urllib.error.URLError):
35 with pytest.raises(urllib.error.URLError):
36 # Should fail because URL will contain the parts after ; too
36 # Should fail because URL will contain the parts after ; too
37 GitRepository(get_new_dir('injection-repo'), src_url=repo_inject_path, update_after_clone=True, create=True)
37 GitRepository(get_new_dir('injection-repo'), src_url=repo_inject_path, update_after_clone=True, create=True)
38
38
39 with pytest.raises(RepositoryError):
39 with pytest.raises(RepositoryError):
40 # Should fail on direct clone call, which as of this writing does not happen outside of class
40 # Should fail on direct clone call, which as of this writing does not happen outside of class
41 clone_fail_repo = GitRepository(get_new_dir('injection-repo'), create=True)
41 clone_fail_repo = GitRepository(get_new_dir('injection-repo'), create=True)
42 clone_fail_repo.clone(repo_inject_path, update_after_clone=True,)
42 clone_fail_repo.clone(repo_inject_path, update_after_clone=True,)
43
43
44 # Verify correct quoting of evil characters that should work on posix file systems
44 # Verify correct quoting of evil characters that should work on posix file systems
45 if sys.platform == 'win32':
45 if sys.platform == 'win32':
46 # windows does not allow '"' in dir names
46 # windows does not allow '"' in dir names
47 # and some versions of the git client don't like ` and '
47 # and some versions of the git client don't like ` and '
48 tricky_path = get_new_dir("tricky-path-repo-$")
48 tricky_path = get_new_dir("tricky-path-repo-$")
49 else:
49 else:
50 tricky_path = get_new_dir("tricky-path-repo-$'\"`")
50 tricky_path = get_new_dir("tricky-path-repo-$'\"`")
51 successfully_cloned = GitRepository(tricky_path, src_url=TEST_GIT_REPO, update_after_clone=True, create=True)
51 successfully_cloned = GitRepository(tricky_path, src_url=TEST_GIT_REPO, update_after_clone=True, create=True)
52 # Repo should have been created
52 # Repo should have been created
53 assert not successfully_cloned._repo.bare
53 assert not successfully_cloned._repo.bare
54
54
55 if sys.platform == 'win32':
55 if sys.platform == 'win32':
56 # windows does not allow '"' in dir names
56 # windows does not allow '"' in dir names
57 # and some versions of the git client don't like ` and '
57 # and some versions of the git client don't like ` and '
58 tricky_path_2 = get_new_dir("tricky-path-2-repo-$")
58 tricky_path_2 = get_new_dir("tricky-path-2-repo-$")
59 else:
59 else:
60 tricky_path_2 = get_new_dir("tricky-path-2-repo-$'\"`")
60 tricky_path_2 = get_new_dir("tricky-path-2-repo-$'\"`")
61 successfully_cloned2 = GitRepository(tricky_path_2, src_url=tricky_path, bare=True, create=True)
61 successfully_cloned2 = GitRepository(tricky_path_2, src_url=tricky_path, bare=True, create=True)
62 # Repo should have been created and thus used correct quoting for clone
62 # Repo should have been created and thus used correct quoting for clone
63 assert successfully_cloned2._repo.bare
63 assert successfully_cloned2._repo.bare
64
64
65 # Should pass because URL has been properly quoted
65 # Should pass because URL has been properly quoted
66 successfully_cloned.pull(tricky_path_2)
66 successfully_cloned.pull(tricky_path_2)
67 successfully_cloned2.fetch(tricky_path)
67 successfully_cloned2.fetch(tricky_path)
68
68
69 def test_repo_create_with_spaces_in_path(self):
69 def test_repo_create_with_spaces_in_path(self):
70 repo_path = get_new_dir("path with spaces")
70 repo_path = get_new_dir("path with spaces")
71 repo = GitRepository(repo_path, src_url=None, bare=True, create=True)
71 repo = GitRepository(repo_path, src_url=None, bare=True, create=True)
72 # Repo should have been created
72 # Repo should have been created
73 assert repo._repo.bare
73 assert repo._repo.bare
74
74
75 def test_repo_clone(self):
75 def test_repo_clone(self):
76 self.__check_for_existing_repo()
76 self.__check_for_existing_repo()
77 repo = GitRepository(TEST_GIT_REPO)
77 repo = GitRepository(TEST_GIT_REPO)
78 repo_clone = GitRepository(TEST_GIT_REPO_CLONE,
78 repo_clone = GitRepository(TEST_GIT_REPO_CLONE,
79 src_url=TEST_GIT_REPO, create=True, update_after_clone=True)
79 src_url=TEST_GIT_REPO, create=True, update_after_clone=True)
80 assert len(repo.revisions) == len(repo_clone.revisions)
80 assert len(repo.revisions) == len(repo_clone.revisions)
81 # Checking hashes of changesets should be enough
81 # Checking hashes of changesets should be enough
82 for changeset in repo.get_changesets():
82 for changeset in repo.get_changesets():
83 raw_id = changeset.raw_id
83 raw_id = changeset.raw_id
84 assert raw_id == repo_clone.get_changeset(raw_id).raw_id
84 assert raw_id == repo_clone.get_changeset(raw_id).raw_id
85
85
86 def test_repo_clone_with_spaces_in_path(self):
86 def test_repo_clone_with_spaces_in_path(self):
87 repo_path = get_new_dir("path with spaces")
87 repo_path = get_new_dir("path with spaces")
88 successfully_cloned = GitRepository(repo_path, src_url=TEST_GIT_REPO, update_after_clone=True, create=True)
88 successfully_cloned = GitRepository(repo_path, src_url=TEST_GIT_REPO, update_after_clone=True, create=True)
89 # Repo should have been created
89 # Repo should have been created
90 assert not successfully_cloned._repo.bare
90 assert not successfully_cloned._repo.bare
91
91
92 successfully_cloned.pull(TEST_GIT_REPO)
92 successfully_cloned.pull(TEST_GIT_REPO)
93 self.repo.fetch(repo_path)
93 self.repo.fetch(repo_path)
94
94
95 def test_repo_clone_without_create(self):
95 def test_repo_clone_without_create(self):
96 with pytest.raises(RepositoryError):
96 with pytest.raises(RepositoryError):
97 GitRepository(TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)
97 GitRepository(TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)
98
98
99 def test_repo_clone_with_update(self):
99 def test_repo_clone_with_update(self):
100 repo = GitRepository(TEST_GIT_REPO)
100 repo = GitRepository(TEST_GIT_REPO)
101 clone_path = TEST_GIT_REPO_CLONE + '_with_update'
101 clone_path = TEST_GIT_REPO_CLONE + '_with_update'
102 repo_clone = GitRepository(clone_path,
102 repo_clone = GitRepository(clone_path,
103 create=True, src_url=TEST_GIT_REPO, update_after_clone=True)
103 create=True, src_url=TEST_GIT_REPO, update_after_clone=True)
104 assert len(repo.revisions) == len(repo_clone.revisions)
104 assert len(repo.revisions) == len(repo_clone.revisions)
105
105
106 # check if current workdir was updated
106 # check if current workdir was updated
107 fpath = os.path.join(clone_path, 'MANIFEST.in')
107 fpath = os.path.join(clone_path, 'MANIFEST.in')
108 assert os.path.isfile(fpath), 'Repo was cloned and updated but file %s could not be found' % fpath
108 assert os.path.isfile(fpath), 'Repo was cloned and updated but file %s could not be found' % fpath
109
109
110 def test_repo_clone_without_update(self):
110 def test_repo_clone_without_update(self):
111 repo = GitRepository(TEST_GIT_REPO)
111 repo = GitRepository(TEST_GIT_REPO)
112 clone_path = TEST_GIT_REPO_CLONE + '_without_update'
112 clone_path = TEST_GIT_REPO_CLONE + '_without_update'
113 repo_clone = GitRepository(clone_path,
113 repo_clone = GitRepository(clone_path,
114 create=True, src_url=TEST_GIT_REPO, update_after_clone=False)
114 create=True, src_url=TEST_GIT_REPO, update_after_clone=False)
115 assert len(repo.revisions) == len(repo_clone.revisions)
115 assert len(repo.revisions) == len(repo_clone.revisions)
116 # check if current workdir was *NOT* updated
116 # check if current workdir was *NOT* updated
117 fpath = os.path.join(clone_path, 'MANIFEST.in')
117 fpath = os.path.join(clone_path, 'MANIFEST.in')
118 # Make sure it's not bare repo
118 # Make sure it's not bare repo
119 assert not repo_clone._repo.bare
119 assert not repo_clone._repo.bare
120 assert not os.path.isfile(fpath), 'Repo was cloned without update but file %s was found' % fpath
120 assert not os.path.isfile(fpath), 'Repo was cloned without update but file %s was found' % fpath
121
121
122 def test_repo_clone_into_bare_repo(self):
122 def test_repo_clone_into_bare_repo(self):
123 repo = GitRepository(TEST_GIT_REPO)
123 repo = GitRepository(TEST_GIT_REPO)
124 clone_path = TEST_GIT_REPO_CLONE + '_bare.git'
124 clone_path = TEST_GIT_REPO_CLONE + '_bare.git'
125 repo_clone = GitRepository(clone_path, create=True,
125 repo_clone = GitRepository(clone_path, create=True,
126 src_url=repo.path, bare=True)
126 src_url=repo.path, bare=True)
127 assert repo_clone._repo.bare
127 assert repo_clone._repo.bare
128
128
129 def test_create_repo_is_not_bare_by_default(self):
129 def test_create_repo_is_not_bare_by_default(self):
130 repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
130 repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
131 assert not repo._repo.bare
131 assert not repo._repo.bare
132
132
133 def test_create_bare_repo(self):
133 def test_create_bare_repo(self):
134 repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True)
134 repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True)
135 assert repo._repo.bare
135 assert repo._repo.bare
136
136
137 def test_revisions(self):
137 def test_revisions(self):
138 # there are 112 revisions (by now)
138 # there are 112 revisions (by now)
139 # so we can assume they would be available from now on
139 # so we can assume they would be available from now on
140 subset = set([
140 subset = set([
141 'c1214f7e79e02fc37156ff215cd71275450cffc3',
141 'c1214f7e79e02fc37156ff215cd71275450cffc3',
142 '38b5fe81f109cb111f549bfe9bb6b267e10bc557',
142 '38b5fe81f109cb111f549bfe9bb6b267e10bc557',
143 'fa6600f6848800641328adbf7811fd2372c02ab2',
143 'fa6600f6848800641328adbf7811fd2372c02ab2',
144 '102607b09cdd60e2793929c4f90478be29f85a17',
144 '102607b09cdd60e2793929c4f90478be29f85a17',
145 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
145 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
146 '2d1028c054665b962fa3d307adfc923ddd528038',
146 '2d1028c054665b962fa3d307adfc923ddd528038',
147 'd7e0d30fbcae12c90680eb095a4f5f02505ce501',
147 'd7e0d30fbcae12c90680eb095a4f5f02505ce501',
148 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
148 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
149 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
149 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
150 '8430a588b43b5d6da365400117c89400326e7992',
150 '8430a588b43b5d6da365400117c89400326e7992',
151 'd955cd312c17b02143c04fa1099a352b04368118',
151 'd955cd312c17b02143c04fa1099a352b04368118',
152 'f67b87e5c629c2ee0ba58f85197e423ff28d735b',
152 'f67b87e5c629c2ee0ba58f85197e423ff28d735b',
153 'add63e382e4aabc9e1afdc4bdc24506c269b7618',
153 'add63e382e4aabc9e1afdc4bdc24506c269b7618',
154 'f298fe1189f1b69779a4423f40b48edf92a703fc',
154 'f298fe1189f1b69779a4423f40b48edf92a703fc',
155 'bd9b619eb41994cac43d67cf4ccc8399c1125808',
155 'bd9b619eb41994cac43d67cf4ccc8399c1125808',
156 '6e125e7c890379446e98980d8ed60fba87d0f6d1',
156 '6e125e7c890379446e98980d8ed60fba87d0f6d1',
157 'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd',
157 'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd',
158 '0b05e4ed56c802098dfc813cbe779b2f49e92500',
158 '0b05e4ed56c802098dfc813cbe779b2f49e92500',
159 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
159 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
160 '45223f8f114c64bf4d6f853e3c35a369a6305520',
160 '45223f8f114c64bf4d6f853e3c35a369a6305520',
161 'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
161 'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
162 'f5ea29fc42ef67a2a5a7aecff10e1566699acd68',
162 'f5ea29fc42ef67a2a5a7aecff10e1566699acd68',
163 '27d48942240f5b91dfda77accd2caac94708cc7d',
163 '27d48942240f5b91dfda77accd2caac94708cc7d',
164 '622f0eb0bafd619d2560c26f80f09e3b0b0d78af',
164 '622f0eb0bafd619d2560c26f80f09e3b0b0d78af',
165 'e686b958768ee96af8029fe19c6050b1a8dd3b2b'])
165 'e686b958768ee96af8029fe19c6050b1a8dd3b2b'])
166 assert subset.issubset(set(self.repo.revisions))
166 assert subset.issubset(set(self.repo.revisions))
167
167
168 def test_slicing(self):
168 def test_slicing(self):
169 # 4 1 5 10 95
169 # 4 1 5 10 95
170 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
170 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
171 (10, 20, 10), (5, 100, 95)]:
171 (10, 20, 10), (5, 100, 95)]:
172 revs = list(self.repo[sfrom:sto])
172 revs = list(self.repo[sfrom:sto])
173 assert len(revs) == size
173 assert len(revs) == size
174 assert revs[0] == self.repo.get_changeset(sfrom)
174 assert revs[0] == self.repo.get_changeset(sfrom)
175 assert revs[-1] == self.repo.get_changeset(sto - 1)
175 assert revs[-1] == self.repo.get_changeset(sto - 1)
176
176
177 def test_branches(self):
177 def test_branches(self):
178 # TODO: Need more tests here
178 # TODO: Need more tests here
179 # Removed (those are 'remotes' branches for cloned repo)
179 # Removed (those are 'remotes' branches for cloned repo)
180 #assert 'master' in self.repo.branches
180 #assert 'master' in self.repo.branches
181 #assert 'gittree' in self.repo.branches
181 #assert 'gittree' in self.repo.branches
182 #assert 'web-branch' in self.repo.branches
182 #assert 'web-branch' in self.repo.branches
183 for name, id in self.repo.branches.items():
183 for name, id in self.repo.branches.items():
184 assert isinstance(self.repo.get_changeset(id), GitChangeset)
184 assert isinstance(self.repo.get_changeset(id), GitChangeset)
185
185
186 def test_tags(self):
186 def test_tags(self):
187 # TODO: Need more tests here
187 # TODO: Need more tests here
188 assert 'v0.1.1' in self.repo.tags
188 assert 'v0.1.1' in self.repo.tags
189 assert 'v0.1.2' in self.repo.tags
189 assert 'v0.1.2' in self.repo.tags
190 for name, id in self.repo.tags.items():
190 for name, id in self.repo.tags.items():
191 assert isinstance(self.repo.get_changeset(id), GitChangeset)
191 assert isinstance(self.repo.get_changeset(id), GitChangeset)
192
192
193 def _test_single_changeset_cache(self, revision):
193 def _test_single_changeset_cache(self, revision):
194 chset = self.repo.get_changeset(revision)
194 chset = self.repo.get_changeset(revision)
195 assert revision in self.repo.changesets
195 assert revision in self.repo.changesets
196 assert chset is self.repo.changesets[revision]
196 assert chset is self.repo.changesets[revision]
197
197
198 def test_initial_changeset(self):
198 def test_initial_changeset(self):
199 id = self.repo.revisions[0]
199 id = self.repo.revisions[0]
200 init_chset = self.repo.get_changeset(id)
200 init_chset = self.repo.get_changeset(id)
201 assert init_chset.message == 'initial import\n'
201 assert init_chset.message == 'initial import\n'
202 assert init_chset.author == 'Marcin Kuzminski <marcin@python-blog.com>'
202 assert init_chset.author == 'Marcin Kuzminski <marcin@python-blog.com>'
203 for path in ('vcs/__init__.py',
203 for path in ('vcs/__init__.py',
204 'vcs/backends/BaseRepository.py',
204 'vcs/backends/BaseRepository.py',
205 'vcs/backends/__init__.py'):
205 'vcs/backends/__init__.py'):
206 assert isinstance(init_chset.get_node(path), FileNode)
206 assert isinstance(init_chset.get_node(path), FileNode)
207 for path in ('', 'vcs', 'vcs/backends'):
207 for path in ('', 'vcs', 'vcs/backends'):
208 assert isinstance(init_chset.get_node(path), DirNode)
208 assert isinstance(init_chset.get_node(path), DirNode)
209
209
210 with pytest.raises(NodeDoesNotExistError):
210 with pytest.raises(NodeDoesNotExistError):
211 init_chset.get_node(path='foobar')
211 init_chset.get_node(path='foobar')
212
212
213 node = init_chset.get_node('vcs/')
213 node = init_chset.get_node('vcs/')
214 assert hasattr(node, 'kind')
214 assert hasattr(node, 'kind')
215 assert node.kind == NodeKind.DIR
215 assert node.kind == NodeKind.DIR
216
216
217 node = init_chset.get_node('vcs')
217 node = init_chset.get_node('vcs')
218 assert hasattr(node, 'kind')
218 assert hasattr(node, 'kind')
219 assert node.kind == NodeKind.DIR
219 assert node.kind == NodeKind.DIR
220
220
221 node = init_chset.get_node('vcs/__init__.py')
221 node = init_chset.get_node('vcs/__init__.py')
222 assert hasattr(node, 'kind')
222 assert hasattr(node, 'kind')
223 assert node.kind == NodeKind.FILE
223 assert node.kind == NodeKind.FILE
224
224
225 def test_not_existing_changeset(self):
225 def test_not_existing_changeset(self):
226 with pytest.raises(RepositoryError):
226 with pytest.raises(RepositoryError):
227 self.repo.get_changeset('f' * 40)
227 self.repo.get_changeset('f' * 40)
228
228
229 def test_changeset10(self):
229 def test_changeset10(self):
230
230
231 chset10 = self.repo.get_changeset(self.repo.revisions[9])
231 chset10 = self.repo.get_changeset(self.repo.revisions[9])
232 readme = b"""===
232 readme = b"""===
233 VCS
233 VCS
234 ===
234 ===
235
235
236 Various Version Control System management abstraction layer for Python.
236 Various Version Control System management abstraction layer for Python.
237
237
238 Introduction
238 Introduction
239 ------------
239 ------------
240
240
241 TODO: To be written...
241 TODO: To be written...
242
242
243 """
243 """
244 node = chset10.get_node('README.rst')
244 node = chset10.get_node('README.rst')
245 assert node.kind == NodeKind.FILE
245 assert node.kind == NodeKind.FILE
246 assert node.content == readme
246 assert node.content == readme
247
247
248
248
249 class TestGitChangeset(object):
249 class TestGitChangeset(object):
250
250
251 def setup_method(self):
251 def setup_method(self):
252 self.repo = GitRepository(TEST_GIT_REPO)
252 self.repo = GitRepository(TEST_GIT_REPO)
253
253
254 def test_default_changeset(self):
254 def test_default_changeset(self):
255 tip = self.repo.get_changeset()
255 tip = self.repo.get_changeset()
256 assert tip == self.repo.get_changeset(None)
256 assert tip == self.repo.get_changeset(None)
257 assert tip == self.repo.get_changeset('tip')
257 assert tip == self.repo.get_changeset('tip')
258
258
259 def test_root_node(self):
259 def test_root_node(self):
260 tip = self.repo.get_changeset()
260 tip = self.repo.get_changeset()
261 assert tip.root is tip.get_node('')
261 assert tip.root is tip.get_node('')
262
262
263 def test_lazy_fetch(self):
263 def test_lazy_fetch(self):
264 """
264 """
265 Test that a changeset's nodes expand and are cached as we walk through
265 Test that a changeset's nodes expand and are cached as we walk through
266 the revision. This test is somewhat hard to write, as the order of the
266 the revision. This test is somewhat hard to write, as the order of the
267 checks is key here. Written by running command after command in a shell.
267 checks is key here. Written by running command after command in a shell.
268 """
268 """
269 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
269 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
270 assert commit_id in self.repo.revisions
270 assert commit_id in self.repo.revisions
271 chset = self.repo.get_changeset(commit_id)
271 chset = self.repo.get_changeset(commit_id)
272 assert len(chset.nodes) == 0
272 assert len(chset.nodes) == 0
273 root = chset.root
273 root = chset.root
274 assert len(chset.nodes) == 1
274 assert len(chset.nodes) == 1
275 assert len(root.nodes) == 8
275 assert len(root.nodes) == 8
276 # accessing root.nodes updates chset.nodes
276 # accessing root.nodes updates chset.nodes
277 assert len(chset.nodes) == 9
277 assert len(chset.nodes) == 9
278
278
279 docs = root.get_node('docs')
279 docs = root.get_node('docs')
280 # we haven't yet accessed anything new as docs dir was already cached
280 # we haven't yet accessed anything new as docs dir was already cached
281 assert len(chset.nodes) == 9
281 assert len(chset.nodes) == 9
282 assert len(docs.nodes) == 8
282 assert len(docs.nodes) == 8
283 # accessing docs.nodes updates chset.nodes
283 # accessing docs.nodes updates chset.nodes
284 assert len(chset.nodes) == 17
284 assert len(chset.nodes) == 17
285
285
286 assert docs is chset.get_node('docs')
286 assert docs is chset.get_node('docs')
287 assert docs is root.nodes[0]
287 assert docs is root.nodes[0]
288 assert docs is root.dirs[0]
288 assert docs is root.dirs[0]
289 assert docs is chset.get_node('docs')
289 assert docs is chset.get_node('docs')
290
290
291 def test_nodes_with_changeset(self):
291 def test_nodes_with_changeset(self):
292 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
292 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
293 chset = self.repo.get_changeset(commit_id)
293 chset = self.repo.get_changeset(commit_id)
294 root = chset.root
294 root = chset.root
295 docs = root.get_node('docs')
295 docs = root.get_node('docs')
296 assert docs is chset.get_node('docs')
296 assert docs is chset.get_node('docs')
297 api = docs.get_node('api')
297 api = docs.get_node('api')
298 assert api is chset.get_node('docs/api')
298 assert api is chset.get_node('docs/api')
299 index = api.get_node('index.rst')
299 index = api.get_node('index.rst')
300 assert index is chset.get_node('docs/api/index.rst')
300 assert index is chset.get_node('docs/api/index.rst')
301 assert index is chset.get_node('docs') \
301 assert index is chset.get_node('docs') \
302 .get_node('api') \
302 .get_node('api') \
303 .get_node('index.rst')
303 .get_node('index.rst')
304
304
305 def test_branch_and_tags(self):
305 def test_branch_and_tags(self):
306 # Those tests seem to show wrong results:
306 # Those tests seem to show wrong results:
307 # in Git, only heads have a branch - most changesets don't
307 # in Git, only heads have a branch - most changesets don't
308 rev0 = self.repo.revisions[0]
308 rev0 = self.repo.revisions[0]
309 chset0 = self.repo.get_changeset(rev0)
309 chset0 = self.repo.get_changeset(rev0)
310 assert chset0.branch is None # should be 'master'?
310 assert chset0.branch is None # should be 'master'?
311 assert chset0.branches == [] # should be 'master'?
311 assert chset0.branches == [] # should be 'master'?
312 assert chset0.tags == []
312 assert chset0.tags == []
313
313
314 rev10 = self.repo.revisions[10]
314 rev10 = self.repo.revisions[10]
315 chset10 = self.repo.get_changeset(rev10)
315 chset10 = self.repo.get_changeset(rev10)
316 assert chset10.branch is None # should be 'master'?
316 assert chset10.branch is None # should be 'master'?
317 assert chset10.branches == [] # should be 'master'?
317 assert chset10.branches == [] # should be 'master'?
318 assert chset10.tags == []
318 assert chset10.tags == []
319
319
320 rev44 = self.repo.revisions[44]
320 rev44 = self.repo.revisions[44]
321 chset44 = self.repo.get_changeset(rev44)
321 chset44 = self.repo.get_changeset(rev44)
322 assert chset44.branch is None # should be 'web-branch'?
322 assert chset44.branch is None # should be 'web-branch'?
323 assert chset44.branches == [] # should be 'web-branch'?
323 assert chset44.branches == [] # should be 'web-branch'?
324
324
325 tip = self.repo.get_changeset('tip')
325 tip = self.repo.get_changeset('tip')
326 assert 'tip' not in tip.tags # it should be?
326 assert 'tip' not in tip.tags # it should be?
327 assert not tip.tags # how it is!
327 assert not tip.tags # how it is!
328
328
329 def _test_slices(self, limit, offset):
329 def _test_slices(self, limit, offset):
330 count = self.repo.count()
330 count = self.repo.count()
331 changesets = self.repo.get_changesets(limit=limit, offset=offset)
331 changesets = self.repo.get_changesets(limit=limit, offset=offset)
332 idx = 0
332 idx = 0
333 for changeset in changesets:
333 for changeset in changesets:
334 rev = offset + idx
334 rev = offset + idx
335 idx += 1
335 idx += 1
336 rev_id = self.repo.revisions[rev]
336 rev_id = self.repo.revisions[rev]
337 if idx > limit:
337 if idx > limit:
338 pytest.fail("Exceeded limit already (getting revision %s, "
338 pytest.fail("Exceeded limit already (getting revision %s, "
339 "there are %s total revisions, offset=%s, limit=%s)"
339 "there are %s total revisions, offset=%s, limit=%s)"
340 % (rev_id, count, offset, limit))
340 % (rev_id, count, offset, limit))
341 assert changeset == self.repo.get_changeset(rev_id)
341 assert changeset == self.repo.get_changeset(rev_id)
342 result = list(self.repo.get_changesets(limit=limit, offset=offset))
342 result = list(self.repo.get_changesets(limit=limit, offset=offset))
343 start = offset
343 start = offset
344 end = limit and offset + limit or None
344 end = limit and offset + limit or None
345 sliced = list(self.repo[start:end])
345 sliced = list(self.repo[start:end])
346 assert result == sliced, (
346 assert result == sliced, (
347 "Comparison failed for limit=%s, offset=%s "
347 "Comparison failed for limit=%s, offset=%s "
348 "(get_changesets returned: %s and sliced: %s)"
348 "(get_changesets returned: %s and sliced: %s)"
349 % (limit, offset, result, sliced))
349 % (limit, offset, result, sliced))
350
350
351 def _test_file_size(self, revision, path, size):
351 def _test_file_size(self, revision, path, size):
352 node = self.repo.get_changeset(revision).get_node(path)
352 node = self.repo.get_changeset(revision).get_node(path)
353 assert node.is_file()
353 assert node.is_file()
354 assert node.size == size
354 assert node.size == size
355
355
356 def test_file_size(self):
356 def test_file_size(self):
357 to_check = (
357 to_check = (
358 ('c1214f7e79e02fc37156ff215cd71275450cffc3',
358 ('c1214f7e79e02fc37156ff215cd71275450cffc3',
359 'vcs/backends/BaseRepository.py', 502),
359 'vcs/backends/BaseRepository.py', 502),
360 ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
360 ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
361 'vcs/backends/hg.py', 854),
361 'vcs/backends/hg.py', 854),
362 ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
362 ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
363 'setup.py', 1068),
363 'setup.py', 1068),
364 ('d955cd312c17b02143c04fa1099a352b04368118',
364 ('d955cd312c17b02143c04fa1099a352b04368118',
365 'vcs/backends/base.py', 2921),
365 'vcs/backends/base.py', 2921),
366 ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
366 ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
367 'vcs/backends/base.py', 3936),
367 'vcs/backends/base.py', 3936),
368 ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
368 ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
369 'vcs/backends/base.py', 6189),
369 'vcs/backends/base.py', 6189),
370 )
370 )
371 for revision, path, size in to_check:
371 for revision, path, size in to_check:
372 self._test_file_size(revision, path, size)
372 self._test_file_size(revision, path, size)
373
373
374 def _test_dir_size(self, revision, path, size):
374 def _test_dir_size(self, revision, path, size):
375 node = self.repo.get_changeset(revision).get_node(path)
375 node = self.repo.get_changeset(revision).get_node(path)
376 assert node.size == size
376 assert node.size == size
377
377
378 def test_dir_size(self):
378 def test_dir_size(self):
379 to_check = (
379 to_check = (
380 ('5f2c6ee195929b0be80749243c18121c9864a3b3', '/', 674076),
380 ('5f2c6ee195929b0be80749243c18121c9864a3b3', '/', 674076),
381 ('7ab37bc680b4aa72c34d07b230c866c28e9fc204', '/', 674049),
381 ('7ab37bc680b4aa72c34d07b230c866c28e9fc204', '/', 674049),
382 ('6892503fb8f2a552cef5f4d4cc2cdbd13ae1cd2f', '/', 671830),
382 ('6892503fb8f2a552cef5f4d4cc2cdbd13ae1cd2f', '/', 671830),
383 )
383 )
384 for revision, path, size in to_check:
384 for revision, path, size in to_check:
385 self._test_dir_size(revision, path, size)
385 self._test_dir_size(revision, path, size)
386
386
387 def test_repo_size(self):
387 def test_repo_size(self):
388 assert self.repo.size == 674076
388 assert self.repo.size == 674076
389
389
390 def test_file_history(self):
390 def test_file_history(self):
391 # we can only check if those revisions are present in the history
391 # we can only check if those revisions are present in the history
392 # as we cannot update this test every time file is changed
392 # as we cannot update this test every time file is changed
393 files = {
393 files = {
394 'setup.py': [
394 'setup.py': [
395 '54386793436c938cff89326944d4c2702340037d',
395 '54386793436c938cff89326944d4c2702340037d',
396 '51d254f0ecf5df2ce50c0b115741f4cf13985dab',
396 '51d254f0ecf5df2ce50c0b115741f4cf13985dab',
397 '998ed409c795fec2012b1c0ca054d99888b22090',
397 '998ed409c795fec2012b1c0ca054d99888b22090',
398 '5e0eb4c47f56564395f76333f319d26c79e2fb09',
398 '5e0eb4c47f56564395f76333f319d26c79e2fb09',
399 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
399 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
400 '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e',
400 '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e',
401 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
401 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
402 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
402 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
403 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
403 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
404 ],
404 ],
405 'vcs/nodes.py': [
405 'vcs/nodes.py': [
406 '33fa3223355104431402a888fa77a4e9956feb3e',
406 '33fa3223355104431402a888fa77a4e9956feb3e',
407 'fa014c12c26d10ba682fadb78f2a11c24c8118e1',
407 'fa014c12c26d10ba682fadb78f2a11c24c8118e1',
408 'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
408 'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
409 'ab5721ca0a081f26bf43d9051e615af2cc99952f',
409 'ab5721ca0a081f26bf43d9051e615af2cc99952f',
410 'c877b68d18e792a66b7f4c529ea02c8f80801542',
410 'c877b68d18e792a66b7f4c529ea02c8f80801542',
411 '4313566d2e417cb382948f8d9d7c765330356054',
411 '4313566d2e417cb382948f8d9d7c765330356054',
412 '6c2303a793671e807d1cfc70134c9ca0767d98c2',
412 '6c2303a793671e807d1cfc70134c9ca0767d98c2',
413 '54386793436c938cff89326944d4c2702340037d',
413 '54386793436c938cff89326944d4c2702340037d',
414 '54000345d2e78b03a99d561399e8e548de3f3203',
414 '54000345d2e78b03a99d561399e8e548de3f3203',
415 '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b',
415 '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b',
416 '2d03ca750a44440fb5ea8b751176d1f36f8e8f46',
416 '2d03ca750a44440fb5ea8b751176d1f36f8e8f46',
417 '2a08b128c206db48c2f0b8f70df060e6db0ae4f8',
417 '2a08b128c206db48c2f0b8f70df060e6db0ae4f8',
418 '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b',
418 '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b',
419 'ac71e9503c2ca95542839af0ce7b64011b72ea7c',
419 'ac71e9503c2ca95542839af0ce7b64011b72ea7c',
420 '12669288fd13adba2a9b7dd5b870cc23ffab92d2',
420 '12669288fd13adba2a9b7dd5b870cc23ffab92d2',
421 '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382',
421 '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382',
422 '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5',
422 '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5',
423 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
423 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
424 'f50f42baeed5af6518ef4b0cb2f1423f3851a941',
424 'f50f42baeed5af6518ef4b0cb2f1423f3851a941',
425 'd7e390a45f6aa96f04f5e7f583ad4f867431aa25',
425 'd7e390a45f6aa96f04f5e7f583ad4f867431aa25',
426 'f15c21f97864b4f071cddfbf2750ec2e23859414',
426 'f15c21f97864b4f071cddfbf2750ec2e23859414',
427 'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade',
427 'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade',
428 'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b',
428 'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b',
429 '84dec09632a4458f79f50ddbbd155506c460b4f9',
429 '84dec09632a4458f79f50ddbbd155506c460b4f9',
430 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
430 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
431 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
431 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
432 '3bf1c5868e570e39569d094f922d33ced2fa3b2b',
432 '3bf1c5868e570e39569d094f922d33ced2fa3b2b',
433 'b8d04012574729d2c29886e53b1a43ef16dd00a1',
433 'b8d04012574729d2c29886e53b1a43ef16dd00a1',
434 '6970b057cffe4aab0a792aa634c89f4bebf01441',
434 '6970b057cffe4aab0a792aa634c89f4bebf01441',
435 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
435 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
436 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
436 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
437 ],
437 ],
438 'vcs/backends/git.py': [
438 'vcs/backends/git.py': [
439 '4cf116ad5a457530381135e2f4c453e68a1b0105',
439 '4cf116ad5a457530381135e2f4c453e68a1b0105',
440 '9a751d84d8e9408e736329767387f41b36935153',
440 '9a751d84d8e9408e736329767387f41b36935153',
441 'cb681fb539c3faaedbcdf5ca71ca413425c18f01',
441 'cb681fb539c3faaedbcdf5ca71ca413425c18f01',
442 '428f81bb652bcba8d631bce926e8834ff49bdcc6',
442 '428f81bb652bcba8d631bce926e8834ff49bdcc6',
443 '180ab15aebf26f98f714d8c68715e0f05fa6e1c7',
443 '180ab15aebf26f98f714d8c68715e0f05fa6e1c7',
444 '2b8e07312a2e89e92b90426ab97f349f4bce2a3a',
444 '2b8e07312a2e89e92b90426ab97f349f4bce2a3a',
445 '50e08c506174d8645a4bb517dd122ac946a0f3bf',
445 '50e08c506174d8645a4bb517dd122ac946a0f3bf',
446 '54000345d2e78b03a99d561399e8e548de3f3203',
446 '54000345d2e78b03a99d561399e8e548de3f3203',
447 ],
447 ],
448 }
448 }
449 for path, revs in files.items():
449 for path, revs in files.items():
450 node = self.repo.get_changeset(revs[0]).get_node(path)
450 node = self.repo.get_changeset(revs[0]).get_node(path)
451 node_revs = [chset.raw_id for chset in node.history]
451 node_revs = [chset.raw_id for chset in node.history]
452 assert set(revs).issubset(set(node_revs)), "We assumed that %s is a subset of the revisions for which file %s " \
452 assert set(revs).issubset(set(node_revs)), "We assumed that %s is a subset of the revisions for which file %s " \
453 "has been changed, and history of that node returned: %s" \
453 "has been changed, and history of that node returned: %s" \
454 % (revs, path, node_revs)
454 % (revs, path, node_revs)
455
455
456 def test_file_annotate(self):
456 def test_file_annotate(self):
457 files = {
457 files = {
458 'vcs/backends/__init__.py': {
458 'vcs/backends/__init__.py': {
459 'c1214f7e79e02fc37156ff215cd71275450cffc3': {
459 'c1214f7e79e02fc37156ff215cd71275450cffc3': {
460 'lines_no': 1,
460 'lines_no': 1,
461 'changesets': [
461 'changesets': [
462 'c1214f7e79e02fc37156ff215cd71275450cffc3',
462 'c1214f7e79e02fc37156ff215cd71275450cffc3',
463 ],
463 ],
464 },
464 },
465 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': {
465 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': {
466 'lines_no': 21,
466 'lines_no': 21,
467 'changesets': [
467 'changesets': [
468 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
468 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
469 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
469 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
470 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
470 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
471 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
471 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
472 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
472 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
473 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
473 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
474 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
474 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
475 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
475 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
476 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
476 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
477 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
477 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
478 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
478 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
479 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
479 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
480 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
480 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
481 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
481 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
482 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
482 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
483 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
483 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
484 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
484 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
485 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
485 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
486 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
486 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
487 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
487 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
488 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
488 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
489 ],
489 ],
490 },
490 },
491 'e29b67bd158580fc90fc5e9111240b90e6e86064': {
491 'e29b67bd158580fc90fc5e9111240b90e6e86064': {
492 'lines_no': 32,
492 'lines_no': 32,
493 'changesets': [
493 'changesets': [
494 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
494 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
495 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
495 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
496 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
496 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
497 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
497 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
498 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
498 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
499 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
499 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
500 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
500 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
501 '54000345d2e78b03a99d561399e8e548de3f3203',
501 '54000345d2e78b03a99d561399e8e548de3f3203',
502 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
502 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
503 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
503 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
504 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
504 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
505 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
505 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
506 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
506 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
507 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
507 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
508 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
508 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
509 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
509 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
510 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
510 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
511 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
511 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
512 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
512 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
513 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
513 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
514 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
514 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
515 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
515 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
516 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
516 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
517 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
517 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
518 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
518 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
519 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
519 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
520 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
520 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
521 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
521 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
522 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
522 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
523 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
523 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
524 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
524 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
525 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
525 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
526 ],
526 ],
527 },
527 },
528 },
528 },
529 }
529 }
530
530
531 for fname, revision_dict in files.items():
531 for fname, revision_dict in files.items():
532 for rev, data in revision_dict.items():
532 for rev, data in revision_dict.items():
533 cs = self.repo.get_changeset(rev)
533 cs = self.repo.get_changeset(rev)
534
534
535 l1_1 = [x[1] for x in cs.get_file_annotate(fname)]
535 l1_1 = [x[1] for x in cs.get_file_annotate(fname)]
536 l1_2 = [x[2]().raw_id for x in cs.get_file_annotate(fname)]
536 l1_2 = [x[2]().raw_id for x in cs.get_file_annotate(fname)]
537 assert l1_1 == l1_2
537 assert l1_1 == l1_2
538 l1 = l1_1
538 l1 = l1_1
539 l2 = files[fname][rev]['changesets']
539 l2 = files[fname][rev]['changesets']
540 assert l1 == l2, "The lists of revisions for %s@rev %s " \
540 assert l1 == l2, "The lists of revisions for %s@rev %s " \
541 "from the annotation list should match each other, " \
541 "from the annotation list should match each other, " \
542 "got \n%s \nvs \n%s " % (fname, rev, l1, l2)
542 "got \n%s \nvs \n%s " % (fname, rev, l1, l2)
543
543
544 def test_files_state(self):
544 def test_files_state(self):
545 """
545 """
546 Tests state of FileNodes.
546 Tests state of FileNodes.
547 """
547 """
548 node = self.repo \
548 node = self.repo \
549 .get_changeset('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0') \
549 .get_changeset('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0') \
550 .get_node('vcs/utils/diffs.py')
550 .get_node('vcs/utils/diffs.py')
551 assert node.state == NodeState.ADDED
551 assert node.state == NodeState.ADDED
552 assert node.added
552 assert node.added
553 assert not node.changed
553 assert not node.changed
554 assert not node.not_changed
554 assert not node.not_changed
555 assert not node.removed
555 assert not node.removed
556
556
557 node = self.repo \
557 node = self.repo \
558 .get_changeset('33fa3223355104431402a888fa77a4e9956feb3e') \
558 .get_changeset('33fa3223355104431402a888fa77a4e9956feb3e') \
559 .get_node('.hgignore')
559 .get_node('.hgignore')
560 assert node.state == NodeState.CHANGED
560 assert node.state == NodeState.CHANGED
561 assert not node.added
561 assert not node.added
562 assert node.changed
562 assert node.changed
563 assert not node.not_changed
563 assert not node.not_changed
564 assert not node.removed
564 assert not node.removed
565
565
566 node = self.repo \
566 node = self.repo \
567 .get_changeset('e29b67bd158580fc90fc5e9111240b90e6e86064') \
567 .get_changeset('e29b67bd158580fc90fc5e9111240b90e6e86064') \
568 .get_node('setup.py')
568 .get_node('setup.py')
569 assert node.state == NodeState.NOT_CHANGED
569 assert node.state == NodeState.NOT_CHANGED
570 assert not node.added
570 assert not node.added
571 assert not node.changed
571 assert not node.changed
572 assert node.not_changed
572 assert node.not_changed
573 assert not node.removed
573 assert not node.removed
574
574
575 # If a node has the REMOVED state, then trying to fetch it raises a
575 # If a node has the REMOVED state, then trying to fetch it raises a
576 # ChangesetError exception
576 # ChangesetError exception
577 chset = self.repo.get_changeset(
577 chset = self.repo.get_changeset(
578 'fa6600f6848800641328adbf7811fd2372c02ab2')
578 'fa6600f6848800641328adbf7811fd2372c02ab2')
579 path = 'vcs/backends/BaseRepository.py'
579 path = 'vcs/backends/BaseRepository.py'
580 with pytest.raises(NodeDoesNotExistError):
580 with pytest.raises(NodeDoesNotExistError):
581 chset.get_node(path)
581 chset.get_node(path)
582 # but it would be one of ``removed`` (changeset's attribute)
582 # but it would be one of ``removed`` (changeset's attribute)
583 assert path in [rf.path for rf in chset.removed]
583 assert path in [rf.path for rf in chset.removed]
584
584
585 chset = self.repo.get_changeset(
585 chset = self.repo.get_changeset(
586 '54386793436c938cff89326944d4c2702340037d')
586 '54386793436c938cff89326944d4c2702340037d')
587 changed = ['setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
587 changed = ['setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
588 'vcs/nodes.py']
588 'vcs/nodes.py']
589 assert set(changed) == set([f.path for f in chset.changed])
589 assert set(changed) == set([f.path for f in chset.changed])
590
590
591 def test_commit_message_is_unicode(self):
591 def test_commit_message_is_str(self):
592 for cs in self.repo:
592 for cs in self.repo:
593 assert isinstance(cs.message, unicode)
593 assert isinstance(cs.message, str)
594
594
595 def test_changeset_author_is_unicode(self):
595 def test_changeset_author_is_str(self):
596 for cs in self.repo:
596 for cs in self.repo:
597 assert isinstance(cs.author, unicode)
597 assert isinstance(cs.author, str)
598
598
599 def test_repo_files_content_is_bytes(self):
599 def test_repo_files_content_is_bytes(self):
600 changeset = self.repo.get_changeset()
600 changeset = self.repo.get_changeset()
601 for node in changeset.get_node('/'):
601 for node in changeset.get_node('/'):
602 if node.is_file():
602 if node.is_file():
603 assert isinstance(node.content, bytes)
603 assert isinstance(node.content, bytes)
604
604
605 def test_wrong_path(self):
605 def test_wrong_path(self):
606 # There is a 'setup.py' in the root dir, but not at this path:
606 # There is a 'setup.py' in the root dir, but not at this path:
607 path = 'foo/bar/setup.py'
607 path = 'foo/bar/setup.py'
608 tip = self.repo.get_changeset()
608 tip = self.repo.get_changeset()
609 with pytest.raises(VCSError):
609 with pytest.raises(VCSError):
610 tip.get_node(path)
610 tip.get_node(path)
611
611
612 def test_author_email(self):
612 def test_author_email(self):
613 assert 'marcin@python-blog.com' == self.repo.get_changeset('c1214f7e79e02fc37156ff215cd71275450cffc3').author_email
613 assert 'marcin@python-blog.com' == self.repo.get_changeset('c1214f7e79e02fc37156ff215cd71275450cffc3').author_email
614 assert 'lukasz.balcerzak@python-center.pl' == self.repo.get_changeset('ff7ca51e58c505fec0dd2491de52c622bb7a806b').author_email
614 assert 'lukasz.balcerzak@python-center.pl' == self.repo.get_changeset('ff7ca51e58c505fec0dd2491de52c622bb7a806b').author_email
615 assert '' == self.repo.get_changeset('8430a588b43b5d6da365400117c89400326e7992').author_email
615 assert '' == self.repo.get_changeset('8430a588b43b5d6da365400117c89400326e7992').author_email
616
616
617 def test_author_username(self):
617 def test_author_username(self):
618 assert 'Marcin Kuzminski' == self.repo.get_changeset('c1214f7e79e02fc37156ff215cd71275450cffc3').author_name
618 assert 'Marcin Kuzminski' == self.repo.get_changeset('c1214f7e79e02fc37156ff215cd71275450cffc3').author_name
619 assert 'Lukasz Balcerzak' == self.repo.get_changeset('ff7ca51e58c505fec0dd2491de52c622bb7a806b').author_name
619 assert 'Lukasz Balcerzak' == self.repo.get_changeset('ff7ca51e58c505fec0dd2491de52c622bb7a806b').author_name
620 assert 'marcink none@none' == self.repo.get_changeset('8430a588b43b5d6da365400117c89400326e7992').author_name
620 assert 'marcink none@none' == self.repo.get_changeset('8430a588b43b5d6da365400117c89400326e7992').author_name
621
621
622
622
623 class TestGitSpecificWithRepo(_BackendTestMixin):
623 class TestGitSpecificWithRepo(_BackendTestMixin):
624 backend_alias = 'git'
624 backend_alias = 'git'
625
625
626 @classmethod
626 @classmethod
627 def _get_commits(cls):
627 def _get_commits(cls):
628 return [
628 return [
629 {
629 {
630 'message': 'Initial',
630 'message': 'Initial',
631 'author': 'Joe Doe <joe.doe@example.com>',
631 'author': 'Joe Doe <joe.doe@example.com>',
632 'date': datetime.datetime(2010, 1, 1, 20),
632 'date': datetime.datetime(2010, 1, 1, 20),
633 'added': [
633 'added': [
634 FileNode('foobar/static/js/admin/base.js', content='base'),
634 FileNode('foobar/static/js/admin/base.js', content='base'),
635 FileNode('foobar/static/admin', content='admin',
635 FileNode('foobar/static/admin', content='admin',
636 mode=0o120000), # this is a link
636 mode=0o120000), # this is a link
637 FileNode('foo', content='foo'),
637 FileNode('foo', content='foo'),
638 ],
638 ],
639 },
639 },
640 {
640 {
641 'message': 'Second',
641 'message': 'Second',
642 'author': 'Joe Doe <joe.doe@example.com>',
642 'author': 'Joe Doe <joe.doe@example.com>',
643 'date': datetime.datetime(2010, 1, 1, 22),
643 'date': datetime.datetime(2010, 1, 1, 22),
644 'added': [
644 'added': [
645 FileNode('foo2', content='foo2'),
645 FileNode('foo2', content='foo2'),
646 ],
646 ],
647 },
647 },
648 ]
648 ]
649
649
650 def test_paths_slow_traversing(self):
650 def test_paths_slow_traversing(self):
651 cs = self.repo.get_changeset()
651 cs = self.repo.get_changeset()
652 assert cs.get_node('foobar').get_node('static').get_node('js').get_node('admin').get_node('base.js').content == b'base'
652 assert cs.get_node('foobar').get_node('static').get_node('js').get_node('admin').get_node('base.js').content == b'base'
653
653
654 def test_paths_fast_traversing(self):
654 def test_paths_fast_traversing(self):
655 cs = self.repo.get_changeset()
655 cs = self.repo.get_changeset()
656 assert cs.get_node('foobar/static/js/admin/base.js').content == b'base'
656 assert cs.get_node('foobar/static/js/admin/base.js').content == b'base'
657
657
658 def test_workdir_get_branch(self):
658 def test_workdir_get_branch(self):
659 self.repo.run_git_command(['checkout', '-b', 'production'])
659 self.repo.run_git_command(['checkout', '-b', 'production'])
660 # Regression test: one of the following would fail if we didn't check
660 # Regression test: one of the following would fail if we didn't check
661 # the .git/HEAD file
661 # the .git/HEAD file
662 self.repo.run_git_command(['checkout', 'production'])
662 self.repo.run_git_command(['checkout', 'production'])
663 assert self.repo.workdir.get_branch() == 'production'
663 assert self.repo.workdir.get_branch() == 'production'
664 self.repo.run_git_command(['checkout', 'master'])
664 self.repo.run_git_command(['checkout', 'master'])
665 assert self.repo.workdir.get_branch() == 'master'
665 assert self.repo.workdir.get_branch() == 'master'
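# Editor's note (illustrative sketch, not part of this changeset; the helper
# name and parsing are assumptions, not the actual Kallithea implementation):
# the comment above hints that workdir.get_branch() consults the .git/HEAD
# file, which for a checked-out branch contains a symbolic ref such as
# 'ref: refs/heads/production', so a minimal reader could look like:
#
#     import os
#
#     def current_branch(repo_path):
#         with open(os.path.join(repo_path, '.git', 'HEAD')) as f:
#             head = f.read().strip()
#         return head.rpartition('/')[2] if head.startswith('ref:') else None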
666
666
667 def test_get_diff_runs_git_command_with_hashes(self):
667 def test_get_diff_runs_git_command_with_hashes(self):
668 self.repo._run_git_command = mock.Mock(return_value=(b'', b''))
668 self.repo._run_git_command = mock.Mock(return_value=(b'', b''))
669 self.repo.get_diff(0, 1)
669 self.repo.get_diff(0, 1)
670 self.repo._run_git_command.assert_called_once_with(
670 self.repo._run_git_command.assert_called_once_with(
671 ['diff', '-U3', '--full-index', '--binary', '-p', '-M', '--abbrev=40',
671 ['diff', '-U3', '--full-index', '--binary', '-p', '-M', '--abbrev=40',
672 self.repo._get_revision(0), self.repo._get_revision(1)], cwd=self.repo.path)
672 self.repo._get_revision(0), self.repo._get_revision(1)], cwd=self.repo.path)
673
673
674 def test_get_diff_runs_git_command_with_str_hashes(self):
674 def test_get_diff_runs_git_command_with_str_hashes(self):
675 self.repo._run_git_command = mock.Mock(return_value=(b'', b''))
675 self.repo._run_git_command = mock.Mock(return_value=(b'', b''))
676 self.repo.get_diff(self.repo.EMPTY_CHANGESET, 1)
676 self.repo.get_diff(self.repo.EMPTY_CHANGESET, 1)
677 self.repo._run_git_command.assert_called_once_with(
677 self.repo._run_git_command.assert_called_once_with(
678 ['show', '-U3', '--full-index', '--binary', '-p', '-M', '--abbrev=40',
678 ['show', '-U3', '--full-index', '--binary', '-p', '-M', '--abbrev=40',
679 self.repo._get_revision(1)], cwd=self.repo.path)
679 self.repo._get_revision(1)], cwd=self.repo.path)
680
680
681 def test_get_diff_runs_git_command_with_path_if_its_given(self):
681 def test_get_diff_runs_git_command_with_path_if_its_given(self):
682 self.repo._run_git_command = mock.Mock(return_value=(b'', b''))
682 self.repo._run_git_command = mock.Mock(return_value=(b'', b''))
683 self.repo.get_diff(0, 1, 'foo')
683 self.repo.get_diff(0, 1, 'foo')
684 self.repo._run_git_command.assert_called_once_with(
684 self.repo._run_git_command.assert_called_once_with(
685 ['diff', '-U3', '--full-index', '--binary', '-p', '-M', '--abbrev=40',
685 ['diff', '-U3', '--full-index', '--binary', '-p', '-M', '--abbrev=40',
686 self.repo._get_revision(0), self.repo._get_revision(1), '--', 'foo'], cwd=self.repo.path)
686 self.repo._get_revision(0), self.repo._get_revision(1), '--', 'foo'], cwd=self.repo.path)
687
687
688 def test_get_diff_does_not_sanitize_valid_context(self):
688 def test_get_diff_does_not_sanitize_valid_context(self):
689 almost_overflowed_long_int = 2**31-1
689 almost_overflowed_long_int = 2**31-1
690
690
691 self.repo._run_git_command = mock.Mock(return_value=(b'', b''))
691 self.repo._run_git_command = mock.Mock(return_value=(b'', b''))
692 self.repo.get_diff(0, 1, 'foo', context=almost_overflowed_long_int)
692 self.repo.get_diff(0, 1, 'foo', context=almost_overflowed_long_int)
693 self.repo._run_git_command.assert_called_once_with(
693 self.repo._run_git_command.assert_called_once_with(
694 ['diff', '-U' + str(almost_overflowed_long_int), '--full-index', '--binary', '-p', '-M', '--abbrev=40',
694 ['diff', '-U' + str(almost_overflowed_long_int), '--full-index', '--binary', '-p', '-M', '--abbrev=40',
695 self.repo._get_revision(0), self.repo._get_revision(1), '--', 'foo'], cwd=self.repo.path)
695 self.repo._get_revision(0), self.repo._get_revision(1), '--', 'foo'], cwd=self.repo.path)
696
696
697 def test_get_diff_sanitizes_overflowing_context(self):
697 def test_get_diff_sanitizes_overflowing_context(self):
698 overflowed_long_int = 2**31
698 overflowed_long_int = 2**31
699 sanitized_overflowed_long_int = overflowed_long_int-1
699 sanitized_overflowed_long_int = overflowed_long_int-1
700
700
701 self.repo._run_git_command = mock.Mock(return_value=(b'', b''))
701 self.repo._run_git_command = mock.Mock(return_value=(b'', b''))
702 self.repo.get_diff(0, 1, 'foo', context=overflowed_long_int)
702 self.repo.get_diff(0, 1, 'foo', context=overflowed_long_int)
703
703
704 self.repo._run_git_command.assert_called_once_with(
704 self.repo._run_git_command.assert_called_once_with(
705 ['diff', '-U' + str(sanitized_overflowed_long_int), '--full-index', '--binary', '-p', '-M', '--abbrev=40',
705 ['diff', '-U' + str(sanitized_overflowed_long_int), '--full-index', '--binary', '-p', '-M', '--abbrev=40',
706 self.repo._get_revision(0), self.repo._get_revision(1), '--', 'foo'], cwd=self.repo.path)
706 self.repo._get_revision(0), self.repo._get_revision(1), '--', 'foo'], cwd=self.repo.path)
707
707
708 def test_get_diff_does_not_sanitize_zero_context(self):
708 def test_get_diff_does_not_sanitize_zero_context(self):
709 zero_context = 0
709 zero_context = 0
710
710
711 self.repo._run_git_command = mock.Mock(return_value=(b'', b''))
711 self.repo._run_git_command = mock.Mock(return_value=(b'', b''))
712 self.repo.get_diff(0, 1, 'foo', context=zero_context)
712 self.repo.get_diff(0, 1, 'foo', context=zero_context)
713
713
714 self.repo._run_git_command.assert_called_once_with(
714 self.repo._run_git_command.assert_called_once_with(
715 ['diff', '-U' + str(zero_context), '--full-index', '--binary', '-p', '-M', '--abbrev=40',
715 ['diff', '-U' + str(zero_context), '--full-index', '--binary', '-p', '-M', '--abbrev=40',
716 self.repo._get_revision(0), self.repo._get_revision(1), '--', 'foo'], cwd=self.repo.path)
716 self.repo._get_revision(0), self.repo._get_revision(1), '--', 'foo'], cwd=self.repo.path)
717
717
718 def test_get_diff_sanitizes_negative_context(self):
718 def test_get_diff_sanitizes_negative_context(self):
719 negative_context = -10
719 negative_context = -10
720
720
721 self.repo._run_git_command = mock.Mock(return_value=(b'', b''))
721 self.repo._run_git_command = mock.Mock(return_value=(b'', b''))
722 self.repo.get_diff(0, 1, 'foo', context=negative_context)
722 self.repo.get_diff(0, 1, 'foo', context=negative_context)
723
723
724 self.repo._run_git_command.assert_called_once_with(
724 self.repo._run_git_command.assert_called_once_with(
725 ['diff', '-U0', '--full-index', '--binary', '-p', '-M', '--abbrev=40',
725 ['diff', '-U0', '--full-index', '--binary', '-p', '-M', '--abbrev=40',
726 self.repo._get_revision(0), self.repo._get_revision(1), '--', 'foo'], cwd=self.repo.path)
726 self.repo._get_revision(0), self.repo._get_revision(1), '--', 'foo'], cwd=self.repo.path)
727
727
728
728
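# Editor's note (minimal sketch inferred from the four context tests above; not
# part of this changeset and not the actual Kallithea code): get_diff() appears
# to clamp the value passed to git's -U option into the signed 32-bit range and
# to floor negative values at zero, roughly like:
#
#     def _sanitize_context(context):
#         max_context = 2 ** 31 - 1   # largest value accepted without overflow
#         if context > max_context:
#             return max_context
#         if context < 0:
#             return 0
#         return context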
729 class TestGitRegression(_BackendTestMixin):
729 class TestGitRegression(_BackendTestMixin):
730 backend_alias = 'git'
730 backend_alias = 'git'
731
731
732 @classmethod
732 @classmethod
733 def _get_commits(cls):
733 def _get_commits(cls):
734 return [
734 return [
735 {
735 {
736 'message': 'Initial',
736 'message': 'Initial',
737 'author': 'Joe Doe <joe.doe@example.com>',
737 'author': 'Joe Doe <joe.doe@example.com>',
738 'date': datetime.datetime(2010, 1, 1, 20),
738 'date': datetime.datetime(2010, 1, 1, 20),
739 'added': [
739 'added': [
740 FileNode('bot/__init__.py', content='base'),
740 FileNode('bot/__init__.py', content='base'),
741 FileNode('bot/templates/404.html', content='base'),
741 FileNode('bot/templates/404.html', content='base'),
742 FileNode('bot/templates/500.html', content='base'),
742 FileNode('bot/templates/500.html', content='base'),
743 ],
743 ],
744 },
744 },
745 {
745 {
746 'message': 'Second',
746 'message': 'Second',
747 'author': 'Joe Doe <joe.doe@example.com>',
747 'author': 'Joe Doe <joe.doe@example.com>',
748 'date': datetime.datetime(2010, 1, 1, 22),
748 'date': datetime.datetime(2010, 1, 1, 22),
749 'added': [
749 'added': [
750 FileNode('bot/build/migrations/1.py', content='foo2'),
750 FileNode('bot/build/migrations/1.py', content='foo2'),
751 FileNode('bot/build/migrations/2.py', content='foo2'),
751 FileNode('bot/build/migrations/2.py', content='foo2'),
752 FileNode('bot/build/static/templates/f.html', content='foo2'),
752 FileNode('bot/build/static/templates/f.html', content='foo2'),
753 FileNode('bot/build/static/templates/f1.html', content='foo2'),
753 FileNode('bot/build/static/templates/f1.html', content='foo2'),
754 FileNode('bot/build/templates/err.html', content='foo2'),
754 FileNode('bot/build/templates/err.html', content='foo2'),
755 FileNode('bot/build/templates/err2.html', content='foo2'),
755 FileNode('bot/build/templates/err2.html', content='foo2'),
756 ],
756 ],
757 },
757 },
758 ]
758 ]
759
759
760 def test_similar_paths(self):
760 def test_similar_paths(self):
761 cs = self.repo.get_changeset()
761 cs = self.repo.get_changeset()
762 paths = lambda *n: [x.path for x in n]
762 paths = lambda *n: [x.path for x in n]
763 assert paths(*cs.get_nodes('bot')) == ['bot/build', 'bot/templates', 'bot/__init__.py']
763 assert paths(*cs.get_nodes('bot')) == ['bot/build', 'bot/templates', 'bot/__init__.py']
764 assert paths(*cs.get_nodes('bot/build')) == ['bot/build/migrations', 'bot/build/static', 'bot/build/templates']
764 assert paths(*cs.get_nodes('bot/build')) == ['bot/build/migrations', 'bot/build/static', 'bot/build/templates']
765 assert paths(*cs.get_nodes('bot/build/static')) == ['bot/build/static/templates']
765 assert paths(*cs.get_nodes('bot/build/static')) == ['bot/build/static/templates']
766 # this get_nodes call below is the one that causes trouble!
766 # this get_nodes call below is the one that causes trouble!
767 assert paths(*cs.get_nodes('bot/build/static/templates')) == ['bot/build/static/templates/f.html', 'bot/build/static/templates/f1.html']
767 assert paths(*cs.get_nodes('bot/build/static/templates')) == ['bot/build/static/templates/f.html', 'bot/build/static/templates/f1.html']
768 assert paths(*cs.get_nodes('bot/build/templates')) == ['bot/build/templates/err.html', 'bot/build/templates/err2.html']
768 assert paths(*cs.get_nodes('bot/build/templates')) == ['bot/build/templates/err.html', 'bot/build/templates/err2.html']
769 assert paths(*cs.get_nodes('bot/templates/')) == ['bot/templates/404.html', 'bot/templates/500.html']
769 assert paths(*cs.get_nodes('bot/templates/')) == ['bot/templates/404.html', 'bot/templates/500.html']
770
770
771
771
772 class TestGitHooks(object):
772 class TestGitHooks(object):
773 """
773 """
774 Tests related to hook functionality of Git repositories.
774 Tests related to hook functionality of Git repositories.
775 """
775 """
776
776
777 def setup_method(self):
777 def setup_method(self):
778 # For each run we want a fresh repo.
778 # For each run we want a fresh repo.
779 self.repo_directory = get_new_dir("githookrepo")
779 self.repo_directory = get_new_dir("githookrepo")
780 self.repo = GitRepository(self.repo_directory, create=True)
780 self.repo = GitRepository(self.repo_directory, create=True)
781
781
782 # Create a dictionary where keys are hook names, and values are paths to
782 # Create a dictionary where keys are hook names, and values are paths to
783 # them in the non-bare repo. This deduplicates code in the tests a bit.
783 # them in the non-bare repo. This deduplicates code in the tests a bit.
784 self.kallithea_hooks = {
784 self.kallithea_hooks = {
785 "pre-receive": os.path.join(self.repo.path, '.git', 'hooks', "pre-receive"),
785 "pre-receive": os.path.join(self.repo.path, '.git', 'hooks', "pre-receive"),
786 "post-receive": os.path.join(self.repo.path, '.git', 'hooks', "post-receive"),
786 "post-receive": os.path.join(self.repo.path, '.git', 'hooks', "post-receive"),
787 }
787 }
788
788
789 def test_hooks_created_if_missing(self):
789 def test_hooks_created_if_missing(self):
790 """
790 """
791 Tests if hooks are installed in repository if they are missing.
791 Tests if hooks are installed in repository if they are missing.
792 """
792 """
793
793
794 for hook, hook_path in self.kallithea_hooks.items():
794 for hook, hook_path in self.kallithea_hooks.items():
795 if os.path.exists(hook_path):
795 if os.path.exists(hook_path):
796 os.remove(hook_path)
796 os.remove(hook_path)
797
797
798 ScmModel().install_git_hooks(repo=self.repo)
798 ScmModel().install_git_hooks(repo=self.repo)
799
799
800 for hook, hook_path in self.kallithea_hooks.items():
800 for hook, hook_path in self.kallithea_hooks.items():
801 assert os.path.exists(hook_path)
801 assert os.path.exists(hook_path)
802
802
803 def test_kallithea_hooks_updated(self):
803 def test_kallithea_hooks_updated(self):
804 """
804 """
805 Tests if hooks are updated if they are Kallithea hooks already.
805 Tests if hooks are updated if they are Kallithea hooks already.
806 """
806 """
807
807
808 for hook, hook_path in self.kallithea_hooks.items():
808 for hook, hook_path in self.kallithea_hooks.items():
809 with open(hook_path, "w") as f:
809 with open(hook_path, "w") as f:
810 f.write("KALLITHEA_HOOK_VER=0.0.0\nJUST_BOGUS")
810 f.write("KALLITHEA_HOOK_VER=0.0.0\nJUST_BOGUS")
811
811
812 ScmModel().install_git_hooks(repo=self.repo)
812 ScmModel().install_git_hooks(repo=self.repo)
813
813
814 for hook, hook_path in self.kallithea_hooks.items():
814 for hook, hook_path in self.kallithea_hooks.items():
815 with open(hook_path) as f:
815 with open(hook_path) as f:
816 assert "JUST_BOGUS" not in f.read()
816 assert "JUST_BOGUS" not in f.read()
817
817
818 def test_custom_hooks_untouched(self):
818 def test_custom_hooks_untouched(self):
819 """
819 """
820 Tests if hooks are left untouched if they are not Kallithea hooks.
820 Tests if hooks are left untouched if they are not Kallithea hooks.
821 """
821 """
822
822
823 for hook, hook_path in self.kallithea_hooks.items():
823 for hook, hook_path in self.kallithea_hooks.items():
824 with open(hook_path, "w") as f:
824 with open(hook_path, "w") as f:
825 f.write("#!/bin/bash\n#CUSTOM_HOOK")
825 f.write("#!/bin/bash\n#CUSTOM_HOOK")
826
826
827 ScmModel().install_git_hooks(repo=self.repo)
827 ScmModel().install_git_hooks(repo=self.repo)
828
828
829 for hook, hook_path in self.kallithea_hooks.items():
829 for hook, hook_path in self.kallithea_hooks.items():
830 with open(hook_path) as f:
830 with open(hook_path) as f:
831 assert "CUSTOM_HOOK" in f.read()
831 assert "CUSTOM_HOOK" in f.read()
832
832
833 def test_custom_hooks_forced_update(self):
833 def test_custom_hooks_forced_update(self):
834 """
834 """
835 Tests if hooks are forcefully updated even though they are custom hooks.
835 Tests if hooks are forcefully updated even though they are custom hooks.
836 """
836 """
837
837
838 for hook, hook_path in self.kallithea_hooks.items():
838 for hook, hook_path in self.kallithea_hooks.items():
839 with open(hook_path, "w") as f:
839 with open(hook_path, "w") as f:
840 f.write("#!/bin/bash\n#CUSTOM_HOOK")
840 f.write("#!/bin/bash\n#CUSTOM_HOOK")
841
841
842 ScmModel().install_git_hooks(repo=self.repo, force_create=True)
842 ScmModel().install_git_hooks(repo=self.repo, force_create=True)
843
843
844 for hook, hook_path in self.kallithea_hooks.items():
844 for hook, hook_path in self.kallithea_hooks.items():
845 with open(hook_path) as f:
845 with open(hook_path) as f:
846 assert "KALLITHEA_HOOK_VER" in f.read()
846 assert "KALLITHEA_HOOK_VER" in f.read()
@@ -1,591 +1,591 b''
1 import os
1 import os
2
2
3 import mock
3 import mock
4 import pytest
4 import pytest
5
5
6 from kallithea.lib.vcs.backends.hg import MercurialChangeset, MercurialRepository
6 from kallithea.lib.vcs.backends.hg import MercurialChangeset, MercurialRepository
7 from kallithea.lib.vcs.exceptions import NodeDoesNotExistError, RepositoryError, VCSError
7 from kallithea.lib.vcs.exceptions import NodeDoesNotExistError, RepositoryError, VCSError
8 from kallithea.lib.vcs.nodes import NodeKind, NodeState
8 from kallithea.lib.vcs.nodes import NodeKind, NodeState
9 from kallithea.tests.vcs.conf import TEST_HG_REPO, TEST_HG_REPO_CLONE, TEST_HG_REPO_PULL, TESTS_TMP_PATH
9 from kallithea.tests.vcs.conf import TEST_HG_REPO, TEST_HG_REPO_CLONE, TEST_HG_REPO_PULL, TESTS_TMP_PATH
10
10
11
11
12 class TestMercurialRepository(object):
12 class TestMercurialRepository(object):
13
13
14 def __check_for_existing_repo(self):
14 def __check_for_existing_repo(self):
15 if os.path.exists(TEST_HG_REPO_CLONE):
15 if os.path.exists(TEST_HG_REPO_CLONE):
16 pytest.fail('Cannot test mercurial clone repo as location %s already '
16 pytest.fail('Cannot test mercurial clone repo as location %s already '
17 'exists. You should manually remove it first.'
17 'exists. You should manually remove it first.'
18 % TEST_HG_REPO_CLONE)
18 % TEST_HG_REPO_CLONE)
19
19
20 def setup_method(self):
20 def setup_method(self):
21 self.repo = MercurialRepository(TEST_HG_REPO)
21 self.repo = MercurialRepository(TEST_HG_REPO)
22
22
23 def test_wrong_repo_path(self):
23 def test_wrong_repo_path(self):
24 wrong_repo_path = os.path.join(TESTS_TMP_PATH, 'errorrepo')
24 wrong_repo_path = os.path.join(TESTS_TMP_PATH, 'errorrepo')
25 with pytest.raises(RepositoryError):
25 with pytest.raises(RepositoryError):
26 MercurialRepository(wrong_repo_path)
26 MercurialRepository(wrong_repo_path)
27
27
28 def test_unicode_path_repo(self):
28 def test_unicode_path_repo(self):
29 with pytest.raises(VCSError):
29 with pytest.raises(VCSError):
30 MercurialRepository(u'iShouldFail')
30 MercurialRepository(u'iShouldFail')
31
31
32 def test_repo_clone(self):
32 def test_repo_clone(self):
33 self.__check_for_existing_repo()
33 self.__check_for_existing_repo()
34 repo = MercurialRepository(TEST_HG_REPO)
34 repo = MercurialRepository(TEST_HG_REPO)
35 repo_clone = MercurialRepository(TEST_HG_REPO_CLONE,
35 repo_clone = MercurialRepository(TEST_HG_REPO_CLONE,
36 src_url=TEST_HG_REPO, update_after_clone=True)
36 src_url=TEST_HG_REPO, update_after_clone=True)
37 assert len(repo.revisions) == len(repo_clone.revisions)
37 assert len(repo.revisions) == len(repo_clone.revisions)
38 # Checking hashes of changesets should be enough
38 # Checking hashes of changesets should be enough
39 for changeset in repo.get_changesets():
39 for changeset in repo.get_changesets():
40 raw_id = changeset.raw_id
40 raw_id = changeset.raw_id
41 assert raw_id == repo_clone.get_changeset(raw_id).raw_id
41 assert raw_id == repo_clone.get_changeset(raw_id).raw_id
42
42
43 def test_repo_clone_with_update(self):
43 def test_repo_clone_with_update(self):
44 repo = MercurialRepository(TEST_HG_REPO)
44 repo = MercurialRepository(TEST_HG_REPO)
45 repo_clone = MercurialRepository(TEST_HG_REPO_CLONE + '_w_update',
45 repo_clone = MercurialRepository(TEST_HG_REPO_CLONE + '_w_update',
46 src_url=TEST_HG_REPO, update_after_clone=True)
46 src_url=TEST_HG_REPO, update_after_clone=True)
47 assert len(repo.revisions) == len(repo_clone.revisions)
47 assert len(repo.revisions) == len(repo_clone.revisions)
48
48
49 # check if current workdir was updated
49 # check if current workdir was updated
50 assert os.path.isfile(
50 assert os.path.isfile(
51 os.path.join(
51 os.path.join(
52 TEST_HG_REPO_CLONE + '_w_update', 'MANIFEST.in'
52 TEST_HG_REPO_CLONE + '_w_update', 'MANIFEST.in'
53 )
53 )
54 )
54 )
55
55
56 def test_repo_clone_without_update(self):
56 def test_repo_clone_without_update(self):
57 repo = MercurialRepository(TEST_HG_REPO)
57 repo = MercurialRepository(TEST_HG_REPO)
58 repo_clone = MercurialRepository(TEST_HG_REPO_CLONE + '_wo_update',
58 repo_clone = MercurialRepository(TEST_HG_REPO_CLONE + '_wo_update',
59 src_url=TEST_HG_REPO, update_after_clone=False)
59 src_url=TEST_HG_REPO, update_after_clone=False)
60 assert len(repo.revisions) == len(repo_clone.revisions)
60 assert len(repo.revisions) == len(repo_clone.revisions)
61 assert not os.path.isfile(
61 assert not os.path.isfile(
62 os.path.join(
62 os.path.join(
63 TEST_HG_REPO_CLONE + '_wo_update', 'MANIFEST.in'
63 TEST_HG_REPO_CLONE + '_wo_update', 'MANIFEST.in'
64 )
64 )
65 )
65 )
66
66
67 def test_pull(self):
67 def test_pull(self):
68 if os.path.exists(TEST_HG_REPO_PULL):
68 if os.path.exists(TEST_HG_REPO_PULL):
69 pytest.fail('Cannot test mercurial pull command as location %s '
69 pytest.fail('Cannot test mercurial pull command as location %s '
70 'already exists. You should manually remove it first'
70 'already exists. You should manually remove it first'
71 % TEST_HG_REPO_PULL)
71 % TEST_HG_REPO_PULL)
72 repo_new = MercurialRepository(TEST_HG_REPO_PULL, create=True)
72 repo_new = MercurialRepository(TEST_HG_REPO_PULL, create=True)
73 assert len(self.repo.revisions) > len(repo_new.revisions)
73 assert len(self.repo.revisions) > len(repo_new.revisions)
74
74
75 repo_new.pull(self.repo.path)
75 repo_new.pull(self.repo.path)
76 repo_new = MercurialRepository(TEST_HG_REPO_PULL)
76 repo_new = MercurialRepository(TEST_HG_REPO_PULL)
77 assert len(self.repo.revisions) == len(repo_new.revisions)
77 assert len(self.repo.revisions) == len(repo_new.revisions)
78
78
79 def test_revisions(self):
79 def test_revisions(self):
80 # there are 21 revisions at bitbucket now,
80 # there are 21 revisions at bitbucket now,
81 # so we can assume they will be available from now on
81 # so we can assume they will be available from now on
82 subset = set(['b986218ba1c9b0d6a259fac9b050b1724ed8e545',
82 subset = set(['b986218ba1c9b0d6a259fac9b050b1724ed8e545',
83 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
83 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
84 '6cba7170863a2411822803fa77a0a264f1310b35',
84 '6cba7170863a2411822803fa77a0a264f1310b35',
85 '56349e29c2af3ac913b28bde9a2c6154436e615b',
85 '56349e29c2af3ac913b28bde9a2c6154436e615b',
86 '2dda4e345facb0ccff1a191052dd1606dba6781d',
86 '2dda4e345facb0ccff1a191052dd1606dba6781d',
87 '6fff84722075f1607a30f436523403845f84cd9e',
87 '6fff84722075f1607a30f436523403845f84cd9e',
88 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
88 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
89 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
89 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
90 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
90 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
91 'be90031137367893f1c406e0a8683010fd115b79',
91 'be90031137367893f1c406e0a8683010fd115b79',
92 'db8e58be770518cbb2b1cdfa69146e47cd481481',
92 'db8e58be770518cbb2b1cdfa69146e47cd481481',
93 '84478366594b424af694a6c784cb991a16b87c21',
93 '84478366594b424af694a6c784cb991a16b87c21',
94 '17f8e105dddb9f339600389c6dc7175d395a535c',
94 '17f8e105dddb9f339600389c6dc7175d395a535c',
95 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
95 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
96 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
96 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
97 '786facd2c61deb9cf91e9534735124fb8fc11842',
97 '786facd2c61deb9cf91e9534735124fb8fc11842',
98 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
98 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
99 'aa6a0de05b7612707db567078e130a6cd114a9a7',
99 'aa6a0de05b7612707db567078e130a6cd114a9a7',
100 'eada5a770da98ab0dd7325e29d00e0714f228d09'
100 'eada5a770da98ab0dd7325e29d00e0714f228d09'
101 ])
101 ])
102 assert subset.issubset(set(self.repo.revisions))
102 assert subset.issubset(set(self.repo.revisions))
103
103
104 # check if we have the proper order of revisions
104 # check if we have the proper order of revisions
105 org = ['b986218ba1c9b0d6a259fac9b050b1724ed8e545',
105 org = ['b986218ba1c9b0d6a259fac9b050b1724ed8e545',
106 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
106 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
107 '6cba7170863a2411822803fa77a0a264f1310b35',
107 '6cba7170863a2411822803fa77a0a264f1310b35',
108 '56349e29c2af3ac913b28bde9a2c6154436e615b',
108 '56349e29c2af3ac913b28bde9a2c6154436e615b',
109 '2dda4e345facb0ccff1a191052dd1606dba6781d',
109 '2dda4e345facb0ccff1a191052dd1606dba6781d',
110 '6fff84722075f1607a30f436523403845f84cd9e',
110 '6fff84722075f1607a30f436523403845f84cd9e',
111 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
111 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
112 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
112 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
113 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
113 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
114 'be90031137367893f1c406e0a8683010fd115b79',
114 'be90031137367893f1c406e0a8683010fd115b79',
115 'db8e58be770518cbb2b1cdfa69146e47cd481481',
115 'db8e58be770518cbb2b1cdfa69146e47cd481481',
116 '84478366594b424af694a6c784cb991a16b87c21',
116 '84478366594b424af694a6c784cb991a16b87c21',
117 '17f8e105dddb9f339600389c6dc7175d395a535c',
117 '17f8e105dddb9f339600389c6dc7175d395a535c',
118 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
118 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
119 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
119 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
120 '786facd2c61deb9cf91e9534735124fb8fc11842',
120 '786facd2c61deb9cf91e9534735124fb8fc11842',
121 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
121 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
122 'aa6a0de05b7612707db567078e130a6cd114a9a7',
122 'aa6a0de05b7612707db567078e130a6cd114a9a7',
123 'eada5a770da98ab0dd7325e29d00e0714f228d09',
123 'eada5a770da98ab0dd7325e29d00e0714f228d09',
124 '2c1885c735575ca478bf9e17b0029dca68824458',
124 '2c1885c735575ca478bf9e17b0029dca68824458',
125 'd9bcd465040bf869799b09ad732c04e0eea99fe9',
125 'd9bcd465040bf869799b09ad732c04e0eea99fe9',
126 '469e9c847fe1f6f7a697b8b25b4bc5b48780c1a7',
126 '469e9c847fe1f6f7a697b8b25b4bc5b48780c1a7',
127 '4fb8326d78e5120da2c7468dcf7098997be385da',
127 '4fb8326d78e5120da2c7468dcf7098997be385da',
128 '62b4a097164940bd66030c4db51687f3ec035eed',
128 '62b4a097164940bd66030c4db51687f3ec035eed',
129 '536c1a19428381cfea92ac44985304f6a8049569',
129 '536c1a19428381cfea92ac44985304f6a8049569',
130 '965e8ab3c44b070cdaa5bf727ddef0ada980ecc4',
130 '965e8ab3c44b070cdaa5bf727ddef0ada980ecc4',
131 '9bb326a04ae5d98d437dece54be04f830cf1edd9',
131 '9bb326a04ae5d98d437dece54be04f830cf1edd9',
132 'f8940bcb890a98c4702319fbe36db75ea309b475',
132 'f8940bcb890a98c4702319fbe36db75ea309b475',
133 'ff5ab059786ebc7411e559a2cc309dfae3625a3b',
133 'ff5ab059786ebc7411e559a2cc309dfae3625a3b',
134 '6b6ad5f82ad5bb6190037671bd254bd4e1f4bf08',
134 '6b6ad5f82ad5bb6190037671bd254bd4e1f4bf08',
135 'ee87846a61c12153b51543bf860e1026c6d3dcba', ]
135 'ee87846a61c12153b51543bf860e1026c6d3dcba', ]
136 assert org == self.repo.revisions[:31]
136 assert org == self.repo.revisions[:31]
137
137
138 def test_iter_slice(self):
138 def test_iter_slice(self):
139 sliced = list(self.repo[:10])
139 sliced = list(self.repo[:10])
140 itered = list(self.repo)[:10]
140 itered = list(self.repo)[:10]
141 assert sliced == itered
141 assert sliced == itered
142
142
143 def test_slicing(self):
143 def test_slicing(self):
144 # expected slice sizes: 4, 1, 5, 10, 95
144 # expected slice sizes: 4, 1, 5, 10, 95
145 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
145 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
146 (10, 20, 10), (5, 100, 95)]:
146 (10, 20, 10), (5, 100, 95)]:
147 revs = list(self.repo[sfrom:sto])
147 revs = list(self.repo[sfrom:sto])
148 assert len(revs) == size
148 assert len(revs) == size
149 assert revs[0] == self.repo.get_changeset(sfrom)
149 assert revs[0] == self.repo.get_changeset(sfrom)
150 assert revs[-1] == self.repo.get_changeset(sto - 1)
150 assert revs[-1] == self.repo.get_changeset(sto - 1)
151
151
152 def test_branches(self):
152 def test_branches(self):
153 # TODO: Need more tests here
153 # TODO: Need more tests here
154
154
155 # active branches
155 # active branches
156 assert 'default' in self.repo.branches
156 assert 'default' in self.repo.branches
157 assert 'stable' in self.repo.branches
157 assert 'stable' in self.repo.branches
158
158
159 # closed
159 # closed
160 assert 'git' in self.repo._get_branches(closed=True)
160 assert 'git' in self.repo._get_branches(closed=True)
161 assert 'web' in self.repo._get_branches(closed=True)
161 assert 'web' in self.repo._get_branches(closed=True)
162
162
163 for name, id in self.repo.branches.items():
163 for name, id in self.repo.branches.items():
164 assert isinstance(self.repo.get_changeset(id), MercurialChangeset)
164 assert isinstance(self.repo.get_changeset(id), MercurialChangeset)
165
165
166 def test_tip_in_tags(self):
166 def test_tip_in_tags(self):
167 # tip is always a tag
167 # tip is always a tag
168 assert 'tip' in self.repo.tags
168 assert 'tip' in self.repo.tags
169
169
170 def test_tip_changeset_in_tags(self):
170 def test_tip_changeset_in_tags(self):
171 tip = self.repo.get_changeset()
171 tip = self.repo.get_changeset()
172 assert self.repo.tags['tip'] == tip.raw_id
172 assert self.repo.tags['tip'] == tip.raw_id
173
173
174 def test_initial_changeset(self):
174 def test_initial_changeset(self):
175
175
176 init_chset = self.repo.get_changeset(0)
176 init_chset = self.repo.get_changeset(0)
177 assert init_chset.message == 'initial import'
177 assert init_chset.message == 'initial import'
178 assert init_chset.author == 'Marcin Kuzminski <marcin@python-blog.com>'
178 assert init_chset.author == 'Marcin Kuzminski <marcin@python-blog.com>'
179 assert sorted(init_chset._file_paths) == sorted([
179 assert sorted(init_chset._file_paths) == sorted([
180 'vcs/__init__.py',
180 'vcs/__init__.py',
181 'vcs/backends/BaseRepository.py',
181 'vcs/backends/BaseRepository.py',
182 'vcs/backends/__init__.py',
182 'vcs/backends/__init__.py',
183 ])
183 ])
184
184
185 assert sorted(init_chset._dir_paths) == sorted(['', 'vcs', 'vcs/backends'])
185 assert sorted(init_chset._dir_paths) == sorted(['', 'vcs', 'vcs/backends'])
186
186
187 with pytest.raises(NodeDoesNotExistError):
187 with pytest.raises(NodeDoesNotExistError):
188 init_chset.get_node(path='foobar')
188 init_chset.get_node(path='foobar')
189
189
190 node = init_chset.get_node('vcs/')
190 node = init_chset.get_node('vcs/')
191 assert hasattr(node, 'kind')
191 assert hasattr(node, 'kind')
192 assert node.kind == NodeKind.DIR
192 assert node.kind == NodeKind.DIR
193
193
194 node = init_chset.get_node('vcs')
194 node = init_chset.get_node('vcs')
195 assert hasattr(node, 'kind')
195 assert hasattr(node, 'kind')
196 assert node.kind == NodeKind.DIR
196 assert node.kind == NodeKind.DIR
197
197
198 node = init_chset.get_node('vcs/__init__.py')
198 node = init_chset.get_node('vcs/__init__.py')
199 assert hasattr(node, 'kind')
199 assert hasattr(node, 'kind')
200 assert node.kind == NodeKind.FILE
200 assert node.kind == NodeKind.FILE
201
201
202 def test_not_existing_changeset(self):
202 def test_not_existing_changeset(self):
203 # rawid
203 # rawid
204 with pytest.raises(RepositoryError):
204 with pytest.raises(RepositoryError):
205 self.repo.get_changeset('abcd' * 10)
205 self.repo.get_changeset('abcd' * 10)
206 # shortid
206 # shortid
207 with pytest.raises(RepositoryError):
207 with pytest.raises(RepositoryError):
208 self.repo.get_changeset('erro' * 4)
208 self.repo.get_changeset('erro' * 4)
209 # numeric
209 # numeric
210 with pytest.raises(RepositoryError):
210 with pytest.raises(RepositoryError):
211 self.repo.get_changeset(self.repo.count() + 1)
211 self.repo.get_changeset(self.repo.count() + 1)
212
212
213 # Small chance we ever get to this one
213 # Small chance we ever get to this one
214 revision = pow(2, 30)
214 revision = pow(2, 30)
215 with pytest.raises(RepositoryError):
215 with pytest.raises(RepositoryError):
216 self.repo.get_changeset(revision)
216 self.repo.get_changeset(revision)
217
217
218 def test_changeset10(self):
218 def test_changeset10(self):
219
219
220 chset10 = self.repo.get_changeset(10)
220 chset10 = self.repo.get_changeset(10)
221 readme = b"""===
221 readme = b"""===
222 VCS
222 VCS
223 ===
223 ===
224
224
225 Various Version Control System management abstraction layer for Python.
225 Various Version Control System management abstraction layer for Python.
226
226
227 Introduction
227 Introduction
228 ------------
228 ------------
229
229
230 TODO: To be written...
230 TODO: To be written...
231
231
232 """
232 """
233 node = chset10.get_node('README.rst')
233 node = chset10.get_node('README.rst')
234 assert node.kind == NodeKind.FILE
234 assert node.kind == NodeKind.FILE
235 assert node.content == readme
235 assert node.content == readme
236
236
237 @mock.patch('mercurial.mdiff.diffopts')
237 @mock.patch('mercurial.mdiff.diffopts')
238 def test_get_diff_does_not_sanitize_zero_context(self, mock_diffopts):
238 def test_get_diff_does_not_sanitize_zero_context(self, mock_diffopts):
239 zero_context = 0
239 zero_context = 0
240
240
241 self.repo.get_diff(0, 1, 'foo', context=zero_context)
241 self.repo.get_diff(0, 1, 'foo', context=zero_context)
242
242
243 mock_diffopts.assert_called_once_with(git=True, showfunc=True, ignorews=False, context=zero_context)
243 mock_diffopts.assert_called_once_with(git=True, showfunc=True, ignorews=False, context=zero_context)
244
244
245 @mock.patch('mercurial.mdiff.diffopts')
245 @mock.patch('mercurial.mdiff.diffopts')
246 def test_get_diff_sanitizes_negative_context(self, mock_diffopts):
246 def test_get_diff_sanitizes_negative_context(self, mock_diffopts):
247 negative_context = -10
247 negative_context = -10
248 zero_context = 0
248 zero_context = 0
249
249
250 self.repo.get_diff(0, 1, 'foo', context=negative_context)
250 self.repo.get_diff(0, 1, 'foo', context=negative_context)
251
251
252 mock_diffopts.assert_called_once_with(git=True, showfunc=True, ignorews=False, context=zero_context)
252 mock_diffopts.assert_called_once_with(git=True, showfunc=True, ignorews=False, context=zero_context)
253
253
254
254
255 class TestMercurialChangeset(object):
255 class TestMercurialChangeset(object):
256
256
257 def setup_method(self):
257 def setup_method(self):
258 self.repo = MercurialRepository(TEST_HG_REPO)
258 self.repo = MercurialRepository(TEST_HG_REPO)
259
259
260 def _test_equality(self, changeset):
260 def _test_equality(self, changeset):
261 revision = changeset.revision
261 revision = changeset.revision
262 assert changeset == self.repo.get_changeset(revision)
262 assert changeset == self.repo.get_changeset(revision)
263
263
264 def test_equality(self):
264 def test_equality(self):
265 revs = [0, 10, 20]
265 revs = [0, 10, 20]
266 changesets = [self.repo.get_changeset(rev) for rev in revs]
266 changesets = [self.repo.get_changeset(rev) for rev in revs]
267 for changeset in changesets:
267 for changeset in changesets:
268 self._test_equality(changeset)
268 self._test_equality(changeset)
269
269
270 def test_default_changeset(self):
270 def test_default_changeset(self):
271 tip = self.repo.get_changeset('tip')
271 tip = self.repo.get_changeset('tip')
272 assert tip == self.repo.get_changeset()
272 assert tip == self.repo.get_changeset()
273 assert tip == self.repo.get_changeset(revision=None)
273 assert tip == self.repo.get_changeset(revision=None)
274 assert tip == list(self.repo[-1:])[0]
274 assert tip == list(self.repo[-1:])[0]
275
275
276 def test_root_node(self):
276 def test_root_node(self):
277 tip = self.repo.get_changeset('tip')
277 tip = self.repo.get_changeset('tip')
278 assert tip.root is tip.get_node('')
278 assert tip.root is tip.get_node('')
279
279
280 def test_lazy_fetch(self):
280 def test_lazy_fetch(self):
281 """
281 """
282 Test that the changeset's nodes expand and are cached as we walk through
282 Test that the changeset's nodes expand and are cached as we walk through
283 the revision. This test is somewhat hard to write, as the order of checks
283 the revision. This test is somewhat hard to write, as the order of checks
284 is key here. Written by running command after command in a shell.
284 is key here. Written by running command after command in a shell.
285 """
285 """
286 chset = self.repo.get_changeset(45)
286 chset = self.repo.get_changeset(45)
287 assert len(chset.nodes) == 0
287 assert len(chset.nodes) == 0
288 root = chset.root
288 root = chset.root
289 assert len(chset.nodes) == 1
289 assert len(chset.nodes) == 1
290 assert len(root.nodes) == 8
290 assert len(root.nodes) == 8
291 # accessing root.nodes updates chset.nodes
291 # accessing root.nodes updates chset.nodes
292 assert len(chset.nodes) == 9
292 assert len(chset.nodes) == 9
293
293
294 docs = root.get_node('docs')
294 docs = root.get_node('docs')
295 # we haven't yet accessed anything new as docs dir was already cached
295 # we haven't yet accessed anything new as docs dir was already cached
296 assert len(chset.nodes) == 9
296 assert len(chset.nodes) == 9
297 assert len(docs.nodes) == 8
297 assert len(docs.nodes) == 8
298 # accessing docs.nodes updates chset.nodes
298 # accessing docs.nodes updates chset.nodes
299 assert len(chset.nodes) == 17
299 assert len(chset.nodes) == 17
300
300
301 assert docs is chset.get_node('docs')
301 assert docs is chset.get_node('docs')
302 assert docs is root.nodes[0]
302 assert docs is root.nodes[0]
303 assert docs is root.dirs[0]
303 assert docs is root.dirs[0]
304 assert docs is chset.get_node('docs')
304 assert docs is chset.get_node('docs')
305
305
306 def test_nodes_with_changeset(self):
306 def test_nodes_with_changeset(self):
307 chset = self.repo.get_changeset(45)
307 chset = self.repo.get_changeset(45)
308 root = chset.root
308 root = chset.root
309 docs = root.get_node('docs')
309 docs = root.get_node('docs')
310 assert docs is chset.get_node('docs')
310 assert docs is chset.get_node('docs')
311 api = docs.get_node('api')
311 api = docs.get_node('api')
312 assert api is chset.get_node('docs/api')
312 assert api is chset.get_node('docs/api')
313 index = api.get_node('index.rst')
313 index = api.get_node('index.rst')
314 assert index is chset.get_node('docs/api/index.rst')
314 assert index is chset.get_node('docs/api/index.rst')
315 assert index is chset.get_node('docs').get_node('api').get_node('index.rst')
315 assert index is chset.get_node('docs').get_node('api').get_node('index.rst')
316
316
317 def test_branch_and_tags(self):
317 def test_branch_and_tags(self):
318 chset0 = self.repo.get_changeset(0)
318 chset0 = self.repo.get_changeset(0)
319 assert chset0.branch == 'default'
319 assert chset0.branch == 'default'
320 assert chset0.branches == ['default']
320 assert chset0.branches == ['default']
321 assert chset0.tags == []
321 assert chset0.tags == []
322
322
323 chset10 = self.repo.get_changeset(10)
323 chset10 = self.repo.get_changeset(10)
324 assert chset10.branch == 'default'
324 assert chset10.branch == 'default'
325 assert chset10.branches == ['default']
325 assert chset10.branches == ['default']
326 assert chset10.tags == []
326 assert chset10.tags == []
327
327
328 chset44 = self.repo.get_changeset(44)
328 chset44 = self.repo.get_changeset(44)
329 assert chset44.branch == 'web'
329 assert chset44.branch == 'web'
330 assert chset44.branches == ['web']
330 assert chset44.branches == ['web']
331
331
332 tip = self.repo.get_changeset('tip')
332 tip = self.repo.get_changeset('tip')
333 assert 'tip' in tip.tags
333 assert 'tip' in tip.tags
334
334
335 def _test_file_size(self, revision, path, size):
335 def _test_file_size(self, revision, path, size):
336 node = self.repo.get_changeset(revision).get_node(path)
336 node = self.repo.get_changeset(revision).get_node(path)
337 assert node.is_file()
337 assert node.is_file()
338 assert node.size == size
338 assert node.size == size
339
339
340 def test_file_size(self):
340 def test_file_size(self):
341 to_check = (
341 to_check = (
342 (10, 'setup.py', 1068),
342 (10, 'setup.py', 1068),
343 (20, 'setup.py', 1106),
343 (20, 'setup.py', 1106),
344 (60, 'setup.py', 1074),
344 (60, 'setup.py', 1074),
345
345
346 (10, 'vcs/backends/base.py', 2921),
346 (10, 'vcs/backends/base.py', 2921),
347 (20, 'vcs/backends/base.py', 3936),
347 (20, 'vcs/backends/base.py', 3936),
348 (60, 'vcs/backends/base.py', 6189),
348 (60, 'vcs/backends/base.py', 6189),
349 )
349 )
350 for revision, path, size in to_check:
350 for revision, path, size in to_check:
351 self._test_file_size(revision, path, size)
351 self._test_file_size(revision, path, size)
352
352
353 def _test_dir_size(self, revision, path, size):
353 def _test_dir_size(self, revision, path, size):
354 node = self.repo.get_changeset(revision).get_node(path)
354 node = self.repo.get_changeset(revision).get_node(path)
355 assert not node.is_file()
355 assert not node.is_file()
356 assert node.size == size
356 assert node.size == size
357
357
358 def test_dir_size(self):
358 def test_dir_size(self):
359 to_check = (
359 to_check = (
360 ('96507bd11ecc', '/', 682421),
360 ('96507bd11ecc', '/', 682421),
361 ('a53d9201d4bc', '/', 682410),
361 ('a53d9201d4bc', '/', 682410),
362 ('90243de06161', '/', 682006),
362 ('90243de06161', '/', 682006),
363 )
363 )
364 for revision, path, size in to_check:
364 for revision, path, size in to_check:
365 self._test_dir_size(revision, path, size)
365 self._test_dir_size(revision, path, size)
366
366
367 def test_repo_size(self):
367 def test_repo_size(self):
368 assert self.repo.size == 682421
368 assert self.repo.size == 682421
369
369
370 def test_file_history(self):
370 def test_file_history(self):
371 # we can only check if those revisions are present in the history
371 # we can only check if those revisions are present in the history
372 # as we cannot update this test every time file is changed
372 # as we cannot update this test every time file is changed
373 files = {
373 files = {
374 'setup.py': [7, 18, 45, 46, 47, 69, 77],
374 'setup.py': [7, 18, 45, 46, 47, 69, 77],
375 'vcs/nodes.py': [7, 8, 24, 26, 30, 45, 47, 49, 56, 57, 58, 59, 60,
375 'vcs/nodes.py': [7, 8, 24, 26, 30, 45, 47, 49, 56, 57, 58, 59, 60,
376 61, 73, 76],
376 61, 73, 76],
377 'vcs/backends/hg.py': [4, 5, 6, 11, 12, 13, 14, 15, 16, 21, 22, 23,
377 'vcs/backends/hg.py': [4, 5, 6, 11, 12, 13, 14, 15, 16, 21, 22, 23,
378 26, 27, 28, 30, 31, 33, 35, 36, 37, 38, 39, 40, 41, 44, 45, 47,
378 26, 27, 28, 30, 31, 33, 35, 36, 37, 38, 39, 40, 41, 44, 45, 47,
379 48, 49, 53, 54, 55, 58, 60, 61, 67, 68, 69, 70, 73, 77, 78, 79,
379 48, 49, 53, 54, 55, 58, 60, 61, 67, 68, 69, 70, 73, 77, 78, 79,
380 82],
380 82],
381 }
381 }
382 for path, revs in files.items():
382 for path, revs in files.items():
383 tip = self.repo.get_changeset(revs[-1])
383 tip = self.repo.get_changeset(revs[-1])
384 node = tip.get_node(path)
384 node = tip.get_node(path)
385 node_revs = [chset.revision for chset in node.history]
385 node_revs = [chset.revision for chset in node.history]
386 assert set(revs).issubset(set(node_revs)), \
386 assert set(revs).issubset(set(node_revs)), \
387 "We assumed that %s is subset of revisions for which file %s " \
387 "We assumed that %s is subset of revisions for which file %s " \
388 "has been changed, and history of that node returned: %s" \
388 "has been changed, and history of that node returned: %s" \
389 % (revs, path, node_revs)
389 % (revs, path, node_revs)
390
390
391 def test_file_annotate(self):
391 def test_file_annotate(self):
392 files = {
392 files = {
393 'vcs/backends/__init__.py':
393 'vcs/backends/__init__.py':
394 {89: {'lines_no': 31,
394 {89: {'lines_no': 31,
395 'changesets': [32, 32, 61, 32, 32, 37, 32, 32, 32, 44,
395 'changesets': [32, 32, 61, 32, 32, 37, 32, 32, 32, 44,
396 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
396 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
397 32, 32, 32, 32, 37, 32, 37, 37, 32,
397 32, 32, 32, 32, 37, 32, 37, 37, 32,
398 32, 32]},
398 32, 32]},
399 20: {'lines_no': 1,
399 20: {'lines_no': 1,
400 'changesets': [4]},
400 'changesets': [4]},
401 55: {'lines_no': 31,
401 55: {'lines_no': 31,
402 'changesets': [32, 32, 45, 32, 32, 37, 32, 32, 32, 44,
402 'changesets': [32, 32, 45, 32, 32, 37, 32, 32, 32, 44,
403 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
403 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
404 32, 32, 32, 32, 37, 32, 37, 37, 32,
404 32, 32, 32, 32, 37, 32, 37, 37, 32,
405 32, 32]}},
405 32, 32]}},
406 'vcs/exceptions.py':
406 'vcs/exceptions.py':
407 {89: {'lines_no': 18,
407 {89: {'lines_no': 18,
408 'changesets': [16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
408 'changesets': [16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
409 16, 16, 17, 16, 16, 18, 18, 18]},
409 16, 16, 17, 16, 16, 18, 18, 18]},
410 20: {'lines_no': 18,
410 20: {'lines_no': 18,
411 'changesets': [16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
411 'changesets': [16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
412 16, 16, 17, 16, 16, 18, 18, 18]},
412 16, 16, 17, 16, 16, 18, 18, 18]},
413 55: {'lines_no': 18, 'changesets': [16, 16, 16, 16, 16, 16,
413 55: {'lines_no': 18, 'changesets': [16, 16, 16, 16, 16, 16,
414 16, 16, 16, 16, 16, 16,
414 16, 16, 16, 16, 16, 16,
415 17, 16, 16, 18, 18, 18]}},
415 17, 16, 16, 18, 18, 18]}},
416 'MANIFEST.in': {89: {'lines_no': 5,
416 'MANIFEST.in': {89: {'lines_no': 5,
417 'changesets': [7, 7, 7, 71, 71]},
417 'changesets': [7, 7, 7, 71, 71]},
418 20: {'lines_no': 3,
418 20: {'lines_no': 3,
419 'changesets': [7, 7, 7]},
419 'changesets': [7, 7, 7]},
420 55: {'lines_no': 3,
420 55: {'lines_no': 3,
421 'changesets': [7, 7, 7]}}}
421 'changesets': [7, 7, 7]}}}
422
422
423 for fname, revision_dict in files.items():
423 for fname, revision_dict in files.items():
424 for rev, data in revision_dict.items():
424 for rev, data in revision_dict.items():
425 cs = self.repo.get_changeset(rev)
425 cs = self.repo.get_changeset(rev)
426 l1_1 = [x[1] for x in cs.get_file_annotate(fname)]
426 l1_1 = [x[1] for x in cs.get_file_annotate(fname)]
427 l1_2 = [x[2]().raw_id for x in cs.get_file_annotate(fname)]
427 l1_2 = [x[2]().raw_id for x in cs.get_file_annotate(fname)]
428 assert l1_1 == l1_2
428 assert l1_1 == l1_2
429 l1 = l1_2 = [x[2]().revision for x in cs.get_file_annotate(fname)]
429 l1 = l1_2 = [x[2]().revision for x in cs.get_file_annotate(fname)]
430 l2 = files[fname][rev]['changesets']
430 l2 = files[fname][rev]['changesets']
431 assert l1 == l2, "The lists of revisions for %s@rev%s " \
431 assert l1 == l2, "The lists of revisions for %s@rev%s " \
432 "from annotation list should match each other, " \
432 "from annotation list should match each other, " \
433 "got \n%s \nvs \n%s " % (fname, rev, l1, l2)
433 "got \n%s \nvs \n%s " % (fname, rev, l1, l2)
434
434
435 def test_changeset_state(self):
435 def test_changeset_state(self):
436 """
436 """
437 Tests which files have been added/changed/removed at a particular revision
437 Tests which files have been added/changed/removed at a particular revision
438 """
438 """
439
439
440 # rev 46ad32a4f974:
440 # rev 46ad32a4f974:
441 # hg st --rev 46ad32a4f974
441 # hg st --rev 46ad32a4f974
442 # changed: 13
442 # changed: 13
443 # added: 20
443 # added: 20
444 # removed: 1
444 # removed: 1
445 changed = set(['.hgignore'
445 changed = set(['.hgignore'
446 , 'README.rst', 'docs/conf.py', 'docs/index.rst', 'setup.py'
446 , 'README.rst', 'docs/conf.py', 'docs/index.rst', 'setup.py'
447 , 'tests/test_hg.py', 'tests/test_nodes.py', 'vcs/__init__.py'
447 , 'tests/test_hg.py', 'tests/test_nodes.py', 'vcs/__init__.py'
448 , 'vcs/backends/__init__.py', 'vcs/backends/base.py'
448 , 'vcs/backends/__init__.py', 'vcs/backends/base.py'
449 , 'vcs/backends/hg.py', 'vcs/nodes.py', 'vcs/utils/__init__.py'])
449 , 'vcs/backends/hg.py', 'vcs/nodes.py', 'vcs/utils/__init__.py'])
450
450
451 added = set(['docs/api/backends/hg.rst'
451 added = set(['docs/api/backends/hg.rst'
452 , 'docs/api/backends/index.rst', 'docs/api/index.rst'
452 , 'docs/api/backends/index.rst', 'docs/api/index.rst'
453 , 'docs/api/nodes.rst', 'docs/api/web/index.rst'
453 , 'docs/api/nodes.rst', 'docs/api/web/index.rst'
454 , 'docs/api/web/simplevcs.rst', 'docs/installation.rst'
454 , 'docs/api/web/simplevcs.rst', 'docs/installation.rst'
455 , 'docs/quickstart.rst', 'setup.cfg', 'vcs/utils/baseui_config.py'
455 , 'docs/quickstart.rst', 'setup.cfg', 'vcs/utils/baseui_config.py'
456 , 'vcs/utils/web.py', 'vcs/web/__init__.py', 'vcs/web/exceptions.py'
456 , 'vcs/utils/web.py', 'vcs/web/__init__.py', 'vcs/web/exceptions.py'
457 , 'vcs/web/simplevcs/__init__.py', 'vcs/web/simplevcs/exceptions.py'
457 , 'vcs/web/simplevcs/__init__.py', 'vcs/web/simplevcs/exceptions.py'
458 , 'vcs/web/simplevcs/middleware.py', 'vcs/web/simplevcs/models.py'
458 , 'vcs/web/simplevcs/middleware.py', 'vcs/web/simplevcs/models.py'
459 , 'vcs/web/simplevcs/settings.py', 'vcs/web/simplevcs/utils.py'
459 , 'vcs/web/simplevcs/settings.py', 'vcs/web/simplevcs/utils.py'
460 , 'vcs/web/simplevcs/views.py'])
460 , 'vcs/web/simplevcs/views.py'])
461
461
462 removed = set(['docs/api.rst'])
462 removed = set(['docs/api.rst'])
463
463
464 chset64 = self.repo.get_changeset('46ad32a4f974')
464 chset64 = self.repo.get_changeset('46ad32a4f974')
465 assert set((node.path for node in chset64.added)) == added
465 assert set((node.path for node in chset64.added)) == added
466 assert set((node.path for node in chset64.changed)) == changed
466 assert set((node.path for node in chset64.changed)) == changed
467 assert set((node.path for node in chset64.removed)) == removed
467 assert set((node.path for node in chset64.removed)) == removed
468
468
469 # rev b090f22d27d6:
469 # rev b090f22d27d6:
470 # hg st --rev b090f22d27d6
470 # hg st --rev b090f22d27d6
471 # changed: 1
471 # changed: 1
472 # added: 0
472 # added: 0
473 # removed: 0
473 # removed: 0
474 chset88 = self.repo.get_changeset('b090f22d27d6')
474 chset88 = self.repo.get_changeset('b090f22d27d6')
475 assert set((node.path for node in chset88.added)) == set()
475 assert set((node.path for node in chset88.added)) == set()
476 assert set((node.path for node in chset88.changed)) == set(['.hgignore'])
476 assert set((node.path for node in chset88.changed)) == set(['.hgignore'])
477 assert set((node.path for node in chset88.removed)) == set()
477 assert set((node.path for node in chset88.removed)) == set()
478
478
479 # 85:
479 # 85:
480 # added: 2 ['vcs/utils/diffs.py', 'vcs/web/simplevcs/views/diffs.py']
480 # added: 2 ['vcs/utils/diffs.py', 'vcs/web/simplevcs/views/diffs.py']
481 # changed: 4 ['vcs/web/simplevcs/models.py', ...]
481 # changed: 4 ['vcs/web/simplevcs/models.py', ...]
482 # removed: 1 ['vcs/utils/web.py']
482 # removed: 1 ['vcs/utils/web.py']
483 chset85 = self.repo.get_changeset(85)
483 chset85 = self.repo.get_changeset(85)
484 assert set((node.path for node in chset85.added)) == set([
484 assert set((node.path for node in chset85.added)) == set([
485 'vcs/utils/diffs.py',
485 'vcs/utils/diffs.py',
486 'vcs/web/simplevcs/views/diffs.py'
486 'vcs/web/simplevcs/views/diffs.py'
487 ])
487 ])
488
488
489 assert set((node.path for node in chset85.changed)) == set([
489 assert set((node.path for node in chset85.changed)) == set([
490 'vcs/web/simplevcs/models.py',
490 'vcs/web/simplevcs/models.py',
491 'vcs/web/simplevcs/utils.py',
491 'vcs/web/simplevcs/utils.py',
492 'vcs/web/simplevcs/views/__init__.py',
492 'vcs/web/simplevcs/views/__init__.py',
493 'vcs/web/simplevcs/views/repository.py',
493 'vcs/web/simplevcs/views/repository.py',
494 ])
494 ])
495
495
496 assert set((node.path for node in chset85.removed)) == set([
496 assert set((node.path for node in chset85.removed)) == set([
497 'vcs/utils/web.py'
497 'vcs/utils/web.py'
498 ])
498 ])
499
499
500
500
501 def test_files_state(self):
501 def test_files_state(self):
502 """
502 """
503 Tests state of FileNodes.
503 Tests state of FileNodes.
504 """
504 """
505 chset = self.repo.get_changeset(85)
505 chset = self.repo.get_changeset(85)
506 node = chset.get_node('vcs/utils/diffs.py')
506 node = chset.get_node('vcs/utils/diffs.py')
507 assert node.state == NodeState.ADDED
507 assert node.state == NodeState.ADDED
508 assert node.added
508 assert node.added
509 assert not node.changed
509 assert not node.changed
510 assert not node.not_changed
510 assert not node.not_changed
511 assert not node.removed
511 assert not node.removed
512
512
513 chset = self.repo.get_changeset(88)
513 chset = self.repo.get_changeset(88)
514 node = chset.get_node('.hgignore')
514 node = chset.get_node('.hgignore')
515 assert node.state == NodeState.CHANGED
515 assert node.state == NodeState.CHANGED
516 assert not node.added
516 assert not node.added
517 assert node.changed
517 assert node.changed
518 assert not node.not_changed
518 assert not node.not_changed
519 assert not node.removed
519 assert not node.removed
520
520
521 chset = self.repo.get_changeset(85)
521 chset = self.repo.get_changeset(85)
522 node = chset.get_node('setup.py')
522 node = chset.get_node('setup.py')
523 assert node.state == NodeState.NOT_CHANGED
523 assert node.state == NodeState.NOT_CHANGED
524 assert not node.added
524 assert not node.added
525 assert not node.changed
525 assert not node.changed
526 assert node.not_changed
526 assert node.not_changed
527 assert not node.removed
527 assert not node.removed
528
528
529 # If node has REMOVED state then trying to fetch it would raise
529 # If node has REMOVED state then trying to fetch it would raise
530 # ChangesetError exception
530 # ChangesetError exception
531 chset = self.repo.get_changeset(2)
531 chset = self.repo.get_changeset(2)
532 path = 'vcs/backends/BaseRepository.py'
532 path = 'vcs/backends/BaseRepository.py'
533 with pytest.raises(NodeDoesNotExistError):
533 with pytest.raises(NodeDoesNotExistError):
534 chset.get_node(path)
534 chset.get_node(path)
535 # but it would be one of ``removed`` (changeset's attribute)
535 # but it would be one of ``removed`` (changeset's attribute)
536 assert path in [rf.path for rf in chset.removed]
536 assert path in [rf.path for rf in chset.removed]
537
537
538 def test_commit_message_is_unicode(self):
538 def test_commit_message_is_str(self):
539 for cm in self.repo:
539 for cm in self.repo:
540 assert isinstance(cm.message, unicode)
540 assert isinstance(cm.message, str)
541
541
542 def test_changeset_author_is_unicode(self):
542 def test_changeset_author_is_str(self):
543 for cm in self.repo:
543 for cm in self.repo:
544 assert isinstance(cm.author, unicode)
544 assert isinstance(cm.author, str)
545
545
546 def test_repo_files_content_is_bytes(self):
546 def test_repo_files_content_is_bytes(self):
547 test_changeset = self.repo.get_changeset(100)
547 test_changeset = self.repo.get_changeset(100)
548 for node in test_changeset.get_node('/'):
548 for node in test_changeset.get_node('/'):
549 if node.is_file():
549 if node.is_file():
550 assert isinstance(node.content, bytes)
550 assert isinstance(node.content, bytes)
551
551
552 def test_wrong_path(self):
552 def test_wrong_path(self):
553 # 'setup.py' exists in the root dir, but not at this path:
553 # 'setup.py' exists in the root dir, but not at this path:
554 path = 'foo/bar/setup.py'
554 path = 'foo/bar/setup.py'
555 with pytest.raises(VCSError):
555 with pytest.raises(VCSError):
556 self.repo.get_changeset().get_node(path)
556 self.repo.get_changeset().get_node(path)
557
557
558 def test_archival_file(self):
558 def test_archival_file(self):
559 # TODO:
559 # TODO:
560 pass
560 pass
561
561
562 def test_archival_as_generator(self):
562 def test_archival_as_generator(self):
563 # TODO:
563 # TODO:
564 pass
564 pass
565
565
566 def test_archival_wrong_kind(self):
566 def test_archival_wrong_kind(self):
567 tip = self.repo.get_changeset()
567 tip = self.repo.get_changeset()
568 with pytest.raises(VCSError):
568 with pytest.raises(VCSError):
569 tip.fill_archive(kind='error')
569 tip.fill_archive(kind='error')
570
570
571 def test_archival_empty_prefix(self):
571 def test_archival_empty_prefix(self):
572 # TODO:
572 # TODO:
573 pass
573 pass
574
574
575 def test_author_email(self):
575 def test_author_email(self):
576 assert 'marcin@python-blog.com' == self.repo.get_changeset('b986218ba1c9').author_email
576 assert 'marcin@python-blog.com' == self.repo.get_changeset('b986218ba1c9').author_email
577 assert 'lukasz.balcerzak@python-center.pl' == self.repo.get_changeset('3803844fdbd3').author_email
577 assert 'lukasz.balcerzak@python-center.pl' == self.repo.get_changeset('3803844fdbd3').author_email
578 assert '' == self.repo.get_changeset('84478366594b').author_email
578 assert '' == self.repo.get_changeset('84478366594b').author_email
579
579
580 def test_author_username(self):
580 def test_author_username(self):
581 assert 'Marcin Kuzminski' == self.repo.get_changeset('b986218ba1c9').author_name
581 assert 'Marcin Kuzminski' == self.repo.get_changeset('b986218ba1c9').author_name
582 assert 'Lukasz Balcerzak' == self.repo.get_changeset('3803844fdbd3').author_name
582 assert 'Lukasz Balcerzak' == self.repo.get_changeset('3803844fdbd3').author_name
583 assert 'marcink' == self.repo.get_changeset('84478366594b').author_name
583 assert 'marcink' == self.repo.get_changeset('84478366594b').author_name
584
584
585 def test_successors(self):
585 def test_successors(self):
586 init_chset = self.repo.get_changeset(0)
586 init_chset = self.repo.get_changeset(0)
587 assert init_chset.successors == []
587 assert init_chset.successors == []
588
588
589 def test_predecessors(self):
589 def test_predecessors(self):
590 init_chset = self.repo.get_changeset(0)
590 init_chset = self.repo.get_changeset(0)
591 assert len(init_chset.predecessors) == 0
591 assert len(init_chset.predecessors) == 0
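The Mercurial changeset tests above all exercise the same read-only API: fetch a changeset, walk its nodes, and inspect node content. A minimal sketch of that usage follows, for illustration only; the import path and the TEST_HG_REPO fixture are assumptions taken from this test module, not something this diff introduces.

from kallithea.lib.vcs.backends.hg import MercurialRepository  # assumed import path

repo = MercurialRepository(TEST_HG_REPO)    # TEST_HG_REPO: path to the local hg test repository
tip = repo.get_changeset()                  # no argument defaults to the 'tip' changeset
node = tip.get_node('setup.py')             # FileNode for that path at tip
assert node.is_file()
assert isinstance(node.content, bytes)      # file content stays bytes
assert isinstance(tip.message, str)         # message and author are str on Python 3
assert isinstance(tip.author, str)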
@@ -1,330 +1,330 b''
1 # encoding: utf-8
1 # encoding: utf-8
2 """
2 """
3 Tests the so-called "in memory changesets" commit API of vcs.
3 Tests the so-called "in memory changesets" commit API of vcs.
4 """
4 """
5
5
6 import datetime
6 import datetime
7
7
8 import pytest
8 import pytest
9
9
10 from kallithea.lib.vcs.exceptions import (
10 from kallithea.lib.vcs.exceptions import (
11 EmptyRepositoryError, NodeAlreadyAddedError, NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError, NodeDoesNotExistError, NodeNotChangedError)
11 EmptyRepositoryError, NodeAlreadyAddedError, NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError, NodeDoesNotExistError, NodeNotChangedError)
12 from kallithea.lib.vcs.nodes import DirNode, FileNode
12 from kallithea.lib.vcs.nodes import DirNode, FileNode
13 from kallithea.tests.vcs.base import _BackendTestMixin
13 from kallithea.tests.vcs.base import _BackendTestMixin
14
14
15
15
16 class InMemoryChangesetTestMixin(_BackendTestMixin):
16 class InMemoryChangesetTestMixin(_BackendTestMixin):
17
17
18 @classmethod
18 @classmethod
19 def _get_commits(cls):
19 def _get_commits(cls):
20 # Note: this is slightly different from the regular _get_commits methods
20 # Note: this is slightly different from the regular _get_commits methods
21 # as we don't actually return any commits. The creation of commits is
21 # as we don't actually return any commits. The creation of commits is
22 # handled in the tests themselves.
22 # handled in the tests themselves.
23 cls.nodes = [
23 cls.nodes = [
24 FileNode('foobar', content='Foo & bar'),
24 FileNode('foobar', content='Foo & bar'),
25 FileNode('foobar2', content='Foo & bar, doubled!'),
25 FileNode('foobar2', content='Foo & bar, doubled!'),
26 FileNode('foo bar with spaces', content=''),
26 FileNode('foo bar with spaces', content=''),
27 FileNode('foo/bar/baz', content='Inside'),
27 FileNode('foo/bar/baz', content='Inside'),
28 FileNode('foo/bar/file.bin', content='\xd0\xcf\x11\xe0\xa1\xb1\x1a\xe1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00;\x00\x03\x00\xfe\xff\t\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x1a\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x18\x00\x00\x00\x01\x00\x00\x00\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'),
28 FileNode('foo/bar/file.bin', content='\xd0\xcf\x11\xe0\xa1\xb1\x1a\xe1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00;\x00\x03\x00\xfe\xff\t\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x1a\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x18\x00\x00\x00\x01\x00\x00\x00\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'),
29 ]
29 ]
30 commits = []
30 commits = []
31 return commits
31 return commits
32
32
33 def test_add(self):
33 def test_add(self):
34 rev_count = len(self.repo.revisions)
34 rev_count = len(self.repo.revisions)
35 to_add = [FileNode(node.path, content=node.content)
35 to_add = [FileNode(node.path, content=node.content)
36 for node in self.nodes]
36 for node in self.nodes]
37 for node in to_add:
37 for node in to_add:
38 self.imc.add(node)
38 self.imc.add(node)
39 message = u'Added: %s' % ', '.join((node.path for node in self.nodes))
39 message = u'Added: %s' % ', '.join((node.path for node in self.nodes))
40 author = unicode(self.__class__)
40 author = str(self.__class__)
41 changeset = self.imc.commit(message=message, author=author)
41 changeset = self.imc.commit(message=message, author=author)
42
42
43 newtip = self.repo.get_changeset()
43 newtip = self.repo.get_changeset()
44 assert changeset == newtip
44 assert changeset == newtip
45 assert rev_count + 1 == len(self.repo.revisions)
45 assert rev_count + 1 == len(self.repo.revisions)
46 assert newtip.message == message
46 assert newtip.message == message
47 assert newtip.author == author
47 assert newtip.author == author
48 assert not any((
48 assert not any((
49 self.imc.added,
49 self.imc.added,
50 self.imc.changed,
50 self.imc.changed,
51 self.imc.removed
51 self.imc.removed
52 ))
52 ))
53 for node in to_add:
53 for node in to_add:
54 assert newtip.get_node(node.path).content == node.content
54 assert newtip.get_node(node.path).content == node.content
55
55
56 def test_add_in_bulk(self):
56 def test_add_in_bulk(self):
57 rev_count = len(self.repo.revisions)
57 rev_count = len(self.repo.revisions)
58 to_add = [FileNode(node.path, content=node.content)
58 to_add = [FileNode(node.path, content=node.content)
59 for node in self.nodes]
59 for node in self.nodes]
60 self.imc.add(*to_add)
60 self.imc.add(*to_add)
61 message = u'Added: %s' % ', '.join((node.path for node in self.nodes))
61 message = u'Added: %s' % ', '.join((node.path for node in self.nodes))
62 author = unicode(self.__class__)
62 author = str(self.__class__)
63 changeset = self.imc.commit(message=message, author=author)
63 changeset = self.imc.commit(message=message, author=author)
64
64
65 newtip = self.repo.get_changeset()
65 newtip = self.repo.get_changeset()
66 assert changeset == newtip
66 assert changeset == newtip
67 assert rev_count + 1 == len(self.repo.revisions)
67 assert rev_count + 1 == len(self.repo.revisions)
68 assert newtip.message == message
68 assert newtip.message == message
69 assert newtip.author == author
69 assert newtip.author == author
70 assert not any((
70 assert not any((
71 self.imc.added,
71 self.imc.added,
72 self.imc.changed,
72 self.imc.changed,
73 self.imc.removed
73 self.imc.removed
74 ))
74 ))
75 for node in to_add:
75 for node in to_add:
76 assert newtip.get_node(node.path).content == node.content
76 assert newtip.get_node(node.path).content == node.content
77
77
78 def test_add_actually_adds_all_nodes_at_second_commit_too(self):
78 def test_add_actually_adds_all_nodes_at_second_commit_too(self):
79 self.imc.add(FileNode('foo/bar/image.png', content='\0'))
79 self.imc.add(FileNode('foo/bar/image.png', content='\0'))
80 self.imc.add(FileNode('foo/README.txt', content='readme!'))
80 self.imc.add(FileNode('foo/README.txt', content='readme!'))
81 changeset = self.imc.commit(u'Initial', u'joe.doe@example.com')
81 changeset = self.imc.commit(u'Initial', u'joe.doe@example.com')
82 assert isinstance(changeset.get_node('foo'), DirNode)
82 assert isinstance(changeset.get_node('foo'), DirNode)
83 assert isinstance(changeset.get_node('foo/bar'), DirNode)
83 assert isinstance(changeset.get_node('foo/bar'), DirNode)
84 assert changeset.get_node('foo/bar/image.png').content == b'\0'
84 assert changeset.get_node('foo/bar/image.png').content == b'\0'
85 assert changeset.get_node('foo/README.txt').content == b'readme!'
85 assert changeset.get_node('foo/README.txt').content == b'readme!'
86
86
87 # commit some more files again
87 # commit some more files again
88 to_add = [
88 to_add = [
89 FileNode('foo/bar/foobaz/bar', content='foo'),
89 FileNode('foo/bar/foobaz/bar', content='foo'),
90 FileNode('foo/bar/another/bar', content='foo'),
90 FileNode('foo/bar/another/bar', content='foo'),
91 FileNode('foo/baz.txt', content='foo'),
91 FileNode('foo/baz.txt', content='foo'),
92 FileNode('foobar/foobaz/file', content='foo'),
92 FileNode('foobar/foobaz/file', content='foo'),
93 FileNode('foobar/barbaz', content='foo'),
93 FileNode('foobar/barbaz', content='foo'),
94 ]
94 ]
95 self.imc.add(*to_add)
95 self.imc.add(*to_add)
96 changeset = self.imc.commit(u'Another', u'joe.doe@example.com')
96 changeset = self.imc.commit(u'Another', u'joe.doe@example.com')
97 assert changeset.get_node('foo/bar/foobaz/bar').content == b'foo'
97 assert changeset.get_node('foo/bar/foobaz/bar').content == b'foo'
98 assert changeset.get_node('foo/bar/another/bar').content == b'foo'
98 assert changeset.get_node('foo/bar/another/bar').content == b'foo'
99 assert changeset.get_node('foo/baz.txt').content == b'foo'
99 assert changeset.get_node('foo/baz.txt').content == b'foo'
100 assert changeset.get_node('foobar/foobaz/file').content == b'foo'
100 assert changeset.get_node('foobar/foobaz/file').content == b'foo'
101 assert changeset.get_node('foobar/barbaz').content == b'foo'
101 assert changeset.get_node('foobar/barbaz').content == b'foo'
102
102
103 def test_add_non_ascii_files(self):
103 def test_add_non_ascii_files(self):
104 rev_count = len(self.repo.revisions)
104 rev_count = len(self.repo.revisions)
105 to_add = [
105 to_add = [
106 FileNode('ΕΌΓ³Ε‚wik/zwierzΔ…tko', content='Δ‡Δ‡Δ‡Δ‡'),
106 FileNode('ΕΌΓ³Ε‚wik/zwierzΔ…tko', content='Δ‡Δ‡Δ‡Δ‡'),
107 FileNode(u'ΕΌΓ³Ε‚wik/zwierzΔ…tko_uni', content=u'Δ‡Δ‡Δ‡Δ‡'),
107 FileNode(u'ΕΌΓ³Ε‚wik/zwierzΔ…tko_uni', content=u'Δ‡Δ‡Δ‡Δ‡'),
108 ]
108 ]
109 for node in to_add:
109 for node in to_add:
110 self.imc.add(node)
110 self.imc.add(node)
111 message = u'Added: %s' % ', '.join((node.path for node in self.nodes))
111 message = u'Added: %s' % ', '.join((node.path for node in self.nodes))
112 author = unicode(self.__class__)
112 author = str(self.__class__)
113 changeset = self.imc.commit(message=message, author=author)
113 changeset = self.imc.commit(message=message, author=author)
114
114
115 newtip = self.repo.get_changeset()
115 newtip = self.repo.get_changeset()
116 assert changeset == newtip
116 assert changeset == newtip
117 assert rev_count + 1 == len(self.repo.revisions)
117 assert rev_count + 1 == len(self.repo.revisions)
118 assert newtip.message == message
118 assert newtip.message == message
119 assert newtip.author == author
119 assert newtip.author == author
120 assert not any((
120 assert not any((
121 self.imc.added,
121 self.imc.added,
122 self.imc.changed,
122 self.imc.changed,
123 self.imc.removed
123 self.imc.removed
124 ))
124 ))
125 for node in to_add:
125 for node in to_add:
126 assert newtip.get_node(node.path).content == node.content
126 assert newtip.get_node(node.path).content == node.content
127
127
128 def test_add_raise_already_added(self):
128 def test_add_raise_already_added(self):
129 node = FileNode('foobar', content='baz')
129 node = FileNode('foobar', content='baz')
130 self.imc.add(node)
130 self.imc.add(node)
131 with pytest.raises(NodeAlreadyAddedError):
131 with pytest.raises(NodeAlreadyAddedError):
132 self.imc.add(node)
132 self.imc.add(node)
133
133
134 def test_check_integrity_raise_already_exist(self):
134 def test_check_integrity_raise_already_exist(self):
135 node = FileNode('foobar', content='baz')
135 node = FileNode('foobar', content='baz')
136 self.imc.add(node)
136 self.imc.add(node)
137 self.imc.commit(message=u'Added foobar', author=unicode(self))
137 self.imc.commit(message=u'Added foobar', author=str(self))
138 self.imc.add(node)
138 self.imc.add(node)
139 with pytest.raises(NodeAlreadyExistsError):
139 with pytest.raises(NodeAlreadyExistsError):
140 self.imc.commit(message='new message',
140 self.imc.commit(message='new message',
141 author=str(self))
141 author=str(self))
142
142
143 def test_change(self):
143 def test_change(self):
144 self.imc.add(FileNode('foo/bar/baz', content='foo'))
144 self.imc.add(FileNode('foo/bar/baz', content='foo'))
145 self.imc.add(FileNode('foo/fbar', content='foobar'))
145 self.imc.add(FileNode('foo/fbar', content='foobar'))
146 tip = self.imc.commit(u'Initial', u'joe.doe@example.com')
146 tip = self.imc.commit(u'Initial', u'joe.doe@example.com')
147
147
148 # Change node's content
148 # Change node's content
149 node = FileNode('foo/bar/baz', content='My **changed** content')
149 node = FileNode('foo/bar/baz', content='My **changed** content')
150 self.imc.change(node)
150 self.imc.change(node)
151 self.imc.commit(u'Changed %s' % node.path, u'joe.doe@example.com')
151 self.imc.commit(u'Changed %s' % node.path, u'joe.doe@example.com')
152
152
153 newtip = self.repo.get_changeset()
153 newtip = self.repo.get_changeset()
154 assert tip != newtip
154 assert tip != newtip
155 assert tip.raw_id != newtip.raw_id
155 assert tip.raw_id != newtip.raw_id
156 assert newtip.get_node('foo/bar/baz').content == b'My **changed** content'
156 assert newtip.get_node('foo/bar/baz').content == b'My **changed** content'
157
157
158 def test_change_non_ascii(self):
158 def test_change_non_ascii(self):
159 to_add = [
159 to_add = [
160 FileNode('ΕΌΓ³Ε‚wik/zwierzΔ…tko', content='Δ‡Δ‡Δ‡Δ‡'),
160 FileNode('ΕΌΓ³Ε‚wik/zwierzΔ…tko', content='Δ‡Δ‡Δ‡Δ‡'),
161 FileNode(u'ΕΌΓ³Ε‚wik/zwierzΔ…tko_uni', content=u'Δ‡Δ‡Δ‡Δ‡'),
161 FileNode(u'ΕΌΓ³Ε‚wik/zwierzΔ…tko_uni', content=u'Δ‡Δ‡Δ‡Δ‡'),
162 ]
162 ]
163 for node in to_add:
163 for node in to_add:
164 self.imc.add(node)
164 self.imc.add(node)
165
165
166 tip = self.imc.commit(u'Initial', u'joe.doe@example.com')
166 tip = self.imc.commit(u'Initial', u'joe.doe@example.com')
167
167
168 # Change node's content
168 # Change node's content
169 node = FileNode('ΕΌΓ³Ε‚wik/zwierzΔ…tko', content='My **changed** content')
169 node = FileNode('ΕΌΓ³Ε‚wik/zwierzΔ…tko', content='My **changed** content')
170 self.imc.change(node)
170 self.imc.change(node)
171 self.imc.commit(u'Changed %s' % node.path, u'joe.doe@example.com')
171 self.imc.commit(u'Changed %s' % node.path, u'joe.doe@example.com')
172
172
173 node = FileNode(u'ΕΌΓ³Ε‚wik/zwierzΔ…tko_uni', content=u'My **changed** content')
173 node = FileNode(u'ΕΌΓ³Ε‚wik/zwierzΔ…tko_uni', content=u'My **changed** content')
174 self.imc.change(node)
174 self.imc.change(node)
175 self.imc.commit(u'Changed %s' % node.path, u'joe.doe@example.com')
175 self.imc.commit(u'Changed %s' % node.path, u'joe.doe@example.com')
176
176
177 newtip = self.repo.get_changeset()
177 newtip = self.repo.get_changeset()
178 assert tip != newtip
178 assert tip != newtip
179 assert tip.raw_id != newtip.raw_id
179 assert tip.raw_id != newtip.raw_id
180
180
181 assert newtip.get_node('ΕΌΓ³Ε‚wik/zwierzΔ…tko').content == b'My **changed** content'
181 assert newtip.get_node('ΕΌΓ³Ε‚wik/zwierzΔ…tko').content == b'My **changed** content'
182 assert newtip.get_node('ΕΌΓ³Ε‚wik/zwierzΔ…tko_uni').content == b'My **changed** content'
182 assert newtip.get_node('ΕΌΓ³Ε‚wik/zwierzΔ…tko_uni').content == b'My **changed** content'
183
183
184 def test_change_raise_empty_repository(self):
184 def test_change_raise_empty_repository(self):
185 node = FileNode('foobar')
185 node = FileNode('foobar')
186 with pytest.raises(EmptyRepositoryError):
186 with pytest.raises(EmptyRepositoryError):
187 self.imc.change(node)
187 self.imc.change(node)
188
188
189 def test_check_integrity_change_raise_node_does_not_exist(self):
189 def test_check_integrity_change_raise_node_does_not_exist(self):
190 node = FileNode('foobar', content='baz')
190 node = FileNode('foobar', content='baz')
191 self.imc.add(node)
191 self.imc.add(node)
192 self.imc.commit(message=u'Added foobar', author=unicode(self))
192 self.imc.commit(message=u'Added foobar', author=str(self))
193 node = FileNode('not-foobar', content='')
193 node = FileNode('not-foobar', content='')
194 self.imc.change(node)
194 self.imc.change(node)
195 with pytest.raises(NodeDoesNotExistError):
195 with pytest.raises(NodeDoesNotExistError):
196 self.imc.commit(message='Changed not existing node', author=str(self))
196 self.imc.commit(message='Changed not existing node', author=str(self))
197
197
198 def test_change_raise_node_already_changed(self):
198 def test_change_raise_node_already_changed(self):
199 node = FileNode('foobar', content='baz')
199 node = FileNode('foobar', content='baz')
200 self.imc.add(node)
200 self.imc.add(node)
201 self.imc.commit(message=u'Added foobar', author=unicode(self))
201 self.imc.commit(message=u'Added foobar', author=str(self))
202 node = FileNode('foobar', content='more baz')
202 node = FileNode('foobar', content='more baz')
203 self.imc.change(node)
203 self.imc.change(node)
204 with pytest.raises(NodeAlreadyChangedError):
204 with pytest.raises(NodeAlreadyChangedError):
205 self.imc.change(node)
205 self.imc.change(node)
206
206
207 def test_check_integrity_change_raise_node_not_changed(self):
207 def test_check_integrity_change_raise_node_not_changed(self):
208 self.test_add() # Performs first commit
208 self.test_add() # Performs first commit
209
209
210 node = FileNode(self.nodes[0].path, content=self.nodes[0].content)
210 node = FileNode(self.nodes[0].path, content=self.nodes[0].content)
211 self.imc.change(node)
211 self.imc.change(node)
212 with pytest.raises(NodeNotChangedError):
212 with pytest.raises(NodeNotChangedError):
213 self.imc.commit(
213 self.imc.commit(
214 message=u'Trying to mark node as changed without touching it',
214 message=u'Trying to mark node as changed without touching it',
215 author=unicode(self)
215 author=str(self),
216 )
216 )
217
217
218 def test_change_raise_node_already_removed(self):
218 def test_change_raise_node_already_removed(self):
219 node = FileNode('foobar', content='baz')
219 node = FileNode('foobar', content='baz')
220 self.imc.add(node)
220 self.imc.add(node)
221 self.imc.commit(message=u'Added foobar', author=unicode(self))
221 self.imc.commit(message=u'Added foobar', author=str(self))
222 self.imc.remove(FileNode('foobar'))
222 self.imc.remove(FileNode('foobar'))
223 with pytest.raises(NodeAlreadyRemovedError):
223 with pytest.raises(NodeAlreadyRemovedError):
224 self.imc.change(node)
224 self.imc.change(node)
225
225
226 def test_remove(self):
226 def test_remove(self):
227 self.test_add() # Performs first commit
227 self.test_add() # Performs first commit
228
228
229 tip = self.repo.get_changeset()
229 tip = self.repo.get_changeset()
230 node = self.nodes[0]
230 node = self.nodes[0]
231 assert node.content == tip.get_node(node.path).content
231 assert node.content == tip.get_node(node.path).content
232 self.imc.remove(node)
232 self.imc.remove(node)
233 self.imc.commit(message=u'Removed %s' % node.path, author=unicode(self))
233 self.imc.commit(message=u'Removed %s' % node.path, author=str(self))
234
234
235 newtip = self.repo.get_changeset()
235 newtip = self.repo.get_changeset()
236 assert tip != newtip
236 assert tip != newtip
237 assert tip.raw_id != newtip.raw_id
237 assert tip.raw_id != newtip.raw_id
238 with pytest.raises(NodeDoesNotExistError):
238 with pytest.raises(NodeDoesNotExistError):
239 newtip.get_node(node.path)
239 newtip.get_node(node.path)
240
240
241 def test_remove_last_file_from_directory(self):
241 def test_remove_last_file_from_directory(self):
242 node = FileNode('omg/qwe/foo/bar', content='foobar')
242 node = FileNode('omg/qwe/foo/bar', content='foobar')
243 self.imc.add(node)
243 self.imc.add(node)
244 self.imc.commit(u'added', u'joe doe')
244 self.imc.commit(u'added', u'joe doe')
245
245
246 self.imc.remove(node)
246 self.imc.remove(node)
247 tip = self.imc.commit(u'removed', u'joe doe')
247 tip = self.imc.commit(u'removed', u'joe doe')
248 with pytest.raises(NodeDoesNotExistError):
248 with pytest.raises(NodeDoesNotExistError):
249 tip.get_node('omg/qwe/foo/bar')
249 tip.get_node('omg/qwe/foo/bar')
250
250
251 def test_remove_raise_node_does_not_exist(self):
251 def test_remove_raise_node_does_not_exist(self):
252 self.imc.remove(self.nodes[0])
252 self.imc.remove(self.nodes[0])
253 with pytest.raises(NodeDoesNotExistError):
253 with pytest.raises(NodeDoesNotExistError):
254 self.imc.commit(
254 self.imc.commit(
255 message='Trying to remove node at empty repository',
255 message='Trying to remove node at empty repository',
256 author=str(self)
256 author=str(self),
257 )
257 )
258
258
259 def test_check_integrity_remove_raise_node_does_not_exist(self):
259 def test_check_integrity_remove_raise_node_does_not_exist(self):
260 self.test_add() # Performs first commit
260 self.test_add() # Performs first commit
261
261
262 node = FileNode('no-such-file')
262 node = FileNode('no-such-file')
263 self.imc.remove(node)
263 self.imc.remove(node)
264 with pytest.raises(NodeDoesNotExistError):
264 with pytest.raises(NodeDoesNotExistError):
265 self.imc.commit(
265 self.imc.commit(
266 message=u'Trying to remove not existing node',
266 message=u'Trying to remove not existing node',
267 author=unicode(self)
267 author=str(self),
268 )
268 )
269
269
270 def test_remove_raise_node_already_removed(self):
270 def test_remove_raise_node_already_removed(self):
271 self.test_add() # Performs first commit
271 self.test_add() # Performs first commit
272
272
273 node = FileNode(self.nodes[0].path)
273 node = FileNode(self.nodes[0].path)
274 self.imc.remove(node)
274 self.imc.remove(node)
275 with pytest.raises(NodeAlreadyRemovedError):
275 with pytest.raises(NodeAlreadyRemovedError):
276 self.imc.remove(node)
276 self.imc.remove(node)
277
277
278 def test_remove_raise_node_already_changed(self):
278 def test_remove_raise_node_already_changed(self):
279 self.test_add() # Performs first commit
279 self.test_add() # Performs first commit
280
280
281 node = FileNode(self.nodes[0].path, content='Bending time')
281 node = FileNode(self.nodes[0].path, content='Bending time')
282 self.imc.change(node)
282 self.imc.change(node)
283 with pytest.raises(NodeAlreadyChangedError):
283 with pytest.raises(NodeAlreadyChangedError):
284 self.imc.remove(node)
284 self.imc.remove(node)
285
285
286 def test_reset(self):
286 def test_reset(self):
287 self.imc.add(FileNode('foo', content='bar'))
287 self.imc.add(FileNode('foo', content='bar'))
288 #self.imc.change(FileNode('baz', content='new'))
288 #self.imc.change(FileNode('baz', content='new'))
289 #self.imc.remove(FileNode('qwe'))
289 #self.imc.remove(FileNode('qwe'))
290 self.imc.reset()
290 self.imc.reset()
291 assert not any((
291 assert not any((
292 self.imc.added,
292 self.imc.added,
293 self.imc.changed,
293 self.imc.changed,
294 self.imc.removed
294 self.imc.removed
295 ))
295 ))
296
296
297 def test_multiple_commits(self):
297 def test_multiple_commits(self):
298 N = 3 # number of commits to perform
298 N = 3 # number of commits to perform
299 last = None
299 last = None
300 for x in range(N):
300 for x in range(N):
301 fname = 'file%s' % str(x).rjust(5, '0')
301 fname = 'file%s' % str(x).rjust(5, '0')
302 content = 'foobar\n' * x
302 content = 'foobar\n' * x
303 node = FileNode(fname, content=content)
303 node = FileNode(fname, content=content)
304 self.imc.add(node)
304 self.imc.add(node)
305 commit = self.imc.commit(u"Commit no. %s" % (x + 1), author=u'vcs')
305 commit = self.imc.commit(u"Commit no. %s" % (x + 1), author=u'vcs')
306 assert last != commit
306 assert last != commit
307 last = commit
307 last = commit
308
308
309 # Check commit number for same repo
309 # Check commit number for same repo
310 assert len(self.repo.revisions) == N
310 assert len(self.repo.revisions) == N
311
311
312 # Check commit number for recreated repo
312 # Check commit number for recreated repo
313 assert len(self.repo.revisions) == N
313 assert len(self.repo.revisions) == N
314
314
315 def test_date_attr(self):
315 def test_date_attr(self):
316 node = FileNode('foobar.txt', content='Foobared!')
316 node = FileNode('foobar.txt', content='Foobared!')
317 self.imc.add(node)
317 self.imc.add(node)
318 date = datetime.datetime(1985, 1, 30, 1, 45)
318 date = datetime.datetime(1985, 1, 30, 1, 45)
319 commit = self.imc.commit(u"Committed at time when I was born ;-)",
319 commit = self.imc.commit(u"Committed at time when I was born ;-)",
320 author=u'lb <lb@example.com>', date=date)
320 author=u'lb <lb@example.com>', date=date)
321
321
322 assert commit.date == date
322 assert commit.date == date
323
323
324
324
325 class TestGitInMemoryChangeset(InMemoryChangesetTestMixin):
325 class TestGitInMemoryChangeset(InMemoryChangesetTestMixin):
326 backend_alias = 'git'
326 backend_alias = 'git'
327
327
328
328
329 class TestHgInMemoryChangeset(InMemoryChangesetTestMixin):
329 class TestHgInMemoryChangeset(InMemoryChangesetTestMixin):
330 backend_alias = 'hg'
330 backend_alias = 'hg'
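The in-memory changeset tests above drive the commit API through self.imc, which _BackendTestMixin provides. A rough sketch of that flow, assuming a writable repo object exposing an in_memory_changeset accessor (an assumption here; the mixin wiring is not shown in this diff):

from kallithea.lib.vcs.nodes import FileNode

imc = repo.in_memory_changeset                          # assumed accessor; the tests use self.imc
imc.add(FileNode('docs/readme.txt', content='hello'))   # stage a new file
tip = imc.commit(message=u'Add readme', author=u'Jane Doe <jane@example.com>')
assert tip.get_node('docs/readme.txt').content == b'hello'
assert not any((imc.added, imc.changed, imc.removed))   # staging lists are cleared after commit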