##// END OF EJS Templates
slack: fix links in commit/pr events
dan -
r419:d85a7d46 default
parent child Browse files
Show More
@@ -1,278 +1,270 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
import json
import logging
import urlparse
import threading
from BaseHTTPServer import BaseHTTPRequestHandler
from SocketServer import TCPServer
from routes.util import URLGenerator

import Pyro4
import pylons
import rhodecode

from rhodecode.lib import hooks_base
from rhodecode.lib.utils2 import (
    AttributeDict, safe_str, get_routes_generator_for_server_url)
35
36
36
37
37 log = logging.getLogger(__name__)
38 log = logging.getLogger(__name__)
38
39
39
40
class HooksHttpHandler(BaseHTTPRequestHandler):
    """
    HTTP handler that receives JSON-encoded hook invocations, dispatches
    them to a local :class:`Hooks` instance and answers with a JSON result.
    """

    def do_POST(self):
        # Always answer with a JSON payload -- exceptions are serialized
        # into the response instead of being propagated to the transport.
        hook_name, extras = self._read_request()
        try:
            outcome = self._call_hook(hook_name, extras)
        except Exception as exc:
            outcome = {
                'exception': exc.__class__.__name__,
                'exception_args': exc.args
            }
        self._write_response(outcome)

    def _read_request(self):
        # Content-Length is the only framing on the socket, so it must be
        # read explicitly before decoding the body.
        size = int(self.headers['Content-Length'])
        payload = json.loads(self.rfile.read(size).decode('utf-8'))
        return payload['method'], payload['extras']

    def _write_response(self, result):
        self.send_response(200)
        self.send_header("Content-type", "text/json")
        self.end_headers()
        self.wfile.write(json.dumps(result))

    def _call_hook(self, method, extras):
        # Resolve the hook by name on a fresh Hooks instance.
        return getattr(Hooks(), method)(extras)

    def log_message(self, format, *args):
        """
        This is an overriden method of BaseHTTPRequestHandler which logs using
        logging library instead of writing directly to stderr.
        """
        # TODO: mikhail: add different log levels support
        log.debug(
            "%s - - [%s] %s", self.client_address[0],
            self.log_date_time_string(), format % args)
81
82
82
83
class DummyHooksCallbackDaemon(object):
    """
    No-op callback daemon used when hooks run directly in-process; it only
    records the module path of the :class:`Hooks` implementation.
    """

    def __init__(self):
        self.hooks_module = Hooks.__module__

    def __enter__(self):
        log.debug('Running dummy hooks callback daemon')
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        log.debug('Exiting dummy hooks callback daemon')
93
94
94
95
class ThreadedHookCallbackDaemon(object):
    """
    Base context manager for callback daemons that serve hook requests from
    a background thread.  Subclasses supply the transport by implementing
    ``_prepare``, ``_run`` and ``_stop``.
    """

    _callback_thread = None
    _daemon = None
    _done = False

    def __init__(self):
        self._prepare()

    def __enter__(self):
        self._run()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self._stop()

    def _prepare(self):
        raise NotImplementedError()

    def _run(self):
        raise NotImplementedError()

    def _stop(self):
        raise NotImplementedError()
119
120
120
121
class Pyro4HooksCallbackDaemon(ThreadedHookCallbackDaemon):
    """
    Context manager which will run a callback daemon in a background thread.
    """

    hooks_uri = None

    def _prepare(self):
        log.debug("Preparing callback daemon and registering hook object")
        self._daemon = Pyro4.Daemon()
        self.hooks_uri = str(self._daemon.register(Hooks()))
        log.debug("Hooks uri is: %s", self.hooks_uri)

    def _run(self):
        log.debug("Running event loop of callback daemon in background thread")
        # The loop condition lets _stop() end the request loop by flipping
        # self._done.
        runner = threading.Thread(
            target=self._daemon.requestLoop,
            kwargs={'loopCondition': lambda: not self._done})
        runner.daemon = True
        runner.start()
        self._callback_thread = runner

    def _stop(self):
        log.debug("Waiting for background thread to finish.")
        self._done = True
        self._callback_thread.join()
        self._daemon.close()
        self._daemon = None
        self._callback_thread = None
151
152
152
153
class HttpHooksCallbackDaemon(ThreadedHookCallbackDaemon):
    """
    Context manager which will run a callback daemon in a background thread.
    """

    hooks_uri = None

    # Bind only to loopback; hooks are a strictly local channel.
    IP_ADDRESS = '127.0.0.1'

    # From Python docs: Polling reduces our responsiveness to a shutdown
    # request and wastes cpu at all other times.
    POLL_INTERVAL = 0.1

    def _prepare(self):
        log.debug("Preparing callback daemon and registering hook object")
        self._done = False
        # Port 0 asks the OS for any free ephemeral port.
        self._daemon = TCPServer((self.IP_ADDRESS, 0), HooksHttpHandler)
        _, chosen_port = self._daemon.server_address
        self.hooks_uri = '{}:{}'.format(self.IP_ADDRESS, chosen_port)
        log.debug("Hooks uri is: %s", self.hooks_uri)

    def _run(self):
        log.debug("Running event loop of callback daemon in background thread")
        runner = threading.Thread(
            target=self._daemon.serve_forever,
            kwargs={'poll_interval': self.POLL_INTERVAL})
        runner.daemon = True
        runner.start()
        self._callback_thread = runner

    def _stop(self):
        log.debug("Waiting for background thread to finish.")
        self._daemon.shutdown()
        self._callback_thread.join()
        self._daemon = None
        self._callback_thread = None
191
192
192
193
def prepare_callback_daemon(extras, protocol=None, use_direct_calls=False):
    """
    Select and instantiate the hooks callback daemon for *protocol* and
    enrich *extras* with the connection details the hooks will need.

    :param extras: dict passed to the hooks; mutated in place.
    :param protocol: 'pyro4' selects the Pyro4 daemon, anything else HTTP.
    :param use_direct_calls: bypass any daemon and call hooks in-process.
    :return: tuple of (daemon instance, the same extras dict).
    """
    protocol = protocol.lower() if protocol else None

    if use_direct_calls:
        callback_daemon = DummyHooksCallbackDaemon()
        extras['hooks_module'] = callback_daemon.hooks_module
    else:
        if protocol == 'pyro4':
            callback_daemon = Pyro4HooksCallbackDaemon()
        else:
            callback_daemon = HttpHooksCallbackDaemon()
        extras['hooks_uri'] = callback_daemon.hooks_uri
        extras['hooks_protocol'] = protocol

    return callback_daemon, extras
209
210
210
211
class Hooks(object):
    """
    Exposes the hooks for remote call backs
    """

    @Pyro4.callback
    def repo_size(self, extras):
        """Run the repo_size hook with the given extras dict."""
        log.debug("Called repo_size of Hooks object")
        return self._call_hook(hooks_base.repo_size, extras)

    @Pyro4.callback
    def pre_pull(self, extras):
        """Run the pre_pull hook with the given extras dict."""
        log.debug("Called pre_pull of Hooks object")
        return self._call_hook(hooks_base.pre_pull, extras)

    @Pyro4.callback
    def post_pull(self, extras):
        """Run the post_pull hook with the given extras dict."""
        log.debug("Called post_pull of Hooks object")
        return self._call_hook(hooks_base.post_pull, extras)

    @Pyro4.callback
    def pre_push(self, extras):
        """Run the pre_push hook with the given extras dict."""
        log.debug("Called pre_push of Hooks object")
        return self._call_hook(hooks_base.pre_push, extras)

    @Pyro4.callback
    def post_push(self, extras):
        """Run the post_push hook with the given extras dict."""
        log.debug("Called post_push of Hooks object")
        return self._call_hook(hooks_base.post_push, extras)

    def _call_hook(self, hook, extras):
        """
        Invoke *hook* with *extras* under a pylons URL generator scope and
        return a JSON-serializable status/output dict.
        """
        extras = AttributeDict(extras)
        # BUG FIX: previously the URL generator was built from a hand-rolled
        # WSGI environ with HTTP_HOST hard-coded to 'localhost', so absolute
        # links rendered inside hooks (e.g. commit/PR links in integration
        # events) pointed at the wrong host.  Derive the generator from the
        # real server URL recorded in the extras instead.
        pylons_router = get_routes_generator_for_server_url(extras.server_url)
        pylons.url._push_object(pylons_router)

        try:
            result = hook(extras)
        except Exception as error:
            log.exception('Exception when handling hook %s', hook)
            error_args = error.args
            # Status 128 mirrors a fatal VCS hook failure; the exception is
            # serialized so the remote caller can re-raise it.
            return {
                'status': 128,
                'output': '',
                'exception': type(error).__name__,
                'exception_args': error_args,
            }
        finally:
            # Always pop the generator, even on failure, to keep the pylons
            # URL object stack balanced.
            pylons.url._pop_object()

        return {
            'status': result.status,
            'output': result.output,
        }

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        pass
@@ -1,860 +1,886 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2016 RhodeCode GmbH
3 # Copyright (C) 2011-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 Some simple helper functions
23 Some simple helper functions
24 """
24 """
25
25
26
26
27 import collections
27 import collections
28 import datetime
28 import datetime
29 import dateutil.relativedelta
29 import dateutil.relativedelta
30 import hashlib
30 import hashlib
31 import logging
31 import logging
32 import re
32 import re
33 import sys
33 import sys
34 import time
34 import time
35 import threading
35 import threading
36 import urllib
36 import urllib
37 import urlobject
37 import urlobject
38 import uuid
38 import uuid
39
39
40 import pygments.lexers
40 import pygments.lexers
41 import sqlalchemy
41 import sqlalchemy
42 import sqlalchemy.engine.url
42 import sqlalchemy.engine.url
43 import webob
43 import webob
44 import routes.util
44
45
45 import rhodecode
46 import rhodecode
46
47
47
48
def md5(s):
    """Return the hexadecimal MD5 digest of *s*."""
    return hashlib.md5(s).hexdigest()
50
51
51
52
def md5_safe(s):
    """Return the hex MD5 digest of *s* after coercing it to a byte string."""
    return md5(safe_str(s))
54
55
55
56
def __get_lem():
    """
    Get language extension map based on what's inside pygments lexers
    """
    extension_map = collections.defaultdict(lambda: [])

    def __clean(pattern):
        # Strip glob prefixes, e.g. '*.py' -> 'py'.
        pattern = pattern.lstrip('*').lstrip('.')

        bracket = pattern.find('[')
        if bracket == -1:
            return [pattern.lower()]
        # Expand character classes, e.g. 'f9[05]' -> ['f90', 'f95'].
        stop = pattern.find(']')
        stem = pattern[:bracket]
        return [(stem + ch).lower() for ch in pattern[bracket + 1:stop]]

    for lexer_name, info in sorted(pygments.lexers.LEXERS.items()):
        cleaned = map(__clean, info[-2])
        if cleaned:
            for ext in reduce(lambda x, y: x + y, cleaned):
                extension_map[ext].append(lexer_name.replace('Lexer', ''))

    return dict(extension_map)
85
86
86
87
def str2bool(_str):
    """
    returs True/False value from given string, it tries to translate the
    string into boolean

    :param _str: string value to translate into boolean
    :rtype: boolean
    :returns: boolean from given string
    """
    if _str is None:
        return False
    # Pass actual booleans straight through.
    if _str in (True, False):
        return _str
    return str(_str).strip().lower() in ('t', 'true', 'y', 'yes', 'on', '1')
102 return _str in ('t', 'true', 'y', 'yes', 'on', '1')
102
103
103
104
def aslist(obj, sep=None, strip=True):
    """
    Returns given string separated by sep as list

    :param obj:
    :param sep:
    :param strip:
    """
    if isinstance(obj, (basestring)):
        pieces = obj.split(sep)
        return [piece.strip() for piece in pieces] if strip else pieces
    if isinstance(obj, (list, tuple)):
        # Already list-like: hand it back untouched.
        return obj
    if obj is None:
        return []
    # Any other scalar becomes a one-element list.
    return [obj]
123
124
124
125
def convert_line_endings(line, mode):
    """
    Converts a given line "line end" accordingly to given mode

    Available modes are::
        0 - Unix
        1 - Mac
        2 - DOS

    :param line: given line to convert
    :param mode: mode to convert to
    :rtype: str
    :return: converted line according to mode
    """
    if mode == 0:
        # Collapse DOS pairs first so a lone '\r' pass cannot double-convert.
        return line.replace('\r\n', '\n').replace('\r', '\n')
    if mode == 1:
        return line.replace('\r\n', '\r').replace('\n', '\r')
    if mode == 2:
        # Upgrade every bare '\r' or bare '\n' to the DOS pair.
        return re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line)
    return line
148
149
149
150
def detect_mode(line, default):
    """
    Detects line break for given line, if line break couldn't be found
    given default value is returned

    :param line: str line
    :param default: default
    :rtype: int
    :return: value of line end on of 0 - Unix, 1 - Mac, 2 - DOS
    """
    # '\r\n' must be tested first because it also ends with '\n'.
    if line.endswith('\r\n'):
        return 2
    if line.endswith('\n'):
        return 0
    if line.endswith('\r'):
        return 1
    return default
168
169
169
170
def safe_int(val, default=None):
    """
    Returns int() of val if val is not convertable to int use default
    instead

    :param val:
    :param default:
    """
    try:
        return int(val)
    except (ValueError, TypeError):
        # Not an int-like value; fall back to the caller's default.
        return default
185
186
186
187
def safe_unicode(str_, from_encoding=None):
    """
    safe unicode function. Does few trick to turn str_ into unicode

    In case of UnicodeDecode error, we try to return it with encoding detected
    by chardet library if it fails fallback to unicode with errors replaced

    :param str_: string to decode
    :rtype: unicode
    :returns: unicode object
    """
    if isinstance(str_, unicode):
        return str_

    if not from_encoding:
        # Fall back to the configured encodings (comma separated, utf8 default).
        from_encoding = aslist(
            rhodecode.CONFIG.get('default_encoding', 'utf8'), sep=',')

    if not isinstance(from_encoding, (list, tuple)):
        from_encoding = [from_encoding]

    # First attempt: the default codec.
    try:
        return unicode(str_)
    except UnicodeDecodeError:
        pass

    # Then every configured encoding in order.
    for encoding in from_encoding:
        try:
            return unicode(str_, encoding)
        except UnicodeDecodeError:
            pass

    # Last resort: let chardet guess, otherwise replace undecodable bytes.
    try:
        import chardet
        detected = chardet.detect(str_)['encoding']
        if detected is None:
            raise Exception()
        return str_.decode(detected)
    except (ImportError, UnicodeDecodeError, Exception):
        return unicode(str_, from_encoding[0], 'replace')
228
229
229
230
def safe_str(unicode_, to_encoding=None):
    """
    safe str function. Does few trick to turn unicode_ into string

    In case of UnicodeEncodeError, we try to return it with encoding detected
    by chardet library if it fails fallback to string with errors replaced

    :param unicode_: unicode to encode
    :rtype: str
    :returns: str object
    """

    # if it's not basestr cast to str
    if not isinstance(unicode_, basestring):
        return str(unicode_)

    if isinstance(unicode_, str):
        return unicode_

    if not to_encoding:
        # Configured encodings, comma separated, defaulting to utf8.
        to_encoding = aslist(
            rhodecode.CONFIG.get('default_encoding', 'utf8'), sep=',')

    if not isinstance(to_encoding, (list, tuple)):
        to_encoding = [to_encoding]

    for enc in to_encoding:
        try:
            return unicode_.encode(enc)
        except UnicodeEncodeError:
            pass

    try:
        import chardet
        encoding = chardet.detect(unicode_)['encoding']
        # BUG FIX: the previous code raised ``UnicodeEncodeError()`` with no
        # arguments when detection failed; that constructor requires five
        # arguments, so it actually raised ``TypeError``, which escaped the
        # ``except (ImportError, UnicodeEncodeError)`` handler and crashed
        # the caller.  Fall through to the 'replace' fallback instead.
        if encoding is not None:
            return unicode_.encode(encoding)
    except (ImportError, UnicodeEncodeError):
        pass

    return unicode_.encode(to_encoding[0], 'replace')
272
273
273
274
def remove_suffix(s, suffix):
    """Return *s* with a trailing *suffix* removed, unchanged if absent."""
    if s.endswith(suffix):
        return s[:-1 * len(suffix)]
    return s
278
279
279
280
def remove_prefix(s, prefix):
    """Return *s* with a leading *prefix* removed, unchanged if absent."""
    if s.startswith(prefix):
        return s[len(prefix):]
    return s
284
285
285
286
def find_calling_context(ignore_modules=None):
    """
    Look through the calling stack and return the frame which called
    this function and is part of core module ( ie. rhodecode.* )

    :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib']
    """
    ignore_modules = ignore_modules or []
    root_package = __name__.split('.')[0]

    # Start two frames up: skip this function and its immediate caller.
    frame = sys._getframe(2)
    while frame.f_back is not None:
        module_name = frame.f_globals.get('__name__')
        if (module_name and module_name.startswith(root_package)
                and module_name not in ignore_modules):
            return frame
        frame = frame.f_back
    return None
304
305
305
306
def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
    """Custom engine_from_config functions."""
    log = logging.getLogger('sqlalchemy.engine')
    engine = sqlalchemy.engine_from_config(configuration, prefix, **kwargs)

    def color_sql(sql):
        # Wrap the statement in ANSI escapes so it stands out in the log.
        color_seq = '\033[1;33m'  # This is yellow: code 33
        normal = '\x1b[0m'
        return ''.join([color_seq, sql, normal])

    if configuration['debug']:
        # attach events only for debug configuration

        def before_cursor_execute(conn, cursor, statement,
                                  parameters, context, executemany):
            setattr(conn, 'query_start_time', time.time())
            log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
            calling_context = find_calling_context(ignore_modules=[
                'rhodecode.lib.caching_query',
                'rhodecode.model.settings',
            ])
            if calling_context:
                log.info(color_sql('call context %s:%s' % (
                    calling_context.f_code.co_filename,
                    calling_context.f_lineno,
                )))

        def after_cursor_execute(conn, cursor, statement,
                                 parameters, context, executemany):
            delattr(conn, 'query_start_time')

        sqlalchemy.event.listen(
            engine, "before_cursor_execute", before_cursor_execute)
        sqlalchemy.event.listen(
            engine, "after_cursor_execute", after_cursor_execute)

    return engine
343
344
344
345
def get_encryption_key(config):
    """
    Return the secret used for encrypted values: the dedicated
    ``rhodecode.encrypted_values.secret`` option when set, falling back to
    the ``beaker.session.secret`` session secret otherwise.
    """
    # the session secret must always exist, so look it up unconditionally
    fallback = config['beaker.session.secret']
    return config.get('rhodecode.encrypted_values.secret') or fallback
349
350
350
351
def age(prevdate, now=None, show_short_version=False, show_suffix=True,
        short_format=False):
    """
    Turns a datetime into an age string.
    If show_short_version is True, this generates a shorter string with
    an approximate age; ex. '1 day ago', rather than '1 day and 23 hours ago'.

    * IMPORTANT*
    Code of this function is written in special way so it's easier to
    backport it to javascript. If you mean to update it, please also update
    `jquery.timeago-extension.js` file

    :param prevdate: datetime object
    :param now: get current time, if not define we use
        `datetime.datetime.now()`
    :param show_short_version: if it should approximate the date and
        return a shorter string
    :param show_suffix:
    :param short_format: show short format, eg 2D instead of 2 days
    :rtype: unicode
    :returns: unicode words describing age
    """
    from pylons.i18n.translation import _, ungettext

    def _get_relative_delta(now, prevdate):
        # split the difference into calendar units via dateutil
        base = dateutil.relativedelta.relativedelta(now, prevdate)
        return {
            'year': base.years,
            'month': base.months,
            'day': base.days,
            'hour': base.hours,
            'minute': base.minutes,
            'second': base.seconds,
        }

    def _is_leap_year(year):
        return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)

    def get_month(prevdate):
        return prevdate.month

    def get_year(prevdate):
        return prevdate.year

    now = now or datetime.datetime.now()
    # units ordered largest-to-smallest; the formatting loop below relies
    # on this ordering to pick the most significant non-zero unit first
    order = ['year', 'month', 'day', 'hour', 'minute', 'second']
    deltas = {}
    future = False

    if prevdate > now:
        # swap so the delta below is always computed forward in time
        now_old = now
        now = prevdate
        prevdate = now_old
        future = True
    if future:
        prevdate = prevdate.replace(microsecond=0)
    # Get date parts deltas
    for part in order:
        rel_delta = _get_relative_delta(now, prevdate)
        deltas[part] = rel_delta[part]

    # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
    # not 1 hour, -59 minutes and -59 seconds)
    offsets = [[5, 60], [4, 60], [3, 24]]
    for element in offsets:  # seconds, minutes, hours
        num = element[0]
        length = element[1]

        part = order[num]
        carry_part = order[num - 1]

        if deltas[part] < 0:
            # borrow one from the next larger unit
            deltas[part] += length
            deltas[carry_part] -= 1

    # Same thing for days except that the increment depends on the (variable)
    # number of days in the month
    month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
    if deltas['day'] < 0:
        if get_month(prevdate) == 2 and _is_leap_year(get_year(prevdate)):
            deltas['day'] += 29
        else:
            deltas['day'] += month_lengths[get_month(prevdate) - 1]

        deltas['month'] -= 1

    if deltas['month'] < 0:
        deltas['month'] += 12
        deltas['year'] -= 1

    # Format the result
    if short_format:
        fmt_funcs = {
            'year': lambda d: u'%dy' % d,
            'month': lambda d: u'%dm' % d,
            'day': lambda d: u'%dd' % d,
            'hour': lambda d: u'%dh' % d,
            'minute': lambda d: u'%dmin' % d,
            'second': lambda d: u'%dsec' % d,
        }
    else:
        fmt_funcs = {
            'year': lambda d: ungettext(u'%d year', '%d years', d) % d,
            'month': lambda d: ungettext(u'%d month', '%d months', d) % d,
            'day': lambda d: ungettext(u'%d day', '%d days', d) % d,
            'hour': lambda d: ungettext(u'%d hour', '%d hours', d) % d,
            'minute': lambda d: ungettext(u'%d minute', '%d minutes', d) % d,
            'second': lambda d: ungettext(u'%d second', '%d seconds', d) % d,
        }

    # walk from the largest unit down; the first non-zero unit decides the
    # output, optionally refined by the next smaller unit
    i = 0
    for part in order:
        value = deltas[part]
        if value != 0:

            if i < 5:
                sub_part = order[i + 1]
                sub_value = deltas[sub_part]
            else:
                sub_value = 0

            if sub_value == 0 or show_short_version:
                _val = fmt_funcs[part](value)
                if future:
                    if show_suffix:
                        return _(u'in %s') % _val
                    else:
                        return _val

                else:
                    if show_suffix:
                        return _(u'%s ago') % _val
                    else:
                        return _val

            val = fmt_funcs[part](value)
            val_detail = fmt_funcs[sub_part](sub_value)

            if short_format:
                datetime_tmpl = u'%s, %s'
                if show_suffix:
                    datetime_tmpl = _(u'%s, %s ago')
                    if future:
                        datetime_tmpl = _(u'in %s, %s')
            else:
                datetime_tmpl = _(u'%s and %s')
                if show_suffix:
                    datetime_tmpl = _(u'%s and %s ago')
                    if future:
                        datetime_tmpl = _(u'in %s and %s')

            return datetime_tmpl % (val, val_detail)
        i += 1
    return _(u'just now')
505
506
506
507
def uri_filter(uri):
    """
    Removes user:password from given url string

    :param uri:
    :rtype: unicode
    :returns: filtered list of strings
    """
    if not uri:
        return ''

    # peel off a known scheme prefix, remembering it for the result
    proto = ''
    for known_scheme in ('https://', 'http://'):
        if uri.startswith(known_scheme):
            proto = known_scheme
            uri = uri[len(known_scheme):]
            break

    # remove passwords and username: drop everything up to and including
    # the first '@'
    uri = uri[uri.find('@') + 1:]

    # split host from an optional port on the first ':'
    colon_pos = uri.find(':')
    if colon_pos == -1:
        host, port = uri, None
    else:
        host, port = uri[:colon_pos], uri[colon_pos + 1:]

    return filter(None, [proto, host, port])
537
538
538
539
def credentials_filter(uri):
    """
    Returns a url with removed credentials

    :param uri:
    """
    parts = uri_filter(uri)
    # re-attach the ':' separator to the port, if one survived filtering
    if len(parts) > 2 and parts[2]:
        parts[2] = ':' + parts[2]

    return ''.join(parts)
552
553
553
554
def get_clone_url(uri_tmpl, qualifed_home_url, repo_name, repo_id, **override):
    """
    Expand a clone-url template against this server's home url and the
    given repository. Supported placeholders: ``{scheme}``, ``{user}``,
    ``{netloc}``, ``{prefix}``, ``{repo}``, ``{repoid}``.

    :param uri_tmpl: template string containing ``{key}`` placeholders
    :param qualifed_home_url: fully qualified base url of this server
    :param repo_name: substituted for ``{repo}``
    :param repo_id: substituted for ``{repoid}``
    :param override: extra or overriding template arguments (e.g. ``user``)
    """
    parsed_url = urlobject.URLObject(qualifed_home_url)
    decoded_path = safe_unicode(urllib.unquote(parsed_url.path.rstrip('/')))
    args = {
        'scheme': parsed_url.scheme,
        'user': '',
        # path if we use proxy-prefix
        'netloc': parsed_url.netloc+decoded_path,
        'prefix': decoded_path,
        'repo': repo_name,
        'repoid': str(repo_id)
    }
    args.update(override)
    # user may come from override above; url-quote it before substitution
    args['user'] = urllib.quote(safe_str(args['user']))

    for k, v in args.items():
        uri_tmpl = uri_tmpl.replace('{%s}' % k, v)

    # remove leading @ sign if it's present. Case of empty user
    url_obj = urlobject.URLObject(uri_tmpl)
    url = url_obj.with_netloc(url_obj.netloc.lstrip('@'))

    return safe_unicode(url)
577
578
578
579
def get_commit_safe(repo, commit_id=None, commit_idx=None, pre_load=None):
    """
    Safe version of get_commit if this commit doesn't exists for a
    repository it returns a Dummy one instead

    :param repo: repository instance
    :param commit_id: commit id as str
    :param commit_idx: numeric commit index
    :param pre_load: optional list of commit attributes to load
    """
    # TODO(skreft): remove these circular imports
    from rhodecode.lib.vcs.backends.base import BaseRepository, EmptyCommit
    from rhodecode.lib.vcs.exceptions import RepositoryError
    if not isinstance(repo, BaseRepository):
        # use real %-formatting here: the original passed the value as a
        # second Exception argument (logging style), so the message was
        # never interpolated
        raise Exception(
            'You must pass an Repository '
            'object as first argument got %s' % type(repo))

    try:
        commit = repo.get_commit(
            commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
    except (RepositoryError, LookupError):
        # missing/unknown commit: hand back a dummy EmptyCommit instead
        commit = EmptyCommit()
    return commit
601
602
602
603
def datetime_to_time(dt):
    """Convert a datetime to a Unix timestamp; falsy input yields None."""
    return time.mktime(dt.timetuple()) if dt else None
606
607
607
608
def time_to_datetime(tm):
    """
    Convert a Unix timestamp (number or numeric string) to a local
    datetime. Falsy or unparsable input yields None.
    """
    if not tm:
        return None
    if isinstance(tm, basestring):
        try:
            tm = float(tm)
        except ValueError:
            # non-numeric string: give up quietly, as callers expect
            return None
    return datetime.datetime.fromtimestamp(tm)
616
617
617
618
def time_to_utcdatetime(tm):
    """
    Convert a Unix timestamp (number or numeric string) to a UTC
    datetime. Falsy or unparsable input yields None.
    """
    if not tm:
        return None
    if isinstance(tm, basestring):
        try:
            tm = float(tm)
        except ValueError:
            # non-numeric string: give up quietly, as callers expect
            return None
    return datetime.datetime.utcfromtimestamp(tm)
626
627
627
628
# Finds @username mentions inside free text; only the username itself is
# captured (group 1).
MENTIONS_REGEX = re.compile(
    # ^@ or @ without any special chars in front
    r'(?:^@|[^a-zA-Z0-9\-\_\.]@)'
    # main body starts with letter, then can be . - _
    r'([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)',
    re.VERBOSE | re.MULTILINE)
634
635
635
636
def extract_mentioned_users(s):
    """
    Returns unique usernames from given string s that have @mention

    :param s: string to get mentions
    """
    # de-duplicate via a set, then order case-insensitively
    mentioned = set(MENTIONS_REGEX.findall(s))
    return sorted(mentioned, key=lambda username: username.lower())
647
648
648
649
class AttributeDict(dict):
    """
    A dict whose keys can also be read, written and deleted as attributes.
    Attribute access to a missing key yields None instead of raising.
    """
    def __getattr__(self, key):
        # missing keys resolve to None rather than raising AttributeError
        return self.get(key, None)

    def __setattr__(self, key, value):
        self[key] = value

    def __delattr__(self, key):
        del self[key]
654
655
655
656
def fix_PATH(os_=None):
    """
    Get current active python path, and append it to PATH variable to fix
    issues of subprocess calls and different python versions
    """
    if os_ is not None:
        os = os_
    else:
        import os

    # prepend the running interpreter's directory unless PATH already
    # starts with it
    interpreter_dir = os.path.split(sys.executable)[0]
    if not os.environ['PATH'].startswith(interpreter_dir):
        os.environ['PATH'] = '%s:%s' % (interpreter_dir, os.environ['PATH'])
669
670
670
671
def obfuscate_url_pw(engine):
    """Return the given DB connection url with any password masked out."""
    obfuscated = engine or ''
    try:
        obfuscated = sqlalchemy.engine.url.make_url(engine)
        if obfuscated.password:
            obfuscated.password = 'XXXXX'
    except Exception:
        # best-effort: fall back to the raw value when parsing fails
        pass
    return unicode(obfuscated)
680
681
681
682
def get_server_url(environ):
    """Return the server base url (host url + mount point) for a WSGI environ."""
    request = webob.Request(environ)
    return request.host_url + request.script_name
685
686
686
687
def unique_id(hexlen=32):
    """Return a random, url-safe id of at most ``hexlen`` characters."""
    # alphabet deliberately omits 0, 1, I, O, i and o
    return suuid(
        truncate_to=hexlen,
        alphabet="23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz")
690
691
691
692
def suuid(url=None, truncate_to=22, alphabet=None):
    """
    Generate and return a short URL safe UUID.

    If the url parameter is provided, set the namespace to the provided
    URL and generate a UUID.

    :param url: url to get the uuid for; ``None`` generates a random UUID
    :param truncate_to: truncate the basic 22 UUID to shorter version
    :param alphabet: optional character set used to encode the UUID

    The IDs won't be universally unique any longer, but the probability of
    a collision will still be very low.
    """
    # Define our alphabet.
    _ALPHABET = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ"

    # If no URL is given, generate a random UUID.
    if url is None:
        unique_id = uuid.uuid4().int
    else:
        unique_id = uuid.uuid3(uuid.NAMESPACE_URL, url).int

    # Base-N encode the 128-bit integer. Use divmod (floor division): the
    # previous ``int(unique_id / alphabet_length)`` relied on Python 2
    # integer '/' and silently loses precision through float under
    # Python 3 or ``from __future__ import division``.
    alphabet_length = len(_ALPHABET)
    output = []
    while unique_id > 0:
        unique_id, digit = divmod(unique_id, alphabet_length)
        output.append(_ALPHABET[digit])
    return "".join(output)[:truncate_to]
721
722
722
723
def get_current_rhodecode_user():
    """
    Gets rhodecode user from threadlocal tmpl_context variable if it's
    defined, else returns None.
    """
    from pylons import tmpl_context as c
    # guard with hasattr: the threadlocal may not carry a user at all
    if not hasattr(c, 'rhodecode_user'):
        return None
    return c.rhodecode_user
733
734
734
735
def action_logger_generic(action, namespace=''):
    """
    A generic logger for actions useful to the system overview, tries to find
    an acting user for the context of the call otherwise reports unknown user

    :param action: logging message eg 'comment 5 deleted'
    :type action: string

    :param namespace: namespace of the logging message eg. 'repo.comments'
    :type namespace: string
    """
    logger_name = 'rhodecode.actions'
    if namespace:
        logger_name = '%s.%s' % (logger_name, namespace)

    log = logging.getLogger(logger_name)

    # get a user if we can; fall back to a placeholder and a warning
    user = get_current_rhodecode_user()
    if user:
        logfunc = log.info
    else:
        user = '<unknown user>'
        logfunc = log.warning

    logfunc('Logging action by {}: {}'.format(user, action))
765
766
766
767
def escape_split(text, sep=',', maxsplit=-1):
    r"""
    Allows for escaping of the separator: e.g. arg='foo\, bar'

    It should be noted that the way bash et. al. do command line parsing, those
    single quotes are required.
    """
    escaped_sep = r'\%s' % sep

    # fast path: no escaped separator present, a plain split is enough
    if escaped_sep not in text:
        return text.split(sep, maxsplit)

    head, _escape, tail = text.partition(escaped_sep)
    pieces = head.split(sep, maxsplit)  # a regular split is fine here
    # the last piece of head must be glued to the first piece of the
    # recursively-processed tail, with the escape collapsed to a literal
    # separator
    pending = pieces.pop()

    # recurse because there may be more escaped separators
    rest = escape_split(tail, sep, maxsplit)
    pending += sep + rest[0]

    return pieces + [pending] + rest[1:]
792
793
793
794
class OptionalAttr(object):
    """
    Special Optional Option that defines other attribute. Example::

        def test(apiuser, userid=Optional(OAttr('apiuser'))):
            user = Optional.extract(userid)
            # calls

    """

    def __init__(self, attr_name):
        # the name of the parameter this marker stands in for
        self.attr_name = attr_name

    def __repr__(self):
        return '<OptionalAttr:%s>' % self.attr_name

    def __call__(self):
        # calling the marker simply yields the marker itself
        return self
812
813
813
814
# short alias for OptionalAttr
OAttr = OptionalAttr
816
817
817
818
class Optional(object):
    """
    Defines an optional parameter::

        param = param.getval() if isinstance(param, Optional) else param
        param = param() if isinstance(param, Optional) else param

    is equivalent of::

        param = Optional.extract(param)

    """

    def __init__(self, type_):
        # the wrapped default value (or an OAttr marker)
        self.type_ = type_

    def __repr__(self):
        return '<Optional:%s>' % repr(self.type_)

    def __call__(self):
        return self.getval()

    def getval(self):
        """
        returns value from this Optional instance
        """
        wrapped = self.type_
        if isinstance(wrapped, OAttr):
            # use params name
            return wrapped.attr_name
        return wrapped

    @classmethod
    def extract(cls, val):
        """
        Extracts value from Optional() instance

        :param val:
        :return: original value if it's not Optional instance else
            value of instance
        """
        return val.getval() if isinstance(val, cls) else val
862
863
def get_routes_generator_for_server_url(server_url):
    """
    Build a ``routes`` URLGenerator bound to a synthetic WSGI environ
    derived from ``server_url``, so urls can be generated outside of a
    live request.
    """
    parsed_url = urlobject.URLObject(server_url)
    netloc = safe_str(parsed_url.netloc)
    script_name = safe_str(parsed_url.path)

    if ':' in netloc:
        server_name, server_port = netloc.split(':')
    else:
        server_name = netloc
        # default the port from the scheme when the url carries none
        server_port = (parsed_url.scheme == 'https' and '443' or '80')

    environ = {
        'REQUEST_METHOD': 'GET',
        'PATH_INFO': '/',
        'SERVER_NAME': server_name,
        'SERVER_PORT': server_port,
        'SCRIPT_NAME': script_name,
    }
    if parsed_url.scheme == 'https':
        environ['HTTPS'] = 'on'
        environ['wsgi.url_scheme'] = 'https'

    return routes.util.URLGenerator(rhodecode.CONFIG['routes.map'], environ)
@@ -1,1154 +1,1154 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2016 RhodeCode GmbH
3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 pull request model for RhodeCode
23 pull request model for RhodeCode
24 """
24 """
25
25
26 from collections import namedtuple
26 from collections import namedtuple
27 import json
27 import json
28 import logging
28 import logging
29 import datetime
29 import datetime
30
30
31 from pylons.i18n.translation import _
31 from pylons.i18n.translation import _
32 from pylons.i18n.translation import lazy_ugettext
32 from pylons.i18n.translation import lazy_ugettext
33
33
34 import rhodecode
34 import rhodecode
35 from rhodecode.lib import helpers as h, hooks_utils, diffs
35 from rhodecode.lib import helpers as h, hooks_utils, diffs
36 from rhodecode.lib.compat import OrderedDict
36 from rhodecode.lib.compat import OrderedDict
37 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
37 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
38 from rhodecode.lib.markup_renderer import (
38 from rhodecode.lib.markup_renderer import (
39 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
39 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
40 from rhodecode.lib.utils import action_logger
40 from rhodecode.lib.utils import action_logger
41 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
41 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
42 from rhodecode.lib.vcs.backends.base import (
42 from rhodecode.lib.vcs.backends.base import (
43 Reference, MergeResponse, MergeFailureReason)
43 Reference, MergeResponse, MergeFailureReason)
44 from rhodecode.lib.vcs.exceptions import (
44 from rhodecode.lib.vcs.exceptions import (
45 CommitDoesNotExistError, EmptyRepositoryError)
45 CommitDoesNotExistError, EmptyRepositoryError)
46 from rhodecode.model import BaseModel
46 from rhodecode.model import BaseModel
47 from rhodecode.model.changeset_status import ChangesetStatusModel
47 from rhodecode.model.changeset_status import ChangesetStatusModel
48 from rhodecode.model.comment import ChangesetCommentsModel
48 from rhodecode.model.comment import ChangesetCommentsModel
49 from rhodecode.model.db import (
49 from rhodecode.model.db import (
50 PullRequest, PullRequestReviewers, Notification, ChangesetStatus,
50 PullRequest, PullRequestReviewers, Notification, ChangesetStatus,
51 PullRequestVersion, ChangesetComment)
51 PullRequestVersion, ChangesetComment)
52 from rhodecode.model.meta import Session
52 from rhodecode.model.meta import Session
53 from rhodecode.model.notification import NotificationModel, \
53 from rhodecode.model.notification import NotificationModel, \
54 EmailNotificationModel
54 EmailNotificationModel
55 from rhodecode.model.scm import ScmModel
55 from rhodecode.model.scm import ScmModel
56 from rhodecode.model.settings import VcsSettingsModel
56 from rhodecode.model.settings import VcsSettingsModel
57
57
58
58
59 log = logging.getLogger(__name__)
59 log = logging.getLogger(__name__)
60
60
61
61
class PullRequestModel(BaseModel):
    """
    Model layer for pull requests: lookup and listing, permission checks,
    creation, merging and the related hook/notification triggering.
    """

    # SQLAlchemy model this BaseModel subclass operates on
    cls = PullRequest

    # number of context lines used when rendering pull request diffs
    DIFF_CONTEXT = 3

    # user-facing (lazily translated) message for each MergeFailureReason
    # value the vcs merge backends can report
    MERGE_STATUS_MESSAGES = {
        MergeFailureReason.NONE: lazy_ugettext(
            'This pull request can be automatically merged.'),
        MergeFailureReason.UNKNOWN: lazy_ugettext(
            'This pull request cannot be merged because of an unhandled'
            ' exception.'),
        MergeFailureReason.MERGE_FAILED: lazy_ugettext(
            'This pull request cannot be merged because of conflicts.'),
        MergeFailureReason.PUSH_FAILED: lazy_ugettext(
            'This pull request could not be merged because push to target'
            ' failed.'),
        MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
            'This pull request cannot be merged because the target is not a'
            ' head.'),
        MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
            'This pull request cannot be merged because the source contains'
            ' more branches than the target.'),
        MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
            'This pull request cannot be merged because the target has'
            ' multiple heads.'),
        MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
            'This pull request cannot be merged because the target repository'
            ' is locked.'),
        MergeFailureReason.MISSING_COMMIT: lazy_ugettext(
            'This pull request cannot be merged because the target or the '
            'source reference is missing.'),
    }
95
95
96 def __get_pull_request(self, pull_request):
96 def __get_pull_request(self, pull_request):
97 return self._get_instance(PullRequest, pull_request)
97 return self._get_instance(PullRequest, pull_request)
98
98
99 def _check_perms(self, perms, pull_request, user, api=False):
99 def _check_perms(self, perms, pull_request, user, api=False):
100 if not api:
100 if not api:
101 return h.HasRepoPermissionAny(*perms)(
101 return h.HasRepoPermissionAny(*perms)(
102 user=user, repo_name=pull_request.target_repo.repo_name)
102 user=user, repo_name=pull_request.target_repo.repo_name)
103 else:
103 else:
104 return h.HasRepoPermissionAnyApi(*perms)(
104 return h.HasRepoPermissionAnyApi(*perms)(
105 user=user, repo_name=pull_request.target_repo.repo_name)
105 user=user, repo_name=pull_request.target_repo.repo_name)
106
106
107 def check_user_read(self, pull_request, user, api=False):
107 def check_user_read(self, pull_request, user, api=False):
108 _perms = ('repository.admin', 'repository.write', 'repository.read',)
108 _perms = ('repository.admin', 'repository.write', 'repository.read',)
109 return self._check_perms(_perms, pull_request, user, api)
109 return self._check_perms(_perms, pull_request, user, api)
110
110
111 def check_user_merge(self, pull_request, user, api=False):
111 def check_user_merge(self, pull_request, user, api=False):
112 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
112 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
113 return self._check_perms(_perms, pull_request, user, api)
113 return self._check_perms(_perms, pull_request, user, api)
114
114
115 def check_user_update(self, pull_request, user, api=False):
115 def check_user_update(self, pull_request, user, api=False):
116 owner = user.user_id == pull_request.user_id
116 owner = user.user_id == pull_request.user_id
117 return self.check_user_merge(pull_request, user, api) or owner
117 return self.check_user_merge(pull_request, user, api) or owner
118
118
119 def check_user_change_status(self, pull_request, user, api=False):
119 def check_user_change_status(self, pull_request, user, api=False):
120 reviewer = user.user_id in [x.user_id for x in
120 reviewer = user.user_id in [x.user_id for x in
121 pull_request.reviewers]
121 pull_request.reviewers]
122 return self.check_user_update(pull_request, user, api) or reviewer
122 return self.check_user_update(pull_request, user, api) or reviewer
123
123
124 def get(self, pull_request):
124 def get(self, pull_request):
125 return self.__get_pull_request(pull_request)
125 return self.__get_pull_request(pull_request)
126
126
127 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
127 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
128 opened_by=None, order_by=None,
128 opened_by=None, order_by=None,
129 order_dir='desc'):
129 order_dir='desc'):
130 repo = self._get_repo(repo_name)
130 repo = self._get_repo(repo_name)
131 q = PullRequest.query()
131 q = PullRequest.query()
132 # source or target
132 # source or target
133 if source:
133 if source:
134 q = q.filter(PullRequest.source_repo == repo)
134 q = q.filter(PullRequest.source_repo == repo)
135 else:
135 else:
136 q = q.filter(PullRequest.target_repo == repo)
136 q = q.filter(PullRequest.target_repo == repo)
137
137
138 # closed,opened
138 # closed,opened
139 if statuses:
139 if statuses:
140 q = q.filter(PullRequest.status.in_(statuses))
140 q = q.filter(PullRequest.status.in_(statuses))
141
141
142 # opened by filter
142 # opened by filter
143 if opened_by:
143 if opened_by:
144 q = q.filter(PullRequest.user_id.in_(opened_by))
144 q = q.filter(PullRequest.user_id.in_(opened_by))
145
145
146 if order_by:
146 if order_by:
147 order_map = {
147 order_map = {
148 'name_raw': PullRequest.pull_request_id,
148 'name_raw': PullRequest.pull_request_id,
149 'title': PullRequest.title,
149 'title': PullRequest.title,
150 'updated_on_raw': PullRequest.updated_on
150 'updated_on_raw': PullRequest.updated_on
151 }
151 }
152 if order_dir == 'asc':
152 if order_dir == 'asc':
153 q = q.order_by(order_map[order_by].asc())
153 q = q.order_by(order_map[order_by].asc())
154 else:
154 else:
155 q = q.order_by(order_map[order_by].desc())
155 q = q.order_by(order_map[order_by].desc())
156
156
157 return q
157 return q
158
158
159 def count_all(self, repo_name, source=False, statuses=None,
159 def count_all(self, repo_name, source=False, statuses=None,
160 opened_by=None):
160 opened_by=None):
161 """
161 """
162 Count the number of pull requests for a specific repository.
162 Count the number of pull requests for a specific repository.
163
163
164 :param repo_name: target or source repo
164 :param repo_name: target or source repo
165 :param source: boolean flag to specify if repo_name refers to source
165 :param source: boolean flag to specify if repo_name refers to source
166 :param statuses: list of pull request statuses
166 :param statuses: list of pull request statuses
167 :param opened_by: author user of the pull request
167 :param opened_by: author user of the pull request
168 :returns: int number of pull requests
168 :returns: int number of pull requests
169 """
169 """
170 q = self._prepare_get_all_query(
170 q = self._prepare_get_all_query(
171 repo_name, source=source, statuses=statuses, opened_by=opened_by)
171 repo_name, source=source, statuses=statuses, opened_by=opened_by)
172
172
173 return q.count()
173 return q.count()
174
174
175 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
175 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
176 offset=0, length=None, order_by=None, order_dir='desc'):
176 offset=0, length=None, order_by=None, order_dir='desc'):
177 """
177 """
178 Get all pull requests for a specific repository.
178 Get all pull requests for a specific repository.
179
179
180 :param repo_name: target or source repo
180 :param repo_name: target or source repo
181 :param source: boolean flag to specify if repo_name refers to source
181 :param source: boolean flag to specify if repo_name refers to source
182 :param statuses: list of pull request statuses
182 :param statuses: list of pull request statuses
183 :param opened_by: author user of the pull request
183 :param opened_by: author user of the pull request
184 :param offset: pagination offset
184 :param offset: pagination offset
185 :param length: length of returned list
185 :param length: length of returned list
186 :param order_by: order of the returned list
186 :param order_by: order of the returned list
187 :param order_dir: 'asc' or 'desc' ordering direction
187 :param order_dir: 'asc' or 'desc' ordering direction
188 :returns: list of pull requests
188 :returns: list of pull requests
189 """
189 """
190 q = self._prepare_get_all_query(
190 q = self._prepare_get_all_query(
191 repo_name, source=source, statuses=statuses, opened_by=opened_by,
191 repo_name, source=source, statuses=statuses, opened_by=opened_by,
192 order_by=order_by, order_dir=order_dir)
192 order_by=order_by, order_dir=order_dir)
193
193
194 if length:
194 if length:
195 pull_requests = q.limit(length).offset(offset).all()
195 pull_requests = q.limit(length).offset(offset).all()
196 else:
196 else:
197 pull_requests = q.all()
197 pull_requests = q.all()
198
198
199 return pull_requests
199 return pull_requests
200
200
201 def count_awaiting_review(self, repo_name, source=False, statuses=None,
201 def count_awaiting_review(self, repo_name, source=False, statuses=None,
202 opened_by=None):
202 opened_by=None):
203 """
203 """
204 Count the number of pull requests for a specific repository that are
204 Count the number of pull requests for a specific repository that are
205 awaiting review.
205 awaiting review.
206
206
207 :param repo_name: target or source repo
207 :param repo_name: target or source repo
208 :param source: boolean flag to specify if repo_name refers to source
208 :param source: boolean flag to specify if repo_name refers to source
209 :param statuses: list of pull request statuses
209 :param statuses: list of pull request statuses
210 :param opened_by: author user of the pull request
210 :param opened_by: author user of the pull request
211 :returns: int number of pull requests
211 :returns: int number of pull requests
212 """
212 """
213 pull_requests = self.get_awaiting_review(
213 pull_requests = self.get_awaiting_review(
214 repo_name, source=source, statuses=statuses, opened_by=opened_by)
214 repo_name, source=source, statuses=statuses, opened_by=opened_by)
215
215
216 return len(pull_requests)
216 return len(pull_requests)
217
217
218 def get_awaiting_review(self, repo_name, source=False, statuses=None,
218 def get_awaiting_review(self, repo_name, source=False, statuses=None,
219 opened_by=None, offset=0, length=None,
219 opened_by=None, offset=0, length=None,
220 order_by=None, order_dir='desc'):
220 order_by=None, order_dir='desc'):
221 """
221 """
222 Get all pull requests for a specific repository that are awaiting
222 Get all pull requests for a specific repository that are awaiting
223 review.
223 review.
224
224
225 :param repo_name: target or source repo
225 :param repo_name: target or source repo
226 :param source: boolean flag to specify if repo_name refers to source
226 :param source: boolean flag to specify if repo_name refers to source
227 :param statuses: list of pull request statuses
227 :param statuses: list of pull request statuses
228 :param opened_by: author user of the pull request
228 :param opened_by: author user of the pull request
229 :param offset: pagination offset
229 :param offset: pagination offset
230 :param length: length of returned list
230 :param length: length of returned list
231 :param order_by: order of the returned list
231 :param order_by: order of the returned list
232 :param order_dir: 'asc' or 'desc' ordering direction
232 :param order_dir: 'asc' or 'desc' ordering direction
233 :returns: list of pull requests
233 :returns: list of pull requests
234 """
234 """
235 pull_requests = self.get_all(
235 pull_requests = self.get_all(
236 repo_name, source=source, statuses=statuses, opened_by=opened_by,
236 repo_name, source=source, statuses=statuses, opened_by=opened_by,
237 order_by=order_by, order_dir=order_dir)
237 order_by=order_by, order_dir=order_dir)
238
238
239 _filtered_pull_requests = []
239 _filtered_pull_requests = []
240 for pr in pull_requests:
240 for pr in pull_requests:
241 status = pr.calculated_review_status()
241 status = pr.calculated_review_status()
242 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
242 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
243 ChangesetStatus.STATUS_UNDER_REVIEW]:
243 ChangesetStatus.STATUS_UNDER_REVIEW]:
244 _filtered_pull_requests.append(pr)
244 _filtered_pull_requests.append(pr)
245 if length:
245 if length:
246 return _filtered_pull_requests[offset:offset+length]
246 return _filtered_pull_requests[offset:offset+length]
247 else:
247 else:
248 return _filtered_pull_requests
248 return _filtered_pull_requests
249
249
250 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
250 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
251 opened_by=None, user_id=None):
251 opened_by=None, user_id=None):
252 """
252 """
253 Count the number of pull requests for a specific repository that are
253 Count the number of pull requests for a specific repository that are
254 awaiting review from a specific user.
254 awaiting review from a specific user.
255
255
256 :param repo_name: target or source repo
256 :param repo_name: target or source repo
257 :param source: boolean flag to specify if repo_name refers to source
257 :param source: boolean flag to specify if repo_name refers to source
258 :param statuses: list of pull request statuses
258 :param statuses: list of pull request statuses
259 :param opened_by: author user of the pull request
259 :param opened_by: author user of the pull request
260 :param user_id: reviewer user of the pull request
260 :param user_id: reviewer user of the pull request
261 :returns: int number of pull requests
261 :returns: int number of pull requests
262 """
262 """
263 pull_requests = self.get_awaiting_my_review(
263 pull_requests = self.get_awaiting_my_review(
264 repo_name, source=source, statuses=statuses, opened_by=opened_by,
264 repo_name, source=source, statuses=statuses, opened_by=opened_by,
265 user_id=user_id)
265 user_id=user_id)
266
266
267 return len(pull_requests)
267 return len(pull_requests)
268
268
269 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
269 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
270 opened_by=None, user_id=None, offset=0,
270 opened_by=None, user_id=None, offset=0,
271 length=None, order_by=None, order_dir='desc'):
271 length=None, order_by=None, order_dir='desc'):
272 """
272 """
273 Get all pull requests for a specific repository that are awaiting
273 Get all pull requests for a specific repository that are awaiting
274 review from a specific user.
274 review from a specific user.
275
275
276 :param repo_name: target or source repo
276 :param repo_name: target or source repo
277 :param source: boolean flag to specify if repo_name refers to source
277 :param source: boolean flag to specify if repo_name refers to source
278 :param statuses: list of pull request statuses
278 :param statuses: list of pull request statuses
279 :param opened_by: author user of the pull request
279 :param opened_by: author user of the pull request
280 :param user_id: reviewer user of the pull request
280 :param user_id: reviewer user of the pull request
281 :param offset: pagination offset
281 :param offset: pagination offset
282 :param length: length of returned list
282 :param length: length of returned list
283 :param order_by: order of the returned list
283 :param order_by: order of the returned list
284 :param order_dir: 'asc' or 'desc' ordering direction
284 :param order_dir: 'asc' or 'desc' ordering direction
285 :returns: list of pull requests
285 :returns: list of pull requests
286 """
286 """
287 pull_requests = self.get_all(
287 pull_requests = self.get_all(
288 repo_name, source=source, statuses=statuses, opened_by=opened_by,
288 repo_name, source=source, statuses=statuses, opened_by=opened_by,
289 order_by=order_by, order_dir=order_dir)
289 order_by=order_by, order_dir=order_dir)
290
290
291 _my = PullRequestModel().get_not_reviewed(user_id)
291 _my = PullRequestModel().get_not_reviewed(user_id)
292 my_participation = []
292 my_participation = []
293 for pr in pull_requests:
293 for pr in pull_requests:
294 if pr in _my:
294 if pr in _my:
295 my_participation.append(pr)
295 my_participation.append(pr)
296 _filtered_pull_requests = my_participation
296 _filtered_pull_requests = my_participation
297 if length:
297 if length:
298 return _filtered_pull_requests[offset:offset+length]
298 return _filtered_pull_requests[offset:offset+length]
299 else:
299 else:
300 return _filtered_pull_requests
300 return _filtered_pull_requests
301
301
302 def get_not_reviewed(self, user_id):
302 def get_not_reviewed(self, user_id):
303 return [
303 return [
304 x.pull_request for x in PullRequestReviewers.query().filter(
304 x.pull_request for x in PullRequestReviewers.query().filter(
305 PullRequestReviewers.user_id == user_id).all()
305 PullRequestReviewers.user_id == user_id).all()
306 ]
306 ]
307
307
    def get_versions(self, pull_request):
        """
        Return saved versions of *pull_request*, ordered by version id
        ascending (oldest version first).

        NOTE(review): the previous docstring claimed descending order,
        but the query sorts by ``pull_request_version_id.asc()``.
        """
        return PullRequestVersion.query()\
            .filter(PullRequestVersion.pull_request == pull_request)\
            .order_by(PullRequestVersion.pull_request_version_id.asc())\
            .all()
316
316
    def create(self, created_by, source_repo, source_ref, target_repo,
               target_ref, revisions, reviewers, title, description=None):
        """
        Create a pull request, assign its reviewers, mark the included
        commits as "Under Review", notify reviewers and fire the
        'create' hook.

        :param created_by: user (or user id) opening the pull request
        :param source_repo: repo (or repo name) the changes come from
        :param source_ref: source reference string
        :param target_repo: repo (or repo name) the changes go into
        :param target_ref: target reference string
        :param revisions: commit ids included in the pull request
        :param reviewers: iterable of reviewer user ids
        :param title: pull request title
        :param description: optional long description
        :return: the newly created PullRequest
        """
        created_by_user = self._get_user(created_by)
        source_repo = self._get_repo(source_repo)
        target_repo = self._get_repo(target_repo)

        pull_request = PullRequest()
        pull_request.source_repo = source_repo
        pull_request.source_ref = source_ref
        pull_request.target_repo = target_repo
        pull_request.target_ref = target_ref
        pull_request.revisions = revisions
        pull_request.title = title
        pull_request.description = description
        pull_request.author = created_by_user

        Session().add(pull_request)
        # flush so the pull request receives its id; the reviewer rows
        # and status records created below reference it
        Session().flush()

        # members / reviewers
        for user_id in set(reviewers):
            user = self._get_user(user_id)
            reviewer = PullRequestReviewers(user, pull_request)
            Session().add(reviewer)

        # Set approval status to "Under Review" for all commits which are
        # part of this pull request.
        ChangesetStatusModel().set_status(
            repo=target_repo,
            status=ChangesetStatus.STATUS_UNDER_REVIEW,
            user=created_by_user,
            pull_request=pull_request
        )

        self.notify_reviewers(pull_request, reviewers)
        self._trigger_pull_request_hook(
            pull_request, created_by_user, 'create')

        return pull_request
356
356
    def _trigger_pull_request_hook(self, pull_request, user, action):
        """
        Fire the log/integration hook that matches *action*.

        Unrecognised actions are silently ignored — not every pull
        request operation has a hook attached.

        :param pull_request: pull request (or id) the action applies to
        :param user: user performing the action
        :param action: one of 'create', 'merge', 'close',
            'review_status_change', 'update'
        """
        pull_request = self.__get_pull_request(pull_request)
        target_scm = pull_request.target_repo.scm_instance()
        if action == 'create':
            trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
        elif action == 'merge':
            trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
        elif action == 'close':
            trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
        elif action == 'review_status_change':
            trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
        elif action == 'update':
            trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
        else:
            # unknown action: nothing to trigger
            return

        trigger_hook(
            username=user.username,
            repo_name=pull_request.target_repo.repo_name,
            repo_alias=target_scm.alias,
            pull_request=pull_request)
378
378
379 def _get_commit_ids(self, pull_request):
379 def _get_commit_ids(self, pull_request):
380 """
380 """
381 Return the commit ids of the merged pull request.
381 Return the commit ids of the merged pull request.
382
382
383 This method is not dealing correctly yet with the lack of autoupdates
383 This method is not dealing correctly yet with the lack of autoupdates
384 nor with the implicit target updates.
384 nor with the implicit target updates.
385 For example: if a commit in the source repo is already in the target it
385 For example: if a commit in the source repo is already in the target it
386 will be reported anyways.
386 will be reported anyways.
387 """
387 """
388 merge_rev = pull_request.merge_rev
388 merge_rev = pull_request.merge_rev
389 if merge_rev is None:
389 if merge_rev is None:
390 raise ValueError('This pull request was not merged yet')
390 raise ValueError('This pull request was not merged yet')
391
391
392 commit_ids = list(pull_request.revisions)
392 commit_ids = list(pull_request.revisions)
393 if merge_rev not in commit_ids:
393 if merge_rev not in commit_ids:
394 commit_ids.append(merge_rev)
394 commit_ids.append(merge_rev)
395
395
396 return commit_ids
396 return commit_ids
397
397
398 def merge(self, pull_request, user, extras):
398 def merge(self, pull_request, user, extras):
399 log.debug("Merging pull request %s", pull_request.pull_request_id)
399 log.debug("Merging pull request %s", pull_request.pull_request_id)
400 merge_state = self._merge_pull_request(pull_request, user, extras)
400 merge_state = self._merge_pull_request(pull_request, user, extras)
401 if merge_state.executed:
401 if merge_state.executed:
402 log.debug(
402 log.debug(
403 "Merge was successful, updating the pull request comments.")
403 "Merge was successful, updating the pull request comments.")
404 self._comment_and_close_pr(pull_request, user, merge_state)
404 self._comment_and_close_pr(pull_request, user, merge_state)
405 self._log_action('user_merged_pull_request', user, pull_request)
405 self._log_action('user_merged_pull_request', user, pull_request)
406 else:
406 else:
407 log.warn("Merge failed, not updating the pull request.")
407 log.warn("Merge failed, not updating the pull request.")
408 return merge_state
408 return merge_state
409
409
    def _merge_pull_request(self, pull_request, user, extras):
        """
        Execute the actual vcs merge of the pull request into its target.

        Spins up the hooks callback daemon so that server-side hooks fire
        for the push produced by the merge, and passes the RC_SCM_DATA
        extras through the target repo's vcs config.

        :return: merge state object returned by the vcs backend
        """
        target_vcs = pull_request.target_repo.scm_instance()
        source_vcs = pull_request.source_repo.scm_instance()
        # resolve the target reference against the current repo state
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        message = _(
            'Merge pull request #%(pr_id)s from '
            '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
            'pr_id': pull_request.pull_request_id,
            'source_repo': source_vcs.name,
            'source_ref_name': pull_request.source_ref_parts.name,
            'pr_title': pull_request.title
        }

        workspace_id = self._workspace_id(pull_request)
        protocol = rhodecode.CONFIG.get('vcs.hooks.protocol')
        use_direct_calls = rhodecode.CONFIG.get('vcs.hooks.direct_calls')
        use_rebase = self._use_rebase_for_merging(pull_request)

        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=protocol, use_direct_calls=use_direct_calls)

        with callback_daemon:
            # TODO: johbo: Implement a clean way to run a config_override
            # for a single call.
            target_vcs.config.set(
                'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
            merge_state = target_vcs.merge(
                target_ref, source_vcs, pull_request.source_ref_parts,
                workspace_id, user_name=user.username,
                user_email=user.email, message=message, use_rebase=use_rebase)
        return merge_state
443
443
444 def _comment_and_close_pr(self, pull_request, user, merge_state):
444 def _comment_and_close_pr(self, pull_request, user, merge_state):
445 pull_request.merge_rev = merge_state.merge_commit_id
445 pull_request.merge_rev = merge_state.merge_commit_id
446 pull_request.updated_on = datetime.datetime.now()
446 pull_request.updated_on = datetime.datetime.now()
447
447
448 ChangesetCommentsModel().create(
448 ChangesetCommentsModel().create(
449 text=unicode(_('Pull request merged and closed')),
449 text=unicode(_('Pull request merged and closed')),
450 repo=pull_request.target_repo.repo_id,
450 repo=pull_request.target_repo.repo_id,
451 user=user.user_id,
451 user=user.user_id,
452 pull_request=pull_request.pull_request_id,
452 pull_request=pull_request.pull_request_id,
453 f_path=None,
453 f_path=None,
454 line_no=None,
454 line_no=None,
455 closing_pr=True
455 closing_pr=True
456 )
456 )
457
457
458 Session().add(pull_request)
458 Session().add(pull_request)
459 Session().flush()
459 Session().flush()
460 # TODO: paris: replace invalidation with less radical solution
460 # TODO: paris: replace invalidation with less radical solution
461 ScmModel().mark_for_invalidation(
461 ScmModel().mark_for_invalidation(
462 pull_request.target_repo.repo_name)
462 pull_request.target_repo.repo_name)
463 self._trigger_pull_request_hook(pull_request, user, 'merge')
463 self._trigger_pull_request_hook(pull_request, user, 'merge')
464
464
465 def has_valid_update_type(self, pull_request):
465 def has_valid_update_type(self, pull_request):
466 source_ref_type = pull_request.source_ref_parts.type
466 source_ref_type = pull_request.source_ref_parts.type
467 return source_ref_type in ['book', 'branch', 'tag']
467 return source_ref_type in ['book', 'branch', 'tag']
468
468
469 def update_commits(self, pull_request):
469 def update_commits(self, pull_request):
470 """
470 """
471 Get the updated list of commits for the pull request
471 Get the updated list of commits for the pull request
472 and return the new pull request version and the list
472 and return the new pull request version and the list
473 of commits processed by this update action
473 of commits processed by this update action
474 """
474 """
475
475
476 pull_request = self.__get_pull_request(pull_request)
476 pull_request = self.__get_pull_request(pull_request)
477 source_ref_type = pull_request.source_ref_parts.type
477 source_ref_type = pull_request.source_ref_parts.type
478 source_ref_name = pull_request.source_ref_parts.name
478 source_ref_name = pull_request.source_ref_parts.name
479 source_ref_id = pull_request.source_ref_parts.commit_id
479 source_ref_id = pull_request.source_ref_parts.commit_id
480
480
481 if not self.has_valid_update_type(pull_request):
481 if not self.has_valid_update_type(pull_request):
482 log.debug(
482 log.debug(
483 "Skipping update of pull request %s due to ref type: %s",
483 "Skipping update of pull request %s due to ref type: %s",
484 pull_request, source_ref_type)
484 pull_request, source_ref_type)
485 return (None, None)
485 return (None, None)
486
486
487 source_repo = pull_request.source_repo.scm_instance()
487 source_repo = pull_request.source_repo.scm_instance()
488 source_commit = source_repo.get_commit(commit_id=source_ref_name)
488 source_commit = source_repo.get_commit(commit_id=source_ref_name)
489 if source_ref_id == source_commit.raw_id:
489 if source_ref_id == source_commit.raw_id:
490 log.debug("Nothing changed in pull request %s", pull_request)
490 log.debug("Nothing changed in pull request %s", pull_request)
491 return (None, None)
491 return (None, None)
492
492
493 # Finally there is a need for an update
493 # Finally there is a need for an update
494 pull_request_version = self._create_version_from_snapshot(pull_request)
494 pull_request_version = self._create_version_from_snapshot(pull_request)
495 self._link_comments_to_version(pull_request_version)
495 self._link_comments_to_version(pull_request_version)
496
496
497 target_ref_type = pull_request.target_ref_parts.type
497 target_ref_type = pull_request.target_ref_parts.type
498 target_ref_name = pull_request.target_ref_parts.name
498 target_ref_name = pull_request.target_ref_parts.name
499 target_ref_id = pull_request.target_ref_parts.commit_id
499 target_ref_id = pull_request.target_ref_parts.commit_id
500 target_repo = pull_request.target_repo.scm_instance()
500 target_repo = pull_request.target_repo.scm_instance()
501
501
502 if target_ref_type in ('tag', 'branch', 'book'):
502 if target_ref_type in ('tag', 'branch', 'book'):
503 target_commit = target_repo.get_commit(target_ref_name)
503 target_commit = target_repo.get_commit(target_ref_name)
504 else:
504 else:
505 target_commit = target_repo.get_commit(target_ref_id)
505 target_commit = target_repo.get_commit(target_ref_id)
506
506
507 # re-compute commit ids
507 # re-compute commit ids
508 old_commit_ids = set(pull_request.revisions)
508 old_commit_ids = set(pull_request.revisions)
509 pre_load = ["author", "branch", "date", "message"]
509 pre_load = ["author", "branch", "date", "message"]
510 commit_ranges = target_repo.compare(
510 commit_ranges = target_repo.compare(
511 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
511 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
512 pre_load=pre_load)
512 pre_load=pre_load)
513
513
514 ancestor = target_repo.get_common_ancestor(
514 ancestor = target_repo.get_common_ancestor(
515 target_commit.raw_id, source_commit.raw_id, source_repo)
515 target_commit.raw_id, source_commit.raw_id, source_repo)
516
516
517 pull_request.source_ref = '%s:%s:%s' % (
517 pull_request.source_ref = '%s:%s:%s' % (
518 source_ref_type, source_ref_name, source_commit.raw_id)
518 source_ref_type, source_ref_name, source_commit.raw_id)
519 pull_request.target_ref = '%s:%s:%s' % (
519 pull_request.target_ref = '%s:%s:%s' % (
520 target_ref_type, target_ref_name, ancestor)
520 target_ref_type, target_ref_name, ancestor)
521 pull_request.revisions = [
521 pull_request.revisions = [
522 commit.raw_id for commit in reversed(commit_ranges)]
522 commit.raw_id for commit in reversed(commit_ranges)]
523 pull_request.updated_on = datetime.datetime.now()
523 pull_request.updated_on = datetime.datetime.now()
524 Session().add(pull_request)
524 Session().add(pull_request)
525 new_commit_ids = set(pull_request.revisions)
525 new_commit_ids = set(pull_request.revisions)
526
526
527 changes = self._calculate_commit_id_changes(
527 changes = self._calculate_commit_id_changes(
528 old_commit_ids, new_commit_ids)
528 old_commit_ids, new_commit_ids)
529
529
530 old_diff_data, new_diff_data = self._generate_update_diffs(
530 old_diff_data, new_diff_data = self._generate_update_diffs(
531 pull_request, pull_request_version)
531 pull_request, pull_request_version)
532
532
533 ChangesetCommentsModel().outdate_comments(
533 ChangesetCommentsModel().outdate_comments(
534 pull_request, old_diff_data=old_diff_data,
534 pull_request, old_diff_data=old_diff_data,
535 new_diff_data=new_diff_data)
535 new_diff_data=new_diff_data)
536
536
537 file_changes = self._calculate_file_changes(
537 file_changes = self._calculate_file_changes(
538 old_diff_data, new_diff_data)
538 old_diff_data, new_diff_data)
539
539
540 # Add an automatic comment to the pull request
540 # Add an automatic comment to the pull request
541 update_comment = ChangesetCommentsModel().create(
541 update_comment = ChangesetCommentsModel().create(
542 text=self._render_update_message(changes, file_changes),
542 text=self._render_update_message(changes, file_changes),
543 repo=pull_request.target_repo,
543 repo=pull_request.target_repo,
544 user=pull_request.author,
544 user=pull_request.author,
545 pull_request=pull_request,
545 pull_request=pull_request,
546 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
546 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
547
547
548 # Update status to "Under Review" for added commits
548 # Update status to "Under Review" for added commits
549 for commit_id in changes.added:
549 for commit_id in changes.added:
550 ChangesetStatusModel().set_status(
550 ChangesetStatusModel().set_status(
551 repo=pull_request.source_repo,
551 repo=pull_request.source_repo,
552 status=ChangesetStatus.STATUS_UNDER_REVIEW,
552 status=ChangesetStatus.STATUS_UNDER_REVIEW,
553 comment=update_comment,
553 comment=update_comment,
554 user=pull_request.author,
554 user=pull_request.author,
555 pull_request=pull_request,
555 pull_request=pull_request,
556 revision=commit_id)
556 revision=commit_id)
557
557
558 log.debug(
558 log.debug(
559 'Updated pull request %s, added_ids: %s, common_ids: %s, '
559 'Updated pull request %s, added_ids: %s, common_ids: %s, '
560 'removed_ids: %s', pull_request.pull_request_id,
560 'removed_ids: %s', pull_request.pull_request_id,
561 changes.added, changes.common, changes.removed)
561 changes.added, changes.common, changes.removed)
562 log.debug('Updated pull request with the following file changes: %s',
562 log.debug('Updated pull request with the following file changes: %s',
563 file_changes)
563 file_changes)
564
564
565 log.info(
565 log.info(
566 "Updated pull request %s from commit %s to commit %s, "
566 "Updated pull request %s from commit %s to commit %s, "
567 "stored new version %s of this pull request.",
567 "stored new version %s of this pull request.",
568 pull_request.pull_request_id, source_ref_id,
568 pull_request.pull_request_id, source_ref_id,
569 pull_request.source_ref_parts.commit_id,
569 pull_request.source_ref_parts.commit_id,
570 pull_request_version.pull_request_version_id)
570 pull_request_version.pull_request_version_id)
571 Session().commit()
571 Session().commit()
572 self._trigger_pull_request_hook(pull_request, pull_request.author,
572 self._trigger_pull_request_hook(pull_request, pull_request.author,
573 'update')
573 'update')
574 return (pull_request_version, changes)
574 return (pull_request_version, changes)
575
575
576 def _create_version_from_snapshot(self, pull_request):
576 def _create_version_from_snapshot(self, pull_request):
577 version = PullRequestVersion()
577 version = PullRequestVersion()
578 version.title = pull_request.title
578 version.title = pull_request.title
579 version.description = pull_request.description
579 version.description = pull_request.description
580 version.status = pull_request.status
580 version.status = pull_request.status
581 version.created_on = pull_request.created_on
581 version.created_on = pull_request.created_on
582 version.updated_on = pull_request.updated_on
582 version.updated_on = pull_request.updated_on
583 version.user_id = pull_request.user_id
583 version.user_id = pull_request.user_id
584 version.source_repo = pull_request.source_repo
584 version.source_repo = pull_request.source_repo
585 version.source_ref = pull_request.source_ref
585 version.source_ref = pull_request.source_ref
586 version.target_repo = pull_request.target_repo
586 version.target_repo = pull_request.target_repo
587 version.target_ref = pull_request.target_ref
587 version.target_ref = pull_request.target_ref
588
588
589 version._last_merge_source_rev = pull_request._last_merge_source_rev
589 version._last_merge_source_rev = pull_request._last_merge_source_rev
590 version._last_merge_target_rev = pull_request._last_merge_target_rev
590 version._last_merge_target_rev = pull_request._last_merge_target_rev
591 version._last_merge_status = pull_request._last_merge_status
591 version._last_merge_status = pull_request._last_merge_status
592 version.merge_rev = pull_request.merge_rev
592 version.merge_rev = pull_request.merge_rev
593
593
594 version.revisions = pull_request.revisions
594 version.revisions = pull_request.revisions
595 version.pull_request = pull_request
595 version.pull_request = pull_request
596 Session().add(version)
596 Session().add(version)
597 Session().flush()
597 Session().flush()
598
598
599 return version
599 return version
600
600
601 def _generate_update_diffs(self, pull_request, pull_request_version):
601 def _generate_update_diffs(self, pull_request, pull_request_version):
602 diff_context = (
602 diff_context = (
603 self.DIFF_CONTEXT +
603 self.DIFF_CONTEXT +
604 ChangesetCommentsModel.needed_extra_diff_context())
604 ChangesetCommentsModel.needed_extra_diff_context())
605 old_diff = self._get_diff_from_pr_or_version(
605 old_diff = self._get_diff_from_pr_or_version(
606 pull_request_version, context=diff_context)
606 pull_request_version, context=diff_context)
607 new_diff = self._get_diff_from_pr_or_version(
607 new_diff = self._get_diff_from_pr_or_version(
608 pull_request, context=diff_context)
608 pull_request, context=diff_context)
609
609
610 old_diff_data = diffs.DiffProcessor(old_diff)
610 old_diff_data = diffs.DiffProcessor(old_diff)
611 old_diff_data.prepare()
611 old_diff_data.prepare()
612 new_diff_data = diffs.DiffProcessor(new_diff)
612 new_diff_data = diffs.DiffProcessor(new_diff)
613 new_diff_data.prepare()
613 new_diff_data.prepare()
614
614
615 return old_diff_data, new_diff_data
615 return old_diff_data, new_diff_data
616
616
617 def _link_comments_to_version(self, pull_request_version):
617 def _link_comments_to_version(self, pull_request_version):
618 """
618 """
619 Link all unlinked comments of this pull request to the given version.
619 Link all unlinked comments of this pull request to the given version.
620
620
621 :param pull_request_version: The `PullRequestVersion` to which
621 :param pull_request_version: The `PullRequestVersion` to which
622 the comments shall be linked.
622 the comments shall be linked.
623
623
624 """
624 """
625 pull_request = pull_request_version.pull_request
625 pull_request = pull_request_version.pull_request
626 comments = ChangesetComment.query().filter(
626 comments = ChangesetComment.query().filter(
627 # TODO: johbo: Should we query for the repo at all here?
627 # TODO: johbo: Should we query for the repo at all here?
628 # Pending decision on how comments of PRs are to be related
628 # Pending decision on how comments of PRs are to be related
629 # to either the source repo, the target repo or no repo at all.
629 # to either the source repo, the target repo or no repo at all.
630 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
630 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
631 ChangesetComment.pull_request == pull_request,
631 ChangesetComment.pull_request == pull_request,
632 ChangesetComment.pull_request_version == None)
632 ChangesetComment.pull_request_version == None)
633
633
634 # TODO: johbo: Find out why this breaks if it is done in a bulk
634 # TODO: johbo: Find out why this breaks if it is done in a bulk
635 # operation.
635 # operation.
636 for comment in comments:
636 for comment in comments:
637 comment.pull_request_version_id = (
637 comment.pull_request_version_id = (
638 pull_request_version.pull_request_version_id)
638 pull_request_version.pull_request_version_id)
639 Session().add(comment)
639 Session().add(comment)
640
640
641 def _calculate_commit_id_changes(self, old_ids, new_ids):
641 def _calculate_commit_id_changes(self, old_ids, new_ids):
642 added = new_ids.difference(old_ids)
642 added = new_ids.difference(old_ids)
643 common = old_ids.intersection(new_ids)
643 common = old_ids.intersection(new_ids)
644 removed = old_ids.difference(new_ids)
644 removed = old_ids.difference(new_ids)
645 return ChangeTuple(added, common, removed)
645 return ChangeTuple(added, common, removed)
646
646
647 def _calculate_file_changes(self, old_diff_data, new_diff_data):
647 def _calculate_file_changes(self, old_diff_data, new_diff_data):
648
648
649 old_files = OrderedDict()
649 old_files = OrderedDict()
650 for diff_data in old_diff_data.parsed_diff:
650 for diff_data in old_diff_data.parsed_diff:
651 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
651 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
652
652
653 added_files = []
653 added_files = []
654 modified_files = []
654 modified_files = []
655 removed_files = []
655 removed_files = []
656 for diff_data in new_diff_data.parsed_diff:
656 for diff_data in new_diff_data.parsed_diff:
657 new_filename = diff_data['filename']
657 new_filename = diff_data['filename']
658 new_hash = md5_safe(diff_data['raw_diff'])
658 new_hash = md5_safe(diff_data['raw_diff'])
659
659
660 old_hash = old_files.get(new_filename)
660 old_hash = old_files.get(new_filename)
661 if not old_hash:
661 if not old_hash:
662 # file is not present in old diff, means it's added
662 # file is not present in old diff, means it's added
663 added_files.append(new_filename)
663 added_files.append(new_filename)
664 else:
664 else:
665 if new_hash != old_hash:
665 if new_hash != old_hash:
666 modified_files.append(new_filename)
666 modified_files.append(new_filename)
667 # now remove a file from old, since we have seen it already
667 # now remove a file from old, since we have seen it already
668 del old_files[new_filename]
668 del old_files[new_filename]
669
669
670 # removed files is when there are present in old, but not in NEW,
670 # removed files is when there are present in old, but not in NEW,
671 # since we remove old files that are present in new diff, left-overs
671 # since we remove old files that are present in new diff, left-overs
672 # if any should be the removed files
672 # if any should be the removed files
673 removed_files.extend(old_files.keys())
673 removed_files.extend(old_files.keys())
674
674
675 return FileChangeTuple(added_files, modified_files, removed_files)
675 return FileChangeTuple(added_files, modified_files, removed_files)
676
676
677 def _render_update_message(self, changes, file_changes):
677 def _render_update_message(self, changes, file_changes):
678 """
678 """
679 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
679 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
680 so it's always looking the same disregarding on which default
680 so it's always looking the same disregarding on which default
681 renderer system is using.
681 renderer system is using.
682
682
683 :param changes: changes named tuple
683 :param changes: changes named tuple
684 :param file_changes: file changes named tuple
684 :param file_changes: file changes named tuple
685
685
686 """
686 """
687 new_status = ChangesetStatus.get_status_lbl(
687 new_status = ChangesetStatus.get_status_lbl(
688 ChangesetStatus.STATUS_UNDER_REVIEW)
688 ChangesetStatus.STATUS_UNDER_REVIEW)
689
689
690 changed_files = (
690 changed_files = (
691 file_changes.added + file_changes.modified + file_changes.removed)
691 file_changes.added + file_changes.modified + file_changes.removed)
692
692
693 params = {
693 params = {
694 'under_review_label': new_status,
694 'under_review_label': new_status,
695 'added_commits': changes.added,
695 'added_commits': changes.added,
696 'removed_commits': changes.removed,
696 'removed_commits': changes.removed,
697 'changed_files': changed_files,
697 'changed_files': changed_files,
698 'added_files': file_changes.added,
698 'added_files': file_changes.added,
699 'modified_files': file_changes.modified,
699 'modified_files': file_changes.modified,
700 'removed_files': file_changes.removed,
700 'removed_files': file_changes.removed,
701 }
701 }
702 renderer = RstTemplateRenderer()
702 renderer = RstTemplateRenderer()
703 return renderer.render('pull_request_update.mako', **params)
703 return renderer.render('pull_request_update.mako', **params)
704
704
705 def edit(self, pull_request, title, description):
705 def edit(self, pull_request, title, description):
706 pull_request = self.__get_pull_request(pull_request)
706 pull_request = self.__get_pull_request(pull_request)
707 if pull_request.is_closed():
707 if pull_request.is_closed():
708 raise ValueError('This pull request is closed')
708 raise ValueError('This pull request is closed')
709 if title:
709 if title:
710 pull_request.title = title
710 pull_request.title = title
711 pull_request.description = description
711 pull_request.description = description
712 pull_request.updated_on = datetime.datetime.now()
712 pull_request.updated_on = datetime.datetime.now()
713 Session().add(pull_request)
713 Session().add(pull_request)
714
714
715 def update_reviewers(self, pull_request, reviewers_ids):
715 def update_reviewers(self, pull_request, reviewers_ids):
716 reviewers_ids = set(reviewers_ids)
716 reviewers_ids = set(reviewers_ids)
717 pull_request = self.__get_pull_request(pull_request)
717 pull_request = self.__get_pull_request(pull_request)
718 current_reviewers = PullRequestReviewers.query()\
718 current_reviewers = PullRequestReviewers.query()\
719 .filter(PullRequestReviewers.pull_request ==
719 .filter(PullRequestReviewers.pull_request ==
720 pull_request).all()
720 pull_request).all()
721 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
721 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
722
722
723 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
723 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
724 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
724 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
725
725
726 log.debug("Adding %s reviewers", ids_to_add)
726 log.debug("Adding %s reviewers", ids_to_add)
727 log.debug("Removing %s reviewers", ids_to_remove)
727 log.debug("Removing %s reviewers", ids_to_remove)
728 changed = False
728 changed = False
729 for uid in ids_to_add:
729 for uid in ids_to_add:
730 changed = True
730 changed = True
731 _usr = self._get_user(uid)
731 _usr = self._get_user(uid)
732 reviewer = PullRequestReviewers(_usr, pull_request)
732 reviewer = PullRequestReviewers(_usr, pull_request)
733 Session().add(reviewer)
733 Session().add(reviewer)
734
734
735 self.notify_reviewers(pull_request, ids_to_add)
735 self.notify_reviewers(pull_request, ids_to_add)
736
736
737 for uid in ids_to_remove:
737 for uid in ids_to_remove:
738 changed = True
738 changed = True
739 reviewer = PullRequestReviewers.query()\
739 reviewer = PullRequestReviewers.query()\
740 .filter(PullRequestReviewers.user_id == uid,
740 .filter(PullRequestReviewers.user_id == uid,
741 PullRequestReviewers.pull_request == pull_request)\
741 PullRequestReviewers.pull_request == pull_request)\
742 .scalar()
742 .scalar()
743 if reviewer:
743 if reviewer:
744 Session().delete(reviewer)
744 Session().delete(reviewer)
745 if changed:
745 if changed:
746 pull_request.updated_on = datetime.datetime.now()
746 pull_request.updated_on = datetime.datetime.now()
747 Session().add(pull_request)
747 Session().add(pull_request)
748
748
749 return ids_to_add, ids_to_remove
749 return ids_to_add, ids_to_remove
750
750
751 def get_url(self, pull_request):
751 def get_url(self, pull_request):
752 return h.url('pullrequest_show',
752 return h.url('pullrequest_show',
753 repo_name=pull_request.target_repo.repo_name,
753 repo_name=safe_str(pull_request.target_repo.repo_name),
754 pull_request_id=pull_request.pull_request_id,
754 pull_request_id=pull_request.pull_request_id,
755 qualified=True)
755 qualified=True)
756
756
757 def notify_reviewers(self, pull_request, reviewers_ids):
757 def notify_reviewers(self, pull_request, reviewers_ids):
758 # notification to reviewers
758 # notification to reviewers
759 if not reviewers_ids:
759 if not reviewers_ids:
760 return
760 return
761
761
762 pull_request_obj = pull_request
762 pull_request_obj = pull_request
763 # get the current participants of this pull request
763 # get the current participants of this pull request
764 recipients = reviewers_ids
764 recipients = reviewers_ids
765 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
765 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
766
766
767 pr_source_repo = pull_request_obj.source_repo
767 pr_source_repo = pull_request_obj.source_repo
768 pr_target_repo = pull_request_obj.target_repo
768 pr_target_repo = pull_request_obj.target_repo
769
769
770 pr_url = h.url(
770 pr_url = h.url(
771 'pullrequest_show',
771 'pullrequest_show',
772 repo_name=pr_target_repo.repo_name,
772 repo_name=pr_target_repo.repo_name,
773 pull_request_id=pull_request_obj.pull_request_id,
773 pull_request_id=pull_request_obj.pull_request_id,
774 qualified=True,)
774 qualified=True,)
775
775
776 # set some variables for email notification
776 # set some variables for email notification
777 pr_target_repo_url = h.url(
777 pr_target_repo_url = h.url(
778 'summary_home',
778 'summary_home',
779 repo_name=pr_target_repo.repo_name,
779 repo_name=pr_target_repo.repo_name,
780 qualified=True)
780 qualified=True)
781
781
782 pr_source_repo_url = h.url(
782 pr_source_repo_url = h.url(
783 'summary_home',
783 'summary_home',
784 repo_name=pr_source_repo.repo_name,
784 repo_name=pr_source_repo.repo_name,
785 qualified=True)
785 qualified=True)
786
786
787 # pull request specifics
787 # pull request specifics
788 pull_request_commits = [
788 pull_request_commits = [
789 (x.raw_id, x.message)
789 (x.raw_id, x.message)
790 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
790 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
791
791
792 kwargs = {
792 kwargs = {
793 'user': pull_request.author,
793 'user': pull_request.author,
794 'pull_request': pull_request_obj,
794 'pull_request': pull_request_obj,
795 'pull_request_commits': pull_request_commits,
795 'pull_request_commits': pull_request_commits,
796
796
797 'pull_request_target_repo': pr_target_repo,
797 'pull_request_target_repo': pr_target_repo,
798 'pull_request_target_repo_url': pr_target_repo_url,
798 'pull_request_target_repo_url': pr_target_repo_url,
799
799
800 'pull_request_source_repo': pr_source_repo,
800 'pull_request_source_repo': pr_source_repo,
801 'pull_request_source_repo_url': pr_source_repo_url,
801 'pull_request_source_repo_url': pr_source_repo_url,
802
802
803 'pull_request_url': pr_url,
803 'pull_request_url': pr_url,
804 }
804 }
805
805
806 # pre-generate the subject for notification itself
806 # pre-generate the subject for notification itself
807 (subject,
807 (subject,
808 _h, _e, # we don't care about those
808 _h, _e, # we don't care about those
809 body_plaintext) = EmailNotificationModel().render_email(
809 body_plaintext) = EmailNotificationModel().render_email(
810 notification_type, **kwargs)
810 notification_type, **kwargs)
811
811
812 # create notification objects, and emails
812 # create notification objects, and emails
813 NotificationModel().create(
813 NotificationModel().create(
814 created_by=pull_request.author,
814 created_by=pull_request.author,
815 notification_subject=subject,
815 notification_subject=subject,
816 notification_body=body_plaintext,
816 notification_body=body_plaintext,
817 notification_type=notification_type,
817 notification_type=notification_type,
818 recipients=recipients,
818 recipients=recipients,
819 email_kwargs=kwargs,
819 email_kwargs=kwargs,
820 )
820 )
821
821
822 def delete(self, pull_request):
822 def delete(self, pull_request):
823 pull_request = self.__get_pull_request(pull_request)
823 pull_request = self.__get_pull_request(pull_request)
824 self._cleanup_merge_workspace(pull_request)
824 self._cleanup_merge_workspace(pull_request)
825 Session().delete(pull_request)
825 Session().delete(pull_request)
826
826
827 def close_pull_request(self, pull_request, user):
827 def close_pull_request(self, pull_request, user):
828 pull_request = self.__get_pull_request(pull_request)
828 pull_request = self.__get_pull_request(pull_request)
829 self._cleanup_merge_workspace(pull_request)
829 self._cleanup_merge_workspace(pull_request)
830 pull_request.status = PullRequest.STATUS_CLOSED
830 pull_request.status = PullRequest.STATUS_CLOSED
831 pull_request.updated_on = datetime.datetime.now()
831 pull_request.updated_on = datetime.datetime.now()
832 Session().add(pull_request)
832 Session().add(pull_request)
833 self._trigger_pull_request_hook(
833 self._trigger_pull_request_hook(
834 pull_request, pull_request.author, 'close')
834 pull_request, pull_request.author, 'close')
835 self._log_action('user_closed_pull_request', user, pull_request)
835 self._log_action('user_closed_pull_request', user, pull_request)
836
836
837 def close_pull_request_with_comment(self, pull_request, user, repo,
837 def close_pull_request_with_comment(self, pull_request, user, repo,
838 message=None):
838 message=None):
839 status = ChangesetStatus.STATUS_REJECTED
839 status = ChangesetStatus.STATUS_REJECTED
840
840
841 if not message:
841 if not message:
842 message = (
842 message = (
843 _('Status change %(transition_icon)s %(status)s') % {
843 _('Status change %(transition_icon)s %(status)s') % {
844 'transition_icon': '>',
844 'transition_icon': '>',
845 'status': ChangesetStatus.get_status_lbl(status)})
845 'status': ChangesetStatus.get_status_lbl(status)})
846
846
847 internal_message = _('Closing with') + ' ' + message
847 internal_message = _('Closing with') + ' ' + message
848
848
849 comm = ChangesetCommentsModel().create(
849 comm = ChangesetCommentsModel().create(
850 text=internal_message,
850 text=internal_message,
851 repo=repo.repo_id,
851 repo=repo.repo_id,
852 user=user.user_id,
852 user=user.user_id,
853 pull_request=pull_request.pull_request_id,
853 pull_request=pull_request.pull_request_id,
854 f_path=None,
854 f_path=None,
855 line_no=None,
855 line_no=None,
856 status_change=ChangesetStatus.get_status_lbl(status),
856 status_change=ChangesetStatus.get_status_lbl(status),
857 closing_pr=True
857 closing_pr=True
858 )
858 )
859
859
860 ChangesetStatusModel().set_status(
860 ChangesetStatusModel().set_status(
861 repo.repo_id,
861 repo.repo_id,
862 status,
862 status,
863 user.user_id,
863 user.user_id,
864 comm,
864 comm,
865 pull_request=pull_request.pull_request_id
865 pull_request=pull_request.pull_request_id
866 )
866 )
867 Session().flush()
867 Session().flush()
868
868
869 PullRequestModel().close_pull_request(
869 PullRequestModel().close_pull_request(
870 pull_request.pull_request_id, user)
870 pull_request.pull_request_id, user)
871
871
872 def merge_status(self, pull_request):
872 def merge_status(self, pull_request):
873 if not self._is_merge_enabled(pull_request):
873 if not self._is_merge_enabled(pull_request):
874 return False, _('Server-side pull request merging is disabled.')
874 return False, _('Server-side pull request merging is disabled.')
875 if pull_request.is_closed():
875 if pull_request.is_closed():
876 return False, _('This pull request is closed.')
876 return False, _('This pull request is closed.')
877 merge_possible, msg = self._check_repo_requirements(
877 merge_possible, msg = self._check_repo_requirements(
878 target=pull_request.target_repo, source=pull_request.source_repo)
878 target=pull_request.target_repo, source=pull_request.source_repo)
879 if not merge_possible:
879 if not merge_possible:
880 return merge_possible, msg
880 return merge_possible, msg
881
881
882 try:
882 try:
883 resp = self._try_merge(pull_request)
883 resp = self._try_merge(pull_request)
884 status = resp.possible, self.merge_status_message(
884 status = resp.possible, self.merge_status_message(
885 resp.failure_reason)
885 resp.failure_reason)
886 except NotImplementedError:
886 except NotImplementedError:
887 status = False, _('Pull request merging is not supported.')
887 status = False, _('Pull request merging is not supported.')
888
888
889 return status
889 return status
890
890
891 def _check_repo_requirements(self, target, source):
891 def _check_repo_requirements(self, target, source):
892 """
892 """
893 Check if `target` and `source` have compatible requirements.
893 Check if `target` and `source` have compatible requirements.
894
894
895 Currently this is just checking for largefiles.
895 Currently this is just checking for largefiles.
896 """
896 """
897 target_has_largefiles = self._has_largefiles(target)
897 target_has_largefiles = self._has_largefiles(target)
898 source_has_largefiles = self._has_largefiles(source)
898 source_has_largefiles = self._has_largefiles(source)
899 merge_possible = True
899 merge_possible = True
900 message = u''
900 message = u''
901
901
902 if target_has_largefiles != source_has_largefiles:
902 if target_has_largefiles != source_has_largefiles:
903 merge_possible = False
903 merge_possible = False
904 if source_has_largefiles:
904 if source_has_largefiles:
905 message = _(
905 message = _(
906 'Target repository large files support is disabled.')
906 'Target repository large files support is disabled.')
907 else:
907 else:
908 message = _(
908 message = _(
909 'Source repository large files support is disabled.')
909 'Source repository large files support is disabled.')
910
910
911 return merge_possible, message
911 return merge_possible, message
912
912
913 def _has_largefiles(self, repo):
913 def _has_largefiles(self, repo):
914 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
914 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
915 'extensions', 'largefiles')
915 'extensions', 'largefiles')
916 return largefiles_ui and largefiles_ui[0].active
916 return largefiles_ui and largefiles_ui[0].active
917
917
918 def _try_merge(self, pull_request):
918 def _try_merge(self, pull_request):
919 """
919 """
920 Try to merge the pull request and return the merge status.
920 Try to merge the pull request and return the merge status.
921 """
921 """
922 log.debug(
922 log.debug(
923 "Trying out if the pull request %s can be merged.",
923 "Trying out if the pull request %s can be merged.",
924 pull_request.pull_request_id)
924 pull_request.pull_request_id)
925 target_vcs = pull_request.target_repo.scm_instance()
925 target_vcs = pull_request.target_repo.scm_instance()
926 target_ref = self._refresh_reference(
926 target_ref = self._refresh_reference(
927 pull_request.target_ref_parts, target_vcs)
927 pull_request.target_ref_parts, target_vcs)
928
928
929 target_locked = pull_request.target_repo.locked
929 target_locked = pull_request.target_repo.locked
930 if target_locked and target_locked[0]:
930 if target_locked and target_locked[0]:
931 log.debug("The target repository is locked.")
931 log.debug("The target repository is locked.")
932 merge_state = MergeResponse(
932 merge_state = MergeResponse(
933 False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
933 False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
934 elif self._needs_merge_state_refresh(pull_request, target_ref):
934 elif self._needs_merge_state_refresh(pull_request, target_ref):
935 log.debug("Refreshing the merge status of the repository.")
935 log.debug("Refreshing the merge status of the repository.")
936 merge_state = self._refresh_merge_state(
936 merge_state = self._refresh_merge_state(
937 pull_request, target_vcs, target_ref)
937 pull_request, target_vcs, target_ref)
938 else:
938 else:
939 possible = pull_request.\
939 possible = pull_request.\
940 _last_merge_status == MergeFailureReason.NONE
940 _last_merge_status == MergeFailureReason.NONE
941 merge_state = MergeResponse(
941 merge_state = MergeResponse(
942 possible, False, None, pull_request._last_merge_status)
942 possible, False, None, pull_request._last_merge_status)
943 log.debug("Merge response: %s", merge_state)
943 log.debug("Merge response: %s", merge_state)
944 return merge_state
944 return merge_state
945
945
946 def _refresh_reference(self, reference, vcs_repository):
946 def _refresh_reference(self, reference, vcs_repository):
947 if reference.type in ('branch', 'book'):
947 if reference.type in ('branch', 'book'):
948 name_or_id = reference.name
948 name_or_id = reference.name
949 else:
949 else:
950 name_or_id = reference.commit_id
950 name_or_id = reference.commit_id
951 refreshed_commit = vcs_repository.get_commit(name_or_id)
951 refreshed_commit = vcs_repository.get_commit(name_or_id)
952 refreshed_reference = Reference(
952 refreshed_reference = Reference(
953 reference.type, reference.name, refreshed_commit.raw_id)
953 reference.type, reference.name, refreshed_commit.raw_id)
954 return refreshed_reference
954 return refreshed_reference
955
955
956 def _needs_merge_state_refresh(self, pull_request, target_reference):
956 def _needs_merge_state_refresh(self, pull_request, target_reference):
957 return not(
957 return not(
958 pull_request.revisions and
958 pull_request.revisions and
959 pull_request.revisions[0] == pull_request._last_merge_source_rev and
959 pull_request.revisions[0] == pull_request._last_merge_source_rev and
960 target_reference.commit_id == pull_request._last_merge_target_rev)
960 target_reference.commit_id == pull_request._last_merge_target_rev)
961
961
962 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
962 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
963 workspace_id = self._workspace_id(pull_request)
963 workspace_id = self._workspace_id(pull_request)
964 source_vcs = pull_request.source_repo.scm_instance()
964 source_vcs = pull_request.source_repo.scm_instance()
965 use_rebase = self._use_rebase_for_merging(pull_request)
965 use_rebase = self._use_rebase_for_merging(pull_request)
966 merge_state = target_vcs.merge(
966 merge_state = target_vcs.merge(
967 target_reference, source_vcs, pull_request.source_ref_parts,
967 target_reference, source_vcs, pull_request.source_ref_parts,
968 workspace_id, dry_run=True, use_rebase=use_rebase)
968 workspace_id, dry_run=True, use_rebase=use_rebase)
969
969
970 # Do not store the response if there was an unknown error.
970 # Do not store the response if there was an unknown error.
971 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
971 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
972 pull_request._last_merge_source_rev = pull_request.\
972 pull_request._last_merge_source_rev = pull_request.\
973 source_ref_parts.commit_id
973 source_ref_parts.commit_id
974 pull_request._last_merge_target_rev = target_reference.commit_id
974 pull_request._last_merge_target_rev = target_reference.commit_id
975 pull_request._last_merge_status = (
975 pull_request._last_merge_status = (
976 merge_state.failure_reason)
976 merge_state.failure_reason)
977 Session().add(pull_request)
977 Session().add(pull_request)
978 Session().flush()
978 Session().flush()
979
979
980 return merge_state
980 return merge_state
981
981
982 def _workspace_id(self, pull_request):
982 def _workspace_id(self, pull_request):
983 workspace_id = 'pr-%s' % pull_request.pull_request_id
983 workspace_id = 'pr-%s' % pull_request.pull_request_id
984 return workspace_id
984 return workspace_id
985
985
986 def merge_status_message(self, status_code):
986 def merge_status_message(self, status_code):
987 """
987 """
988 Return a human friendly error message for the given merge status code.
988 Return a human friendly error message for the given merge status code.
989 """
989 """
990 return self.MERGE_STATUS_MESSAGES[status_code]
990 return self.MERGE_STATUS_MESSAGES[status_code]
991
991
992 def generate_repo_data(self, repo, commit_id=None, branch=None,
992 def generate_repo_data(self, repo, commit_id=None, branch=None,
993 bookmark=None):
993 bookmark=None):
994 all_refs, selected_ref = \
994 all_refs, selected_ref = \
995 self._get_repo_pullrequest_sources(
995 self._get_repo_pullrequest_sources(
996 repo.scm_instance(), commit_id=commit_id,
996 repo.scm_instance(), commit_id=commit_id,
997 branch=branch, bookmark=bookmark)
997 branch=branch, bookmark=bookmark)
998
998
999 refs_select2 = []
999 refs_select2 = []
1000 for element in all_refs:
1000 for element in all_refs:
1001 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1001 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1002 refs_select2.append({'text': element[1], 'children': children})
1002 refs_select2.append({'text': element[1], 'children': children})
1003
1003
1004 return {
1004 return {
1005 'user': {
1005 'user': {
1006 'user_id': repo.user.user_id,
1006 'user_id': repo.user.user_id,
1007 'username': repo.user.username,
1007 'username': repo.user.username,
1008 'firstname': repo.user.firstname,
1008 'firstname': repo.user.firstname,
1009 'lastname': repo.user.lastname,
1009 'lastname': repo.user.lastname,
1010 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1010 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1011 },
1011 },
1012 'description': h.chop_at_smart(repo.description, '\n'),
1012 'description': h.chop_at_smart(repo.description, '\n'),
1013 'refs': {
1013 'refs': {
1014 'all_refs': all_refs,
1014 'all_refs': all_refs,
1015 'selected_ref': selected_ref,
1015 'selected_ref': selected_ref,
1016 'select2_refs': refs_select2
1016 'select2_refs': refs_select2
1017 }
1017 }
1018 }
1018 }
1019
1019
1020 def generate_pullrequest_title(self, source, source_ref, target):
1020 def generate_pullrequest_title(self, source, source_ref, target):
1021 return '{source}#{at_ref} to {target}'.format(
1021 return '{source}#{at_ref} to {target}'.format(
1022 source=source,
1022 source=source,
1023 at_ref=source_ref,
1023 at_ref=source_ref,
1024 target=target,
1024 target=target,
1025 )
1025 )
1026
1026
1027 def _cleanup_merge_workspace(self, pull_request):
1027 def _cleanup_merge_workspace(self, pull_request):
1028 # Merging related cleanup
1028 # Merging related cleanup
1029 target_scm = pull_request.target_repo.scm_instance()
1029 target_scm = pull_request.target_repo.scm_instance()
1030 workspace_id = 'pr-%s' % pull_request.pull_request_id
1030 workspace_id = 'pr-%s' % pull_request.pull_request_id
1031
1031
1032 try:
1032 try:
1033 target_scm.cleanup_merge_workspace(workspace_id)
1033 target_scm.cleanup_merge_workspace(workspace_id)
1034 except NotImplementedError:
1034 except NotImplementedError:
1035 pass
1035 pass
1036
1036
1037 def _get_repo_pullrequest_sources(
1037 def _get_repo_pullrequest_sources(
1038 self, repo, commit_id=None, branch=None, bookmark=None):
1038 self, repo, commit_id=None, branch=None, bookmark=None):
1039 """
1039 """
1040 Return a structure with repo's interesting commits, suitable for
1040 Return a structure with repo's interesting commits, suitable for
1041 the selectors in pullrequest controller
1041 the selectors in pullrequest controller
1042
1042
1043 :param commit_id: a commit that must be in the list somehow
1043 :param commit_id: a commit that must be in the list somehow
1044 and selected by default
1044 and selected by default
1045 :param branch: a branch that must be in the list and selected
1045 :param branch: a branch that must be in the list and selected
1046 by default - even if closed
1046 by default - even if closed
1047 :param bookmark: a bookmark that must be in the list and selected
1047 :param bookmark: a bookmark that must be in the list and selected
1048 """
1048 """
1049
1049
1050 commit_id = safe_str(commit_id) if commit_id else None
1050 commit_id = safe_str(commit_id) if commit_id else None
1051 branch = safe_str(branch) if branch else None
1051 branch = safe_str(branch) if branch else None
1052 bookmark = safe_str(bookmark) if bookmark else None
1052 bookmark = safe_str(bookmark) if bookmark else None
1053
1053
1054 selected = None
1054 selected = None
1055
1055
1056 # order matters: first source that has commit_id in it will be selected
1056 # order matters: first source that has commit_id in it will be selected
1057 sources = []
1057 sources = []
1058 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1058 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1059 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1059 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1060
1060
1061 if commit_id:
1061 if commit_id:
1062 ref_commit = (h.short_id(commit_id), commit_id)
1062 ref_commit = (h.short_id(commit_id), commit_id)
1063 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1063 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1064
1064
1065 sources.append(
1065 sources.append(
1066 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1066 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1067 )
1067 )
1068
1068
1069 groups = []
1069 groups = []
1070 for group_key, ref_list, group_name, match in sources:
1070 for group_key, ref_list, group_name, match in sources:
1071 group_refs = []
1071 group_refs = []
1072 for ref_name, ref_id in ref_list:
1072 for ref_name, ref_id in ref_list:
1073 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1073 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1074 group_refs.append((ref_key, ref_name))
1074 group_refs.append((ref_key, ref_name))
1075
1075
1076 if not selected:
1076 if not selected:
1077 if set([commit_id, match]) & set([ref_id, ref_name]):
1077 if set([commit_id, match]) & set([ref_id, ref_name]):
1078 selected = ref_key
1078 selected = ref_key
1079
1079
1080 if group_refs:
1080 if group_refs:
1081 groups.append((group_refs, group_name))
1081 groups.append((group_refs, group_name))
1082
1082
1083 if not selected:
1083 if not selected:
1084 ref = commit_id or branch or bookmark
1084 ref = commit_id or branch or bookmark
1085 if ref:
1085 if ref:
1086 raise CommitDoesNotExistError(
1086 raise CommitDoesNotExistError(
1087 'No commit refs could be found matching: %s' % ref)
1087 'No commit refs could be found matching: %s' % ref)
1088 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1088 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1089 selected = 'branch:%s:%s' % (
1089 selected = 'branch:%s:%s' % (
1090 repo.DEFAULT_BRANCH_NAME,
1090 repo.DEFAULT_BRANCH_NAME,
1091 repo.branches[repo.DEFAULT_BRANCH_NAME]
1091 repo.branches[repo.DEFAULT_BRANCH_NAME]
1092 )
1092 )
1093 elif repo.commit_ids:
1093 elif repo.commit_ids:
1094 rev = repo.commit_ids[0]
1094 rev = repo.commit_ids[0]
1095 selected = 'rev:%s:%s' % (rev, rev)
1095 selected = 'rev:%s:%s' % (rev, rev)
1096 else:
1096 else:
1097 raise EmptyRepositoryError()
1097 raise EmptyRepositoryError()
1098 return groups, selected
1098 return groups, selected
1099
1099
1100 def get_diff(self, pull_request, context=DIFF_CONTEXT):
1100 def get_diff(self, pull_request, context=DIFF_CONTEXT):
1101 pull_request = self.__get_pull_request(pull_request)
1101 pull_request = self.__get_pull_request(pull_request)
1102 return self._get_diff_from_pr_or_version(pull_request, context=context)
1102 return self._get_diff_from_pr_or_version(pull_request, context=context)
1103
1103
1104 def _get_diff_from_pr_or_version(self, pr_or_version, context):
1104 def _get_diff_from_pr_or_version(self, pr_or_version, context):
1105 source_repo = pr_or_version.source_repo
1105 source_repo = pr_or_version.source_repo
1106
1106
1107 # we swap org/other ref since we run a simple diff on one repo
1107 # we swap org/other ref since we run a simple diff on one repo
1108 target_ref_id = pr_or_version.target_ref_parts.commit_id
1108 target_ref_id = pr_or_version.target_ref_parts.commit_id
1109 source_ref_id = pr_or_version.source_ref_parts.commit_id
1109 source_ref_id = pr_or_version.source_ref_parts.commit_id
1110 target_commit = source_repo.get_commit(
1110 target_commit = source_repo.get_commit(
1111 commit_id=safe_str(target_ref_id))
1111 commit_id=safe_str(target_ref_id))
1112 source_commit = source_repo.get_commit(commit_id=safe_str(source_ref_id))
1112 source_commit = source_repo.get_commit(commit_id=safe_str(source_ref_id))
1113 vcs_repo = source_repo.scm_instance()
1113 vcs_repo = source_repo.scm_instance()
1114
1114
1115 # TODO: johbo: In the context of an update, we cannot reach
1115 # TODO: johbo: In the context of an update, we cannot reach
1116 # the old commit anymore with our normal mechanisms. It needs
1116 # the old commit anymore with our normal mechanisms. It needs
1117 # some sort of special support in the vcs layer to avoid this
1117 # some sort of special support in the vcs layer to avoid this
1118 # workaround.
1118 # workaround.
1119 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1119 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1120 vcs_repo.alias == 'git'):
1120 vcs_repo.alias == 'git'):
1121 source_commit.raw_id = safe_str(source_ref_id)
1121 source_commit.raw_id = safe_str(source_ref_id)
1122
1122
1123 log.debug('calculating diff between '
1123 log.debug('calculating diff between '
1124 'source_ref:%s and target_ref:%s for repo `%s`',
1124 'source_ref:%s and target_ref:%s for repo `%s`',
1125 target_ref_id, source_ref_id,
1125 target_ref_id, source_ref_id,
1126 safe_unicode(vcs_repo.path))
1126 safe_unicode(vcs_repo.path))
1127
1127
1128 vcs_diff = vcs_repo.get_diff(
1128 vcs_diff = vcs_repo.get_diff(
1129 commit1=target_commit, commit2=source_commit, context=context)
1129 commit1=target_commit, commit2=source_commit, context=context)
1130 return vcs_diff
1130 return vcs_diff
1131
1131
1132 def _is_merge_enabled(self, pull_request):
1132 def _is_merge_enabled(self, pull_request):
1133 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1133 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1134 settings = settings_model.get_general_settings()
1134 settings = settings_model.get_general_settings()
1135 return settings.get('rhodecode_pr_merge_enabled', False)
1135 return settings.get('rhodecode_pr_merge_enabled', False)
1136
1136
1137 def _use_rebase_for_merging(self, pull_request):
1137 def _use_rebase_for_merging(self, pull_request):
1138 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1138 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1139 settings = settings_model.get_general_settings()
1139 settings = settings_model.get_general_settings()
1140 return settings.get('rhodecode_hg_use_rebase_for_merging', False)
1140 return settings.get('rhodecode_hg_use_rebase_for_merging', False)
1141
1141
1142 def _log_action(self, action, user, pull_request):
1142 def _log_action(self, action, user, pull_request):
1143 action_logger(
1143 action_logger(
1144 user,
1144 user,
1145 '{action}:{pr_id}'.format(
1145 '{action}:{pr_id}'.format(
1146 action=action, pr_id=pull_request.pull_request_id),
1146 action=action, pr_id=pull_request.pull_request_id),
1147 pull_request.target_repo)
1147 pull_request.target_repo)
1148
1148
1149
1149
1150 ChangeTuple = namedtuple('ChangeTuple',
1150 ChangeTuple = namedtuple('ChangeTuple',
1151 ['added', 'common', 'removed'])
1151 ['added', 'common', 'removed'])
1152
1152
1153 FileChangeTuple = namedtuple('FileChangeTuple',
1153 FileChangeTuple = namedtuple('FileChangeTuple',
1154 ['added', 'modified', 'removed'])
1154 ['added', 'modified', 'removed'])
@@ -1,934 +1,935 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Repository model for rhodecode
22 Repository model for rhodecode
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import re
27 import re
28 import shutil
28 import shutil
29 import time
29 import time
30 import traceback
30 import traceback
31 from datetime import datetime
31 from datetime import datetime
32
32
33 from sqlalchemy.sql import func
33 from sqlalchemy.sql import func
34 from sqlalchemy.sql.expression import true, or_
34 from sqlalchemy.sql.expression import true, or_
35 from zope.cachedescriptors.property import Lazy as LazyProperty
35 from zope.cachedescriptors.property import Lazy as LazyProperty
36
36
37 from rhodecode import events
37 from rhodecode import events
38 from rhodecode.lib import helpers as h
38 from rhodecode.lib import helpers as h
39 from rhodecode.lib.auth import HasUserGroupPermissionAny
39 from rhodecode.lib.auth import HasUserGroupPermissionAny
40 from rhodecode.lib.caching_query import FromCache
40 from rhodecode.lib.caching_query import FromCache
41 from rhodecode.lib.exceptions import AttachedForksError
41 from rhodecode.lib.exceptions import AttachedForksError
42 from rhodecode.lib.hooks_base import log_delete_repository
42 from rhodecode.lib.hooks_base import log_delete_repository
43 from rhodecode.lib.utils import make_db_config
43 from rhodecode.lib.utils import make_db_config
44 from rhodecode.lib.utils2 import (
44 from rhodecode.lib.utils2 import (
45 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
45 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
46 get_current_rhodecode_user, safe_int, datetime_to_time, action_logger_generic)
46 get_current_rhodecode_user, safe_int, datetime_to_time, action_logger_generic)
47 from rhodecode.lib.vcs.backends import get_backend
47 from rhodecode.lib.vcs.backends import get_backend
48 from rhodecode.model import BaseModel
48 from rhodecode.model import BaseModel
49 from rhodecode.model.db import (
49 from rhodecode.model.db import (
50 Repository, UserRepoToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm,
50 Repository, UserRepoToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm,
51 UserGroupRepoGroupToPerm, User, Permission, Statistics, UserGroup,
51 UserGroupRepoGroupToPerm, User, Permission, Statistics, UserGroup,
52 RepoGroup, RepositoryField)
52 RepoGroup, RepositoryField)
53 from rhodecode.model.scm import UserGroupList
53 from rhodecode.model.scm import UserGroupList
54 from rhodecode.model.settings import VcsSettingsModel
54 from rhodecode.model.settings import VcsSettingsModel
55
55
56
56
57 log = logging.getLogger(__name__)
57 log = logging.getLogger(__name__)
58
58
59
59
60 class RepoModel(BaseModel):
60 class RepoModel(BaseModel):
61
61
62 cls = Repository
62 cls = Repository
63
63
64 def _get_user_group(self, users_group):
64 def _get_user_group(self, users_group):
65 return self._get_instance(UserGroup, users_group,
65 return self._get_instance(UserGroup, users_group,
66 callback=UserGroup.get_by_group_name)
66 callback=UserGroup.get_by_group_name)
67
67
68 def _get_repo_group(self, repo_group):
68 def _get_repo_group(self, repo_group):
69 return self._get_instance(RepoGroup, repo_group,
69 return self._get_instance(RepoGroup, repo_group,
70 callback=RepoGroup.get_by_group_name)
70 callback=RepoGroup.get_by_group_name)
71
71
72 def _create_default_perms(self, repository, private):
72 def _create_default_perms(self, repository, private):
73 # create default permission
73 # create default permission
74 default = 'repository.read'
74 default = 'repository.read'
75 def_user = User.get_default_user()
75 def_user = User.get_default_user()
76 for p in def_user.user_perms:
76 for p in def_user.user_perms:
77 if p.permission.permission_name.startswith('repository.'):
77 if p.permission.permission_name.startswith('repository.'):
78 default = p.permission.permission_name
78 default = p.permission.permission_name
79 break
79 break
80
80
81 default_perm = 'repository.none' if private else default
81 default_perm = 'repository.none' if private else default
82
82
83 repo_to_perm = UserRepoToPerm()
83 repo_to_perm = UserRepoToPerm()
84 repo_to_perm.permission = Permission.get_by_key(default_perm)
84 repo_to_perm.permission = Permission.get_by_key(default_perm)
85
85
86 repo_to_perm.repository = repository
86 repo_to_perm.repository = repository
87 repo_to_perm.user_id = def_user.user_id
87 repo_to_perm.user_id = def_user.user_id
88
88
89 return repo_to_perm
89 return repo_to_perm
90
90
91 @LazyProperty
91 @LazyProperty
92 def repos_path(self):
92 def repos_path(self):
93 """
93 """
94 Gets the repositories root path from database
94 Gets the repositories root path from database
95 """
95 """
96 settings_model = VcsSettingsModel(sa=self.sa)
96 settings_model = VcsSettingsModel(sa=self.sa)
97 return settings_model.get_repos_location()
97 return settings_model.get_repos_location()
98
98
99 def get(self, repo_id, cache=False):
99 def get(self, repo_id, cache=False):
100 repo = self.sa.query(Repository) \
100 repo = self.sa.query(Repository) \
101 .filter(Repository.repo_id == repo_id)
101 .filter(Repository.repo_id == repo_id)
102
102
103 if cache:
103 if cache:
104 repo = repo.options(FromCache("sql_cache_short",
104 repo = repo.options(FromCache("sql_cache_short",
105 "get_repo_%s" % repo_id))
105 "get_repo_%s" % repo_id))
106 return repo.scalar()
106 return repo.scalar()
107
107
108 def get_repo(self, repository):
108 def get_repo(self, repository):
109 return self._get_repo(repository)
109 return self._get_repo(repository)
110
110
111 def get_by_repo_name(self, repo_name, cache=False):
111 def get_by_repo_name(self, repo_name, cache=False):
112 repo = self.sa.query(Repository) \
112 repo = self.sa.query(Repository) \
113 .filter(Repository.repo_name == repo_name)
113 .filter(Repository.repo_name == repo_name)
114
114
115 if cache:
115 if cache:
116 repo = repo.options(FromCache("sql_cache_short",
116 repo = repo.options(FromCache("sql_cache_short",
117 "get_repo_%s" % repo_name))
117 "get_repo_%s" % repo_name))
118 return repo.scalar()
118 return repo.scalar()
119
119
120 def _extract_id_from_repo_name(self, repo_name):
120 def _extract_id_from_repo_name(self, repo_name):
121 if repo_name.startswith('/'):
121 if repo_name.startswith('/'):
122 repo_name = repo_name.lstrip('/')
122 repo_name = repo_name.lstrip('/')
123 by_id_match = re.match(r'^_(\d{1,})', repo_name)
123 by_id_match = re.match(r'^_(\d{1,})', repo_name)
124 if by_id_match:
124 if by_id_match:
125 return by_id_match.groups()[0]
125 return by_id_match.groups()[0]
126
126
127 def get_repo_by_id(self, repo_name):
127 def get_repo_by_id(self, repo_name):
128 """
128 """
129 Extracts repo_name by id from special urls.
129 Extracts repo_name by id from special urls.
130 Example url is _11/repo_name
130 Example url is _11/repo_name
131
131
132 :param repo_name:
132 :param repo_name:
133 :return: repo object if matched else None
133 :return: repo object if matched else None
134 """
134 """
135 try:
135 try:
136 _repo_id = self._extract_id_from_repo_name(repo_name)
136 _repo_id = self._extract_id_from_repo_name(repo_name)
137 if _repo_id:
137 if _repo_id:
138 return self.get(_repo_id)
138 return self.get(_repo_id)
139 except Exception:
139 except Exception:
140 log.exception('Failed to extract repo_name from URL')
140 log.exception('Failed to extract repo_name from URL')
141
141
142 return None
142 return None
143
143
144 def get_url(self, repo):
144 def get_url(self, repo):
145 return h.url('summary_home', repo_name=repo.repo_name, qualified=True)
145 return h.url('summary_home', repo_name=safe_str(repo.repo_name),
146 qualified=True)
146
147
147 def get_users(self, name_contains=None, limit=20, only_active=True):
148 def get_users(self, name_contains=None, limit=20, only_active=True):
148 # TODO: mikhail: move this method to the UserModel.
149 # TODO: mikhail: move this method to the UserModel.
149 query = self.sa.query(User)
150 query = self.sa.query(User)
150 if only_active:
151 if only_active:
151 query = query.filter(User.active == true())
152 query = query.filter(User.active == true())
152
153
153 if name_contains:
154 if name_contains:
154 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
155 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
155 query = query.filter(
156 query = query.filter(
156 or_(
157 or_(
157 User.name.ilike(ilike_expression),
158 User.name.ilike(ilike_expression),
158 User.lastname.ilike(ilike_expression),
159 User.lastname.ilike(ilike_expression),
159 User.username.ilike(ilike_expression)
160 User.username.ilike(ilike_expression)
160 )
161 )
161 )
162 )
162 query = query.limit(limit)
163 query = query.limit(limit)
163 users = query.all()
164 users = query.all()
164
165
165 _users = [
166 _users = [
166 {
167 {
167 'id': user.user_id,
168 'id': user.user_id,
168 'first_name': user.name,
169 'first_name': user.name,
169 'last_name': user.lastname,
170 'last_name': user.lastname,
170 'username': user.username,
171 'username': user.username,
171 'icon_link': h.gravatar_url(user.email, 14),
172 'icon_link': h.gravatar_url(user.email, 14),
172 'value_display': h.person(user.email),
173 'value_display': h.person(user.email),
173 'value': user.username,
174 'value': user.username,
174 'value_type': 'user',
175 'value_type': 'user',
175 'active': user.active,
176 'active': user.active,
176 }
177 }
177 for user in users
178 for user in users
178 ]
179 ]
179 return _users
180 return _users
180
181
def get_user_groups(self, name_contains=None, limit=20, only_active=True):
    """
    Return user groups as lightweight dicts suitable for auto-complete
    widgets, optionally filtered by a name substring.

    :param name_contains: optional substring matched (case-insensitive)
        against the group name; shorter names sort first
    :param limit: maximum number of groups returned
    :param only_active: when True, restrict to active user groups
    """
    # TODO: mikhail: move this method to the UserGroupModel.
    base_query = self.sa.query(UserGroup)
    if only_active:
        base_query = base_query.filter(
            UserGroup.users_group_active == true())

    if name_contains:
        ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
        # shortest matching names first, then alphabetical
        base_query = base_query \
            .filter(UserGroup.users_group_name.ilike(ilike_expression)) \
            .order_by(func.length(UserGroup.users_group_name)) \
            .order_by(UserGroup.users_group_name)

    matched = base_query.limit(limit).all()
    # wrap in UserGroupList to enforce read access for the current user
    perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
    visible_groups = UserGroupList(matched, perm_set=perm_set)

    result = []
    for group in visible_groups:
        result.append({
            'id': group.users_group_id,
            # TODO: marcink figure out a way to generate the url for the
            # icon
            'icon_link': '',
            'value_display': 'Group: %s (%d members)' % (
                group.users_group_name, len(group.members),),
            'value': group.users_group_name,
            'value_type': 'user_group',
            'active': group.users_group_active,
        })
    return result
214
215
@classmethod
def update_repoinfo(cls, repositories=None):
    """
    Refresh the cached commit information for the given repositories.

    :param repositories: iterable of Repository objects; when falsy,
        every repository in the database is refreshed
    """
    targets = repositories or Repository.getAll()
    for repository in targets:
        repository.update_commit_cache()
221
222
def get_repos_as_dict(self, repo_list=None, admin=False,
                      super_user_actions=False):
    """
    Render `repo_list` into a list of dicts for the repository data
    table, with pre-rendered HTML fragments for each column.

    :param repo_list: iterable of Repository objects to render
    :param admin: when True, use full repo names and add an "action"
        column per row
    :param super_user_actions: passed through to the actions template
    """
    from rhodecode.lib.utils import PartialRenderer
    _render = PartialRenderer('data_table/_dt_elements.html')
    c = _render.c

    # small closures rendering individual table cells via the shared
    # partial-template renderer
    def quick_menu(repo_name):
        return _render('quick_menu', repo_name)

    def repo_lnk(name, rtype, rstate, private, fork_of):
        return _render('repo_name', name, rtype, rstate, private, fork_of,
                       short_name=not admin, admin=False)

    def last_change(last_change):
        return _render("last_change", last_change)

    def rss_lnk(repo_name):
        return _render("rss", repo_name)

    def atom_lnk(repo_name):
        return _render("atom", repo_name)

    def last_rev(repo_name, cs_cache):
        return _render('revision', repo_name, cs_cache.get('revision'),
                       cs_cache.get('raw_id'), cs_cache.get('author'),
                       cs_cache.get('message'))

    def desc(desc):
        # optionally stylize metatags in the description, per site config
        if c.visual.stylify_metatags:
            return h.urlify_text(h.escaped_stylize(h.truncate(desc, 60)))
        else:
            return h.urlify_text(h.html_escape(h.truncate(desc, 60)))

    def state(repo_state):
        return _render("repo_state", repo_state)

    def repo_actions(repo_name):
        return _render('repo_actions', repo_name, super_user_actions)

    def user_profile(username):
        return _render('user_profile', username)

    repos_data = []
    for repo in repo_list:
        cs_cache = repo.changeset_cache
        repo_row = {
            "menu": quick_menu(repo.repo_name),

            "name": repo_lnk(repo.repo_name, repo.repo_type,
                             repo.repo_state, repo.private, repo.fork),
            "name_raw": repo.repo_name.lower(),

            "last_change": last_change(repo.last_db_change),
            "last_change_raw": datetime_to_time(repo.last_db_change),

            "last_changeset": last_rev(repo.repo_name, cs_cache),
            "last_changeset_raw": cs_cache.get('revision'),

            "desc": desc(repo.description),
            "owner": user_profile(repo.user.username),

            "state": state(repo.repo_state),
            "rss": rss_lnk(repo.repo_name),

            "atom": atom_lnk(repo.repo_name),
        }
        if admin:
            repo_row["action"] = repo_actions(repo.repo_name)
        repos_data.append(repo_row)

    return repos_data
296
297
def _get_defaults(self, repo_name):
    """
    Gets information about repository, and returns a dict for
    usage in forms

    :param repo_name: name of the repository to look up
    :return: dict of form defaults, or None when the repo is unknown
    """
    repo_info = Repository.get_by_repo_name(repo_name)
    if repo_info is None:
        return None

    defaults = repo_info.get_dict()
    defaults['repo_name'] = repo_info.just_name

    groups = repo_info.groups_with_parents
    parent_group = groups[-1] if groups else None

    # we use -1 as this is how in HTML, we mark an empty group
    defaults['repo_group'] = getattr(parent_group, 'group_id', -1)

    # (form key, strip 'repo_' prefix when reading the model attribute)
    keys_to_process = (
        ('repo_type', False),
        ('repo_enable_downloads', True),
        ('repo_description', True),
        ('repo_enable_locking', True),
        ('repo_landing_rev', True),
        ('clone_uri', False),
        ('repo_private', True),
        ('repo_enable_statistics', True),
    )

    for form_key, strip in keys_to_process:
        attr = remove_prefix(form_key, 'repo_') if strip else form_key

        val = defaults[attr]
        if form_key == 'repo_landing_rev':
            # stored as a (type, ref) pair; the form wants "type:ref"
            val = ':'.join(defaults[attr])
        defaults[form_key] = val
        if form_key == 'clone_uri':
            defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden

    # fill owner
    if repo_info.user:
        defaults.update({'user': repo_info.user.username})
    else:
        # orphaned repo: fall back to the first super-admin as owner
        replacement_user = User.get_first_super_admin().username
        defaults.update({'user': replacement_user})

    # fill repository users
    for p in repo_info.repo_to_perm:
        defaults.update({'u_perm_%s' % p.user.user_id:
                         p.permission.permission_name})

    # fill repository groups
    for p in repo_info.users_group_to_perm:
        defaults.update({'g_perm_%s' % p.users_group.users_group_id:
                         p.permission.permission_name})

    return defaults
360
361
def update(self, repo, **kwargs):
    """
    Update a repository from the given keyword arguments: owner, group,
    whitelisted settings, name (with filesystem rename), privacy flag
    and extra fields.

    :param repo: repository (id, name or instance) to update
    :param kwargs: form values; must contain 'repo_name'
    :raises Exception: re-raises anything from the update, after logging
    """
    try:
        cur_repo = self._get_repo(repo)
        source_repo_name = cur_repo.repo_name
        if 'user' in kwargs:
            cur_repo.user = User.get_by_username(kwargs['user'])

        if 'repo_group' in kwargs:
            cur_repo.group = RepoGroup.get(kwargs['repo_group'])
        log.debug('Updating repo %s with params:%s', cur_repo, kwargs)

        # (strip 'repo_' prefix before setattr?, incoming form key)
        update_keys = [
            (1, 'repo_enable_downloads'),
            (1, 'repo_description'),
            (1, 'repo_enable_locking'),
            (1, 'repo_landing_rev'),
            (1, 'repo_private'),
            (1, 'repo_enable_statistics'),
            (0, 'clone_uri'),
            (0, 'fork_id'),
        ]
        for strip, k in update_keys:
            if k not in kwargs:
                continue
            val = kwargs[k]
            if strip:
                k = remove_prefix(k, 'repo_')
            if k == 'clone_uri':
                from rhodecode.model.validators import Missing
                _change = kwargs.get('clone_uri_change')
                if _change in [Missing, 'OLD']:
                    # we don't change the value, so use original one
                    val = cur_repo.clone_uri

            setattr(cur_repo, k, val)

        new_name = cur_repo.get_new_name(kwargs['repo_name'])
        cur_repo.repo_name = new_name

        # if private flag is set, reset default permission to NONE
        if kwargs.get('repo_private'):
            EMPTY_PERM = 'repository.none'
            RepoModel().grant_user_permission(
                repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
            )

        # handle extra fields
        prefixed = [key for key in kwargs
                    if key.startswith(RepositoryField.PREFIX)]
        for field in prefixed:
            k = RepositoryField.un_prefix_key(field)
            ex_field = RepositoryField.get_by_key_name(
                key=k, repo=cur_repo)
            if ex_field:
                ex_field.field_value = kwargs[field]
                self.sa.add(ex_field)
        self.sa.add(cur_repo)

        if source_repo_name != new_name:
            # rename repository
            self._rename_filesystem_repo(
                old=source_repo_name, new=new_name)

        return cur_repo
    except Exception:
        log.error(traceback.format_exc())
        raise
426
427
def _create_repo(self, repo_name, repo_type, description, owner,
                 private=False, clone_uri=None, repo_group=None,
                 landing_rev='rev:tip', fork_of=None,
                 copy_fork_permissions=False, enable_statistics=False,
                 enable_locking=False, enable_downloads=False,
                 copy_group_permissions=False,
                 state=Repository.STATE_PENDING):
    """
    Create repository inside database with PENDING state, this should be
    only executed by create() repo. With exception of importing existing
    repos

    :param repo_name: possibly group-qualified name ("group/name")
    :param repo_type: scm backend type of the new repository
    :param description: repo description; falls back to the short name
    :param owner: User (or id/username) owning the new repository
    :param private: when True, default-user access is reset to none
    :param clone_uri: optional remote clone URI
    :param repo_group: optional parent group (id accepted)
    :param landing_rev: default landing revision, e.g. 'rev:tip'
    :param fork_of: repository this one forks (instance/id/name)
    :param copy_fork_permissions: copy perms from the forked repo
    :param copy_group_permissions: copy perms from the parent group
    :param state: initial Repository state, PENDING by default
    :raises Exception: re-raises anything from creation, after logging
    """
    from rhodecode.model.scm import ScmModel

    owner = self._get_user(owner)
    fork_of = self._get_repo(fork_of)
    repo_group = self._get_repo_group(safe_int(repo_group))

    try:
        repo_name = safe_unicode(repo_name)
        description = safe_unicode(description)
        # repo name is just a name of repository
        # while repo_name_full is a full qualified name that is combined
        # with name and path of group
        repo_name_full = repo_name
        repo_name = repo_name.split(Repository.NAME_SEP)[-1]

        new_repo = Repository()
        new_repo.repo_state = state
        # NOTE: the original also set enable_statistics = False here,
        # which was dead code — it is unconditionally overwritten below.
        new_repo.repo_name = repo_name_full
        new_repo.repo_type = repo_type
        new_repo.user = owner
        new_repo.group = repo_group
        new_repo.description = description or repo_name
        new_repo.private = private
        new_repo.clone_uri = clone_uri
        new_repo.landing_rev = landing_rev

        new_repo.enable_statistics = enable_statistics
        new_repo.enable_locking = enable_locking
        new_repo.enable_downloads = enable_downloads

        if repo_group:
            # inherit the locking setting from the parent group
            new_repo.enable_locking = repo_group.enable_locking

        if fork_of:
            parent_repo = fork_of
            new_repo.fork = parent_repo

        events.trigger(events.RepoPreCreateEvent(new_repo))

        self.sa.add(new_repo)

        EMPTY_PERM = 'repository.none'
        if fork_of and copy_fork_permissions:
            # clone user and user-group permissions from the parent fork
            repo = fork_of
            user_perms = UserRepoToPerm.query() \
                .filter(UserRepoToPerm.repository == repo).all()
            group_perms = UserGroupRepoToPerm.query() \
                .filter(UserGroupRepoToPerm.repository == repo).all()

            for perm in user_perms:
                UserRepoToPerm.create(
                    perm.user, new_repo, perm.permission)

            for perm in group_perms:
                UserGroupRepoToPerm.create(
                    perm.users_group, new_repo, perm.permission)
            # in case we copy permissions and also set this repo to private
            # override the default user permission to make it a private
            # repo
            if private:
                RepoModel(self.sa).grant_user_permission(
                    repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)

        elif repo_group and copy_group_permissions:
            # translate group-level perms into equivalent repo perms
            user_perms = UserRepoGroupToPerm.query() \
                .filter(UserRepoGroupToPerm.group == repo_group).all()

            group_perms = UserGroupRepoGroupToPerm.query() \
                .filter(UserGroupRepoGroupToPerm.group == repo_group).all()

            for perm in user_perms:
                perm_name = perm.permission.permission_name.replace(
                    'group.', 'repository.')
                perm_obj = Permission.get_by_key(perm_name)
                UserRepoToPerm.create(perm.user, new_repo, perm_obj)

            for perm in group_perms:
                perm_name = perm.permission.permission_name.replace(
                    'group.', 'repository.')
                perm_obj = Permission.get_by_key(perm_name)
                UserGroupRepoToPerm.create(
                    perm.users_group, new_repo, perm_obj)

            if private:
                RepoModel(self.sa).grant_user_permission(
                    repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)

        else:
            perm_obj = self._create_default_perms(new_repo, private)
            self.sa.add(perm_obj)

        # now automatically start following this repository as owner
        ScmModel(self.sa).toggle_following_repo(new_repo.repo_id,
                                                owner.user_id)

        # we need to flush here, in order to check if database won't
        # throw any exceptions, create filesystem dirs at the very end
        self.sa.flush()
        events.trigger(events.RepoCreateEvent(new_repo))
        return new_repo

    except Exception:
        log.error(traceback.format_exc())
        raise
544
545
def create(self, form_data, cur_user):
    """
    Create repository using celery tasks

    :param form_data: validated repository form values
    :param cur_user: user performing the creation
    :return: result of the (possibly async) task invocation
    """
    from rhodecode.lib.celerylib import tasks, run_task
    task_result = run_task(tasks.create_repo, form_data, cur_user)
    return task_result
554
555
def update_permissions(self, repo, perm_additions=None, perm_updates=None,
                       perm_deletions=None, check_perms=True,
                       cur_user=None):
    """
    Apply a batch of permission changes to `repo`.

    Each of the three change lists holds (member_id, perm, member_type)
    tuples where member_type is 'user' or a user-group marker; for
    deletions the `perm` element is ignored.

    :param repo: repository to modify (id, name or instance)
    :param perm_additions: permissions to add
    :param perm_updates: permissions to update (grant also updates)
    :param perm_deletions: permissions to revoke
    :param check_perms: when True, verify `cur_user` may manage the
        target user group before changing user-group permissions
    :param cur_user: user performing the change
    """
    perm_additions = perm_additions or []
    perm_updates = perm_updates or []
    perm_deletions = perm_deletions or []

    req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')

    def _usergroup_change_allowed(member_id):
        # check if we have permissions to alter this usergroup;
        # single helper replaces the copy-pasted check in all 3 loops
        member_name = UserGroup.get(member_id).users_group_name
        return not check_perms or HasUserGroupPermissionAny(
            *req_perms)(member_name, user=cur_user)

    # update permissions
    for member_id, perm, member_type in perm_updates:
        member_id = int(member_id)
        if member_type == 'user':
            # this updates also current one if found
            self.grant_user_permission(
                repo=repo, user=member_id, perm=perm)
        else:  # set for user group
            if _usergroup_change_allowed(member_id):
                self.grant_user_group_permission(
                    repo=repo, group_name=member_id, perm=perm)

    # set new permissions
    for member_id, perm, member_type in perm_additions:
        member_id = int(member_id)
        if member_type == 'user':
            self.grant_user_permission(
                repo=repo, user=member_id, perm=perm)
        else:  # set for user group
            if _usergroup_change_allowed(member_id):
                self.grant_user_group_permission(
                    repo=repo, group_name=member_id, perm=perm)

    # delete permissions
    for member_id, perm, member_type in perm_deletions:
        member_id = int(member_id)
        if member_type == 'user':
            self.revoke_user_permission(repo=repo, user=member_id)
        else:  # set for user group
            if _usergroup_change_allowed(member_id):
                self.revoke_user_group_permission(
                    repo=repo, group_name=member_id)
608
609
def create_fork(self, form_data, cur_user):
    """
    Simple wrapper into executing celery task for fork creation

    :param form_data: validated fork form values
    :param cur_user: user performing the fork
    :return: result of the (possibly async) task invocation
    """
    from rhodecode.lib.celerylib import tasks, run_task
    task_result = run_task(tasks.create_repo_fork, form_data, cur_user)
    return task_result
618
619
def delete(self, repo, forks=None, fs_remove=True, cur_user=None):
    """
    Delete given repository, forks parameter defines what do do with
    attached forks. Throws AttachedForksError if deleted repo has attached
    forks

    :param repo: repository (id, name or instance) to delete
    :param forks: str 'delete' or 'detach'
    :param fs_remove: remove(archive) repo from filesystem
    :param cur_user: username recorded in the deletion log; defaults to
        the current request's user
    :raises AttachedForksError: when forks exist and no policy is given
    """
    if not cur_user:
        cur_user = getattr(get_current_rhodecode_user(), 'username', None)
    repo = self._get_repo(repo)
    if repo:
        if forks == 'detach':
            for r in repo.forks:
                r.fork = None
                self.sa.add(r)
        elif forks == 'delete':
            for r in repo.forks:
                # BUGFIX: propagate fs_remove and cur_user so forks honour
                # the caller's filesystem flag and are logged with the
                # correct deleting user (previously both were dropped)
                self.delete(r, forks='delete', fs_remove=fs_remove,
                            cur_user=cur_user)
        elif [f for f in repo.forks]:
            raise AttachedForksError()

        old_repo_dict = repo.get_dict()
        events.trigger(events.RepoPreDeleteEvent(repo))
        try:
            self.sa.delete(repo)
            if fs_remove:
                self._delete_filesystem_repo(repo)
            else:
                log.debug('skipping removal from filesystem')
            old_repo_dict.update({
                'deleted_by': cur_user,
                'deleted_on': time.time(),
            })
            log_delete_repository(**old_repo_dict)
            events.trigger(events.RepoDeleteEvent(repo))
        except Exception:
            log.error(traceback.format_exc())
            raise
660
661
def grant_user_permission(self, repo, user, perm):
    """
    Grant permission for user on given repository, or update existing one
    if found

    :param repo: Instance of Repository, repository_id, or repository name
    :param user: Instance of User, user_id or username
    :param perm: Instance of Permission, or permission_name
    :return: the created or updated UserRepoToPerm row
    """
    user = self._get_user(user)
    repo = self._get_repo(repo)
    permission = self._get_perm(perm)

    # look up an existing grant for this (user, repo) pair
    perm_row = self.sa.query(UserRepoToPerm) \
        .filter(UserRepoToPerm.user == user) \
        .filter(UserRepoToPerm.repository == repo) \
        .scalar()
    if perm_row is None:
        # create new !
        perm_row = UserRepoToPerm()
        perm_row.repository = repo
        perm_row.user = user
    # always (re)point at the requested permission
    perm_row.permission = permission
    self.sa.add(perm_row)
    log.debug('Granted perm %s to %s on %s', perm, user, repo)
    action_logger_generic(
        'granted permission: {} to user: {} on repo: {}'.format(
            perm, user, repo), namespace='security.repo')
    return perm_row
691
692
def revoke_user_permission(self, repo, user):
    """
    Revoke permission for user on given repository

    :param repo: Instance of Repository, repository_id, or repository name
    :param user: Instance of User, user_id or username
    """
    user = self._get_user(user)
    repo = self._get_repo(repo)

    existing = self.sa.query(UserRepoToPerm) \
        .filter(UserRepoToPerm.repository == repo) \
        .filter(UserRepoToPerm.user == user) \
        .scalar()
    if not existing:
        # nothing granted for this pair — nothing to revoke
        return
    self.sa.delete(existing)
    log.debug('Revoked perm on %s on %s', repo, user)
    action_logger_generic(
        'revoked permission from user: {} on repo: {}'.format(
            user, repo), namespace='security.repo')
713
714
714 def grant_user_group_permission(self, repo, group_name, perm):
715 def grant_user_group_permission(self, repo, group_name, perm):
715 """
716 """
716 Grant permission for user group on given repository, or update
717 Grant permission for user group on given repository, or update
717 existing one if found
718 existing one if found
718
719
719 :param repo: Instance of Repository, repository_id, or repository name
720 :param repo: Instance of Repository, repository_id, or repository name
720 :param group_name: Instance of UserGroup, users_group_id,
721 :param group_name: Instance of UserGroup, users_group_id,
721 or user group name
722 or user group name
722 :param perm: Instance of Permission, or permission_name
723 :param perm: Instance of Permission, or permission_name
723 """
724 """
724 repo = self._get_repo(repo)
725 repo = self._get_repo(repo)
725 group_name = self._get_user_group(group_name)
726 group_name = self._get_user_group(group_name)
726 permission = self._get_perm(perm)
727 permission = self._get_perm(perm)
727
728
728 # check if we have that permission already
729 # check if we have that permission already
729 obj = self.sa.query(UserGroupRepoToPerm) \
730 obj = self.sa.query(UserGroupRepoToPerm) \
730 .filter(UserGroupRepoToPerm.users_group == group_name) \
731 .filter(UserGroupRepoToPerm.users_group == group_name) \
731 .filter(UserGroupRepoToPerm.repository == repo) \
732 .filter(UserGroupRepoToPerm.repository == repo) \
732 .scalar()
733 .scalar()
733
734
734 if obj is None:
735 if obj is None:
735 # create new
736 # create new
736 obj = UserGroupRepoToPerm()
737 obj = UserGroupRepoToPerm()
737
738
738 obj.repository = repo
739 obj.repository = repo
739 obj.users_group = group_name
740 obj.users_group = group_name
740 obj.permission = permission
741 obj.permission = permission
741 self.sa.add(obj)
742 self.sa.add(obj)
742 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
743 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
743 action_logger_generic(
744 action_logger_generic(
744 'granted permission: {} to usergroup: {} on repo: {}'.format(
745 'granted permission: {} to usergroup: {} on repo: {}'.format(
745 perm, group_name, repo), namespace='security.repo')
746 perm, group_name, repo), namespace='security.repo')
746
747
747 return obj
748 return obj
748
749
749 def revoke_user_group_permission(self, repo, group_name):
750 def revoke_user_group_permission(self, repo, group_name):
750 """
751 """
751 Revoke permission for user group on given repository
752 Revoke permission for user group on given repository
752
753
753 :param repo: Instance of Repository, repository_id, or repository name
754 :param repo: Instance of Repository, repository_id, or repository name
754 :param group_name: Instance of UserGroup, users_group_id,
755 :param group_name: Instance of UserGroup, users_group_id,
755 or user group name
756 or user group name
756 """
757 """
757 repo = self._get_repo(repo)
758 repo = self._get_repo(repo)
758 group_name = self._get_user_group(group_name)
759 group_name = self._get_user_group(group_name)
759
760
760 obj = self.sa.query(UserGroupRepoToPerm) \
761 obj = self.sa.query(UserGroupRepoToPerm) \
761 .filter(UserGroupRepoToPerm.repository == repo) \
762 .filter(UserGroupRepoToPerm.repository == repo) \
762 .filter(UserGroupRepoToPerm.users_group == group_name) \
763 .filter(UserGroupRepoToPerm.users_group == group_name) \
763 .scalar()
764 .scalar()
764 if obj:
765 if obj:
765 self.sa.delete(obj)
766 self.sa.delete(obj)
766 log.debug('Revoked perm to %s on %s', repo, group_name)
767 log.debug('Revoked perm to %s on %s', repo, group_name)
767 action_logger_generic(
768 action_logger_generic(
768 'revoked permission from usergroup: {} on repo: {}'.format(
769 'revoked permission from usergroup: {} on repo: {}'.format(
769 group_name, repo), namespace='security.repo')
770 group_name, repo), namespace='security.repo')
770
771
771 def delete_stats(self, repo_name):
772 def delete_stats(self, repo_name):
772 """
773 """
773 removes stats for given repo
774 removes stats for given repo
774
775
775 :param repo_name:
776 :param repo_name:
776 """
777 """
777 repo = self._get_repo(repo_name)
778 repo = self._get_repo(repo_name)
778 try:
779 try:
779 obj = self.sa.query(Statistics) \
780 obj = self.sa.query(Statistics) \
780 .filter(Statistics.repository == repo).scalar()
781 .filter(Statistics.repository == repo).scalar()
781 if obj:
782 if obj:
782 self.sa.delete(obj)
783 self.sa.delete(obj)
783 except Exception:
784 except Exception:
784 log.error(traceback.format_exc())
785 log.error(traceback.format_exc())
785 raise
786 raise
786
787
787 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
788 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
788 field_type='str', field_desc=''):
789 field_type='str', field_desc=''):
789
790
790 repo = self._get_repo(repo_name)
791 repo = self._get_repo(repo_name)
791
792
792 new_field = RepositoryField()
793 new_field = RepositoryField()
793 new_field.repository = repo
794 new_field.repository = repo
794 new_field.field_key = field_key
795 new_field.field_key = field_key
795 new_field.field_type = field_type # python type
796 new_field.field_type = field_type # python type
796 new_field.field_value = field_value
797 new_field.field_value = field_value
797 new_field.field_desc = field_desc
798 new_field.field_desc = field_desc
798 new_field.field_label = field_label
799 new_field.field_label = field_label
799 self.sa.add(new_field)
800 self.sa.add(new_field)
800 return new_field
801 return new_field
801
802
802 def delete_repo_field(self, repo_name, field_key):
803 def delete_repo_field(self, repo_name, field_key):
803 repo = self._get_repo(repo_name)
804 repo = self._get_repo(repo_name)
804 field = RepositoryField.get_by_key_name(field_key, repo)
805 field = RepositoryField.get_by_key_name(field_key, repo)
805 if field:
806 if field:
806 self.sa.delete(field)
807 self.sa.delete(field)
807
808
808 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
809 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
809 clone_uri=None, repo_store_location=None,
810 clone_uri=None, repo_store_location=None,
810 use_global_config=False):
811 use_global_config=False):
811 """
812 """
812 makes repository on filesystem. It's group aware means it'll create
813 makes repository on filesystem. It's group aware means it'll create
813 a repository within a group, and alter the paths accordingly of
814 a repository within a group, and alter the paths accordingly of
814 group location
815 group location
815
816
816 :param repo_name:
817 :param repo_name:
817 :param alias:
818 :param alias:
818 :param parent:
819 :param parent:
819 :param clone_uri:
820 :param clone_uri:
820 :param repo_store_location:
821 :param repo_store_location:
821 """
822 """
822 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
823 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
823 from rhodecode.model.scm import ScmModel
824 from rhodecode.model.scm import ScmModel
824
825
825 if Repository.NAME_SEP in repo_name:
826 if Repository.NAME_SEP in repo_name:
826 raise ValueError(
827 raise ValueError(
827 'repo_name must not contain groups got `%s`' % repo_name)
828 'repo_name must not contain groups got `%s`' % repo_name)
828
829
829 if isinstance(repo_group, RepoGroup):
830 if isinstance(repo_group, RepoGroup):
830 new_parent_path = os.sep.join(repo_group.full_path_splitted)
831 new_parent_path = os.sep.join(repo_group.full_path_splitted)
831 else:
832 else:
832 new_parent_path = repo_group or ''
833 new_parent_path = repo_group or ''
833
834
834 if repo_store_location:
835 if repo_store_location:
835 _paths = [repo_store_location]
836 _paths = [repo_store_location]
836 else:
837 else:
837 _paths = [self.repos_path, new_parent_path, repo_name]
838 _paths = [self.repos_path, new_parent_path, repo_name]
838 # we need to make it str for mercurial
839 # we need to make it str for mercurial
839 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
840 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
840
841
841 # check if this path is not a repository
842 # check if this path is not a repository
842 if is_valid_repo(repo_path, self.repos_path):
843 if is_valid_repo(repo_path, self.repos_path):
843 raise Exception('This path %s is a valid repository' % repo_path)
844 raise Exception('This path %s is a valid repository' % repo_path)
844
845
845 # check if this path is a group
846 # check if this path is a group
846 if is_valid_repo_group(repo_path, self.repos_path):
847 if is_valid_repo_group(repo_path, self.repos_path):
847 raise Exception('This path %s is a valid group' % repo_path)
848 raise Exception('This path %s is a valid group' % repo_path)
848
849
849 log.info('creating repo %s in %s from url: `%s`',
850 log.info('creating repo %s in %s from url: `%s`',
850 repo_name, safe_unicode(repo_path),
851 repo_name, safe_unicode(repo_path),
851 obfuscate_url_pw(clone_uri))
852 obfuscate_url_pw(clone_uri))
852
853
853 backend = get_backend(repo_type)
854 backend = get_backend(repo_type)
854
855
855 config_repo = None if use_global_config else repo_name
856 config_repo = None if use_global_config else repo_name
856 if config_repo and new_parent_path:
857 if config_repo and new_parent_path:
857 config_repo = Repository.NAME_SEP.join(
858 config_repo = Repository.NAME_SEP.join(
858 (new_parent_path, config_repo))
859 (new_parent_path, config_repo))
859 config = make_db_config(clear_session=False, repo=config_repo)
860 config = make_db_config(clear_session=False, repo=config_repo)
860 config.set('extensions', 'largefiles', '')
861 config.set('extensions', 'largefiles', '')
861
862
862 # patch and reset hooks section of UI config to not run any
863 # patch and reset hooks section of UI config to not run any
863 # hooks on creating remote repo
864 # hooks on creating remote repo
864 config.clear_section('hooks')
865 config.clear_section('hooks')
865
866
866 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
867 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
867 if repo_type == 'git':
868 if repo_type == 'git':
868 repo = backend(
869 repo = backend(
869 repo_path, config=config, create=True, src_url=clone_uri,
870 repo_path, config=config, create=True, src_url=clone_uri,
870 bare=True)
871 bare=True)
871 else:
872 else:
872 repo = backend(
873 repo = backend(
873 repo_path, config=config, create=True, src_url=clone_uri)
874 repo_path, config=config, create=True, src_url=clone_uri)
874
875
875 ScmModel().install_hooks(repo, repo_type=repo_type)
876 ScmModel().install_hooks(repo, repo_type=repo_type)
876
877
877 log.debug('Created repo %s with %s backend',
878 log.debug('Created repo %s with %s backend',
878 safe_unicode(repo_name), safe_unicode(repo_type))
879 safe_unicode(repo_name), safe_unicode(repo_type))
879 return repo
880 return repo
880
881
881 def _rename_filesystem_repo(self, old, new):
882 def _rename_filesystem_repo(self, old, new):
882 """
883 """
883 renames repository on filesystem
884 renames repository on filesystem
884
885
885 :param old: old name
886 :param old: old name
886 :param new: new name
887 :param new: new name
887 """
888 """
888 log.info('renaming repo from %s to %s', old, new)
889 log.info('renaming repo from %s to %s', old, new)
889
890
890 old_path = os.path.join(self.repos_path, old)
891 old_path = os.path.join(self.repos_path, old)
891 new_path = os.path.join(self.repos_path, new)
892 new_path = os.path.join(self.repos_path, new)
892 if os.path.isdir(new_path):
893 if os.path.isdir(new_path):
893 raise Exception(
894 raise Exception(
894 'Was trying to rename to already existing dir %s' % new_path
895 'Was trying to rename to already existing dir %s' % new_path
895 )
896 )
896 shutil.move(old_path, new_path)
897 shutil.move(old_path, new_path)
897
898
898 def _delete_filesystem_repo(self, repo):
899 def _delete_filesystem_repo(self, repo):
899 """
900 """
900 removes repo from filesystem, the removal is acctually made by
901 removes repo from filesystem, the removal is acctually made by
901 added rm__ prefix into dir, and rename internat .hg/.git dirs so this
902 added rm__ prefix into dir, and rename internat .hg/.git dirs so this
902 repository is no longer valid for rhodecode, can be undeleted later on
903 repository is no longer valid for rhodecode, can be undeleted later on
903 by reverting the renames on this repository
904 by reverting the renames on this repository
904
905
905 :param repo: repo object
906 :param repo: repo object
906 """
907 """
907 rm_path = os.path.join(self.repos_path, repo.repo_name)
908 rm_path = os.path.join(self.repos_path, repo.repo_name)
908 repo_group = repo.group
909 repo_group = repo.group
909 log.info("Removing repository %s", rm_path)
910 log.info("Removing repository %s", rm_path)
910 # disable hg/git internal that it doesn't get detected as repo
911 # disable hg/git internal that it doesn't get detected as repo
911 alias = repo.repo_type
912 alias = repo.repo_type
912
913
913 config = make_db_config(clear_session=False)
914 config = make_db_config(clear_session=False)
914 config.set('extensions', 'largefiles', '')
915 config.set('extensions', 'largefiles', '')
915 bare = getattr(repo.scm_instance(config=config), 'bare', False)
916 bare = getattr(repo.scm_instance(config=config), 'bare', False)
916
917
917 # skip this for bare git repos
918 # skip this for bare git repos
918 if not bare:
919 if not bare:
919 # disable VCS repo
920 # disable VCS repo
920 vcs_path = os.path.join(rm_path, '.%s' % alias)
921 vcs_path = os.path.join(rm_path, '.%s' % alias)
921 if os.path.exists(vcs_path):
922 if os.path.exists(vcs_path):
922 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
923 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
923
924
924 _now = datetime.now()
925 _now = datetime.now()
925 _ms = str(_now.microsecond).rjust(6, '0')
926 _ms = str(_now.microsecond).rjust(6, '0')
926 _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
927 _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
927 repo.just_name)
928 repo.just_name)
928 if repo_group:
929 if repo_group:
929 # if repository is in group, prefix the removal path with the group
930 # if repository is in group, prefix the removal path with the group
930 args = repo_group.full_path_splitted + [_d]
931 args = repo_group.full_path_splitted + [_d]
931 _d = os.path.join(*args)
932 _d = os.path.join(*args)
932
933
933 if os.path.isdir(rm_path):
934 if os.path.isdir(rm_path):
934 shutil.move(rm_path, os.path.join(self.repos_path, _d))
935 shutil.move(rm_path, os.path.join(self.repos_path, _d))
@@ -1,26 +1,58 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 from rhodecode.lib.utils2 import obfuscate_url_pw
21 import pytest
22
23 from rhodecode.lib.utils2 import (
24 obfuscate_url_pw, get_routes_generator_for_server_url)
22
25
23
26
24 def test_obfuscate_url_pw():
27 def test_obfuscate_url_pw():
25 engine = u'/home/repos/malmö'
28 engine = u'/home/repos/malmö'
26 assert obfuscate_url_pw(engine)
29 assert obfuscate_url_pw(engine)
30
31
32 @pytest.mark.parametrize('scheme', ['https', 'http'])
33 @pytest.mark.parametrize('domain', [
34 'www.test.com', 'test.com', 'test.co.uk', '192.168.1.3'])
35 @pytest.mark.parametrize('port', [None, '80', '443', '999'])
36 @pytest.mark.parametrize('script_path', [None, '/', '/prefix', '/prefix/more'])
37 def test_routes_generator(pylonsapp, scheme, domain, port, script_path):
38 server_url = '%s://%s' % (scheme, domain)
39 if port is not None:
40 server_url += ':' + port
41 if script_path:
42 server_url += script_path
43
44
45 expected_url = '%s://%s' % (scheme, domain)
46 if scheme == 'https':
47 if port not in (None, '443'):
48 expected_url += ':' + port
49 elif scheme == 'http':
50 if port not in ('80', None):
51 expected_url += ':' + port
52
53 if script_path:
54 expected_url = (expected_url + script_path).rstrip('/')
55
56 url_generator = get_routes_generator_for_server_url(server_url)
57 assert url_generator(
58 '/a_test_path', qualified=True) == expected_url + '/a_test_path'
General Comments 0
You need to be logged in to leave comments. Login now