##// END OF EJS Templates
slack: fix links in commit/pr events
dan -
r419:d85a7d46 default
parent child Browse files
Show More
@@ -1,278 +1,270 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import json
22 22 import logging
23 23 import urlparse
24 24 import threading
25 25 from BaseHTTPServer import BaseHTTPRequestHandler
26 26 from SocketServer import TCPServer
27 27 from routes.util import URLGenerator
28 28
29 29 import Pyro4
30 30 import pylons
31 31 import rhodecode
32 32
33 33 from rhodecode.lib import hooks_base
34 from rhodecode.lib.utils2 import AttributeDict
34 from rhodecode.lib.utils2 import (
35 AttributeDict, safe_str, get_routes_generator_for_server_url)
35 36
36 37
37 38 log = logging.getLogger(__name__)
38 39
39 40
class HooksHttpHandler(BaseHTTPRequestHandler):
    """Serves hook invocations arriving over HTTP as JSON requests."""

    def do_POST(self):
        # Decode the request, dispatch the hook and always answer with a
        # JSON payload -- exceptions are serialized, never propagated.
        method, extras = self._read_request()
        try:
            response = self._call_hook(method, extras)
        except Exception as exc:
            response = {
                'exception': exc.__class__.__name__,
                'exception_args': exc.args
            }
        self._write_response(response)

    def _read_request(self):
        # Content-Length tells us how much of the body to consume.
        content_length = int(self.headers['Content-Length'])
        payload = json.loads(
            self.rfile.read(content_length).decode('utf-8'))
        return payload['method'], payload['extras']

    def _write_response(self, result):
        self.send_response(200)
        self.send_header("Content-type", "text/json")
        self.end_headers()
        self.wfile.write(json.dumps(result))

    def _call_hook(self, method, extras):
        # Dispatch by name onto a fresh Hooks instance.
        return getattr(Hooks(), method)(extras)

    def log_message(self, format, *args):
        """
        This is an overriden method of BaseHTTPRequestHandler which logs using
        logging library instead of writing directly to stderr.
        """
        # TODO: mikhail: add different log levels support
        log.debug(
            "%s - - [%s] %s", self.client_address[0],
            self.log_date_time_string(), format % args)
81 82
82 83
class DummyHooksCallbackDaemon(object):
    """No-op callback daemon used when hooks are invoked via direct calls."""

    def __init__(self):
        # Callers only need to know which module implements the hooks.
        self.hooks_module = Hooks.__module__

    def __enter__(self):
        log.debug('Running dummy hooks callback daemon')
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        log.debug('Exiting dummy hooks callback daemon')
93 94
94 95
class ThreadedHookCallbackDaemon(object):
    """
    Base class for callback daemons which serve hook calls from a background
    thread. Subclasses provide the actual transport by implementing
    `_prepare`, `_run` and `_stop`; this class supplies the context-manager
    protocol around them.
    """

    # Thread running the daemon's event loop while the context is active.
    _callback_thread = None
    # Transport-specific daemon/server object created by `_prepare`.
    _daemon = None
    # Flag used by subclasses to signal the event loop to terminate.
    _done = False

    def __init__(self):
        self._prepare()

    def __enter__(self):
        self._run()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self._stop()

    def _prepare(self):
        raise NotImplementedError()

    def _run(self):
        raise NotImplementedError()

    def _stop(self):
        raise NotImplementedError()
119 120
120 121
class Pyro4HooksCallbackDaemon(ThreadedHookCallbackDaemon):
    """
    Context manager which will run a callback daemon in a background thread.
    """

    hooks_uri = None

    def _prepare(self):
        log.debug("Preparing callback daemon and registering hook object")
        self._daemon = Pyro4.Daemon()
        self.hooks_uri = str(self._daemon.register(Hooks()))
        log.debug("Hooks uri is: %s", self.hooks_uri)

    def _run(self):
        log.debug("Running event loop of callback daemon in background thread")
        loop_thread = threading.Thread(
            target=self._daemon.requestLoop,
            kwargs={'loopCondition': lambda: not self._done})
        loop_thread.daemon = True
        loop_thread.start()
        self._callback_thread = loop_thread

    def _stop(self):
        log.debug("Waiting for background thread to finish.")
        # Flip the loop condition first so requestLoop can terminate.
        self._done = True
        self._callback_thread.join()
        self._daemon.close()
        self._daemon = None
        self._callback_thread = None
151 152
152 153
class HttpHooksCallbackDaemon(ThreadedHookCallbackDaemon):
    """
    Context manager which will run a callback daemon in a background thread.
    """

    hooks_uri = None

    IP_ADDRESS = '127.0.0.1'

    # From Python docs: Polling reduces our responsiveness to a shutdown
    # request and wastes cpu at all other times.
    POLL_INTERVAL = 0.1

    def _prepare(self):
        log.debug("Preparing callback daemon and registering hook object")
        self._done = False
        # Port 0 lets the OS pick a free port for us.
        self._daemon = TCPServer((self.IP_ADDRESS, 0), HooksHttpHandler)
        self.hooks_uri = '{}:{}'.format(
            self.IP_ADDRESS, self._daemon.server_address[1])
        log.debug("Hooks uri is: %s", self.hooks_uri)

    def _run(self):
        log.debug("Running event loop of callback daemon in background thread")
        serving_thread = threading.Thread(
            target=self._daemon.serve_forever,
            kwargs={'poll_interval': self.POLL_INTERVAL})
        serving_thread.daemon = True
        serving_thread.start()
        self._callback_thread = serving_thread

    def _stop(self):
        log.debug("Waiting for background thread to finish.")
        self._daemon.shutdown()
        self._callback_thread.join()
        self._daemon = None
        self._callback_thread = None
191 192
192 193
def prepare_callback_daemon(extras, protocol=None, use_direct_calls=False):
    """
    Select and instantiate the hooks callback daemon for `protocol` and
    record its connection details inside `extras`.

    :param extras: dict passed along to the VCS hooks; mutated in place.
    :param protocol: 'pyro4' or anything else for the HTTP daemon.
    :param use_direct_calls: short-circuit to the dummy in-process daemon.
    :returns: tuple of (daemon instance, updated extras dict)
    """
    protocol = protocol.lower() if protocol else None

    if use_direct_calls:
        callback_daemon = DummyHooksCallbackDaemon()
        extras['hooks_module'] = callback_daemon.hooks_module
    else:
        if protocol == 'pyro4':
            callback_daemon = Pyro4HooksCallbackDaemon()
        else:
            # HTTP is the fallback transport.
            callback_daemon = HttpHooksCallbackDaemon()
        extras['hooks_uri'] = callback_daemon.hooks_uri
        extras['hooks_protocol'] = protocol

    return callback_daemon, extras
209 210
210 211
class Hooks(object):
    """
    Exposes the hooks for remote call backs
    """

    @Pyro4.callback
    def repo_size(self, extras):
        log.debug("Called repo_size of Hooks object")
        return self._call_hook(hooks_base.repo_size, extras)

    @Pyro4.callback
    def pre_pull(self, extras):
        log.debug("Called pre_pull of Hooks object")
        return self._call_hook(hooks_base.pre_pull, extras)

    @Pyro4.callback
    def post_pull(self, extras):
        log.debug("Called post_pull of Hooks object")
        return self._call_hook(hooks_base.post_pull, extras)

    @Pyro4.callback
    def pre_push(self, extras):
        log.debug("Called pre_push of Hooks object")
        return self._call_hook(hooks_base.pre_push, extras)

    @Pyro4.callback
    def post_push(self, extras):
        log.debug("Called post_push of Hooks object")
        return self._call_hook(hooks_base.post_push, extras)

    def _call_hook(self, hook, extras):
        """
        Run `hook` with a pylons URL generator bound to the server url the
        client connected with, so links rendered by hook handlers (e.g.
        integration messages) point at the right host.
        """
        extras = AttributeDict(extras)
        pylons_router = get_routes_generator_for_server_url(extras.server_url)
        pylons.url._push_object(pylons_router)

        try:
            result = hook(extras)
        except Exception as error:
            log.exception('Exception when handling hook %s', hook)
            return {
                'status': 128,
                'output': '',
                'exception': type(error).__name__,
                'exception_args': error.args,
            }
        finally:
            # Always restore the previous URL generator, even on failure.
            pylons.url._pop_object()

        return {
            'status': result.status,
            'output': result.output,
        }

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        pass
@@ -1,860 +1,886 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 Some simple helper functions
24 24 """
25 25
26 26
27 27 import collections
28 28 import datetime
29 29 import dateutil.relativedelta
30 30 import hashlib
31 31 import logging
32 32 import re
33 33 import sys
34 34 import time
35 35 import threading
36 36 import urllib
37 37 import urlobject
38 38 import uuid
39 39
40 40 import pygments.lexers
41 41 import sqlalchemy
42 42 import sqlalchemy.engine.url
43 43 import webob
44 import routes.util
44 45
45 46 import rhodecode
46 47
47 48
def md5(s):
    """Return the hex digest of the MD5 hash of `s`."""
    digest = hashlib.md5(s)
    return digest.hexdigest()
50 51
51 52
def md5_safe(s):
    """Return the MD5 hex digest of `s` after coercing it to a byte string."""
    # safe_str handles unicode / non-string input before hashing.
    return md5(safe_str(s))
54 55
55 56
def __get_lem():
    """
    Get language extension map based on what's inside pygments lexers
    """
    d = collections.defaultdict(lambda: [])

    def __clean(s):
        # Lexer filename patterns look like '*.py' or '*.php[345]'.
        s = s.lstrip('*')
        s = s.lstrip('.')

        if s.find('[') != -1:
            exts = []
            start, stop = s.find('['), s.find(']')

            # Expand the character class: 'php[345]' -> php3, php4, php5.
            for suffix in s[start + 1:stop]:
                exts.append(s[:s.find('[')] + suffix)
            return [e.lower() for e in exts]
        else:
            return [s.lower()]

    for lx, t in sorted(pygments.lexers.LEXERS.items()):
        # t[-2] holds the lexer's filename glob patterns.
        m = map(__clean, t[-2])
        if m:
            # Flatten the per-pattern extension lists (py2 map returns a list).
            m = reduce(lambda x, y: x + y, m)
            for ext in m:
                desc = lx.replace('Lexer', '')
                d[ext].append(desc)

    return dict(d)
85 86
86 87
def str2bool(_str):
    """
    returs True/False value from given string, it tries to translate the
    string into boolean

    :param _str: string value to translate into boolean
    :rtype: boolean
    :returns: boolean from given string
    """
    if _str is None:
        return False
    if _str in (True, False):
        # NOTE: due to int/bool equality this also passes through 0 and 1.
        return _str
    return str(_str).strip().lower() in ('t', 'true', 'y', 'yes', 'on', '1')
102 103
103 104
def aslist(obj, sep=None, strip=True):
    """
    Returns given string separated by sep as list

    :param obj: string to split, or an already list-like / None value
    :param sep: separator passed to str.split
    :param strip: strip whitespace from each resulting chunk
    """
    if isinstance(obj, (basestring)):
        chunks = obj.split(sep)
        return [chunk.strip() for chunk in chunks] if strip else chunks
    if isinstance(obj, (list, tuple)):
        return obj
    if obj is None:
        return []
    # Any other scalar becomes a single-element list.
    return [obj]
123 124
124 125
def convert_line_endings(line, mode):
    """
    Converts a given line "line end" accordingly to given mode

    Available modes are::
        0 - Unix
        1 - Mac
        2 - DOS

    :param line: given line to convert
    :param mode: mode to convert to
    :rtype: str
    :return: converted line according to mode
    """
    if mode == 0:
        # Unix: collapse CRLF first, then any stray CR.
        return line.replace('\r\n', '\n').replace('\r', '\n')
    if mode == 1:
        # Mac: CRLF then LF become CR.
        return line.replace('\r\n', '\r').replace('\n', '\r')
    if mode == 2:
        # DOS: every lone CR or LF becomes CRLF.
        return re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line)
    # Unknown mode: hand the line back untouched.
    return line
148 149
149 150
def detect_mode(line, default):
    """
    Detects line break for given line, if line break couldn't be found
    given default value is returned

    :param line: str line
    :param default: default
    :rtype: int
    :return: value of line end on of 0 - Unix, 1 - Mac, 2 - DOS
    """
    # CRLF must be tested before bare LF / CR.
    for suffix, mode in (('\r\n', 2), ('\n', 0), ('\r', 1)):
        if line.endswith(suffix):
            return mode
    return default
168 169
169 170
def safe_int(val, default=None):
    """
    Returns int() of val if val is not convertable to int use default
    instead

    :param val: value to coerce
    :param default: returned when coercion fails
    """
    try:
        return int(val)
    except (ValueError, TypeError):
        # Not a number (bad string, None, ...): use the fallback.
        return default
185 186
186 187
def safe_unicode(str_, from_encoding=None):
    """
    safe unicode function. Does few trick to turn str_ into unicode

    In case of UnicodeDecode error, we try to return it with encoding detected
    by chardet library if it fails fallback to unicode with errors replaced

    :param str_: string to decode
    :rtype: unicode
    :returns: unicode object
    """
    if isinstance(str_, unicode):
        return str_

    if not from_encoding:
        # Fall back to the configured encoding list, defaulting to utf8.
        DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
            'utf8'), sep=',')
        from_encoding = DEFAULT_ENCODINGS

    if not isinstance(from_encoding, (list, tuple)):
        from_encoding = [from_encoding]

    # Cheapest attempt first: implicit ascii decode.
    try:
        return unicode(str_)
    except UnicodeDecodeError:
        pass

    # Then each configured encoding in order.
    for enc in from_encoding:
        try:
            return unicode(str_, enc)
        except UnicodeDecodeError:
            pass

    # Last resort: let chardet guess; on any failure decode with the first
    # configured encoding, replacing undecodable bytes.
    try:
        import chardet
        encoding = chardet.detect(str_)['encoding']
        if encoding is None:
            raise Exception()
        return str_.decode(encoding)
    except (ImportError, UnicodeDecodeError, Exception):
        return unicode(str_, from_encoding[0], 'replace')
228 229
229 230
def safe_str(unicode_, to_encoding=None):
    """
    safe str function. Does few trick to turn unicode_ into string

    In case of UnicodeEncodeError, we try to return it with encoding detected
    by chardet library if it fails fallback to string with errors replaced

    :param unicode_: unicode to encode
    :param to_encoding: encoding (or list of encodings) to try first;
        defaults to the configured `default_encoding` list
    :rtype: str
    :returns: str object
    """

    # if it's not basestr cast to str
    if not isinstance(unicode_, basestring):
        return str(unicode_)

    if isinstance(unicode_, str):
        return unicode_

    if not to_encoding:
        DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
            'utf8'), sep=',')
        to_encoding = DEFAULT_ENCODINGS

    if not isinstance(to_encoding, (list, tuple)):
        to_encoding = [to_encoding]

    # Try each configured encoding in order.
    for enc in to_encoding:
        try:
            return unicode_.encode(enc)
        except UnicodeEncodeError:
            pass

    # Let chardet guess; any failure falls through to the lossy fallback.
    try:
        import chardet
        encoding = chardet.detect(unicode_)['encoding']
        if encoding is not None:
            return unicode_.encode(encoding)
    except (ImportError, UnicodeEncodeError):
        pass

    # Bug fix: the original raised a bare `UnicodeEncodeError()` when chardet
    # returned no encoding; that constructor needs 5 arguments, so it actually
    # raised TypeError which escaped the `except` clause and crashed the
    # caller. Fall through to encoding with errors replaced instead.
    return unicode_.encode(to_encoding[0], 'replace')
272 273
273 274
def remove_suffix(s, suffix):
    """
    Return `s` without a trailing `suffix`; `s` is unchanged when the suffix
    is absent or empty.

    Bug fix: the original sliced `s[:-len(suffix)]` without guarding against
    an empty suffix -- `s.endswith('')` is always True and `s[:-0]` is `''`,
    so `remove_suffix(s, '')` wiped the whole string.
    """
    if suffix and s.endswith(suffix):
        return s[:-len(suffix)]
    return s
278 279
279 280
def remove_prefix(s, prefix):
    """Return `s` without a leading `prefix`; unchanged when absent."""
    return s[len(prefix):] if s.startswith(prefix) else s
284 285
285 286
def find_calling_context(ignore_modules=None):
    """
    Look through the calling stack and return the frame which called
    this function and is part of core module ( ie. rhodecode.* )

    :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib']
    :returns: the matching frame, or None when no project frame is found
    """
    ignore_modules = ignore_modules or []
    # Frames belonging to "us" share our top-level package name.
    top_level_pkg = __name__.split('.')[0]

    frame = sys._getframe(2)
    while frame.f_back is not None:
        module_name = frame.f_globals.get('__name__')
        if module_name and module_name.startswith(top_level_pkg):
            if module_name not in ignore_modules:
                return frame
        frame = frame.f_back
    return None
304 305
305 306
def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
    """Custom engine_from_config functions.

    Builds the SQLAlchemy engine and, when `configuration['debug']` is set,
    attaches cursor-execute listeners that log each query (colorized) along
    with the project call site that triggered it.
    """
    # NOTE: deliberately shadows the module-level `log` so query logging goes
    # to the 'sqlalchemy.engine' logger.
    log = logging.getLogger('sqlalchemy.engine')
    engine = sqlalchemy.engine_from_config(configuration, prefix, **kwargs)

    def color_sql(sql):
        # Wrap the statement in ANSI escape codes for terminal output.
        color_seq = '\033[1;33m'  # This is yellow: code 33
        normal = '\x1b[0m'
        return ''.join([color_seq, sql, normal])

    if configuration['debug']:
        # attach events only for debug configuration

        def before_cursor_execute(conn, cursor, statement,
                                  parameters, context, executemany):
            setattr(conn, 'query_start_time', time.time())
            log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
            # Skip infrastructure modules so we report the real caller.
            calling_context = find_calling_context(ignore_modules=[
                'rhodecode.lib.caching_query',
                'rhodecode.model.settings',
            ])
            if calling_context:
                log.info(color_sql('call context %s:%s' % (
                    calling_context.f_code.co_filename,
                    calling_context.f_lineno,
                )))

        def after_cursor_execute(conn, cursor, statement,
                                 parameters, context, executemany):
            delattr(conn, 'query_start_time')

        sqlalchemy.event.listen(engine, "before_cursor_execute",
                                before_cursor_execute)
        sqlalchemy.event.listen(engine, "after_cursor_execute",
                                after_cursor_execute)

    return engine
343 344
344 345
def get_encryption_key(config):
    """
    Return the secret used for encrypted values, falling back to the beaker
    session secret when no dedicated secret is configured.
    """
    explicit_secret = config.get('rhodecode.encrypted_values.secret')
    # The session secret is a mandatory setting, so index directly.
    session_secret = config['beaker.session.secret']
    return explicit_secret or session_secret
349 350
350 351
def age(prevdate, now=None, show_short_version=False, show_suffix=True,
        short_format=False):
    """
    Turns a datetime into an age string.
    If show_short_version is True, this generates a shorter string with
    an approximate age; ex. '1 day ago', rather than '1 day and 23 hours ago'.

    * IMPORTANT*
    Code of this function is written in special way so it's easier to
    backport it to javascript. If you mean to update it, please also update
    `jquery.timeago-extension.js` file

    :param prevdate: datetime object
    :param now: get current time, if not define we use
        `datetime.datetime.now()`
    :param show_short_version: if it should approximate the date and
        return a shorter string
    :param show_suffix: append 'ago' / prepend 'in' to the result
    :param short_format: show short format, eg 2D instead of 2 days
    :rtype: unicode
    :returns: unicode words describing age
    """
    from pylons.i18n.translation import _, ungettext

    def _get_relative_delta(now, prevdate):
        # Per-unit differences between the two datetimes.
        base = dateutil.relativedelta.relativedelta(now, prevdate)
        return {
            'year': base.years,
            'month': base.months,
            'day': base.days,
            'hour': base.hours,
            'minute': base.minutes,
            'second': base.seconds,
        }

    def _is_leap_year(year):
        return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)

    def get_month(prevdate):
        return prevdate.month

    def get_year(prevdate):
        return prevdate.year

    now = now or datetime.datetime.now()
    order = ['year', 'month', 'day', 'hour', 'minute', 'second']
    deltas = {}
    future = False

    # Normalize so that `prevdate` is always the earlier timestamp.
    if prevdate > now:
        now_old = now
        now = prevdate
        prevdate = now_old
        future = True
    if future:
        prevdate = prevdate.replace(microsecond=0)
    # Get date parts deltas
    for part in order:
        rel_delta = _get_relative_delta(now, prevdate)
        deltas[part] = rel_delta[part]

    # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
    # not 1 hour, -59 minutes and -59 seconds)
    offsets = [[5, 60], [4, 60], [3, 24]]
    for element in offsets:  # seconds, minutes, hours
        num = element[0]
        length = element[1]

        part = order[num]
        carry_part = order[num - 1]

        if deltas[part] < 0:
            deltas[part] += length
            deltas[carry_part] -= 1

    # Same thing for days except that the increment depends on the (variable)
    # number of days in the month
    month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
    if deltas['day'] < 0:
        if get_month(prevdate) == 2 and _is_leap_year(get_year(prevdate)):
            deltas['day'] += 29
        else:
            deltas['day'] += month_lengths[get_month(prevdate) - 1]

        deltas['month'] -= 1

    if deltas['month'] < 0:
        deltas['month'] += 12
        deltas['year'] -= 1

    # Format the result
    if short_format:
        fmt_funcs = {
            'year': lambda d: u'%dy' % d,
            'month': lambda d: u'%dm' % d,
            'day': lambda d: u'%dd' % d,
            'hour': lambda d: u'%dh' % d,
            'minute': lambda d: u'%dmin' % d,
            'second': lambda d: u'%dsec' % d,
        }
    else:
        fmt_funcs = {
            'year': lambda d: ungettext(u'%d year', '%d years', d) % d,
            'month': lambda d: ungettext(u'%d month', '%d months', d) % d,
            'day': lambda d: ungettext(u'%d day', '%d days', d) % d,
            'hour': lambda d: ungettext(u'%d hour', '%d hours', d) % d,
            'minute': lambda d: ungettext(u'%d minute', '%d minutes', d) % d,
            'second': lambda d: ungettext(u'%d second', '%d seconds', d) % d,
        }

    # Walk the units from largest to smallest and report the first non-zero
    # unit, optionally together with the unit right below it.
    i = 0
    for part in order:
        value = deltas[part]
        if value != 0:

            if i < 5:
                sub_part = order[i + 1]
                sub_value = deltas[sub_part]
            else:
                sub_value = 0

            if sub_value == 0 or show_short_version:
                _val = fmt_funcs[part](value)
                if future:
                    if show_suffix:
                        return _(u'in %s') % _val
                    else:
                        return _val

                else:
                    if show_suffix:
                        return _(u'%s ago') % _val
                    else:
                        return _val

            val = fmt_funcs[part](value)
            val_detail = fmt_funcs[sub_part](sub_value)

            if short_format:
                datetime_tmpl = u'%s, %s'
                if show_suffix:
                    datetime_tmpl = _(u'%s, %s ago')
                    if future:
                        datetime_tmpl = _(u'in %s, %s')
            else:
                datetime_tmpl = _(u'%s and %s')
                if show_suffix:
                    datetime_tmpl = _(u'%s and %s ago')
                    if future:
                        datetime_tmpl = _(u'in %s and %s')

            return datetime_tmpl % (val, val_detail)
        i += 1
    return _(u'just now')
505 506
506 507
def uri_filter(uri):
    """
    Removes user:password from given url string

    :param uri:
    :rtype: unicode
    :returns: filtered list of strings
    """
    if not uri:
        return ''

    proto = ''
    for scheme in ('https://', 'http://'):
        if uri.startswith(scheme):
            proto = scheme
            uri = uri[len(scheme):]
            break

    # drop credentials: everything up to and including '@'
    uri = uri[uri.find('@') + 1:]

    # split off an optional port
    colon = uri.find(':')
    if colon == -1:
        host, port = uri, None
    else:
        host, port = uri[:colon], uri[colon + 1:]

    return filter(None, [proto, host, port])
537 538
538 539
def credentials_filter(uri):
    """
    Returns a url with removed credentials

    :param uri:
    """
    parts = uri_filter(uri)
    # re-attach the port separator when a port survived filtering
    if len(parts) > 2 and parts[2]:
        parts[2] = ':' + parts[2]
    return ''.join(parts)
552 553
553 554
def get_clone_url(uri_tmpl, qualifed_home_url, repo_name, repo_id, **override):
    """
    Render the clone url template `uri_tmpl` by substituting `{scheme}`,
    `{user}`, `{netloc}`, `{prefix}`, `{repo}` and `{repoid}` placeholders
    derived from the instance home url and the repository.
    """
    parsed_url = urlobject.URLObject(qualifed_home_url)
    decoded_path = safe_unicode(urllib.unquote(parsed_url.path.rstrip('/')))
    args = {
        'scheme': parsed_url.scheme,
        'user': '',
        # path if we use proxy-prefix
        'netloc': parsed_url.netloc+decoded_path,
        'prefix': decoded_path,
        'repo': repo_name,
        'repoid': str(repo_id)
    }
    args.update(override)
    args['user'] = urllib.quote(safe_str(args['user']))

    rendered = uri_tmpl
    for key, value in args.items():
        rendered = rendered.replace('{%s}' % key, value)

    # remove leading @ sign if it's present. Case of empty user
    url_obj = urlobject.URLObject(rendered)
    return safe_unicode(url_obj.with_netloc(url_obj.netloc.lstrip('@')))
577 578
578 579
def get_commit_safe(repo, commit_id=None, commit_idx=None, pre_load=None):
    """
    Safe version of get_commit if this commit doesn't exists for a
    repository it returns a Dummy one instead

    :param repo: repository instance
    :param commit_id: commit id as str
    :param commit_idx: numeric commit index
    :param pre_load: optional list of commit attributes to load
    """
    # TODO(skreft): remove these circular imports
    from rhodecode.lib.vcs.backends.base import BaseRepository, EmptyCommit
    from rhodecode.lib.vcs.exceptions import RepositoryError
    if not isinstance(repo, BaseRepository):
        # Bug fix: the original passed type(repo) as a second Exception
        # argument (logging style), leaving '%s' unformatted in the message;
        # interpolate it properly instead.
        raise Exception('You must pass an Repository '
                        'object as first argument got %s' % type(repo))

    try:
        commit = repo.get_commit(
            commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
    except (RepositoryError, LookupError):
        # Missing/unknown commit: degrade to an empty placeholder commit.
        commit = EmptyCommit()
    return commit
601 602
602 603
def datetime_to_time(dt):
    """Convert a datetime to a Unix timestamp; returns None for falsy input."""
    return time.mktime(dt.timetuple()) if dt else None
606 607
607 608
def time_to_datetime(tm):
    """
    Convert a Unix timestamp (number or numeric string) to a local datetime.
    Returns None for falsy input or for strings that are not numbers.
    """
    if not tm:
        return None
    if isinstance(tm, basestring):
        try:
            tm = float(tm)
        except ValueError:
            return None
    return datetime.datetime.fromtimestamp(tm)
616 617
617 618
def time_to_utcdatetime(tm):
    """
    Convert a Unix timestamp (number or numeric string) to a UTC datetime.
    Returns None for falsy input or for strings that are not numbers.
    """
    if not tm:
        return None
    if isinstance(tm, basestring):
        try:
            tm = float(tm)
        except ValueError:
            return None
    return datetime.datetime.utcfromtimestamp(tm)
626 627
627 628
MENTIONS_REGEX = re.compile(
    # ^@ or @ without any special chars in front
    r'(?:^@|[^a-zA-Z0-9\-\_\.]@)'
    # main body starts with letter, then can be . - _
    r'([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)',
    re.VERBOSE | re.MULTILINE)


def extract_mentioned_users(s):
    """
    Return a case-insensitively sorted list of the unique usernames that are
    @mentioned inside string `s`.

    :param s: string to get mentions
    """
    return sorted(set(MENTIONS_REGEX.findall(s)),
                  key=lambda name: name.lower())
647 648
648 649
class AttributeDict(dict):
    """
    A dict whose keys are also readable/writable as attributes; reading a
    missing attribute yields None instead of raising AttributeError.
    """
    def __getattr__(self, attr):
        return dict.get(self, attr, None)
    __setattr__ = dict.__setitem__
    __delattr__ = dict.__delitem__
654 655
655 656
def fix_PATH(os_=None):
    """
    Get current active python path, and append it to PATH variable to fix
    issues of subprocess calls and different python versions

    :param os_: optional os-module stand-in, mainly for testing
    """
    if os_ is None:
        import os
    else:
        os = os_

    interpreter_dir = os.path.split(sys.executable)[0]
    if not os.environ['PATH'].startswith(interpreter_dir):
        os.environ['PATH'] = '%s:%s' % (interpreter_dir, os.environ['PATH'])
669 670
670 671
def obfuscate_url_pw(engine):
    """
    Return a unicode representation of `engine` (a DB connection string)
    with the password, if any, replaced by 'XXXXX'.
    """
    sanitized = engine or ''
    try:
        sanitized = sqlalchemy.engine.url.make_url(engine)
        if sanitized.password:
            sanitized.password = 'XXXXX'
    except Exception:
        # Not a parsable URL -- fall through and stringify what we have.
        pass
    return unicode(sanitized)
680 681
681 682
def get_server_url(environ):
    """Return the request's base URL (host url + script name) for `environ`."""
    request = webob.Request(environ)
    return request.host_url + request.script_name
685 686
686 687
def unique_id(hexlen=32):
    """
    Return a random short ID of up to `hexlen` characters drawn from an
    alphabet that avoids visually ambiguous characters (0/O, 1/l, ...).
    """
    alphabet = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz"
    return suuid(truncate_to=hexlen, alphabet=alphabet)
690 691
691 692
def suuid(url=None, truncate_to=22, alphabet=None):
    """
    Generate and return a short URL safe UUID.

    If the url parameter is provided, set the namespace to the provided
    URL and generate a UUID (deterministic, UUID v3); otherwise a random
    UUID v4 is used.

    :param url: url to get the uuid for
    :param truncate_to: truncate the basic 22 UUID to shorter version

    The IDs won't be universally unique any longer, but the probability of
    a collision will still be very low.
    """
    # Define our alphabet.
    symbols = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ"

    # If no URL is given, generate a random UUID.
    if url is None:
        remainder = uuid.uuid4().int
    else:
        remainder = uuid.uuid3(uuid.NAMESPACE_URL, url).int

    # Repeatedly take the remainder modulo the alphabet size -- i.e. express
    # the UUID integer in base len(symbols).
    base = len(symbols)
    digits = []
    while remainder > 0:
        remainder, index = divmod(remainder, base)
        digits.append(symbols[index])
    return "".join(digits)[:truncate_to]
721 722
722 723
def get_current_rhodecode_user():
    """
    Gets rhodecode user from threadlocal tmpl_context variable if it's
    defined, else returns None.
    """
    from pylons import tmpl_context as c
    # NOTE: `c` is a pylons StackedObjectProxy; hasattr() is used on purpose
    # since it also returns False when no object is registered on the stack.
    if hasattr(c, 'rhodecode_user'):
        return c.rhodecode_user

    return None
733 734
734 735
def action_logger_generic(action, namespace=''):
    """
    A generic logger for actions useful to the system overview, tries to find
    an acting user for the context of the call otherwise reports unknown user

    :param action: logging message eg 'comment 5 deleted'
    :param type: string

    :param namespace: namespace of the logging message eg. 'repo.comments'
    :param type: string

    """
    logger_name = 'rhodecode.actions'
    if namespace:
        logger_name += '.' + namespace
    action_log = logging.getLogger(logger_name)

    # get a user if we can; unknown actors are logged at warning level
    user = get_current_rhodecode_user()
    if user:
        logfunc = action_log.info
    else:
        user = '<unknown user>'
        logfunc = action_log.warning

    logfunc('Logging action by {}: {}'.format(user, action))
765 766
766 767
def escape_split(text, sep=',', maxsplit=-1):
    r"""
    Allows for escaping of the separator: e.g. arg='foo\, bar'

    It should be noted that the way bash et. al. do command line parsing, those
    single quotes are required.
    """
    escaped_sep = r'\%s' % sep

    # fast path: no escaped separator anywhere, a plain split will do
    if escaped_sep not in text:
        return text.split(sep, maxsplit)

    head, _marker, tail = text.partition(escaped_sep)
    # everything before the first escape splits normally
    parts = head.split(sep, maxsplit)
    pending = parts.pop()  # the last piece continues past the escape

    # recurse because there may be more escaped separators
    rest = escape_split(tail, sep, maxsplit)

    # rest[0] is the remainder of the escaped value; glue it back on with
    # a literal (now unescaped) separator
    pending += sep + rest[0]

    return parts + [pending] + rest[1:]
792 793
793 794
class OptionalAttr(object):
    """
    Marker object naming another parameter whose value should be used as
    the default. Typically wrapped in `Optional`::

        def test(apiuser, userid=Optional(OAttr('apiuser')):
            user = Optional.extract(userid)

    """

    def __init__(self, attr_name):
        # name of the parameter this marker points at
        self.attr_name = attr_name

    def __repr__(self):
        return '<OptionalAttr:{}>'.format(self.attr_name)

    def __call__(self):
        # calling the marker is a no-op returning the marker itself
        return self


# alias
OAttr = OptionalAttr
816 817
817 818
class Optional(object):
    """
    Defines an optional parameter::

        param = param.getval() if isinstance(param, Optional) else param
        param = param() if isinstance(param, Optional) else param

    is equivalent of::

        param = Optional.extract(param)

    """

    def __init__(self, type_):
        self.type_ = type_

    def __repr__(self):
        return '<Optional:{}>'.format(repr(self.type_))

    def __call__(self):
        return self.getval()

    def getval(self):
        """
        returns value from this Optional instance
        """
        wrapped = self.type_
        if isinstance(wrapped, OAttr):
            # an OAttr marker unwraps to the name of the referenced param
            return wrapped.attr_name
        return wrapped

    @classmethod
    def extract(cls, val):
        """
        Extracts value from Optional() instance

        :param val:
        :return: original value if it's not Optional instance else
            value of instance
        """
        return val.getval() if isinstance(val, cls) else val
862
863
def get_routes_generator_for_server_url(server_url):
    """
    Build a routes URLGenerator that produces URLs absolute against the
    given ``server_url`` (scheme, host, port and script name).
    """
    parsed_url = urlobject.URLObject(server_url)
    netloc = safe_str(parsed_url.netloc)
    script_name = safe_str(parsed_url.path)

    # split off an explicit port when present, otherwise infer the
    # default port from the scheme
    if ':' in netloc:
        server_name, server_port = netloc.split(':')
    else:
        server_name = netloc
        server_port = '443' if parsed_url.scheme == 'https' else '80'

    # minimal WSGI environ; just enough for routes to reconstruct
    # absolute URLs for the target server
    environ = {
        'REQUEST_METHOD': 'GET',
        'PATH_INFO': '/',
        'SERVER_NAME': server_name,
        'SERVER_PORT': server_port,
        'SCRIPT_NAME': script_name,
    }
    if parsed_url.scheme == 'https':
        environ['HTTPS'] = 'on'
        environ['wsgi.url_scheme'] = 'https'

    return routes.util.URLGenerator(rhodecode.CONFIG['routes.map'], environ)
@@ -1,1154 +1,1154 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26 from collections import namedtuple
27 27 import json
28 28 import logging
29 29 import datetime
30 30
31 31 from pylons.i18n.translation import _
32 32 from pylons.i18n.translation import lazy_ugettext
33 33
34 34 import rhodecode
35 35 from rhodecode.lib import helpers as h, hooks_utils, diffs
36 36 from rhodecode.lib.compat import OrderedDict
37 37 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
38 38 from rhodecode.lib.markup_renderer import (
39 39 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
40 40 from rhodecode.lib.utils import action_logger
41 41 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
42 42 from rhodecode.lib.vcs.backends.base import (
43 43 Reference, MergeResponse, MergeFailureReason)
44 44 from rhodecode.lib.vcs.exceptions import (
45 45 CommitDoesNotExistError, EmptyRepositoryError)
46 46 from rhodecode.model import BaseModel
47 47 from rhodecode.model.changeset_status import ChangesetStatusModel
48 48 from rhodecode.model.comment import ChangesetCommentsModel
49 49 from rhodecode.model.db import (
50 50 PullRequest, PullRequestReviewers, Notification, ChangesetStatus,
51 51 PullRequestVersion, ChangesetComment)
52 52 from rhodecode.model.meta import Session
53 53 from rhodecode.model.notification import NotificationModel, \
54 54 EmailNotificationModel
55 55 from rhodecode.model.scm import ScmModel
56 56 from rhodecode.model.settings import VcsSettingsModel
57 57
58 58
59 59 log = logging.getLogger(__name__)
60 60
61 61
62 62 class PullRequestModel(BaseModel):
63 63
    # model base: BaseModel helpers (_get_instance etc.) operate on this class
    cls = PullRequest

    # number of context lines requested around diff hunks
    DIFF_CONTEXT = 3

    # user-facing explanation for each MergeFailureReason; shown when a
    # pull request cannot be merged automatically
    MERGE_STATUS_MESSAGES = {
        MergeFailureReason.NONE: lazy_ugettext(
            'This pull request can be automatically merged.'),
        MergeFailureReason.UNKNOWN: lazy_ugettext(
            'This pull request cannot be merged because of an unhandled'
            ' exception.'),
        MergeFailureReason.MERGE_FAILED: lazy_ugettext(
            'This pull request cannot be merged because of conflicts.'),
        MergeFailureReason.PUSH_FAILED: lazy_ugettext(
            'This pull request could not be merged because push to target'
            ' failed.'),
        MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
            'This pull request cannot be merged because the target is not a'
            ' head.'),
        MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
            'This pull request cannot be merged because the source contains'
            ' more branches than the target.'),
        MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
            'This pull request cannot be merged because the target has'
            ' multiple heads.'),
        MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
            'This pull request cannot be merged because the target repository'
            ' is locked.'),
        MergeFailureReason.MISSING_COMMIT: lazy_ugettext(
            'This pull request cannot be merged because the target or the '
            'source reference is missing.'),
    }
95 95
    def __get_pull_request(self, pull_request):
        # resolve an id or instance into a PullRequest instance
        # (name-mangled, private to this class)
        return self._get_instance(PullRequest, pull_request)
98 98
99 99 def _check_perms(self, perms, pull_request, user, api=False):
100 100 if not api:
101 101 return h.HasRepoPermissionAny(*perms)(
102 102 user=user, repo_name=pull_request.target_repo.repo_name)
103 103 else:
104 104 return h.HasRepoPermissionAnyApi(*perms)(
105 105 user=user, repo_name=pull_request.target_repo.repo_name)
106 106
107 107 def check_user_read(self, pull_request, user, api=False):
108 108 _perms = ('repository.admin', 'repository.write', 'repository.read',)
109 109 return self._check_perms(_perms, pull_request, user, api)
110 110
111 111 def check_user_merge(self, pull_request, user, api=False):
112 112 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
113 113 return self._check_perms(_perms, pull_request, user, api)
114 114
115 115 def check_user_update(self, pull_request, user, api=False):
116 116 owner = user.user_id == pull_request.user_id
117 117 return self.check_user_merge(pull_request, user, api) or owner
118 118
119 119 def check_user_change_status(self, pull_request, user, api=False):
120 120 reviewer = user.user_id in [x.user_id for x in
121 121 pull_request.reviewers]
122 122 return self.check_user_update(pull_request, user, api) or reviewer
123 123
    def get(self, pull_request):
        # public accessor around the name-mangled private resolver
        return self.__get_pull_request(pull_request)
126 126
127 127 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
128 128 opened_by=None, order_by=None,
129 129 order_dir='desc'):
130 130 repo = self._get_repo(repo_name)
131 131 q = PullRequest.query()
132 132 # source or target
133 133 if source:
134 134 q = q.filter(PullRequest.source_repo == repo)
135 135 else:
136 136 q = q.filter(PullRequest.target_repo == repo)
137 137
138 138 # closed,opened
139 139 if statuses:
140 140 q = q.filter(PullRequest.status.in_(statuses))
141 141
142 142 # opened by filter
143 143 if opened_by:
144 144 q = q.filter(PullRequest.user_id.in_(opened_by))
145 145
146 146 if order_by:
147 147 order_map = {
148 148 'name_raw': PullRequest.pull_request_id,
149 149 'title': PullRequest.title,
150 150 'updated_on_raw': PullRequest.updated_on
151 151 }
152 152 if order_dir == 'asc':
153 153 q = q.order_by(order_map[order_by].asc())
154 154 else:
155 155 q = q.order_by(order_map[order_by].desc())
156 156
157 157 return q
158 158
159 159 def count_all(self, repo_name, source=False, statuses=None,
160 160 opened_by=None):
161 161 """
162 162 Count the number of pull requests for a specific repository.
163 163
164 164 :param repo_name: target or source repo
165 165 :param source: boolean flag to specify if repo_name refers to source
166 166 :param statuses: list of pull request statuses
167 167 :param opened_by: author user of the pull request
168 168 :returns: int number of pull requests
169 169 """
170 170 q = self._prepare_get_all_query(
171 171 repo_name, source=source, statuses=statuses, opened_by=opened_by)
172 172
173 173 return q.count()
174 174
175 175 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
176 176 offset=0, length=None, order_by=None, order_dir='desc'):
177 177 """
178 178 Get all pull requests for a specific repository.
179 179
180 180 :param repo_name: target or source repo
181 181 :param source: boolean flag to specify if repo_name refers to source
182 182 :param statuses: list of pull request statuses
183 183 :param opened_by: author user of the pull request
184 184 :param offset: pagination offset
185 185 :param length: length of returned list
186 186 :param order_by: order of the returned list
187 187 :param order_dir: 'asc' or 'desc' ordering direction
188 188 :returns: list of pull requests
189 189 """
190 190 q = self._prepare_get_all_query(
191 191 repo_name, source=source, statuses=statuses, opened_by=opened_by,
192 192 order_by=order_by, order_dir=order_dir)
193 193
194 194 if length:
195 195 pull_requests = q.limit(length).offset(offset).all()
196 196 else:
197 197 pull_requests = q.all()
198 198
199 199 return pull_requests
200 200
201 201 def count_awaiting_review(self, repo_name, source=False, statuses=None,
202 202 opened_by=None):
203 203 """
204 204 Count the number of pull requests for a specific repository that are
205 205 awaiting review.
206 206
207 207 :param repo_name: target or source repo
208 208 :param source: boolean flag to specify if repo_name refers to source
209 209 :param statuses: list of pull request statuses
210 210 :param opened_by: author user of the pull request
211 211 :returns: int number of pull requests
212 212 """
213 213 pull_requests = self.get_awaiting_review(
214 214 repo_name, source=source, statuses=statuses, opened_by=opened_by)
215 215
216 216 return len(pull_requests)
217 217
218 218 def get_awaiting_review(self, repo_name, source=False, statuses=None,
219 219 opened_by=None, offset=0, length=None,
220 220 order_by=None, order_dir='desc'):
221 221 """
222 222 Get all pull requests for a specific repository that are awaiting
223 223 review.
224 224
225 225 :param repo_name: target or source repo
226 226 :param source: boolean flag to specify if repo_name refers to source
227 227 :param statuses: list of pull request statuses
228 228 :param opened_by: author user of the pull request
229 229 :param offset: pagination offset
230 230 :param length: length of returned list
231 231 :param order_by: order of the returned list
232 232 :param order_dir: 'asc' or 'desc' ordering direction
233 233 :returns: list of pull requests
234 234 """
235 235 pull_requests = self.get_all(
236 236 repo_name, source=source, statuses=statuses, opened_by=opened_by,
237 237 order_by=order_by, order_dir=order_dir)
238 238
239 239 _filtered_pull_requests = []
240 240 for pr in pull_requests:
241 241 status = pr.calculated_review_status()
242 242 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
243 243 ChangesetStatus.STATUS_UNDER_REVIEW]:
244 244 _filtered_pull_requests.append(pr)
245 245 if length:
246 246 return _filtered_pull_requests[offset:offset+length]
247 247 else:
248 248 return _filtered_pull_requests
249 249
250 250 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
251 251 opened_by=None, user_id=None):
252 252 """
253 253 Count the number of pull requests for a specific repository that are
254 254 awaiting review from a specific user.
255 255
256 256 :param repo_name: target or source repo
257 257 :param source: boolean flag to specify if repo_name refers to source
258 258 :param statuses: list of pull request statuses
259 259 :param opened_by: author user of the pull request
260 260 :param user_id: reviewer user of the pull request
261 261 :returns: int number of pull requests
262 262 """
263 263 pull_requests = self.get_awaiting_my_review(
264 264 repo_name, source=source, statuses=statuses, opened_by=opened_by,
265 265 user_id=user_id)
266 266
267 267 return len(pull_requests)
268 268
269 269 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
270 270 opened_by=None, user_id=None, offset=0,
271 271 length=None, order_by=None, order_dir='desc'):
272 272 """
273 273 Get all pull requests for a specific repository that are awaiting
274 274 review from a specific user.
275 275
276 276 :param repo_name: target or source repo
277 277 :param source: boolean flag to specify if repo_name refers to source
278 278 :param statuses: list of pull request statuses
279 279 :param opened_by: author user of the pull request
280 280 :param user_id: reviewer user of the pull request
281 281 :param offset: pagination offset
282 282 :param length: length of returned list
283 283 :param order_by: order of the returned list
284 284 :param order_dir: 'asc' or 'desc' ordering direction
285 285 :returns: list of pull requests
286 286 """
287 287 pull_requests = self.get_all(
288 288 repo_name, source=source, statuses=statuses, opened_by=opened_by,
289 289 order_by=order_by, order_dir=order_dir)
290 290
291 291 _my = PullRequestModel().get_not_reviewed(user_id)
292 292 my_participation = []
293 293 for pr in pull_requests:
294 294 if pr in _my:
295 295 my_participation.append(pr)
296 296 _filtered_pull_requests = my_participation
297 297 if length:
298 298 return _filtered_pull_requests[offset:offset+length]
299 299 else:
300 300 return _filtered_pull_requests
301 301
302 302 def get_not_reviewed(self, user_id):
303 303 return [
304 304 x.pull_request for x in PullRequestReviewers.query().filter(
305 305 PullRequestReviewers.user_id == user_id).all()
306 306 ]
307 307
    def get_versions(self, pull_request):
        """
        returns versions of pull request sorted by ID ascending
        (oldest version first - note the `.asc()` ordering below)
        """
        return PullRequestVersion.query()\
            .filter(PullRequestVersion.pull_request == pull_request)\
            .order_by(PullRequestVersion.pull_request_version_id.asc())\
            .all()
316 316
    def create(self, created_by, source_repo, source_ref, target_repo,
               target_ref, revisions, reviewers, title, description=None):
        """
        Create a new pull request, attach its reviewers, mark all its
        commits as "under review", notify reviewers and fire the 'create'
        hook.

        :param created_by: user (or user id) creating the pull request
        :param source_repo, source_ref: where the changes come from
        :param target_repo, target_ref: where the changes should land
        :param revisions: commit ids included in the pull request
        :param reviewers: iterable of reviewer user ids
        :param title: pull request title
        :param description: optional long description
        :returns: the newly-created `PullRequest` instance
        """
        created_by_user = self._get_user(created_by)
        source_repo = self._get_repo(source_repo)
        target_repo = self._get_repo(target_repo)

        pull_request = PullRequest()
        pull_request.source_repo = source_repo
        pull_request.source_ref = source_ref
        pull_request.target_repo = target_repo
        pull_request.target_ref = target_ref
        pull_request.revisions = revisions
        pull_request.title = title
        pull_request.description = description
        pull_request.author = created_by_user

        Session().add(pull_request)
        # flush so the pull request gets its primary key; the reviewer
        # rows and status entries below reference it
        Session().flush()

        # members / reviewers
        for user_id in set(reviewers):
            user = self._get_user(user_id)
            reviewer = PullRequestReviewers(user, pull_request)
            Session().add(reviewer)

        # Set approval status to "Under Review" for all commits which are
        # part of this pull request.
        ChangesetStatusModel().set_status(
            repo=target_repo,
            status=ChangesetStatus.STATUS_UNDER_REVIEW,
            user=created_by_user,
            pull_request=pull_request
        )

        self.notify_reviewers(pull_request, reviewers)
        self._trigger_pull_request_hook(
            pull_request, created_by_user, 'create')

        return pull_request
356 356
357 357 def _trigger_pull_request_hook(self, pull_request, user, action):
358 358 pull_request = self.__get_pull_request(pull_request)
359 359 target_scm = pull_request.target_repo.scm_instance()
360 360 if action == 'create':
361 361 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
362 362 elif action == 'merge':
363 363 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
364 364 elif action == 'close':
365 365 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
366 366 elif action == 'review_status_change':
367 367 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
368 368 elif action == 'update':
369 369 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
370 370 else:
371 371 return
372 372
373 373 trigger_hook(
374 374 username=user.username,
375 375 repo_name=pull_request.target_repo.repo_name,
376 376 repo_alias=target_scm.alias,
377 377 pull_request=pull_request)
378 378
379 379 def _get_commit_ids(self, pull_request):
380 380 """
381 381 Return the commit ids of the merged pull request.
382 382
383 383 This method is not dealing correctly yet with the lack of autoupdates
384 384 nor with the implicit target updates.
385 385 For example: if a commit in the source repo is already in the target it
386 386 will be reported anyways.
387 387 """
388 388 merge_rev = pull_request.merge_rev
389 389 if merge_rev is None:
390 390 raise ValueError('This pull request was not merged yet')
391 391
392 392 commit_ids = list(pull_request.revisions)
393 393 if merge_rev not in commit_ids:
394 394 commit_ids.append(merge_rev)
395 395
396 396 return commit_ids
397 397
398 398 def merge(self, pull_request, user, extras):
399 399 log.debug("Merging pull request %s", pull_request.pull_request_id)
400 400 merge_state = self._merge_pull_request(pull_request, user, extras)
401 401 if merge_state.executed:
402 402 log.debug(
403 403 "Merge was successful, updating the pull request comments.")
404 404 self._comment_and_close_pr(pull_request, user, merge_state)
405 405 self._log_action('user_merged_pull_request', user, pull_request)
406 406 else:
407 407 log.warn("Merge failed, not updating the pull request.")
408 408 return merge_state
409 409
    def _merge_pull_request(self, pull_request, user, extras):
        """
        Perform the actual vcs-level merge of the pull request and return
        the resulting merge state. A callback daemon is kept alive around
        the merge so that vcs hooks fired by it can reach back into the
        application.
        """
        target_vcs = pull_request.target_repo.scm_instance()
        source_vcs = pull_request.source_repo.scm_instance()
        # make sure we merge against the current state of the target ref
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        message = _(
            'Merge pull request #%(pr_id)s from '
            '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
            'pr_id': pull_request.pull_request_id,
            'source_repo': source_vcs.name,
            'source_ref_name': pull_request.source_ref_parts.name,
            'pr_title': pull_request.title
        }

        workspace_id = self._workspace_id(pull_request)
        protocol = rhodecode.CONFIG.get('vcs.hooks.protocol')
        use_direct_calls = rhodecode.CONFIG.get('vcs.hooks.direct_calls')
        use_rebase = self._use_rebase_for_merging(pull_request)

        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=protocol, use_direct_calls=use_direct_calls)

        with callback_daemon:
            # TODO: johbo: Implement a clean way to run a config_override
            # for a single call.
            target_vcs.config.set(
                'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
            merge_state = target_vcs.merge(
                target_ref, source_vcs, pull_request.source_ref_parts,
                workspace_id, user_name=user.username,
                user_email=user.email, message=message, use_rebase=use_rebase)
        return merge_state
443 443
    def _comment_and_close_pr(self, pull_request, user, merge_state):
        """
        Record the merge commit on the pull request, add the closing
        comment, invalidate the target repo caches and fire the 'merge'
        hook. Called only after a successful merge.
        """
        pull_request.merge_rev = merge_state.merge_commit_id
        pull_request.updated_on = datetime.datetime.now()

        # closing comment; `closing_pr=True` also closes the pull request
        ChangesetCommentsModel().create(
            text=unicode(_('Pull request merged and closed')),
            repo=pull_request.target_repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            closing_pr=True
        )

        Session().add(pull_request)
        Session().flush()
        # TODO: paris: replace invalidation with less radical solution
        ScmModel().mark_for_invalidation(
            pull_request.target_repo.repo_name)
        self._trigger_pull_request_hook(pull_request, user, 'merge')
464 464
465 465 def has_valid_update_type(self, pull_request):
466 466 source_ref_type = pull_request.source_ref_parts.type
467 467 return source_ref_type in ['book', 'branch', 'tag']
468 468
    def update_commits(self, pull_request):
        """
        Get the updated list of commits for the pull request
        and return the new pull request version and the list
        of commits processed by this update action

        Returns ``(None, None)`` when the source ref type cannot be
        updated or when nothing changed; otherwise a tuple of
        ``(pull_request_version, changes)``.
        """
        pull_request = self.__get_pull_request(pull_request)
        source_ref_type = pull_request.source_ref_parts.type
        source_ref_name = pull_request.source_ref_parts.name
        source_ref_id = pull_request.source_ref_parts.commit_id

        # only movable refs (book/branch/tag) can receive new commits
        if not self.has_valid_update_type(pull_request):
            log.debug(
                "Skipping update of pull request %s due to ref type: %s",
                pull_request, source_ref_type)
            return (None, None)

        source_repo = pull_request.source_repo.scm_instance()
        source_commit = source_repo.get_commit(commit_id=source_ref_name)
        if source_ref_id == source_commit.raw_id:
            log.debug("Nothing changed in pull request %s", pull_request)
            return (None, None)

        # Finally there is a need for an update; snapshot the current state
        # as a version so existing comments can be anchored to it
        pull_request_version = self._create_version_from_snapshot(pull_request)
        self._link_comments_to_version(pull_request_version)

        target_ref_type = pull_request.target_ref_parts.type
        target_ref_name = pull_request.target_ref_parts.name
        target_ref_id = pull_request.target_ref_parts.commit_id
        target_repo = pull_request.target_repo.scm_instance()

        if target_ref_type in ('tag', 'branch', 'book'):
            target_commit = target_repo.get_commit(target_ref_name)
        else:
            target_commit = target_repo.get_commit(target_ref_id)

        # re-compute commit ids
        old_commit_ids = set(pull_request.revisions)
        pre_load = ["author", "branch", "date", "message"]
        commit_ranges = target_repo.compare(
            target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
            pre_load=pre_load)

        ancestor = target_repo.get_common_ancestor(
            target_commit.raw_id, source_commit.raw_id, source_repo)

        # refs are stored as 'type:name:commit_id' strings
        pull_request.source_ref = '%s:%s:%s' % (
            source_ref_type, source_ref_name, source_commit.raw_id)
        pull_request.target_ref = '%s:%s:%s' % (
            target_ref_type, target_ref_name, ancestor)
        pull_request.revisions = [
            commit.raw_id for commit in reversed(commit_ranges)]
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        new_commit_ids = set(pull_request.revisions)

        changes = self._calculate_commit_id_changes(
            old_commit_ids, new_commit_ids)

        old_diff_data, new_diff_data = self._generate_update_diffs(
            pull_request, pull_request_version)

        # mark comments whose anchor line changed as outdated
        ChangesetCommentsModel().outdate_comments(
            pull_request, old_diff_data=old_diff_data,
            new_diff_data=new_diff_data)

        file_changes = self._calculate_file_changes(
            old_diff_data, new_diff_data)

        # Add an automatic comment to the pull request
        update_comment = ChangesetCommentsModel().create(
            text=self._render_update_message(changes, file_changes),
            repo=pull_request.target_repo,
            user=pull_request.author,
            pull_request=pull_request,
            send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)

        # Update status to "Under Review" for added commits
        for commit_id in changes.added:
            ChangesetStatusModel().set_status(
                repo=pull_request.source_repo,
                status=ChangesetStatus.STATUS_UNDER_REVIEW,
                comment=update_comment,
                user=pull_request.author,
                pull_request=pull_request,
                revision=commit_id)

        log.debug(
            'Updated pull request %s, added_ids: %s, common_ids: %s, '
            'removed_ids: %s', pull_request.pull_request_id,
            changes.added, changes.common, changes.removed)
        log.debug('Updated pull request with the following file changes: %s',
                  file_changes)

        log.info(
            "Updated pull request %s from commit %s to commit %s, "
            "stored new version %s of this pull request.",
            pull_request.pull_request_id, source_ref_id,
            pull_request.source_ref_parts.commit_id,
            pull_request_version.pull_request_version_id)
        Session().commit()
        self._trigger_pull_request_hook(pull_request, pull_request.author,
                                        'update')
        return (pull_request_version, changes)
575 575
    def _create_version_from_snapshot(self, pull_request):
        """
        Copy the current state of `pull_request` into a new
        `PullRequestVersion` row and return it, so the pre-update state
        stays addressable after the pull request itself is modified.
        """
        version = PullRequestVersion()
        version.title = pull_request.title
        version.description = pull_request.description
        version.status = pull_request.status
        version.created_on = pull_request.created_on
        version.updated_on = pull_request.updated_on
        version.user_id = pull_request.user_id
        version.source_repo = pull_request.source_repo
        version.source_ref = pull_request.source_ref
        version.target_repo = pull_request.target_repo
        version.target_ref = pull_request.target_ref

        # cached merge-check state is copied verbatim as well
        version._last_merge_source_rev = pull_request._last_merge_source_rev
        version._last_merge_target_rev = pull_request._last_merge_target_rev
        version._last_merge_status = pull_request._last_merge_status
        version.merge_rev = pull_request.merge_rev

        version.revisions = pull_request.revisions
        version.pull_request = pull_request
        Session().add(version)
        # flush so the version receives its primary key before callers use it
        Session().flush()

        return version
600 600
601 601 def _generate_update_diffs(self, pull_request, pull_request_version):
602 602 diff_context = (
603 603 self.DIFF_CONTEXT +
604 604 ChangesetCommentsModel.needed_extra_diff_context())
605 605 old_diff = self._get_diff_from_pr_or_version(
606 606 pull_request_version, context=diff_context)
607 607 new_diff = self._get_diff_from_pr_or_version(
608 608 pull_request, context=diff_context)
609 609
610 610 old_diff_data = diffs.DiffProcessor(old_diff)
611 611 old_diff_data.prepare()
612 612 new_diff_data = diffs.DiffProcessor(new_diff)
613 613 new_diff_data.prepare()
614 614
615 615 return old_diff_data, new_diff_data
616 616
    def _link_comments_to_version(self, pull_request_version):
        """
        Link all unlinked comments of this pull request to the given version.

        :param pull_request_version: The `PullRequestVersion` to which
            the comments shall be linked.

        """
        pull_request = pull_request_version.pull_request
        comments = ChangesetComment.query().filter(
            # TODO: johbo: Should we query for the repo at all here?
            # Pending decision on how comments of PRs are to be related
            # to either the source repo, the target repo or no repo at all.
            ChangesetComment.repo_id == pull_request.target_repo.repo_id,
            ChangesetComment.pull_request == pull_request,
            # `== None` is intentional: SQLAlchemy translates it to IS NULL
            ChangesetComment.pull_request_version == None)

        # TODO: johbo: Find out why this breaks if it is done in a bulk
        # operation.
        for comment in comments:
            comment.pull_request_version_id = (
                pull_request_version.pull_request_version_id)
            Session().add(comment)
640 640
641 641 def _calculate_commit_id_changes(self, old_ids, new_ids):
642 642 added = new_ids.difference(old_ids)
643 643 common = old_ids.intersection(new_ids)
644 644 removed = old_ids.difference(new_ids)
645 645 return ChangeTuple(added, common, removed)
646 646
647 647 def _calculate_file_changes(self, old_diff_data, new_diff_data):
648 648
649 649 old_files = OrderedDict()
650 650 for diff_data in old_diff_data.parsed_diff:
651 651 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
652 652
653 653 added_files = []
654 654 modified_files = []
655 655 removed_files = []
656 656 for diff_data in new_diff_data.parsed_diff:
657 657 new_filename = diff_data['filename']
658 658 new_hash = md5_safe(diff_data['raw_diff'])
659 659
660 660 old_hash = old_files.get(new_filename)
661 661 if not old_hash:
662 662 # file is not present in old diff, means it's added
663 663 added_files.append(new_filename)
664 664 else:
665 665 if new_hash != old_hash:
666 666 modified_files.append(new_filename)
667 667 # now remove a file from old, since we have seen it already
668 668 del old_files[new_filename]
669 669
670 670 # removed files is when there are present in old, but not in NEW,
671 671 # since we remove old files that are present in new diff, left-overs
672 672 # if any should be the removed files
673 673 removed_files.extend(old_files.keys())
674 674
675 675 return FileChangeTuple(added_files, modified_files, removed_files)
676 676
    def _render_update_message(self, changes, file_changes):
        """
        render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
        so it's always looking the same disregarding on which default
        renderer system is using.

        :param changes: changes named tuple
        :param file_changes: file changes named tuple

        """
        # label shown for the status the PR is reset to after an update
        new_status = ChangesetStatus.get_status_lbl(
            ChangesetStatus.STATUS_UNDER_REVIEW)

        changed_files = (
            file_changes.added + file_changes.modified + file_changes.removed)

        params = {
            'under_review_label': new_status,
            'added_commits': changes.added,
            'removed_commits': changes.removed,
            'changed_files': changed_files,
            'added_files': file_changes.added,
            'modified_files': file_changes.modified,
            'removed_files': file_changes.removed,
        }
        # always render through the RST template so output is uniform
        renderer = RstTemplateRenderer()
        return renderer.render('pull_request_update.mako', **params)
704 704
    def edit(self, pull_request, title, description):
        """
        Update title and description of an open pull request.

        :param pull_request: pull request (or its id) to edit
        :param title: new title; falsy values leave the title untouched
        :param description: new description (always overwritten)
        :raises ValueError: when the pull request is already closed
        """
        pull_request = self.__get_pull_request(pull_request)
        if pull_request.is_closed():
            raise ValueError('This pull request is closed')
        if title:
            pull_request.title = title
        pull_request.description = description
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
714 714
    def update_reviewers(self, pull_request, reviewers_ids):
        """
        Sync the reviewer set of *pull_request* to exactly *reviewers_ids*.

        Missing reviewers are added and notified; reviewers no longer listed
        are removed.  The PR timestamp is bumped only when membership
        actually changed.

        :param reviewers_ids: iterable of user ids that should be reviewers
        :return: tuple ``(ids_to_add, ids_to_remove)``
        """
        reviewers_ids = set(reviewers_ids)
        pull_request = self.__get_pull_request(pull_request)
        current_reviewers = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request ==
                    pull_request).all()
        current_reviewers_ids = set([x.user.user_id for x in current_reviewers])

        ids_to_add = reviewers_ids.difference(current_reviewers_ids)
        ids_to_remove = current_reviewers_ids.difference(reviewers_ids)

        log.debug("Adding %s reviewers", ids_to_add)
        log.debug("Removing %s reviewers", ids_to_remove)
        changed = False
        for uid in ids_to_add:
            changed = True
            _usr = self._get_user(uid)
            reviewer = PullRequestReviewers(_usr, pull_request)
            Session().add(reviewer)

        # only newly added reviewers get a notification
        self.notify_reviewers(pull_request, ids_to_add)

        for uid in ids_to_remove:
            changed = True
            reviewer = PullRequestReviewers.query()\
                .filter(PullRequestReviewers.user_id == uid,
                        PullRequestReviewers.pull_request == pull_request)\
                .scalar()
            if reviewer:
                Session().delete(reviewer)
        if changed:
            pull_request.updated_on = datetime.datetime.now()
            Session().add(pull_request)

        return ids_to_add, ids_to_remove
750 750
    def get_url(self, pull_request):
        """Return the fully qualified show-url of *pull_request*."""
        # repo_name goes through safe_str — presumably the routes url
        # generator cannot handle unicode values (part of the slack
        # commit/pr link fix) — TODO confirm against URLGenerator
        return h.url('pullrequest_show',
                     repo_name=safe_str(pull_request.target_repo.repo_name),
                     pull_request_id=pull_request.pull_request_id,
                     qualified=True)
756 756
    def notify_reviewers(self, pull_request, reviewers_ids):
        """
        Create in-app notifications and emails informing *reviewers_ids*
        about *pull_request*.  No-op when the id list is empty.
        """
        # notification to reviewers
        if not reviewers_ids:
            return

        pull_request_obj = pull_request
        # get the current participants of this pull request
        recipients = reviewers_ids
        notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        pr_url = h.url(
            'pullrequest_show',
            repo_name=pr_target_repo.repo_name,
            pull_request_id=pull_request_obj.pull_request_id,
            qualified=True,)

        # set some variables for email notification
        pr_target_repo_url = h.url(
            'summary_home',
            repo_name=pr_target_repo.repo_name,
            qualified=True)

        pr_source_repo_url = h.url(
            'summary_home',
            repo_name=pr_source_repo.repo_name,
            qualified=True)

        # pull request specifics
        pull_request_commits = [
            (x.raw_id, x.message)
            for x in map(pr_source_repo.get_commit, pull_request.revisions)]

        kwargs = {
            'user': pull_request.author,
            'pull_request': pull_request_obj,
            'pull_request_commits': pull_request_commits,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,
        }

        # pre-generate the subject for notification itself
        (subject,
         _h, _e,  # we don't care about those
         body_plaintext) = EmailNotificationModel().render_email(
            notification_type, **kwargs)

        # create notification objects, and emails
        NotificationModel().create(
            created_by=pull_request.author,
            notification_subject=subject,
            notification_body=body_plaintext,
            notification_type=notification_type,
            recipients=recipients,
            email_kwargs=kwargs,
        )
821 821
    def delete(self, pull_request):
        """Delete *pull_request* and drop its merge workspace."""
        pull_request = self.__get_pull_request(pull_request)
        self._cleanup_merge_workspace(pull_request)
        Session().delete(pull_request)
826 826
    def close_pull_request(self, pull_request, user):
        """
        Mark *pull_request* closed, clean up its merge workspace and fire
        the 'close' pull-request hook plus a journal entry for *user*.
        """
        pull_request = self.__get_pull_request(pull_request)
        self._cleanup_merge_workspace(pull_request)
        pull_request.status = PullRequest.STATUS_CLOSED
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        self._trigger_pull_request_hook(
            pull_request, pull_request.author, 'close')
        self._log_action('user_closed_pull_request', user, pull_request)
836 836
    def close_pull_request_with_comment(self, pull_request, user, repo,
                                        message=None):
        """
        Close *pull_request* as rejected, leaving a status-change comment.

        :param message: optional closing comment text; a default
            status-change message is generated when omitted
        """
        status = ChangesetStatus.STATUS_REJECTED

        if not message:
            message = (
                _('Status change %(transition_icon)s %(status)s') % {
                    'transition_icon': '>',
                    'status': ChangesetStatus.get_status_lbl(status)})

        internal_message = _('Closing with') + ' ' + message

        comm = ChangesetCommentsModel().create(
            text=internal_message,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            status_change=ChangesetStatus.get_status_lbl(status),
            closing_pr=True
        )

        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comm,
            pull_request=pull_request.pull_request_id
        )
        # make sure comment and status rows exist before closing the PR
        Session().flush()

        PullRequestModel().close_pull_request(
            pull_request.pull_request_id, user)
871 871
    def merge_status(self, pull_request):
        """
        Return ``(merge_possible, message)`` for *pull_request*.

        Merging is refused outright when server-side merging is disabled,
        the PR is closed, or the repos have incompatible requirements;
        otherwise a dry-run merge decides the answer.
        """
        if not self._is_merge_enabled(pull_request):
            return False, _('Server-side pull request merging is disabled.')
        if pull_request.is_closed():
            return False, _('This pull request is closed.')
        merge_possible, msg = self._check_repo_requirements(
            target=pull_request.target_repo, source=pull_request.source_repo)
        if not merge_possible:
            return merge_possible, msg

        try:
            resp = self._try_merge(pull_request)
            status = resp.possible, self.merge_status_message(
                resp.failure_reason)
        except NotImplementedError:
            # the vcs backend has no merge support at all
            status = False, _('Pull request merging is not supported.')

        return status
890 890
891 891 def _check_repo_requirements(self, target, source):
892 892 """
893 893 Check if `target` and `source` have compatible requirements.
894 894
895 895 Currently this is just checking for largefiles.
896 896 """
897 897 target_has_largefiles = self._has_largefiles(target)
898 898 source_has_largefiles = self._has_largefiles(source)
899 899 merge_possible = True
900 900 message = u''
901 901
902 902 if target_has_largefiles != source_has_largefiles:
903 903 merge_possible = False
904 904 if source_has_largefiles:
905 905 message = _(
906 906 'Target repository large files support is disabled.')
907 907 else:
908 908 message = _(
909 909 'Source repository large files support is disabled.')
910 910
911 911 return merge_possible, message
912 912
913 913 def _has_largefiles(self, repo):
914 914 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
915 915 'extensions', 'largefiles')
916 916 return largefiles_ui and largefiles_ui[0].active
917 917
    def _try_merge(self, pull_request):
        """
        Try to merge the pull request and return the merge status.
        """
        log.debug(
            "Trying out if the pull request %s can be merged.",
            pull_request.pull_request_id)
        target_vcs = pull_request.target_repo.scm_instance()
        # re-resolve the target ref so a moved branch head is picked up
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        target_locked = pull_request.target_repo.locked
        if target_locked and target_locked[0]:
            # a locked target always fails fast, no dry-run needed
            log.debug("The target repository is locked.")
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
        elif self._needs_merge_state_refresh(pull_request, target_ref):
            # cached state is stale -> run the dry-run merge again
            log.debug("Refreshing the merge status of the repository.")
            merge_state = self._refresh_merge_state(
                pull_request, target_vcs, target_ref)
        else:
            # cached state is current; reuse the stored failure reason
            possible = pull_request.\
                _last_merge_status == MergeFailureReason.NONE
            merge_state = MergeResponse(
                possible, False, None, pull_request._last_merge_status)
        log.debug("Merge response: %s", merge_state)
        return merge_state
945 945
946 946 def _refresh_reference(self, reference, vcs_repository):
947 947 if reference.type in ('branch', 'book'):
948 948 name_or_id = reference.name
949 949 else:
950 950 name_or_id = reference.commit_id
951 951 refreshed_commit = vcs_repository.get_commit(name_or_id)
952 952 refreshed_reference = Reference(
953 953 reference.type, reference.name, refreshed_commit.raw_id)
954 954 return refreshed_reference
955 955
956 956 def _needs_merge_state_refresh(self, pull_request, target_reference):
957 957 return not(
958 958 pull_request.revisions and
959 959 pull_request.revisions[0] == pull_request._last_merge_source_rev and
960 960 target_reference.commit_id == pull_request._last_merge_target_rev)
961 961
    def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
        """
        Run the dry-run merge again and persist the outcome on
        *pull_request* so later calls can reuse the cached state.
        """
        workspace_id = self._workspace_id(pull_request)
        source_vcs = pull_request.source_repo.scm_instance()
        use_rebase = self._use_rebase_for_merging(pull_request)
        merge_state = target_vcs.merge(
            target_reference, source_vcs, pull_request.source_ref_parts,
            workspace_id, dry_run=True, use_rebase=use_rebase)

        # Do not store the response if there was an unknown error.
        if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
            pull_request._last_merge_source_rev = pull_request.\
                source_ref_parts.commit_id
            pull_request._last_merge_target_rev = target_reference.commit_id
            pull_request._last_merge_status = (
                merge_state.failure_reason)
            Session().add(pull_request)
            Session().flush()

        return merge_state
981 981
982 982 def _workspace_id(self, pull_request):
983 983 workspace_id = 'pr-%s' % pull_request.pull_request_id
984 984 return workspace_id
985 985
986 986 def merge_status_message(self, status_code):
987 987 """
988 988 Return a human friendly error message for the given merge status code.
989 989 """
990 990 return self.MERGE_STATUS_MESSAGES[status_code]
991 991
    def generate_repo_data(self, repo, commit_id=None, branch=None,
                           bookmark=None):
        """
        Assemble the data used by the pull-request repo selector: owner
        info, a shortened description, and the repo's refs both raw and in
        select2 widget format.

        :param commit_id, branch, bookmark: preselection hints passed
            through to ``_get_repo_pullrequest_sources``
        """
        all_refs, selected_ref = \
            self._get_repo_pullrequest_sources(
                repo.scm_instance(), commit_id=commit_id,
                branch=branch, bookmark=bookmark)

        # reshape (refs, group_name) pairs into select2 option groups
        refs_select2 = []
        for element in all_refs:
            children = [{'id': x[0], 'text': x[1]} for x in element[0]]
            refs_select2.append({'text': element[1], 'children': children})

        return {
            'user': {
                'user_id': repo.user.user_id,
                'username': repo.user.username,
                'firstname': repo.user.firstname,
                'lastname': repo.user.lastname,
                'gravatar_link': h.gravatar_url(repo.user.email, 14),
            },
            # only the first line of the description is shown
            'description': h.chop_at_smart(repo.description, '\n'),
            'refs': {
                'all_refs': all_refs,
                'selected_ref': selected_ref,
                'select2_refs': refs_select2
            }
        }
1019 1019
1020 1020 def generate_pullrequest_title(self, source, source_ref, target):
1021 1021 return '{source}#{at_ref} to {target}'.format(
1022 1022 source=source,
1023 1023 at_ref=source_ref,
1024 1024 target=target,
1025 1025 )
1026 1026
1027 1027 def _cleanup_merge_workspace(self, pull_request):
1028 1028 # Merging related cleanup
1029 1029 target_scm = pull_request.target_repo.scm_instance()
1030 1030 workspace_id = 'pr-%s' % pull_request.pull_request_id
1031 1031
1032 1032 try:
1033 1033 target_scm.cleanup_merge_workspace(workspace_id)
1034 1034 except NotImplementedError:
1035 1035 pass
1036 1036
    def _get_repo_pullrequest_sources(
            self, repo, commit_id=None, branch=None, bookmark=None):
        """
        Return a structure with repo's interesting commits, suitable for
        the selectors in pullrequest controller

        :param commit_id: a commit that must be in the list somehow
            and selected by default
        :param branch: a branch that must be in the list and selected
            by default - even if closed
        :param bookmark: a bookmark that must be in the list and selected
        """

        commit_id = safe_str(commit_id) if commit_id else None
        branch = safe_str(branch) if branch else None
        bookmark = safe_str(bookmark) if bookmark else None

        selected = None

        # order matters: first source that has commit_id in it will be selected
        sources = []
        sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
        sources.append(('branch', repo.branches.items(), _('Branches'), branch))

        if commit_id:
            ref_commit = (h.short_id(commit_id), commit_id)
            sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))

        sources.append(
            ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
        )

        groups = []
        for group_key, ref_list, group_name, match in sources:
            group_refs = []
            for ref_name, ref_id in ref_list:
                ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
                group_refs.append((ref_key, ref_name))

                if not selected:
                    # select the first ref matching either the requested
                    # commit id or the group's name hint (branch/bookmark)
                    if set([commit_id, match]) & set([ref_id, ref_name]):
                        selected = ref_key

            if group_refs:
                groups.append((group_refs, group_name))

        if not selected:
            # an explicit hint that matched nothing is an error; otherwise
            # fall back to default branch, then to the newest commit
            ref = commit_id or branch or bookmark
            if ref:
                raise CommitDoesNotExistError(
                    'No commit refs could be found matching: %s' % ref)
            elif repo.DEFAULT_BRANCH_NAME in repo.branches:
                selected = 'branch:%s:%s' % (
                    repo.DEFAULT_BRANCH_NAME,
                    repo.branches[repo.DEFAULT_BRANCH_NAME]
                )
            elif repo.commit_ids:
                rev = repo.commit_ids[0]
                selected = 'rev:%s:%s' % (rev, rev)
            else:
                raise EmptyRepositoryError()
        return groups, selected
1099 1099
1100 1100 def get_diff(self, pull_request, context=DIFF_CONTEXT):
1101 1101 pull_request = self.__get_pull_request(pull_request)
1102 1102 return self._get_diff_from_pr_or_version(pull_request, context=context)
1103 1103
    def _get_diff_from_pr_or_version(self, pr_or_version, context):
        """
        Compute the diff between the target and source refs of
        *pr_or_version* entirely inside the source repo.
        """
        source_repo = pr_or_version.source_repo

        # we swap org/other ref since we run a simple diff on one repo
        target_ref_id = pr_or_version.target_ref_parts.commit_id
        source_ref_id = pr_or_version.source_ref_parts.commit_id
        target_commit = source_repo.get_commit(
            commit_id=safe_str(target_ref_id))
        source_commit = source_repo.get_commit(commit_id=safe_str(source_ref_id))
        vcs_repo = source_repo.scm_instance()

        # TODO: johbo: In the context of an update, we cannot reach
        # the old commit anymore with our normal mechanisms. It needs
        # some sort of special support in the vcs layer to avoid this
        # workaround.
        if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
                vcs_repo.alias == 'git'):
            source_commit.raw_id = safe_str(source_ref_id)

        log.debug('calculating diff between '
                  'source_ref:%s and target_ref:%s for repo `%s`',
                  target_ref_id, source_ref_id,
                  safe_unicode(vcs_repo.path))

        vcs_diff = vcs_repo.get_diff(
            commit1=target_commit, commit2=source_commit, context=context)
        return vcs_diff
1131 1131
1132 1132 def _is_merge_enabled(self, pull_request):
1133 1133 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1134 1134 settings = settings_model.get_general_settings()
1135 1135 return settings.get('rhodecode_pr_merge_enabled', False)
1136 1136
1137 1137 def _use_rebase_for_merging(self, pull_request):
1138 1138 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1139 1139 settings = settings_model.get_general_settings()
1140 1140 return settings.get('rhodecode_hg_use_rebase_for_merging', False)
1141 1141
1142 1142 def _log_action(self, action, user, pull_request):
1143 1143 action_logger(
1144 1144 user,
1145 1145 '{action}:{pr_id}'.format(
1146 1146 action=action, pr_id=pull_request.pull_request_id),
1147 1147 pull_request.target_repo)
1148 1148
1149 1149
# lightweight result containers used by the pull-request diffing helpers
ChangeTuple = namedtuple('ChangeTuple', ['added', 'common', 'removed'])

FileChangeTuple = namedtuple(
    'FileChangeTuple', ['added', 'modified', 'removed'])
@@ -1,934 +1,935 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Repository model for rhodecode
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import re
28 28 import shutil
29 29 import time
30 30 import traceback
31 31 from datetime import datetime
32 32
33 33 from sqlalchemy.sql import func
34 34 from sqlalchemy.sql.expression import true, or_
35 35 from zope.cachedescriptors.property import Lazy as LazyProperty
36 36
37 37 from rhodecode import events
38 38 from rhodecode.lib import helpers as h
39 39 from rhodecode.lib.auth import HasUserGroupPermissionAny
40 40 from rhodecode.lib.caching_query import FromCache
41 41 from rhodecode.lib.exceptions import AttachedForksError
42 42 from rhodecode.lib.hooks_base import log_delete_repository
43 43 from rhodecode.lib.utils import make_db_config
44 44 from rhodecode.lib.utils2 import (
45 45 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
46 46 get_current_rhodecode_user, safe_int, datetime_to_time, action_logger_generic)
47 47 from rhodecode.lib.vcs.backends import get_backend
48 48 from rhodecode.model import BaseModel
49 49 from rhodecode.model.db import (
50 50 Repository, UserRepoToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm,
51 51 UserGroupRepoGroupToPerm, User, Permission, Statistics, UserGroup,
52 52 RepoGroup, RepositoryField)
53 53 from rhodecode.model.scm import UserGroupList
54 54 from rhodecode.model.settings import VcsSettingsModel
55 55
56 56
57 57 log = logging.getLogger(__name__)
58 58
59 59
60 60 class RepoModel(BaseModel):
61 61
62 62 cls = Repository
63 63
64 64 def _get_user_group(self, users_group):
65 65 return self._get_instance(UserGroup, users_group,
66 66 callback=UserGroup.get_by_group_name)
67 67
68 68 def _get_repo_group(self, repo_group):
69 69 return self._get_instance(RepoGroup, repo_group,
70 70 callback=RepoGroup.get_by_group_name)
71 71
    def _create_default_perms(self, repository, private):
        """
        Build (but do not persist) the UserRepoToPerm row granting the
        default user access to *repository*.  Private repos always get
        'repository.none'; otherwise the default user's configured
        repository.* permission is used, falling back to 'repository.read'.
        """
        # create default permission
        default = 'repository.read'
        def_user = User.get_default_user()
        for p in def_user.user_perms:
            if p.permission.permission_name.startswith('repository.'):
                default = p.permission.permission_name
                break

        default_perm = 'repository.none' if private else default

        repo_to_perm = UserRepoToPerm()
        repo_to_perm.permission = Permission.get_by_key(default_perm)

        repo_to_perm.repository = repository
        repo_to_perm.user_id = def_user.user_id

        return repo_to_perm
90 90
    @LazyProperty
    def repos_path(self):
        """
        Gets the repositories root path from database
        """
        # computed once and cached on the instance by LazyProperty
        settings_model = VcsSettingsModel(sa=self.sa)
        return settings_model.get_repos_location()
98 98
    def get(self, repo_id, cache=False):
        """
        Fetch a Repository by primary key, or None when missing.

        :param repo_id: database id of the repository
        :param cache: when True, serve from the short-lived sql cache region
        """
        repo = self.sa.query(Repository) \
            .filter(Repository.repo_id == repo_id)

        if cache:
            repo = repo.options(FromCache("sql_cache_short",
                                          "get_repo_%s" % repo_id))
        return repo.scalar()
107 107
108 108 def get_repo(self, repository):
109 109 return self._get_repo(repository)
110 110
    def get_by_repo_name(self, repo_name, cache=False):
        """
        Fetch a Repository by its full name, or None when missing.

        :param repo_name: full repository name (including group path)
        :param cache: when True, serve from the short-lived sql cache region
        """
        repo = self.sa.query(Repository) \
            .filter(Repository.repo_name == repo_name)

        if cache:
            repo = repo.options(FromCache("sql_cache_short",
                                          "get_repo_%s" % repo_name))
        return repo.scalar()
119 119
120 120 def _extract_id_from_repo_name(self, repo_name):
121 121 if repo_name.startswith('/'):
122 122 repo_name = repo_name.lstrip('/')
123 123 by_id_match = re.match(r'^_(\d{1,})', repo_name)
124 124 if by_id_match:
125 125 return by_id_match.groups()[0]
126 126
127 127 def get_repo_by_id(self, repo_name):
128 128 """
129 129 Extracts repo_name by id from special urls.
130 130 Example url is _11/repo_name
131 131
132 132 :param repo_name:
133 133 :return: repo object if matched else None
134 134 """
135 135 try:
136 136 _repo_id = self._extract_id_from_repo_name(repo_name)
137 137 if _repo_id:
138 138 return self.get(_repo_id)
139 139 except Exception:
140 140 log.exception('Failed to extract repo_name from URL')
141 141
142 142 return None
143 143
    def get_url(self, repo):
        """Return the fully qualified summary-page url of *repo*."""
        # repo_name goes through safe_str — presumably the routes url
        # generator cannot handle unicode values (part of the slack
        # commit/pr link fix) — TODO confirm against URLGenerator
        return h.url('summary_home', repo_name=safe_str(repo.repo_name),
                     qualified=True)
146 147
    def get_users(self, name_contains=None, limit=20, only_active=True):
        """
        Return users as dicts shaped for select2-style widgets.

        :param name_contains: optional substring matched (ilike) against
            first name, last name or username
        :param limit: maximum number of users returned
        :param only_active: skip deactivated accounts when True
        """
        # TODO: mikhail: move this method to the UserModel.
        query = self.sa.query(User)
        if only_active:
            query = query.filter(User.active == true())

        if name_contains:
            ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
            query = query.filter(
                or_(
                    User.name.ilike(ilike_expression),
                    User.lastname.ilike(ilike_expression),
                    User.username.ilike(ilike_expression)
                )
            )
        query = query.limit(limit)
        users = query.all()

        _users = [
            {
                'id': user.user_id,
                'first_name': user.name,
                'last_name': user.lastname,
                'username': user.username,
                'icon_link': h.gravatar_url(user.email, 14),
                'value_display': h.person(user.email),
                'value': user.username,
                'value_type': 'user',
                'active': user.active,
            }
            for user in users
        ]
        return _users
180 181
    def get_user_groups(self, name_contains=None, limit=20, only_active=True):
        """
        Return user groups as dicts shaped for select2-style widgets,
        filtered down to groups the current context may read.

        :param name_contains: optional substring matched (ilike) against
            the group name; results ordered by name length then name
        :param limit: maximum number of groups returned
        :param only_active: skip inactive groups when True
        """
        # TODO: mikhail: move this method to the UserGroupModel.
        query = self.sa.query(UserGroup)
        if only_active:
            query = query.filter(UserGroup.users_group_active == true())

        if name_contains:
            ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
            query = query.filter(
                UserGroup.users_group_name.ilike(ilike_expression))\
                .order_by(func.length(UserGroup.users_group_name))\
                .order_by(UserGroup.users_group_name)

        query = query.limit(limit)
        user_groups = query.all()
        # filter results through permission checks
        perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
        user_groups = UserGroupList(user_groups, perm_set=perm_set)

        _groups = [
            {
                'id': group.users_group_id,
                # TODO: marcink figure out a way to generate the url for the
                # icon
                'icon_link': '',
                'value_display': 'Group: %s (%d members)' % (
                    group.users_group_name, len(group.members),),
                'value': group.users_group_name,
                'value_type': 'user_group',
                'active': group.users_group_active,
            }
            for group in user_groups
        ]
        return _groups
214 215
215 216 @classmethod
216 217 def update_repoinfo(cls, repositories=None):
217 218 if not repositories:
218 219 repositories = Repository.getAll()
219 220 for repo in repositories:
220 221 repo.update_commit_cache()
221 222
    def get_repos_as_dict(self, repo_list=None, admin=False,
                          super_user_actions=False):
        """
        Render *repo_list* into the list-of-dicts structure consumed by the
        repositories data table.  Each inner helper renders one cell
        through the ``data_table/_dt_elements.html`` partial templates.

        :param repo_list: iterable of Repository objects
        :param admin: when True the 'action' column is included
        :param super_user_actions: passed through to the actions partial
        """
        from rhodecode.lib.utils import PartialRenderer
        _render = PartialRenderer('data_table/_dt_elements.html')
        c = _render.c

        def quick_menu(repo_name):
            return _render('quick_menu', repo_name)

        def repo_lnk(name, rtype, rstate, private, fork_of):
            return _render('repo_name', name, rtype, rstate, private, fork_of,
                           short_name=not admin, admin=False)

        def last_change(last_change):
            return _render("last_change", last_change)

        def rss_lnk(repo_name):
            return _render("rss", repo_name)

        def atom_lnk(repo_name):
            return _render("atom", repo_name)

        def last_rev(repo_name, cs_cache):
            return _render('revision', repo_name, cs_cache.get('revision'),
                           cs_cache.get('raw_id'), cs_cache.get('author'),
                           cs_cache.get('message'))

        def desc(desc):
            # optionally stylify meta tags in the description
            if c.visual.stylify_metatags:
                return h.urlify_text(h.escaped_stylize(h.truncate(desc, 60)))
            else:
                return h.urlify_text(h.html_escape(h.truncate(desc, 60)))

        def state(repo_state):
            return _render("repo_state", repo_state)

        def repo_actions(repo_name):
            return _render('repo_actions', repo_name, super_user_actions)

        def user_profile(username):
            return _render('user_profile', username)

        repos_data = []
        for repo in repo_list:
            cs_cache = repo.changeset_cache
            row = {
                "menu": quick_menu(repo.repo_name),

                "name": repo_lnk(repo.repo_name, repo.repo_type,
                                 repo.repo_state, repo.private, repo.fork),
                # *_raw variants are used by the table for sorting
                "name_raw": repo.repo_name.lower(),

                "last_change": last_change(repo.last_db_change),
                "last_change_raw": datetime_to_time(repo.last_db_change),

                "last_changeset": last_rev(repo.repo_name, cs_cache),
                "last_changeset_raw": cs_cache.get('revision'),

                "desc": desc(repo.description),
                "owner": user_profile(repo.user.username),

                "state": state(repo.repo_state),
                "rss": rss_lnk(repo.repo_name),

                "atom": atom_lnk(repo.repo_name),
            }
            if admin:
                row.update({
                    "action": repo_actions(repo.repo_name),
                })
            repos_data.append(row)

        return repos_data
296 297
    def _get_defaults(self, repo_name):
        """
        Gets information about repository, and returns a dict for
        usage in forms

        :param repo_name:
        :return: dict of form defaults, or None when the repo is unknown
        """

        repo_info = Repository.get_by_repo_name(repo_name)

        if repo_info is None:
            return None

        defaults = repo_info.get_dict()
        defaults['repo_name'] = repo_info.just_name

        groups = repo_info.groups_with_parents
        parent_group = groups[-1] if groups else None

        # we use -1 as this is how in HTML, we mark an empty group
        defaults['repo_group'] = getattr(parent_group, 'group_id', -1)

        # map form field names to db attributes; 'strip' means the db
        # attribute lacks the 'repo_' prefix used by the form
        keys_to_process = (
            {'k': 'repo_type', 'strip': False},
            {'k': 'repo_enable_downloads', 'strip': True},
            {'k': 'repo_description', 'strip': True},
            {'k': 'repo_enable_locking', 'strip': True},
            {'k': 'repo_landing_rev', 'strip': True},
            {'k': 'clone_uri', 'strip': False},
            {'k': 'repo_private', 'strip': True},
            {'k': 'repo_enable_statistics', 'strip': True}
        )

        for item in keys_to_process:
            attr = item['k']
            if item['strip']:
                attr = remove_prefix(item['k'], 'repo_')

            val = defaults[attr]
            if item['k'] == 'repo_landing_rev':
                # landing rev is stored as a (type, ref) pair
                val = ':'.join(defaults[attr])
            defaults[item['k']] = val
            if item['k'] == 'clone_uri':
                defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden

        # fill owner
        if repo_info.user:
            defaults.update({'user': repo_info.user.username})
        else:
            replacement_user = User.get_first_super_admin().username
            defaults.update({'user': replacement_user})

        # fill repository users
        for p in repo_info.repo_to_perm:
            defaults.update({'u_perm_%s' % p.user.user_id:
                             p.permission.permission_name})

        # fill repository groups
        for p in repo_info.users_group_to_perm:
            defaults.update({'g_perm_%s' % p.users_group.users_group_id:
                             p.permission.permission_name})

        return defaults
360 361
    def update(self, repo, **kwargs):
        """
        Update repository *repo* from the given keyword arguments
        (owner, group, flags, clone uri, extra fields) and rename it on
        the filesystem when the name changed.

        :param repo: repository (or identifier) to update
        :return: the updated Repository instance
        :raises Exception: re-raised after logging on any failure
        """
        try:
            cur_repo = self._get_repo(repo)
            source_repo_name = cur_repo.repo_name
            if 'user' in kwargs:
                cur_repo.user = User.get_by_username(kwargs['user'])

            if 'repo_group' in kwargs:
                cur_repo.group = RepoGroup.get(kwargs['repo_group'])
            log.debug('Updating repo %s with params:%s', cur_repo, kwargs)

            # (strip_prefix_flag, form key) pairs; stripped keys map to db
            # attributes without the 'repo_' prefix
            update_keys = [
                (1, 'repo_enable_downloads'),
                (1, 'repo_description'),
                (1, 'repo_enable_locking'),
                (1, 'repo_landing_rev'),
                (1, 'repo_private'),
                (1, 'repo_enable_statistics'),
                (0, 'clone_uri'),
                (0, 'fork_id')
            ]
            for strip, k in update_keys:
                if k in kwargs:
                    val = kwargs[k]
                    if strip:
                        k = remove_prefix(k, 'repo_')
                    if k == 'clone_uri':
                        from rhodecode.model.validators import Missing
                        _change = kwargs.get('clone_uri_change')
                        if _change in [Missing, 'OLD']:
                            # we don't change the value, so use original one
                            val = cur_repo.clone_uri

                    setattr(cur_repo, k, val)

            new_name = cur_repo.get_new_name(kwargs['repo_name'])
            cur_repo.repo_name = new_name

            # if private flag is set, reset default permission to NONE
            if kwargs.get('repo_private'):
                EMPTY_PERM = 'repository.none'
                RepoModel().grant_user_permission(
                    repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
                )

            # handle extra fields
            for field in filter(lambda k: k.startswith(RepositoryField.PREFIX),
                                kwargs):
                k = RepositoryField.un_prefix_key(field)
                ex_field = RepositoryField.get_by_key_name(
                    key=k, repo=cur_repo)
                if ex_field:
                    ex_field.field_value = kwargs[field]
                    self.sa.add(ex_field)
            self.sa.add(cur_repo)

            if source_repo_name != new_name:
                # rename repository
                self._rename_filesystem_repo(
                    old=source_repo_name, new=new_name)

            return cur_repo
        except Exception:
            log.error(traceback.format_exc())
            raise
426 427
    def _create_repo(self, repo_name, repo_type, description, owner,
                     private=False, clone_uri=None, repo_group=None,
                     landing_rev='rev:tip', fork_of=None,
                     copy_fork_permissions=False, enable_statistics=False,
                     enable_locking=False, enable_downloads=False,
                     copy_group_permissions=False,
                     state=Repository.STATE_PENDING):
        """
        Create repository inside database with PENDING state, this should be
        only executed by create() repo. With exception of importing existing
        repos

        :param repo_name: full repository name, possibly including its parent
            group path separated by ``Repository.NAME_SEP``
        :param repo_type: backend alias, e.g. 'hg' or 'git'
        :param description: repository description; falls back to repo name
        :param owner: Instance of User, user_id or username
        :param private: when set, default user permission is reset to
            'repository.none'
        :param clone_uri: optional remote URI the repository is cloned from
        :param repo_group: RepoGroup instance or its id
        :param landing_rev: default landing revision, e.g. 'rev:tip'
        :param fork_of: repository this one is a fork of, if any
        :param copy_fork_permissions: copy user/group perms from the fork
            parent instead of creating default ones
        :param copy_group_permissions: copy perms from the parent repo group
            (``group.*`` permissions are translated to ``repository.*``)
        :param state: initial ``Repository.STATE_*`` value
        :returns: the new Repository instance (flushed, not committed)
        :raises Exception: re-raises (after logging) anything that fails
        """
        from rhodecode.model.scm import ScmModel

        owner = self._get_user(owner)
        fork_of = self._get_repo(fork_of)
        repo_group = self._get_repo_group(safe_int(repo_group))

        try:
            repo_name = safe_unicode(repo_name)
            description = safe_unicode(description)
            # repo name is just a name of repository
            # while repo_name_full is a full qualified name that is combined
            # with name and path of group
            repo_name_full = repo_name
            repo_name = repo_name.split(Repository.NAME_SEP)[-1]

            new_repo = Repository()
            new_repo.repo_state = state
            new_repo.enable_statistics = False
            new_repo.repo_name = repo_name_full
            new_repo.repo_type = repo_type
            new_repo.user = owner
            new_repo.group = repo_group
            new_repo.description = description or repo_name
            new_repo.private = private
            new_repo.clone_uri = clone_uri
            new_repo.landing_rev = landing_rev

            new_repo.enable_statistics = enable_statistics
            new_repo.enable_locking = enable_locking
            new_repo.enable_downloads = enable_downloads

            # repos inside a group inherit the group's locking setting
            if repo_group:
                new_repo.enable_locking = repo_group.enable_locking

            if fork_of:
                parent_repo = fork_of
                new_repo.fork = parent_repo

            events.trigger(events.RepoPreCreateEvent(new_repo))

            self.sa.add(new_repo)

            EMPTY_PERM = 'repository.none'
            if fork_of and copy_fork_permissions:
                # mirror the fork parent's explicit user and group grants
                repo = fork_of
                user_perms = UserRepoToPerm.query() \
                    .filter(UserRepoToPerm.repository == repo).all()
                group_perms = UserGroupRepoToPerm.query() \
                    .filter(UserGroupRepoToPerm.repository == repo).all()

                for perm in user_perms:
                    UserRepoToPerm.create(
                        perm.user, new_repo, perm.permission)

                for perm in group_perms:
                    UserGroupRepoToPerm.create(
                        perm.users_group, new_repo, perm.permission)
                # in case we copy permissions and also set this repo to private
                # override the default user permission to make it a private
                # repo
                if private:
                    RepoModel(self.sa).grant_user_permission(
                        repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)

            elif repo_group and copy_group_permissions:
                # inherit permissions from the enclosing repository group
                user_perms = UserRepoGroupToPerm.query() \
                    .filter(UserRepoGroupToPerm.group == repo_group).all()

                group_perms = UserGroupRepoGroupToPerm.query() \
                    .filter(UserGroupRepoGroupToPerm.group == repo_group).all()

                for perm in user_perms:
                    # translate repo-group permission names to repo ones
                    perm_name = perm.permission.permission_name.replace(
                        'group.', 'repository.')
                    perm_obj = Permission.get_by_key(perm_name)
                    UserRepoToPerm.create(perm.user, new_repo, perm_obj)

                for perm in group_perms:
                    perm_name = perm.permission.permission_name.replace(
                        'group.', 'repository.')
                    perm_obj = Permission.get_by_key(perm_name)
                    UserGroupRepoToPerm.create(
                        perm.users_group, new_repo, perm_obj)

                if private:
                    RepoModel(self.sa).grant_user_permission(
                        repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)

            else:
                perm_obj = self._create_default_perms(new_repo, private)
                self.sa.add(perm_obj)

            # now automatically start following this repository as owner
            ScmModel(self.sa).toggle_following_repo(new_repo.repo_id,
                                                    owner.user_id)

            # we need to flush here, in order to check if database won't
            # throw any exceptions, create filesystem dirs at the very end
            self.sa.flush()
            events.trigger(events.RepoCreateEvent(new_repo))
            return new_repo

        except Exception:
            log.error(traceback.format_exc())
            raise
544 545
545 546 def create(self, form_data, cur_user):
546 547 """
547 548 Create repository using celery tasks
548 549
549 550 :param form_data:
550 551 :param cur_user:
551 552 """
552 553 from rhodecode.lib.celerylib import tasks, run_task
553 554 return run_task(tasks.create_repo, form_data, cur_user)
554 555
555 556 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
556 557 perm_deletions=None, check_perms=True,
557 558 cur_user=None):
558 559 if not perm_additions:
559 560 perm_additions = []
560 561 if not perm_updates:
561 562 perm_updates = []
562 563 if not perm_deletions:
563 564 perm_deletions = []
564 565
565 566 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
566 567
567 568 # update permissions
568 569 for member_id, perm, member_type in perm_updates:
569 570 member_id = int(member_id)
570 571 if member_type == 'user':
571 572 # this updates also current one if found
572 573 self.grant_user_permission(
573 574 repo=repo, user=member_id, perm=perm)
574 575 else: # set for user group
575 576 # check if we have permissions to alter this usergroup
576 577 member_name = UserGroup.get(member_id).users_group_name
577 578 if not check_perms or HasUserGroupPermissionAny(
578 579 *req_perms)(member_name, user=cur_user):
579 580 self.grant_user_group_permission(
580 581 repo=repo, group_name=member_id, perm=perm)
581 582
582 583 # set new permissions
583 584 for member_id, perm, member_type in perm_additions:
584 585 member_id = int(member_id)
585 586 if member_type == 'user':
586 587 self.grant_user_permission(
587 588 repo=repo, user=member_id, perm=perm)
588 589 else: # set for user group
589 590 # check if we have permissions to alter this usergroup
590 591 member_name = UserGroup.get(member_id).users_group_name
591 592 if not check_perms or HasUserGroupPermissionAny(
592 593 *req_perms)(member_name, user=cur_user):
593 594 self.grant_user_group_permission(
594 595 repo=repo, group_name=member_id, perm=perm)
595 596
596 597 # delete permissions
597 598 for member_id, perm, member_type in perm_deletions:
598 599 member_id = int(member_id)
599 600 if member_type == 'user':
600 601 self.revoke_user_permission(repo=repo, user=member_id)
601 602 else: # set for user group
602 603 # check if we have permissions to alter this usergroup
603 604 member_name = UserGroup.get(member_id).users_group_name
604 605 if not check_perms or HasUserGroupPermissionAny(
605 606 *req_perms)(member_name, user=cur_user):
606 607 self.revoke_user_group_permission(
607 608 repo=repo, group_name=member_id)
608 609
609 610 def create_fork(self, form_data, cur_user):
610 611 """
611 612 Simple wrapper into executing celery task for fork creation
612 613
613 614 :param form_data:
614 615 :param cur_user:
615 616 """
616 617 from rhodecode.lib.celerylib import tasks, run_task
617 618 return run_task(tasks.create_repo_fork, form_data, cur_user)
618 619
619 620 def delete(self, repo, forks=None, fs_remove=True, cur_user=None):
620 621 """
621 622 Delete given repository, forks parameter defines what do do with
622 623 attached forks. Throws AttachedForksError if deleted repo has attached
623 624 forks
624 625
625 626 :param repo:
626 627 :param forks: str 'delete' or 'detach'
627 628 :param fs_remove: remove(archive) repo from filesystem
628 629 """
629 630 if not cur_user:
630 631 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
631 632 repo = self._get_repo(repo)
632 633 if repo:
633 634 if forks == 'detach':
634 635 for r in repo.forks:
635 636 r.fork = None
636 637 self.sa.add(r)
637 638 elif forks == 'delete':
638 639 for r in repo.forks:
639 640 self.delete(r, forks='delete')
640 641 elif [f for f in repo.forks]:
641 642 raise AttachedForksError()
642 643
643 644 old_repo_dict = repo.get_dict()
644 645 events.trigger(events.RepoPreDeleteEvent(repo))
645 646 try:
646 647 self.sa.delete(repo)
647 648 if fs_remove:
648 649 self._delete_filesystem_repo(repo)
649 650 else:
650 651 log.debug('skipping removal from filesystem')
651 652 old_repo_dict.update({
652 653 'deleted_by': cur_user,
653 654 'deleted_on': time.time(),
654 655 })
655 656 log_delete_repository(**old_repo_dict)
656 657 events.trigger(events.RepoDeleteEvent(repo))
657 658 except Exception:
658 659 log.error(traceback.format_exc())
659 660 raise
660 661
661 662 def grant_user_permission(self, repo, user, perm):
662 663 """
663 664 Grant permission for user on given repository, or update existing one
664 665 if found
665 666
666 667 :param repo: Instance of Repository, repository_id, or repository name
667 668 :param user: Instance of User, user_id or username
668 669 :param perm: Instance of Permission, or permission_name
669 670 """
670 671 user = self._get_user(user)
671 672 repo = self._get_repo(repo)
672 673 permission = self._get_perm(perm)
673 674
674 675 # check if we have that permission already
675 676 obj = self.sa.query(UserRepoToPerm) \
676 677 .filter(UserRepoToPerm.user == user) \
677 678 .filter(UserRepoToPerm.repository == repo) \
678 679 .scalar()
679 680 if obj is None:
680 681 # create new !
681 682 obj = UserRepoToPerm()
682 683 obj.repository = repo
683 684 obj.user = user
684 685 obj.permission = permission
685 686 self.sa.add(obj)
686 687 log.debug('Granted perm %s to %s on %s', perm, user, repo)
687 688 action_logger_generic(
688 689 'granted permission: {} to user: {} on repo: {}'.format(
689 690 perm, user, repo), namespace='security.repo')
690 691 return obj
691 692
692 693 def revoke_user_permission(self, repo, user):
693 694 """
694 695 Revoke permission for user on given repository
695 696
696 697 :param repo: Instance of Repository, repository_id, or repository name
697 698 :param user: Instance of User, user_id or username
698 699 """
699 700
700 701 user = self._get_user(user)
701 702 repo = self._get_repo(repo)
702 703
703 704 obj = self.sa.query(UserRepoToPerm) \
704 705 .filter(UserRepoToPerm.repository == repo) \
705 706 .filter(UserRepoToPerm.user == user) \
706 707 .scalar()
707 708 if obj:
708 709 self.sa.delete(obj)
709 710 log.debug('Revoked perm on %s on %s', repo, user)
710 711 action_logger_generic(
711 712 'revoked permission from user: {} on repo: {}'.format(
712 713 user, repo), namespace='security.repo')
713 714
714 715 def grant_user_group_permission(self, repo, group_name, perm):
715 716 """
716 717 Grant permission for user group on given repository, or update
717 718 existing one if found
718 719
719 720 :param repo: Instance of Repository, repository_id, or repository name
720 721 :param group_name: Instance of UserGroup, users_group_id,
721 722 or user group name
722 723 :param perm: Instance of Permission, or permission_name
723 724 """
724 725 repo = self._get_repo(repo)
725 726 group_name = self._get_user_group(group_name)
726 727 permission = self._get_perm(perm)
727 728
728 729 # check if we have that permission already
729 730 obj = self.sa.query(UserGroupRepoToPerm) \
730 731 .filter(UserGroupRepoToPerm.users_group == group_name) \
731 732 .filter(UserGroupRepoToPerm.repository == repo) \
732 733 .scalar()
733 734
734 735 if obj is None:
735 736 # create new
736 737 obj = UserGroupRepoToPerm()
737 738
738 739 obj.repository = repo
739 740 obj.users_group = group_name
740 741 obj.permission = permission
741 742 self.sa.add(obj)
742 743 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
743 744 action_logger_generic(
744 745 'granted permission: {} to usergroup: {} on repo: {}'.format(
745 746 perm, group_name, repo), namespace='security.repo')
746 747
747 748 return obj
748 749
749 750 def revoke_user_group_permission(self, repo, group_name):
750 751 """
751 752 Revoke permission for user group on given repository
752 753
753 754 :param repo: Instance of Repository, repository_id, or repository name
754 755 :param group_name: Instance of UserGroup, users_group_id,
755 756 or user group name
756 757 """
757 758 repo = self._get_repo(repo)
758 759 group_name = self._get_user_group(group_name)
759 760
760 761 obj = self.sa.query(UserGroupRepoToPerm) \
761 762 .filter(UserGroupRepoToPerm.repository == repo) \
762 763 .filter(UserGroupRepoToPerm.users_group == group_name) \
763 764 .scalar()
764 765 if obj:
765 766 self.sa.delete(obj)
766 767 log.debug('Revoked perm to %s on %s', repo, group_name)
767 768 action_logger_generic(
768 769 'revoked permission from usergroup: {} on repo: {}'.format(
769 770 group_name, repo), namespace='security.repo')
770 771
771 772 def delete_stats(self, repo_name):
772 773 """
773 774 removes stats for given repo
774 775
775 776 :param repo_name:
776 777 """
777 778 repo = self._get_repo(repo_name)
778 779 try:
779 780 obj = self.sa.query(Statistics) \
780 781 .filter(Statistics.repository == repo).scalar()
781 782 if obj:
782 783 self.sa.delete(obj)
783 784 except Exception:
784 785 log.error(traceback.format_exc())
785 786 raise
786 787
787 788 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
788 789 field_type='str', field_desc=''):
789 790
790 791 repo = self._get_repo(repo_name)
791 792
792 793 new_field = RepositoryField()
793 794 new_field.repository = repo
794 795 new_field.field_key = field_key
795 796 new_field.field_type = field_type # python type
796 797 new_field.field_value = field_value
797 798 new_field.field_desc = field_desc
798 799 new_field.field_label = field_label
799 800 self.sa.add(new_field)
800 801 return new_field
801 802
802 803 def delete_repo_field(self, repo_name, field_key):
803 804 repo = self._get_repo(repo_name)
804 805 field = RepositoryField.get_by_key_name(field_key, repo)
805 806 if field:
806 807 self.sa.delete(field)
807 808
    def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
                                clone_uri=None, repo_store_location=None,
                                use_global_config=False):
        """
        makes repository on filesystem. It's group aware means it'll create
        a repository within a group, and alter the paths accordingly of
        group location

        :param repo_name: plain repository name; must NOT contain any group
            separators
        :param repo_type: backend alias ('hg', 'git', ...)
        :param repo_group: RepoGroup instance or a plain parent path string
        :param clone_uri: optional source URL to clone from
        :param repo_store_location: explicit filesystem path overriding the
            default <repos_path>/<group>/<name> layout
        :param use_global_config: when set, don't scope the vcs config to
            this repository
        :raises ValueError: if repo_name contains group separators
        :raises Exception: if the target path already is a repo or a group
        """
        from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
        from rhodecode.model.scm import ScmModel

        if Repository.NAME_SEP in repo_name:
            raise ValueError(
                'repo_name must not contain groups got `%s`' % repo_name)

        if isinstance(repo_group, RepoGroup):
            new_parent_path = os.sep.join(repo_group.full_path_splitted)
        else:
            new_parent_path = repo_group or ''

        if repo_store_location:
            _paths = [repo_store_location]
        else:
            _paths = [self.repos_path, new_parent_path, repo_name]
        # we need to make it str for mercurial
        repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))

        # check if this path is not a repository
        if is_valid_repo(repo_path, self.repos_path):
            raise Exception('This path %s is a valid repository' % repo_path)

        # check if this path is a group
        if is_valid_repo_group(repo_path, self.repos_path):
            raise Exception('This path %s is a valid group' % repo_path)

        log.info('creating repo %s in %s from url: `%s`',
                 repo_name, safe_unicode(repo_path),
                 obfuscate_url_pw(clone_uri))

        backend = get_backend(repo_type)

        # scope the db config to the fully qualified repo name unless the
        # caller asked for the global configuration
        config_repo = None if use_global_config else repo_name
        if config_repo and new_parent_path:
            config_repo = Repository.NAME_SEP.join(
                (new_parent_path, config_repo))
        config = make_db_config(clear_session=False, repo=config_repo)
        config.set('extensions', 'largefiles', '')

        # patch and reset hooks section of UI config to not run any
        # hooks on creating remote repo
        config.clear_section('hooks')

        # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
        if repo_type == 'git':
            repo = backend(
                repo_path, config=config, create=True, src_url=clone_uri,
                bare=True)
        else:
            repo = backend(
                repo_path, config=config, create=True, src_url=clone_uri)

        ScmModel().install_hooks(repo, repo_type=repo_type)

        log.debug('Created repo %s with %s backend',
                  safe_unicode(repo_name), safe_unicode(repo_type))
        return repo
880 881
881 882 def _rename_filesystem_repo(self, old, new):
882 883 """
883 884 renames repository on filesystem
884 885
885 886 :param old: old name
886 887 :param new: new name
887 888 """
888 889 log.info('renaming repo from %s to %s', old, new)
889 890
890 891 old_path = os.path.join(self.repos_path, old)
891 892 new_path = os.path.join(self.repos_path, new)
892 893 if os.path.isdir(new_path):
893 894 raise Exception(
894 895 'Was trying to rename to already existing dir %s' % new_path
895 896 )
896 897 shutil.move(old_path, new_path)
897 898
    def _delete_filesystem_repo(self, repo):
        """
        Removes repo from filesystem. The removal is actually made by
        adding a rm__ prefix to the dir, and renaming the internal .hg/.git
        dirs so this repository is no longer valid for rhodecode; it can be
        undeleted later on by reverting the renames on this repository.

        :param repo: repo object
        """
        rm_path = os.path.join(self.repos_path, repo.repo_name)
        repo_group = repo.group
        log.info("Removing repository %s", rm_path)
        # disable hg/git internal that it doesn't get detected as repo
        alias = repo.repo_type

        config = make_db_config(clear_session=False)
        config.set('extensions', 'largefiles', '')
        bare = getattr(repo.scm_instance(config=config), 'bare', False)

        # skip this for bare git repos
        if not bare:
            # disable VCS repo
            vcs_path = os.path.join(rm_path, '.%s' % alias)
            if os.path.exists(vcs_path):
                shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))

        _now = datetime.now()
        _ms = str(_now.microsecond).rjust(6, '0')
        # timestamped target name keeps repeated delete cycles collision-free
        _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
                             repo.just_name)
        if repo_group:
            # if repository is in group, prefix the removal path with the group
            args = repo_group.full_path_splitted + [_d]
            _d = os.path.join(*args)

        if os.path.isdir(rm_path):
            shutil.move(rm_path, os.path.join(self.repos_path, _d))
@@ -1,26 +1,58 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 from rhodecode.lib.utils2 import obfuscate_url_pw
21 import pytest
22
23 from rhodecode.lib.utils2 import (
24 obfuscate_url_pw, get_routes_generator_for_server_url)
22 25
23 26
def test_obfuscate_url_pw():
    # a non-ascii path must not break the obfuscation helper
    unicode_url = u'/home/repos/malmö'
    assert obfuscate_url_pw(unicode_url)
30
31
@pytest.mark.parametrize('scheme', ['https', 'http'])
@pytest.mark.parametrize('domain', [
    'www.test.com', 'test.com', 'test.co.uk', '192.168.1.3'])
@pytest.mark.parametrize('port', [None, '80', '443', '999'])
@pytest.mark.parametrize('script_path', [None, '/', '/prefix', '/prefix/more'])
def test_routes_generator(pylonsapp, scheme, domain, port, script_path):
    """Generated URLs keep the server host but drop scheme-default ports."""
    server_url = '%s://%s' % (scheme, domain)
    if port is not None:
        server_url += ':' + port
    if script_path:
        server_url += script_path

    # the port is only expected in the output when it is explicit and not
    # the default for the scheme
    default_ports = {'https': '443', 'http': '80'}
    expected_url = '%s://%s' % (scheme, domain)
    if port is not None and port != default_ports[scheme]:
        expected_url += ':' + port

    if script_path:
        expected_url = (expected_url + script_path).rstrip('/')

    url_generator = get_routes_generator_for_server_url(server_url)
    assert url_generator(
        '/a_test_path', qualified=True) == expected_url + '/a_test_path'
General Comments 0
You need to be logged in to leave comments. Login now